diff --git a/.eslintrc.js b/.eslintrc.js
index 3029c55..41eeeaa 100644
--- a/.eslintrc.js
+++ b/.eslintrc.js
@@ -10,16 +10,11 @@ module.exports = {
'standard'
],
rules: {
- curly: [
- 'error',
- 'multi',
- 'consistent'
- ],
'no-throw-literal': 0,
'object-shorthand': [
'error',
'always'
],
- 'standard/no-callback-literal': 0
+ 'node/no-callback-literal': 0
}
}
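
Note: dropping the local curly: ['error', 'multi', 'consistent'] entry lets the 'standard' preset's own curly setting take effect ('multi-line' in eslint-config-standard), which is what every hunk below adapts to: a single-statement guard may sit on one line without braces, while any body that spans lines must be braced. A minimal sketch of the two shapes now expected, using identifiers from this diff:

    if (all && !ismoderator) return res.status(403).end() // one-liner, no braces

    if (!all) { // multi-line body, braces required
      this.where({ enabled: 1, userid: user.id })
    }
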
diff --git a/controllers/albumsController.js b/controllers/albumsController.js
index fcff037..850730f 100644
--- a/controllers/albumsController.js
+++ b/controllers/albumsController.js
@@ -29,14 +29,10 @@ const zipOptions = config.uploads.jsZipOptions
zipOptions.type = 'nodebuffer'
// Apply fallbacks for missing config values
-if (zipOptions.streamFiles === undefined)
- zipOptions.streamFiles = true
-if (zipOptions.compression === undefined)
- zipOptions.compression = 'DEFLATE'
-if (zipOptions.compressionOptions === undefined)
- zipOptions.compressionOptions = {}
-if (zipOptions.compressionOptions.level === undefined)
- zipOptions.compressionOptions.level = 1
+if (zipOptions.streamFiles === undefined) zipOptions.streamFiles = true
+if (zipOptions.compression === undefined) zipOptions.compression = 'DEFLATE'
+if (zipOptions.compressionOptions === undefined) zipOptions.compressionOptions = {}
+if (zipOptions.compressionOptions.level === undefined) zipOptions.compressionOptions.level = 1
self.zipEmitters = new Map()
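
The four undefined guards above give jsZipOptions per-key defaults without clobbering explicit config values. A shorter equivalent sketch, assuming a Node version with ES2021 logical assignment (note that ??= also fires on null, unlike the === undefined checks):

    zipOptions.streamFiles ??= true
    zipOptions.compression ??= 'DEFLATE'
    zipOptions.compressionOptions ??= {}
    zipOptions.compressionOptions.level ??= 1
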
@@ -51,8 +47,7 @@ class ZipEmitter extends EventEmitter {
self.getUniqueRandomName = async () => {
for (let i = 0; i < utils.idMaxTries; i++) {
const identifier = randomstring.generate(config.uploads.albumIdentifierLength)
- if (self.onHold.has(identifier))
- continue
+ if (self.onHold.has(identifier)) continue
// Put token on-hold (wait for it to be inserted to DB)
self.onHold.add(identifier)
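
The onHold Set guards the window between generating an identifier and committing it to the DB, so two concurrent requests cannot claim the same name. A sketch of the full reserve-check-release loop this hunk belongs to (the DB lookup and the release step are assumptions based on the surrounding code, not shown in this hunk):

    for (let i = 0; i < utils.idMaxTries; i++) {
      const identifier = randomstring.generate(config.uploads.albumIdentifierLength)
      if (self.onHold.has(identifier)) continue // another request reserved it first
      self.onHold.add(identifier) // reserve before the async DB check
      const taken = await db.table('albums').where('identifier', identifier).first()
      if (taken) {
        self.onHold.delete(identifier) // release, then retry with a new name
        continue
      }
      return identifier // caller releases it once the row is inserted
    }
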
@@ -80,15 +75,15 @@ self.list = async (req, res, next) => {
const all = req.headers.all === '1'
const sidebar = req.headers.sidebar
const ismoderator = perms.is(user, 'moderator')
- if (all && !ismoderator)
- return res.status(403).end()
+ if (all && !ismoderator) return res.status(403).end()
const filter = function () {
- if (!all)
+ if (!all) {
this.where({
enabled: 1,
userid: user.id
})
+ }
}
try {
@@ -97,8 +92,7 @@ self.list = async (req, res, next) => {
.where(filter)
.count('id as count')
.then(rows => rows[0].count)
- if (!count)
- return res.json({ success: true, albums: [], count })
+ if (!count) return res.json({ success: true, albums: [], count })
const fields = ['id', 'name']
@@ -116,8 +110,7 @@ self.list = async (req, res, next) => {
else if (offset < 0) offset = Math.max(0, Math.ceil(count / 25) + offset)
fields.push('identifier', 'enabled', 'timestamp', 'editedAt', 'download', 'public', 'description')
- if (all)
- fields.push('userid')
+ if (all) fields.push('userid')
albums = await db.table('albums')
.where(filter)
@@ -140,13 +133,14 @@ self.list = async (req, res, next) => {
.whereIn('albumid', Object.keys(albumids))
.select('albumid')
- for (const upload of uploads)
- if (albumids[upload.albumid])
+ for (const upload of uploads) {
+ if (albumids[upload.albumid]) {
albumids[upload.albumid].uploads++
+ }
+ }
// If we are not listing all albums, send response
- if (!all)
- return res.json({ success: true, albums, count, homeDomain })
+ if (!all) return res.json({ success: true, albums, count, homeDomain })
// Otherwise proceed to querying usernames
const userids = albums
@@ -156,8 +150,7 @@ self.list = async (req, res, next) => {
})
// If there are no albums attached to a registered user, send response
- if (userids.length === 0)
- return res.json({ success: true, albums, count, homeDomain })
+ if (!userids.length) return res.json({ success: true, albums, count, homeDomain })
// Query usernames of user IDs from currently selected files
const usersTable = await db.table('users')
@@ -165,8 +158,9 @@ self.list = async (req, res, next) => {
.select('id', 'username')
const users = {}
- for (const user of usersTable)
+ for (const user of usersTable) {
users[user.id] = user.username
+ }
return res.json({ success: true, albums, count, users, homeDomain })
} catch (error) {
@@ -183,8 +177,7 @@ self.create = async (req, res, next) => {
? utils.escape(req.body.name.trim().substring(0, self.titleMaxLength))
: ''
- if (!name)
- return res.json({ success: false, description: 'No album name specified.' })
+ if (!name) return res.json({ success: false, description: 'No album name specified.' })
try {
const album = await db.table('albums')
@@ -195,8 +188,7 @@ self.create = async (req, res, next) => {
})
.first()
- if (album)
- return res.json({ success: false, description: 'There is already an album with that name.' })
+ if (album) return res.json({ success: false, description: 'There is already an album with that name.' })
const identifier = await self.getUniqueRandomName()
@@ -235,8 +227,7 @@ self.disable = async (req, res, next) => {
const id = req.body.id
const purge = req.body.purge
- if (!Number.isFinite(id))
- return res.json({ success: false, description: 'No album specified.' })
+ if (!Number.isFinite(id)) return res.json({ success: false, description: 'No album specified.' })
try {
if (purge) {
@@ -249,8 +240,7 @@ self.disable = async (req, res, next) => {
if (files.length) {
const ids = files.map(file => file.id)
const failed = await utils.bulkDeleteFromDb('id', ids, user)
- if (failed.length)
- return res.json({ success: false, failed })
+ if (failed.length) return res.json({ success: false, failed })
}
utils.invalidateStatsCache('uploads')
}
@@ -291,24 +281,23 @@ self.edit = async (req, res, next) => {
const ismoderator = perms.is(user, 'moderator')
const id = parseInt(req.body.id)
- if (isNaN(id))
- return res.json({ success: false, description: 'No album specified.' })
+ if (isNaN(id)) return res.json({ success: false, description: 'No album specified.' })
const name = typeof req.body.name === 'string'
? utils.escape(req.body.name.trim().substring(0, self.titleMaxLength))
: ''
- if (!name)
- return res.json({ success: false, description: 'No name specified.' })
+ if (!name) return res.json({ success: false, description: 'No name specified.' })
const filter = function () {
this.where('id', id)
- if (!ismoderator)
+ if (!ismoderator) {
this.andWhere({
enabled: 1,
userid: user.id
})
+ }
}
try {
@@ -316,13 +305,14 @@ self.edit = async (req, res, next) => {
.where(filter)
.first()
- if (!album)
+ if (!album) {
return res.json({ success: false, description: 'Could not get album with the specified ID.' })
- else if (album.id !== id)
+ } else if (album.id !== id) {
return res.json({ success: false, description: 'Name already in use.' })
- else if (req._old && (album.id === id))
+ } else if (req._old && (album.id === id)) {
// Old rename API
return res.json({ success: false, description: 'You did not specify a new name.' })
+ }
const update = {
name,
@@ -333,11 +323,13 @@ self.edit = async (req, res, next) => {
: ''
}
- if (ismoderator)
+ if (ismoderator) {
update.enabled = Boolean(req.body.enabled)
+ }
- if (req.body.requestLink)
+ if (req.body.requestLink) {
update.identifier = await self.getUniqueRandomName()
+ }
await db.table('albums')
.where(filter)
@@ -353,10 +345,9 @@ self.edit = async (req, res, next) => {
const oldZip = path.join(paths.zips, `${album.identifier}.zip`)
const newZip = path.join(paths.zips, `${update.identifier}.zip`)
await paths.rename(oldZip, newZip)
- } catch (err) {
+ } catch (error) {
// Re-throw error
- if (err.code !== 'ENOENT')
- throw err
+ if (error.code !== 'ENOENT') throw error
}
return res.json({
@@ -380,8 +371,9 @@ self.rename = async (req, res, next) => {
self.get = async (req, res, next) => {
const identifier = req.params.identifier
- if (identifier === undefined)
+ if (identifier === undefined) {
return res.status(401).json({ success: false, description: 'No identifier provided.' })
+ }
try {
const album = await db.table('albums')
@@ -391,16 +383,17 @@ self.get = async (req, res, next) => {
})
.first()
- if (!album)
+ if (!album) {
return res.json({
success: false,
description: 'Album not found.'
})
- else if (album.public === 0)
+ } else if (album.public === 0) {
return res.status(403).json({
success: false,
description: 'This album is not available for public.'
})
+ }
const title = album.name
const files = await db.table('files')
@@ -412,8 +405,9 @@ self.get = async (req, res, next) => {
file.file = `${config.domain}/${file.name}`
const extname = utils.extname(file.name)
- if (utils.mayGenerateThumb(extname))
+ if (utils.mayGenerateThumb(extname)) {
file.thumb = `${config.domain}/thumbs/${file.name.slice(0, -extname.length)}.png`
+ }
}
return res.json({
@@ -432,17 +426,19 @@ self.generateZip = async (req, res, next) => {
const versionString = parseInt(req.query.v)
const identifier = req.params.identifier
- if (identifier === undefined)
+ if (identifier === undefined) {
return res.status(401).json({
success: false,
description: 'No identifier provided.'
})
+ }
- if (!config.uploads.generateZips)
+ if (!config.uploads.generateZips) {
return res.status(401).json({
success: false,
description: 'Zip generation disabled.'
})
+ }
try {
const album = await db.table('albums')
@@ -452,32 +448,35 @@ self.generateZip = async (req, res, next) => {
})
.first()
- if (!album)
+ if (!album) {
return res.json({ success: false, description: 'Album not found.' })
- else if (album.download === 0)
+ } else if (album.download === 0) {
return res.json({ success: false, description: 'Download for this album is disabled.' })
+ }
- if ((isNaN(versionString) || versionString <= 0) && album.editedAt)
+ if ((isNaN(versionString) || versionString <= 0) && album.editedAt) {
return res.redirect(`${album.identifier}?v=${album.editedAt}`)
+ }
- if (album.zipGeneratedAt > album.editedAt)
+ if (album.zipGeneratedAt > album.editedAt) {
try {
const filePath = path.join(paths.zips, `${identifier}.zip`)
await paths.access(filePath)
return res.download(filePath, `${album.name}.zip`)
} catch (error) {
// Re-throw error
- if (error.code !== 'ENOENT')
- throw error
+ if (error.code !== 'ENOENT') throw error
}
+ }
if (self.zipEmitters.has(identifier)) {
logger.log(`Waiting previous zip task for album: ${identifier}.`)
return self.zipEmitters.get(identifier).once('done', (filePath, fileName, json) => {
- if (filePath && fileName)
+ if (filePath && fileName) {
res.download(filePath, fileName)
- else if (json)
+ } else if (json) {
res.json(json)
+ }
})
}
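
zipEmitters makes zip generation single-flight per album: the first request does the work, and every request that arrives meanwhile just subscribes to the first task's 'done' event. A minimal standalone sketch of the pattern (hypothetical names, error handling elided):

    const { EventEmitter } = require('events')
    const inFlight = new Map()

    async function singleFlight (id, build) {
      if (inFlight.has(id)) { // a task for this id is already running
        return new Promise(resolve => inFlight.get(id).once('done', resolve))
      }
      const emitter = new EventEmitter()
      inFlight.set(id, emitter)
      const result = await build(id) // do the expensive work once
      inFlight.delete(id)
      emitter.emit('done', result) // wake everyone who queued behind us
      return result
    }
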
@@ -559,8 +558,9 @@ self.addFiles = async (req, res, next) => {
if (!user) return
const ids = req.body.ids
- if (!Array.isArray(ids) || !ids.length)
+ if (!Array.isArray(ids) || !ids.length) {
return res.json({ success: false, description: 'No files specified.' })
+ }
let albumid = parseInt(req.body.albumid)
if (isNaN(albumid) || albumid < 0) albumid = null
@@ -572,16 +572,18 @@ self.addFiles = async (req, res, next) => {
const album = await db.table('albums')
.where('id', albumid)
.where(function () {
- if (user.username !== 'root')
+ if (user.username !== 'root') {
this.where('userid', user.id)
+ }
})
.first()
- if (!album)
+ if (!album) {
return res.json({
success: false,
description: 'Album does not exist or it does not belong to the user.'
})
+ }
albumids.push(albumid)
}
@@ -597,8 +599,9 @@ self.addFiles = async (req, res, next) => {
.update('albumid', albumid)
files.forEach(file => {
- if (file.albumid && !albumids.includes(file.albumid))
+ if (file.albumid && !albumids.includes(file.albumid)) {
albumids.push(file.albumid)
+ }
})
await db.table('albums')
@@ -609,13 +612,14 @@ self.addFiles = async (req, res, next) => {
return res.json({ success: true, failed })
} catch (error) {
logger.error(error)
- if (failed.length === ids.length)
+ if (failed.length === ids.length) {
return res.json({
success: false,
description: `Could not ${albumid === null ? 'add' : 'remove'} any files ${albumid === null ? 'to' : 'from'} the album.`
})
- else
+ } else {
return res.status(500).json({ success: false, description: 'An unexpected error occurred. Try again?' })
+ }
}
}
diff --git a/controllers/authController.js b/controllers/authController.js
index c2e449b..753457c 100644
--- a/controllers/authController.js
+++ b/controllers/authController.js
@@ -34,31 +34,30 @@ self.verify = async (req, res, next) => {
const username = typeof req.body.username === 'string'
? req.body.username.trim()
: ''
- if (!username)
- return res.json({ success: false, description: 'No username provided.' })
+ if (!username) return res.json({ success: false, description: 'No username provided.' })
const password = typeof req.body.password === 'string'
? req.body.password.trim()
: ''
- if (!password)
- return res.json({ success: false, description: 'No password provided.' })
+ if (!password) return res.json({ success: false, description: 'No password provided.' })
try {
const user = await db.table('users')
.where('username', username)
.first()
- if (!user)
- return res.json({ success: false, description: 'Username does not exist.' })
+ if (!user) return res.json({ success: false, description: 'Username does not exist.' })
- if (user.enabled === false || user.enabled === 0)
+ if (user.enabled === false || user.enabled === 0) {
return res.json({ success: false, description: 'This account has been disabled.' })
+ }
const result = await bcrypt.compare(password, user.password)
- if (result === false)
+ if (result === false) {
return res.json({ success: false, description: 'Wrong password.' })
- else
+ } else {
return res.json({ success: true, token: user.token })
+ }
} catch (error) {
logger.error(error)
return res.status(500).json({ success: false, description: 'An unexpected error occurred. Try again?' })
@@ -66,34 +65,46 @@ self.verify = async (req, res, next) => {
}
self.register = async (req, res, next) => {
- if (config.enableUserAccounts === false)
+ if (config.enableUserAccounts === false) {
return res.json({ success: false, description: 'Registration is currently disabled.' })
+ }
const username = typeof req.body.username === 'string'
? req.body.username.trim()
: ''
- if (username.length < self.user.min || username.length > self.user.max)
- return res.json({ success: false, description: `Username must have ${self.user.min}-${self.user.max} characters.` })
+ if (username.length < self.user.min || username.length > self.user.max) {
+ return res.json({
+ success: false,
+ description: `Username must have ${self.user.min}-${self.user.max} characters.`
+ })
+ }
const password = typeof req.body.password === 'string'
? req.body.password.trim()
: ''
- if (password.length < self.pass.min || password.length > self.pass.max)
- return res.json({ success: false, description: `Password must have ${self.pass.min}-${self.pass.max} characters.` })
+ if (password.length < self.pass.min || password.length > self.pass.max) {
+ return res.json({
+ success: false,
+ description: `Password must have ${self.pass.min}-${self.pass.max} characters.`
+ })
+ }
try {
const user = await db.table('users')
.where('username', username)
.first()
- if (user)
- return res.json({ success: false, description: 'Username already exists.' })
+ if (user) return res.json({ success: false, description: 'Username already exists.' })
const hash = await bcrypt.hash(password, saltRounds)
const token = await tokens.generateUniqueToken()
- if (!token)
- return res.json({ success: false, description: 'Sorry, we could not allocate a unique token. Try again?' })
+ if (!token) {
+ return res.json({
+ success: false,
+ description: 'Sorry, we could not allocate a unique token. Try again?'
+ })
+ }
await db.table('users')
.insert({
@@ -121,8 +132,12 @@ self.changePassword = async (req, res, next) => {
const password = typeof req.body.password === 'string'
? req.body.password.trim()
: ''
- if (password.length < self.pass.min || password.length > self.pass.max)
- return res.json({ success: false, description: `Password must have ${self.pass.min}-${self.pass.max} characters.` })
+ if (password.length < self.pass.min || password.length > self.pass.max) {
+ return res.json({
+ success: false,
+ description: `Password must have ${self.pass.min}-${self.pass.max} characters.`
+ })
+ }
try {
const hash = await bcrypt.hash(password, saltRounds)
@@ -139,12 +154,13 @@ self.changePassword = async (req, res, next) => {
}
self.assertPermission = (user, target) => {
- if (!target)
+ if (!target) {
throw new Error('Could not get user with the specified ID.')
- else if (!perms.higher(user, target))
+ } else if (!perms.higher(user, target)) {
throw new Error('The user is in the same or higher group as you.')
- else if (target.username === 'root')
+ } else if (target.username === 'root') {
throw new Error('Root user may not be tampered with.')
+ }
}
self.createUser = async (req, res, next) => {
@@ -152,21 +168,28 @@ self.createUser = async (req, res, next) => {
if (!user) return
const isadmin = perms.is(user, 'admin')
- if (!isadmin)
- return res.status(403).end()
+ if (!isadmin) return res.status(403).end()
const username = typeof req.body.username === 'string'
? req.body.username.trim()
: ''
- if (username.length < self.user.min || username.length > self.user.max)
- return res.json({ success: false, description: `Username must have ${self.user.min}-${self.user.max} characters.` })
+ if (username.length < self.user.min || username.length > self.user.max) {
+ return res.json({
+ success: false,
+ description: `Username must have ${self.user.min}-${self.user.max} characters.`
+ })
+ }
let password = typeof req.body.password === 'string'
? req.body.password.trim()
: ''
if (password.length) {
- if (password.length < self.pass.min || password.length > self.pass.max)
- return res.json({ success: false, description: `Password must have ${self.pass.min}-${self.pass.max} characters.` })
+ if (password.length < self.pass.min || password.length > self.pass.max) {
+ return res.json({
+ success: false,
+ description: `Password must have ${self.pass.min}-${self.pass.max} characters.`
+ })
+ }
} else {
password = randomstring.generate(self.pass.rand)
}
@@ -186,14 +209,17 @@ self.createUser = async (req, res, next) => {
.where('username', username)
.first()
- if (user)
- return res.json({ success: false, description: 'Username already exists.' })
+ if (user) return res.json({ success: false, description: 'Username already exists.' })
const hash = await bcrypt.hash(password, saltRounds)
const token = await tokens.generateUniqueToken()
- if (!token)
- return res.json({ success: false, description: 'Sorry, we could not allocate a unique token. Try again?' })
+ if (!token) {
+ return res.json({
+ success: false,
+ description: 'Sorry, we could not allocate a unique token. Try again?'
+ })
+ }
await db.table('users')
.insert({
@@ -219,12 +245,10 @@ self.editUser = async (req, res, next) => {
if (!user) return
const isadmin = perms.is(user, 'admin')
- if (!isadmin)
- return res.status(403).end()
+ if (!isadmin) return res.status(403).end()
const id = parseInt(req.body.id)
- if (isNaN(id))
- return res.json({ success: false, description: 'No user specified.' })
+ if (isNaN(id)) return res.json({ success: false, description: 'No user specified.' })
try {
const target = await db.table('users')
@@ -236,17 +260,20 @@ self.editUser = async (req, res, next) => {
if (req.body.username !== undefined) {
update.username = String(req.body.username).trim()
- if (update.username.length < self.user.min || update.username.length > self.user.max)
+ if (update.username.length < self.user.min || update.username.length > self.user.max) {
throw new Error(`Username must have ${self.user.min}-${self.user.max} characters.`)
+ }
}
- if (req.body.enabled !== undefined)
+ if (req.body.enabled !== undefined) {
update.enabled = Boolean(req.body.enabled)
+ }
if (req.body.group !== undefined) {
update.permission = perms.permissions[req.body.group]
- if (typeof update.permission !== 'number' || update.permission < 0)
+ if (typeof update.permission !== 'number' || update.permission < 0) {
update.permission = target.permission
+ }
}
let password
@@ -282,13 +309,11 @@ self.deleteUser = async (req, res, next) => {
if (!user) return
const isadmin = perms.is(user, 'admin')
- if (!isadmin)
- return res.status(403).end()
+ if (!isadmin) return res.status(403).end()
const id = parseInt(req.body.id)
const purge = req.body.purge
- if (isNaN(id))
- return res.json({ success: false, description: 'No user specified.' })
+ if (isNaN(id)) return res.json({ success: false, description: 'No user specified.' })
try {
const target = await db.table('users')
@@ -304,8 +329,7 @@ self.deleteUser = async (req, res, next) => {
const fileids = files.map(file => file.id)
if (purge) {
const failed = await utils.bulkDeleteFromDb('id', fileids, user)
- if (failed.length)
- return res.json({ success: false, failed })
+ if (failed.length) return res.json({ success: false, failed })
utils.invalidateStatsCache('uploads')
} else {
// Clear out userid attribute from the files
@@ -315,7 +339,8 @@ self.deleteUser = async (req, res, next) => {
}
}
- // TODO: Figure out obstacles of just deleting the albums
+ // TODO: Figure out why we can't just delete the albums from the DB
+ // DISCLAIMER: Upstream always had it coded this way for some reason
const albums = await db.table('albums')
.where('userid', id)
.where('enabled', 1)
@@ -333,8 +358,7 @@ self.deleteUser = async (req, res, next) => {
try {
await paths.unlink(path.join(paths.zips, `${album.identifier}.zip`))
} catch (error) {
- if (error.code !== 'ENOENT')
- throw error
+ if (error.code !== 'ENOENT') throw error
}
}))
}
@@ -362,15 +386,13 @@ self.listUsers = async (req, res, next) => {
if (!user) return
const isadmin = perms.is(user, 'admin')
- if (!isadmin)
- return res.status(403).end()
+ if (!isadmin) return res.status(403).end()
try {
const count = await db.table('users')
.count('id as count')
.then(rows => rows[0].count)
- if (!count)
- return res.json({ success: true, users: [], count })
+ if (!count) return res.json({ success: true, users: [], count })
let offset = Number(req.params.page)
if (isNaN(offset)) offset = 0
diff --git a/controllers/multerStorageController.js b/controllers/multerStorageController.js
index ac5a930..c32fc10 100644
--- a/controllers/multerStorageController.js
+++ b/controllers/multerStorageController.js
@@ -36,8 +36,9 @@ DiskStorage.prototype._handleFile = function _handleFile (req, file, cb) {
file._chunksData.stream = fs.createWriteStream(finalPath, { flags: 'a' })
file._chunksData.stream.on('error', onerror)
}
- if (!file._chunksData.hasher)
+ if (!file._chunksData.hasher) {
file._chunksData.hasher = blake3.createHash()
+ }
outStream = file._chunksData.stream
hash = file._chunksData.hasher
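
Keeping one hasher per upload on _chunksData lets the hash grow incrementally as chunks arrive, so the finished file never has to be re-read. A sketch of that incremental use, assuming the blake3 npm package's hash API (chunks below stands in for the sequence of uploaded buffers):

    const blake3 = require('blake3')
    const hasher = blake3.createHash()
    for (const chunk of chunks) hasher.update(chunk) // feed chunks as they arrive
    const hex = hasher.digest('hex') // whole-file hash at finish time
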
diff --git a/controllers/pathsController.js b/controllers/pathsController.js
index 090dc6c..987c337 100644
--- a/controllers/pathsController.js
+++ b/controllers/pathsController.js
@@ -20,8 +20,9 @@ const fsFuncs = [
'writeFile'
]
-for (const fsFunc of fsFuncs)
+for (const fsFunc of fsFuncs) {
self[fsFunc] = promisify(fs[fsFunc])
+}
self.uploads = path.resolve(config.uploads.folder)
self.chunks = path.join(self.uploads, 'chunks')
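
The loop wraps each listed callback-style fs function into a promise-returning version on self, e.g. self.access = promisify(fs.access), so callers can write (somePath is illustrative):

    try {
      await self.access(somePath) // rejects with error.code 'ENOENT' if missing
    } catch (err) {
      if (err.code !== 'ENOENT') throw err
    }

Node 10+ also ships the equivalent built-ins under require('fs').promises.
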
@@ -51,7 +52,7 @@ const verify = [
self.init = async () => {
// Check & create directories
- for (const p of verify)
+ for (const p of verify) {
try {
await self.access(p)
} catch (err) {
@@ -59,10 +60,10 @@ self.init = async () => {
throw err
} else {
const mkdir = await self.mkdir(p)
- if (mkdir)
- logger.log(`Created directory: ${p}`)
+ if (mkdir) logger.log(`Created directory: ${p}`)
}
}
+ }
// Purge any leftover in chunks directory
const uuidDirs = await self.readdir(self.chunks)
@@ -74,8 +75,7 @@ self.init = async () => {
))
await self.rmdir(root)
}))
- if (uuidDirs.length)
- logger.log(`Purged ${uuidDirs.length} unfinished chunks`)
+ if (uuidDirs.length) logger.log(`Purged ${uuidDirs.length} unfinished chunks`)
}
module.exports = self
diff --git a/controllers/permissionController.js b/controllers/permissionController.js
index b20d42a..e6cd8e0 100644
--- a/controllers/permissionController.js
+++ b/controllers/permissionController.js
@@ -11,8 +11,7 @@ self.permissions = {
// returns true if user is in the group OR higher
self.is = (user, group) => {
// root bypass
- if (user.username === 'root')
- return true
+ if (user.username === 'root') return true
const permission = user.permission || 0
return permission >= self.permissions[group]
diff --git a/controllers/tokenController.js b/controllers/tokenController.js
index aaab402..6147e60 100644
--- a/controllers/tokenController.js
+++ b/controllers/tokenController.js
@@ -14,8 +14,7 @@ const self = {
self.generateUniqueToken = async () => {
for (let i = 0; i < self.tokenMaxTries; i++) {
const token = randomstring.generate(self.tokenLength)
- if (self.onHold.has(token))
- continue
+ if (self.onHold.has(token)) continue
// Put token on-hold (wait for it to be inserted to DB)
self.onHold.add(token)
@@ -40,8 +39,7 @@ self.verify = async (req, res, next) => {
? req.body.token.trim()
: ''
- if (!token)
- return res.json({ success: false, description: 'No token provided.' })
+ if (!token) return res.json({ success: false, description: 'No token provided.' })
try {
const user = await db.table('users')
@@ -49,8 +47,7 @@ self.verify = async (req, res, next) => {
.select('username', 'permission')
.first()
- if (!user)
- return res.json({ success: false, description: 'Invalid token.' })
+ if (!user) return res.json({ success: false, description: 'Invalid token.' })
const obj = {
success: true,
@@ -76,8 +73,12 @@ self.change = async (req, res, next) => {
if (!user) return
const newToken = await self.generateUniqueToken()
- if (!newToken)
- return res.json({ success: false, description: 'Sorry, we could not allocate a unique token. Try again?' })
+ if (!newToken) {
+ return res.json({
+ success: false,
+ description: 'Sorry, we could not allocate a unique token. Try again?'
+ })
+ }
try {
await db.table('users')
diff --git a/controllers/uploadController.js b/controllers/uploadController.js
index 33af0cb..9001461 100644
--- a/controllers/uploadController.js
+++ b/controllers/uploadController.js
@@ -54,10 +54,12 @@ class ChunksData {
}
onTimeout () {
- if (this.stream && !this.stream.writableEnded)
+ if (this.stream && !this.stream.writableEnded) {
this.stream.end()
- if (this.hasher)
+ }
+ if (this.hasher) {
this.hasher.dispose()
+ }
self.cleanUpChunks(this.uuid, true)
}
@@ -67,8 +69,9 @@ class ChunksData {
}
clearTimeout () {
- if (this._timeout)
+ if (this._timeout) {
clearTimeout(this._timeout)
+ }
}
}
@@ -79,8 +82,7 @@ const initChunks = async uuid => {
await paths.access(root)
} catch (err) {
// Re-throw error
- if (err && err.code !== 'ENOENT')
- throw err
+ if (err && err.code !== 'ENOENT') throw err
await paths.mkdir(root)
}
chunksData[uuid] = new ChunksData(uuid, root)
@@ -98,15 +100,16 @@ const executeMulter = multer({
// We don't use them for anything else.
fields: 6,
// Maximum number of file fields.
- // Chunked uploads still need to provide only 1 file field.
+ // Chunked uploads still need to provide ONLY 1 file field.
// Otherwise, only one of the files will end up being properly stored,
// and that will also be as a chunk.
files: maxFilesPerUpload
},
fileFilter (req, file, cb) {
file.extname = utils.extname(file.originalname)
- if (self.isExtensionFiltered(file.extname))
+ if (self.isExtensionFiltered(file.extname)) {
return cb(`${file.extname ? `${file.extname.substr(1).toUpperCase()} files` : 'Files with no extension'} are not permitted.`)
+ }
// Re-map Dropzone keys so people can manually use the API without prepending 'dz'
for (const key in req.body) {
@@ -115,17 +118,18 @@ const executeMulter = multer({
delete req.body[key]
}
- if (req.body.chunkindex !== undefined && !chunkedUploads)
+ if (req.body.chunkindex !== undefined && !chunkedUploads) {
return cb('Chunked uploads are disabled at the moment.')
- else
+ } else {
return cb(null, true)
+ }
},
storage: multerStorage({
destination (req, file, cb) {
// Is file a chunk!?
file._isChunk = chunkedUploads && req.body.uuid !== undefined && req.body.chunkindex !== undefined
- if (file._isChunk)
+ if (file._isChunk) {
initChunks(req.body.uuid)
.then(chunksData => {
file._chunksData = chunksData
@@ -135,8 +139,9 @@ const executeMulter = multer({
logger.error(error)
return cb('Could not process the chunked upload. Try again?')
})
- else
+ } else {
return cb(null, paths.uploads)
+ }
},
filename (req, file, cb) {
@@ -154,32 +159,30 @@ const executeMulter = multer({
self.isExtensionFiltered = extname => {
// If empty extension needs to be filtered
- if (!extname && config.filterNoExtension)
- return true
+ if (!extname && config.filterNoExtension) return true
// If there are extensions that have to be filtered
if (extname && extensionsFilter) {
const match = config.extensionsFilter.some(extension => extname === extension.toLowerCase())
const whitelist = config.extensionsFilterMode === 'whitelist'
- if ((!whitelist && match) || (whitelist && !match))
- return true
+ if ((!whitelist && match) || (whitelist && !match)) return true
}
return false
}
self.parseFileIdentifierLength = fileLength => {
- if (!config.uploads.fileIdentifierLength)
- return fileIdentifierLengthFallback
+ if (!config.uploads.fileIdentifierLength) return fileIdentifierLengthFallback
const parsed = parseInt(fileLength)
if (isNaN(parsed) ||
!fileIdentifierLengthChangeable ||
parsed < config.uploads.fileIdentifierLength.min ||
- parsed > config.uploads.fileIdentifierLength.max)
+ parsed > config.uploads.fileIdentifierLength.max) {
return config.uploads.fileIdentifierLength.default || fileIdentifierLengthFallback
- else
+ } else {
return parsed
+ }
}
self.getUniqueRandomName = async (length, extension) => {
@@ -194,8 +197,7 @@ self.getUniqueRandomName = async (length, extension) => {
utils.idSet.add(identifier)
// logger.log(`Added ${identifier} to identifiers cache`)
} else if (config.uploads.queryDbForFileCollisions) {
- if (self.onHold.has(identifier))
- continue
+ if (self.onHold.has(identifier)) continue
// Put token on-hold (wait for it to be inserted to DB)
self.onHold.add(identifier)
@@ -216,8 +218,7 @@ self.getUniqueRandomName = async (length, extension) => {
continue
} catch (error) {
// Re-throw error
- if (error & error.code !== 'ENOENT')
- throw error
+ if (error && error.code !== 'ENOENT') throw error
}
}
return name
@@ -227,21 +228,24 @@ self.getUniqueRandomName = async (length, extension) => {
}
self.parseUploadAge = age => {
- if (age === undefined || age === null)
+ if (age === undefined || age === null) {
return config.uploads.temporaryUploadAges[0]
+ }
+
const parsed = parseFloat(age)
- if (config.uploads.temporaryUploadAges.includes(parsed))
+ if (config.uploads.temporaryUploadAges.includes(parsed)) {
return parsed
- else
+ } else {
return null
+ }
}
self.parseStripTags = stripTags => {
- if (!config.uploads.stripTags)
- return false
+ if (!config.uploads.stripTags) return false
- if (config.uploads.stripTags.force || stripTags === undefined)
+ if (config.uploads.stripTags.force || stripTags === undefined) {
return config.uploads.stripTags.default
+ }
return Boolean(parseInt(stripTags))
}
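
parseUploadAge only honors ages whitelisted in config.uploads.temporaryUploadAges (in hours); anything else becomes null, which the callers below reject unless permanent uploads (age 0) are allowed. A worked sketch with an assumed config:

    // assuming config.uploads.temporaryUploadAges = [0, 24, 168] (hours, 0 = permanent)
    self.parseUploadAge(undefined) // => 0, the first entry acts as the default
    self.parseUploadAge('24') // => 24, whitelisted values pass through
    self.parseUploadAge('36') // => null, not whitelisted; rejected upstream
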
@@ -255,19 +259,20 @@ self.upload = async (req, res, next) => {
user = await db.table('users')
.where('token', req.headers.token)
.first()
- if (user && (user.enabled === false || user.enabled === 0))
+ if (user && (user.enabled === false || user.enabled === 0)) {
return res.json({ success: false, description: 'This account has been disabled.' })
+ }
}
let albumid = parseInt(req.headers.albumid || req.params.albumid)
- if (isNaN(albumid))
- albumid = null
+ if (isNaN(albumid)) albumid = null
let age = null
if (temporaryUploads) {
age = self.parseUploadAge(req.headers.age)
- if (!age && !config.uploads.temporaryUploadAges.includes(0))
+ if (!age && !config.uploads.temporaryUploadAges.includes(0)) {
return res.json({ success: false, description: 'Permanent uploads are not permitted.' })
+ }
}
try {
@@ -293,14 +298,16 @@ self.actuallyUploadFiles = async (req, res, user, albumid, age) => {
'LIMIT_FILE_SIZE',
'LIMIT_UNEXPECTED_FILE'
]
- if (suppress.includes(error.code))
+ if (suppress.includes(error.code)) {
throw error.toString()
- else
+ } else {
throw error
+ }
}
- if (!req.files || !req.files.length)
+ if (!req.files || !req.files.length) {
throw 'No files.'
+ }
// If chunked uploads is enabled and the uploaded file is a chunk, then just say that it was a success
const uuid = req.body.uuid
@@ -342,15 +349,18 @@ self.actuallyUploadFiles = async (req, res, user, albumid, age) => {
}
self.actuallyUploadUrls = async (req, res, user, albumid, age) => {
- if (!config.uploads.urlMaxSize)
+ if (!config.uploads.urlMaxSize) {
throw 'Upload by URLs is disabled at the moment.'
+ }
const urls = req.body.urls
- if (!urls || !(urls instanceof Array))
+ if (!urls || !(urls instanceof Array)) {
throw 'Missing "urls" property (array).'
+ }
- if (urls.length > maxFilesPerUpload)
+ if (urls.length > maxFilesPerUpload) {
throw `Maximum ${maxFilesPerUpload} URLs at a time.`
+ }
const downloaded = []
const infoMap = []
@@ -361,7 +371,7 @@ self.actuallyUploadUrls = async (req, res, user, albumid, age) => {
// Extensions filter
let filtered = false
- if (['blacklist', 'whitelist'].includes(config.uploads.urlExtensionsFilterMode))
+ if (['blacklist', 'whitelist'].includes(config.uploads.urlExtensionsFilterMode)) {
if (urlExtensionsFilter) {
const match = config.uploads.urlExtensionsFilter.some(extension => extname === extension.toLowerCase())
const whitelist = config.uploads.urlExtensionsFilterMode === 'whitelist'
@@ -369,16 +379,19 @@ self.actuallyUploadUrls = async (req, res, user, albumid, age) => {
} else {
throw 'Invalid extensions filter, please contact the site owner.'
}
- else
+ } else {
filtered = self.isExtensionFiltered(extname)
+ }
- if (filtered)
+ if (filtered) {
throw `${extname ? `${extname.substr(1).toUpperCase()} files` : 'Files with no extension'} are not permitted.`
+ }
- if (config.uploads.urlProxy)
+ if (config.uploads.urlProxy) {
url = config.uploads.urlProxy
.replace(/{url}/g, encodeURIComponent(url))
.replace(/{url-noprot}/g, encodeURIComponent(url.replace(/^https?:\/\//, '')))
+ }
const length = self.parseFileIdentifierLength(req.headers.filelength)
const name = await self.getUniqueRandomName(length, extname)
@@ -409,8 +422,9 @@ self.actuallyUploadUrls = async (req, res, user, albumid, age) => {
}
}))
- if (fetchFile.status !== 200)
+ if (fetchFile.status !== 200) {
throw `${fetchFile.status} ${fetchFile.statusText}`
+ }
infoMap.push({
path: destination,
@@ -440,25 +454,28 @@ self.actuallyUploadUrls = async (req, res, user, albumid, age) => {
} catch (error) {
// Unlink all downloaded files when at least one file threw an error from the for-loop
// Should continue even when encountering errors
- if (downloaded.length)
+ if (downloaded.length) {
await Promise.all(downloaded.map(file =>
utils.unlinkFile(file).catch(logger.error)
))
+ }
const errorString = error.toString()
const suppress = [
/ over limit:/
]
- if (!suppress.some(t => t.test(errorString)))
+ if (!suppress.some(t => t.test(errorString))) {
throw error
- else
+ } else {
throw errorString
+ }
}
}
self.finishChunks = async (req, res, next) => {
- if (!chunkedUploads)
+ if (!chunkedUploads) {
return res.json({ success: false, description: 'Chunked upload is disabled at the moment.' })
+ }
let user
if (config.private === true) {
@@ -468,8 +485,9 @@ self.finishChunks = async (req, res, next) => {
user = await db.table('users')
.where('token', req.headers.token)
.first()
- if (user && (user.enabled === false || user.enabled === 0))
+ if (user && (user.enabled === false || user.enabled === 0)) {
return res.json({ success: false, description: 'This account has been disabled.' })
+ }
}
try {
@@ -490,8 +508,9 @@ self.actuallyFinishChunks = async (req, res, user) => {
chunksData[file.uuid].chunks < 2
const files = req.body.files
- if (!Array.isArray(files) || !files.length || files.some(check))
+ if (!Array.isArray(files) || !files.length || files.some(check)) {
throw 'An unexpected error occurred.'
+ }
const infoMap = []
try {
@@ -499,30 +518,35 @@ self.actuallyFinishChunks = async (req, res, user) => {
// Close stream
chunksData[file.uuid].stream.end()
- if (chunksData[file.uuid].chunks > maxChunksCount)
+ if (chunksData[file.uuid].chunks > maxChunksCount) {
throw 'Too many chunks.'
+ }
file.extname = typeof file.original === 'string' ? utils.extname(file.original) : ''
- if (self.isExtensionFiltered(file.extname))
+ if (self.isExtensionFiltered(file.extname)) {
throw `${file.extname ? `${file.extname.substr(1).toUpperCase()} files` : 'Files with no extension'} are not permitted.`
+ }
if (temporaryUploads) {
file.age = self.parseUploadAge(file.age)
- if (!file.age && !config.uploads.temporaryUploadAges.includes(0))
+ if (!file.age && !config.uploads.temporaryUploadAges.includes(0)) {
throw 'Permanent uploads are not permitted.'
+ }
}
file.size = chunksData[file.uuid].stream.bytesWritten
- if (config.filterEmptyFile && file.size === 0)
+ if (config.filterEmptyFile && file.size === 0) {
throw 'Empty files are not allowed.'
- else if (file.size > maxSizeBytes)
+ } else if (file.size > maxSizeBytes) {
throw `File too large. Chunks are bigger than ${maxSize} MB.`
+ }
// Double-check file size
const tmpfile = path.join(chunksData[file.uuid].root, chunksData[file.uuid].filename)
const lstat = await paths.lstat(tmpfile)
- if (lstat.size !== file.size)
+ if (lstat.size !== file.size) {
throw `File size mismatched (${lstat.size} vs. ${file.size}).`
+ }
// Generate name
const length = self.parseFileIdentifierLength(file.filelength)
@@ -537,8 +561,7 @@ self.actuallyFinishChunks = async (req, res, user) => {
await self.cleanUpChunks(file.uuid).catch(logger.error)
let albumid = parseInt(file.albumid)
- if (isNaN(albumid))
- albumid = null
+ if (isNaN(albumid)) albumid = null
const data = {
filename: name,
@@ -566,16 +589,15 @@ self.actuallyFinishChunks = async (req, res, user) => {
} catch (error) {
// Dispose unfinished hasher and clean up leftover chunks
// Should continue even when encountering errors
- await Promise.all(files.map(file => {
- // eslint-disable-next-line curly
- if (chunksData[file.uuid] !== undefined) {
- try {
- if (chunksData[file.uuid].hasher)
- chunksData[file.uuid].hasher.dispose()
- } catch (error) {}
- self.cleanUpChunks(file.uuid).catch(logger.error)
- }
- }))
+ files.forEach(file => {
+ if (chunksData[file.uuid] === undefined) return
+ try {
+ if (chunksData[file.uuid].hasher) {
+ chunksData[file.uuid].hasher.dispose()
+ }
+ } catch (error) {}
+ self.cleanUpChunks(file.uuid).catch(logger.error)
+ })
// Re-throw error
throw error
@@ -586,18 +608,18 @@ self.cleanUpChunks = async (uuid, onTimeout) => {
// Remove tmp file
await paths.unlink(path.join(chunksData[uuid].root, chunksData[uuid].filename))
.catch(error => {
- if (error.code !== 'ENOENT')
- logger.error(error)
+ if (error.code !== 'ENOENT') logger.error(error)
})
+
// Remove UUID dir
await paths.rmdir(chunksData[uuid].root)
+
// Delete cached chunks data
if (!onTimeout) chunksData[uuid].clearTimeout()
delete chunksData[uuid]
}
self.scanFiles = async (req, user, infoMap) => {
- // eslint-disable-next-line curly
if (user && utils.clamd.groupBypass && perms.is(user, utils.clamd.groupBypass)) {
// logger.log(`[ClamAV]: Skipping ${infoMap.length} file(s), ${utils.clamd.groupBypass} group bypass`)
return false
@@ -605,11 +627,13 @@ self.scanFiles = async (req, user, infoMap) => {
const foundThreats = []
const results = await Promise.all(infoMap.map(async info => {
- if (utils.clamd.whitelistExtensions && utils.clamd.whitelistExtensions.includes(info.data.extname))
+ if (utils.clamd.whitelistExtensions && utils.clamd.whitelistExtensions.includes(info.data.extname)) {
return // logger.log(`[ClamAV]: Skipping ${info.data.filename}, extension whitelisted`)
+ }
- if (utils.clamd.maxSize && info.data.size > utils.clamd.maxSize)
+ if (utils.clamd.maxSize && info.data.size > utils.clamd.maxSize) {
return // logger.log(`[ClamAV]: Skipping ${info.data.filename}, size ${info.data.size} > ${utils.clamd.maxSize}`)
+ }
const reply = await utils.clamd.scanner.scanFile(info.path, utils.clamd.timeout, utils.clamd.chunkSize)
if (!reply.includes('OK') || reply.includes('FOUND')) {
@@ -619,26 +643,27 @@ self.scanFiles = async (req, user, infoMap) => {
foundThreats.push(foundThreat)
}
})).then(() => {
- if (foundThreats.length)
+ if (foundThreats.length) {
return `Threat found: ${foundThreats[0]}${foundThreats.length > 1 ? ', and more' : ''}.`
+ }
}).catch(error => {
logger.error(`[ClamAV]: ${error.toString()}`)
return 'An unexpected error occurred with ClamAV, please contact the site owner.'
})
- if (results)
+ if (results) {
// Unlink all files when at least one threat is found OR any errors occurred
// Should continue even when encountering errors
await Promise.all(infoMap.map(info =>
utils.unlinkFile(info.data.filename).catch(logger.error)
))
+ }
return results
}
self.stripTags = async (req, infoMap) => {
- if (!self.parseStripTags(req.headers.striptags))
- return
+ if (!self.parseStripTags(req.headers.striptags)) return
try {
await Promise.all(infoMap.map(info =>
@@ -665,10 +690,11 @@ self.storeFilesToDb = async (req, res, user, infoMap) => {
// Check if the file exists by checking its hash and size
const dbFile = await db.table('files')
.where(function () {
- if (user === undefined)
+ if (user === undefined) {
this.whereNull('userid')
- else
+ } else {
this.where('userid', user.id)
+ }
})
.where({
hash: info.data.hash,
@@ -684,8 +710,9 @@ self.storeFilesToDb = async (req, res, user, infoMap) => {
// logger.log(`Unlinked ${info.data.filename} since a duplicate named ${dbFile.name} exists`)
// If on /nojs route, append original file name reported by client
- if (req.path === '/nojs')
+ if (req.path === '/nojs') {
dbFile.original = info.data.originalname
+ }
exists.push(dbFile)
return
@@ -706,18 +733,21 @@ self.storeFilesToDb = async (req, res, user, infoMap) => {
if (user) {
data.userid = user.id
data.albumid = info.data.albumid
- if (data.albumid !== null && !albumids.includes(data.albumid))
+ if (data.albumid !== null && !albumids.includes(data.albumid)) {
albumids.push(data.albumid)
+ }
}
- if (info.data.age)
+ if (info.data.age) {
data.expirydate = data.timestamp + (info.data.age * 3600) // Hours to seconds
+ }
files.push(data)
// Generate thumbs, but do not wait
- if (utils.mayGenerateThumb(info.data.extname))
+ if (utils.mayGenerateThumb(info.data.extname)) {
utils.generateThumbs(info.data.filename, info.data.extname, true).catch(logger.error)
+ }
}))
if (files.length) {
@@ -730,21 +760,24 @@ self.storeFilesToDb = async (req, res, user, infoMap) => {
.then(rows => rows.map(row => row.id))
// Remove albumid if the user does not own the album
- for (const file of files)
- if (file.albumid !== null && !authorizedIds.includes(file.albumid))
+ for (const file of files) {
+ if (file.albumid !== null && !authorizedIds.includes(file.albumid)) {
file.albumid = null
+ }
+ }
}
// Insert new files to DB
await db.table('files').insert(files)
utils.invalidateStatsCache('uploads')
- if (config.uploads.queryDbForFileCollisions)
+ if (config.uploads.queryDbForFileCollisions) {
for (const file of files) {
const extname = utils.extname(file.name)
const identifier = file.name.slice(0, -(extname.length))
self.onHold.delete(identifier)
}
+ }
// Update albums' timestamp
if (authorizedIds.length) {
@@ -769,12 +802,14 @@ self.sendUploadResponse = async (req, res, user, result) => {
}
// If a temporary upload, add expiry date
- if (file.expirydate)
+ if (file.expirydate) {
map.expirydate = file.expirydate
+ }
// If on /nojs route, add original name
- if (req.path === '/nojs')
+ if (req.path === '/nojs') {
map.original = file.original
+ }
// If uploaded by user, add delete URL (intended for ShareX and its derivatives)
// Homepage uploader will not use this (use dashboard instead)
@@ -821,8 +856,9 @@ self.bulkDelete = async (req, res) => {
const field = req.body.field || 'id'
const values = req.body.values
- if (!Array.isArray(values) || !values.length)
+ if (!Array.isArray(values) || !values.length) {
return res.json({ success: false, description: 'No array of files specified.' })
+ }
try {
const failed = await utils.bulkDeleteFromDb(field, values, user)
@@ -841,8 +877,7 @@ self.list = async (req, res) => {
const filters = req.headers.filters
const minoffset = Number(req.headers.minoffset) || 0
const ismoderator = perms.is(user, 'moderator')
- if (all && !ismoderator)
- return res.status(403).end()
+ if (all && !ismoderator) return res.status(403).end()
const basedomain = config.domain
@@ -894,7 +929,7 @@ self.list = async (req, res) => {
// Look for any glob operators
const match = pattern.match(/(?<!\\)(\*|\?)/g)
return {
count: match ? match.length : 0,
escaped: pattern
.replace(/(?<!\\)([%_])/g, '\\$1')
.replace(/(?<!\\)\*/g, '%')
.replace(/(?<!\\)\?/g, '_')
}
}
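
The sqlLikeParser helper converts user-facing glob wildcards (* and ?) into SQL LIKE wildcards and counts them so the MAX_WILDCARDS_IN_KEY cap below can be enforced. A worked example:

    sqlLikeParser('photo*.?pg')
    // => { count: 2, escaped: 'photo%._pg' } -- '*' -> '%', '?' -> '_'
    // literal '%'/'_' in the input are backslash-escaped first so LIKE keeps them literal
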
// For some reason, single value won't be in Array even with 'alwaysArray' option
- if (typeof filterObj.queries.exclude.text === 'string')
+ if (typeof filterObj.queries.exclude.text === 'string') {
filterObj.queries.exclude.text = [filterObj.queries.exclude.text]
+ }
// Text (non-keyed keywords) queries
let textQueries = 0
@@ -951,33 +986,38 @@ self.list = async (req, res) => {
if (filterObj.queries.exclude.text) textQueries += filterObj.queries.exclude.text.length
// Regular user threshold check
- if (!ismoderator && textQueries > MAX_TEXT_QUERIES)
+ if (!ismoderator && textQueries > MAX_TEXT_QUERIES) {
return res.json({
success: false,
description: `Users are only allowed to use ${MAX_TEXT_QUERIES} non-keyed keyword${MAX_TEXT_QUERIES === 1 ? '' : 's'} at a time.`
})
+ }
- if (filterObj.queries.text)
+ if (filterObj.queries.text) {
for (let i = 0; i < filterObj.queries.text.length; i++) {
const result = sqlLikeParser(filterObj.queries.text[i])
- if (!ismoderator && result.count > MAX_WILDCARDS_IN_KEY)
+ if (!ismoderator && result.count > MAX_WILDCARDS_IN_KEY) {
return res.json({
success: false,
description: `Users are only allowed to use ${MAX_WILDCARDS_IN_KEY} wildcard${MAX_WILDCARDS_IN_KEY === 1 ? '' : 's'} per key.`
})
+ }
filterObj.queries.text[i] = result.escaped
}
+ }
- if (filterObj.queries.exclude.text)
+ if (filterObj.queries.exclude.text) {
for (let i = 0; i < filterObj.queries.exclude.text.length; i++) {
const result = sqlLikeParser(filterObj.queries.exclude.text[i])
- if (!ismoderator && result.count > MAX_WILDCARDS_IN_KEY)
+ if (!ismoderator && result.count > MAX_WILDCARDS_IN_KEY) {
return res.json({
success: false,
description: `Users are only allowed to use ${MAX_WILDCARDS_IN_KEY} wildcard${MAX_WILDCARDS_IN_KEY === 1 ? '' : 's'} per key.`
})
+ }
filterObj.queries.exclude.text[i] = result.escaped
}
+ }
for (const key of keywords) {
let queryIndex = -1
@@ -999,18 +1039,22 @@ self.list = async (req, res) => {
if (inQuery || inExclude) {
// Prioritize exclude keys when both types found
filterObj.flags[`${key}Null`] = inExclude ? false : inQuery
- if (inQuery)
- if (filterObj.queries[key].length === 1)
+ if (inQuery) {
+ if (filterObj.queries[key].length === 1) {
// Delete key to avoid unexpected behavior
delete filterObj.queries[key]
- else
+ } else {
filterObj.queries[key].splice(queryIndex, 1)
- if (inExclude)
- if (filterObj.queries.exclude[key].length === 1)
+ }
+ }
+ if (inExclude) {
+ if (filterObj.queries.exclude[key].length === 1) {
// Delete key to avoid unexpected behavior
delete filterObj.queries.exclude[key]
- else
+ } else {
filterObj.queries.exclude[key].splice(excludeIndex, 1)
+ }
+ }
}
}
@@ -1021,23 +1065,27 @@ self.list = async (req, res) => {
if (match) {
let offset = 0
- if (minoffset !== undefined)
+ if (minoffset !== undefined) {
offset = 60000 * (utils.timezoneOffset - minoffset)
+ }
const dateObj = new Date(Date.now() + offset)
- if (match[1] !== undefined)
+ if (match[1] !== undefined) {
dateObj.setFullYear(Number(match[1]), // full year
match[2] !== undefined ? (Number(match[2].slice(1)) - 1) : 0, // month, zero-based
match[3] !== undefined ? Number(match[3].slice(1)) : 1) // date
+ }
- if (match[4] !== undefined)
+ if (match[4] !== undefined) {
dateObj.setHours(Number(match[4]), // hours
match[5] !== undefined ? Number(match[5].slice(1)) : 0, // minutes
match[6] !== undefined ? Number(match[6].slice(1)) : 0) // seconds
+ }
- if (resetMs)
+ if (resetMs) {
dateObj.setMilliseconds(0)
+ }
// Calculate timezone differences
return new Date(dateObj.getTime() - offset)
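
minoffset is presumably the client's Date#getTimezoneOffset() forwarded in a header, so the arithmetic shifts parsed dates into the client's local time and back. A worked example under that assumption:

    // server running in UTC: utils.timezoneOffset === 0
    // client in UTC+7: minoffset === -420 (getTimezoneOffset() is minutes behind UTC)
    const offset = 60000 * (0 - (-420)) // +420 minutes in ms
    // dateObj is built at now + offset, i.e. in the client's wall-clock time,
    // and new Date(dateObj.getTime() - offset) converts it back before storage
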
@@ -1047,7 +1095,7 @@ self.list = async (req, res) => {
}
// Parse dates to timestamps
- for (const range of ranges)
+ for (const range of ranges) {
if (filterObj.queries[range]) {
if (filterObj.queries[range].from) {
const parsed = parseDate(filterObj.queries[range].from, minoffset, true)
@@ -1058,14 +1106,17 @@ self.list = async (req, res) => {
filterObj.queries[range].to = parsed ? Math.ceil(parsed / 1000) : null
}
}
+ }
// Query users table for user IDs
if (filterObj.queries.user || filterObj.queries.exclude.user) {
const usernames = []
- if (filterObj.queries.user)
+ if (filterObj.queries.user) {
usernames.push(...filterObj.queries.user)
- if (filterObj.queries.exclude.user)
+ }
+ if (filterObj.queries.exclude.user) {
usernames.push(...filterObj.queries.exclude.user)
+ }
const uploaders = await db.table('users')
.whereIn('username', usernames)
@@ -1076,18 +1127,21 @@ self.list = async (req, res) => {
const notFound = usernames.filter(username => {
return !uploaders.find(uploader => uploader.username === username)
})
- if (notFound)
+ if (notFound) {
return res.json({
success: false,
description: `User${notFound.length === 1 ? '' : 's'} not found: ${notFound.join(', ')}.`
})
+ }
}
- for (const uploader of uploaders)
- if (filterObj.queries.user && filterObj.queries.user.includes(uploader.username))
+ for (const uploader of uploaders) {
+ if (filterObj.queries.user && filterObj.queries.user.includes(uploader.username)) {
filterObj.uploaders.push(uploader)
- else
+ } else {
filterObj.excludeUploaders.push(uploader)
+ }
+ }
// Delete keys to avoid unexpected behavior
delete filterObj.queries.user
@@ -1106,20 +1160,22 @@ self.list = async (req, res) => {
]
// Only allow sorting by 'albumid' when not listing album's uploads
- if (req.params.id === undefined)
- allowed.push('albumid')
+ if (req.params.id === undefined) allowed.push('albumid')
// Only allow sorting by 'ip' and 'userid' columns when listing all uploads
- if (all)
- allowed.push('ip', 'userid')
+ if (all) allowed.push('ip', 'userid')
for (const obQuery of filterObj.queries.sort) {
const tmp = obQuery.toLowerCase().split(':')
const column = sortObj.maps[tmp[0]] || tmp[0]
- if (!allowed.includes(column))
+ if (!allowed.includes(column)) {
// Alert users if using disallowed/missing columns
- return res.json({ success: false, description: `Column \`${column}\` cannot be used for sorting.\n\nTry the following instead:\n${allowed.join(', ')}` })
+ return res.json({
+ success: false,
+ description: `Column \`${column}\` cannot be used for sorting.\n\nTry the following instead:\n${allowed.join(', ')}`
+ })
+ }
sortObj.parsed.push({
column,
@@ -1130,11 +1186,12 @@ self.list = async (req, res) => {
}
// Regular user threshold check
- if (!ismoderator && sortObj.parsed.length > MAX_SORT_KEYS)
+ if (!ismoderator && sortObj.parsed.length > MAX_SORT_KEYS) {
return res.json({
success: false,
description: `Users are only allowed to use ${MAX_SORT_KEYS} sort key${MAX_SORT_KEYS === 1 ? '' : 's'} at a time.`
})
+ }
// Delete key to avoid unexpected behavior
delete filterObj.queries.sort
@@ -1151,11 +1208,12 @@ self.list = async (req, res) => {
// Prioritize exclude keys when both types found
if (inQuery || inExclude) {
filterObj.flags[`is${type}`] = inExclude ? false : inQuery
- if (isLast !== undefined && isLast !== filterObj.flags[`is${type}`])
+ if (isLast !== undefined && isLast !== filterObj.flags[`is${type}`]) {
return res.json({
success: false,
description: 'Cannot mix inclusion and exclusion type-is keys.'
})
+ }
isKeys++
isLast = filterObj.flags[`is${type}`]
}
@@ -1167,95 +1225,108 @@ self.list = async (req, res) => {
}
// Regular user threshold check
- if (!ismoderator && isKeys > MAX_IS_KEYS)
+ if (!ismoderator && isKeys > MAX_IS_KEYS) {
return res.json({
success: false,
description: `Users are only allowed to use ${MAX_IS_KEYS} type-is key${MAX_IS_KEYS === 1 ? '' : 's'} at a time.`
})
+ }
}
function filter () {
// If listing all uploads
- if (all)
+ if (all) {
this.where(function () {
// Filter uploads matching any of the supplied 'user' keys and/or NULL flag
// Prioritize exclude keys when both types found
this.orWhere(function () {
- if (filterObj.excludeUploaders.length)
+ if (filterObj.excludeUploaders.length) {
this.whereNotIn('userid', filterObj.excludeUploaders.map(v => v.id))
- else if (filterObj.uploaders.length)
+ } else if (filterObj.uploaders.length) {
this.orWhereIn('userid', filterObj.uploaders.map(v => v.id))
+ }
// Such overbearing logic for NULL values, smh...
if ((filterObj.excludeUploaders.length && filterObj.flags.userNull !== false) ||
(filterObj.uploaders.length && filterObj.flags.userNull) ||
- (!filterObj.excludeUploaders.length && !filterObj.uploaders.length && filterObj.flags.userNull))
+ (!filterObj.excludeUploaders.length && !filterObj.uploaders.length && filterObj.flags.userNull)) {
this.orWhereNull('userid')
- else if (filterObj.flags.userNull === false)
+ } else if (filterObj.flags.userNull === false) {
this.whereNotNull('userid')
+ }
})
// Filter uploads matching any of the supplied 'ip' keys and/or NULL flag
// Same prioritization logic as above
this.orWhere(function () {
- if (filterObj.queries.exclude.ip)
+ if (filterObj.queries.exclude.ip) {
this.whereNotIn('ip', filterObj.queries.exclude.ip)
- else if (filterObj.queries.ip)
+ } else if (filterObj.queries.ip) {
this.orWhereIn('ip', filterObj.queries.ip)
+ }
// ...
if ((filterObj.queries.exclude.ip && filterObj.flags.ipNull !== false) ||
(filterObj.queries.ip && filterObj.flags.ipNull) ||
- (!filterObj.queries.exclude.ip && !filterObj.queries.ip && filterObj.flags.ipNull))
+ (!filterObj.queries.exclude.ip && !filterObj.queries.ip && filterObj.flags.ipNull)) {
this.orWhereNull('ip')
- else if (filterObj.flags.ipNull === false)
+ } else if (filterObj.flags.ipNull === false) {
this.whereNotNull('ip')
+ }
})
})
- else
+ } else {
// If not listing all uploads, list user's uploads
this.where('userid', user.id)
+ }
// Then, refine using any of the supplied 'albumid' keys and/or NULL flag
// Same prioritization logic as 'userid' and 'ip' above
- if (req.params.id === undefined)
+ if (req.params.id === undefined) {
this.andWhere(function () {
- if (filterObj.queries.exclude.albumid)
+ if (filterObj.queries.exclude.albumid) {
this.whereNotIn('albumid', filterObj.queries.exclude.albumid)
- else if (filterObj.queries.albumid)
+ } else if (filterObj.queries.albumid) {
this.orWhereIn('albumid', filterObj.queries.albumid)
+ }
// ...
if ((filterObj.queries.exclude.albumid && filterObj.flags.albumidNull !== false) ||
- (filterObj.queries.albumid && filterObj.flags.albumidNull) ||
- (!filterObj.queries.exclude.albumid && !filterObj.queries.albumid && filterObj.flags.albumidNull))
+ (filterObj.queries.albumid && filterObj.flags.albumidNull) ||
+ (!filterObj.queries.exclude.albumid && !filterObj.queries.albumid && filterObj.flags.albumidNull)) {
this.orWhereNull('albumid')
- else if (filterObj.flags.albumidNull === false)
+ } else if (filterObj.flags.albumidNull === false) {
this.whereNotNull('albumid')
+ }
})
- else if (!all)
+ } else if (!all) {
// If not listing all uploads, list uploads from user's album
this.andWhere('albumid', req.params.id)
+ }
// Then, refine using the supplied 'date' ranges
this.andWhere(function () {
if (!filterObj.queries.date || (!filterObj.queries.date.from && !filterObj.queries.date.to)) return
- if (typeof filterObj.queries.date.from === 'number')
- if (typeof filterObj.queries.date.to === 'number')
+ if (typeof filterObj.queries.date.from === 'number') {
+ if (typeof filterObj.queries.date.to === 'number') {
this.andWhereBetween('timestamp', [filterObj.queries.date.from, filterObj.queries.date.to])
- else
+ } else {
this.andWhere('timestamp', '>=', filterObj.queries.date.from)
- else
+ }
+ } else {
this.andWhere('timestamp', '<=', filterObj.queries.date.to)
+ }
})
// Then, refine using the supplied 'expiry' ranges
this.andWhere(function () {
if (!filterObj.queries.expiry || (!filterObj.queries.expiry.from && !filterObj.queries.expiry.to)) return
- if (typeof filterObj.queries.expiry.from === 'number')
- if (typeof filterObj.queries.expiry.to === 'number')
+ if (typeof filterObj.queries.expiry.from === 'number') {
+ if (typeof filterObj.queries.expiry.to === 'number') {
this.andWhereBetween('expirydate', [filterObj.queries.expiry.from, filterObj.queries.expiry.to])
- else
+ } else {
this.andWhere('expirydate', '>=', filterObj.queries.expiry.from)
- else
+ }
+ } else {
this.andWhere('expirydate', '<=', filterObj.queries.expiry.to)
+ }
})
// Then, refine using type-is flags
@@ -1271,9 +1342,11 @@ self.list = async (req, res) => {
operator = 'not like'
}
- if (func)
- for (const pattern of utils[`${type}Exts`].map(ext => `%${ext}`))
+ if (func) {
+ for (const pattern of utils[`${type}Exts`].map(ext => `%${ext}`)) {
this[func]('name', operator, pattern)
+ }
+ }
}
})
@@ -1302,36 +1375,36 @@ self.list = async (req, res) => {
.where(filter)
.count('id as count')
.then(rows => rows[0].count)
- if (!count)
- return res.json({ success: true, files: [], count })
+ if (!count) return res.json({ success: true, files: [], count })
let offset = Number(req.params.page)
if (isNaN(offset)) offset = 0
else if (offset < 0) offset = Math.max(0, Math.ceil(count / 25) + offset)
const columns = ['id', 'name', 'original', 'userid', 'size', 'timestamp']
- if (temporaryUploads)
- columns.push('expirydate')
- if (!all || filterObj.queries.albumid || filterObj.queries.exclude.albumid ||
- filterObj.flags.albumidNull !== undefined)
- columns.push('albumid')
+ if (temporaryUploads) columns.push('expirydate')
+ if (!all ||
+ filterObj.queries.albumid ||
+ filterObj.queries.exclude.albumid ||
+ filterObj.flags.albumidNull !== undefined) columns.push('albumid')
// Only select IPs if we are listing all uploads
- if (all)
- columns.push('ip')
+ if (all) columns.push('ip')
// Build raw query for order by (sorting) operation
let orderByRaw
- if (sortObj.parsed.length)
+ if (sortObj.parsed.length) {
orderByRaw = sortObj.parsed.map(sort => {
// Use Knex.raw() to sanitize user inputs
- if (sort.cast)
+ if (sort.cast) {
return db.raw(`cast (?? as ${sort.cast}) ${sort.order} ${sort.clause}`.trim(), sort.column)
- else
+ } else {
return db.raw(`?? ${sort.order} ${sort.clause}`.trim(), sort.column)
+ }
}).join(', ')
- else
+ } else {
orderByRaw = '`id` desc'
+ }
const files = await db.table('files')
.where(filter)
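As an aside, the db.raw() calls above are what keep user-driven sorting injection-safe: the ?? placeholder has Knex escape the column identifier, while the order, clause, and cast fragments are only ever the pre-validated strings produced by the sort parser. A minimal sketch, assuming an entry shaped like the ones sortObj.parsed holds:

const sort = { column: 'size', order: 'desc', clause: '', cast: 'integer' } // hypothetical parsed entry
const raw = sort.cast
  ? db.raw(`cast (?? as ${sort.cast}) ${sort.order} ${sort.clause}`.trim(), sort.column)
  : db.raw(`?? ${sort.order} ${sort.clause}`.trim(), sort.column)
// raw.toString() -> cast (`size` as integer) desc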
@@ -1340,13 +1413,13 @@ self.list = async (req, res) => {
.offset(25 * offset)
.select(columns)
- if (!files.length)
- return res.json({ success: true, files, count, basedomain })
+ if (!files.length) return res.json({ success: true, files, count, basedomain })
for (const file of files) {
file.extname = utils.extname(file.name)
- if (utils.mayGenerateThumb(file.extname))
+ if (utils.mayGenerateThumb(file.extname)) {
file.thumb = `thumbs/${file.name.slice(0, -file.extname.length)}.png`
+ }
}
// If we queried albumid, query album names
@@ -1364,15 +1437,15 @@ self.list = async (req, res) => {
.then(rows => {
// Build Object indexed by their IDs
const obj = {}
- for (const row of rows)
+ for (const row of rows) {
obj[row.id] = row.name
+ }
return obj
})
}
// If we are not listing all uploads, send response
- if (!all)
- return res.json({ success: true, files, count, albums, basedomain })
+ if (!all) return res.json({ success: true, files, count, albums, basedomain })
// Otherwise proceed to querying usernames
let usersTable = filterObj.uploaders
@@ -1384,8 +1457,7 @@ self.list = async (req, res) => {
})
// If there are no uploads attached to a registered user, send response
- if (userids.length === 0)
- return res.json({ success: true, files, count, albums, basedomain })
+ if (!userids.length) return res.json({ success: true, files, count, albums, basedomain })
// Query usernames of user IDs from currently selected files
usersTable = await db.table('users')
@@ -1394,8 +1466,9 @@ self.list = async (req, res) => {
}
const users = {}
- for (const user of usersTable)
+ for (const user of usersTable) {
users[user.id] = user.username
+ }
return res.json({ success: true, files, count, users, albums, basedomain })
} catch (error) {
diff --git a/controllers/utilsController.js b/controllers/utilsController.js
index 86ac324..f7bc141 100644
--- a/controllers/utilsController.js
+++ b/controllers/utilsController.js
@@ -18,7 +18,9 @@ const self = {
chunkSize: config.uploads.scan.chunkSize || 64 * 1024,
groupBypass: config.uploads.scan.groupBypass || null,
whitelistExtensions: (Array.isArray(config.uploads.scan.whitelistExtensions) &&
- config.uploads.scan.whitelistExtensions.length) ? config.uploads.scan.whitelistExtensions : null,
+ config.uploads.scan.whitelistExtensions.length)
+ ? config.uploads.scan.whitelistExtensions
+ : null,
maxSize: (parseInt(config.uploads.scan.maxSize) * 1e6) || null
},
gitHash: null,
@@ -92,14 +94,16 @@ self.extname = filename => {
}
// check against extensions that must be preserved
- for (const extPreserve of extPreserves)
+ for (const extPreserve of extPreserves) {
if (lower.endsWith(extPreserve)) {
extname = extPreserve
break
}
+ }
- if (!extname)
+ if (!extname) {
extname = lower.slice(lower.lastIndexOf('.') - lower.length) // path.extname(lower)
+ }
return extname + multi
}
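The preserve list exists because the lastIndexOf('.') fallback only keeps the final extension segment. A quick sketch, assuming extPreserves contains entries such as '.tar.gz':

const lower = 'archive.tar.gz'
// Fallback alone keeps only the last segment:
lower.slice(lower.lastIndexOf('.') - lower.length) // '.gz'
// With '.tar.gz' in extPreserves, the loop above matches first and extname stays '.tar.gz'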
@@ -110,14 +114,12 @@ self.escape = string => {
// Copyright(c) 2015 Andreas Lubbe
// Copyright(c) 2015 Tiancheng "Timothy" Gu
- if (!string)
- return string
+ if (!string) return string
const str = String(string)
const match = /["'&<>]/.exec(str)
- if (!match)
- return str
+ if (!match) return str
let escape
let html = ''
@@ -145,8 +147,9 @@ self.escape = string => {
continue
}
- if (lastIndex !== index)
+ if (lastIndex !== index) {
html += str.substring(lastIndex, index)
+ }
lastIndex = index + 1
html += escape
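For reference, the function only rewrites the five characters matched by /["'&<>]/ and copies everything else through verbatim. A usage sketch:

self.escape(`<img src="x" onerror='alert(1)'>`)
// -> &lt;img src=&quot;x&quot; onerror=&#39;alert(1)&#39;&gt;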
@@ -203,16 +206,16 @@ self.generateThumbs = async (name, extname, force) => {
// Check if thumbnail already exists
try {
const lstat = await paths.lstat(thumbname)
- if (lstat.isSymbolicLink())
+ if (lstat.isSymbolicLink()) {
// Unlink if symlink (should be symlink to the placeholder)
await paths.unlink(thumbname)
- else if (!force)
+ } else if (!force) {
// Continue only if it does not exist, unless forced to
return true
+ }
} catch (error) {
// Re-throw error
- if (error.code !== 'ENOENT')
- throw error
+ if (error.code !== 'ENOENT') throw error
}
// Full path to input file
@@ -257,12 +260,14 @@ self.generateThumbs = async (name, extname, force) => {
const metadata = await self.ffprobe(input)
const duration = parseInt(metadata.format.duration)
- if (isNaN(duration))
+ if (isNaN(duration)) {
throw 'Warning: File does not have valid duration metadata'
+ }
const videoStream = metadata.streams && metadata.streams.find(s => s.codec_type === 'video')
- if (!videoStream || !videoStream.width || !videoStream.height)
+ if (!videoStream || !videoStream.width || !videoStream.height) {
throw 'Warning: File does not have valid video stream metadata'
+ }
await new Promise((resolve, reject) => {
ffmpeg(input)
@@ -287,10 +292,11 @@ self.generateThumbs = async (name, extname, force) => {
await paths.lstat(thumbname)
return true
} catch (err) {
- if (err.code === 'ENOENT')
+ if (err.code === 'ENOENT') {
throw error || 'Warning: FFMPEG exited with empty output file'
- else
+ } else {
throw error || err
+ }
}
})
} else {
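Both guards above run before any ffmpeg work is queued. A sketch of the shapes involved, assuming ffprobe output of the usual form:

const metadata = {
  format: { duration: '10.500000' }, // may be 'N/A' for some inputs, hence the isNaN() check
  streams: [{ codec_type: 'video', width: 1280, height: 720 }]
}
const duration = parseInt(metadata.format.duration) // 10
const videoStream = metadata.streams && metadata.streams.find(s => s.codec_type === 'video')
// Only when both checks pass does the job reach ffmpeg(input)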
@@ -361,8 +367,7 @@ self.unlinkFile = async (filename, predb) => {
await paths.unlink(path.join(paths.uploads, filename))
} catch (error) {
// Return true if file does not exist
- if (error.code !== 'ENOENT')
- throw error
+ if (error.code !== 'ENOENT') throw error
}
const identifier = filename.split('.')[0]
@@ -375,26 +380,26 @@ self.unlinkFile = async (filename, predb) => {
}
const extname = self.extname(filename)
- if (self.imageExts.includes(extname) || self.videoExts.includes(extname))
+ if (self.imageExts.includes(extname) || self.videoExts.includes(extname)) {
try {
await paths.unlink(path.join(paths.thumbs, `${identifier}.png`))
} catch (error) {
- if (error.code !== 'ENOENT')
- throw error
+ if (error.code !== 'ENOENT') throw error
}
+ }
}
self.bulkDeleteFromDb = async (field, values, user) => {
// Always return an empty array on failure
- if (!user || !['id', 'name'].includes(field) || !values.length)
- return []
+ if (!user || !['id', 'name'].includes(field) || !values.length) return []
// SQLITE_LIMIT_VARIABLE_NUMBER, which defaults to 999
// Read more: https://www.sqlite.org/limits.html
const MAX_VARIABLES_CHUNK_SIZE = 999
const chunks = []
- while (values.length)
+ while (values.length) {
chunks.push(values.splice(0, MAX_VARIABLES_CHUNK_SIZE))
+ }
const failed = []
const ismoderator = perms.is(user, 'moderator')
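Note that splice() drains the values array in place, so every chunk stays under SQLite's default 999-variable limit when later passed to whereIn(). A worked example:

const values = Array.from({ length: 2500 }, (_, i) => i + 1)
const chunks = []
while (values.length) {
  chunks.push(values.splice(0, 999))
}
chunks.map(chunk => chunk.length) // [999, 999, 502]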
@@ -407,8 +412,9 @@ self.bulkDeleteFromDb = async (field, values, user) => {
const files = await db.table('files')
.whereIn(field, chunk)
.where(function () {
- if (!ismoderator)
+ if (!ismoderator) {
this.where('userid', user.id)
+ }
})
// Push files that could not be found in db
@@ -435,17 +441,19 @@ self.bulkDeleteFromDb = async (field, values, user) => {
.del()
self.invalidateStatsCache('uploads')
- if (self.idSet)
+ if (self.idSet) {
unlinked.forEach(file => {
const identifier = file.name.split('.')[0]
self.idSet.delete(identifier)
// logger.log(`Removed ${identifier} from identifiers cache (bulkDeleteFromDb)`)
})
+ }
// Push album ids
unlinked.forEach(file => {
- if (file.albumid && !albumids.includes(file.albumid))
+ if (file.albumid && !albumids.includes(file.albumid)) {
albumids.push(file.albumid)
+ }
})
// Push unlinked files
@@ -463,13 +471,16 @@ self.bulkDeleteFromDb = async (field, values, user) => {
}
// Purge Cloudflare's cache if necessary, but do not wait
- if (config.cloudflare.purgeCache)
+ if (config.cloudflare.purgeCache) {
self.purgeCloudflareCache(unlinkeds.map(file => file.name), true, true)
.then(results => {
- for (const result of results)
- if (result.errors.length)
+ for (const result of results) {
+ if (result.errors.length) {
result.errors.forEach(error => logger.error(`[CF]: ${error}`))
+ }
+ }
})
+ }
}
} catch (error) {
logger.error(error)
@@ -480,12 +491,15 @@ self.bulkDeleteFromDb = async (field, values, user) => {
self.purgeCloudflareCache = async (names, uploads, thumbs) => {
const errors = []
- if (!cloudflareAuth)
+ if (!cloudflareAuth) {
errors.push('Cloudflare auth is incomplete or missing')
- if (!Array.isArray(names) || !names.length)
+ }
+ if (!Array.isArray(names) || !names.length) {
errors.push('Names array is invalid or empty')
- if (errors.length)
+ }
+ if (errors.length) {
return [{ success: false, files: [], errors }]
+ }
let domain = config.domain
if (!uploads) domain = config.homeDomain
@@ -495,8 +509,9 @@ self.purgeCloudflareCache = async (names, uploads, thumbs) => {
if (uploads) {
const url = `${domain}/${name}`
const extname = self.extname(name)
- if (thumbs && self.mayGenerateThumb(extname))
+ if (thumbs && self.mayGenerateThumb(extname)) {
thumbNames.push(`${domain}/thumbs/${name.slice(0, -extname.length)}.png`)
+ }
return url
} else {
return name === 'home' ? domain : `${domain}/${name}`
@@ -509,8 +524,9 @@ self.purgeCloudflareCache = async (names, uploads, thumbs) => {
// TODO: Handle API rate limits
const MAX_LENGTH = 30
const chunks = []
- while (names.length)
+ while (names.length) {
chunks.push(names.splice(0, MAX_LENGTH))
+ }
const url = `https://api.cloudflare.com/client/v4/zones/${config.cloudflare.zoneId}/purge_cache`
const results = []
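Each 30-URL chunk then becomes one POST to the purge_cache endpoint above. A sketch of a single request, assuming a fetch-compatible client and that cloudflareAuth holds the prepared auth headers:

const purge = await fetch(url, {
  method: 'POST',
  body: JSON.stringify({ files: chunk }), // one of the 30-URL chunks built above
  headers: Object.assign({ 'Content-Type': 'application/json' }, cloudflareAuth)
})
const response = await purge.json() // { success, errors: [{ code, message }, ...], ... }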
@@ -543,8 +559,9 @@ self.purgeCloudflareCache = async (names, uploads, thumbs) => {
const response = await purge.json()
result.success = response.success
- if (Array.isArray(response.errors) && response.errors.length)
+ if (Array.isArray(response.errors) && response.errors.length) {
result.errors = response.errors.map(error => `${error.code}: ${error.message}`)
+ }
} catch (error) {
result.errors = [error.toString()]
}
@@ -642,7 +659,7 @@ self.stats = async (req, res, next) => {
}
// Disk usage, only for Linux platform
- if (os.platform() === 'linux')
+ if (os.platform() === 'linux') {
if (!statsCache.disk.cache && statsCache.disk.generating) {
stats.disk = false
} else if (((Date.now() - statsCache.disk.generatedAt) <= 60000) || statsCache.disk.generating) {
@@ -727,8 +744,9 @@ self.stats = async (req, res, next) => {
stats.disk[basename] = parseInt(formatted[0])
// Add to types if necessary
- if (!stats.disk._types.byte.includes(basename))
+ if (!stats.disk._types.byte.includes(basename)) {
stats.disk._types.byte.push(basename)
+ }
})
const stderr = []
@@ -786,6 +804,7 @@ self.stats = async (req, res, next) => {
statsCache.disk.cache = stats.disk
statsCache.disk.generating = false
}
+ }
// Uploads
if (!statsCache.uploads.cache && statsCache.uploads.generating) {
@@ -812,8 +831,9 @@ self.stats = async (req, res, next) => {
stats.uploads.total = uploads.length
stats.uploads.sizeInDb = uploads.reduce((acc, upload) => acc + parseInt(upload.size), 0)
// Add type information for the new column
- if (!Array.isArray(stats.uploads._types.byte))
+ if (!Array.isArray(stats.uploads._types.byte)) {
stats.uploads._types.byte = []
+ }
stats.uploads._types.byte.push('sizeInDb')
} else {
stats.uploads.total = await db.table('files')
@@ -823,16 +843,18 @@ self.stats = async (req, res, next) => {
stats.uploads.images = await db.table('files')
.where(function () {
- for (const ext of self.imageExts)
+ for (const ext of self.imageExts) {
this.orWhere('name', 'like', `%${ext}`)
+ }
})
.count('id as count')
.then(rows => rows[0].count)
stats.uploads.videos = await db.table('files')
.where(function () {
- for (const ext of self.videoExts)
+ for (const ext of self.videoExts) {
this.orWhere('name', 'like', `%${ext}`)
+ }
})
.count('id as count')
.then(rows => rows[0].count)
@@ -870,16 +892,18 @@ self.stats = async (req, res, next) => {
const users = await db.table('users')
stats.users.total = users.length
for (const user of users) {
- if (user.enabled === false || user.enabled === 0)
+ if (user.enabled === false || user.enabled === 0) {
stats.users.disabled++
+ }
// This may be inaccurate on installations with customized permissions
user.permission = user.permission || 0
- for (const p of permissionKeys)
+ for (const p of permissionKeys) {
if (user.permission === perms.permissions[p]) {
stats.users[p]++
break
}
+ }
}
// Update cache
@@ -926,8 +950,7 @@ self.stats = async (req, res, next) => {
stats.albums.zipGenerated++
} catch (error) {
// Re-throw error
- if (error.code !== 'ENOENT')
- throw error
+ if (error.code !== 'ENOENT') throw error
}
}))
diff --git a/gulpfile.js b/gulpfile.js
index 0c90184..f5fa4e1 100644
--- a/gulpfile.js
+++ b/gulpfile.js
@@ -29,8 +29,9 @@ const postcssPlugins = [
sass.compiler = sassCompiler
// Minify on production
-if (process.env.NODE_ENV !== 'development')
+if (process.env.NODE_ENV !== 'development') {
postcssPlugins.push(cssnano())
+}
/** TASKS: LINT */
diff --git a/logger.js b/logger.js
index b29c40d..de42377 100644
--- a/logger.js
+++ b/logger.js
@@ -54,8 +54,9 @@ self.debug = (...args) => {
Object.assign(options, args[args.length - 1])
args.splice(args.length - 1, 1)
}
- for (const arg of args)
+ for (const arg of args) {
console.log(inspect(arg, options))
+ }
}
module.exports = self
diff --git a/lolisafe.js b/lolisafe.js
index a18044a..09685a1 100644
--- a/lolisafe.js
+++ b/lolisafe.js
@@ -36,11 +36,13 @@ safe.use(helmet({
hsts: false
}))
-if (config.hsts instanceof Object && Object.keys(config.hsts).length)
+if (config.hsts instanceof Object && Object.keys(config.hsts).length) {
safe.use(helmet.hsts(config.hsts))
+}
-if (config.trustProxy)
+if (config.trustProxy) {
safe.set('trust proxy', 1)
+}
// https://mozilla.github.io/nunjucks/api.html#configure
nunjucks.configure('views', {
@@ -52,12 +54,14 @@ safe.set('view engine', 'njk')
safe.enable('view cache')
// Configure rate limits
-if (Array.isArray(config.rateLimits) && config.rateLimits.length)
+if (Array.isArray(config.rateLimits) && config.rateLimits.length) {
for (const rateLimit of config.rateLimits) {
const limiter = new RateLimit(rateLimit.config)
- for (const route of rateLimit.routes)
+ for (const route of rateLimit.routes) {
safe.use(route, limiter)
+ }
}
+}
safe.use(bodyParser.urlencoded({ extended: true }))
safe.use(bodyParser.json())
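The loop above creates one limiter per entry and mounts it on every listed route. A hypothetical config.rateLimits entry, with its shape inferred from that loop and express-rate-limit style options:

module.exports = {
  rateLimits: [
    {
      routes: ['/api/login/', '/api/register/'],
      config: { windowMs: 15 * 60 * 1000, max: 10 } // at most 10 requests per 15 minutes
    }
  ]
}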
@@ -117,24 +121,27 @@ if (config.cacheControl) {
// If using CDN, cache public pages in CDN
if (config.cacheControl !== 2) {
cdnPages.push('api/check')
- for (const page of cdnPages)
+ for (const page of cdnPages) {
safe.use(`/${page === 'home' ? '' : page}`, (req, res, next) => {
res.set('Cache-Control', cacheControls.cdn)
next()
})
+ }
}
// If serving uploads with node
- if (config.serveFilesWithNode)
+ if (config.serveFilesWithNode) {
initServeStaticUploads({
setHeaders: res => {
res.set('Access-Control-Allow-Origin', '*')
// If using CDN, cache uploads in CDN as well
// Use with cloudflare.purgeCache enabled in config file
- if (config.cacheControl !== 2)
+ if (config.cacheControl !== 2) {
res.set('Cache-Control', cacheControls.cdn)
+ }
}
})
+ }
// Function for static assets.
// This requires the assets to use version in their query string,
@@ -148,10 +155,11 @@ if (config.cacheControl) {
safe.use(['/api/album/zip'], (req, res, next) => {
res.set('Access-Control-Allow-Origin', '*')
const versionString = parseInt(req.query.v)
- if (versionString > 0)
+ if (versionString > 0) {
res.set('Cache-Control', cacheControls.static)
- else
+ } else {
res.set('Cache-Control', cacheControls.disable)
+ }
next()
})
} else if (config.serveFilesWithNode) {
@@ -182,32 +190,36 @@ safe.use('/api', api)
// Re-map version strings if cache control is enabled (safe.fiery.me)
utils.versionStrings = {}
if (config.cacheControl) {
- for (const type in versions)
+ for (const type in versions) {
utils.versionStrings[type] = `?_=${versions[type]}`
- if (versions['1'])
+ }
+ if (versions['1']) {
utils.clientVersion = versions['1']
+ }
}
// Cookie Policy
- if (config.cookiePolicy)
+ if (config.cookiePolicy) {
config.pages.push('cookiepolicy')
+ }
// Check for custom pages, otherwise fallback to Nunjucks templates
for (const page of config.pages) {
const customPage = path.join(paths.customPages, `${page}.html`)
- if (!await paths.access(customPage).catch(() => true))
+ if (!await paths.access(customPage).catch(() => true)) {
safe.get(`/${page === 'home' ? '' : page}`, (req, res, next) => res.sendFile(customPage))
- else if (page === 'home')
+ } else if (page === 'home') {
safe.get('/', (req, res, next) => res.render(page, {
config,
versions: utils.versionStrings,
gitHash: utils.gitHash
}))
- else
+ } else {
safe.get(`/${page}`, (req, res, next) => res.render(page, {
config,
versions: utils.versionStrings
}))
+ }
}
// Error pages
@@ -240,8 +252,9 @@ safe.use('/api', api)
logger.log(`${ip}:${port} ${version}`)
utils.clamd.scanner = clamd.createScanner(ip, port)
- if (!utils.clamd.scanner)
+ if (!utils.clamd.scanner) {
throw 'Could not create clamd scanner'
+ }
}
// Cache file identifiers
@@ -260,7 +273,7 @@ safe.use('/api', api)
// Cache control (safe.fiery.me)
// Purge Cloudflare cache
- if (config.cacheControl && config.cacheControl !== 2)
+ if (config.cacheControl && config.cacheControl !== 2) {
if (config.cloudflare.purgeCache) {
logger.log('Cache control enabled, purging Cloudflare\'s cache...')
const results = await utils.purgeCloudflareCache(cdnPages)
@@ -274,11 +287,13 @@ safe.use('/api', api)
}
succeeded += result.files.length
}
- if (!errored)
+ if (!errored) {
logger.log(`Successfully purged ${succeeded} cache`)
+ }
} else {
logger.log('Cache control enabled without Cloudflare\'s cache purging')
}
+ }
// Temporary uploads (only check for expired uploads if config.uploads.temporaryUploadsInterval is also set)
if (Array.isArray(config.uploads.temporaryUploadAges) &&
@@ -286,8 +301,7 @@ safe.use('/api', api)
config.uploads.temporaryUploadsInterval) {
let temporaryUploadsInProgress = false
const temporaryUploadCheck = async () => {
- if (temporaryUploadsInProgress)
- return
+ if (temporaryUploadsInProgress) return
temporaryUploadsInProgress = true
try {
@@ -295,8 +309,9 @@ safe.use('/api', api)
if (result.expired.length) {
let logMessage = `Expired uploads: ${result.expired.length} deleted`
- if (result.failed.length)
+ if (result.failed.length) {
logMessage += `, ${result.failed.length} errored`
+ }
logger.log(logMessage)
}
@@ -321,10 +336,8 @@ safe.use('/api', api)
prompt: ''
}).on('line', line => {
try {
- if (line === 'rs')
- return
- if (line === '.exit')
- return process.exit(0)
+ if (line === 'rs') return
+ if (line === '.exit') return process.exit(0)
// eslint-disable-next-line no-eval
logger.log(eval(line))
} catch (error) {
diff --git a/src/js/.eslintrc.js b/src/js/.eslintrc.js
index 8b87bc7..21894a6 100644
--- a/src/js/.eslintrc.js
+++ b/src/js/.eslintrc.js
@@ -12,11 +12,6 @@ module.exports = {
'plugin:compat/recommended'
],
rules: {
- curly: [
- 'error',
- 'multi',
- 'consistent'
- ],
'object-shorthand': [
'error',
'always'
diff --git a/src/js/album.js b/src/js/album.js
index 97739a3..8f5b800 100644
--- a/src/js/album.js
+++ b/src/js/album.js
@@ -9,8 +9,9 @@ const page = {
window.addEventListener('DOMContentLoaded', () => {
const elements = document.querySelectorAll('.file-size')
- for (let i = 0; i < elements.length; i++)
+ for (let i = 0; i < elements.length; i++) {
elements[i].innerHTML = page.getPrettyBytes(parseInt(elements[i].innerHTML.replace(/\s*B$/i, '')))
+ }
page.lazyLoad = new LazyLoad()
})
diff --git a/src/js/auth.js b/src/js/auth.js
index 22d87cc..b994663 100644
--- a/src/js/auth.js
+++ b/src/js/auth.js
@@ -15,16 +15,13 @@ const page = {
page.unhide = () => {
const loaderSection = document.querySelector('#loader')
- if (loaderSection)
- loaderSection.classList.add('is-hidden')
+ if (loaderSection) loaderSection.classList.add('is-hidden')
const loginSection = document.querySelector('#login.is-hidden')
- if (loginSection)
- loginSection.classList.remove('is-hidden')
+ if (loginSection) loginSection.classList.remove('is-hidden')
const floatingBtn = document.querySelector('.floating-home-button.is-hidden')
- if (floatingBtn)
- floatingBtn.classList.remove('is-hidden')
+ if (floatingBtn) floatingBtn.classList.remove('is-hidden')
}
// Handler for Axios errors
@@ -54,12 +51,10 @@ page.onAxiosError = error => {
page.do = (dest, trigger) => {
const user = page.user.value.trim()
- if (!user)
- return swal('An error occurred!', 'You need to specify a username.', 'error')
+ if (!user) return swal('An error occurred!', 'You need to specify a username.', 'error')
const pass = page.pass.value.trim()
- if (!pass)
- return swal('An error occurred!', 'You need to specify a password.', 'error')
+ if (!pass) return swal('An error occurred!', 'You need to specify a password.', 'error')
trigger.classList.add('is-loading')
axios.post(`api/${dest}`, {
@@ -107,23 +102,24 @@ window.addEventListener('DOMContentLoaded', () => {
})
const loginBtn = document.querySelector('#loginBtn')
- if (loginBtn)
+ if (loginBtn) {
loginBtn.addEventListener('click', event => {
if (!form.checkValidity()) return
page.do('login', event.currentTarget)
})
+ }
const registerBtn = document.querySelector('#registerBtn')
- if (registerBtn)
+ if (registerBtn) {
registerBtn.addEventListener('click', event => {
- if (!form.checkValidity())
+ if (!form.checkValidity()) {
// Workaround for browsers to display native form error messages
return loginBtn.click()
+ }
page.do('register', event.currentTarget)
})
+ }
- if (page.token)
- page.verify()
- else
- page.unhide()
+ if (page.token) page.verify()
+ else page.unhide()
})
diff --git a/src/js/dashboard.js b/src/js/dashboard.js
index 54fd35e..6825251 100644
--- a/src/js/dashboard.js
+++ b/src/js/dashboard.js
@@ -146,16 +146,14 @@ page.onAxiosError = error => {
}
page.preparePage = () => {
- if (page.token)
- page.verifyToken(page.token, true)
- else
- window.location = 'auth'
+ if (page.token) page.verifyToken(page.token, true)
+ else window.location = 'auth'
}
page.checkClientVersion = apiVersion => {
const self = document.querySelector('#mainScript')
const match = self.src.match(/\?_=(\d+)$/)
- if (match && match[1] && match[1] !== apiVersion)
+ if (match && match[1] && match[1] !== apiVersion) {
return swal({
title: 'Update detected!',
text: 'Client assets have been updated. Reload to display the latest version?',
@@ -169,11 +167,12 @@ page.checkClientVersion = apiVersion => {
}).then(() => {
window.location.reload()
})
+ }
}
page.verifyToken = (token, reloadOnError) => {
axios.post('api/tokens/verify', { token }).then(response => {
- if (response.data.success === false)
+ if (response.data.success === false) {
return swal({
title: 'An error occurred!',
text: response.data.description,
@@ -183,12 +182,14 @@ page.verifyToken = (token, reloadOnError) => {
localStorage.removeItem(lsKeys.token)
window.location = 'auth'
})
+ }
axios.defaults.headers.common.token = token
localStorage[lsKeys.token] = token
- if (response.data.version)
+ if (response.data.version) {
page.checkClientVersion(response.data.version)
+ }
page.token = token
page.username = response.data.username
@@ -207,8 +208,9 @@ page.prepareDashboard = () => {
// Capture all submit events
page.dom.addEventListener('submit', event => {
// Prevent default if necessary
- if (event.target && event.target.classList.contains('prevent-default'))
+ if (event.target && event.target.classList.contains('prevent-default')) {
return event.preventDefault()
+ }
}, true)
page.menusContainer = document.querySelector('#menu')
@@ -229,14 +231,12 @@ page.prepareDashboard = () => {
for (let i = 0; i < itemMenus.length; i++) {
// Skip item menu if not enough permission
- if (itemMenus[i].group && !page.permissions[itemMenus[i].group])
- continue
+ if (itemMenus[i].group && !page.permissions[itemMenus[i].group]) continue
// Add onclick event listener
const item = document.querySelector(itemMenus[i].selector)
item.addEventListener('click', event => {
- if (page.isSomethingLoading)
- return page.warnSomethingLoading()
+ if (page.isSomethingLoading) return page.warnSomethingLoading()
// eslint-disable-next-line compat/compat
itemMenus[i].onclick.call(null, Object.assign(itemMenus[i].params || {}, {
@@ -263,8 +263,7 @@ page.prepareDashboard = () => {
// Load albums sidebar
page.getAlbumsSidebar()
- if (typeof page.prepareShareX === 'function')
- page.prepareShareX()
+ if (typeof page.prepareShareX === 'function') page.prepareShareX()
}
page.logout = params => {
@@ -296,8 +295,9 @@ page.updateTrigger = (trigger, newState) => {
trigger.classList.add('is-loading')
} else if (newState === 'active') {
if (trigger.parentNode.tagName === 'LI' && !trigger.className.includes('pagination-link')) {
- for (let i = 0; i < page.menus.length; i++)
+ for (let i = 0; i < page.menus.length; i++) {
page.menus[i].classList.remove('is-active')
+ }
trigger.classList.add('is-active')
}
trigger.classList.remove('is-loading')
@@ -326,10 +326,12 @@ page.domClick = event => {
if (!element) return
// Delegate click events to their A or BUTTON parents
- if (['I'].includes(element.tagName) && ['SPAN'].includes(element.parentNode.tagName))
+ if (['I'].includes(element.tagName) && ['SPAN'].includes(element.parentNode.tagName)) {
element = element.parentNode
- if (['SPAN'].includes(element.tagName) && ['A', 'BUTTON'].includes(element.parentNode.tagName))
+ }
+ if (['SPAN'].includes(element.tagName) && ['A', 'BUTTON'].includes(element.parentNode.tagName)) {
element = element.parentNode
+ }
// Skip elements that have no action data
if (!element.dataset || !element.dataset.action) return
@@ -449,8 +451,7 @@ page.getByView = (view, get) => {
}
page.switchPage = (action, element) => {
- if (page.isSomethingLoading)
- return page.warnSomethingLoading()
+ if (page.isSomethingLoading) return page.warnSomethingLoading()
// eslint-disable-next-line compat/compat
const params = Object.assign(page.views[page.currentView], {
@@ -462,8 +463,9 @@ page.switchPage = (action, element) => {
switch (action) {
case 'page-prev':
params.pageNum = page.views[page.currentView].pageNum - 1
- if (params.pageNum < 0)
+ if (params.pageNum < 0) {
return swal('An error occurred!', 'This is already the first page.', 'error')
+ }
return func(params)
case 'page-next':
params.pageNum = page.views[page.currentView].pageNum + 1
@@ -490,43 +492,42 @@ page.focusJumpToPage = element => {
}
page.getUploads = (params = {}) => {
- if (params && params.all && !page.permissions.moderator)
+ if (params && params.all && !page.permissions.moderator) {
return swal('An error occurred!', 'You cannot do this!', 'error')
+ }
- if (page.isSomethingLoading)
- return page.warnSomethingLoading()
+ if (page.isSomethingLoading) return page.warnSomethingLoading()
page.updateTrigger(params.trigger, 'loading')
- if (typeof params.pageNum !== 'number' || params.pageNum < 0)
+ if (typeof params.pageNum !== 'number' || params.pageNum < 0) {
params.pageNum = 0
+ }
const url = params.album !== undefined
? `api/album/${params.album}/${params.pageNum}`
: `api/uploads/${params.pageNum}`
const headers = {}
-
- if (params.all)
- headers.all = '1'
-
+ if (params.all) headers.all = '1'
if (params.filters) {
headers.filters = params.filters
-
// Send client timezone offset if properly using date: and/or :expiry filters
// Server will pretend client is on UTC if unset
- if (/(^|\s)(date|expiry):[\d"]/.test(params.filters))
+ if (/(^|\s)(date|expiry):[\d"]/.test(params.filters)) {
headers.minoffset = new Date().getTimezoneOffset()
+ }
}
axios.get(url, { headers }).then(response => {
- if (response.data.success === false)
+ if (response.data.success === false) {
if (response.data.description === 'No token provided') {
return page.verifyToken(page.token)
} else {
page.updateTrigger(params.trigger)
return swal('An error occurred!', response.data.description, 'error')
}
+ }
const pages = Math.ceil(response.data.count / 25)
const files = response.data.files
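Regarding the minoffset header set further up: getTimezoneOffset() returns minutes behind UTC (e.g. 300 for UTC-5, -60 for UTC+1), which lets the server interpret date: and expiry: filters in the client's local time instead of assuming UTC. A sketch with a hypothetical filter string:

const headers = { filters: 'date:2020/01/01' } // hypothetical filter value
if (/(^|\s)(date|expiry):[\d"]/.test(headers.filters)) {
  headers.minoffset = new Date().getTimezoneOffset()
}
axios.get('api/uploads/0', { headers })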
@@ -664,17 +665,19 @@ page.getUploads = (params = {}) => {
for (let i = 0; i < files.length; i++) {
// Build full URLs
files[i].file = `${basedomain}/${files[i].name}`
- if (files[i].thumb)
+ if (files[i].thumb) {
files[i].thumb = `${basedomain}/${files[i].thumb}`
+ }
// Determine types
files[i].type = 'other'
const exec = /.[\w]+(\?|$)/.exec(files[i].file)
const extname = exec && exec[0] ? exec[0].toLowerCase() : null
- if (page.imageExts.includes(extname))
+ if (page.imageExts.includes(extname)) {
files[i].type = 'picture'
- else if (page.videoExts.includes(extname))
+ } else if (page.videoExts.includes(extname)) {
files[i].type = 'video'
+ }
// Cache bare minimum data for thumbnails viewer
page.cache[files[i].id] = {
@@ -689,24 +692,26 @@ page.getUploads = (params = {}) => {
files[i].prettyBytes = page.getPrettyBytes(parseInt(files[i].size))
files[i].prettyDate = page.getPrettyDate(new Date(files[i].timestamp * 1000))
- if (hasExpiryDateColumn)
+ if (hasExpiryDateColumn) {
files[i].prettyExpiryDate = files[i].expirydate
? page.getPrettyDate(new Date(files[i].expirydate * 1000))
: null
+ }
// Update selected status
files[i].selected = page.selected[page.currentView].includes(files[i].id)
if (!files[i].selected) unselected = true
// Appendix (display album or user)
- if (params.all)
+ if (params.all) {
files[i].appendix = files[i].userid
? users[files[i].userid] || ''
: ''
- else if (params.album === undefined)
+ } else if (params.album === undefined) {
files[i].appendix = files[i].albumid
? albums[files[i].albumid] || ''
: ''
+ }
}
if (page.views[page.currentView].type === 'thumbs') {
@@ -729,10 +734,11 @@ page.getUploads = (params = {}) => {
div.className = 'image-container column'
div.dataset.id = upload.id
- if (upload.thumb !== undefined)
+ if (upload.thumb !== undefined) {
div.innerHTML = ``
- else
+ } else {
+ div.innerHTML = `${upload.extname || 'N/A'}`
+ }
div.innerHTML += `
@@ -866,8 +872,7 @@ page.getUploads = (params = {}) => {
page.updateTrigger(params.trigger, 'active')
- if (page.currentView === 'uploads')
- page.views.uploads.album = params.album
+ if (page.currentView === 'uploads') page.views.uploads.album = params.album
page.views[page.currentView].filters = params.filters
page.views[page.currentView].pageNum = files.length ? params.pageNum : 0
}).catch(error => {
@@ -877,8 +882,7 @@ page.getUploads = (params = {}) => {
}
page.setUploadsView = (view, element) => {
- if (page.isSomethingLoading)
- return page.warnSomethingLoading()
+ if (page.isSomethingLoading) return page.warnSomethingLoading()
if (view === 'list') {
delete localStorage[lsKeys.viewType[page.currentView]]
@@ -895,8 +899,7 @@ page.setUploadsView = (view, element) => {
}
page.toggleOriginalNames = element => {
- if (page.isSomethingLoading)
- return page.warnSomethingLoading()
+ if (page.isSomethingLoading) return page.warnSomethingLoading()
if (page.views[page.currentView].originalNames) {
delete localStorage[lsKeys.originalNames[page.currentView]]
@@ -951,8 +954,7 @@ page.displayPreview = id => {
div.querySelector('#swalOriginal').addEventListener('click', event => {
const trigger = event.currentTarget
- if (trigger.classList.contains('is-danger'))
- return
+ if (trigger.classList.contains('is-danger')) return
trigger.classList.add('is-loading')
const thumb = div.querySelector('#swalThumb')
@@ -993,7 +995,7 @@ page.displayPreview = id => {
content: div,
buttons: false
}).then(() => {
- // Destroy video, if necessary
+ // Destroy video, if necessary
const video = div.querySelector('#swalVideo')
if (video) video.remove()
@@ -1008,17 +1010,19 @@ page.selectAll = element => {
if (isNaN(id)) continue
if (page.checkboxes[i].checked !== element.checked) {
page.checkboxes[i].checked = element.checked
- if (page.checkboxes[i].checked)
+ if (page.checkboxes[i].checked) {
page.selected[page.currentView].push(id)
- else
+ } else {
page.selected[page.currentView].splice(page.selected[page.currentView].indexOf(id), 1)
+ }
}
}
- if (page.selected[page.currentView].length)
+ if (page.selected[page.currentView].length) {
localStorage[lsKeys.selected[page.currentView]] = JSON.stringify(page.selected[page.currentView])
- else
+ } else {
delete localStorage[lsKeys.selected[page.currentView]]
+ }
element.title = element.checked ? 'Unselect all' : 'Select all'
}
@@ -1028,20 +1032,21 @@ page.selectInBetween = (element, lastElement) => {
const lastIndex = parseInt(lastElement.dataset.index)
const distance = Math.abs(thisIndex - lastIndex)
- if (distance < 2)
- return
+ if (distance < 2) return
- for (let i = 0; i < page.checkboxes.length; i++)
+ for (let i = 0; i < page.checkboxes.length; i++) {
if ((thisIndex > lastIndex && i > lastIndex && i < thisIndex) ||
(thisIndex < lastIndex && i > thisIndex && i < lastIndex)) {
// Check or uncheck depending on the state of the initial checkbox
const checked = page.checkboxes[i].checked = lastElement.checked
const id = page.getItemID(page.checkboxes[i])
- if (!page.selected[page.currentView].includes(id) && checked)
+ if (!page.selected[page.currentView].includes(id) && checked) {
page.selected[page.currentView].push(id)
- else if (page.selected[page.currentView].includes(id) && !checked)
+ } else if (page.selected[page.currentView].includes(id) && !checked) {
page.selected[page.currentView].splice(page.selected[page.currentView].indexOf(id), 1)
+ }
}
+ }
}
page.select = (element, event) => {
@@ -1056,24 +1061,25 @@ page.select = (element, event) => {
page.lastSelected = element
}
- if (!page.selected[page.currentView].includes(id) && element.checked)
+ if (!page.selected[page.currentView].includes(id) && element.checked) {
page.selected[page.currentView].push(id)
- else if (page.selected[page.currentView].includes(id) && !element.checked)
+ } else if (page.selected[page.currentView].includes(id) && !element.checked) {
page.selected[page.currentView].splice(page.selected[page.currentView].indexOf(id), 1)
+ }
// Update local storage
- if (page.selected[page.currentView].length)
+ if (page.selected[page.currentView].length) {
localStorage[lsKeys.selected[page.currentView]] = JSON.stringify(page.selected[page.currentView])
- else
+ } else {
delete localStorage[lsKeys.selected[page.currentView]]
+ }
}
page.clearSelection = () => {
const selected = page.selected[page.currentView]
const type = page.getByView(page.currentView, 'type')
const count = selected.length
- if (!count)
- return swal('An error occurred!', `You have not selected any ${type}.`, 'error')
+ if (!count) return swal('An error occurred!', `You have not selected any ${type}.`, 'error')
const suffix = count === 1 ? type.substring(0, type.length - 1) : type
return swal({
@@ -1084,9 +1090,11 @@ page.clearSelection = () => {
if (!proceed) return
const checkboxes = page.checkboxes
- for (let i = 0; i < checkboxes.length; i++)
- if (checkboxes[i].checked)
+ for (let i = 0; i < checkboxes.length; i++) {
+ if (checkboxes[i].checked) {
checkboxes[i].checked = false
+ }
+ }
page.selected[page.currentView] = []
delete localStorage[lsKeys.selected[page.currentView]]
@@ -1247,14 +1255,16 @@ page.deleteUpload = id => {
values: [id],
cb (failed) {
// Remove from remembered checkboxes if necessary
- if (!failed.length && page.selected[page.currentView].includes(id))
+ if (!failed.length && page.selected[page.currentView].includes(id)) {
page.selected[page.currentView].splice(page.selected[page.currentView].indexOf(id), 1)
+ }
// Update local storage
- if (page.selected[page.currentView].length)
+ if (page.selected[page.currentView].length) {
localStorage[lsKeys.selected[page.currentView]] = JSON.stringify(page.selected[page.currentView])
- else
+ } else {
delete localStorage[lsKeys.selected[page.currentView]]
+ }
// Reload upload list
// eslint-disable-next-line compat/compat
@@ -1267,8 +1277,7 @@ page.deleteUpload = id => {
page.bulkDeleteUploads = () => {
const count = page.selected[page.currentView].length
- if (!count)
- return swal('An error occurred!', 'You have not selected any uploads.', 'error')
+ if (!count) return swal('An error occurred!', 'You have not selected any uploads.', 'error')
page.postBulkDeleteUploads({
all: page.currentView === 'uploadsAll',
@@ -1276,19 +1285,19 @@ page.bulkDeleteUploads = () => {
values: page.selected[page.currentView],
cb (failed) {
// Update state of checkboxes
- if (failed.length)
+ if (failed.length) {
page.selected[page.currentView] = page.selected[page.currentView]
- .filter(id => {
- return failed.includes(id)
- })
- else
+ .filter(id => failed.includes(id))
+ } else {
page.selected[page.currentView] = []
+ }
// Update local storage
- if (page.selected[page.currentView].length)
+ if (page.selected[page.currentView].length) {
localStorage[lsKeys.selected[page.currentView]] = JSON.stringify(page.selected[page.currentView])
- else
+ } else {
delete localStorage[lsKeys.selected[page.currentView]]
+ }
// Reload uploads list
// eslint-disable-next-line compat/compat
@@ -1301,8 +1310,9 @@ page.bulkDeleteUploads = () => {
page.deleteUploadsByNames = (params = {}) => {
let appendix = ''
- if (page.permissions.moderator)
+ if (page.permissions.moderator) {
appendix = '<br>Hint: You can use this feature to delete uploads by other users.'
+ }
page.dom.innerHTML = `