Removed custom ESLint curly rule

Sigh, why did you do this, past me..?

Also fixed "Delete uploads by names".
This commit is contained in:
Bobby Wibowo 2020-10-31 01:12:09 +07:00
parent a86967c251
commit 47dd512910
No known key found for this signature in database
GPG Key ID: 51C3A1E1E22D26CF
20 changed files with 902 additions and 737 deletions

View File

@ -10,16 +10,11 @@ module.exports = {
'standard'
],
rules: {
curly: [
'error',
'multi',
'consistent'
],
'no-throw-literal': 0,
'object-shorthand': [
'error',
'always'
],
'standard/no-callback-literal': 0
'node/no-callback-literal': 0
}
}

View File

@ -29,14 +29,10 @@ const zipOptions = config.uploads.jsZipOptions
zipOptions.type = 'nodebuffer'
// Apply fallbacks for missing config values
if (zipOptions.streamFiles === undefined)
zipOptions.streamFiles = true
if (zipOptions.compression === undefined)
zipOptions.compression = 'DEFLATE'
if (zipOptions.compressionOptions === undefined)
zipOptions.compressionOptions = {}
if (zipOptions.compressionOptions.level === undefined)
zipOptions.compressionOptions.level = 1
if (zipOptions.streamFiles === undefined) zipOptions.streamFiles = true
if (zipOptions.compression === undefined) zipOptions.compression = 'DEFLATE'
if (zipOptions.compressionOptions === undefined) zipOptions.compressionOptions = {}
if (zipOptions.compressionOptions.level === undefined) zipOptions.compressionOptions.level = 1
self.zipEmitters = new Map()
@ -51,8 +47,7 @@ class ZipEmitter extends EventEmitter {
self.getUniqueRandomName = async () => {
for (let i = 0; i < utils.idMaxTries; i++) {
const identifier = randomstring.generate(config.uploads.albumIdentifierLength)
if (self.onHold.has(identifier))
continue
if (self.onHold.has(identifier)) continue
// Put token on-hold (wait for it to be inserted to DB)
self.onHold.add(identifier)
@ -80,15 +75,15 @@ self.list = async (req, res, next) => {
const all = req.headers.all === '1'
const sidebar = req.headers.sidebar
const ismoderator = perms.is(user, 'moderator')
if (all && !ismoderator)
return res.status(403).end()
if (all && !ismoderator) return res.status(403).end()
const filter = function () {
if (!all)
if (!all) {
this.where({
enabled: 1,
userid: user.id
})
}
}
try {
@ -97,8 +92,7 @@ self.list = async (req, res, next) => {
.where(filter)
.count('id as count')
.then(rows => rows[0].count)
if (!count)
return res.json({ success: true, albums: [], count })
if (!count) return res.json({ success: true, albums: [], count })
const fields = ['id', 'name']
@ -116,8 +110,7 @@ self.list = async (req, res, next) => {
else if (offset < 0) offset = Math.max(0, Math.ceil(count / 25) + offset)
fields.push('identifier', 'enabled', 'timestamp', 'editedAt', 'download', 'public', 'description')
if (all)
fields.push('userid')
if (all) fields.push('userid')
albums = await db.table('albums')
.where(filter)
@ -140,13 +133,14 @@ self.list = async (req, res, next) => {
.whereIn('albumid', Object.keys(albumids))
.select('albumid')
for (const upload of uploads)
if (albumids[upload.albumid])
for (const upload of uploads) {
if (albumids[upload.albumid]) {
albumids[upload.albumid].uploads++
}
}
// If we are not listing all albums, send response
if (!all)
return res.json({ success: true, albums, count, homeDomain })
if (!all) return res.json({ success: true, albums, count, homeDomain })
// Otherwise proceed to querying usernames
const userids = albums
@ -156,8 +150,7 @@ self.list = async (req, res, next) => {
})
// If there are no albums attached to a registered user, send response
if (userids.length === 0)
return res.json({ success: true, albums, count, homeDomain })
if (!userids.length) return res.json({ success: true, albums, count, homeDomain })
// Query usernames of user IDs from currently selected files
const usersTable = await db.table('users')
@ -165,8 +158,9 @@ self.list = async (req, res, next) => {
.select('id', 'username')
const users = {}
for (const user of usersTable)
for (const user of usersTable) {
users[user.id] = user.username
}
return res.json({ success: true, albums, count, users, homeDomain })
} catch (error) {
@ -183,8 +177,7 @@ self.create = async (req, res, next) => {
? utils.escape(req.body.name.trim().substring(0, self.titleMaxLength))
: ''
if (!name)
return res.json({ success: false, description: 'No album name specified.' })
if (!name) return res.json({ success: false, description: 'No album name specified.' })
try {
const album = await db.table('albums')
@ -195,8 +188,7 @@ self.create = async (req, res, next) => {
})
.first()
if (album)
return res.json({ success: false, description: 'There is already an album with that name.' })
if (album) return res.json({ success: false, description: 'There is already an album with that name.' })
const identifier = await self.getUniqueRandomName()
@ -235,8 +227,7 @@ self.disable = async (req, res, next) => {
const id = req.body.id
const purge = req.body.purge
if (!Number.isFinite(id))
return res.json({ success: false, description: 'No album specified.' })
if (!Number.isFinite(id)) return res.json({ success: false, description: 'No album specified.' })
try {
if (purge) {
@ -249,8 +240,7 @@ self.disable = async (req, res, next) => {
if (files.length) {
const ids = files.map(file => file.id)
const failed = await utils.bulkDeleteFromDb('id', ids, user)
if (failed.length)
return res.json({ success: false, failed })
if (failed.length) return res.json({ success: false, failed })
}
utils.invalidateStatsCache('uploads')
}
@ -291,24 +281,23 @@ self.edit = async (req, res, next) => {
const ismoderator = perms.is(user, 'moderator')
const id = parseInt(req.body.id)
if (isNaN(id))
return res.json({ success: false, description: 'No album specified.' })
if (isNaN(id)) return res.json({ success: false, description: 'No album specified.' })
const name = typeof req.body.name === 'string'
? utils.escape(req.body.name.trim().substring(0, self.titleMaxLength))
: ''
if (!name)
return res.json({ success: false, description: 'No name specified.' })
if (!name) return res.json({ success: false, description: 'No name specified.' })
const filter = function () {
this.where('id', id)
if (!ismoderator)
if (!ismoderator) {
this.andWhere({
enabled: 1,
userid: user.id
})
}
}
try {
@ -316,13 +305,14 @@ self.edit = async (req, res, next) => {
.where(filter)
.first()
if (!album)
if (!album) {
return res.json({ success: false, description: 'Could not get album with the specified ID.' })
else if (album.id !== id)
} else if (album.id !== id) {
return res.json({ success: false, description: 'Name already in use.' })
else if (req._old && (album.id === id))
} else if (req._old && (album.id === id)) {
// Old rename API
return res.json({ success: false, description: 'You did not specify a new name.' })
}
const update = {
name,
@ -333,11 +323,13 @@ self.edit = async (req, res, next) => {
: ''
}
if (ismoderator)
if (ismoderator) {
update.enabled = Boolean(req.body.enabled)
}
if (req.body.requestLink)
if (req.body.requestLink) {
update.identifier = await self.getUniqueRandomName()
}
await db.table('albums')
.where(filter)
@ -353,10 +345,9 @@ self.edit = async (req, res, next) => {
const oldZip = path.join(paths.zips, `${album.identifier}.zip`)
const newZip = path.join(paths.zips, `${update.identifier}.zip`)
await paths.rename(oldZip, newZip)
} catch (err) {
} catch (error) {
// Re-throw error
if (err.code !== 'ENOENT')
throw err
if (error.code !== 'ENOENT') throw error
}
return res.json({
@ -380,8 +371,9 @@ self.rename = async (req, res, next) => {
self.get = async (req, res, next) => {
const identifier = req.params.identifier
if (identifier === undefined)
if (identifier === undefined) {
return res.status(401).json({ success: false, description: 'No identifier provided.' })
}
try {
const album = await db.table('albums')
@ -391,16 +383,17 @@ self.get = async (req, res, next) => {
})
.first()
if (!album)
if (!album) {
return res.json({
success: false,
description: 'Album not found.'
})
else if (album.public === 0)
} else if (album.public === 0) {
return res.status(403).json({
success: false,
description: 'This album is not available for public.'
})
}
const title = album.name
const files = await db.table('files')
@ -412,8 +405,9 @@ self.get = async (req, res, next) => {
file.file = `${config.domain}/${file.name}`
const extname = utils.extname(file.name)
if (utils.mayGenerateThumb(extname))
if (utils.mayGenerateThumb(extname)) {
file.thumb = `${config.domain}/thumbs/${file.name.slice(0, -extname.length)}.png`
}
}
return res.json({
@ -432,17 +426,19 @@ self.generateZip = async (req, res, next) => {
const versionString = parseInt(req.query.v)
const identifier = req.params.identifier
if (identifier === undefined)
if (identifier === undefined) {
return res.status(401).json({
success: false,
description: 'No identifier provided.'
})
}
if (!config.uploads.generateZips)
if (!config.uploads.generateZips) {
return res.status(401).json({
success: false,
description: 'Zip generation disabled.'
})
}
try {
const album = await db.table('albums')
@ -452,32 +448,35 @@ self.generateZip = async (req, res, next) => {
})
.first()
if (!album)
if (!album) {
return res.json({ success: false, description: 'Album not found.' })
else if (album.download === 0)
} else if (album.download === 0) {
return res.json({ success: false, description: 'Download for this album is disabled.' })
}
if ((isNaN(versionString) || versionString <= 0) && album.editedAt)
if ((isNaN(versionString) || versionString <= 0) && album.editedAt) {
return res.redirect(`${album.identifier}?v=${album.editedAt}`)
}
if (album.zipGeneratedAt > album.editedAt)
if (album.zipGeneratedAt > album.editedAt) {
try {
const filePath = path.join(paths.zips, `${identifier}.zip`)
await paths.access(filePath)
return res.download(filePath, `${album.name}.zip`)
} catch (error) {
// Re-throw error
if (error.code !== 'ENOENT')
throw error
if (error.code !== 'ENOENT') throw error
}
}
if (self.zipEmitters.has(identifier)) {
logger.log(`Waiting previous zip task for album: ${identifier}.`)
return self.zipEmitters.get(identifier).once('done', (filePath, fileName, json) => {
if (filePath && fileName)
if (filePath && fileName) {
res.download(filePath, fileName)
else if (json)
} else if (json) {
res.json(json)
}
})
}
@ -559,8 +558,9 @@ self.addFiles = async (req, res, next) => {
if (!user) return
const ids = req.body.ids
if (!Array.isArray(ids) || !ids.length)
if (!Array.isArray(ids) || !ids.length) {
return res.json({ success: false, description: 'No files specified.' })
}
let albumid = parseInt(req.body.albumid)
if (isNaN(albumid) || albumid < 0) albumid = null
@ -572,16 +572,18 @@ self.addFiles = async (req, res, next) => {
const album = await db.table('albums')
.where('id', albumid)
.where(function () {
if (user.username !== 'root')
if (user.username !== 'root') {
this.where('userid', user.id)
}
})
.first()
if (!album)
if (!album) {
return res.json({
success: false,
description: 'Album does not exist or it does not belong to the user.'
})
}
albumids.push(albumid)
}
@ -597,8 +599,9 @@ self.addFiles = async (req, res, next) => {
.update('albumid', albumid)
files.forEach(file => {
if (file.albumid && !albumids.includes(file.albumid))
if (file.albumid && !albumids.includes(file.albumid)) {
albumids.push(file.albumid)
}
})
await db.table('albums')
@ -609,13 +612,14 @@ self.addFiles = async (req, res, next) => {
return res.json({ success: true, failed })
} catch (error) {
logger.error(error)
if (failed.length === ids.length)
if (failed.length === ids.length) {
return res.json({
success: false,
description: `Could not ${albumid === null ? 'add' : 'remove'} any files ${albumid === null ? 'to' : 'from'} the album.`
})
else
} else {
return res.status(500).json({ success: false, description: 'An unexpected error occurred. Try again?' })
}
}
}

View File

@ -34,31 +34,30 @@ self.verify = async (req, res, next) => {
const username = typeof req.body.username === 'string'
? req.body.username.trim()
: ''
if (!username)
return res.json({ success: false, description: 'No username provided.' })
if (!username) return res.json({ success: false, description: 'No username provided.' })
const password = typeof req.body.password === 'string'
? req.body.password.trim()
: ''
if (!password)
return res.json({ success: false, description: 'No password provided.' })
if (!password) return res.json({ success: false, description: 'No password provided.' })
try {
const user = await db.table('users')
.where('username', username)
.first()
if (!user)
return res.json({ success: false, description: 'Username does not exist.' })
if (!user) return res.json({ success: false, description: 'Username does not exist.' })
if (user.enabled === false || user.enabled === 0)
if (user.enabled === false || user.enabled === 0) {
return res.json({ success: false, description: 'This account has been disabled.' })
}
const result = await bcrypt.compare(password, user.password)
if (result === false)
if (result === false) {
return res.json({ success: false, description: 'Wrong password.' })
else
} else {
return res.json({ success: true, token: user.token })
}
} catch (error) {
logger.error(error)
return res.status(500).json({ success: false, description: 'An unexpected error occurred. Try again?' })
@ -66,34 +65,46 @@ self.verify = async (req, res, next) => {
}
self.register = async (req, res, next) => {
if (config.enableUserAccounts === false)
if (config.enableUserAccounts === false) {
return res.json({ success: false, description: 'Registration is currently disabled.' })
}
const username = typeof req.body.username === 'string'
? req.body.username.trim()
: ''
if (username.length < self.user.min || username.length > self.user.max)
return res.json({ success: false, description: `Username must have ${self.user.min}-${self.user.max} characters.` })
if (username.length < self.user.min || username.length > self.user.max) {
return res.json({
success: false,
description: `Username must have ${self.user.min}-${self.user.max} characters.`
})
}
const password = typeof req.body.password === 'string'
? req.body.password.trim()
: ''
if (password.length < self.pass.min || password.length > self.pass.max)
return res.json({ success: false, description: `Password must have ${self.pass.min}-${self.pass.max} characters.` })
if (password.length < self.pass.min || password.length > self.pass.max) {
return res.json({
success: false,
description: `Password must have ${self.pass.min}-${self.pass.max} characters.`
})
}
try {
const user = await db.table('users')
.where('username', username)
.first()
if (user)
return res.json({ success: false, description: 'Username already exists.' })
if (user) return res.json({ success: false, description: 'Username already exists.' })
const hash = await bcrypt.hash(password, saltRounds)
const token = await tokens.generateUniqueToken()
if (!token)
return res.json({ success: false, description: 'Sorry, we could not allocate a unique token. Try again?' })
if (!token) {
return res.json({
success: false,
description: 'Sorry, we could not allocate a unique token. Try again?'
})
}
await db.table('users')
.insert({
@ -121,8 +132,12 @@ self.changePassword = async (req, res, next) => {
const password = typeof req.body.password === 'string'
? req.body.password.trim()
: ''
if (password.length < self.pass.min || password.length > self.pass.max)
return res.json({ success: false, description: `Password must have ${self.pass.min}-${self.pass.max} characters.` })
if (password.length < self.pass.min || password.length > self.pass.max) {
return res.json({
success: false,
description: `Password must have ${self.pass.min}-${self.pass.max} characters.`
})
}
try {
const hash = await bcrypt.hash(password, saltRounds)
@ -139,12 +154,13 @@ self.changePassword = async (req, res, next) => {
}
self.assertPermission = (user, target) => {
if (!target)
if (!target) {
throw new Error('Could not get user with the specified ID.')
else if (!perms.higher(user, target))
} else if (!perms.higher(user, target)) {
throw new Error('The user is in the same or higher group as you.')
else if (target.username === 'root')
} else if (target.username === 'root') {
throw new Error('Root user may not be tampered with.')
}
}
self.createUser = async (req, res, next) => {
@ -152,21 +168,28 @@ self.createUser = async (req, res, next) => {
if (!user) return
const isadmin = perms.is(user, 'admin')
if (!isadmin)
return res.status(403).end()
if (!isadmin) return res.status(403).end()
const username = typeof req.body.username === 'string'
? req.body.username.trim()
: ''
if (username.length < self.user.min || username.length > self.user.max)
return res.json({ success: false, description: `Username must have ${self.user.min}-${self.user.max} characters.` })
if (username.length < self.user.min || username.length > self.user.max) {
return res.json({
success: false,
description: `Username must have ${self.user.min}-${self.user.max} characters.`
})
}
let password = typeof req.body.password === 'string'
? req.body.password.trim()
: ''
if (password.length) {
if (password.length < self.pass.min || password.length > self.pass.max)
return res.json({ success: false, description: `Password must have ${self.pass.min}-${self.pass.max} characters.` })
if (password.length < self.pass.min || password.length > self.pass.max) {
return res.json({
success: false,
description: `Password must have ${self.pass.min}-${self.pass.max} characters.`
})
}
} else {
password = randomstring.generate(self.pass.rand)
}
@ -186,14 +209,17 @@ self.createUser = async (req, res, next) => {
.where('username', username)
.first()
if (user)
return res.json({ success: false, description: 'Username already exists.' })
if (user) return res.json({ success: false, description: 'Username already exists.' })
const hash = await bcrypt.hash(password, saltRounds)
const token = await tokens.generateUniqueToken()
if (!token)
return res.json({ success: false, description: 'Sorry, we could not allocate a unique token. Try again?' })
if (!token) {
return res.json({
success: false,
description: 'Sorry, we could not allocate a unique token. Try again?'
})
}
await db.table('users')
.insert({
@ -219,12 +245,10 @@ self.editUser = async (req, res, next) => {
if (!user) return
const isadmin = perms.is(user, 'admin')
if (!isadmin)
return res.status(403).end()
if (!isadmin) return res.status(403).end()
const id = parseInt(req.body.id)
if (isNaN(id))
return res.json({ success: false, description: 'No user specified.' })
if (isNaN(id)) return res.json({ success: false, description: 'No user specified.' })
try {
const target = await db.table('users')
@ -236,17 +260,20 @@ self.editUser = async (req, res, next) => {
if (req.body.username !== undefined) {
update.username = String(req.body.username).trim()
if (update.username.length < self.user.min || update.username.length > self.user.max)
if (update.username.length < self.user.min || update.username.length > self.user.max) {
throw new Error(`Username must have ${self.user.min}-${self.user.max} characters.`)
}
}
if (req.body.enabled !== undefined)
if (req.body.enabled !== undefined) {
update.enabled = Boolean(req.body.enabled)
}
if (req.body.group !== undefined) {
update.permission = perms.permissions[req.body.group]
if (typeof update.permission !== 'number' || update.permission < 0)
if (typeof update.permission !== 'number' || update.permission < 0) {
update.permission = target.permission
}
}
let password
@ -282,13 +309,11 @@ self.deleteUser = async (req, res, next) => {
if (!user) return
const isadmin = perms.is(user, 'admin')
if (!isadmin)
return res.status(403).end()
if (!isadmin) return res.status(403).end()
const id = parseInt(req.body.id)
const purge = req.body.purge
if (isNaN(id))
return res.json({ success: false, description: 'No user specified.' })
if (isNaN(id)) return res.json({ success: false, description: 'No user specified.' })
try {
const target = await db.table('users')
@ -304,8 +329,7 @@ self.deleteUser = async (req, res, next) => {
const fileids = files.map(file => file.id)
if (purge) {
const failed = await utils.bulkDeleteFromDb('id', fileids, user)
if (failed.length)
return res.json({ success: false, failed })
if (failed.length) return res.json({ success: false, failed })
utils.invalidateStatsCache('uploads')
} else {
// Clear out userid attribute from the files
@ -315,7 +339,8 @@ self.deleteUser = async (req, res, next) => {
}
}
// TODO: Figure out obstacles of just deleting the albums
// TODO: Figure out why we can't just delete the albums from DB
// DISCLAIMER: Upstream always had it coded this way for some reason
const albums = await db.table('albums')
.where('userid', id)
.where('enabled', 1)
@ -333,8 +358,7 @@ self.deleteUser = async (req, res, next) => {
try {
await paths.unlink(path.join(paths.zips, `${album.identifier}.zip`))
} catch (error) {
if (error.code !== 'ENOENT')
throw error
if (error.code !== 'ENOENT') throw error
}
}))
}
@ -362,15 +386,13 @@ self.listUsers = async (req, res, next) => {
if (!user) return
const isadmin = perms.is(user, 'admin')
if (!isadmin)
return res.status(403).end()
if (!isadmin) return res.status(403).end()
try {
const count = await db.table('users')
.count('id as count')
.then(rows => rows[0].count)
if (!count)
return res.json({ success: true, users: [], count })
if (!count) return res.json({ success: true, users: [], count })
let offset = Number(req.params.page)
if (isNaN(offset)) offset = 0

View File

@ -36,8 +36,9 @@ DiskStorage.prototype._handleFile = function _handleFile (req, file, cb) {
file._chunksData.stream = fs.createWriteStream(finalPath, { flags: 'a' })
file._chunksData.stream.on('error', onerror)
}
if (!file._chunksData.hasher)
if (!file._chunksData.hasher) {
file._chunksData.hasher = blake3.createHash()
}
outStream = file._chunksData.stream
hash = file._chunksData.hasher

View File

@ -20,8 +20,9 @@ const fsFuncs = [
'writeFile'
]
for (const fsFunc of fsFuncs)
for (const fsFunc of fsFuncs) {
self[fsFunc] = promisify(fs[fsFunc])
}
self.uploads = path.resolve(config.uploads.folder)
self.chunks = path.join(self.uploads, 'chunks')
@ -51,7 +52,7 @@ const verify = [
self.init = async () => {
// Check & create directories
for (const p of verify)
for (const p of verify) {
try {
await self.access(p)
} catch (err) {
@ -59,10 +60,10 @@ self.init = async () => {
throw err
} else {
const mkdir = await self.mkdir(p)
if (mkdir)
logger.log(`Created directory: ${p}`)
if (mkdir) logger.log(`Created directory: ${p}`)
}
}
}
// Purge any leftover in chunks directory
const uuidDirs = await self.readdir(self.chunks)
@ -74,8 +75,7 @@ self.init = async () => {
))
await self.rmdir(root)
}))
if (uuidDirs.length)
logger.log(`Purged ${uuidDirs.length} unfinished chunks`)
if (uuidDirs.length) logger.log(`Purged ${uuidDirs.length} unfinished chunks`)
}
module.exports = self

View File

@ -11,8 +11,7 @@ self.permissions = {
// returns true if user is in the group OR higher
self.is = (user, group) => {
// root bypass
if (user.username === 'root')
return true
if (user.username === 'root') return true
const permission = user.permission || 0
return permission >= self.permissions[group]

View File

@ -14,8 +14,7 @@ const self = {
self.generateUniqueToken = async () => {
for (let i = 0; i < self.tokenMaxTries; i++) {
const token = randomstring.generate(self.tokenLength)
if (self.onHold.has(token))
continue
if (self.onHold.has(token)) continue
// Put token on-hold (wait for it to be inserted to DB)
self.onHold.add(token)
@ -40,8 +39,7 @@ self.verify = async (req, res, next) => {
? req.body.token.trim()
: ''
if (!token)
return res.json({ success: false, description: 'No token provided.' })
if (!token) return res.json({ success: false, description: 'No token provided.' })
try {
const user = await db.table('users')
@ -49,8 +47,7 @@ self.verify = async (req, res, next) => {
.select('username', 'permission')
.first()
if (!user)
return res.json({ success: false, description: 'Invalid token.' })
if (!user) return res.json({ success: false, description: 'Invalid token.' })
const obj = {
success: true,
@ -76,8 +73,12 @@ self.change = async (req, res, next) => {
if (!user) return
const newToken = await self.generateUniqueToken()
if (!newToken)
return res.json({ success: false, description: 'Sorry, we could not allocate a unique token. Try again?' })
if (!newToken) {
return res.json({
success: false,
description: 'Sorry, we could not allocate a unique token. Try again?'
})
}
try {
await db.table('users')

File diff suppressed because it is too large Load Diff

View File

@ -18,7 +18,9 @@ const self = {
chunkSize: config.uploads.scan.chunkSize || 64 * 1024,
groupBypass: config.uploads.scan.groupBypass || null,
whitelistExtensions: (Array.isArray(config.uploads.scan.whitelistExtensions) &&
config.uploads.scan.whitelistExtensions.length) ? config.uploads.scan.whitelistExtensions : null,
config.uploads.scan.whitelistExtensions.length)
? config.uploads.scan.whitelistExtensions
: null,
maxSize: (parseInt(config.uploads.scan.maxSize) * 1e6) || null
},
gitHash: null,
@ -92,14 +94,16 @@ self.extname = filename => {
}
// check against extensions that must be preserved
for (const extPreserve of extPreserves)
for (const extPreserve of extPreserves) {
if (lower.endsWith(extPreserve)) {
extname = extPreserve
break
}
}
if (!extname)
if (!extname) {
extname = lower.slice(lower.lastIndexOf('.') - lower.length) // path.extname(lower)
}
return extname + multi
}
@ -110,14 +114,12 @@ self.escape = string => {
// Copyright(c) 2015 Andreas Lubbe
// Copyright(c) 2015 Tiancheng "Timothy" Gu
if (!string)
return string
if (!string) return string
const str = String(string)
const match = /["'&<>]/.exec(str)
if (!match)
return str
if (!match) return str
let escape
let html = ''
@ -145,8 +147,9 @@ self.escape = string => {
continue
}
if (lastIndex !== index)
if (lastIndex !== index) {
html += str.substring(lastIndex, index)
}
lastIndex = index + 1
html += escape
@ -203,16 +206,16 @@ self.generateThumbs = async (name, extname, force) => {
// Check if thumbnail already exists
try {
const lstat = await paths.lstat(thumbname)
if (lstat.isSymbolicLink())
if (lstat.isSymbolicLink()) {
// Unlink if symlink (should be symlink to the placeholder)
await paths.unlink(thumbname)
else if (!force)
} else if (!force) {
// Continue only if it does not exist, unless forced to
return true
}
} catch (error) {
// Re-throw error
if (error.code !== 'ENOENT')
throw error
if (error.code !== 'ENOENT') throw error
}
// Full path to input file
@ -257,12 +260,14 @@ self.generateThumbs = async (name, extname, force) => {
const metadata = await self.ffprobe(input)
const duration = parseInt(metadata.format.duration)
if (isNaN(duration))
if (isNaN(duration)) {
throw 'Warning: File does not have valid duration metadata'
}
const videoStream = metadata.streams && metadata.streams.find(s => s.codec_type === 'video')
if (!videoStream || !videoStream.width || !videoStream.height)
if (!videoStream || !videoStream.width || !videoStream.height) {
throw 'Warning: File does not have valid video stream metadata'
}
await new Promise((resolve, reject) => {
ffmpeg(input)
@ -287,10 +292,11 @@ self.generateThumbs = async (name, extname, force) => {
await paths.lstat(thumbname)
return true
} catch (err) {
if (err.code === 'ENOENT')
if (err.code === 'ENOENT') {
throw error || 'Warning: FFMPEG exited with empty output file'
else
} else {
throw error || err
}
}
})
} else {
@ -361,8 +367,7 @@ self.unlinkFile = async (filename, predb) => {
await paths.unlink(path.join(paths.uploads, filename))
} catch (error) {
// Return true if file does not exist
if (error.code !== 'ENOENT')
throw error
if (error.code !== 'ENOENT') throw error
}
const identifier = filename.split('.')[0]
@ -375,26 +380,26 @@ self.unlinkFile = async (filename, predb) => {
}
const extname = self.extname(filename)
if (self.imageExts.includes(extname) || self.videoExts.includes(extname))
if (self.imageExts.includes(extname) || self.videoExts.includes(extname)) {
try {
await paths.unlink(path.join(paths.thumbs, `${identifier}.png`))
} catch (error) {
if (error.code !== 'ENOENT')
throw error
if (error.code !== 'ENOENT') throw error
}
}
}
self.bulkDeleteFromDb = async (field, values, user) => {
// Always return an empty array on failure
if (!user || !['id', 'name'].includes(field) || !values.length)
return []
if (!user || !['id', 'name'].includes(field) || !values.length) return []
// SQLITE_LIMIT_VARIABLE_NUMBER, which defaults to 999
// Read more: https://www.sqlite.org/limits.html
const MAX_VARIABLES_CHUNK_SIZE = 999
const chunks = []
while (values.length)
while (values.length) {
chunks.push(values.splice(0, MAX_VARIABLES_CHUNK_SIZE))
}
const failed = []
const ismoderator = perms.is(user, 'moderator')
@ -407,8 +412,9 @@ self.bulkDeleteFromDb = async (field, values, user) => {
const files = await db.table('files')
.whereIn(field, chunk)
.where(function () {
if (!ismoderator)
if (!ismoderator) {
this.where('userid', user.id)
}
})
// Push files that could not be found in db
@ -435,17 +441,19 @@ self.bulkDeleteFromDb = async (field, values, user) => {
.del()
self.invalidateStatsCache('uploads')
if (self.idSet)
if (self.idSet) {
unlinked.forEach(file => {
const identifier = file.name.split('.')[0]
self.idSet.delete(identifier)
// logger.log(`Removed ${identifier} from identifiers cache (bulkDeleteFromDb)`)
})
}
// Push album ids
unlinked.forEach(file => {
if (file.albumid && !albumids.includes(file.albumid))
if (file.albumid && !albumids.includes(file.albumid)) {
albumids.push(file.albumid)
}
})
// Push unlinked files
@ -463,13 +471,16 @@ self.bulkDeleteFromDb = async (field, values, user) => {
}
// Purge Cloudflare's cache if necessary, but do not wait
if (config.cloudflare.purgeCache)
if (config.cloudflare.purgeCache) {
self.purgeCloudflareCache(unlinkeds.map(file => file.name), true, true)
.then(results => {
for (const result of results)
if (result.errors.length)
for (const result of results) {
if (result.errors.length) {
result.errors.forEach(error => logger.error(`[CF]: ${error}`))
}
}
})
}
}
} catch (error) {
logger.error(error)
@ -480,12 +491,15 @@ self.bulkDeleteFromDb = async (field, values, user) => {
self.purgeCloudflareCache = async (names, uploads, thumbs) => {
const errors = []
if (!cloudflareAuth)
if (!cloudflareAuth) {
errors.push('Cloudflare auth is incomplete or missing')
if (!Array.isArray(names) || !names.length)
}
if (!Array.isArray(names) || !names.length) {
errors.push('Names array is invalid or empty')
if (errors.length)
}
if (errors.length) {
return [{ success: false, files: [], errors }]
}
let domain = config.domain
if (!uploads) domain = config.homeDomain
@ -495,8 +509,9 @@ self.purgeCloudflareCache = async (names, uploads, thumbs) => {
if (uploads) {
const url = `${domain}/${name}`
const extname = self.extname(name)
if (thumbs && self.mayGenerateThumb(extname))
if (thumbs && self.mayGenerateThumb(extname)) {
thumbNames.push(`${domain}/thumbs/${name.slice(0, -extname.length)}.png`)
}
return url
} else {
return name === 'home' ? domain : `${domain}/${name}`
@ -509,8 +524,9 @@ self.purgeCloudflareCache = async (names, uploads, thumbs) => {
// TODO: Handle API rate limits
const MAX_LENGTH = 30
const chunks = []
while (names.length)
while (names.length) {
chunks.push(names.splice(0, MAX_LENGTH))
}
const url = `https://api.cloudflare.com/client/v4/zones/${config.cloudflare.zoneId}/purge_cache`
const results = []
@ -543,8 +559,9 @@ self.purgeCloudflareCache = async (names, uploads, thumbs) => {
const response = await purge.json()
result.success = response.success
if (Array.isArray(response.errors) && response.errors.length)
if (Array.isArray(response.errors) && response.errors.length) {
result.errors = response.errors.map(error => `${error.code}: ${error.message}`)
}
} catch (error) {
result.errors = [error.toString()]
}
@ -642,7 +659,7 @@ self.stats = async (req, res, next) => {
}
// Disk usage, only for Linux platform
if (os.platform === 'linux')
if (os.platform === 'linux') {
if (!statsCache.disk.cache && statsCache.disk.generating) {
stats.disk = false
} else if (((Date.now() - statsCache.disk.generatedAt) <= 60000) || statsCache.disk.generating) {
@ -727,8 +744,9 @@ self.stats = async (req, res, next) => {
stats.disk[basename] = parseInt(formatted[0])
// Add to types if necessary
if (!stats.disk._types.byte.includes(basename))
if (!stats.disk._types.byte.includes(basename)) {
stats.disk._types.byte.push(basename)
}
})
const stderr = []
@ -786,6 +804,7 @@ self.stats = async (req, res, next) => {
statsCache.disk.cache = stats.disk
statsCache.disk.generating = false
}
}
// Uploads
if (!statsCache.uploads.cache && statsCache.uploads.generating) {
@ -812,8 +831,9 @@ self.stats = async (req, res, next) => {
stats.uploads.total = uploads.length
stats.uploads.sizeInDb = uploads.reduce((acc, upload) => acc + parseInt(upload.size), 0)
// Add type information for the new column
if (!Array.isArray(stats.uploads._types.byte))
if (!Array.isArray(stats.uploads._types.byte)) {
stats.uploads._types.byte = []
}
stats.uploads._types.byte.push('sizeInDb')
} else {
stats.uploads.total = await db.table('files')
@ -823,16 +843,18 @@ self.stats = async (req, res, next) => {
stats.uploads.images = await db.table('files')
.where(function () {
for (const ext of self.imageExts)
for (const ext of self.imageExts) {
this.orWhere('name', 'like', `%${ext}`)
}
})
.count('id as count')
.then(rows => rows[0].count)
stats.uploads.videos = await db.table('files')
.where(function () {
for (const ext of self.videoExts)
for (const ext of self.videoExts) {
this.orWhere('name', 'like', `%${ext}`)
}
})
.count('id as count')
.then(rows => rows[0].count)
@ -870,16 +892,18 @@ self.stats = async (req, res, next) => {
const users = await db.table('users')
stats.users.total = users.length
for (const user of users) {
if (user.enabled === false || user.enabled === 0)
if (user.enabled === false || user.enabled === 0) {
stats.users.disabled++
}
// This may be inaccurate on installations with customized permissions
user.permission = user.permission || 0
for (const p of permissionKeys)
for (const p of permissionKeys) {
if (user.permission === perms.permissions[p]) {
stats.users[p]++
break
}
}
}
// Update cache
@ -926,8 +950,7 @@ self.stats = async (req, res, next) => {
stats.albums.zipGenerated++
} catch (error) {
// Re-throw error
if (error.code !== 'ENOENT')
throw error
if (error.code !== 'ENOENT') throw error
}
}))

View File

@ -29,8 +29,9 @@ const postcssPlugins = [
sass.compiler = sassCompiler
// Minify on production
if (process.env.NODE_ENV !== 'development')
if (process.env.NODE_ENV !== 'development') {
postcssPlugins.push(cssnano())
}
/** TASKS: LINT */

View File

@ -54,8 +54,9 @@ self.debug = (...args) => {
Object.assign(options, args[args.length - 1])
args.splice(args.length - 1, 1)
}
for (const arg of args)
for (const arg of args) {
console.log(inspect(arg, options))
}
}
module.exports = self

View File

@ -36,11 +36,13 @@ safe.use(helmet({
hsts: false
}))
if (config.hsts instanceof Object && Object.keys(config.hsts).length)
if (config.hsts instanceof Object && Object.keys(config.hsts).length) {
safe.use(helmet.hsts(config.hsts))
}
if (config.trustProxy)
if (config.trustProxy) {
safe.set('trust proxy', 1)
}
// https://mozilla.github.io/nunjucks/api.html#configure
nunjucks.configure('views', {
@ -52,12 +54,14 @@ safe.set('view engine', 'njk')
safe.enable('view cache')
// Configure rate limits
if (Array.isArray(config.rateLimits) && config.rateLimits.length)
if (Array.isArray(config.rateLimits) && config.rateLimits.length) {
for (const rateLimit of config.rateLimits) {
const limiter = new RateLimit(rateLimit.config)
for (const route of rateLimit.routes)
for (const route of rateLimit.routes) {
safe.use(route, limiter)
}
}
}
safe.use(bodyParser.urlencoded({ extended: true }))
safe.use(bodyParser.json())
@ -117,24 +121,27 @@ if (config.cacheControl) {
// If using CDN, cache public pages in CDN
if (config.cacheControl !== 2) {
cdnPages.push('api/check')
for (const page of cdnPages)
for (const page of cdnPages) {
safe.use(`/${page === 'home' ? '' : page}`, (req, res, next) => {
res.set('Cache-Control', cacheControls.cdn)
next()
})
}
}
// If serving uploads with node
if (config.serveFilesWithNode)
if (config.serveFilesWithNode) {
initServeStaticUploads({
setHeaders: res => {
res.set('Access-Control-Allow-Origin', '*')
// If using CDN, cache uploads in CDN as well
// Use with cloudflare.purgeCache enabled in config file
if (config.cacheControl !== 2)
if (config.cacheControl !== 2) {
res.set('Cache-Control', cacheControls.cdn)
}
}
})
}
// Function for static assets.
// This requires the assets to use version in their query string,
@ -148,10 +155,11 @@ if (config.cacheControl) {
safe.use(['/api/album/zip'], (req, res, next) => {
res.set('Access-Control-Allow-Origin', '*')
const versionString = parseInt(req.query.v)
if (versionString > 0)
if (versionString > 0) {
res.set('Cache-Control', cacheControls.static)
else
} else {
res.set('Cache-Control', cacheControls.disable)
}
next()
})
} else if (config.serveFilesWithNode) {
@ -182,32 +190,36 @@ safe.use('/api', api)
// Re-map version strings if cache control is enabled (safe.fiery.me)
utils.versionStrings = {}
if (config.cacheControl) {
for (const type in versions)
for (const type in versions) {
utils.versionStrings[type] = `?_=${versions[type]}`
if (versions['1'])
}
if (versions['1']) {
utils.clientVersion = versions['1']
}
}
// Cookie Policy
if (config.cookiePolicy)
if (config.cookiePolicy) {
config.pages.push('cookiepolicy')
}
// Check for custom pages, otherwise fallback to Nunjucks templates
for (const page of config.pages) {
const customPage = path.join(paths.customPages, `${page}.html`)
if (!await paths.access(customPage).catch(() => true))
if (!await paths.access(customPage).catch(() => true)) {
safe.get(`/${page === 'home' ? '' : page}`, (req, res, next) => res.sendFile(customPage))
else if (page === 'home')
} else if (page === 'home') {
safe.get('/', (req, res, next) => res.render(page, {
config,
versions: utils.versionStrings,
gitHash: utils.gitHash
}))
else
} else {
safe.get(`/${page}`, (req, res, next) => res.render(page, {
config,
versions: utils.versionStrings
}))
}
}
// Error pages
@ -240,8 +252,9 @@ safe.use('/api', api)
logger.log(`${ip}:${port} ${version}`)
utils.clamd.scanner = clamd.createScanner(ip, port)
if (!utils.clamd.scanner)
if (!utils.clamd.scanner) {
throw 'Could not create clamd scanner'
}
}
// Cache file identifiers
@ -260,7 +273,7 @@ safe.use('/api', api)
// Cache control (safe.fiery.me)
// Purge Cloudflare cache
if (config.cacheControl && config.cacheControl !== 2)
if (config.cacheControl && config.cacheControl !== 2) {
if (config.cloudflare.purgeCache) {
logger.log('Cache control enabled, purging Cloudflare\'s cache...')
const results = await utils.purgeCloudflareCache(cdnPages)
@ -274,11 +287,13 @@ safe.use('/api', api)
}
succeeded += result.files.length
}
if (!errored)
if (!errored) {
logger.log(`Successfully purged ${succeeded} cache`)
}
} else {
logger.log('Cache control enabled without Cloudflare\'s cache purging')
}
}
// Temporary uploads (only check for expired uploads if config.uploads.temporaryUploadsInterval is also set)
if (Array.isArray(config.uploads.temporaryUploadAges) &&
@ -286,8 +301,7 @@ safe.use('/api', api)
config.uploads.temporaryUploadsInterval) {
let temporaryUploadsInProgress = false
const temporaryUploadCheck = async () => {
if (temporaryUploadsInProgress)
return
if (temporaryUploadsInProgress) return
temporaryUploadsInProgress = true
try {
@ -295,8 +309,9 @@ safe.use('/api', api)
if (result.expired.length) {
let logMessage = `Expired uploads: ${result.expired.length} deleted`
if (result.failed.length)
if (result.failed.length) {
logMessage += `, ${result.failed.length} errored`
}
logger.log(logMessage)
}
@ -321,10 +336,8 @@ safe.use('/api', api)
prompt: ''
}).on('line', line => {
try {
if (line === 'rs')
return
if (line === '.exit')
return process.exit(0)
if (line === 'rs') return
if (line === '.exit') return process.exit(0)
// eslint-disable-next-line no-eval
logger.log(eval(line))
} catch (error) {

View File

@ -12,11 +12,6 @@ module.exports = {
'plugin:compat/recommended'
],
rules: {
curly: [
'error',
'multi',
'consistent'
],
'object-shorthand': [
'error',
'always'

View File

@ -9,8 +9,9 @@ const page = {
window.addEventListener('DOMContentLoaded', () => {
const elements = document.querySelectorAll('.file-size')
for (let i = 0; i < elements.length; i++)
for (let i = 0; i < elements.length; i++) {
elements[i].innerHTML = page.getPrettyBytes(parseInt(elements[i].innerHTML.replace(/\s*B$/i, '')))
}
page.lazyLoad = new LazyLoad()
})

View File

@ -15,16 +15,13 @@ const page = {
page.unhide = () => {
const loaderSection = document.querySelector('#loader')
if (loaderSection)
loaderSection.classList.add('is-hidden')
if (loaderSection) loaderSection.classList.add('is-hidden')
const loginSection = document.querySelector('#login.is-hidden')
if (loginSection)
loginSection.classList.remove('is-hidden')
if (loginSection) loginSection.classList.remove('is-hidden')
const floatingBtn = document.querySelector('.floating-home-button.is-hidden')
if (floatingBtn)
floatingBtn.classList.remove('is-hidden')
if (floatingBtn) floatingBtn.classList.remove('is-hidden')
}
// Handler for Axios errors
@ -54,12 +51,10 @@ page.onAxiosError = error => {
page.do = (dest, trigger) => {
const user = page.user.value.trim()
if (!user)
return swal('An error occurred!', 'You need to specify a username.', 'error')
if (!user) return swal('An error occurred!', 'You need to specify a username.', 'error')
const pass = page.pass.value.trim()
if (!pass)
return swal('An error occurred!', 'You need to specify a password.', 'error')
if (!pass) return swal('An error occurred!', 'You need to specify a password.', 'error')
trigger.classList.add('is-loading')
axios.post(`api/${dest}`, {
@ -107,23 +102,24 @@ window.addEventListener('DOMContentLoaded', () => {
})
const loginBtn = document.querySelector('#loginBtn')
if (loginBtn)
if (loginBtn) {
loginBtn.addEventListener('click', event => {
if (!form.checkValidity()) return
page.do('login', event.currentTarget)
})
}
const registerBtn = document.querySelector('#registerBtn')
if (registerBtn)
if (registerBtn) {
registerBtn.addEventListener('click', event => {
if (!form.checkValidity())
if (!form.checkValidity()) {
// Workaround for browsers to display native form error messages
return loginBtn.click()
}
page.do('register', event.currentTarget)
})
}
if (page.token)
page.verify()
else
page.unhide()
if (page.token) page.verify()
else page.unhide()
})

File diff suppressed because it is too large Load Diff

View File

@ -81,10 +81,8 @@ page.onInitError = error => {
window.location.reload()
})
if (error.response)
page.onAxiosError(error)
else
page.onError(error)
if (error.response) page.onAxiosError(error)
else page.onError(error)
}
// Handler for regular JS errors
@ -102,8 +100,7 @@ page.onError = error => {
// Handler for Axios errors
page.onAxiosError = (error, cont) => {
if (!cont)
console.error(error)
if (!cont) console.error(error)
// Better Cloudflare errors
const cloudflareErrors = {
@ -138,7 +135,7 @@ page.onAxiosError = (error, cont) => {
page.checkClientVersion = apiVersion => {
const self = document.querySelector('#mainScript')
const match = self.src.match(/\?_=(\d+)$/)
if (match && match[1] && match[1] !== apiVersion)
if (match && match[1] && match[1] !== apiVersion) {
return swal({
title: 'Update detected!',
text: 'Client assets have been updated. Reload to display the latest version?',
@ -152,27 +149,23 @@ page.checkClientVersion = apiVersion => {
}).then(() => {
window.location.reload()
})
}
}
page.checkIfPublic = () => {
return axios.get('api/check', {
onDownloadProgress: () => {
// Only do render and/or newsfeed after this request has been initiated to avoid blocking
/* global render */
if (typeof render !== 'undefined' && !render.done)
render.do()
if (typeof render !== 'undefined' && !render.done) render.do()
/* global newsfeed */
if (typeof newsfeed !== 'undefined' && !newsfeed.done)
newsfeed.do()
if (!page.apiChecked)
page.apiChecked = true
if (typeof newsfeed !== 'undefined' && !newsfeed.done) newsfeed.do()
if (!page.apiChecked) page.apiChecked = true
}
}).then(response => {
if (response.data.version)
if (response.data.version) {
page.checkClientVersion(response.data.version)
}
page.private = response.data.private
page.enableUserAccounts = response.data.enableUserAccounts
@ -193,25 +186,27 @@ page.checkIfPublic = () => {
}
page.preparePage = () => {
if (page.private)
if (page.private) {
if (page.token) {
return page.verifyToken(page.token, true)
} else {
const button = document.querySelector('#loginToUpload')
button.href = 'auth'
button.classList.remove('is-loading')
if (page.enableUserAccounts)
if (page.enableUserAccounts) {
button.innerText = 'Anonymous upload is disabled.\nLog in or register to upload.'
else
} else {
button.innerText = 'Running in private mode.\nLog in to upload.'
}
}
else
} else {
return page.prepareUpload()
}
}
page.verifyToken = (token, reloadOnError) => {
return axios.post('api/tokens/verify', { token }).then(response => {
if (response.data.success === false)
if (response.data.success === false) {
return swal({
title: 'An error occurred!',
text: response.data.description,
@ -221,6 +216,7 @@ page.verifyToken = (token, reloadOnError) => {
localStorage.removeItem('token')
window.location.reload()
})
}
localStorage[lsKeys.token] = token
page.token = token
@ -233,8 +229,7 @@ page.prepareUpload = () => {
if (page.token) {
// Change /auth link to /dashboard
const authLink = document.querySelector('#linksColumn a[href="auth"]')
if (authLink)
authLink.setAttribute('href', 'dashboard')
if (authLink) authLink.setAttribute('href', 'dashboard')
// Display the album selection
document.querySelector('#albumDiv').classList.remove('is-hidden')
@ -243,8 +238,7 @@ page.prepareUpload = () => {
page.albumSelectOnChange = () => {
page.album = parseInt(page.albumSelect.value)
// Re-generate ShareX config file
if (typeof page.prepareShareX === 'function')
page.prepareShareX()
if (typeof page.prepareShareX === 'function') page.prepareShareX()
}
page.albumSelect.addEventListener('change', page.albumSelectOnChange)
@ -265,8 +259,7 @@ page.prepareUpload = () => {
page.prepareDropzone()
// Generate ShareX config file
if (typeof page.prepareShareX === 'function')
page.prepareShareX()
if (typeof page.prepareShareX === 'function') page.prepareShareX()
// Prepare urls upload tab
const urlMaxSize = document.querySelector('#urlMaxSize')
@ -301,7 +294,7 @@ page.prepareUpload = () => {
}
page.setActiveTab = index => {
for (let i = 0; i < page.tabs.length; i++)
for (let i = 0; i < page.tabs.length; i++) {
if (i === index) {
page.tabs[i].tab.classList.add('is-active')
page.tabs[i].content.classList.remove('is-hidden')
@ -310,15 +303,17 @@ page.setActiveTab = index => {
page.tabs[i].tab.classList.remove('is-active')
page.tabs[i].content.classList.add('is-hidden')
}
}
}
page.fetchAlbums = () => {
return axios.get('api/albums', { headers: { token: page.token } }).then(response => {
if (response.data.success === false)
if (response.data.success === false) {
return swal('An error occurred!', response.data.description, 'error')
}
// Create an option for each album
if (Array.isArray(response.data.albums) && response.data.albums.length)
if (Array.isArray(response.data.albums) && response.data.albums.length) {
for (let i = 0; i < response.data.albums.length; i++) {
const album = response.data.albums[i]
const option = document.createElement('option')
@ -326,6 +321,7 @@ page.fetchAlbums = () => {
option.innerHTML = album.name
page.albumSelect.appendChild(option)
}
}
}).catch(page.onInitError)
}
@ -371,8 +367,7 @@ page.prepareDropzone = () => {
init () {
this.on('addedfile', file => {
// Set active tab to file uploads, if necessary
if (page.activeTab !== 0)
page.setActiveTab(0)
if (page.activeTab !== 0) page.setActiveTab(0)
// Add file entry
tabDiv.querySelector('.uploads').classList.remove('is-hidden')
@ -383,19 +378,21 @@ page.prepareDropzone = () => {
this.on('sending', (file, xhr) => {
// Add timeout listener (hacky method due to lack of built-in timeout handler)
if (!xhr.ontimeout)
if (!xhr.ontimeout) {
xhr.ontimeout = () => {
const instances = page.dropzone.getUploadingFiles()
.filter(instance => instance.xhr === xhr)
page.dropzone._handleUploadError(instances, xhr, 'Connection timed out. Try to reduce upload chunk size.')
}
}
// Attach necessary data for initial upload speed calculation
if (xhr._uplSpeedCalc === undefined)
if (xhr._uplSpeedCalc === undefined) {
xhr._uplSpeedCalc = {
lastSent: 0,
data: [{ timestamp: Date.now(), bytes: 0 }]
}
}
// If not chunked uploads, add extra headers
if (!file.upload.chunked) {
@ -405,10 +402,11 @@ page.prepareDropzone = () => {
if (page.stripTags !== null) xhr.setRequestHeader('striptags', page.stripTags)
}
if (!file.upload.chunked)
if (!file.upload.chunked) {
file.previewElement.querySelector('.descriptive-progress').innerHTML = 'Uploading\u2026'
else if (file.upload.chunks.length === 1)
} else if (file.upload.chunks.length === 1) {
file.previewElement.querySelector('.descriptive-progress').innerHTML = `Uploading chunk 1/${file.upload.totalChunkCount}\u2026`
}
})
// Update descriptive progress
@ -474,8 +472,7 @@ page.prepareDropzone = () => {
}
// If not enough data
if (!fullSec)
bytesPerSec = 1000 / elapsed * bytesPerSec
if (!fullSec) bytesPerSec = 1000 / elapsed * bytesPerSec
// Get pretty bytes
prettyBytesPerSec = page.getPrettyBytes(bytesPerSec)
@ -495,15 +492,16 @@ page.prepareDropzone = () => {
file.previewElement.querySelector('.error').classList.remove('is-hidden')
}
if (Array.isArray(data.files) && data.files[0])
if (Array.isArray(data.files) && data.files[0]) {
page.updateTemplate(file, data.files[0])
}
})
this.on('error', (file, error, xhr) => {
let err = error
if (typeof error === 'object' && error.description)
if (typeof error === 'object' && error.description) {
err = error.description
else if (xhr)
} else if (xhr) {
// Formatting the Object is necessary since the function expect Axios errors
err = page.onAxiosError({
response: {
@ -511,12 +509,14 @@ page.prepareDropzone = () => {
statusText: xhr.statusText
}
}, true).data.description
else if (error instanceof Error)
} else if (error instanceof Error) {
err = error.toString()
}
// Clean up file size errors
if (/^File is too big/.test(err) && /File too large/.test(err))
if (/^File is too big/.test(err) && /File too large/.test(err)) {
err = `File too large (${page.getPrettyBytes(file.size)}).`
}
page.updateTemplateIcon(file.previewElement, 'icon-block')
@ -556,8 +556,9 @@ page.prepareDropzone = () => {
file.previewElement.querySelector('.error').classList.remove('is-hidden')
}
if (response.data.files && response.data.files[0])
if (response.data.files && response.data.files[0]) {
page.updateTemplate(file, response.data.files[0])
}
return done()
})
@ -572,8 +573,9 @@ page.addUrlsToQueue = () => {
return url.trim().length
})
if (!urls.length)
if (!urls.length) {
return swal('An error occurred!', 'You have not entered any URLs.', 'error')
}
const tabDiv = document.querySelector('#tab-urls')
tabDiv.querySelector('.uploads').classList.remove('is-hidden')
@ -607,15 +609,17 @@ page.processUrlsQueue = () => {
if (data.success === false) {
const match = data.description.match(/ over limit: (\d+)$/)
if (match && match[1])
if (match && match[1]) {
data.description = `File exceeded limit of ${page.getPrettyBytes(match[1])}.`
}
file.previewElement.querySelector('.error').innerHTML = data.description
file.previewElement.querySelector('.error').classList.remove('is-hidden')
}
if (Array.isArray(data.files) && data.files[0])
if (Array.isArray(data.files) && data.files[0]) {
page.updateTemplate(file, data.files[0])
}
page.activeUrlsQueue--
return shiftQueue()
@ -673,7 +677,7 @@ page.updateTemplate = (file, response) => {
? exec[0].toLowerCase()
: null
if (page.imageExts.includes(extname))
if (page.imageExts.includes(extname)) {
if (page.previewImages) {
const img = file.previewElement.querySelector('img')
img.setAttribute('alt', response.name || '')
@ -689,10 +693,11 @@ page.updateTemplate = (file, response) => {
} else {
page.updateTemplateIcon(file.previewElement, 'icon-picture')
}
else if (page.videoExts.includes(extname))
} else if (page.videoExts.includes(extname)) {
page.updateTemplateIcon(file.previewElement, 'icon-video')
else
} else {
page.updateTemplateIcon(file.previewElement, 'icon-doc-inv')
}
if (response.expirydate) {
const expiryDate = file.previewElement.querySelector('.expiry-date')
@ -758,8 +763,9 @@ page.createAlbum = () => {
token: page.token
}
}).then(response => {
if (response.data.success === false)
if (response.data.success === false) {
return swal('An error occurred!', response.data.description, 'error')
}
const option = document.createElement('option')
page.albumSelect.appendChild(option)
@ -865,8 +871,9 @@ page.prepareUploadConfig = () => {
valueHandler (value) {
if (value === '0') {
const uploadFields = document.querySelectorAll('.tab-content > .uploads')
for (let i = 0; i < uploadFields.length; i++)
for (let i = 0; i < uploadFields.length; i++) {
uploadFields[i].classList.add('is-reversed')
}
}
}
},
@ -891,8 +898,9 @@ page.prepareUploadConfig = () => {
value: i === 0 ? 'default' : String(age),
text: page.getPrettyUploadAge(age)
})
if (age === stored)
if (age === stored) {
config.uploadAge.value = stored
}
}
}
@ -901,8 +909,9 @@ page.prepareUploadConfig = () => {
if (!page.fileIdentifierLength.force &&
!isNaN(stored) &&
stored >= page.fileIdentifierLength.min &&
stored <= page.fileIdentifierLength.max)
stored <= page.fileIdentifierLength.max) {
config.fileLength.value = stored
}
}
const tabContent = document.querySelector('#tab-config')
@ -915,8 +924,7 @@ page.prepareUploadConfig = () => {
const conf = config[key]
// Skip only if display attribute is explicitly set to false
if (conf.display === false)
continue
if (conf.display === false) continue
const field = document.createElement('div')
field.className = 'field'
@ -927,24 +935,29 @@ page.prepareUploadConfig = () => {
value = conf.value
} else if (conf.number !== undefined) {
const parsed = parseInt(localStorage[lsKeys[key]])
if (!isNaN(parsed) && parsed <= conf.number.max && parsed >= conf.number.min)
if (!isNaN(parsed) && parsed <= conf.number.max && parsed >= conf.number.min) {
value = parsed
}
} else {
const stored = localStorage[lsKeys[key]]
if (Array.isArray(conf.select))
value = conf.select.find(sel => sel.value === stored) ? stored : undefined
else
if (Array.isArray(conf.select)) {
value = conf.select.find(sel => sel.value === stored)
? stored
: undefined
} else {
value = stored
}
}
// If valueHandler function exists, defer to the function,
// otherwise pass value to global page object
if (typeof conf.valueHandler === 'function')
if (typeof conf.valueHandler === 'function') {
conf.valueHandler(value)
else if (value !== undefined)
} else if (value !== undefined) {
page[key] = value
else if (fallback[key] !== undefined)
} else if (fallback[key] !== undefined) {
page[key] = fallback[key]
}
}
let control
@ -975,34 +988,34 @@ page.prepareUploadConfig = () => {
control.className = 'input is-fullwidth'
control.type = 'number'
if (conf.number.min !== undefined)
control.min = conf.number.min
if (conf.number.max !== undefined)
control.max = conf.number.max
if (typeof value === 'number')
control.value = value
else if (conf.number.default !== undefined)
control.value = conf.number.default
if (conf.number.min !== undefined) control.min = conf.number.min
if (conf.number.max !== undefined) control.max = conf.number.max
if (typeof value === 'number') control.value = value
else if (conf.number.default !== undefined) control.value = conf.number.default
}
let help
if (conf.disabled) {
if (Array.isArray(conf.select))
if (Array.isArray(conf.select)) {
control.querySelector('select').disabled = conf.disabled
else
} else {
control.disabled = conf.disabled
}
help = 'This option is currently not configurable.'
} else if (typeof conf.help === 'string') {
help = conf.help
} else if (conf.help === true && conf.number !== undefined) {
const tmp = []
if (conf.number.default !== undefined)
if (conf.number.default !== undefined) {
tmp.push(`Default is ${conf.number.default}${conf.number.suffix || ''}.`)
if (conf.number.min !== undefined)
}
if (conf.number.min !== undefined) {
tmp.push(`Min is ${conf.number.min}${conf.number.suffix || ''}.`)
if (conf.number.max !== undefined)
}
if (conf.number.max !== undefined) {
tmp.push(`Max is ${conf.number.max}${conf.number.suffix || ''}.`)
}
help = tmp.join(' ')
}
@ -1036,8 +1049,7 @@ page.prepareUploadConfig = () => {
form.appendChild(submit)
form.querySelector('#saveConfig').addEventListener('click', () => {
if (!form.checkValidity())
return
if (!form.checkValidity()) return
const keys = Object.keys(config)
.filter(key => config[key].display !== false && config[key].disabled !== true)
@ -1046,18 +1058,18 @@ page.prepareUploadConfig = () => {
let value
if (config[key].select !== undefined) {
if (form.elements[key].value !== 'default')
if (form.elements[key].value !== 'default') {
value = form.elements[key].value
}
} else if (config[key].number !== undefined) {
const parsed = parseInt(form.elements[key].value)
if (!isNaN(parsed) && parsed !== config[key].number.default)
if (!isNaN(parsed) && parsed !== config[key].number.default) {
value = Math.min(Math.max(parsed, config[key].number.min), config[key].number.max)
}
}
if (value !== undefined)
localStorage[lsKeys[key]] = value
else
localStorage.removeItem(lsKeys[key])
if (value !== undefined) localStorage[lsKeys[key]] = value
else localStorage.removeItem(lsKeys[key])
}
swal({
@ -1104,7 +1116,7 @@ window.addEventListener('paste', event => {
})
window.addEventListener('DOMContentLoaded', () => {
if (window.cookieconsent)
if (window.cookieconsent) {
window.cookieconsent.initialise({
cookie: {
name: 'cookieconsent_status',
@ -1131,6 +1143,7 @@ window.addEventListener('DOMContentLoaded', () => {
href: 'cookiepolicy'
}
})
}
page.checkIfPublic()

View File

@ -92,9 +92,11 @@ newsfeed.dismissNotification = element => {
element.parentNode.removeChild(element)
const keys = Object.keys(newsfeed.dismissed)
if (keys.length > newsfeed.maxItems)
for (let i = 0; i < keys.length - newsfeed.maxItems; i++)
if (keys.length > newsfeed.maxItems) {
for (let i = 0; i < keys.length - newsfeed.maxItems; i++) {
delete newsfeed.dismissed[keys[i]]
}
}
localStorage[newsfeed.lsKey] = JSON.stringify(newsfeed.dismissed)
}
@ -108,8 +110,9 @@ newsfeed.do = () => {
if (items.length) {
const dismissed = localStorage[newsfeed.lsKey]
if (dismissed)
if (dismissed) {
newsfeed.dismissed = JSON.parse(dismissed)
}
const element = document.createElement('section')
element.id = 'newsfeed'
@ -142,11 +145,12 @@ newsfeed.do = () => {
})
const dismissTrigger = notificationElement.querySelector('.delete')
if (dismissTrigger)
if (dismissTrigger) {
dismissTrigger.addEventListener('click', function () {
event.preventDefault()
newsfeed.dismissNotification(event.target.parentNode)
})
}
column.appendChild(notificationElement)
}
@ -163,11 +167,13 @@ newsfeed.do = () => {
newsfeed.onloaded = () => {
// If the main script had already done its API check, yet newsfeed haven't been triggered, do it
// This would only happen if this newsfeed script only gets loaded after the main script's API check
if (typeof page !== 'undefined' && page.apiChecked && !newsfeed.done)
if (typeof page !== 'undefined' && page.apiChecked && !newsfeed.done) {
newsfeed.do()
}
}
if (document.readyState === 'interactive' || document.readyState === 'complete')
if (document.readyState === 'interactive' || document.readyState === 'complete') {
newsfeed.onloaded()
else
} else {
window.addEventListener('DOMContentLoaded', () => newsfeed.onloaded())
}

View File

@ -60,8 +60,7 @@ const render = {
}
// miku: Generate an array of file names from 001.png to 050.png
for (let i = 1; i <= 50; i++)
render.configs.miku.array.push(`${('00' + i).slice(-3)}.png`)
for (let i = 1; i <= 50; i++) render.configs.miku.array.push(`${('00' + i).slice(-3)}.png`)
render.showTogglePrompt = () => {
const renderEnabled = !(localStorage[render.lsKey] === '0')
@ -79,11 +78,12 @@ render.showTogglePrompt = () => {
`
const buttons = {}
if (renderEnabled)
if (renderEnabled) {
buttons.reload = {
text: 'Nah fam, show me a different render',
className: 'swal-button--cancel'
}
}
buttons.confirm = true
swal({
@ -95,10 +95,8 @@ render.showTogglePrompt = () => {
} else if (value) {
const newValue = div.querySelector('#swalRender').checked ? undefined : '0'
if (newValue !== localStorage[render.lsKey]) {
if (newValue)
localStorage[render.lsKey] = newValue
else
localStorage.removeItem(render.lsKey)
if (newValue) localStorage[render.lsKey] = newValue
else localStorage.removeItem(render.lsKey)
swal('', `Random render is now ${newValue ? 'disabled' : 'enabled'}.`, 'success', {
buttons: false,
timer: 1500
@ -111,26 +109,23 @@ render.showTogglePrompt = () => {
render.parseVersion = () => {
const renderScript = document.querySelector('#renderScript')
if (renderScript && renderScript.dataset.version)
return `?v=${renderScript.dataset.version}`
return ''
if (renderScript && renderScript.dataset.version) return `?v=${renderScript.dataset.version}`
else return ''
}
render.do = reload => {
if (!render.done)
render.done = true
if (!render.done) render.done = true
render.config = render.configs[render.type]
if (!render.config || !render.config.array.length)
return
if (!render.config || !render.config.array.length) return
const previousElement = document.querySelector('body > .render')
if (previousElement)
previousElement.remove()
if (previousElement) previousElement.remove()
const doRender = () => {
if (render.version === undefined)
if (render.version === undefined) {
render.version = render.parseVersion()
}
// Let us just allow people to get new render when toggling the option
render.selected = render.config.array[Math.floor(Math.random() * render.config.array.length)]
@ -158,11 +153,13 @@ render.do = reload => {
render.onloaded = () => {
// If the main script had already done its API check, yet render haven't been triggered, do it
// This would only happen if this render script only gets loaded after the main script's API check
if (typeof page !== 'undefined' && page.apiChecked && !render.done)
if (typeof page !== 'undefined' && page.apiChecked && !render.done) {
render.do()
}
}
if (document.readyState === 'interactive' || document.readyState === 'complete')
if (document.readyState === 'interactive' || document.readyState === 'complete') {
render.onloaded()
else
} else {
window.addEventListener('DOMContentLoaded', () => render.onloaded())
}

View File

@ -84,17 +84,15 @@ page.getPrettyUptime = seconds => {
let minutes = Math.floor(seconds / 60)
seconds %= 60
if (hours < 10)
hours = '0' + hours
if (minutes < 10)
minutes = '0' + minutes
if (seconds < 10)
seconds = '0' + seconds
if (hours < 10) hours = '0' + hours
if (minutes < 10) minutes = '0' + minutes
if (seconds < 10) seconds = '0' + seconds
if (days > 0)
if (days > 0) {
return days + 'd ' + hours + ':' + minutes + ':' + seconds
else
} else {
return hours + ':' + minutes + ':' + seconds
}
}
page.escape = string => {
@ -103,14 +101,12 @@ page.escape = string => {
// Copyright(c) 2015 Andreas Lubbe
// Copyright(c) 2015 Tiancheng "Timothy" Gu
if (!string)
return string
if (!string) return string
const str = String(string)
const match = /["'&<>]/.exec(str)
if (!match)
return str
if (!match) return str
let escape
let html = ''
@ -138,8 +134,9 @@ page.escape = string => {
continue
}
if (lastIndex !== index)
if (lastIndex !== index) {
html += str.substring(lastIndex, index)
}
lastIndex = index + 1
html += escape