!!! MASSIVE OVERHAUL !!!

As the title says, this commit is a massive overhaul.
I've rewritten/restructured almost everything in the controller scripts.
Because of that, there's a considerable possibility that I've broken
something somewhere.

Notable changes:

Added temporary uploads.

Removed file name length changer from dashboard,
in favor of an equivalent in homepage config tab.
This allows non-registered users to also set file name length.

A bunch of other undocumented stuff.
I don't know, I'm too tired to remember them all.
This commit is contained in:
Bobby Wibowo 2019-09-08 08:56:29 +07:00
parent c33affa68f
commit 02e2e402c3
No known key found for this signature in database
GPG Key ID: 51C3A1E1E22D26CF
34 changed files with 2528 additions and 2076 deletions

View File

@ -15,6 +15,8 @@
"multi", "multi",
"consistent" "consistent"
], ],
"no-throw-literal": 0,
"no-var": "error",
"prefer-const": [ "prefer-const": [
"error", "error",
{ {
@ -30,6 +32,6 @@
"error", "error",
"single" "single"
], ],
"no-var": "error" "standard/no-callback-literal": 0
} }
} }

View File

@ -196,6 +196,38 @@ module.exports = {
*/ */
urlExtensionsFilter: [], urlExtensionsFilter: [],
/*
An array of allowed ages for uploads (in hours).
Default age will be the value at the very top of the array.
If the array is populated but does not contain a zero value,
permanent uploads will be rejected.
This only applies to new files uploaded after enabling the option.
If the array is empty or is set to falsy value, temporary uploads
feature will be disabled, and all uploads will be permanent (original behavior).
When temporary uploads feature is disabled, any existing temporary uploads
will not ever be automatically deleted, since the safe will not start the
periodical checkup task.
*/
temporaryUploadAges: [
0, // permanent
1 / 60 * 15, // 15 minutes
1 / 60 * 30, // 30 minutes
1, // 1 hour
6, // 6 hours
12, // 12 hours
24, // 24 hours (1 day)
168 // 168 hours (7 days)
],
/*
Interval of the periodical check up tasks for temporary uploads (in milliseconds).
NOTE: Set to falsy value if you prefer to use your own external script.
*/
temporaryUploadsInterval: 1 * 60000, // 1 minute
/* /*
Scan files using ClamAV through clamd. Scan files using ClamAV through clamd.
*/ */
@ -248,10 +280,9 @@ module.exports = {
may not be used by more than a single file (i.e. if "abcd.jpg" already exists, a new PNG may not be used by more than a single file (i.e. if "abcd.jpg" already exists, a new PNG
file may not be named as "abcd.png"). file may not be named as "abcd.png").
If this is enabled, the safe will then attempt to read file list of the uploads directory If this is enabled, the safe will query files from the database during first launch,
during first launch, parse the names, then cache the identifiers into memory. parse their names, then cache the identifiers into memory.
Its downside is that it will use a bit more memory, generally a few MBs increase Its downside is that it will use a bit more memory.
on a safe with over >10k uploads.
If this is disabled, collision check will become less strict. If this is disabled, collision check will become less strict.
As in, the same identifier may be used by multiple different extensions (i.e. if "abcd.jpg" As in, the same identifier may be used by multiple different extensions (i.e. if "abcd.jpg"

View File

@ -4,40 +4,68 @@ const EventEmitter = require('events')
const fs = require('fs') const fs = require('fs')
const logger = require('./../logger') const logger = require('./../logger')
const path = require('path') const path = require('path')
const paths = require('./pathsController')
const randomstring = require('randomstring') const randomstring = require('randomstring')
const utils = require('./utilsController') const utils = require('./utilsController')
const Zip = require('jszip') const Zip = require('jszip')
const albumsController = {} const self = {
onHold: new Set()
}
const maxTries = config.uploads.maxTries || 1
const homeDomain = config.homeDomain || config.domain const homeDomain = config.homeDomain || config.domain
const uploadsDir = path.resolve(config.uploads.folder)
const zipsDir = path.join(uploadsDir, 'zips') const zipMaxTotalSize = parseInt(config.cloudflare.zipMaxTotalSize)
const zipMaxTotalSize = config.cloudflare.zipMaxTotalSize const zipMaxTotalSizeBytes = config.cloudflare.zipMaxTotalSize * 1000000
const zipMaxTotalSizeBytes = parseInt(config.cloudflare.zipMaxTotalSize) * 1000000
const zipOptions = config.uploads.jsZipOptions const zipOptions = config.uploads.jsZipOptions
// Force 'type' option to 'nodebuffer' // Force 'type' option to 'nodebuffer'
zipOptions.type = 'nodebuffer' zipOptions.type = 'nodebuffer'
// Apply fallbacks for missing config values // Apply fallbacks for missing config values
if (zipOptions.streamFiles === undefined) zipOptions.streamFiles = true if (zipOptions.streamFiles === undefined)
if (zipOptions.compression === undefined) zipOptions.compression = 'DEFLATE' zipOptions.streamFiles = true
if (zipOptions.compression === undefined)
zipOptions.compression = 'DEFLATE'
if (zipOptions.compressionOptions === undefined || zipOptions.compressionOptions.level === undefined) if (zipOptions.compressionOptions === undefined || zipOptions.compressionOptions.level === undefined)
zipOptions.compressionOptions = { level: 1 } zipOptions.compressionOptions = { level: 1 }
albumsController.zipEmitters = new Map() self.zipEmitters = new Map()
class ZipEmitter extends EventEmitter { class ZipEmitter extends EventEmitter {
constructor (identifier) { constructor (identifier) {
super() super()
this.identifier = identifier this.identifier = identifier
this.once('done', () => albumsController.zipEmitters.delete(this.identifier)) this.once('done', () => self.zipEmitters.delete(this.identifier))
} }
} }
albumsController.list = async (req, res, next) => { self.getUniqueRandomName = async () => {
for (let i = 0; i < utils.idMaxTries; i++) {
const identifier = randomstring.generate(config.uploads.albumIdentifierLength)
if (self.onHold.has(identifier))
continue
// Put token on-hold (wait for it to be inserted to DB)
self.onHold.add(identifier)
const album = await db.table('albums')
.where('identifier', identifier)
.select('id')
.first()
if (album) {
self.onHold.delete(identifier)
logger.log(`Album with identifier ${identifier} already exists (${i + 1}/${utils.idMaxTries}).`)
continue
}
return identifier
}
throw 'Sorry, we could not allocate a unique random identifier. Try again?'
}
self.list = async (req, res, next) => {
const user = await utils.authorize(req, res) const user = await utils.authorize(req, res)
if (!user) return if (!user) return
@ -55,88 +83,77 @@ albumsController.list = async (req, res, next) => {
if (req.params.sidebar !== undefined) if (req.params.sidebar !== undefined)
return res.json({ success: true, albums }) return res.json({ success: true, albums })
const ids = [] const albumids = {}
for (const album of albums) { for (const album of albums) {
album.download = album.download !== 0 album.download = album.download !== 0
album.public = album.public !== 0 album.public = album.public !== 0
album.files = 0
ids.push(album.id) // Map by IDs
albumids[album.id] = album
} }
const files = await db.table('files') const files = await db.table('files')
.whereIn('albumid', ids) .whereIn('albumid', Object.keys(albumids))
.select('albumid') .select('albumid')
const albumsCount = {}
for (const id of ids) albumsCount[id] = 0 // Increment files count
for (const file of files) albumsCount[file.albumid] += 1 for (const file of files)
for (const album of albums) album.files = albumsCount[album.id] if (albumids[file.albumid])
albumids[file.albumid].files++
return res.json({ success: true, albums, homeDomain }) return res.json({ success: true, albums, homeDomain })
} }
albumsController.create = async (req, res, next) => { self.create = async (req, res, next) => {
const user = await utils.authorize(req, res) const user = await utils.authorize(req, res)
if (!user) return if (!user) return
const name = utils.escape(req.body.name) const name = typeof req.body.name === 'string'
if (name === undefined || name === '') ? utils.escape(req.body.name.trim())
: ''
if (!name)
return res.json({ success: false, description: 'No album name specified.' }) return res.json({ success: false, description: 'No album name specified.' })
const album = await db.table('albums') try {
.where({ const album = await db.table('albums')
.where({
name,
enabled: 1,
userid: user.id
})
.first()
if (album)
return res.json({ success: false, description: 'There is already an album with that name.' })
const identifier = await self.getUniqueRandomName()
const ids = await db.table('albums').insert({
name, name,
enabled: 1, enabled: 1,
userid: user.id userid: user.id,
identifier,
timestamp: Math.floor(Date.now() / 1000),
editedAt: 0,
zipGeneratedAt: 0,
download: (req.body.download === false || req.body.download === 0) ? 0 : 1,
public: (req.body.public === false || req.body.public === 0) ? 0 : 1,
description: typeof req.body.description === 'string'
? utils.escape(req.body.description.trim())
: ''
}) })
.first() utils.invalidateStatsCache('albums')
self.onHold.delete(identifier)
if (album) return res.json({ success: true, id: ids[0] })
return res.json({ success: false, description: 'There\'s already an album with that name.' }) } catch (error) {
logger.error(error)
const identifier = await albumsController.getUniqueRandomName() return res.status(500).json({ success: false, description: 'An unexpected error occurred. Try again?' })
.catch(error => { }
res.json({ success: false, description: error.toString() })
})
if (!identifier) return
const ids = await db.table('albums').insert({
name,
enabled: 1,
userid: user.id,
identifier,
timestamp: Math.floor(Date.now() / 1000),
editedAt: 0,
zipGeneratedAt: 0,
download: (req.body.download === false || req.body.download === 0) ? 0 : 1,
public: (req.body.public === false || req.body.public === 0) ? 0 : 1,
description: utils.escape(req.body.description) || ''
})
utils.invalidateStatsCache('albums')
return res.json({ success: true, id: ids[0] })
} }
albumsController.getUniqueRandomName = () => { self.delete = async (req, res, next) => {
return new Promise((resolve, reject) => {
const select = i => {
const identifier = randomstring.generate(config.uploads.albumIdentifierLength)
db.table('albums')
.where('identifier', identifier)
.then(rows => {
if (!rows || !rows.length) return resolve(identifier)
logger.log(`An album with identifier ${identifier} already exists (${++i}/${maxTries}).`)
if (i < maxTries) return select(i)
// eslint-disable-next-line prefer-promise-reject-errors
return reject('Sorry, we could not allocate a unique random identifier. Try again?')
})
}
// Get us a unique random identifier
select(0)
})
}
albumsController.delete = async (req, res, next) => {
const user = await utils.authorize(req, res) const user = await utils.authorize(req, res)
if (!user) return if (!user) return
@ -145,94 +162,31 @@ albumsController.delete = async (req, res, next) => {
if (id === undefined || id === '') if (id === undefined || id === '')
return res.json({ success: false, description: 'No album specified.' }) return res.json({ success: false, description: 'No album specified.' })
let failed = [] try {
if (purge) { if (purge) {
const files = await db.table('files') const files = await db.table('files')
.where({
albumid: id,
userid: user.id
})
if (files.length) {
const ids = files.map(file => file.id)
const failed = await utils.bulkDeleteFromDb('id', ids, user)
if (failed.length)
return res.json({ success: false, failed })
}
}
await db.table('albums')
.where({ .where({
albumid: id, id,
userid: user.id userid: user.id
}) })
.update('enabled', 0)
utils.invalidateStatsCache('albums')
if (files.length) { const identifier = await db.table('albums')
const ids = files.map(file => file.id)
failed = await utils.bulkDeleteFiles('id', ids, user)
if (failed.length === ids.length)
return res.json({ success: false, description: 'Could not delete any of the files associated with the album.' })
}
}
await db.table('albums')
.where({
id,
userid: user.id
})
.update('enabled', 0)
utils.invalidateStatsCache('albums')
const identifier = await db.table('albums')
.select('identifier')
.where({
id,
userid: user.id
})
.first()
.then(row => row.identifier)
// Unlink zip archive of the album if it exists
const zipPath = path.join(zipsDir, `${identifier}.zip`)
fs.unlink(zipPath, error => {
if (error && error.code !== 'ENOENT') {
logger.error(error)
return res.json({ success: false, description: error.toString(), failed })
}
res.json({ success: true, failed })
})
}
albumsController.edit = async (req, res, next) => {
const user = await utils.authorize(req, res)
if (!user) return
const id = parseInt(req.body.id)
if (isNaN(id))
return res.json({ success: false, description: 'No album specified.' })
const name = utils.escape(req.body.name)
if (name === undefined || name === '')
return res.json({ success: false, description: 'No name specified.' })
const album = await db.table('albums')
.where({
id,
userid: user.id,
enabled: 1
})
.first()
if (!album)
return res.json({ success: false, description: 'Could not get album with the specified ID.' })
else if (album.id !== id)
return res.json({ success: false, description: 'Name already in use.' })
else if (req._old && (album.id === id))
// Old rename API
return res.json({ success: false, description: 'You did not specify a new name.' })
await db.table('albums')
.where({
id,
userid: user.id
})
.update({
name,
download: Boolean(req.body.download),
public: Boolean(req.body.public),
description: utils.escape(req.body.description) || ''
})
utils.invalidateStatsCache('albums')
if (req.body.requestLink) {
const oldIdentifier = await db.table('albums')
.select('identifier') .select('identifier')
.where({ .where({
id, id,
@ -241,84 +195,158 @@ albumsController.edit = async (req, res, next) => {
.first() .first()
.then(row => row.identifier) .then(row => row.identifier)
const identifier = await albumsController.getUniqueRandomName() await paths.unlink(path.join(paths.zips, `${identifier}.zip`))
.catch(error => { } catch (error) {
res.json({ success: false, description: error.toString() }) if (error && error.code !== 'ENOENT') {
logger.error(error)
return res.status(500).json({ success: false, description: 'An unexpected error occurred. Try again?' })
}
}
return res.json({ success: true })
}
self.edit = async (req, res, next) => {
const user = await utils.authorize(req, res)
if (!user) return
const id = parseInt(req.body.id)
if (isNaN(id))
return res.json({ success: false, description: 'No album specified.' })
const name = typeof req.body.name === 'string'
? utils.escape(req.body.name.trim())
: ''
if (!name)
return res.json({ success: false, description: 'No name specified.' })
try {
const album = await db.table('albums')
.where({
id,
userid: user.id,
enabled: 1
}) })
if (!identifier) return .first()
if (!album)
return res.json({ success: false, description: 'Could not get album with the specified ID.' })
else if (album.id !== id)
return res.json({ success: false, description: 'Name already in use.' })
else if (req._old && (album.id === id))
// Old rename API
return res.json({ success: false, description: 'You did not specify a new name.' })
await db.table('albums') await db.table('albums')
.where({ .where({
id, id,
userid: user.id userid: user.id
}) })
.update('identifier', identifier) .update({
name,
download: Boolean(req.body.download),
public: Boolean(req.body.public),
description: typeof req.body.description === 'string'
? utils.escape(req.body.description.trim())
: ''
})
utils.invalidateStatsCache('albums')
if (!req.body.requestLink)
return res.json({ success: true, name })
const oldIdentifier = album.identifier
const newIdentifier = await self.getUniqueRandomName()
await db.table('albums')
.where({
id,
userid: user.id
})
.update('identifier', newIdentifier)
utils.invalidateStatsCache('albums')
self.onHold.delete(newIdentifier)
// Rename zip archive of the album if it exists // Rename zip archive of the album if it exists
const zipPath = path.join(zipsDir, `${oldIdentifier}.zip`) try {
return fs.access(zipPath, error => { const oldZip = path.join(paths.zips, `${oldIdentifier}.zip`)
if (error) return res.json({ success: true, identifier }) // await paths.access(oldZip)
fs.rename(zipPath, path.join(zipsDir, `${identifier}.zip`), error => { const newZip = path.join(paths.zips, `${newIdentifier}.zip`)
if (!error) return res.json({ success: true, identifier }) await paths.rename(oldZip, newZip)
logger.error(error) } catch (err) {
res.json({ success: false, description: error.toString() }) // Re-throw error
}) if (err.code !== 'ENOENT')
}) throw err
} }
return res.json({ success: true, name }) return res.json({
success: true,
identifier: newIdentifier
})
} catch (error) {
logger.error(error)
return res.status(500).json({ success: false, description: 'An unexpected error occurred. Try again?' })
}
} }
albumsController.rename = async (req, res, next) => { self.rename = async (req, res, next) => {
req._old = true req._old = true
req.body = { name: req.body.name } req.body = { name: req.body.name }
return albumsController.edit(req, res, next) return self.edit(req, res, next)
} }
albumsController.get = async (req, res, next) => { self.get = async (req, res, next) => {
// TODO: Something, can't remember...
const identifier = req.params.identifier const identifier = req.params.identifier
if (identifier === undefined) if (identifier === undefined)
return res.status(401).json({ success: false, description: 'No identifier provided.' }) return res.status(401).json({ success: false, description: 'No identifier provided.' })
const album = await db.table('albums') try {
.where({ const album = await db.table('albums')
identifier, .where({
enabled: 1 identifier,
enabled: 1
})
.first()
if (!album)
return res.json({
success: false,
description: 'Album not found.'
})
else if (album.public === 0)
return res.status(403).json({
success: false,
description: 'This album is not available for public.'
})
const title = album.name
const files = await db.table('files')
.select('name')
.where('albumid', album.id)
.orderBy('id', 'DESC')
for (const file of files) {
file.file = `${config.domain}/${file.name}`
const extname = utils.extname(file.name)
if (utils.mayGenerateThumb(extname))
file.thumb = `${config.domain}/thumbs/${file.name.slice(0, -extname.length)}.png`
}
return res.json({
success: true,
title,
count: files.length,
files
}) })
.first() } catch (error) {
logger.error(error)
if (!album) return res.status(500).json({ success: false, description: 'An unexpected error occcured. Try again?' })
return res.json({ success: false, description: 'Album not found.' })
else if (album.public === 0)
return res.status(401).json({
success: false,
description: 'This album is not available for public.'
})
const title = album.name
const files = await db.table('files')
.select('name')
.where('albumid', album.id)
.orderBy('id', 'DESC')
for (const file of files) {
file.file = `${config.domain}/${file.name}`
const extname = utils.extname(file.name)
if (utils.mayGenerateThumb(extname))
file.thumb = `${config.domain}/thumbs/${file.name.slice(0, -extname.length)}.png`
} }
return res.json({
success: true,
title,
count: files.length,
files
})
} }
albumsController.generateZip = async (req, res, next) => { self.generateZip = async (req, res, next) => {
const versionString = parseInt(req.query.v) const versionString = parseInt(req.query.v)
const download = (filePath, fileName) => { const download = (filePath, fileName) => {
const headers = {} const headers = {}
@ -337,160 +365,178 @@ albumsController.generateZip = async (req, res, next) => {
}) })
if (!config.uploads.generateZips) if (!config.uploads.generateZips)
return res.status(401).json({ success: false, description: 'Zip generation disabled.' }) return res.status(401).json({
success: false,
const album = await db.table('albums') description: 'Zip generation disabled.'
.where({
identifier,
enabled: 1
}) })
.first()
if (!album) try {
return res.json({ success: false, description: 'Album not found.' })
else if (album.download === 0)
return res.json({ success: false, description: 'Download for this album is disabled.' })
if ((isNaN(versionString) || versionString <= 0) && album.editedAt)
return res.redirect(`${album.identifier}?v=${album.editedAt}`)
if (album.zipGeneratedAt > album.editedAt) {
const filePath = path.join(zipsDir, `${identifier}.zip`)
const exists = await new Promise(resolve => fs.access(filePath, error => resolve(!error)))
if (exists) {
const fileName = `${album.name}.zip`
return download(filePath, fileName)
}
}
if (albumsController.zipEmitters.has(identifier)) {
logger.log(`Waiting previous zip task for album: ${identifier}.`)
return albumsController.zipEmitters.get(identifier).once('done', (filePath, fileName, json) => {
if (filePath && fileName)
download(filePath, fileName)
else if (json)
res.json(json)
})
}
albumsController.zipEmitters.set(identifier, new ZipEmitter(identifier))
logger.log(`Starting zip task for album: ${identifier}.`)
const files = await db.table('files')
.select('name', 'size')
.where('albumid', album.id)
if (files.length === 0) {
logger.log(`Finished zip task for album: ${identifier} (no files).`)
const json = { success: false, description: 'There are no files in the album.' }
albumsController.zipEmitters.get(identifier).emit('done', null, null, json)
return res.json(json)
}
if (zipMaxTotalSize) {
const totalSizeBytes = files.reduce((accumulator, file) => accumulator + parseInt(file.size), 0)
if (totalSizeBytes > zipMaxTotalSizeBytes) {
logger.log(`Finished zip task for album: ${identifier} (size exceeds).`)
const json = {
success: false,
description: `Total size of all files in the album exceeds the configured limit (${zipMaxTotalSize}).`
}
albumsController.zipEmitters.get(identifier).emit('done', null, null, json)
return res.json(json)
}
}
const zipPath = path.join(zipsDir, `${album.identifier}.zip`)
const archive = new Zip()
let iteration = 0
for (const file of files)
fs.readFile(path.join(uploadsDir, file.name), (error, data) => {
if (error)
logger.error(error)
else
archive.file(file.name, data)
iteration++
if (iteration === files.length)
archive
.generateNodeStream(zipOptions)
.pipe(fs.createWriteStream(zipPath))
.on('finish', async () => {
logger.log(`Finished zip task for album: ${identifier} (success).`)
await db.table('albums')
.where('id', album.id)
.update('zipGeneratedAt', Math.floor(Date.now() / 1000))
const filePath = path.join(zipsDir, `${identifier}.zip`)
const fileName = `${album.name}.zip`
albumsController.zipEmitters.get(identifier).emit('done', filePath, fileName)
utils.invalidateStatsCache('albums')
return download(filePath, fileName)
})
})
}
albumsController.addFiles = async (req, res, next) => {
const user = await utils.authorize(req, res)
if (!user) return
const ids = req.body.ids
if (!ids || !ids.length)
return res.json({ success: false, description: 'No files specified.' })
let albumid = req.body.albumid
if (typeof albumid !== 'number') albumid = parseInt(albumid)
if (isNaN(albumid) || (albumid < 0)) albumid = null
const albumids = []
if (albumid !== null) {
const album = await db.table('albums') const album = await db.table('albums')
.where('id', albumid) .where({
.where(function () { identifier,
if (user.username !== 'root') enabled: 1
this.where('userid', user.id)
}) })
.first() .first()
if (!album) if (!album)
return res.json({ success: false, description: 'Album doesn\'t exist or it doesn\'t belong to the user.' }) return res.json({ success: false, description: 'Album not found.' })
else if (album.download === 0)
return res.json({ success: false, description: 'Download for this album is disabled.' })
albumids.push(albumid) if ((isNaN(versionString) || versionString <= 0) && album.editedAt)
return res.redirect(`${album.identifier}?v=${album.editedAt}`)
if (album.zipGeneratedAt > album.editedAt) {
const filePath = path.join(paths.zips, `${identifier}.zip`)
const exists = await new Promise(resolve => fs.access(filePath, error => resolve(!error)))
if (exists) {
const fileName = `${album.name}.zip`
return download(filePath, fileName)
}
}
if (self.zipEmitters.has(identifier)) {
logger.log(`Waiting previous zip task for album: ${identifier}.`)
return self.zipEmitters.get(identifier).once('done', (filePath, fileName, json) => {
if (filePath && fileName)
download(filePath, fileName)
else if (json)
res.json(json)
})
}
self.zipEmitters.set(identifier, new ZipEmitter(identifier))
logger.log(`Starting zip task for album: ${identifier}.`)
const files = await db.table('files')
.select('name', 'size')
.where('albumid', album.id)
if (files.length === 0) {
logger.log(`Finished zip task for album: ${identifier} (no files).`)
const json = {
success: false,
description: 'There are no files in the album.'
}
self.zipEmitters.get(identifier).emit('done', null, null, json)
return res.json(json)
}
if (zipMaxTotalSize) {
const totalSizeBytes = files.reduce((accumulator, file) => accumulator + parseInt(file.size), 0)
if (totalSizeBytes > zipMaxTotalSizeBytes) {
logger.log(`Finished zip task for album: ${identifier} (size exceeds).`)
const json = {
success: false,
description: `Total size of all files in the album exceeds the configured limit (${zipMaxTotalSize} MB).`
}
self.zipEmitters.get(identifier).emit('done', null, null, json)
return res.json(json)
}
}
const zipPath = path.join(paths.zips, `${album.identifier}.zip`)
const archive = new Zip()
try {
for (const file of files) {
const data = await paths.readFile(path.join(paths.uploads, file.name))
archive.file(file.name, data)
}
await new Promise((resolve, reject) => {
archive.generateNodeStream(zipOptions)
.pipe(fs.createWriteStream(zipPath))
.on('error', error => reject(error))
.on('finish', () => resolve())
})
} catch (error) {
logger.error(error)
return res.status(500).json({
success: 'false',
description: error.toString()
})
}
logger.log(`Finished zip task for album: ${identifier} (success).`)
await db.table('albums')
.where('id', album.id)
.update('zipGeneratedAt', Math.floor(Date.now() / 1000))
utils.invalidateStatsCache('albums')
const filePath = path.join(paths.zips, `${identifier}.zip`)
const fileName = `${album.name}.zip`
self.zipEmitters.get(identifier).emit('done', filePath, fileName)
return download(filePath, fileName)
} catch (error) {
logger.error(error)
return res.status(500).json({ success: false, description: 'An unexpected error occurred. Try again?' })
} }
const files = await db.table('files')
.whereIn('id', ids)
.where(function () {
if (user.username !== 'root')
this.where('userid', user.id)
})
const failed = ids.filter(id => !files.find(file => file.id === id))
const updateDb = await db.table('files')
.whereIn('id', files.map(file => file.id))
.update('albumid', albumid)
.catch(logger.error)
if (!updateDb)
return res.json({
success: false,
description: `Could not ${albumid === null ? 'add' : 'remove'} any files ${albumid === null ? 'to' : 'from'} the album.`
})
files.forEach(file => {
if (file.albumid && !albumids.includes(file.albumid))
albumids.push(file.albumid)
})
await db.table('albums')
.whereIn('id', albumids)
.update('editedAt', Math.floor(Date.now() / 1000))
.catch(logger.error)
return res.json({ success: true, failed })
} }
module.exports = albumsController self.addFiles = async (req, res, next) => {
const user = await utils.authorize(req, res)
if (!user) return
const ids = req.body.ids
if (!Array.isArray(ids) || !ids.length)
return res.json({ success: false, description: 'No files specified.' })
let albumid = parseInt(req.body.albumid)
if (isNaN(albumid) || albumid < 0) albumid = null
let failed = []
const albumids = []
try {
if (albumid !== null) {
const album = await db.table('albums')
.where('id', albumid)
.where(function () {
if (user.username !== 'root')
this.where('userid', user.id)
})
.first()
if (!album)
return res.json({
success: false,
description: 'Album does not exist or it does not belong to the user.'
})
albumids.push(albumid)
}
const files = await db.table('files')
.whereIn('id', ids)
.where('userid', user.id)
failed = ids.filter(id => !files.find(file => file.id === id))
await db.table('files')
.whereIn('id', files.map(file => file.id))
.update('albumid', albumid)
files.forEach(file => {
if (file.albumid && !albumids.includes(file.albumid))
albumids.push(file.albumid)
})
await db.table('albums')
.whereIn('id', albumids)
.update('editedAt', Math.floor(Date.now() / 1000))
return res.json({ success: true, failed })
} catch (error) {
logger.error(error)
if (failed.length === ids.length)
return res.json({
success: false,
description: `Could not ${albumid === null ? 'add' : 'remove'} any files ${albumid === null ? 'to' : 'from'} the album.`
})
else
return res.status(500).json({ success: false, description: 'An unexpected error occurred. Try again?' })
}
}
module.exports = self

View File

@ -1,3 +1,4 @@
const { promisify } = require('util')
const bcrypt = require('bcrypt') const bcrypt = require('bcrypt')
const config = require('./../config') const config = require('./../config')
const db = require('knex')(config.database) const db = require('knex')(config.database)
@ -7,160 +8,119 @@ const randomstring = require('randomstring')
const tokens = require('./tokenController') const tokens = require('./tokenController')
const utils = require('./utilsController') const utils = require('./utilsController')
const authController = {} const self = {
compare: promisify(bcrypt.compare),
authController.verify = async (req, res, next) => { hash: promisify(bcrypt.hash)
let username = req.body.username
let password = req.body.password
if (username === undefined)
return res.json({ success: false, description: 'No username provided.' })
if (password === undefined)
return res.json({ success: false, description: 'No password provided.' })
username = username.trim()
password = password.trim()
const user = await db.table('users').where('username', username).first()
if (!user)
return res.json({ success: false, description: 'Username does not exist.' })
if (user.enabled === false || user.enabled === 0)
return res.json({ success: false, description: 'This account has been disabled.' })
bcrypt.compare(password, user.password, (error, result) => {
if (error) {
logger.error(error)
return res.json({ success: false, description: 'There was an error.' })
}
if (result === false) return res.json({ success: false, description: 'Wrong password.' })
return res.json({ success: true, token: user.token })
})
} }
authController.register = async (req, res, next) => { self.verify = async (req, res, next) => {
if (config.enableUserAccounts === false) const username = typeof req.body.username === 'string'
return res.json({ success: false, description: 'Register is disabled at the moment.' }) ? req.body.username.trim()
: ''
let username = req.body.username if (!username)
let password = req.body.password
if (username === undefined)
return res.json({ success: false, description: 'No username provided.' }) return res.json({ success: false, description: 'No username provided.' })
if (password === undefined)
const password = typeof req.body.password === 'string'
? req.body.password.trim()
: ''
if (!password)
return res.json({ success: false, description: 'No password provided.' }) return res.json({ success: false, description: 'No password provided.' })
username = username.trim() try {
password = password.trim() const user = await db.table('users')
.where('username', username)
.first()
if (!user)
return res.json({ success: false, description: 'Username does not exist.' })
if (user.enabled === false || user.enabled === 0)
return res.json({ success: false, description: 'This account has been disabled.' })
const result = await self.compare(password, user.password)
if (result === false)
return res.json({ success: false, description: 'Wrong password.' })
else
return res.json({ success: true, token: user.token })
} catch (error) {
logger.error(error)
return res.status(500).json({ success: false, description: 'An unexpected error occurred. Try again?' })
}
}
self.register = async (req, res, next) => {
if (config.enableUserAccounts === false)
return res.json({ success: false, description: 'Registration is currently disabled.' })
const username = typeof req.body.username === 'string'
? req.body.username.trim()
: ''
if (username.length < 4 || username.length > 32) if (username.length < 4 || username.length > 32)
return res.json({ success: false, description: 'Username must have 4-32 characters.' }) return res.json({ success: false, description: 'Username must have 4-32 characters.' })
const password = typeof req.body.password === 'string'
? req.body.password.trim()
: ''
if (password.length < 6 || password.length > 64) if (password.length < 6 || password.length > 64)
return res.json({ success: false, description: 'Password must have 6-64 characters.' }) return res.json({ success: false, description: 'Password must have 6-64 characters.' })
const user = await db.table('users').where('username', username).first() try {
if (user) const user = await db.table('users')
return res.json({ success: false, description: 'Username already exists.' }) .where('username', username)
.first()
bcrypt.hash(password, 10, async (error, hash) => { if (user)
if (error) { return res.json({ success: false, description: 'Username already exists.' })
logger.error(error)
return res.json({ success: false, description: 'Error generating password hash (╯°□°)╯︵ ┻━┻.' }) const hash = await self.hash(password, 10)
}
const token = await tokens.generateUniqueToken() const token = await tokens.generateUniqueToken()
if (!token) if (!token)
return res.json({ success: false, description: 'Error generating unique token (╯°□°)╯︵ ┻━┻.' }) return res.json({ success: false, description: 'Sorry, we could not allocate a unique token. Try again?' })
await db.table('users').insert({
username,
password: hash,
token,
enabled: 1,
permission: perms.permissions.user
})
await db.table('users')
.insert({
username,
password: hash,
token,
enabled: 1,
permission: perms.permissions.user
})
utils.invalidateStatsCache('users') utils.invalidateStatsCache('users')
token.onHold.delete(token)
return res.json({ success: true, token }) return res.json({ success: true, token })
}) } catch (error) {
logger.error(error)
return res.status(500).json({ success: false, description: 'An unexpected error occurred. Try again?' })
}
} }
authController.changePassword = async (req, res, next) => { self.changePassword = async (req, res, next) => {
const user = await utils.authorize(req, res) const user = await utils.authorize(req, res)
if (!user) return if (!user) return
const password = req.body.password const password = typeof req.body.password === 'string'
if (password === undefined) ? req.body.password.trim()
return res.json({ success: false, description: 'No password provided.' }) : ''
if (password.length < 6 || password.length > 64) if (password.length < 6 || password.length > 64)
return res.json({ success: false, description: 'Password must have 6-64 characters.' }) return res.json({ success: false, description: 'Password must have 6-64 characters.' })
bcrypt.hash(password, 10, async (error, hash) => { try {
if (error) { const hash = await self.hash(password, 10)
logger.error(error)
return res.json({ success: false, description: 'Error generating password hash (╯°□°)╯︵ ┻━┻.' })
}
await db.table('users') await db.table('users')
.where('id', user.id) .where('id', user.id)
.update('password', hash) .update('password', hash)
return res.json({ success: true }) return res.json({ success: true })
}) } catch (error) {
logger.error(error)
return res.status(500).json({ success: false, description: 'An unexpected error occurred. Try again?' })
}
} }
authController.getFileLengthConfig = async (req, res, next) => { self.editUser = async (req, res, next) => {
const user = await utils.authorize(req, res)
if (!user) return
return res.json({
success: true,
fileLength: user.fileLength,
config: config.uploads.fileLength
})
}
authController.changeFileLength = async (req, res, next) => {
if (config.uploads.fileLength.userChangeable === false)
return res.json({
success: false,
description: 'Changing file name length is disabled at the moment.'
})
const user = await utils.authorize(req, res)
if (!user) return
const fileLength = parseInt(req.body.fileLength)
if (fileLength === undefined)
return res.json({
success: false,
description: 'No file name length provided.'
})
if (isNaN(fileLength))
return res.json({
success: false,
description: 'File name length is not a valid number.'
})
if (fileLength < config.uploads.fileLength.min || fileLength > config.uploads.fileLength.max)
return res.json({
success: false,
description: `File name length must be ${config.uploads.fileLength.min} to ${config.uploads.fileLength.max} characters.`
})
if (fileLength === user.fileLength)
return res.json({ success: true })
await db.table('users')
.where('id', user.id)
.update('fileLength', fileLength)
return res.json({ success: true })
}
authController.editUser = async (req, res, next) => {
const user = await utils.authorize(req, res) const user = await utils.authorize(req, res)
if (!user) return if (!user) return
@ -168,67 +128,61 @@ authController.editUser = async (req, res, next) => {
if (isNaN(id)) if (isNaN(id))
return res.json({ success: false, description: 'No user specified.' }) return res.json({ success: false, description: 'No user specified.' })
const target = await db.table('users') try {
.where('id', id) const target = await db.table('users')
.first() .where('id', id)
.first()
if (!target) if (!target)
return res.json({ success: false, description: 'Could not get user with the specified ID.' }) return res.json({ success: false, description: 'Could not get user with the specified ID.' })
else if (!perms.higher(user, target)) else if (!perms.higher(user, target))
return res.json({ success: false, description: 'The user is in the same or higher group as you.' }) return res.json({ success: false, description: 'The user is in the same or higher group as you.' })
else if (target.username === 'root') else if (target.username === 'root')
return res.json({ success: false, description: 'Root user may not be edited.' }) return res.json({ success: false, description: 'Root user may not be edited.' })
const update = {} const update = {}
if (req.body.username !== undefined) { if (req.body.username !== undefined) {
update.username = `${req.body.username}` update.username = String(req.body.username).trim()
if (update.username.length < 4 || update.username.length > 32) if (update.username.length < 4 || update.username.length > 32)
return res.json({ success: false, description: 'Username must have 4-32 characters.' }) return res.json({ success: false, description: 'Username must have 4-32 characters.' })
} }
if (req.body.enabled !== undefined) if (req.body.enabled !== undefined)
update.enabled = Boolean(req.body.enabled) update.enabled = Boolean(req.body.enabled)
if (req.body.group !== undefined) { if (req.body.group !== undefined) {
update.permission = perms.permissions[req.body.group] || target.permission update.permission = perms.permissions[req.body.group] || target.permission
if (typeof update.permission !== 'number' || update.permission < 0) if (typeof update.permission !== 'number' || update.permission < 0)
update.permission = target.permission update.permission = target.permission
} }
await db.table('users') let password
.where('id', id) if (req.body.resetPassword) {
.update(update) password = randomstring.generate(16)
utils.invalidateStatsCache('users') update.password = await self.hash(password, 10)
if (!req.body.resetPassword)
return res.json({ success: true, update })
const password = randomstring.generate(16)
bcrypt.hash(password, 10, async (error, hash) => {
if (error) {
logger.error(error)
return res.json({ success: false, description: 'Error generating password hash (╯°□°)╯︵ ┻━┻.' })
} }
await db.table('users') await db.table('users')
.where('id', id) .where('id', id)
.update('password', hash) .update(update)
utils.invalidateStatsCache('users')
return res.json({ success: true, update, password }) const response = { success: true, update }
}) if (password) response.password = password
} return res.json(response)
} catch (error) {
authController.disableUser = async (req, res, next) => { logger.error(error)
const body = { return res.status(500).json({ success: false, description: 'An unexpected error occurred. Try again?' })
id: req.body.id,
enabled: false
} }
req.body = body
return authController.editUser(req, res, next)
} }
authController.listUsers = async (req, res, next) => { self.disableUser = async (req, res, next) => {
req.body = { id: req.body.id, enabled: false }
return self.editUser(req, res, next)
}
self.listUsers = async (req, res, next) => {
const user = await utils.authorize(req, res) const user = await utils.authorize(req, res)
if (!user) return if (!user) return
@ -236,53 +190,55 @@ authController.listUsers = async (req, res, next) => {
if (!isadmin) if (!isadmin)
return res.status(403).end() return res.status(403).end()
const count = await db.table('users') try {
.count('id as count') const count = await db.table('users')
.then(rows => rows[0].count) .count('id as count')
if (!count) .then(rows => rows[0].count)
return res.json({ success: true, users: [], count }) if (!count)
return res.json({ success: true, users: [], count })
let offset = req.params.page let offset = req.params.page
if (offset === undefined) offset = 0 if (offset === undefined) offset = 0
const users = await db.table('users') const users = await db.table('users')
.limit(25) .limit(25)
.offset(25 * offset) .offset(25 * offset)
.select('id', 'username', 'enabled', 'fileLength', 'permission') .select('id', 'username', 'enabled', 'permission')
const userids = [] const userids = []
for (const user of users) { for (const user of users) {
user.groups = perms.mapPermissions(user) user.groups = perms.mapPermissions(user)
delete user.permission delete user.permission
userids.push(user.id) userids.push(user.id)
user.uploadsCount = 0 user.uploadsCount = 0
user.diskUsage = 0 user.diskUsage = 0
}
const maps = {}
const uploads = await db.table('files')
.whereIn('userid', userids)
for (const upload of uploads) {
if (maps[upload.userid] === undefined)
maps[upload.userid] = { count: 0, size: 0 }
maps[upload.userid].count++
maps[upload.userid].size += parseInt(upload.size)
}
for (const user of users) {
if (!maps[user.id]) continue
user.uploadsCount = maps[user.id].count
user.diskUsage = maps[user.id].size
}
return res.json({ success: true, users, count })
} catch (error) {
logger.error(error)
return res.status(500).json({ success: false, description: 'An unexpected error occurred. Try again?' })
} }
const maps = {}
const uploads = await db.table('files').whereIn('userid', userids)
for (const upload of uploads) {
// This is the fastest method that I can think of
if (maps[upload.userid] === undefined)
maps[upload.userid] = {
count: 0,
size: 0
}
maps[upload.userid].count++
maps[upload.userid].size += parseInt(upload.size)
}
for (const user of users) {
if (!maps[user.id]) continue
user.uploadsCount = maps[user.id].count
user.diskUsage = maps[user.id].size
}
return res.json({ success: true, users, count })
} }
module.exports = authController module.exports = self

View File

@ -0,0 +1,79 @@
const { promisify } = require('util')
const config = require('./../config')
const fs = require('fs')
const logger = require('./../logger')
const path = require('path')
const self = {}
// Promisify these fs functions
const fsFuncs = [
'access',
'lstat',
'mkdir',
'readdir',
'readFile',
'rename',
'rmdir',
'symlink',
'unlink'
]
for (const fsFunc of fsFuncs)
self[fsFunc] = promisify(fs[fsFunc])
self.uploads = path.resolve(config.uploads.folder)
self.chunks = path.join(self.uploads, 'chunks')
self.thumbs = path.join(self.uploads, 'thumbs')
self.zips = path.join(self.uploads, 'zips')
self.thumbPlaceholder = path.resolve(config.uploads.generateThumbs.placeholder || 'public/images/unavailable.png')
self.logs = path.resolve(config.logsFolder)
self.customPages = path.resolve('pages/custom')
self.public = path.resolve('public')
self.errorRoot = path.resolve(config.errorPages.rootDir)
const verify = [
self.uploads,
self.chunks,
self.thumbs,
self.zips,
self.logs,
self.customPages
]
self.init = async () => {
try {
for (const p of verify)
try {
await self.access(p)
} catch (err) {
if (err.code !== 'ENOENT') {
logger.error(err)
} else {
const mkdir = await self.mkdir(p)
if (mkdir)
logger.log(`Created directory: ${p}`)
}
}
// Purge chunks directory
const uuidDirs = await self.readdir(self.chunks)
for (const uuid of uuidDirs) {
const root = path.join(self.chunks, uuid)
const chunks = await self.readdir(root)
for (const chunk of chunks)
await self.unlink(path.join(root, chunk))
await self.rmdir(root)
}
self.verified = true
} catch (error) {
logger.error(error)
}
}
module.exports = self

View File

@ -1,32 +1,34 @@
const permissionController = {} const self = {}
permissionController.permissions = { self.permissions = {
user: 0, // upload & delete own files, create & delete albums user: 0, // Upload & delete own files, create & delete albums
moderator: 50, // delete other user's files moderator: 50, // Delete other user's files
admin: 80, // manage users (disable accounts) & create moderators admin: 80, // Manage users (disable accounts) & create moderators
superadmin: 100 // create admins superadmin: 100 // Create admins
// groups will inherit permissions from groups which have lower value // Groups will inherit permissions from groups which have lower value
} }
permissionController.is = (user, group) => { self.is = (user, group) => {
// root bypass // root bypass
if (user.username === 'root') return true if (user.username === 'root')
return true
const permission = user.permission || 0 const permission = user.permission || 0
return permission >= permissionController.permissions[group] return permission >= self.permissions[group]
} }
permissionController.higher = (user, target) => { self.higher = (user, target) => {
const userPermission = user.permission || 0 const userPermission = user.permission || 0
const targetPermission = target.permission || 0 const targetPermission = target.permission || 0
return userPermission > targetPermission return userPermission > targetPermission
} }
permissionController.mapPermissions = user => { self.mapPermissions = user => {
const map = {} const map = {}
Object.keys(permissionController.permissions).forEach(group => { Object.keys(self.permissions).forEach(group => {
map[group] = permissionController.is(user, group) map[group] = self.is(user, group)
}) })
return map return map
} }
module.exports = permissionController module.exports = self

View File

@ -1,74 +1,99 @@
const config = require('./../config') const config = require('./../config')
const db = require('knex')(config.database) const db = require('knex')(config.database)
const logger = require('./../logger')
const perms = require('./permissionController') const perms = require('./permissionController')
const randomstring = require('randomstring') const randomstring = require('randomstring')
const utils = require('./utilsController') const utils = require('./utilsController')
const TOKEN_LENGTH = 64 const self = {
const UNIQUE_TOKEN_MAX_TRIES = 3 tokenLength: 64,
tokenMaxTries: 3,
onHold: new Set()
}
const tokenController = {} self.generateUniqueToken = async () => {
for (let i = 0; i < self.tokenMaxTries; i++) {
const token = randomstring.generate(self.tokenLength)
if (self.onHold.has(token))
continue
tokenController.generateUniqueToken = () => { // Put token on-hold (wait for it to be inserted to DB)
return new Promise(resolve => { self.onHold.add(token)
const query = async i => {
const token = randomstring.generate(TOKEN_LENGTH) const user = await db.table('users')
const user = await db.table('users').where('token', token).first().catch(() => undefined) .where('token', token)
if (user === undefined) return resolve(token) .select('id')
if (++i < UNIQUE_TOKEN_MAX_TRIES) return query(i) .first()
resolve(null) if (user) {
self.onHold.delete(token)
continue
} }
query(0)
}) return token
}
return null
} }
tokenController.verify = async (req, res, next) => { self.verify = async (req, res, next) => {
const token = req.body.token const token = typeof req.body.token === 'string'
if (token === undefined) ? req.body.token.trim()
return res.status(401).json({ : ''
success: false,
description: 'No token provided.'
})
const user = await db.table('users').where('token', token).first() if (!token)
if (!user) return res.status(401).json({ success: false, description: 'No token provided.' })
return res.status(401).json({
success: false,
description: 'Invalid token.'
})
return res.json({ try {
success: true, const user = await db.table('users')
username: user.username, .where('token', token)
permissions: perms.mapPermissions(user) .select('username', 'permission')
}) .first()
if (!user)
return res.status(401).json({ success: false, description: 'Invalid token.' })
return res.json({
success: true,
username: user.username,
permissions: perms.mapPermissions(user)
})
} catch (error) {
logger.error(error)
return res.status(500).json({ success: false, description: 'An unexpected error occurred. Try again?' })
}
} }
tokenController.list = async (req, res, next) => { self.list = async (req, res, next) => {
const user = await utils.authorize(req, res) const user = await utils.authorize(req, res)
if (!user) return if (!user) return
return res.json({ return res.json({ success: true, token: user.token })
success: true,
token: user.token
})
} }
tokenController.change = async (req, res, next) => { self.change = async (req, res, next) => {
const user = await utils.authorize(req, res) const user = await utils.authorize(req, res)
if (!user) return if (!user) return
const newtoken = await tokenController.generateUniqueToken() const newToken = await self.generateUniqueToken()
if (!newtoken) if (!newToken)
return res.json({ success: false, description: 'Error generating unique token (╯°□°)╯︵ ┻━┻.' }) return res.json({ success: false, description: 'Sorry, we could not allocate a unique token. Try again?' })
await db.table('users').where('token', user.token).update({ try {
token: newtoken, await db.table('users')
timestamp: Math.floor(Date.now() / 1000) .where('token', user.token)
}) .update({
return res.json({ token: newToken,
success: true, timestamp: Math.floor(Date.now() / 1000)
token: newtoken })
}) self.onHold.delete(newToken)
return res.json({
success: true,
token: newToken
})
} catch (error) {
logger.error(error)
return res.status(500).json({ success: false, description: 'An unexpected error occurred. Try again?' })
}
} }
module.exports = tokenController module.exports = self

File diff suppressed because it is too large Load Diff

View File

@ -1,21 +1,37 @@
const { spawn } = require('child_process') const { promisify } = require('util')
const config = require('./../config') const config = require('./../config')
const db = require('knex')(config.database) const db = require('knex')(config.database)
const fetch = require('node-fetch') const fetch = require('node-fetch')
const ffmpeg = require('fluent-ffmpeg') const ffmpeg = require('fluent-ffmpeg')
const fs = require('fs') const fs = require('fs')
const logger = require('./../logger') const logger = require('./../logger')
const os = require('os')
const path = require('path') const path = require('path')
const paths = require('./pathsController')
const perms = require('./permissionController') const perms = require('./permissionController')
const sharp = require('sharp') const sharp = require('sharp')
const si = require('systeminformation')
const utilsController = {} const self = {
const _stats = { clamd: {
scanner: null,
timeout: config.uploads.scan.timeout || 5000,
chunkSize: config.uploads.scan.chunkSize || 64 * 1024
},
gitHash: null,
idSet: null,
idMaxTries: config.uploads.maxTries || 1,
imageExts: ['.webp', '.jpg', '.jpeg', '.gif', '.png', '.tiff', '.tif', '.svg'],
videoExts: ['.webm', '.mp4', '.wmv', '.avi', '.mov', '.mkv'],
ffprobe: promisify(ffmpeg.ffprobe)
}
const statsCache = {
system: { system: {
cache: null, cache: null,
generating: false, generating: false
generatedAt: 0
}, },
albums: { albums: {
cache: null, cache: null,
@ -37,23 +53,17 @@ const _stats = {
} }
} }
const uploadsDir = path.resolve(config.uploads.folder)
const thumbsDir = path.join(uploadsDir, 'thumbs')
const thumbPlaceholder = path.resolve(config.uploads.generateThumbs.placeholder || 'public/images/unavailable.png')
const cloudflareAuth = config.cloudflare.apiKey && config.cloudflare.email && config.cloudflare.zoneId const cloudflareAuth = config.cloudflare.apiKey && config.cloudflare.email && config.cloudflare.zoneId
utilsController.imageExtensions = ['.webp', '.jpg', '.jpeg', '.gif', '.png', '.tiff', '.tif', '.svg'] self.mayGenerateThumb = extname => {
utilsController.videoExtensions = ['.webm', '.mp4', '.wmv', '.avi', '.mov', '.mkv'] return (config.uploads.generateThumbs.image && self.imageExts.includes(extname)) ||
(config.uploads.generateThumbs.video && self.videoExts.includes(extname))
utilsController.mayGenerateThumb = extname => {
return (config.uploads.generateThumbs.image && utilsController.imageExtensions.includes(extname)) ||
(config.uploads.generateThumbs.video && utilsController.videoExtensions.includes(extname))
} }
// expand if necessary (must be lower case); for now only preserves some known tarballs // Expand if necessary (must be lower case); for now only preserves some known tarballs
utilsController.preserves = ['.tar.gz', '.tar.z', '.tar.bz2', '.tar.lzma', '.tar.lzo', '.tar.xz'] const extPreserves = ['.tar.gz', '.tar.z', '.tar.bz2', '.tar.lzma', '.tar.lzo', '.tar.xz']
utilsController.extname = filename => { self.extname = filename => {
// Always return blank string if the filename does not seem to have a valid extension // Always return blank string if the filename does not seem to have a valid extension
// Files such as .DS_Store (anything that starts with a dot, without any extension after) will still be accepted // Files such as .DS_Store (anything that starts with a dot, without any extension after) will still be accepted
if (!/\../.test(filename)) return '' if (!/\../.test(filename)) return ''
@ -69,9 +79,9 @@ utilsController.extname = filename => {
} }
// check against extensions that must be preserved // check against extensions that must be preserved
for (let i = 0; i < utilsController.preserves.length; i++) for (const extPreserve of extPreserves)
if (lower.endsWith(utilsController.preserves[i])) { if (lower.endsWith(extPreserve)) {
extname = utilsController.preserves[i] extname = extPreserve
break break
} }
@ -81,18 +91,20 @@ utilsController.extname = filename => {
return extname + multi return extname + multi
} }
utilsController.escape = string => { self.escape = (string) => {
// MIT License // MIT License
// Copyright(c) 2012-2013 TJ Holowaychuk // Copyright(c) 2012-2013 TJ Holowaychuk
// Copyright(c) 2015 Andreas Lubbe // Copyright(c) 2015 Andreas Lubbe
// Copyright(c) 2015 Tiancheng "Timothy" Gu // Copyright(c) 2015 Tiancheng "Timothy" Gu
if (!string) return string if (!string)
return string
const str = '' + string const str = String(string)
const match = /["'&<>]/.exec(str) const match = /["'&<>]/.exec(str)
if (!match) return str if (!match)
return str
let escape let escape
let html = '' let html = ''
@ -132,258 +144,266 @@ utilsController.escape = string => {
: html : html
} }
utilsController.authorize = async (req, res) => { self.authorize = async (req, res) => {
// TODO: Improve usage of this function by the other APIs
const token = req.headers.token const token = req.headers.token
if (token === undefined) { if (token === undefined) {
res.status(401).json({ success: false, description: 'No token provided.' }) res.status(401).json({ success: false, description: 'No token provided.' })
return return
} }
const user = await db.table('users').where('token', token).first() try {
if (user) { const user = await db.table('users')
if (user.enabled === false || user.enabled === 0) { .where('token', token)
res.json({ success: false, description: 'This account has been disabled.' }) .first()
return if (user) {
if (user.enabled === false || user.enabled === 0) {
res.json({ success: false, description: 'This account has been disabled.' })
return
}
return user
}
res.status(401).json({ success: false, description: 'Invalid token.' })
} catch (error) {
logger.error(error)
res.status(500).json({ success: false, description: 'An unexpected error occurred. Try again?' })
}
}
self.generateThumbs = async (name, extname, force) => {
const thumbname = path.join(paths.thumbs, name.slice(0, -extname.length) + '.png')
try {
// Check if thumbnail already exists
try {
const lstat = await paths.lstat(thumbname)
if (lstat.isSymbolicLink())
// Unlink if symlink (should be symlink to the placeholder)
await paths.unlink(thumbname)
else if (!force)
// Continue only if it does not exist, unless forced to
return true
} catch (error) {
// Re-throw error
if (error.code !== 'ENOENT')
throw error
}
// Full path to input file
const input = path.join(paths.uploads, name)
// If image extension
if (self.imageExts.includes(extname)) {
const resizeOptions = {
width: 200,
height: 200,
fit: 'contain',
background: {
r: 0,
g: 0,
b: 0,
alpha: 0
}
}
const image = sharp(input)
const metadata = await image.metadata()
if (metadata.width > resizeOptions.width || metadata.height > resizeOptions.height) {
await image
.resize(resizeOptions)
.toFile(thumbname)
} else if (metadata.width === resizeOptions.width && metadata.height === resizeOptions.height) {
await image
.toFile(thumbname)
} else {
const x = resizeOptions.width - metadata.width
const y = resizeOptions.height - metadata.height
await image
.extend({
top: Math.floor(y / 2),
bottom: Math.ceil(y / 2),
left: Math.floor(x / 2),
right: Math.ceil(x / 2),
background: resizeOptions.background
})
.toFile(thumbname)
}
} else if (self.videoExts.includes(extname)) {
const metadata = await self.ffprobe(input)
// Skip files that do not have video streams/channels
if (!metadata.streams || !metadata.streams.some(s => s.codec_type === 'video'))
throw 'File does not contain any video stream'
await new Promise((resolve, reject) => {
ffmpeg(input)
.inputOptions([
`-ss ${parseInt(metadata.format.duration) * 20 / 100}`
])
.output(thumbname)
.outputOptions([
'-vframes 1',
'-vf scale=200:200:force_original_aspect_ratio=decrease'
])
.on('error', async error => {
// Try to unlink thumbnail,
// since ffmpeg may have created an incomplete thumbnail
try {
await paths.unlink(thumbname)
} catch (err) {
if (err && err.code !== 'ENOENT')
logger.error(`[${name}]: ${err.toString()}`)
}
return reject(error)
})
.on('end', () => resolve(true))
.run()
})
} else {
return false
}
} catch (error) {
// Suppress error logging for errors these patterns
const errorString = error.toString()
const suppress = [
/Input file contains unsupported image format/,
/Invalid data found when processing input/,
/File does not contain any video stream/
]
if (!suppress.some(t => t.test(errorString)))
logger.error(`[${name}]: ${errorString}`)
try {
await paths.symlink(paths.thumbPlaceholder, thumbname)
return true
} catch (err) {
logger.error(err)
return false
} }
return user
} }
res.status(401).json({ return true
success: false,
description: 'Invalid token.'
})
} }
utilsController.generateThumbs = (name, force) => { self.unlinkFile = async (filename, predb) => {
return new Promise(resolve => { try {
const extname = utilsController.extname(name) await paths.unlink(path.join(paths.uploads, filename))
const thumbname = path.join(thumbsDir, name.slice(0, -extname.length) + '.png') } catch (error) {
fs.lstat(thumbname, async (error, stats) => { // Return true if file does not exist
if (error && error.code !== 'ENOENT') { if (error.code !== 'ENOENT')
logger.error(error) throw error
return resolve(false) }
}
if (!error && stats.isSymbolicLink()) { const identifier = filename.split('.')[0]
// Unlink symlink
const unlink = await new Promise(resolve => {
fs.unlink(thumbname, error => {
if (error) logger.error(error)
resolve(!error)
})
})
if (!unlink) return resolve(false)
}
// Only make thumbnail if it does not exist (ENOENT) // Do not remove from identifiers cache on pre-db-deletion
if (!error && !force) return resolve(true) // eslint-disable-next-line curly
if (!predb && self.idSet) {
self.idSet.delete(identifier)
// logger.log(`Removed ${identifier} from identifiers cache (deleteFile)`)
}
// Full path to input file const extname = self.extname(filename)
const input = path.join(__dirname, '..', config.uploads.folder, name) if (self.imageExts.includes(extname) || self.videoExts.includes(extname))
try {
new Promise((resolve, reject) => { await paths.unlink(path.join(paths.thumbs, `${identifier}.png`))
// If image extension } catch (error) {
if (utilsController.imageExtensions.includes(extname)) { if (error.code !== 'ENOENT')
const resizeOptions = { throw error
width: 200, }
height: 200,
fit: 'contain',
background: {
r: 0,
g: 0,
b: 0,
alpha: 0
}
}
const image = sharp(input)
return image
.metadata()
.then(metadata => {
if (metadata.width > resizeOptions.width || metadata.height > resizeOptions.height) {
return image
.resize(resizeOptions)
.toFile(thumbname)
} else if (metadata.width === resizeOptions.width && metadata.height === resizeOptions.height) {
return image
.toFile(thumbname)
} else {
const x = resizeOptions.width - metadata.width
const y = resizeOptions.height - metadata.height
return image
.extend({
top: Math.floor(y / 2),
bottom: Math.ceil(y / 2),
left: Math.floor(x / 2),
right: Math.ceil(x / 2),
background: resizeOptions.background
})
.toFile(thumbname)
}
})
.then(() => resolve(true))
.catch(reject)
}
// Otherwise video extension
ffmpeg.ffprobe(input, (error, metadata) => {
if (error) return reject(error)
// Skip files that do not have video streams/channels
if (!metadata.streams || !metadata.streams.some(s => s.codec_type === 'video'))
// eslint-disable-next-line prefer-promise-reject-errors
return reject('File does not contain any video stream')
ffmpeg(input)
.inputOptions([
`-ss ${parseInt(metadata.format.duration) * 20 / 100}`
])
.output(thumbname)
.outputOptions([
'-vframes 1',
'-vf scale=200:200:force_original_aspect_ratio=decrease'
])
.on('error', error => {
// Attempt to unlink thumbnail
// Since ffmpeg may have already created an incomplete thumbnail
fs.unlink(thumbname, err => {
if (err && err.code !== 'ENOENT')
logger.error(`[${name}]: ${err.toString()}`)
reject(error)
})
})
.on('end', () => resolve(true))
.run()
})
})
.then(resolve)
.catch(error => {
// Suppress error logging for errors these patterns
const errorString = error.toString()
const suppress = [
/Input file contains unsupported image format/,
/Invalid data found when processing input/,
/File does not contain any video stream/
]
if (!suppress.some(t => t.test(errorString)))
logger.error(`[${name}]: ${errorString}`)
fs.symlink(thumbPlaceholder, thumbname, err => {
if (err) logger.error(err)
// We return true anyway
// if we could make a symlink to the placeholder image
resolve(!err)
})
})
})
})
} }
utilsController.deleteFile = (filename, set) => { self.bulkDeleteFromDb = async (field, values, user) => {
return new Promise((resolve, reject) => {
const extname = utilsController.extname(filename)
return fs.unlink(path.join(uploadsDir, filename), error => {
if (error && error.code !== 'ENOENT') return reject(error)
const identifier = filename.split('.')[0]
// eslint-disable-next-line curly
if (set) {
set.delete(identifier)
// logger.log(`Removed ${identifier} from identifiers cache (deleteFile)`)
}
if (utilsController.imageExtensions.includes(extname) || utilsController.videoExtensions.includes(extname)) {
const thumb = `${identifier}.png`
return fs.unlink(path.join(thumbsDir, thumb), error => {
if (error && error.code !== 'ENOENT') return reject(error)
resolve(true)
})
}
resolve(true)
})
})
}
utilsController.bulkDeleteFiles = async (field, values, user, set) => {
if (!user || !['id', 'name'].includes(field)) return if (!user || !['id', 'name'].includes(field)) return
// SQLITE_LIMIT_VARIABLE_NUMBER, which defaults to 999 // SQLITE_LIMIT_VARIABLE_NUMBER, which defaults to 999
// Read more: https://www.sqlite.org/limits.html // Read more: https://www.sqlite.org/limits.html
const MAX_VARIABLES_CHUNK_SIZE = 999 const MAX_VARIABLES_CHUNK_SIZE = 999
const chunks = [] const chunks = []
const _values = values.slice() // Make a shallow copy of the array while (values.length)
while (_values.length) chunks.push(values.splice(0, MAX_VARIABLES_CHUNK_SIZE))
chunks.push(_values.splice(0, MAX_VARIABLES_CHUNK_SIZE))
const failed = [] let failed = []
const ismoderator = perms.is(user, 'moderator') const ismoderator = perms.is(user, 'moderator')
await Promise.all(chunks.map((chunk, index) => {
const job = async () => {
try {
const files = await db.table('files')
.whereIn(field, chunk)
.where(function () {
if (!ismoderator)
this.where('userid', user.id)
})
// Push files that could not be found in DB try {
failed.push.apply(failed, chunk.filter(v => !files.find(file => file[field] === v))) let unlinkeds = []
const albumids = []
// Delete all found files physically for (let i = 0; i < chunks.length; i++) {
const deletedFiles = [] const files = await db.table('files')
await Promise.all(files.map(file => .whereIn(field, chunks[i])
utilsController.deleteFile(file.name) .where(function () {
.then(() => deletedFiles.push(file)) if (!ismoderator)
.catch(error => { self.where('userid', user.id)
failed.push(file[field]) })
logger.error(error)
})
))
if (!deletedFiles.length) // Push files that could not be found in db
return true failed = failed.concat(chunks[i].filter(value => !files.find(file => file[field] === value)))
// Delete all found files from database // Unlink all found files
const deletedFromDb = await db.table('files') const unlinked = []
.whereIn('id', deletedFiles.map(file => file.id)) for (const file of files)
.del() try {
await self.unlinkFile(file.name, true)
if (set) unlinked.push(file)
deletedFiles.forEach(file => { } catch (error) {
const identifier = file.name.split('.')[0] logger.error(error)
set.delete(identifier) failed.push(file[field])
// logger.log(`Removed ${identifier} from identifiers cache (bulkDeleteFiles)`)
})
// Update albums if necessary
if (deletedFromDb) {
const albumids = []
deletedFiles.forEach(file => {
if (file.albumid && !albumids.includes(file.albumid))
albumids.push(file.albumid)
})
await db.table('albums')
.whereIn('id', albumids)
.update('editedAt', Math.floor(Date.now() / 1000))
.catch(logger.error)
} }
// Purge Cloudflare's cache if necessary if (!unlinked.length)
if (config.cloudflare.purgeCache) continue
utilsController.purgeCloudflareCache(deletedFiles.map(file => file.name), true, true)
.then(results => { // Delete all unlinked files from db
for (const result of results) await db.table('files')
if (result.errors.length) .whereIn('id', unlinked.map(file => file.id))
result.errors.forEach(error => logger.error(`[CF]: ${error}`)) .del()
}) self.invalidateStatsCache('uploads')
} catch (error) {
logger.error(error) if (self.idSet)
} unlinked.forEach(file => {
const identifier = file.name.split('.')[0]
self.idSet.delete(identifier)
// logger.log(`Removed ${identifier} from identifiers cache (bulkDeleteFromDb)`)
})
// Push album ids
unlinked.forEach(file => {
if (file.albumid && !albumids.includes(file.albumid))
albumids.push(file.albumid)
})
// Push unlinked files
unlinkeds = unlinkeds.concat(unlinked)
} }
return new Promise(resolve => job().then(() => resolve()))
})) if (unlinkeds.length) {
// Update albums if necessary, but do not wait
if (albumids.length)
db.table('albums')
.whereIn('id', albumids)
.update('editedAt', Math.floor(Date.now() / 1000))
.catch(logger.error)
// Purge Cloudflare's cache if necessary, but do not wait
if (config.cloudflare.purgeCache)
self.purgeCloudflareCache(unlinkeds.map(file => file.name), true, true)
.then(results => {
for (const result of results)
if (result.errors.length)
result.errors.forEach(error => logger.error(`[CF]: ${error}`))
})
}
} catch (error) {
logger.error(error)
}
return failed return failed
} }
utilsController.purgeCloudflareCache = async (names, uploads, thumbs) => { self.purgeCloudflareCache = async (names, uploads, thumbs) => {
if (!Array.isArray(names) || !names.length || !cloudflareAuth) if (!Array.isArray(names) || !names.length || !cloudflareAuth)
return [{ return [{
success: false, success: false,
@ -398,8 +418,8 @@ utilsController.purgeCloudflareCache = async (names, uploads, thumbs) => {
names = names.map(name => { names = names.map(name => {
if (uploads) { if (uploads) {
const url = `${domain}/${name}` const url = `${domain}/${name}`
const extname = utilsController.extname(name) const extname = self.extname(name)
if (thumbs && utilsController.mayGenerateThumb(extname)) if (thumbs && self.mayGenerateThumb(extname))
thumbNames.push(`${domain}/thumbs/${name.slice(0, -extname.length)}.png`) thumbNames.push(`${domain}/thumbs/${name.slice(0, -extname.length)}.png`)
return url return url
} else { } else {
@ -411,87 +431,70 @@ utilsController.purgeCloudflareCache = async (names, uploads, thumbs) => {
// Split array into multiple arrays with max length of 30 URLs // Split array into multiple arrays with max length of 30 URLs
// https://api.cloudflare.com/#zone-purge-files-by-url // https://api.cloudflare.com/#zone-purge-files-by-url
const MAX_LENGTH = 30 const MAX_LENGTH = 30
const files = [] const chunks = []
while (names.length) while (names.length)
files.push(names.splice(0, MAX_LENGTH)) chunks.push(names.splice(0, MAX_LENGTH))
const url = `https://api.cloudflare.com/client/v4/zones/${config.cloudflare.zoneId}/purge_cache` const url = `https://api.cloudflare.com/client/v4/zones/${config.cloudflare.zoneId}/purge_cache`
const results = [] const results = []
await new Promise(resolve => {
const purge = async i => {
const result = {
success: false,
files: files[i],
errors: []
}
try { for (const chunk of chunks) {
const fetchPurge = await fetch(url, { const result = {
method: 'POST', success: false,
body: JSON.stringify({ files: chunk,
files: result.files errors: []
}),
headers: {
'Content-Type': 'application/json',
'X-Auth-Email': config.cloudflare.email,
'X-Auth-Key': config.cloudflare.apiKey
}
}).then(res => res.json())
result.success = fetchPurge.success
if (Array.isArray(fetchPurge.errors) && fetchPurge.errors.length)
result.errors = fetchPurge.errors.map(error => `${error.code}: ${error.message}`)
} catch (error) {
result.errors = [error.toString()]
}
results.push(result)
if (i < files.length - 1)
purge(i + 1)
else
resolve()
} }
purge(0)
}) try {
const purge = await fetch(url, {
method: 'POST',
body: JSON.stringify({ files: chunk }),
headers: {
'Content-Type': 'application/json',
'X-Auth-Email': config.cloudflare.email,
'X-Auth-Key': config.cloudflare.apiKey
}
})
const response = await purge.json()
result.success = response.success
if (Array.isArray(response.errors) && response.errors.length)
result.errors = response.errors.map(error => `${error.code}: ${error.message}`)
} catch (error) {
result.errors = [error.toString()]
}
results.push(result)
}
return results return results
} }
utilsController.getMemoryUsage = () => { self.bulkDeleteExpired = async (dryrun) => {
// For now this is linux-only. Not sure if darwin has this too. const timestamp = Date.now() / 1000
return new Promise((resolve, reject) => { const field = 'id'
const prc = spawn('free', ['-b']) const sudo = { username: 'root' }
prc.stdout.setEncoding('utf8')
prc.stdout.on('data', data => { const result = {}
const parsed = {} result.expired = await db.table('files')
const str = data.toString() .where('expirydate', '<=', timestamp)
const lines = str.split(/\n/g) .select(field)
for (let i = 0; i < lines.length; i++) { .then(rows => rows.map(row => row[field]))
lines[i] = lines[i].split(/\s+/)
if (i === 0) continue if (!dryrun) {
const id = lines[i][0].toLowerCase().slice(0, -1) const values = result.expired.slice() // Make a shallow copy
if (!id) continue result.failed = await self.bulkDeleteFromDb(field, values, sudo)
if (!parsed[id]) parsed[id] = {} }
for (let j = 1; j < lines[i].length; j++) {
const bytes = parseInt(lines[i][j]) return result
parsed[id][lines[0][j]] = isNaN(bytes) ? null : bytes
}
}
resolve(parsed)
})
prc.on('close', code => {
reject(new Error(`Process exited with code ${code}.`))
})
})
} }
utilsController.invalidateStatsCache = type => { self.invalidateStatsCache = type => {
if (!['albums', 'users', 'uploads'].includes(type)) return if (!['albums', 'users', 'uploads'].includes(type)) return
_stats[type].invalidatedAt = Date.now() statsCache[type].invalidatedAt = Date.now()
} }
utilsController.stats = async (req, res, next) => { self.stats = async (req, res, next) => {
const user = await utilsController.authorize(req, res) const user = await self.authorize(req, res)
if (!user) return if (!user) return
const isadmin = perms.is(user, 'admin') const isadmin = perms.is(user, 'admin')
@ -499,48 +502,44 @@ utilsController.stats = async (req, res, next) => {
const stats = {} const stats = {}
if (!_stats.system.cache && _stats.system.generating) { // Re-use caches as long as they are still valid
if (!statsCache.system.cache && statsCache.system.generating) {
stats.system = false stats.system = false
} else if ((Date.now() - _stats.system.generatedAt <= 1000) || _stats.system.generating) { } else if (statsCache.system.generating) {
// Re-use system cache for only 1000ms stats.system = statsCache.system.cache
stats.system = _stats.system.cache
} else { } else {
_stats.system.generating = true statsCache.system.generating = true
const platform = os.platform()
const os = await si.osInfo()
const currentLoad = await si.currentLoad()
const mem = await si.mem()
stats.system = { stats.system = {
platform: `${platform}-${os.arch()}`, platform: `${os.platform} ${os.arch}`,
systemMemory: null, distro: `${os.distro} ${os.release}`,
nodeVersion: `${process.versions.node}`, kernel: os.kernel,
memoryUsage: process.memoryUsage().rss cpuLoad: `${currentLoad.currentload.toFixed(1)}%`,
cpusLoad: currentLoad.cpus.map(cpu => `${cpu.load.toFixed(1)}%`).join(', '),
systemMemory: {
used: mem.active,
total: mem.total
},
memoryUsage: process.memoryUsage().rss,
nodeVersion: `${process.versions.node}`
} }
if (platform === 'linux') {
const memoryUsage = await utilsController.getMemoryUsage()
stats.system.systemMemory = {
used: memoryUsage.mem.used,
total: memoryUsage.mem.total
}
} else {
delete stats.system.systemMemory
}
if (platform !== 'win32')
stats.system.loadAverage = `${os.loadavg().map(load => load.toFixed(2)).join(', ')}`
// Update cache // Update cache
_stats.system.cache = stats.system statsCache.system.cache = stats.system
_stats.system.generatedAt = Date.now() statsCache.system.generating = false
_stats.system.generating = false
} }
// Re-use albums, users, and uploads caches as long as they are still valid if (!statsCache.albums.cache && statsCache.albums.generating) {
if (!_stats.albums.cache && _stats.albums.generating) {
stats.albums = false stats.albums = false
} else if ((_stats.albums.invalidatedAt < _stats.albums.generatedAt) || _stats.albums.generating) { } else if ((statsCache.albums.invalidatedAt < statsCache.albums.generatedAt) || statsCache.albums.generating) {
stats.albums = _stats.albums.cache stats.albums = statsCache.albums.cache
} else { } else {
_stats.albums.generating = true statsCache.albums.generating = true
stats.albums = { stats.albums = {
total: 0, total: 0,
active: 0, active: 0,
@ -560,7 +559,7 @@ utilsController.stats = async (req, res, next) => {
if (album.zipGeneratedAt) identifiers.push(album.identifier) if (album.zipGeneratedAt) identifiers.push(album.identifier)
} }
const zipsDir = path.join(uploadsDir, 'zips') const zipsDir = path.join(paths.uploads, 'zips')
await Promise.all(identifiers.map(identifier => { await Promise.all(identifiers.map(identifier => {
return new Promise(resolve => { return new Promise(resolve => {
const filePath = path.join(zipsDir, `${identifier}.zip`) const filePath = path.join(zipsDir, `${identifier}.zip`)
@ -572,17 +571,17 @@ utilsController.stats = async (req, res, next) => {
})) }))
// Update cache // Update cache
_stats.albums.cache = stats.albums statsCache.albums.cache = stats.albums
_stats.albums.generatedAt = Date.now() statsCache.albums.generatedAt = Date.now()
_stats.albums.generating = false statsCache.albums.generating = false
} }
if (!_stats.users.cache && _stats.users.generating) { if (!statsCache.users.cache && statsCache.users.generating) {
stats.users = false stats.users = false
} else if ((_stats.users.invalidatedAt < _stats.users.generatedAt) || _stats.users.generating) { } else if ((statsCache.users.invalidatedAt < statsCache.users.generatedAt) || statsCache.users.generating) {
stats.users = _stats.users.cache stats.users = statsCache.users.cache
} else { } else {
_stats.users.generating = true statsCache.users.generating = true
stats.users = { stats.users = {
total: 0, total: 0,
disabled: 0 disabled: 0
@ -609,17 +608,17 @@ utilsController.stats = async (req, res, next) => {
} }
// Update cache // Update cache
_stats.users.cache = stats.users statsCache.users.cache = stats.users
_stats.users.generatedAt = Date.now() statsCache.users.generatedAt = Date.now()
_stats.users.generating = false statsCache.users.generating = false
} }
if (!_stats.uploads.cache && _stats.uploads.generating) { if (!statsCache.uploads.cache && statsCache.uploads.generating) {
stats.uploads = false stats.uploads = false
} else if ((_stats.uploads.invalidatedAt < _stats.uploads.generatedAt) || _stats.uploads.generating) { } else if ((statsCache.uploads.invalidatedAt < statsCache.uploads.generatedAt) || statsCache.uploads.generating) {
stats.uploads = _stats.uploads.cache stats.uploads = statsCache.uploads.cache
} else { } else {
_stats.uploads.generating = true statsCache.uploads.generating = true
stats.uploads = { stats.uploads = {
total: 0, total: 0,
size: 0, size: 0,
@ -632,22 +631,22 @@ utilsController.stats = async (req, res, next) => {
stats.uploads.total = uploads.length stats.uploads.total = uploads.length
for (const upload of uploads) { for (const upload of uploads) {
stats.uploads.size += parseInt(upload.size) stats.uploads.size += parseInt(upload.size)
const extname = utilsController.extname(upload.name) const extname = self.extname(upload.name)
if (utilsController.imageExtensions.includes(extname)) if (self.imageExts.includes(extname))
stats.uploads.images++ stats.uploads.images++
else if (utilsController.videoExtensions.includes(extname)) else if (self.videoExts.includes(extname))
stats.uploads.videos++ stats.uploads.videos++
else else
stats.uploads.others++ stats.uploads.others++
} }
// Update cache // Update cache
_stats.uploads.cache = stats.uploads statsCache.uploads.cache = stats.uploads
_stats.uploads.generatedAt = Date.now() statsCache.uploads.generatedAt = Date.now()
_stats.uploads.generating = false statsCache.uploads.generating = false
} }
return res.json({ success: true, stats }) return res.json({ success: true, stats })
} }
module.exports = utilsController module.exports = self

View File

@ -34,6 +34,7 @@ const init = function (db) {
table.string('ip') table.string('ip')
table.integer('albumid') table.integer('albumid')
table.integer('timestamp') table.integer('timestamp')
table.integer('expirydate')
}).then(() => {}) }).then(() => {})
}) })
@ -46,7 +47,6 @@ const init = function (db) {
table.string('token') table.string('token')
table.integer('enabled') table.integer('enabled')
table.integer('timestamp') table.integer('timestamp')
table.integer('fileLength')
table.integer('permission') table.integer('permission')
}).then(() => { }).then(() => {
db.table('users').where({ username: 'root' }).then((user) => { db.table('users').where({ username: 'root' }).then((user) => {

View File

@ -3,6 +3,9 @@ const db = require('knex')(config.database)
const perms = require('./../controllers/permissionController') const perms = require('./../controllers/permissionController')
const map = { const map = {
files: {
expirydate: 'integer'
},
albums: { albums: {
editedAt: 'integer', editedAt: 'integer',
zipGeneratedAt: 'integer', zipGeneratedAt: 'integer',
@ -12,26 +15,25 @@ const map = {
}, },
users: { users: {
enabled: 'integer', enabled: 'integer',
fileLength: 'integer',
permission: 'integer' permission: 'integer'
} }
} }
const migration = {} ;(async () => {
migration.start = async () => { const tableNames = Object.keys(map)
const tables = Object.keys(map) for (const tableName of tableNames) {
await Promise.all(tables.map(table => { const columnNames = Object.keys(map[tableName])
const columns = Object.keys(map[table]) for (const columnName of columnNames) {
return Promise.all(columns.map(async column => { if (await db.schema.hasColumn(tableName, columnName))
if (await db.schema.hasColumn(table, column)) continue
return // console.log(`SKIP: ${column} => ${table}.`)
const columnType = map[table][column] const columnType = map[tableName][columnName]
return db.schema.table(table, t => { t[columnType](column) }) await db.schema.table(tableName, table => {
.then(() => console.log(`OK: ${column} (${columnType}) => ${table}.`)) table[columnType](columnName)
.catch(console.error) })
})) console.log(`OK: ${tableName} <- ${columnName} (${columnType})`)
})) }
}
await db.table('users') await db.table('users')
.where('username', 'root') .where('username', 'root')
@ -39,15 +41,17 @@ migration.start = async () => {
.update({ .update({
permission: perms.permissions.superadmin permission: perms.permissions.superadmin
}) })
.then(rows => { .then(result => {
// NOTE: permissionController.js actually have a hard-coded check for "root" account so that // NOTE: permissionController.js actually has a hard-coded check for "root" account so that
// it will always have "superadmin" permission regardless of its permission value in database // it will always have "superadmin" permission regardless of its permission value in database
if (!rows) return console.log('Unable to update root\'s permission into superadmin.') if (!result) return console.log('Unable to update root\'s permission into superadmin.')
console.log(`Updated root's permission to ${perms.permissions.superadmin} (superadmin).`) console.log(`Updated root's permission to ${perms.permissions.superadmin} (superadmin).`)
}) })
console.log('Migration finished! Now you may start lolisafe normally.') console.log('Migration finished! Now you may start lolisafe normally.')
process.exit(0) })()
} .then(() => process.exit(0))
.catch(error => {
migration.start() console.error(error)
process.exit(1)
})

View File

@ -1,26 +1,26 @@
const { inspect } = require('util') const { inspect } = require('util')
const logger = {} const self = {}
logger.clean = item => { const clean = item => {
if (typeof item === 'string') return item if (typeof item === 'string') return item
const cleaned = inspect(item, { depth: 0 }) const cleaned = inspect(item, { depth: 0 })
return cleaned return cleaned
} }
logger.write = (content, options = {}) => { const write = (content, options = {}) => {
const date = new Date().toISOString() const date = new Date().toISOString()
.replace(/T/, ' ') .replace(/T/, ' ')
.replace(/\..*/, '') .replace(/\..*/, '')
const stream = options.error ? process.stderr : process.stdout const stream = options.error ? process.stderr : process.stdout
stream.write(`[${date}]: ${options.prefix || ''}${logger.clean(content)}\n`) stream.write(`[${date}]: ${options.prefix || ''}${clean(content)}\n`)
} }
logger.log = logger.write self.log = write
logger.error = (content, options = {}) => { self.error = (content, options = {}) => {
options.error = true options.error = true
logger.write(content, options) write(content, options)
} }
module.exports = logger module.exports = self

View File

@ -2,10 +2,10 @@ const bodyParser = require('body-parser')
const clamd = require('clamdjs') const clamd = require('clamdjs')
const config = require('./config') const config = require('./config')
const express = require('express') const express = require('express')
const fs = require('fs')
const helmet = require('helmet') const helmet = require('helmet')
const logger = require('./logger') const logger = require('./logger')
const nunjucks = require('nunjucks') const nunjucks = require('nunjucks')
const path = require('path')
const RateLimit = require('express-rate-limit') const RateLimit = require('express-rate-limit')
const readline = require('readline') const readline = require('readline')
const safe = express() const safe = express()
@ -17,6 +17,7 @@ process.on('unhandledRejection', error => {
logger.error(error, { prefix: 'Unhandled Rejection (Promise): ' }) logger.error(error, { prefix: 'Unhandled Rejection (Promise): ' })
}) })
const paths = require('./controllers/pathsController')
const utils = require('./controllers/utilsController') const utils = require('./controllers/utilsController')
const album = require('./routes/album') const album = require('./routes/album')
@ -26,14 +27,6 @@ const nojs = require('./routes/nojs')
const db = require('knex')(config.database) const db = require('knex')(config.database)
require('./database/db.js')(db) require('./database/db.js')(db)
// Check and create missing directories
fs.existsSync('./pages/custom') || fs.mkdirSync('./pages/custom')
fs.existsSync(`./${config.logsFolder}`) || fs.mkdirSync(`./${config.logsFolder}`)
fs.existsSync(`./${config.uploads.folder}`) || fs.mkdirSync(`./${config.uploads.folder}`)
fs.existsSync(`./${config.uploads.folder}/chunks`) || fs.mkdirSync(`./${config.uploads.folder}/chunks`)
fs.existsSync(`./${config.uploads.folder}/thumbs`) || fs.mkdirSync(`./${config.uploads.folder}/thumbs`)
fs.existsSync(`./${config.uploads.folder}/zips`) || fs.mkdirSync(`./${config.uploads.folder}/zips`)
safe.use(helmet()) safe.use(helmet())
if (config.trustProxy) safe.set('trust proxy', 1) if (config.trustProxy) safe.set('trust proxy', 1)
@ -57,7 +50,7 @@ if (Array.isArray(config.rateLimits) && config.rateLimits.length)
safe.use(bodyParser.urlencoded({ extended: true })) safe.use(bodyParser.urlencoded({ extended: true }))
safe.use(bodyParser.json()) safe.use(bodyParser.json())
// safe.fiery.me-exclusive cache control // Cache control (safe.fiery.me)
if (config.cacheControl) { if (config.cacheControl) {
const cacheControls = { const cacheControls = {
// max-age: 30 days // max-age: 30 days
@ -79,9 +72,9 @@ if (config.cacheControl) {
} }
if (config.serveFilesWithNode) if (config.serveFilesWithNode)
safe.use('/', express.static(config.uploads.folder, { setHeaders })) safe.use('/', express.static(paths.uploads, { setHeaders }))
safe.use('/', express.static('./public', { setHeaders })) safe.use('/', express.static(paths.public, { setHeaders }))
// Do NOT cache these dynamic routes // Do NOT cache these dynamic routes
safe.use(['/a', '/api', '/nojs'], (req, res, next) => { safe.use(['/a', '/api', '/nojs'], (req, res, next) => {
@ -102,112 +95,107 @@ if (config.cacheControl) {
}) })
} else { } else {
if (config.serveFilesWithNode) if (config.serveFilesWithNode)
safe.use('/', express.static(config.uploads.folder)) safe.use('/', express.static(paths.uploads))
safe.use('/', express.static('./public')) safe.use('/', express.static(paths.public))
} }
safe.use('/', album) safe.use('/', album)
safe.use('/', nojs) safe.use('/', nojs)
safe.use('/api', api) safe.use('/api', api)
if (!Array.isArray(config.pages) || !config.pages.length) { ;(async () => {
logger.error('Config does not haves any frontend pages enabled') try {
process.exit(1) // Verify paths, create missing ones, clean up temp ones
} await paths.init()
for (const page of config.pages) if (!Array.isArray(config.pages) || !config.pages.length) {
if (fs.existsSync(`./pages/custom/${page}.html`)) { logger.error('Config file does not have any frontend pages enabled')
safe.get(`/${page}`, (req, res, next) => res.sendFile(`${page}.html`, { process.exit(1)
root: './pages/custom/'
}))
} else if (page === 'home') {
safe.get('/', (req, res, next) => res.render('home', {
maxSize: config.uploads.maxSize,
urlMaxSize: config.uploads.urlMaxSize,
urlDisclaimerMessage: config.uploads.urlDisclaimerMessage,
urlExtensionsFilterMode: config.uploads.urlExtensionsFilterMode,
urlExtensionsFilter: config.uploads.urlExtensionsFilter,
gitHash: safe.get('git-hash')
}))
} else if (page === 'faq') {
const fileLength = config.uploads.fileLength
safe.get('/faq', (req, res, next) => res.render('faq', {
whitelist: config.extensionsFilterMode === 'whitelist',
extensionsFilter: config.extensionsFilter,
fileLength,
tooShort: (fileLength.max - fileLength.default) > (fileLength.default - fileLength.min),
noJsMaxSize: parseInt(config.cloudflare.noJsMaxSize) < parseInt(config.uploads.maxSize),
chunkSize: config.uploads.chunkSize
}))
} else {
safe.get(`/${page}`, (req, res, next) => res.render(page))
}
safe.use((req, res, next) => {
res.status(404).sendFile(config.errorPages[404], { root: config.errorPages.rootDir })
})
safe.use((error, req, res, next) => {
logger.error(error)
res.status(500).sendFile(config.errorPages[500], { root: config.errorPages.rootDir })
})
const start = async () => {
if (config.showGitHash) {
const gitHash = await new Promise((resolve, reject) => {
require('child_process').exec('git rev-parse HEAD', (error, stdout) => {
if (error) return reject(error)
resolve(stdout.replace(/\n$/, ''))
})
}).catch(logger.error)
if (!gitHash) return
logger.log(`Git commit: ${gitHash}`)
safe.set('git-hash', gitHash)
}
const scan = config.uploads.scan
if (scan && scan.enabled) {
const createScanner = async () => {
try {
if (!scan.ip || !scan.port)
throw new Error('clamd IP or port is missing')
const version = await clamd.version(scan.ip, scan.port)
logger.log(`${scan.ip}:${scan.port} ${version}`)
const scanner = clamd.createScanner(scan.ip, scan.port)
safe.set('clam-scanner', scanner)
return true
} catch (error) {
logger.error(`[ClamAV]: ${error.toString()}`)
return false
}
} }
if (!await createScanner()) return process.exit(1)
}
if (config.uploads.cacheFileIdentifiers) { for (const page of config.pages) {
// Cache tree of uploads directory const customPage = path.join(paths.customPages, `${page}.html`)
const setSize = await new Promise((resolve, reject) => { if (!await paths.access(customPage).catch(() => true))
const uploadsDir = `./${config.uploads.folder}` safe.get(`/${page === 'home' ? '' : page}`, (req, res, next) => res.sendFile(customPage))
fs.readdir(uploadsDir, (error, names) => { else if (page === 'home')
if (error) return reject(error) safe.get('/', (req, res, next) => res.render('home', {
const set = new Set() maxSize: parseInt(config.uploads.maxSize),
names.forEach(name => set.add(name.split('.')[0])) urlMaxSize: parseInt(config.uploads.urlMaxSize),
safe.set('uploads-set', set) urlDisclaimerMessage: config.uploads.urlDisclaimerMessage,
resolve(set.size) urlExtensionsFilterMode: config.uploads.urlExtensionsFilterMode,
urlExtensionsFilter: config.uploads.urlExtensionsFilter,
temporaryUploadAges: Array.isArray(config.uploads.temporaryUploadAges) &&
config.uploads.temporaryUploadAges.length,
gitHash: utils.gitHash
}))
else if (page === 'faq')
safe.get('/faq', (req, res, next) => res.render('faq', {
whitelist: config.extensionsFilterMode === 'whitelist',
extensionsFilter: config.extensionsFilter,
noJsMaxSize: parseInt(config.cloudflare.noJsMaxSize) < parseInt(config.uploads.maxSize),
chunkSize: parseInt(config.uploads.chunkSize)
}))
else
safe.get(`/${page}`, (req, res, next) => res.render(page))
}
// Error pages
safe.use((req, res, next) => {
res.status(404).sendFile(path.join(paths.errorRoot, config.errorPages[404]))
})
safe.use((error, req, res, next) => {
logger.error(error)
res.status(500).sendFile(path.join(paths.errorRoot, config.errorPages[500]))
})
// Git hash
if (config.showGitHash) {
utils.gitHash = await new Promise((resolve, reject) => {
require('child_process').exec('git rev-parse HEAD', (error, stdout) => {
if (error) return reject(error)
resolve(stdout.replace(/\n$/, ''))
})
}) })
}).catch(error => logger.error(error.toString())) logger.log(`Git commit: ${utils.gitHash}`)
if (!setSize) return process.exit(1) }
logger.log(`Cached ${setSize} identifiers in uploads directory`)
} // Clamd scanner
if (config.uploads.scan && config.uploads.scan.enabled) {
const { ip, port } = config.uploads.scan
const version = await clamd.version(ip, port)
logger.log(`${ip}:${port} ${version}`)
utils.clamd.scanner = clamd.createScanner(ip, port)
if (!utils.clamd.scanner)
throw 'Could not create clamd scanner'
}
// Cache file identifiers
if (config.uploads.cacheFileIdentifiers) {
utils.idSet = await db.table('files')
.select('name')
.then(rows => {
return new Set(rows.map(row => row.name.split('.')[0]))
})
logger.log(`Cached ${utils.idSet.size} file identifiers`)
}
// Binds Express to port
await new Promise((resolve, reject) => {
try {
safe.listen(config.port, () => resolve())
} catch (error) {
reject(error)
}
})
safe.listen(config.port, async () => {
logger.log(`lolisafe started on port ${config.port}`) logger.log(`lolisafe started on port ${config.port}`)
// safe.fiery.me-exclusive cache control // Cache control (safe.fiery.me)
if (config.cacheControl) { if (config.cacheControl) {
logger.log('Cache control enabled') logger.log('Cache control enabled, purging...')
const routes = config.pages.concat(['api/check']) const routes = config.pages.concat(['api/check'])
const results = await utils.purgeCloudflareCache(routes) const results = await utils.purgeCloudflareCache(routes)
let errored = false let errored = false
@ -224,6 +212,32 @@ const start = async () => {
logger.log(`Purged ${succeeded} Cloudflare's cache`) logger.log(`Purged ${succeeded} Cloudflare's cache`)
} }
// Temporary uploads
if (Array.isArray(config.uploads.temporaryUploadAges) && config.uploads.temporaryUploadAges.length) {
let temporaryUploadsInProgress = false
const temporaryUploadCheck = async () => {
if (temporaryUploadsInProgress)
return
temporaryUploadsInProgress = true
const result = await utils.bulkDeleteExpired()
if (result.expired.length) {
let logMessage = `Deleted ${result.expired.length} expired upload(s)`
if (result.failed.length)
logMessage += ` but unable to delete ${result.failed.length}`
logger.log(logMessage)
}
temporaryUploadsInProgress = false
}
temporaryUploadCheck()
if (config.uploads.temporaryUploadsInterval)
setInterval(temporaryUploadCheck, config.uploads.temporaryUploadsInterval)
}
// NODE_ENV=development yarn start // NODE_ENV=development yarn start
if (process.env.NODE_ENV === 'development') { if (process.env.NODE_ENV === 'development') {
// Add readline interface to allow evaluating arbitrary JavaScript from console // Add readline interface to allow evaluating arbitrary JavaScript from console
@ -242,9 +256,10 @@ const start = async () => {
}).on('SIGINT', () => { }).on('SIGINT', () => {
process.exit(0) process.exit(0)
}) })
logger.log('Development mode enabled (disabled Nunjucks caching & enabled readline interface)') logger.log('Development mode (disabled nunjucks caching & enabled readline interface)')
} }
}) } catch (error) {
} logger.error(error)
process.exit(1)
start() }
})()

View File

@ -18,8 +18,9 @@
"start": "node ./lolisafe.js", "start": "node ./lolisafe.js",
"startdev": "env NODE_ENV=development node ./lolisafe.js", "startdev": "env NODE_ENV=development node ./lolisafe.js",
"pm2": "pm2 start --name safe ./lolisafe.js", "pm2": "pm2 start --name safe ./lolisafe.js",
"cf-purge": "node ./scripts/cf-purge.js",
"delete-expired": "node ./scripts/delete-expired.js",
"thumbs": "node ./scripts/thumbs.js", "thumbs": "node ./scripts/thumbs.js",
"cfpurge": "node ./scripts/cfpurge.js",
"pull": "git stash; git pull; yarn install --production; git stash pop; echo OK." "pull": "git stash; git pull; yarn install --production; git stash pop; echo OK."
}, },
"dependencies": { "dependencies": {
@ -29,23 +30,23 @@
"express": "^4.17.1", "express": "^4.17.1",
"express-rate-limit": "^5.0.0", "express-rate-limit": "^5.0.0",
"fluent-ffmpeg": "^2.1.2", "fluent-ffmpeg": "^2.1.2",
"helmet": "^3.20.1", "helmet": "^3.21.0",
"jszip": "^3.2.2", "jszip": "^3.2.2",
"knex": "^0.19.3", "knex": "^0.19.3",
"multer": "^1.4.2", "multer": "^1.4.2",
"node-fetch": "^2.6.0", "node-fetch": "^2.6.0",
"nunjucks": "^3.2.0", "nunjucks": "^3.2.0",
"os": "^0.1.1",
"randomstring": "^1.1.5", "randomstring": "^1.1.5",
"readline": "^1.3.0", "readline": "^1.3.0",
"sharp": "^0.23.0", "sharp": "^0.23.0",
"sqlite3": "^4.1.0" "sqlite3": "^4.1.0",
"systeminformation": "^4.14.8"
}, },
"devDependencies": { "devDependencies": {
"eslint": "^6.3.0", "eslint": "^6.3.0",
"eslint-config-standard": "^14.1.0", "eslint-config-standard": "^14.1.0",
"eslint-plugin-import": "^2.18.2", "eslint-plugin-import": "^2.18.2",
"eslint-plugin-node": "^9.2.0", "eslint-plugin-node": "^10.0.0",
"eslint-plugin-promise": "^4.2.1", "eslint-plugin-promise": "^4.2.1",
"eslint-plugin-standard": "^4.0.1" "eslint-plugin-standard": "^4.0.1"
} }

View File

@ -64,15 +64,18 @@
-webkit-transform: scale(0.86); -webkit-transform: scale(0.86);
transform: scale(0.86); transform: scale(0.86);
} }
25% { 25% {
opacity: 100; opacity: 100;
} }
67% { 67% {
-webkit-box-shadow: 0 0 0 rgba(10, 10, 10, 0), 0 5px 10px rgba(10, 10, 10, 0.1), 0 1px 1px rgba(10, 10, 10, 0.2); -webkit-box-shadow: 0 0 0 rgba(10, 10, 10, 0), 0 5px 10px rgba(10, 10, 10, 0.1), 0 1px 1px rgba(10, 10, 10, 0.2);
box-shadow: 0 0 0 rgba(10, 10, 10, 0), 0 5px 10px rgba(10, 10, 10, 0.1), 0 1px 1px rgba(10, 10, 10, 0.2); box-shadow: 0 0 0 rgba(10, 10, 10, 0), 0 5px 10px rgba(10, 10, 10, 0.1), 0 1px 1px rgba(10, 10, 10, 0.2);
-webkit-transform: scale(1); -webkit-transform: scale(1);
transform: scale(1); transform: scale(1);
} }
100% { 100% {
-webkit-box-shadow: 0 20px 60px rgba(10, 10, 10, 0.05), 0 5px 10px rgba(10, 10, 10, 0.1), 0 1px 1px rgba(10, 10, 10, 0.2); -webkit-box-shadow: 0 20px 60px rgba(10, 10, 10, 0.05), 0 5px 10px rgba(10, 10, 10, 0.1), 0 1px 1px rgba(10, 10, 10, 0.2);
box-shadow: 0 20px 60px rgba(10, 10, 10, 0.05), 0 5px 10px rgba(10, 10, 10, 0.1), 0 1px 1px rgba(10, 10, 10, 0.2); box-shadow: 0 20px 60px rgba(10, 10, 10, 0.05), 0 5px 10px rgba(10, 10, 10, 0.1), 0 1px 1px rgba(10, 10, 10, 0.2);
@ -89,15 +92,18 @@
-webkit-transform: scale(0.86); -webkit-transform: scale(0.86);
transform: scale(0.86); transform: scale(0.86);
} }
25% { 25% {
opacity: 100; opacity: 100;
} }
67% { 67% {
-webkit-box-shadow: 0 0 0 rgba(10, 10, 10, 0), 0 5px 10px rgba(10, 10, 10, 0.1), 0 1px 1px rgba(10, 10, 10, 0.2); -webkit-box-shadow: 0 0 0 rgba(10, 10, 10, 0), 0 5px 10px rgba(10, 10, 10, 0.1), 0 1px 1px rgba(10, 10, 10, 0.2);
box-shadow: 0 0 0 rgba(10, 10, 10, 0), 0 5px 10px rgba(10, 10, 10, 0.1), 0 1px 1px rgba(10, 10, 10, 0.2); box-shadow: 0 0 0 rgba(10, 10, 10, 0), 0 5px 10px rgba(10, 10, 10, 0.1), 0 1px 1px rgba(10, 10, 10, 0.2);
-webkit-transform: scale(1); -webkit-transform: scale(1);
transform: scale(1); transform: scale(1);
} }
100% { 100% {
-webkit-box-shadow: 0 20px 60px rgba(10, 10, 10, 0.05), 0 5px 10px rgba(10, 10, 10, 0.1), 0 1px 1px rgba(10, 10, 10, 0.2); -webkit-box-shadow: 0 20px 60px rgba(10, 10, 10, 0.05), 0 5px 10px rgba(10, 10, 10, 0.1), 0 1px 1px rgba(10, 10, 10, 0.2);
box-shadow: 0 20px 60px rgba(10, 10, 10, 0.05), 0 5px 10px rgba(10, 10, 10, 0.1), 0 1px 1px rgba(10, 10, 10, 0.2); box-shadow: 0 20px 60px rgba(10, 10, 10, 0.05), 0 5px 10px rgba(10, 10, 10, 0.1), 0 1px 1px rgba(10, 10, 10, 0.2);

View File

@ -13,6 +13,7 @@ body {
0% { 0% {
opacity: 0; opacity: 0;
} }
100% { 100% {
opacity: 1; opacity: 1;
} }
@ -22,6 +23,7 @@ body {
0% { 0% {
opacity: 0; opacity: 0;
} }
100% { 100% {
opacity: 1; opacity: 1;
} }
@ -39,6 +41,12 @@ hr {
background-color: #898b8d; background-color: #898b8d;
} }
code,
.message-body code {
background-color: #222528;
border-radius: 5px;
}
.title { .title {
color: #eff0f1; color: #eff0f1;
} }
@ -127,7 +135,8 @@ hr {
} }
.progress.is-breeze:indeterminate { .progress.is-breeze:indeterminate {
background-image: linear-gradient(to right,#60a8dc 30%,#eff0f1 30%); background-image: -webkit-gradient(linear, left top, right top, color-stop(30%, #60a8dc), color-stop(30%, #eff0f1));
background-image: linear-gradient(to right, #60a8dc 30%, #eff0f1 30%);
} }
.message { .message {

View File

@ -31,13 +31,6 @@
color: #bdc3c7; color: #bdc3c7;
} }
.swal-content .is-code {
font-family: 'Courier New', Courier, monospace;
border: 1px dashed #eff0f1;
border-radius: 5px;
margin-top: 5px;
}
.swal-button { .swal-button {
background-color: #3794d2; background-color: #3794d2;
color: #eff0f1; color: #eff0f1;
@ -94,6 +87,7 @@
0% { 0% {
border-color: #ffaa60; border-color: #ffaa60;
} }
to { to {
border-color: #f67400; border-color: #f67400;
} }
@ -103,6 +97,7 @@
0% { 0% {
border-color: #ffaa60; border-color: #ffaa60;
} }
to { to {
border-color: #f67400; border-color: #f67400;
} }
@ -112,6 +107,7 @@
0% { 0% {
background-color: #ffaa60; background-color: #ffaa60;
} }
to { to {
background-color: #f67400; background-color: #f67400;
} }
@ -121,6 +117,7 @@
0% { 0% {
background-color: #ffaa60; background-color: #ffaa60;
} }
to { to {
background-color: #f67400; background-color: #f67400;
} }

View File

@ -14,18 +14,17 @@ const page = {
} }
page.do = function (dest) { page.do = function (dest) {
const user = page.user.value const user = page.user.value.trim()
const pass = page.pass.value
if (!user) if (!user)
return swal('An error occurred!', 'You need to specify a username.', 'error') return swal('An error occurred!', 'You need to specify a username.', 'error')
const pass = page.pass.value.trim()
if (!pass) if (!pass)
return swal('An error occurred!', 'You need to specify a password.', 'error') return swal('An error occurred!', 'You need to specify a password.', 'error')
axios.post(`api/${dest}`, { axios.post(`api/${dest}`, {
username: user.trim(), username: user,
password: pass.trim() password: pass
}).then(function (response) { }).then(function (response) {
if (response.data.success === false) if (response.data.success === false)
return swal(`Unable to ${dest}!`, response.data.description, 'error') return swal(`Unable to ${dest}!`, response.data.description, 'error')
@ -49,7 +48,7 @@ page.verify = function () {
window.location = 'dashboard' window.location = 'dashboard'
}).catch(function (error) { }).catch(function (error) {
console.log(error) console.error(error)
const description = error.response.data && error.response.data.description const description = error.response.data && error.response.data.description
? error.response.data.description ? error.response.data.description
: 'There was an error with the request, please check the console for more information.' : 'There was an error with the request, please check the console for more information.'

View File

@ -108,7 +108,7 @@ page.verifyToken = function (token, reloadOnError) {
page.permissions = response.data.permissions page.permissions = response.data.permissions
page.prepareDashboard() page.prepareDashboard()
}).catch(function (error) { }).catch(function (error) {
console.log(error) console.error(error)
return swal('An error occurred!', 'There was an error with the request, please check the console for more information.', 'error') return swal('An error occurred!', 'There was an error with the request, please check the console for more information.', 'error')
}) })
} }
@ -167,11 +167,6 @@ page.prepareDashboard = function () {
page.getAlbums() page.getAlbums()
}) })
document.querySelector('#itemFileLength').addEventListener('click', function () {
page.setActiveMenu(this)
page.changeFileLength()
})
document.querySelector('#itemTokens').addEventListener('click', function () { document.querySelector('#itemTokens').addEventListener('click', function () {
page.setActiveMenu(this) page.setActiveMenu(this)
page.changeToken() page.changeToken()
@ -216,9 +211,11 @@ page.domClick = function (event) {
let element = event.target let element = event.target
if (!element) return if (!element) return
// If the clicked element is an icon, delegate event to its A parent; hacky // Delegate click events to their A or BUTTON parents
if (element.tagName === 'I' && element.parentNode.tagName === 'SPAN') element = element.parentNode if (['I'].includes(element.tagName) && ['SPAN'].includes(element.parentNode.tagName))
if (element.tagName === 'SPAN' && element.parentNode.tagName === 'A') element = element.parentNode element = element.parentNode
if (['SPAN'].includes(element.tagName) && ['A', 'BUTTON'].includes(element.parentNode.tagName))
element = element.parentNode
// Skip elements that have no action data // Skip elements that have no action data
if (!element.dataset || !element.dataset.action) return if (!element.dataset || !element.dataset.action) return
@ -325,8 +322,10 @@ page.switchPage = function (action, element) {
views.pageNum = parseInt(element.dataset.goto) views.pageNum = parseInt(element.dataset.goto)
return func(views, element) return func(views, element)
case 'jump-to-page': { case 'jump-to-page': {
const jumpToPage = parseInt(document.querySelector('#jumpToPage').value) const jumpToPage = document.querySelector('#jumpToPage')
views.pageNum = isNaN(jumpToPage) ? 0 : (jumpToPage - 1) if (!jumpToPage.checkValidity()) return
const parsed = parseInt(jumpToPage.value)
views.pageNum = isNaN(parsed) ? 0 : (parsed - 1)
if (views.pageNum < 0) views.pageNum = 0 if (views.pageNum < 0) views.pageNum = 0
return func(views, element) return func(views, element)
} }
@ -340,7 +339,7 @@ page.focusJumpToPage = function () {
element.select() element.select()
} }
page.getUploads = function ({ pageNum, album, all, filters } = {}, element) { page.getUploads = function ({ pageNum, album, all, filters, autoPage } = {}, element) {
if (element) page.isLoading(element, true) if (element) page.isLoading(element, true)
if ((all || filters) && !page.permissions.moderator) if ((all || filters) && !page.permissions.moderator)
@ -368,7 +367,15 @@ page.getUploads = function ({ pageNum, album, all, filters } = {}, element) {
const files = response.data.files const files = response.data.files
if (pageNum && (files.length === 0)) { if (pageNum && (files.length === 0)) {
if (element) page.isLoading(element, false) if (element) page.isLoading(element, false)
return swal('An error occurred!', `There are no more uploads to populate page ${pageNum + 1}.`, 'error') if (autoPage)
return page.getUploads({
pageNum: Math.ceil(response.data.count / 25) - 1,
album,
all,
filters
}, element)
else
return swal('An error occurred!', `There are no more uploads to populate page ${pageNum + 1}.`, 'error')
} }
page.currentView = all ? 'uploadsAll' : 'uploads' page.currentView = all ? 'uploadsAll' : 'uploads'
@ -413,7 +420,7 @@ page.getUploads = function ({ pageNum, album, all, filters } = {}, element) {
<form class="prevent-default"> <form class="prevent-default">
<div class="field has-addons"> <div class="field has-addons">
<div class="control is-expanded"> <div class="control is-expanded">
<input id="jumpToPage" class="input is-small" type="text" value="${pageNum + 1}"> <input id="jumpToPage" class="input is-small" type="number" value="${pageNum + 1}">
</div> </div>
<div class="control"> <div class="control">
<button type="submit" class="button is-small is-breeze" title="Jump to page" data-action="jump-to-page"> <button type="submit" class="button is-small is-breeze" title="Jump to page" data-action="jump-to-page">
@ -468,25 +475,43 @@ page.getUploads = function ({ pageNum, album, all, filters } = {}, element) {
// Set to true to tick "all files" checkbox in list view // Set to true to tick "all files" checkbox in list view
let allSelected = true let allSelected = true
const hasExpiryDateColumn = files.some(file => file.expirydate !== undefined)
for (let i = 0; i < files.length; i++) { for (let i = 0; i < files.length; i++) {
// Build full URLs // Build full URLs
files[i].file = `${basedomain}/${files[i].name}` files[i].file = `${basedomain}/${files[i].name}`
if (files[i].thumb) files[i].thumb = `${basedomain}/${files[i].thumb}` if (files[i].thumb)
files[i].thumb = `${basedomain}/${files[i].thumb}`
// Cache bare minimum data for thumbnails viewer // Cache bare minimum data for thumbnails viewer
page.cache.uploads[files[i].id] = { page.cache.uploads[files[i].id] = {
name: files[i].name, name: files[i].name,
thumb: files[i].thumb, thumb: files[i].thumb,
original: files[i].file original: files[i].file
} }
// Prettify // Prettify
files[i].prettyBytes = page.getPrettyBytes(parseInt(files[i].size)) files[i].prettyBytes = page.getPrettyBytes(parseInt(files[i].size))
files[i].prettyDate = page.getPrettyDate(new Date(files[i].timestamp * 1000)) files[i].prettyDate = page.getPrettyDate(new Date(files[i].timestamp * 1000))
if (hasExpiryDateColumn)
files[i].prettyExpiryDate = files[i].expirydate
? page.getPrettyDate(new Date(files[i].expirydate * 1000))
: '-'
// Update selected status // Update selected status
files[i].selected = page.selected[page.currentView].includes(files[i].id) files[i].selected = page.selected[page.currentView].includes(files[i].id)
if (allSelected && !files[i].selected) allSelected = false if (allSelected && !files[i].selected) allSelected = false
// Appendix (display album or user) // Appendix (display album or user)
if (all) files[i].appendix = files[i].userid ? users[files[i].userid] : '' if (all)
else files[i].appendix = files[i].albumid ? albums[files[i].albumid] : '' files[i].appendix = files[i].userid
? users[files[i].userid] || ''
: ''
else
files[i].appendix = files[i].albumid
? albums[files[i].albumid] || ''
: ''
} }
if (page.views[page.currentView].type === 'thumbs') { if (page.views[page.currentView].type === 'thumbs') {
@ -499,7 +524,6 @@ page.getUploads = function ({ pageNum, album, all, filters } = {}, element) {
<hr> <hr>
${pagination} ${pagination}
` `
page.fadeAndScroll()
const table = document.querySelector('#table') const table = document.querySelector('#table')
@ -508,6 +532,7 @@ page.getUploads = function ({ pageNum, album, all, filters } = {}, element) {
const div = document.createElement('div') const div = document.createElement('div')
div.className = 'image-container column is-narrow' div.className = 'image-container column is-narrow'
div.dataset.id = upload.id div.dataset.id = upload.id
if (upload.thumb !== undefined) if (upload.thumb !== undefined)
div.innerHTML = `<a class="image" href="${upload.file}" target="_blank" rel="noopener"><img alt="${upload.name}" data-src="${upload.thumb}"/></a>` div.innerHTML = `<a class="image" href="${upload.file}" target="_blank" rel="noopener"><img alt="${upload.name}" data-src="${upload.thumb}"/></a>`
else else
@ -557,11 +582,12 @@ page.getUploads = function ({ pageNum, album, all, filters } = {}, element) {
<thead> <thead>
<tr> <tr>
<th><input id="selectAll" class="checkbox" type="checkbox" title="Select all uploads" data-action="select-all"></th> <th><input id="selectAll" class="checkbox" type="checkbox" title="Select all uploads" data-action="select-all"></th>
<th style="width: 25%">File</th> <th style="width: 20%">File</th>
<th>${all ? 'User' : 'Album'}</th> <th>${all ? 'User' : 'Album'}</th>
<th>Size</th> <th>Size</th>
${all ? '<th>IP</th>' : ''} ${all ? '<th>IP</th>' : ''}
<th>Date</th> <th>Date</th>
${hasExpiryDateColumn ? '<th>Expiry date</th>' : ''}
<th></th> <th></th>
</tr> </tr>
</thead> </thead>
@ -572,7 +598,6 @@ page.getUploads = function ({ pageNum, album, all, filters } = {}, element) {
<hr> <hr>
${pagination} ${pagination}
` `
page.fadeAndScroll()
const table = document.querySelector('#table') const table = document.querySelector('#table')
@ -587,6 +612,7 @@ page.getUploads = function ({ pageNum, album, all, filters } = {}, element) {
<td>${upload.prettyBytes}</td> <td>${upload.prettyBytes}</td>
${all ? `<td>${upload.ip || ''}</td>` : ''} ${all ? `<td>${upload.ip || ''}</td>` : ''}
<td>${upload.prettyDate}</td> <td>${upload.prettyDate}</td>
${hasExpiryDateColumn ? `<td>${upload.prettyExpiryDate}</td>` : ''}
<td class="controls" style="text-align: right"> <td class="controls" style="text-align: right">
<a class="button is-small is-primary" title="View thumbnail" data-action="display-thumbnail"${upload.thumb ? '' : ' disabled'}> <a class="button is-small is-primary" title="View thumbnail" data-action="display-thumbnail"${upload.thumb ? '' : ' disabled'}>
<span class="icon"> <span class="icon">
@ -616,6 +642,7 @@ page.getUploads = function ({ pageNum, album, all, filters } = {}, element) {
page.checkboxes[page.currentView] = Array.from(table.querySelectorAll('.checkbox[data-action="select"]')) page.checkboxes[page.currentView] = Array.from(table.querySelectorAll('.checkbox[data-action="select"]'))
} }
} }
page.fadeAndScroll()
if (allSelected && files.length) { if (allSelected && files.length) {
const selectAll = document.querySelector('#selectAll') const selectAll = document.querySelector('#selectAll')
@ -627,7 +654,7 @@ page.getUploads = function ({ pageNum, album, all, filters } = {}, element) {
page.views[page.currentView].pageNum = files.length ? pageNum : 0 page.views[page.currentView].pageNum = files.length ? pageNum : 0
}).catch(function (error) { }).catch(function (error) {
if (element) page.isLoading(element, false) if (element) page.isLoading(element, false)
console.log(error) console.error(error)
return swal('An error occurred!', 'There was an error with the request, please check the console for more information.', 'error') return swal('An error occurred!', 'There was an error with the request, please check the console for more information.', 'error')
}) })
} }
@ -789,8 +816,8 @@ page.clearSelection = function () {
if (checkboxes[i].checked) if (checkboxes[i].checked)
checkboxes[i].checked = false checkboxes[i].checked = false
localStorage[lsKeys.selected[page.currentView]] = '[]'
page.selected[page.currentView] = [] page.selected[page.currentView] = []
delete localStorage[lsKeys.selected[page.currentView]]
const selectAll = document.querySelector('#selectAll') const selectAll = document.querySelector('#selectAll')
if (selectAll) selectAll.checked = false if (selectAll) selectAll.checked = false
@ -815,16 +842,16 @@ page.filtersHelp = function (element) {
Examples: Examples:
Uploads from user with username "demo": Uploads from user with username "demo":
<span class="is-code">user:demo</span> <code>user:demo</code>
Uploads from users with username either "John Doe" OR "demo": Uploads from users with username either "John Doe" OR "demo":
<span class="is-code">user:John\\ Doe user:demo</span> <code>user:John\\ Doe user:demo</code>
Uploads from IP "127.0.0.1" AND which file names match "*.rar" OR "*.zip": Uploads from IP "127.0.0.1" AND which file names match "*.rar" OR "*.zip":
<span class="is-code">ip:127.0.0.1 name:*.rar name:*.zip</span> <code>ip:127.0.0.1 name:*.rar name:*.zip</code>
Uploads from user with username "test" OR from non-registered users: Uploads from user with username "test" OR from non-registered users:
<span class="is-code">user:test -user</span> <code>user:test -user</code>
`.trim().replace(/^ {6}/gm, '').replace(/\n/g, '<br>') `.trim().replace(/^ {6}/gm, '').replace(/\n/g, '<br>')
swal({ content }) swal({ content })
} }
@ -869,9 +896,12 @@ page.deleteFile = function (id) {
} }
swal('Deleted!', 'The file has been deleted.', 'success') swal('Deleted!', 'The file has been deleted.', 'success')
page.getUploads(page.views[page.currentView])
const views = Object.assign({}, page.views[page.currentView])
views.autoPage = true
page.getUploads(views)
}).catch(function (error) { }).catch(function (error) {
console.log(error) console.error(error)
return swal('An error occurred!', 'There was an error with the request, please check the console for more information.', 'error') return swal('An error occurred!', 'There was an error with the request, please check the console for more information.', 'error')
}) })
}) })
@ -915,22 +945,23 @@ page.deleteSelectedFiles = function () {
return swal('An error occurred!', bulkdelete.data.description, 'error') return swal('An error occurred!', bulkdelete.data.description, 'error')
} }
let deleted = count if (Array.isArray(bulkdelete.data.failed) && bulkdelete.data.failed.length) {
if (bulkdelete.data.failed && bulkdelete.data.failed.length) {
deleted -= bulkdelete.data.failed.length
page.selected[page.currentView] = page.selected[page.currentView].filter(function (id) { page.selected[page.currentView] = page.selected[page.currentView].filter(function (id) {
return bulkdelete.data.failed.includes(id) return bulkdelete.data.failed.includes(id)
}) })
localStorage[lsKeys.selected[page.currentView]] = JSON.stringify(page.selected[page.currentView])
swal('An error ocurrred!', `From ${count} ${suffix}, unable to delete ${bulkdelete.data.failed.length} of them.`, 'error')
} else { } else {
page.selected[page.currentView] = [] page.selected[page.currentView] = []
delete localStorage[lsKeys.selected[page.currentView]]
swal('Deleted!', `${count} ${suffix} ${count === 1 ? 'has' : 'have'} been deleted.`, 'success')
} }
localStorage[lsKeys.selected[page.currentView]] = JSON.stringify(page.selected[page.currentView]) const views = Object.assign({}, page.views[page.currentView])
views.autoPage = true
swal('Deleted!', `${deleted} file${deleted === 1 ? ' has' : 's have'} been deleted.`, 'success') page.getUploads(views)
return page.getUploads(page.views[page.currentView])
}).catch(function (error) { }).catch(function (error) {
console.log(error) console.error(error)
swal('An error occurred!', 'There was an error with the request, please check the console for more information.', 'error') swal('An error occurred!', 'There was an error with the request, please check the console for more information.', 'error')
}) })
}) })
@ -1003,14 +1034,21 @@ page.deleteFileByNames = function () {
return swal('An error occurred!', bulkdelete.data.description, 'error') return swal('An error occurred!', bulkdelete.data.description, 'error')
} }
let deleted = count if (Array.isArray(bulkdelete.data.failed) && bulkdelete.data.failed.length) {
if (bulkdelete.data.failed && bulkdelete.data.failed.length) page.selected[page.currentView] = page.selected[page.currentView].filter(function (id) {
deleted -= bulkdelete.data.failed.length return bulkdelete.data.failed.includes(id)
})
localStorage[lsKeys.selected[page.currentView]] = JSON.stringify(page.selected[page.currentView])
swal('An error ocurrred!', `From ${count} ${suffix}, unable to delete ${bulkdelete.data.failed.length} of them.`, 'error')
} else {
page.selected[page.currentView] = []
delete localStorage[lsKeys.selected[page.currentView]]
swal('Deleted!', `${count} ${suffix} ${count === 1 ? 'has' : 'have'} been deleted.`, 'success')
}
document.querySelector('#names').value = bulkdelete.data.failed.join('\n') document.querySelector('#names').value = bulkdelete.data.failed.join('\n')
swal('Deleted!', `${deleted} file${deleted === 1 ? ' has' : 's have'} been deleted.`, 'success')
}).catch(function (error) { }).catch(function (error) {
console.log(error) console.error(error)
swal('An error occurred!', 'There was an error with the request, please check the console for more information.', 'error') swal('An error occurred!', 'There was an error with the request, please check the console for more information.', 'error')
}) })
}) })
@ -1107,13 +1145,13 @@ page.addFilesToAlbum = function (ids, callback) {
return swal('An error occurred!', `Could not add the ${suffix} to the album.`, 'error') return swal('An error occurred!', `Could not add the ${suffix} to the album.`, 'error')
swal('Woohoo!', `Successfully ${albumid < 0 ? 'removed' : 'added'} ${added} ${suffix} ${albumid < 0 ? 'from' : 'to'} the album.`, 'success') swal('Woohoo!', `Successfully ${albumid < 0 ? 'removed' : 'added'} ${added} ${suffix} ${albumid < 0 ? 'from' : 'to'} the album.`, 'success')
return callback(add.data.failed) callback(add.data.failed)
}).catch(function (error) { }).catch(function (error) {
console.log(error) console.error(error)
return swal('An error occurred!', 'There was an error with the request, please check the console for more information.', 'error') return swal('An error occurred!', 'There was an error with the request, please check the console for more information.', 'error')
}) })
}).catch(function (error) { }).catch(function (error) {
console.log(error) console.error(error)
return swal('An error occurred!', 'There was an error with the request, please check the console for more information.', 'error') return swal('An error occurred!', 'There was an error with the request, please check the console for more information.', 'error')
}) })
@ -1139,7 +1177,7 @@ page.addFilesToAlbum = function (ids, callback) {
select.getElementsByTagName('option')[1].innerHTML = 'Choose an album' select.getElementsByTagName('option')[1].innerHTML = 'Choose an album'
select.removeAttribute('disabled') select.removeAttribute('disabled')
}).catch(function (error) { }).catch(function (error) {
console.log(error) console.error(error)
return swal('An error occurred!', 'There was an error with the request, please check the console for more information.', 'error') return swal('An error occurred!', 'There was an error with the request, please check the console for more information.', 'error')
}) })
} }
@ -1200,7 +1238,6 @@ page.getAlbums = function () {
</table> </table>
</div> </div>
` `
page.fadeAndScroll()
const homeDomain = response.data.homeDomain const homeDomain = response.data.homeDomain
const table = document.querySelector('#table') const table = document.querySelector('#table')
@ -1252,8 +1289,9 @@ page.getAlbums = function () {
table.appendChild(tr) table.appendChild(tr)
} }
page.fadeAndScroll()
}).catch(function (error) { }).catch(function (error) {
console.log(error) console.error(error)
return swal('An error occurred!', 'There was an error with the request, please check the console for more information.', 'error') return swal('An error occurred!', 'There was an error with the request, please check the console for more information.', 'error')
}) })
} }
@ -1315,8 +1353,8 @@ page.editAlbum = function (id) {
axios.post('api/albums/edit', { axios.post('api/albums/edit', {
id, id,
name: document.querySelector('#swalName').value, name: document.querySelector('#swalName').value.trim(),
description: document.querySelector('#swalDescription').value, description: document.querySelector('#swalDescription').value.trim(),
download: document.querySelector('#swalDownload').checked, download: document.querySelector('#swalDownload').checked,
public: document.querySelector('#swalPublic').checked, public: document.querySelector('#swalPublic').checked,
requestLink: document.querySelector('#swalRequestLink').checked requestLink: document.querySelector('#swalRequestLink').checked
@ -1340,7 +1378,7 @@ page.editAlbum = function (id) {
page.getAlbumsSidebar() page.getAlbumsSidebar()
page.getAlbums() page.getAlbums()
}).catch(function (error) { }).catch(function (error) {
console.log(error) console.error(error)
return swal('An error occurred!', 'There was an error with the request, please check the console for more information.', 'error') return swal('An error occurred!', 'There was an error with the request, please check the console for more information.', 'error')
}) })
}) })
@ -1375,6 +1413,8 @@ page.deleteAlbum = function (id) {
if (response.data.success === false) if (response.data.success === false)
if (response.data.description === 'No token provided') { if (response.data.description === 'No token provided') {
return page.verifyToken(page.token) return page.verifyToken(page.token)
} else if (Array.isArray(response.data.failed) && response.data.failed.length) {
return swal('An error occurred!', 'Unable to delete ', 'error')
} else { } else {
return swal('An error occurred!', response.data.description, 'error') return swal('An error occurred!', response.data.description, 'error')
} }
@ -1383,7 +1423,7 @@ page.deleteAlbum = function (id) {
page.getAlbumsSidebar() page.getAlbumsSidebar()
page.getAlbums() page.getAlbums()
}).catch(function (error) { }).catch(function (error) {
console.log(error) console.error(error)
return swal('An error occurred!', 'There was an error with the request, please check the console for more information.', 'error') return swal('An error occurred!', 'There was an error with the request, please check the console for more information.', 'error')
}) })
}) })
@ -1411,7 +1451,7 @@ page.submitAlbum = function (element) {
page.getAlbumsSidebar() page.getAlbumsSidebar()
page.getAlbums() page.getAlbums()
}).catch(function (error) { }).catch(function (error) {
console.log(error) console.error(error)
page.isLoading(element, false) page.isLoading(element, false)
return swal('An error occurred!', 'There was an error with the request, please check the console for more information.', 'error') return swal('An error occurred!', 'There was an error with the request, please check the console for more information.', 'error')
}) })
@ -1448,7 +1488,7 @@ page.getAlbumsSidebar = function () {
albumsContainer.appendChild(li) albumsContainer.appendChild(li)
} }
}).catch(function (error) { }).catch(function (error) {
console.log(error) console.error(error)
return swal('An error occurred!', 'There was an error with the request, please check the console for more information.', 'error') return swal('An error occurred!', 'There was an error with the request, please check the console for more information.', 'error')
}) })
} }
@ -1458,82 +1498,6 @@ page.getAlbum = function (album) {
page.getUploads({ album: album.id }) page.getUploads({ album: album.id })
} }
page.changeFileLength = function () {
axios.get('api/filelength/config').then(function (response) {
if (response.data.success === false)
if (response.data.description === 'No token provided') {
return page.verifyToken(page.token)
} else {
return swal('An error occurred!', response.data.description, 'error')
}
// Shorter vars
const { max, min } = response.data.config
const [chg, def] = [response.data.config.userChangeable, response.data.config.default]
const len = response.data.fileLength
page.dom.innerHTML = `
<h2 class="subtitle">File name length</h2>
<form class="prevent-default">
<div class="field">
<div class="field">
<label class="label">Your current file name length:</label>
<div class="control">
<input id="fileLength" class="input" type="text" placeholder="Your file length" value="${len ? Math.min(Math.max(len, min), max) : def}">
</div>
<p class="help">Default file name length is <b>${def}</b> characters. ${(chg ? `Range allowed for user is <b>${min}</b> to <b>${max}</b> characters.` : 'Changing file name length is disabled at the moment.')}</p>
</div>
<div class="field">
<div class="control">
<button type="submit" id="setFileLength" class="button is-breeze is-fullwidth">
<span class="icon">
<i class="icon-paper-plane-empty"></i>
</span>
<span>Set file name length</span>
</button>
</div>
<div>
</div>
</form>
`
page.fadeAndScroll()
document.querySelector('#setFileLength').addEventListener('click', function () {
page.setFileLength(document.querySelector('#fileLength').value, this)
})
}).catch(function (error) {
console.log(error)
return swal('An error occurred!', 'There was an error with the request, please check the console for more information.', 'error')
})
}
page.setFileLength = function (fileLength, element) {
page.isLoading(element, true)
axios.post('api/filelength/change', { fileLength }).then(function (response) {
page.isLoading(element, false)
if (response.data.success === false)
if (response.data.description === 'No token provided') {
return page.verifyToken(page.token)
} else {
return swal('An error occurred!', response.data.description, 'error')
}
swal({
title: 'Woohoo!',
text: 'Your file length was successfully changed.',
icon: 'success'
}).then(function () {
page.changeFileLength()
})
}).catch(function (error) {
console.log(error)
page.isLoading(element, false)
return swal('An error occurred!', 'There was an error with the request, please check the console for more information.', 'error')
})
}
page.changeToken = function () { page.changeToken = function () {
axios.get('api/tokens').then(function (response) { axios.get('api/tokens').then(function (response) {
if (response.data.success === false) if (response.data.success === false)
@ -1566,7 +1530,7 @@ page.changeToken = function () {
` `
page.fadeAndScroll() page.fadeAndScroll()
}).catch(function (error) { }).catch(function (error) {
console.log(error) console.error(error)
return swal('An error occurred!', 'There was an error with the request, please check the console for more information.', 'error') return swal('An error occurred!', 'There was an error with the request, please check the console for more information.', 'error')
}) })
} }
@ -1595,7 +1559,7 @@ page.getNewToken = function (element) {
page.changeToken() page.changeToken()
}) })
}).catch(function (error) { }).catch(function (error) {
console.log(error) console.error(error)
page.isLoading(element, false) page.isLoading(element, false)
return swal('An error occurred!', 'There was an error with the request, please check the console for more information.', 'error') return swal('An error occurred!', 'There was an error with the request, please check the console for more information.', 'error')
}) })
@ -1608,13 +1572,13 @@ page.changePassword = function () {
<div class="field"> <div class="field">
<label class="label">New password:</label> <label class="label">New password:</label>
<div class="control"> <div class="control">
<input id="password" class="input" type="password"> <input id="password" class="input" type="password" min="6" max="64">
</div> </div>
</div> </div>
<div class="field"> <div class="field">
<label class="label">Re-type new password:</label> <label class="label">Re-type new password:</label>
<div class="control"> <div class="control">
<input id="passwordConfirm" class="input" type="password"> <input id="passwordConfirm" class="input" type="password" min="6" max="64">
</div> </div>
</div> </div>
<div class="field"> <div class="field">
@ -1664,7 +1628,7 @@ page.sendNewPassword = function (pass, element) {
page.changePassword() page.changePassword()
}) })
}).catch(function (error) { }).catch(function (error) {
console.log(error) console.error(error)
page.isLoading(element, false) page.isLoading(element, false)
return swal('An error occurred!', 'There was an error with the request, please check the console for more information.', 'error') return swal('An error occurred!', 'There was an error with the request, please check the console for more information.', 'error')
}) })
@ -1713,7 +1677,7 @@ page.getUsers = function ({ pageNum } = {}, element) {
<form class="prevent-default"> <form class="prevent-default">
<div class="field has-addons"> <div class="field has-addons">
<div class="control is-expanded"> <div class="control is-expanded">
<input id="jumpToPage" class="input is-small" type="text" value="${pageNum + 1}"> <input id="jumpToPage" class="input is-small" type="number" value="${pageNum + 1}">
</div> </div>
<div class="control"> <div class="control">
<button type="submit" class="button is-small is-breeze" title="Jump to page" data-action="jump-to-page"> <button type="submit" class="button is-small is-breeze" title="Jump to page" data-action="jump-to-page">
@ -1765,10 +1729,9 @@ page.getUsers = function ({ pageNum } = {}, element) {
<tr> <tr>
<th><input id="selectAll" class="checkbox" type="checkbox" title="Select all users" data-action="select-all"></th> <th><input id="selectAll" class="checkbox" type="checkbox" title="Select all users" data-action="select-all"></th>
<th>ID</th> <th>ID</th>
<th style="width: 25%">Username</th> <th style="width: 20%">Username</th>
<th>Uploads</th> <th>Uploads</th>
<th>Usage</th> <th>Usage</th>
<th>File length</th>
<th>Group</th> <th>Group</th>
<th></th> <th></th>
</tr> </tr>
@ -1780,7 +1743,6 @@ page.getUsers = function ({ pageNum } = {}, element) {
<hr> <hr>
${pagination} ${pagination}
` `
page.fadeAndScroll()
const table = document.querySelector('#table') const table = document.querySelector('#table')
@ -1813,7 +1775,6 @@ page.getUsers = function ({ pageNum } = {}, element) {
<th${enabled ? '' : ' class="is-linethrough"'}>${user.username}</td> <th${enabled ? '' : ' class="is-linethrough"'}>${user.username}</td>
<th>${user.uploadsCount}</th> <th>${user.uploadsCount}</th>
<td>${page.getPrettyBytes(user.diskUsage)}</td> <td>${page.getPrettyBytes(user.diskUsage)}</td>
<td>${user.fileLength || 'default'}</td>
<td>${displayGroup}</td> <td>${displayGroup}</td>
<td class="controls" style="text-align: right"> <td class="controls" style="text-align: right">
<a class="button is-small is-primary" title="Edit user" data-action="edit-user"> <a class="button is-small is-primary" title="Edit user" data-action="edit-user">
@ -1842,6 +1803,7 @@ page.getUsers = function ({ pageNum } = {}, element) {
table.appendChild(tr) table.appendChild(tr)
page.checkboxes.users = Array.from(table.querySelectorAll('.checkbox[data-action="select"]')) page.checkboxes.users = Array.from(table.querySelectorAll('.checkbox[data-action="select"]'))
} }
page.fadeAndScroll()
if (allSelected && response.data.users.length) { if (allSelected && response.data.users.length) {
const selectAll = document.querySelector('#selectAll') const selectAll = document.querySelector('#selectAll')
@ -1851,7 +1813,7 @@ page.getUsers = function ({ pageNum } = {}, element) {
page.views.users.pageNum = response.data.users.length ? pageNum : 0 page.views.users.pageNum = response.data.users.length ? pageNum : 0
}).catch(function (error) { }).catch(function (error) {
if (element) page.isLoading(element, false) if (element) page.isLoading(element, false)
console.log(error) console.error(error)
return swal('An error occurred!', 'There was an error with the request, please check the console for more information.', 'error') return swal('An error occurred!', 'There was an error with the request, please check the console for more information.', 'error')
}) })
} }
@ -1935,7 +1897,7 @@ page.editUser = function (id) {
const div = document.createElement('div') const div = document.createElement('div')
div.innerHTML = ` div.innerHTML = `
<p>${user.username}'s new password is:</p> <p>${user.username}'s new password is:</p>
<p class="is-code">${response.data.password}</p> <p><code>${response.data.password}</code></p>
` `
swal({ swal({
title: 'Success!', title: 'Success!',
@ -1950,7 +1912,7 @@ page.editUser = function (id) {
page.getUsers(page.views.users) page.getUsers(page.views.users)
}).catch(function (error) { }).catch(function (error) {
console.log(error) console.error(error)
return swal('An error occurred!', 'There was an error with the request, please check the console for more information.', 'error') return swal('An error occurred!', 'There was an error with the request, please check the console for more information.', 'error')
}) })
}) })
@ -1990,7 +1952,7 @@ page.disableUser = function (id) {
swal('Success!', 'The user has been disabled.', 'success') swal('Success!', 'The user has been disabled.', 'success')
page.getUsers(page.views.users) page.getUsers(page.views.users)
}).catch(function (error) { }).catch(function (error) {
console.log(error) console.error(error)
return swal('An error occurred!', 'There was an error with the request, please check the console for more information.', 'error') return swal('An error occurred!', 'There was an error with the request, please check the console for more information.', 'error')
}) })
}) })
@ -2069,32 +2031,41 @@ page.getServerStats = function (element) {
const keys = Object.keys(response.data.stats) const keys = Object.keys(response.data.stats)
for (let i = 0; i < keys.length; i++) { for (let i = 0; i < keys.length; i++) {
let rows = '' let rows = ''
if (!response.data.stats[keys[i]]) { if (!response.data.stats[keys[i]])
rows += ` rows += `
<tr> <tr>
<td>Generating, please try again later\u2026</td> <td>Generating, please try again later\u2026</td>
<td></td> <td></td>
</tr> </tr>
` `
} else { else
const valKeys = Object.keys(response.data.stats[keys[i]]) try {
for (let j = 0; j < valKeys.length; j++) { const valKeys = Object.keys(response.data.stats[keys[i]])
const _value = response.data.stats[keys[i]][valKeys[j]] for (let j = 0; j < valKeys.length; j++) {
let value = _value const _value = response.data.stats[keys[i]][valKeys[j]]
if (['albums', 'users', 'uploads'].includes(keys[i])) let value = _value
value = _value.toLocaleString() if (['albums', 'users', 'uploads'].includes(keys[i]))
if (['memoryUsage', 'size'].includes(valKeys[j])) value = _value.toLocaleString()
value = page.getPrettyBytes(_value) if (['memoryUsage', 'size'].includes(valKeys[j]))
if (valKeys[j] === 'systemMemory') value = page.getPrettyBytes(_value)
value = `${page.getPrettyBytes(_value.used)} / ${page.getPrettyBytes(_value.total)} (${Math.round(_value.used / _value.total * 100)}%)` if (valKeys[j] === 'systemMemory')
rows += ` value = `${page.getPrettyBytes(_value.used)} / ${page.getPrettyBytes(_value.total)} (${Math.round(_value.used / _value.total * 100)}%)`
<tr> rows += `
<th>${valKeys[j].replace(/([A-Z])/g, ' $1').toUpperCase()}</th> <tr>
<td>${value}</td> <th>${valKeys[j].replace(/([A-Z])/g, ' $1').toUpperCase()}</th>
</tr> <td>${value}</td>
` </tr>
`
}
} catch (error) {
console.error(error)
rows = `
<tr>
<td>Error parsing response. Try again?</td>
<td></td>
</tr>
`
} }
}
content += ` content += `
<div class="table-container"> <div class="table-container">
@ -2117,7 +2088,6 @@ page.getServerStats = function (element) {
<h2 class="subtitle">Statistics</h2> <h2 class="subtitle">Statistics</h2>
${content} ${content}
` `
page.fadeAndScroll() page.fadeAndScroll()
}) })
} }

View File

@ -3,7 +3,9 @@
const lsKeys = { const lsKeys = {
token: 'token', token: 'token',
chunkSize: 'chunkSize', chunkSize: 'chunkSize',
parallelUploads: 'parallelUploads' parallelUploads: 'parallelUploads',
fileLength: 'fileLength',
uploadAge: 'uploadAge'
} }
const page = { const page = {
@ -15,15 +17,22 @@ const page = {
enableUserAccounts: null, enableUserAccounts: null,
maxSize: null, maxSize: null,
chunkSize: null, chunkSize: null,
temporaryUploadAges: null,
fileIdentifierLength: null,
// store album id that will be used with upload requests // store album id that will be used with upload requests
album: null, album: null,
parallelUploads: null, parallelUploads: null,
fileLength: null,
uploadAge: null,
maxSizeBytes: null, maxSizeBytes: null,
urlMaxSize: null, urlMaxSize: null,
urlMaxSizeBytes: null, urlMaxSizeBytes: null,
tabs: null,
activeTab: null,
albumSelect: null, albumSelect: null,
previewTemplate: null, previewTemplate: null,
@ -40,10 +49,14 @@ page.checkIfPublic = function () {
page.enableUserAccounts = response.data.enableUserAccounts page.enableUserAccounts = response.data.enableUserAccounts
page.maxSize = parseInt(response.data.maxSize) page.maxSize = parseInt(response.data.maxSize)
page.maxSizeBytes = page.maxSize * 1e6 page.maxSizeBytes = page.maxSize * 1e6
page.chunkSize = response.data.chunkSize page.chunkSize = parseInt(response.data.chunkSize)
page.temporaryUploadAges = response.data.temporaryUploadAges
page.fileIdentifierLength = response.data.fileIdentifierLength
page.preparePage() page.preparePage()
}).catch(function (error) { }).catch(function (error) {
console.log(error) console.error(error)
document.querySelector('#albumDiv').style.display = 'none'
document.querySelector('#tabs').style.display = 'none'
const button = document.querySelector('#loginToUpload') const button = document.querySelector('#loginToUpload')
button.classList.remove('is-loading') button.classList.remove('is-loading')
button.innerText = 'Error occurred. Reload the page?' button.innerText = 'Error occurred. Reload the page?'
@ -88,7 +101,7 @@ page.verifyToken = function (token, reloadOnError) {
page.token = token page.token = token
return page.prepareUpload() return page.prepareUpload()
}).catch(function (error) { }).catch(function (error) {
console.log(error) console.error(error)
return swal('An error occurred!', 'There was an error with the request, please check the console for more information.', 'error') return swal('An error occurred!', 'There was an error with the request, please check the console for more information.', 'error')
}) })
} }
@ -99,6 +112,9 @@ page.prepareUpload = function () {
page.albumSelect = document.querySelector('#albumSelect') page.albumSelect = document.querySelector('#albumSelect')
page.albumSelect.addEventListener('change', function () { page.albumSelect.addEventListener('change', function () {
page.album = parseInt(page.albumSelect.value) page.album = parseInt(page.albumSelect.value)
// Re-generate ShareX config file
if (typeof page.prepareShareX === 'function')
page.prepareShareX()
}) })
page.prepareAlbums() page.prepareAlbums()
@ -121,6 +137,10 @@ page.prepareUpload = function () {
page.prepareDropzone() page.prepareDropzone()
// Generate ShareX config file
if (typeof page.prepareShareX === 'function')
page.prepareShareX()
const urlMaxSize = document.querySelector('#urlMaxSize') const urlMaxSize = document.querySelector('#urlMaxSize')
if (urlMaxSize) { if (urlMaxSize) {
page.urlMaxSize = parseInt(urlMaxSize.innerHTML) page.urlMaxSize = parseInt(urlMaxSize.innerHTML)
@ -132,13 +152,13 @@ page.prepareUpload = function () {
} }
const tabs = document.querySelector('#tabs') const tabs = document.querySelector('#tabs')
tabs.style.display = 'flex' page.tabs = tabs.querySelectorAll('li')
const items = tabs.getElementsByTagName('li') for (let i = 0; i < page.tabs.length; i++)
for (let i = 0; i < items.length; i++) page.tabs[i].addEventListener('click', function () {
items[i].addEventListener('click', function () {
page.setActiveTab(this.dataset.id) page.setActiveTab(this.dataset.id)
}) })
page.setActiveTab('tab-files') page.setActiveTab('tab-files')
tabs.style.display = 'flex'
} }
page.prepareAlbums = function () { page.prepareAlbums = function () {
@ -169,7 +189,7 @@ page.prepareAlbums = function () {
page.albumSelect.appendChild(option) page.albumSelect.appendChild(option)
} }
}).catch(function (error) { }).catch(function (error) {
console.log(error) console.error(error)
const description = error.response.data && error.response.data.description const description = error.response.data && error.response.data.description
? error.response.data.description ? error.response.data.description
: 'There was an error with the request, please check the console for more information.' : 'There was an error with the request, please check the console for more information.'
@ -177,18 +197,19 @@ page.prepareAlbums = function () {
}) })
} }
page.setActiveTab = function (activeId) { page.setActiveTab = function (tabId) {
const items = document.querySelector('#tabs').getElementsByTagName('li') if (tabId === page.activeTab) return
for (let i = 0; i < items.length; i++) { for (let i = 0; i < page.tabs.length; i++) {
const tabId = items[i].dataset.id const id = page.tabs[i].dataset.id
if (tabId === activeId) { if (id === tabId) {
items[i].classList.add('is-active') page.tabs[i].classList.add('is-active')
document.getElementById(tabId).style.display = 'block' document.querySelector(`#${id}`).style.display = 'block'
} else { } else {
items[i].classList.remove('is-active') page.tabs[i].classList.remove('is-active')
document.getElementById(tabId).style.display = 'none' document.querySelector(`#${id}`).style.display = 'none'
} }
} }
page.activeTab = tabId
} }
page.prepareDropzone = function () { page.prepareDropzone = function () {
@ -207,9 +228,10 @@ page.prepareDropzone = function () {
const previewsContainer = tabDiv.querySelector('#tab-files .field.uploads') const previewsContainer = tabDiv.querySelector('#tab-files .field.uploads')
page.dropzone = new Dropzone('#dropzone', { page.dropzone = new Dropzone(document.body, {
url: 'api/upload', url: 'api/upload',
paramName: 'files[]', paramName: 'files[]',
clickable: tabDiv.querySelector('#dropzone'),
maxFilesize: page.maxSizeBytes / 1024 / 1024, // this option expects MiB maxFilesize: page.maxSizeBytes / 1024 / 1024, // this option expects MiB
parallelUploads: page.parallelUploads, parallelUploads: page.parallelUploads,
uploadMultiple: false, uploadMultiple: false,
@ -230,14 +252,20 @@ page.prepareDropzone = function () {
files: [{ files: [{
uuid: file.upload.uuid, uuid: file.upload.uuid,
original: file.name, original: file.name,
size: file.size,
type: file.type, type: file.type,
count: file.upload.totalChunkCount, albumid: page.album,
albumid: page.album filelength: page.fileLength,
age: page.uploadAge
}] }]
}, { }, {
headers: { headers: { token: page.token }
token: page.token }).catch(function (error) {
if (error.response.data) return error.response
return {
data: {
success: false,
description: error.toString()
}
} }
}).then(function (response) { }).then(function (response) {
file.previewElement.querySelector('.progress').style.display = 'none' file.previewElement.querySelector('.progress').style.display = 'none'
@ -249,24 +277,24 @@ page.prepareDropzone = function () {
page.updateTemplate(file, response.data.files[0]) page.updateTemplate(file, response.data.files[0])
return done() return done()
}).catch(function (error) {
return {
success: false,
description: error.toString()
}
}) })
} }
}) })
page.dropzone.on('addedfile', function (file) { page.dropzone.on('addedfile', function (file) {
// Set active tab to file uploads
page.setActiveTab('tab-files')
// Add file entry
tabDiv.querySelector('.uploads').style.display = 'block' tabDiv.querySelector('.uploads').style.display = 'block'
file.previewElement.querySelector('.name').innerHTML = file.name file.previewElement.querySelector('.name').innerHTML = file.name
}) })
// Add the selected albumid, if an album is selected, as a header
page.dropzone.on('sending', function (file, xhr) { page.dropzone.on('sending', function (file, xhr) {
if (file.upload.chunked) return if (file.upload.chunked) return
if (page.album) xhr.setRequestHeader('albumid', page.album) // Add headers if not uploading chunks
if (page.album !== null) xhr.setRequestHeader('albumid', page.album)
if (page.fileLength !== null) xhr.setRequestHeader('filelength', page.fileLength)
if (page.uploadAge !== null) xhr.setRequestHeader('age', page.uploadAge)
}) })
// Update the total progress bar // Update the total progress bar
@ -290,16 +318,16 @@ page.prepareDropzone = function () {
}) })
page.dropzone.on('error', function (file, error) { page.dropzone.on('error', function (file, error) {
// Clean up file size errors
if ((typeof error === 'string' && /^File is too big/.test(error)) || if ((typeof error === 'string' && /^File is too big/.test(error)) ||
error.description === 'MulterError: File too large') (typeof error === 'object' && /File too large/.test(error.description)))
error = `File too large (${page.getPrettyBytes(file.size)}).` error = `File too large (${page.getPrettyBytes(file.size)}).`
page.updateTemplateIcon(file.previewElement, 'icon-block') page.updateTemplateIcon(file.previewElement, 'icon-block')
file.previewElement.querySelector('.progress').style.display = 'none' file.previewElement.querySelector('.progress').style.display = 'none'
file.previewElement.querySelector('.name').innerHTML = file.name file.previewElement.querySelector('.name').innerHTML = file.name
file.previewElement.querySelector('.error').innerHTML = error.description || error file.previewElement.querySelector('.error').innerHTML = error.description || error
}) })
if (typeof page.prepareShareX === 'function') page.prepareShareX()
} }
page.uploadUrls = function (button) { page.uploadUrls = function (button) {
@ -315,7 +343,13 @@ page.uploadUrls = function (button) {
} }
function run () { function run () {
const albumid = page.album const headers = {
token: page.token,
albumid: page.album,
age: page.uploadAge,
filelength: page.fileLength
}
const previewsContainer = tabDiv.querySelector('.uploads') const previewsContainer = tabDiv.querySelector('.uploads')
const urls = document.querySelector('#urls').value const urls = document.querySelector('#urls').value
.split(/\r?\n/) .split(/\r?\n/)
@ -334,38 +368,29 @@ page.uploadUrls = function (button) {
previewTemplate.innerHTML = page.previewTemplate.trim() previewTemplate.innerHTML = page.previewTemplate.trim()
const previewElement = previewTemplate.content.firstChild const previewElement = previewTemplate.content.firstChild
previewElement.querySelector('.name').innerHTML = url previewElement.querySelector('.name').innerHTML = url
previewElement.querySelector('.progress').removeAttribute('value')
previewsContainer.appendChild(previewElement) previewsContainer.appendChild(previewElement)
return { return { url, previewElement }
url,
previewElement
}
}) })
function post (i) { function post (i) {
if (i === files.length) return done() if (i === files.length)
return done()
const file = files[i]
function posted (result) { function posted (result) {
file.previewElement.querySelector('.progress').style.display = 'none' files[i].previewElement.querySelector('.progress').style.display = 'none'
if (result.success) { if (result.success) {
page.updateTemplate(file, result.files[0]) page.updateTemplate(files[i], result.files[0])
} else { } else {
page.updateTemplateIcon(file.previewElement, 'icon-block') page.updateTemplateIcon(files[i].previewElement, 'icon-block')
file.previewElement.querySelector('.error').innerHTML = result.description files[i].previewElement.querySelector('.error').innerHTML = result.description
} }
return post(i + 1) return post(i + 1)
} }
axios.post('api/upload', { // Animate progress bar
urls: [file.url] files[i].previewElement.querySelector('.progress').removeAttribute('value')
}, {
headers: { axios.post('api/upload', { urls: [files[i].url] }, { headers }).then(function (response) {
token: page.token,
albumid
}
}).then(function (response) {
return posted(response.data) return posted(response.data)
}).catch(function (error) { }).catch(function (error) {
return posted({ return posted({
@ -410,6 +435,12 @@ page.updateTemplate = function (file, response) {
} else { } else {
page.updateTemplateIcon(file.previewElement, 'icon-doc-inv') page.updateTemplateIcon(file.previewElement, 'icon-doc-inv')
} }
if (response.expirydate) {
const expiryDate = file.previewElement.querySelector('.expiry-date')
expiryDate.innerHTML = `Expiry date: ${page.getPrettyDate(new Date(response.expirydate * 1000))}`
expiryDate.style.display = 'block'
}
} }
page.createAlbum = function () { page.createAlbum = function () {
@ -456,10 +487,10 @@ page.createAlbum = function () {
}).then(function (value) { }).then(function (value) {
if (!value) return if (!value) return
const name = document.querySelector('#swalName').value const name = document.querySelector('#swalName').value.trim()
axios.post('api/albums', { axios.post('api/albums', {
name, name,
description: document.querySelector('#swalDescription').value, description: document.querySelector('#swalDescription').value.trim(),
download: document.querySelector('#swalDownload').checked, download: document.querySelector('#swalDownload').checked,
public: document.querySelector('#swalPublic').checked public: document.querySelector('#swalPublic').checked
}, { }, {
@ -478,7 +509,7 @@ page.createAlbum = function () {
swal('Woohoo!', 'Album was created successfully.', 'success') swal('Woohoo!', 'Album was created successfully.', 'success')
}).catch(function (error) { }).catch(function (error) {
console.log(error) console.error(error)
return swal('An error occurred!', 'There was an error with the request, please check the console for more information.', 'error') return swal('An error occurred!', 'There was an error with the request, please check the console for more information.', 'error')
}) })
}) })
@ -486,17 +517,89 @@ page.createAlbum = function () {
page.prepareUploadConfig = function () { page.prepareUploadConfig = function () {
const fallback = { const fallback = {
chunkSize: parseInt(page.chunkSize), chunkSize: page.chunkSize,
parallelUploads: 2 parallelUploads: 2
} }
document.querySelector('#defaultChunkSize').innerHTML = `${fallback.chunkSize} MB`
document.querySelector('#defaultParallelUploads').innerHTML = `${fallback.parallelUploads}`
page.chunkSize = localStorage[lsKeys.chunkSize] || fallback.chunkSize page.chunkSize = parseInt(localStorage[lsKeys.chunkSize]) || fallback.chunkSize
page.parallelUploads = localStorage[lsKeys.parallelUploads] || fallback.parallelUploads page.parallelUploads = parseInt(localStorage[lsKeys.parallelUploads]) || fallback.parallelUploads
document.querySelector('#chunkSize').value = page.chunkSize document.querySelector('#chunkSize').value = page.chunkSize
document.querySelector('#parallelUploads').value = page.parallelUploads document.querySelector('#parallelUploads').value = page.parallelUploads
const numConfig = {
chunkSize: { min: 1, max: 95 },
parallelUploads: { min: 1, max: Number.MAX_SAFE_INTEGER }
}
document.querySelector('#chunkSizeDiv .help').innerHTML =
`Default is ${fallback.chunkSize} MB. Max is ${numConfig.chunkSize.max}.`
document.querySelector('#parallelUploadsDiv .help').innerHTML =
`Default is ${fallback.parallelUploads}.`
const fileLengthDiv = document.querySelector('#fileLengthDiv')
if (page.fileIdentifierLength && fileLengthDiv) {
const element = document.querySelector('#fileLength')
const stored = parseInt(localStorage[lsKeys.fileLength])
fallback.fileLength = page.fileIdentifierLength.default
let helpText = `Default is ${page.fileIdentifierLength.default}.`
const range = typeof page.fileIdentifierLength.min === 'number' &&
typeof page.fileIdentifierLength.max === 'number'
if (range) {
helpText += ` Min is ${page.fileIdentifierLength.min}. Max is ${page.fileIdentifierLength.max}`
numConfig.fileLength = {
min: page.fileIdentifierLength.min,
max: page.fileIdentifierLength.max
}
}
if (page.fileIdentifierLength.force) {
helpText += ' This option is currently disabled.'
element.disabled = true
}
if (page.fileIdentifierLength.force ||
isNaN(stored) ||
!range ||
stored < page.fileIdentifierLength.min ||
stored > page.fileIdentifierLength.max) {
element.value = fallback.fileLength
page.fileLength = null
} else {
element.value = stored
page.fileLength = stored
}
fileLengthDiv.style.display = 'block'
fileLengthDiv.querySelector('.help').innerHTML = helpText
}
Object.keys(numConfig).forEach(function (key) {
document.querySelector(`#${key}`).setAttribute('min', numConfig[key].min)
document.querySelector(`#${key}`).setAttribute('max', numConfig[key].max)
})
const uploadAgeDiv = document.querySelector('#uploadAgeDiv')
if (Array.isArray(page.temporaryUploadAges) && page.temporaryUploadAges.length && uploadAgeDiv) {
const element = document.querySelector('#uploadAge')
const stored = parseFloat(localStorage[lsKeys.uploadAge])
for (let i = 0; i < page.temporaryUploadAges.length; i++) {
const age = page.temporaryUploadAges[i]
const option = document.createElement('option')
option.value = i === 0 ? 'default' : age
option.innerHTML = page.getPrettyUploadAge(age) +
(i === 0 ? ' (default)' : '')
element.appendChild(option)
if (age === stored) {
element.value = option.value
page.uploadAge = stored
}
}
uploadAgeDiv.style.display = 'block'
}
const tabContent = document.querySelector('#tab-config') const tabContent = document.querySelector('#tab-config')
const form = tabContent.querySelector('form') const form = tabContent.querySelector('form')
form.addEventListener('submit', function (event) { form.addEventListener('submit', function (event) {
@ -506,27 +609,23 @@ page.prepareUploadConfig = function () {
const siBytes = localStorage[lsKeys.siBytes] !== '0' const siBytes = localStorage[lsKeys.siBytes] !== '0'
if (!siBytes) document.querySelector('#siBytes').value = '0' if (!siBytes) document.querySelector('#siBytes').value = '0'
// Always display this in MB?
const maxChunkSize = 95
document.querySelector('#maxChunkSize').innerHTML = `${maxChunkSize} MB`
document.querySelector('#chunkSize').setAttribute('max', maxChunkSize)
document.querySelector('#saveConfig').addEventListener('click', function () { document.querySelector('#saveConfig').addEventListener('click', function () {
const prefKeys = ['siBytes'] if (!form.checkValidity())
return
const prefKeys = ['siBytes', 'uploadAge']
for (let i = 0; i < prefKeys.length; i++) { for (let i = 0; i < prefKeys.length; i++) {
const value = form.elements[prefKeys[i]].value const value = form.elements[prefKeys[i]].value
if (value !== '0' && value !== fallback[prefKeys[i]]) if (value !== 'default' && value !== fallback[prefKeys[i]])
localStorage.removeItem(lsKeys[prefKeys[i]])
else
localStorage[lsKeys[prefKeys[i]]] = value localStorage[lsKeys[prefKeys[i]]] = value
else
localStorage.removeItem(lsKeys[prefKeys[i]])
} }
const numKeys = ['chunkSize', 'parallelUploads'] const numKeys = Object.keys(numConfig)
for (let i = 0; i < numKeys.length; i++) { for (let i = 0; i < numKeys.length; i++) {
const parsed = parseInt(form.elements[numKeys[i]].value) const parsed = parseInt(form.elements[numKeys[i]].value) || 0
let value = isNaN(parsed) ? 0 : Math.max(parsed, 0) const value = Math.min(Math.max(parsed, numConfig[numKeys[i]].min), numConfig[numKeys[i]].max)
if (numKeys[i] === 'chunkSize') value = Math.min(value, maxChunkSize)
value = Math.min(value, Number.MAX_SAFE_INTEGER)
if (value > 0 && value !== fallback[numKeys[i]]) if (value > 0 && value !== fallback[numKeys[i]])
localStorage[lsKeys[numKeys[i]]] = value localStorage[lsKeys[numKeys[i]]] = value
else else
@ -535,7 +634,7 @@ page.prepareUploadConfig = function () {
swal({ swal({
title: 'Woohoo!', title: 'Woohoo!',
text: 'Upload configuration saved.', text: 'Configuration saved into this browser.',
icon: 'success' icon: 'success'
}).then(function () { }).then(function () {
location.reload() location.reload()
@ -543,6 +642,20 @@ page.prepareUploadConfig = function () {
}) })
} }
page.getPrettyUploadAge = function (hours) {
if (hours === 0) {
return 'Permanent'
} else if (hours < 1) {
const minutes = hours * 60
return `${minutes} minute${minutes === 1 ? '' : 's'}`
} else if (hours > 24) {
const days = hours / 24
return `${days} day${days === 1 ? '' : 's'}`
} else {
return `${hours} hour${hours === 1 ? '' : 's'}`
}
}
// Handle image paste event // Handle image paste event
window.addEventListener('paste', function (event) { window.addEventListener('paste', function (event) {
const items = (event.clipboardData || event.originalEvent.clipboardData).items const items = (event.clipboardData || event.originalEvent.clipboardData).items

View File

@ -4,9 +4,22 @@
lsKeys.siBytes = 'siBytes' lsKeys.siBytes = 'siBytes'
page.prepareShareX = function () { page.prepareShareX = function () {
if (!page.token) return const values = {
token: page.token || '',
albumid: page.album || '',
filelength: page.fileLength || '',
age: page.uploadAge || ''
}
const headers = []
const keys = Object.keys(values)
for (let i = 0; i < keys.length; i++)
// Pad by 4 space
headers.push(` "${keys[i]}": "${values[keys[i]]}"`)
const origin = (location.hostname + location.pathname).replace(/\/(dashboard)?$/, '') const origin = (location.hostname + location.pathname).replace(/\/(dashboard)?$/, '')
const originClean = origin.replace(/\//g, '_') const originClean = origin.replace(/\//g, '_')
const sharexElement = document.querySelector('#ShareX') const sharexElement = document.querySelector('#ShareX')
const sharexFile = `{ const sharexFile = `{
"Name": "${originClean}", "Name": "${originClean}",
@ -15,12 +28,13 @@ page.prepareShareX = function () {
"RequestURL": "${location.protocol}//${origin}/api/upload", "RequestURL": "${location.protocol}//${origin}/api/upload",
"FileFormName": "files[]", "FileFormName": "files[]",
"Headers": { "Headers": {
"token": "${page.token}" ${headers.join(',\n')}
}, },
"ResponseType": "Text", "ResponseType": "Text",
"URL": "$json:files[0].url$", "URL": "$json:files[0].url$",
"ThumbnailURL": "$json:files[0].url$" "ThumbnailURL": "$json:files[0].url$"
}\n` }`
const sharexBlob = new Blob([sharexFile], { type: 'application/octet-binary' }) const sharexBlob = new Blob([sharexFile], { type: 'application/octet-binary' })
sharexElement.setAttribute('href', URL.createObjectURL(sharexBlob)) sharexElement.setAttribute('href', URL.createObjectURL(sharexBlob))
sharexElement.setAttribute('download', `${originClean}.sxcu`) sharexElement.setAttribute('download', `${originClean}.sxcu`)

View File

@ -5,7 +5,10 @@
"RequestURL": "https://safe.fiery.me/api/upload", "RequestURL": "https://safe.fiery.me/api/upload",
"FileFormName": "files[]", "FileFormName": "files[]",
"Headers": { "Headers": {
"token": "" "token": "",
"albumid": "",
"filelength": "",
"age": ""
}, },
"ResponseType": "Text", "ResponseType": "Text",
"URL": "$json:files[0].url$", "URL": "$json:files[0].url$",

View File

@ -1,7 +1,8 @@
const config = require('./../config') const config = require('./../config')
const routes = require('express').Router()
const db = require('knex')(config.database) const db = require('knex')(config.database)
const path = require('path') const path = require('path')
const paths = require('./../controllers/pathsController')
const routes = require('express').Router()
const utils = require('./../controllers/utilsController') const utils = require('./../controllers/utilsController')
const homeDomain = config.homeDomain || config.domain const homeDomain = config.homeDomain || config.domain
@ -22,9 +23,9 @@ routes.get('/a/:identifier', async (req, res, next) => {
.first() .first()
if (!album) if (!album)
return res.status(404).sendFile('404.html', { root: './pages/error/' }) return res.status(404).sendFile(path.join(paths.errorRoot, config.errorPages[404]))
else if (album.public === 0) else if (album.public === 0)
return res.status(401).json({ return res.status(403).json({
success: false, success: false,
description: 'This album is not available for public.' description: 'This album is not available for public.'
}) })
@ -44,11 +45,10 @@ routes.get('/a/:identifier', async (req, res, next) => {
if (utils.mayGenerateThumb(file.extname)) { if (utils.mayGenerateThumb(file.extname)) {
file.thumb = `${basedomain}/thumbs/${file.name.slice(0, -file.extname.length)}.png` file.thumb = `${basedomain}/thumbs/${file.name.slice(0, -file.extname.length)}.png`
/* /*
If thumbnail for album is still not set, do it. If thumbnail for album is still not set, set it to current file's full URL.
A potential improvement would be to let the user upload a specific image as an album cover A potential improvement would be to let the user set a specific image as an album cover.
since embedding the first image could potentially result in nsfw content when pasting links.
*/ */
if (thumb === '') thumb = file.thumb if (thumb === '') thumb = file.file
} }
totalSize += parseInt(file.size) totalSize += parseInt(file.size)
} }
@ -61,7 +61,9 @@ routes.get('/a/:identifier', async (req, res, next) => {
files, files,
identifier, identifier,
generateZips: config.uploads.generateZips, generateZips: config.uploads.generateZips,
downloadLink: album.download === 0 ? null : `../api/album/zip/${album.identifier}?v=${album.editedAt}`, downloadLink: album.download === 0
? null
: `${homeDomain}/api/album/zip/${album.identifier}?v=${album.editedAt}`,
editedAt: album.editedAt, editedAt: album.editedAt,
url: `${homeDomain}/a/${album.identifier}`, url: `${homeDomain}/a/${album.identifier}`,
totalSize, totalSize,

View File

@ -11,7 +11,9 @@ routes.get('/check', (req, res, next) => {
private: config.private, private: config.private,
enableUserAccounts: config.enableUserAccounts, enableUserAccounts: config.enableUserAccounts,
maxSize: config.uploads.maxSize, maxSize: config.uploads.maxSize,
chunkSize: config.uploads.chunkSize chunkSize: config.uploads.chunkSize,
temporaryUploadAges: config.uploads.temporaryUploadAges,
fileIdentifierLength: config.uploads.fileIdentifierLength
}) })
}) })

View File

@ -1,6 +1,7 @@
const config = require('./../config') const config = require('./../config')
const routes = require('express').Router() const routes = require('express').Router()
const uploadController = require('./../controllers/uploadController') const uploadController = require('./../controllers/uploadController')
const utils = require('./../controllers/utilsController')
const renderOptions = { const renderOptions = {
uploadDisabled: false, uploadDisabled: false,
@ -16,19 +17,18 @@ if (config.private)
routes.get('/nojs', async (req, res, next) => { routes.get('/nojs', async (req, res, next) => {
const options = { renderOptions } const options = { renderOptions }
options.gitHash = req.app.get('git-hash') options.gitHash = utils.gitHash
return res.render('nojs', options) return res.render('nojs', options)
}) })
routes.post('/nojs', (req, res, next) => { routes.post('/nojs', (req, res, next) => {
// TODO: Support upload by URLs.
res._json = res.json res._json = res.json
res.json = (...args) => { res.json = (...args) => {
const result = args[0] const result = args[0]
const options = { renderOptions } const options = { renderOptions }
options.gitHash = req.app.get('git-hash') options.gitHash = utils.utils
options.errorMessage = result.success ? '' : (result.description || 'An unexpected error occurred.') options.errorMessage = result.success ? '' : (result.description || 'An unexpected error occurred.')
options.files = result.files || [{}] options.files = result.files || [{}]

View File

@ -1,27 +1,31 @@
const { stripIndents } = require('./_utils') const { stripIndents } = require('./_utils')
const utils = require('./../controllers/utilsController') const utils = require('./../controllers/utilsController')
const cfpurge = {} ;(async () => {
cfpurge.do = async () => {
const location = process.argv[1].replace(process.cwd() + '/', '') const location = process.argv[1].replace(process.cwd() + '/', '')
const args = process.argv.slice(2) const args = process.argv.slice(2)
if (!args.length || args.includes('--help') || args.includes('-h')) if (!args.length || args.includes('--help') || args.includes('-h'))
return console.log(stripIndents(` return console.log(stripIndents(`
Purge Cloudflare's cache. Purge Cloudflare's cache.
Usage:\nnode ${location} ...filename Usage:
node ${location} ...filename
filename: filename:
Upload names separated by space (will automatically include their thumbs if available). Upload names separated by space (will automatically include their thumbs if available).
`)) `))
const results = await utils.purgeCloudflareCache(args, true, true) const results = await utils.purgeCloudflareCache(args, true, true)
for (const result of results) for (const result of results)
if (result.errors.length) if (result.errors.length)
result.errors.forEach(error => console.error(`CF: ${error}`)) result.errors.forEach(error => console.error(`CF: ${error}`))
else else
console.log(`URLs:\n${result.files.join('\n')}\n\nSuccess: ${result.success}`) console.log(`URLs:\n${result.files.join('\n')}\n\nSuccess: ${result.success}`)
} })()
.then(() => process.exit(0))
cfpurge.do() .catch(error => {
console.error(error)
process.exit(1)
})

78
scripts/clean-up.js Normal file
View File

@ -0,0 +1,78 @@
const { stripIndents } = require('./_utils')
const config = require('./../config')
const db = require('knex')(config.database)
const path = require('path')
const paths = require('./../controllers/pathsController')
const self = {
mode: null
}
self.getFiles = async directory => {
const names = await paths.readdir(directory)
const files = []
for (const name of names) {
const lstat = await paths.lstat(path.join(directory, name))
if (lstat.isFile() && !name.startsWith('.'))
files.push(name)
}
return files
}
;(async () => {
const location = process.argv[1].replace(process.cwd() + '/', '')
const args = process.argv.slice(2)
self.mode = parseInt(args[0]) || 0
if (args.includes('--help') || args.includes('-h'))
return console.log(stripIndents(`
Clean up files that are not in the database.
Usage:
node ${location} [mode=0|1|2]
mode:
0 = Only list names of files that are not in the database.
1 = Clean up the files.
`))
const dryrun = self.mode === 0
const uploads = await self.getFiles(paths.uploads)
console.log(`Uploads: ${uploads.length}`)
const uploadsDb = await db.table('files')
.select('name')
.then(rows => rows.map(row => row.name))
console.log(`- In DB: ${uploadsDb.length}`)
const uploadsNotInDb = uploads.filter(upload => !uploadsDb.includes(upload))
console.log(`- Not in DB: ${uploadsNotInDb.length}`)
const thumbs = await self.getFiles(paths.thumbs)
console.log(`Thumbs: ${thumbs.length}`)
const uploadsDbSet = new Set(uploadsDb.map(upload => upload.split('.')[0]))
const thumbsNotInDb = thumbs.filter(thumb => !uploadsDbSet.has(thumb.slice(0, -4)))
console.log(`- Not in DB: ${thumbsNotInDb.length}`)
if (dryrun) {
console.log('U:', uploadsNotInDb.join(', '))
console.log('T:', thumbsNotInDb.join(', '))
} else if (!dryrun) {
for (const upload of uploadsNotInDb) {
await paths.unlink(path.join(paths.uploads, upload))
console.log(`${upload}: OK`)
}
for (const thumb of thumbsNotInDb) {
await paths.unlink(path.join(paths.thumbs, thumb))
console.log(`${thumb}: OK`)
}
}
})()
.then(() => process.exit(0))
.catch(error => {
console.error(error)
process.exit(1)
})

46
scripts/delete-expired.js Normal file
View File

@ -0,0 +1,46 @@
const { stripIndents } = require('./_utils')
const utils = require('./../controllers/utilsController')
const self = {
mode: null
}
;(async () => {
const location = process.argv[1].replace(process.cwd() + '/', '')
const args = process.argv.slice(2)
self.mode = parseInt(args[0]) || 0
if (args.includes('--help') || args.includes('-h'))
return console.log(stripIndents(`
Bulk delete expired files.
Usage:
node ${location} [mode=0|1|2]
mode:
0 = Only list names of the expired files.
1 = Delete expired files (output file names).
2 = Delete expired files (no output).
`))
const dryrun = self.mode === 0
const quiet = self.mode === 2
const result = await utils.bulkDeleteExpired(dryrun)
if (quiet) return
if (result.expired.length)
for (const expired of result.expired)
console.log(expired)
console.log(`Expired files: ${result.expired.length}`)
if (result.failed)
console.log(`Failed to delete: ${result.failed.length}`)
})()
.then(() => process.exit(0))
.catch(error => {
console.error(error)
process.exit(1)
})

View File

@ -1,57 +1,50 @@
const { stripIndents } = require('./_utils') const { stripIndents } = require('./_utils')
const config = require('./../config')
const fs = require('fs')
const path = require('path') const path = require('path')
const paths = require('./../controllers/pathsController')
const utils = require('./../controllers/utilsController') const utils = require('./../controllers/utilsController')
const thumbs = { const self = {
mode: null, mode: null,
force: null, force: null,
verbose: null, verbose: null,
cfcache: null cfcache: null
} }
thumbs.mayGenerateThumb = extname => { self.mayGenerateThumb = extname => {
return ([1, 3].includes(thumbs.mode) && utils.imageExtensions.includes(extname)) || return ([1, 3].includes(self.mode) && utils.imageExts.includes(extname)) ||
([2, 3].includes(thumbs.mode) && utils.videoExtensions.includes(extname)) ([2, 3].includes(self.mode) && utils.videoExts.includes(extname))
} }
thumbs.getFiles = directory => { self.getFiles = async directory => {
return new Promise((resolve, reject) => { const names = await paths.readdir(directory)
fs.readdir(directory, async (error, names) => { const files = []
if (error) return reject(error) for (const name of names) {
const files = [] const lstat = await paths.lstat(path.join(directory, name))
await Promise.all(names.map(name => { if (lstat.isFile() && !name.startsWith('.'))
return new Promise((resolve, reject) => { files.push(name)
fs.lstat(path.join(directory, name), (error, stats) => { }
if (error) return reject(error) return files
if (stats.isFile() && !name.startsWith('.')) files.push(name)
resolve()
})
})
}))
resolve(files)
})
})
} }
thumbs.do = async () => { ;(async () => {
const location = process.argv[1].replace(process.cwd() + '/', '') const location = process.argv[1].replace(process.cwd() + '/', '')
const args = process.argv.slice(2) const args = process.argv.slice(2)
thumbs.mode = parseInt(args[0]) self.mode = parseInt(args[0])
thumbs.force = parseInt(args[1] || 0) self.force = parseInt(args[1]) || 0
thumbs.verbose = parseInt(args[2] || 0) self.verbose = parseInt(args[2]) || 0
thumbs.cfcache = parseInt(args[3] || 0) self.cfcache = parseInt(args[3]) || 0
if (![1, 2, 3].includes(thumbs.mode) ||
![0, 1].includes(thumbs.force) || if (![1, 2, 3].includes(self.mode) ||
![0, 1].includes(thumbs.verbose) || ![0, 1].includes(self.force) ||
![0, 1].includes(self.verbose) ||
args.includes('--help') || args.includes('--help') ||
args.includes('-h')) args.includes('-h'))
return console.log(stripIndents(` return console.log(stripIndents(`
Generate thumbnails. Generate thumbnails.
Usage :\nnode ${location} <mode=1|2|3> [force=0|1] [verbose=0|1] [cfcache=0|1] Usage :
node ${location} <mode=1|2|3> [force=0|1] [verbose=0|1] [cfcache=0|1]
mode : 1 = images only, 2 = videos only, 3 = both images and videos mode : 1 = images only, 2 = videos only, 3 = both images and videos
force : 0 = no force (default), 1 = overwrite existing thumbnails force : 0 = no force (default), 1 = overwrite existing thumbnails
@ -59,46 +52,36 @@ thumbs.do = async () => {
cfcache: 0 = do not clear cloudflare cache (default), 1 = clear cloudflare cache cfcache: 0 = do not clear cloudflare cache (default), 1 = clear cloudflare cache
`)) `))
const uploadsDir = path.resolve(config.uploads.folder) const uploads = await self.getFiles(paths.uploads)
const thumbsDir = path.join(uploadsDir, 'thumbs') let thumbs = await self.getFiles(paths.thumbs)
const _uploads = await thumbs.getFiles(uploadsDir) thumbs = thumbs.map(thumb => {
const extname = path.extname(thumb)
let _thumbs = await thumbs.getFiles(thumbsDir) return thumb.slice(0, -extname.length)
_thumbs = _thumbs.map(_thumb => {
const extname = path.extname(_thumb)
return _thumb.slice(0, -extname.length)
}) })
const succeeded = [] const succeeded = []
let error = 0 let error = 0
let skipped = 0 let skipped = 0
await new Promise((resolve, reject) => { for (const upload of uploads) {
const generate = async i => { const extname = utils.extname(upload)
const _upload = _uploads[i] const basename = upload.slice(0, -extname.length)
if (!_upload) return resolve()
const extname = path.extname(_upload) if (thumbs.includes(basename) && !self.force) {
const basename = _upload.slice(0, -extname.length) if (self.verbose) console.log(`${upload}: thumb exists.`)
skipped++
if (_thumbs.includes(basename) && !thumbs.force) { } else if (!self.mayGenerateThumb(extname)) {
if (thumbs.verbose) console.log(`${_upload}: thumb exists.`) if (self.verbose) console.log(`${upload}: extension skipped.`)
skipped++ skipped++
} else if (!thumbs.mayGenerateThumb(extname)) { } else {
if (thumbs.verbose) console.log(`${_upload}: extension skipped.`) const start = Date.now()
skipped++ const generated = await utils.generateThumbs(upload, extname, self.force)
} else { console.log(`${upload}: ${(Date.now() - start) / 1000}s: ${generated ? 'OK' : 'ERROR'}`)
const start = Date.now() generated ? succeeded.push(upload) : error++
const generated = await utils.generateThumbs(_upload, thumbs.force)
console.log(`${_upload}: ${(Date.now() - start) / 1000}s: ${generated ? 'OK' : 'ERROR'}`)
generated ? succeeded.push(_upload) : error++
}
return generate(i + 1)
} }
return generate(0) }
})
console.log(`Success: ${succeeded.length}\nError: ${error}\nSkipped: ${skipped}`) console.log(`Success: ${succeeded.length}\nError: ${error}\nSkipped: ${skipped}`)
if (thumbs.cfcache && succeeded.length) { if (self.cfcache && succeeded.length) {
console.log('Purging Cloudflare\'s cache...') console.log('Purging Cloudflare\'s cache...')
const results = await utils.purgeCloudflareCache(succeeded.map(name => { const results = await utils.purgeCloudflareCache(succeeded.map(name => {
const extname = utils.extname(name) const extname = utils.extname(name)
@ -110,6 +93,9 @@ thumbs.do = async () => {
console.log(`Status [${i}]: ${results[i].success ? 'OK' : 'ERROR'}`) console.log(`Status [${i}]: ${results[i].success ? 'OK' : 'ERROR'}`)
} }
} }
} })()
.then(() => process.exit(0))
thumbs.do() .catch(error => {
console.error(error)
process.exit(1)
})

View File

@ -16,7 +16,7 @@
v3: CSS and JS files (libs such as bulma, lazyload, etc). v3: CSS and JS files (libs such as bulma, lazyload, etc).
v4: Renders in /public/render/* directories (to be used by render.js). v4: Renders in /public/render/* directories (to be used by render.js).
#} #}
{% set v1 = "tWLiAlAX5i" %} {% set v1 = "01mMpp1DzB" %}
{% set v2 = "hiboQUzAzp" %} {% set v2 = "hiboQUzAzp" %}
{% set v3 = "tWLiAlAX5i" %} {% set v3 = "tWLiAlAX5i" %}
{% set v4 = "S3TAWpPeFS" %} {% set v4 = "S3TAWpPeFS" %}

View File

@ -71,9 +71,6 @@
<li> <li>
<a id="ShareX">ShareX user profile</a> <a id="ShareX">ShareX user profile</a>
</li> </li>
<li>
<a id="itemFileLength">File name length</a>
</li>
<li> <li>
<a id="itemTokens">Manage your token</a> <a id="itemTokens">Manage your token</a>
</li> </li>

View File

@ -1,5 +1,12 @@
{% extends "_layout.njk" %} {% extends "_layout.njk" %}
{% macro extensions(obj) %}
{% set space = joiner(' ') %}
{% for id, val in obj -%}
{{ space() }}{{ id }}="{{ val }}"
{%- endfor %}
{% endmacro %}
{% block content %} {% block content %}
{{ super() }} {{ super() }}
<section class="section"> <section class="section">
@ -95,7 +102,7 @@
<h2 class='subtitle'>Does your API support chunked uploads?</h2> <h2 class='subtitle'>Does your API support chunked uploads?</h2>
<article class="message"> <article class="message">
<div class="message-body"> <div class="message-body">
Yes, the homepage uploader is hard-coded to chunk uploads into {{ chunkSize }} pieces by default.<br> Yes, the homepage uploader is hard-coded to chunk uploads into {{ chunkSize }} MB pieces by default.<br>
If you want to chunk your API uploads, feel free to read the source code to see how it works. If you want to chunk your API uploads, feel free to read the source code to see how it works.
</div> </div>
</article> </article>
@ -104,26 +111,21 @@
<h2 class='subtitle'>What are the allowed extensions here?</h2> <h2 class='subtitle'>What are the allowed extensions here?</h2>
<article class="message"> <article class="message">
<div class="message-body"> <div class="message-body">
{% if extensionsFilter.length and not whitelist -%} {% if extensionsFilter.length -%}
We support any file extensions except the following: {{ extensionsFilter | join(', ') }}. {%- if whitelist -%}
{%- elif extensionsFilter.length and whitelist -%} We support any file extensions except the following: {{ extensionsFilter | join(', ') }}.
We only support the following extensions: {{ extensionsFilter | join(', ') }}. {%- else -%}
We only support the following extensions:
{%- endif -%}<br>
{% set comma = joiner(' ') -%}
{%- for extension in extensionsFilter -%}
{{ comma() }}<code>{{ extension }}</code>
{%- endfor -%}
{%- else -%} {%- else -%}
We support any file extensions. We support any file extensions.
{%- endif %} {%- endif %}
</div> </div>
</article> </article>
<h2 class='subtitle'>How are the file URLs be determined?</h2>
<article class="message">
<div class="message-body">
The safe will generate random {{ fileLength.default }}-letter identifiers.
{% if fileLength.userChangeable %}<br>
If you find that too {{ "short" if tooShort else "long" }}, you can create an account which will let you to set your preferred length.<br>
You can choose from {{ fileLength.min }} to {{ fileLength.max }} letters.
{%- endif %}
</div>
</article>
</div> </div>
</section> </section>
{% endblock %} {% endblock %}

View File

@ -23,7 +23,7 @@
<script src="libs/lazyload/lazyload.min.js?v={{ globals.v3 }}"></script> <script src="libs/lazyload/lazyload.min.js?v={{ globals.v3 }}"></script>
<script src="js/home.js?v={{ globals.v1 }}"></script> <script src="js/home.js?v={{ globals.v1 }}"></script>
<script src="js/s/utils.js?v={{ globals.v1 }}"></script> <script src="js/s/utils.js?v={{ globals.v1 }}"></script>
<!-- We assign an ID for this so that the script can find out version string for render images --> {# We assign an ID for this so that the script can find out version string for render images #}
<script id="renderScript" data-version="{{ globals.v4 }}" src="js/s/render.js?v={{ globals.v1 }}"></script> <script id="renderScript" data-version="{{ globals.v4 }}" src="js/s/render.js?v={{ globals.v1 }}"></script>
{% endblock %} {% endblock %}
@ -92,7 +92,7 @@
</div> </div>
<p class="help"> <p class="help">
{% if urlMaxSize !== maxSize -%} {% if urlMaxSize !== maxSize -%}
Maximum file size for URL upload is <span id="urlMaxSize">{{ urlMaxSize }}</span>. Maximum file size per URL is <span id="urlMaxSize">{{ urlMaxSize }}</span>.
{%- endif %} {%- endif %}
{% if urlExtensionsFilter.length and (urlExtensionsFilterMode === 'blacklist') -%} {% if urlExtensionsFilter.length and (urlExtensionsFilterMode === 'blacklist') -%}
@ -132,19 +132,36 @@
</div> </div>
</div> </div>
</div> </div>
<div class="field"> <div id="fileLengthDiv" class="field" style="display: none">
<label class="label">File identifier length</label>
<div class="control is-expanded">
<input id="fileLength" class="input is-fullwidth" type="number" min="0">
</div>
<p class="help"></p>
</div>
{%- if temporaryUploadAges %}
<div id="uploadAgeDiv" class="field" style="display: none">
<label class="label">Upload age</label>
<div class="control is-expanded">
<div class="select is-fullwidth">
<select id="uploadAge"></select>
</div>
</div>
</div>
{%- endif %}
<div id="chunkSizeDiv" class="field">
<label class="label">Upload chunk size (MB)</label> <label class="label">Upload chunk size (MB)</label>
<div class="control is-expanded"> <div class="control is-expanded">
<input id="chunkSize" class="input is-fullwidth" type="number" min="0" step="5"> <input id="chunkSize" class="input is-fullwidth" type="number" min="0">
</div> </div>
<p class="help">Default is <span id="defaultChunkSize"></span>. Max is <span id="maxChunkSize"></span>.</p> <p class="help"></p>
</div> </div>
<div class="field"> <div id="parallelUploadsDiv" class="field">
<label class="label">Parallel uploads</label> <label class="label">Parallel uploads</label>
<div class="control is-expanded"> <div class="control is-expanded">
<input id="parallelUploads" class="input is-fullwidth" type="number" name="parallelUploads" min="0"> <input id="parallelUploads" class="input is-fullwidth" type="number" name="parallelUploads" min="0">
</div> </div>
<p class="help">Default is <span id="defaultParallelUploads"></span>.</p> <p class="help"></p>
</div> </div>
<div class="field"> <div class="field">
<p class="control is-expanded"> <p class="control is-expanded">
@ -155,6 +172,7 @@
<span>Save & reload</span> <span>Save & reload</span>
</button> </button>
</p> </p>
<p class="help">This configuration will only be used in this browser.</p>
</div> </div>
</form> </form>
</div> </div>
@ -167,13 +185,14 @@
<i class="icon" style="display: none"></i> <i class="icon" style="display: none"></i>
<img class="is-unselectable" style="display: none"> <img class="is-unselectable" style="display: none">
<p class="name is-unselectable"></p> <p class="name is-unselectable"></p>
<progress class="progress is-small is-danger" max="100"></progress> <progress class="progress is-small is-danger" max="100" value="0"></progress>
<p class="error"></p> <p class="error"></p>
<p class="link"> <p class="link">
<a target="_blank" rel="noopener"></a> <a target="_blank" rel="noopener"></a>
</p> </p>
<p class="clipboard-mobile is-hidden-desktop" style="display: none"> <p class="help expiry-date" style="display: none"></p>
<a class="button is-info is-outlined clipboard-js" style="display: flex"> <p class="clipboard-mobile" style="display: none">
<a class="button is-small is-info is-outlined clipboard-js" style="display: flex">
<span class="icon"> <span class="icon">
<i class="icon-clipboard-1"></i> <i class="icon-clipboard-1"></i>
</span> </span>

128
yarn.lock
View File

@ -288,10 +288,10 @@ body-parser@1.19.0, body-parser@^1.19.0:
raw-body "2.4.0" raw-body "2.4.0"
type-is "~1.6.17" type-is "~1.6.17"
bowser@2.5.3: bowser@2.5.4:
version "2.5.3" version "2.5.4"
resolved "https://registry.yarnpkg.com/bowser/-/bowser-2.5.3.tgz#811b0a24219c566c9a6ab3402bc8a13f35a18a96" resolved "https://registry.yarnpkg.com/bowser/-/bowser-2.5.4.tgz#850fccfebde92165440279b5ab19be3c7f05cfe1"
integrity sha512-aWCA+CKfKNL/WGzNgjmK+Whp57JMzboZMwJ5gy2jDj2bEIjbMCb3ImGX+V++5wsJftyFiDIbOjRXl60ycniVqg== integrity sha512-74GGwfc2nzYD19JCiA0RwCxdq7IY5jHeEaSrrgm/5kusEuK+7UK0qDG3gyzN47c4ViNyO4osaKtZE+aSV6nlpQ==
brace-expansion@^1.1.7: brace-expansion@^1.1.7:
version "1.1.11" version "1.1.11"
@ -798,16 +798,20 @@ error-ex@^1.2.0:
is-arrayish "^0.2.1" is-arrayish "^0.2.1"
es-abstract@^1.12.0, es-abstract@^1.7.0: es-abstract@^1.12.0, es-abstract@^1.7.0:
version "1.13.0" version "1.14.1"
resolved "https://registry.yarnpkg.com/es-abstract/-/es-abstract-1.13.0.tgz#ac86145fdd5099d8dd49558ccba2eaf9b88e24e9" resolved "https://registry.yarnpkg.com/es-abstract/-/es-abstract-1.14.1.tgz#6e8d84b445ec9c610781e74a6d52cc31aac5b4ca"
integrity sha512-vDZfg/ykNxQVwup/8E1BZhVzFfBxs9NqMzGcvIJrqg5k2/5Za2bWo40dK2J1pgLngZ7c+Shh8lwYtLGyrwPutg== integrity sha512-cp/Tb1oA/rh2X7vqeSOvM+TSo3UkJLX70eNihgVEvnzwAgikjkTFr/QVgRCaxjm0knCNQzNoxxxcw2zO2LJdZA==
dependencies: dependencies:
es-to-primitive "^1.2.0" es-to-primitive "^1.2.0"
function-bind "^1.1.1" function-bind "^1.1.1"
has "^1.0.3" has "^1.0.3"
has-symbols "^1.0.0"
is-callable "^1.1.4" is-callable "^1.1.4"
is-regex "^1.0.4" is-regex "^1.0.4"
object-keys "^1.0.12" object-inspect "^1.6.0"
object-keys "^1.1.1"
string.prototype.trimleft "^2.0.0"
string.prototype.trimright "^2.0.0"
es-to-primitive@^1.2.0: es-to-primitive@^1.2.0:
version "1.2.0" version "1.2.0"
@ -849,13 +853,13 @@ eslint-module-utils@^2.4.0:
debug "^2.6.8" debug "^2.6.8"
pkg-dir "^2.0.0" pkg-dir "^2.0.0"
eslint-plugin-es@^1.4.1: eslint-plugin-es@^2.0.0:
version "1.4.1" version "2.0.0"
resolved "https://registry.yarnpkg.com/eslint-plugin-es/-/eslint-plugin-es-1.4.1.tgz#12acae0f4953e76ba444bfd1b2271081ac620998" resolved "https://registry.yarnpkg.com/eslint-plugin-es/-/eslint-plugin-es-2.0.0.tgz#0f5f5da5f18aa21989feebe8a73eadefb3432976"
integrity sha512-5fa/gR2yR3NxQf+UXkeLeP8FBBl6tSgdrAz1+cF84v1FMM4twGwQoqTnn+QxFLcPOrF4pdKEJKDB/q9GoyJrCA== integrity sha512-f6fceVtg27BR02EYnBhgWLFQfK6bN4Ll0nQFrBHOlCsAyxeZkn0NHns5O0YZOPrV1B3ramd6cgFwaoFLcSkwEQ==
dependencies: dependencies:
eslint-utils "^1.4.2" eslint-utils "^1.4.2"
regexpp "^2.0.1" regexpp "^3.0.0"
eslint-plugin-import@^2.18.2: eslint-plugin-import@^2.18.2:
version "2.18.2" version "2.18.2"
@ -874,12 +878,12 @@ eslint-plugin-import@^2.18.2:
read-pkg-up "^2.0.0" read-pkg-up "^2.0.0"
resolve "^1.11.0" resolve "^1.11.0"
eslint-plugin-node@^9.2.0: eslint-plugin-node@^10.0.0:
version "9.2.0" version "10.0.0"
resolved "https://registry.yarnpkg.com/eslint-plugin-node/-/eslint-plugin-node-9.2.0.tgz#b1911f111002d366c5954a6d96d3cd5bf2a3036a" resolved "https://registry.yarnpkg.com/eslint-plugin-node/-/eslint-plugin-node-10.0.0.tgz#fd1adbc7a300cf7eb6ac55cf4b0b6fc6e577f5a6"
integrity sha512-2abNmzAH/JpxI4gEOwd6K8wZIodK3BmHbTxz4s79OIYwwIt2gkpEXlAouJXu4H1c9ySTnRso0tsuthSOZbUMlA== integrity sha512-1CSyM/QCjs6PXaT18+zuAXsjXGIGo5Rw630rSKwokSs2jrYURQc4R5JZpoanNCqwNmepg+0eZ9L7YiRUJb8jiQ==
dependencies: dependencies:
eslint-plugin-es "^1.4.1" eslint-plugin-es "^2.0.0"
eslint-utils "^1.4.2" eslint-utils "^1.4.2"
ignore "^5.1.1" ignore "^5.1.1"
minimatch "^3.0.4" minimatch "^3.0.4"
@ -1311,7 +1315,7 @@ fsevents@^1.2.7:
nan "^2.12.1" nan "^2.12.1"
node-pre-gyp "^0.12.0" node-pre-gyp "^0.12.0"
function-bind@^1.1.1: function-bind@^1.0.2, function-bind@^1.1.1:
version "1.1.1" version "1.1.1"
resolved "https://registry.yarnpkg.com/function-bind/-/function-bind-1.1.1.tgz#a56899d3ea3c9bab874bb9773b7c5ede92f4895d" resolved "https://registry.yarnpkg.com/function-bind/-/function-bind-1.1.1.tgz#a56899d3ea3c9bab874bb9773b7c5ede92f4895d"
integrity sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A== integrity sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==
@ -1485,20 +1489,20 @@ helmet-crossdomain@0.4.0:
resolved "https://registry.yarnpkg.com/helmet-crossdomain/-/helmet-crossdomain-0.4.0.tgz#5f1fe5a836d0325f1da0a78eaa5fd8429078894e" resolved "https://registry.yarnpkg.com/helmet-crossdomain/-/helmet-crossdomain-0.4.0.tgz#5f1fe5a836d0325f1da0a78eaa5fd8429078894e"
integrity sha512-AB4DTykRw3HCOxovD1nPR16hllrVImeFp5VBV9/twj66lJ2nU75DP8FPL0/Jp4jj79JhTfG+pFI2MD02kWJ+fA== integrity sha512-AB4DTykRw3HCOxovD1nPR16hllrVImeFp5VBV9/twj66lJ2nU75DP8FPL0/Jp4jj79JhTfG+pFI2MD02kWJ+fA==
helmet-csp@2.9.0: helmet-csp@2.9.1:
version "2.9.0" version "2.9.1"
resolved "https://registry.yarnpkg.com/helmet-csp/-/helmet-csp-2.9.0.tgz#8524886b08c7f7d611cb5f36eae453dd604efd4c" resolved "https://registry.yarnpkg.com/helmet-csp/-/helmet-csp-2.9.1.tgz#39939a84ca3657ee3cba96f296169ccab02f97d5"
integrity sha512-DGGOQtOLM7ZQpjbf/uvUonq1yG/rFgsBuK10ZJt2AtxUJxqfkPvfmP9aLUmgH9IactiRiYoiFY72YYSPl1TLTQ== integrity sha512-HgdXSJ6AVyXiy5ohVGpK6L7DhjI9KVdKVB1xRoixxYKsFXFwoVqtLKgDnfe3u8FGGKf9Ml9k//C9rnncIIAmyA==
dependencies: dependencies:
bowser "2.5.3" bowser "2.5.4"
camelize "1.0.0" camelize "1.0.0"
content-security-policy-builder "2.1.0" content-security-policy-builder "2.1.0"
dasherize "2.0.0" dasherize "2.0.0"
helmet@^3.20.1: helmet@^3.21.0:
version "3.20.1" version "3.21.0"
resolved "https://registry.yarnpkg.com/helmet/-/helmet-3.20.1.tgz#802fcb39ac6865208cbc6879d3502e582c6f777e" resolved "https://registry.yarnpkg.com/helmet/-/helmet-3.21.0.tgz#e7c5e2ed3b8b7f42d2e387004a87198b295132cc"
integrity sha512-em+X5Wz/f0yqoRsBnpnVy3wJHSiIeskX3FQn30szBh1tILaOeSRRLkShuUVFlk/o4qTYjWxdHg4FrRe45iBWHg== integrity sha512-TS3GryQMPR7n/heNnGC0Cl3Ess30g8C6EtqZyylf+Y2/kF4lM8JinOR90rzIICsw4ymWTvji4OhDmqsqxkLrcg==
dependencies: dependencies:
depd "2.0.0" depd "2.0.0"
dns-prefetch-control "0.2.0" dns-prefetch-control "0.2.0"
@ -1507,14 +1511,14 @@ helmet@^3.20.1:
feature-policy "0.3.0" feature-policy "0.3.0"
frameguard "3.1.0" frameguard "3.1.0"
helmet-crossdomain "0.4.0" helmet-crossdomain "0.4.0"
helmet-csp "2.9.0" helmet-csp "2.9.1"
hide-powered-by "1.1.0" hide-powered-by "1.1.0"
hpkp "2.0.0" hpkp "2.0.0"
hsts "2.2.0" hsts "2.2.0"
ienoopen "1.1.0" ienoopen "1.1.0"
nocache "2.1.0" nocache "2.1.0"
referrer-policy "1.2.0" referrer-policy "1.2.0"
x-xss-protection "1.2.0" x-xss-protection "1.3.0"
hide-powered-by@1.1.0: hide-powered-by@1.1.0:
version "1.1.0" version "1.1.0"
@ -2419,7 +2423,12 @@ object-copy@^0.1.0:
define-property "^0.2.5" define-property "^0.2.5"
kind-of "^3.0.3" kind-of "^3.0.3"
object-keys@^1.0.12: object-inspect@^1.6.0:
version "1.6.0"
resolved "https://registry.yarnpkg.com/object-inspect/-/object-inspect-1.6.0.tgz#c70b6cbf72f274aab4c34c0c82f5167bf82cf15b"
integrity sha512-GJzfBZ6DgDAmnuaM3104jR4s1Myxr3Y3zfIyN4z3UdqN69oSRacNK8UhnobDdC+7J2AHCjGwxQubNJfE70SXXQ==
object-keys@^1.0.12, object-keys@^1.1.1:
version "1.1.1" version "1.1.1"
resolved "https://registry.yarnpkg.com/object-keys/-/object-keys-1.1.1.tgz#1c47f272df277f3b1daf061677d9c82e2322c60e" resolved "https://registry.yarnpkg.com/object-keys/-/object-keys-1.1.1.tgz#1c47f272df277f3b1daf061677d9c82e2322c60e"
integrity sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA== integrity sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA==
@ -2516,11 +2525,6 @@ os-tmpdir@^1.0.0, os-tmpdir@~1.0.2:
resolved "https://registry.yarnpkg.com/os-tmpdir/-/os-tmpdir-1.0.2.tgz#bbe67406c79aa85c5cfec766fe5734555dfa1274" resolved "https://registry.yarnpkg.com/os-tmpdir/-/os-tmpdir-1.0.2.tgz#bbe67406c79aa85c5cfec766fe5734555dfa1274"
integrity sha1-u+Z0BseaqFxc/sdm/lc0VV36EnQ= integrity sha1-u+Z0BseaqFxc/sdm/lc0VV36EnQ=
os@^0.1.1:
version "0.1.1"
resolved "https://registry.yarnpkg.com/os/-/os-0.1.1.tgz#208845e89e193ad4d971474b93947736a56d13f3"
integrity sha1-IIhF6J4ZOtTZcUdLk5R3NqVtE/M=
osenv@^0.1.4: osenv@^0.1.4:
version "0.1.5" version "0.1.5"
resolved "https://registry.yarnpkg.com/osenv/-/osenv-0.1.5.tgz#85cdfafaeb28e8677f416e287592b5f3f49ea410" resolved "https://registry.yarnpkg.com/osenv/-/osenv-0.1.5.tgz#85cdfafaeb28e8677f416e287592b5f3f49ea410"
@ -2713,9 +2717,9 @@ proxy-addr@~2.0.5:
ipaddr.js "1.9.0" ipaddr.js "1.9.0"
psl@^1.1.24: psl@^1.1.24:
version "1.3.0" version "1.3.1"
resolved "https://registry.yarnpkg.com/psl/-/psl-1.3.0.tgz#e1ebf6a3b5564fa8376f3da2275da76d875ca1bd" resolved "https://registry.yarnpkg.com/psl/-/psl-1.3.1.tgz#d5aa3873a35ec450bc7db9012ad5a7246f6fc8bd"
integrity sha512-avHdspHO+9rQTLbv1RO+MPYeP/SzsCoxofjVnHanETfQhTJrmB0HlDoW+EiN/R+C0BZ+gERab9NY0lPN2TxNag== integrity sha512-2KLd5fKOdAfShtY2d/8XDWVRnmp3zp40Qt6ge2zBPFARLXOGUf2fHD5eg+TV/5oxBtQKVhjUaKFsAaE4HnwfSA==
pump@^1.0.0: pump@^1.0.0:
version "1.0.3" version "1.0.3"
@ -2864,6 +2868,11 @@ regexpp@^2.0.1:
resolved "https://registry.yarnpkg.com/regexpp/-/regexpp-2.0.1.tgz#8d19d31cf632482b589049f8281f93dbcba4d07f" resolved "https://registry.yarnpkg.com/regexpp/-/regexpp-2.0.1.tgz#8d19d31cf632482b589049f8281f93dbcba4d07f"
integrity sha512-lv0M6+TkDVniA3aD1Eg0DVpfU/booSu7Eev3TDO/mZKHBfVjgCGTV4t4buppESEYDtkArYFOxTJWv6S5C+iaNw== integrity sha512-lv0M6+TkDVniA3aD1Eg0DVpfU/booSu7Eev3TDO/mZKHBfVjgCGTV4t4buppESEYDtkArYFOxTJWv6S5C+iaNw==
regexpp@^3.0.0:
version "3.0.0"
resolved "https://registry.yarnpkg.com/regexpp/-/regexpp-3.0.0.tgz#dd63982ee3300e67b41c1956f850aa680d9d330e"
integrity sha512-Z+hNr7RAVWxznLPuA7DIh8UNX1j9CDrUQxskw9IrBE1Dxue2lyXT+shqEIeLUjrokxIP8CMy1WkjgG3rTsd5/g==
remove-trailing-separator@^1.0.1: remove-trailing-separator@^1.0.1:
version "1.1.0" version "1.1.0"
resolved "https://registry.yarnpkg.com/remove-trailing-separator/-/remove-trailing-separator-1.1.0.tgz#c24bce2a283adad5bc3f58e0d48249b92379d8ef" resolved "https://registry.yarnpkg.com/remove-trailing-separator/-/remove-trailing-separator-1.1.0.tgz#c24bce2a283adad5bc3f58e0d48249b92379d8ef"
@ -2965,9 +2974,9 @@ run-async@^2.2.0:
is-promise "^2.1.0" is-promise "^2.1.0"
rxjs@^6.4.0: rxjs@^6.4.0:
version "6.5.2" version "6.5.3"
resolved "https://registry.yarnpkg.com/rxjs/-/rxjs-6.5.2.tgz#2e35ce815cd46d84d02a209fb4e5921e051dbec7" resolved "https://registry.yarnpkg.com/rxjs/-/rxjs-6.5.3.tgz#510e26317f4db91a7eb1de77d9dd9ba0a4899a3a"
integrity sha512-HUb7j3kvb7p7eCUHE3FqjoDsC1xfZQ4AHFWfTKSpZ+sAhhz5X1WX0ZuUqWbzB2QhSLp3DoLUG+hMdEDKqWo2Zg== integrity sha512-wuYsAYYFdWTAnAaPoKGNhfpWwKZbJW+HgAJ+mImp+Epl7BG8oNWBCTyRM8gba9k4lk8BgWdoYm21Mo/RYhhbgA==
dependencies: dependencies:
tslib "^1.9.0" tslib "^1.9.0"
@ -3290,6 +3299,22 @@ string-width@^3.0.0:
is-fullwidth-code-point "^2.0.0" is-fullwidth-code-point "^2.0.0"
strip-ansi "^5.1.0" strip-ansi "^5.1.0"
string.prototype.trimleft@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/string.prototype.trimleft/-/string.prototype.trimleft-2.0.0.tgz#68b6aa8e162c6a80e76e3a8a0c2e747186e271ff"
integrity sha1-aLaqjhYsaoDnbjqKDC50cYbicf8=
dependencies:
define-properties "^1.1.2"
function-bind "^1.0.2"
string.prototype.trimright@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/string.prototype.trimright/-/string.prototype.trimright-2.0.0.tgz#ab4a56d802a01fbe7293e11e84f24dc8164661dd"
integrity sha1-q0pW2AKgH75yk+EehPJNyBZGYd0=
dependencies:
define-properties "^1.1.2"
function-bind "^1.0.2"
string_decoder@~0.10.x: string_decoder@~0.10.x:
version "0.10.31" version "0.10.31"
resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-0.10.31.tgz#62e203bc41766c6c28c9fc84301dab1c5310fa94" resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-0.10.31.tgz#62e203bc41766c6c28c9fc84301dab1c5310fa94"
@ -3345,6 +3370,11 @@ supports-color@^5.3.0:
dependencies: dependencies:
has-flag "^3.0.0" has-flag "^3.0.0"
systeminformation@^4.14.8:
version "4.14.8"
resolved "https://registry.yarnpkg.com/systeminformation/-/systeminformation-4.14.8.tgz#ebb9580e4da98daf438839e65b9c29d9869226a6"
integrity sha512-05wW1YaMBI6LlVtvw2wXQGr0thpX8E0IImYcpbqUiNanfmq8e+V89pDW2L5V/mN8kU37W0VtVySftQ0PwMIXKw==
table@^5.2.3: table@^5.2.3:
version "5.4.6" version "5.4.6"
resolved "https://registry.yarnpkg.com/table/-/table-5.4.6.tgz#1292d19500ce3f86053b05f0e8e7e4a3bb21079e" resolved "https://registry.yarnpkg.com/table/-/table-5.4.6.tgz#1292d19500ce3f86053b05f0e8e7e4a3bb21079e"
@ -3527,9 +3557,9 @@ unset-value@^1.0.0:
isobject "^3.0.0" isobject "^3.0.0"
upath@^1.1.1: upath@^1.1.1:
version "1.1.2" version "1.2.0"
resolved "https://registry.yarnpkg.com/upath/-/upath-1.1.2.tgz#3db658600edaeeccbe6db5e684d67ee8c2acd068" resolved "https://registry.yarnpkg.com/upath/-/upath-1.2.0.tgz#8f66dbcd55a883acdae4408af8b035a5044c1894"
integrity sha512-kXpym8nmDmlCBr7nKdIx8P2jNBa+pBpIUFRnKJ4dr8htyYGJFokkr2ZvERRtUN+9SY+JqXouNgUPtv6JQva/2Q== integrity sha512-aZwGpamFO61g3OlfT7OQCHqhGnW43ieH9WZeP7QxN/G/jS4jfqUkZxoryvJgVPEcrl5NL/ggHsSmLMHuH64Lhg==
uri-js@^4.2.2: uri-js@^4.2.2:
version "4.2.2" version "4.2.2"
@ -3646,10 +3676,10 @@ write@1.0.3:
dependencies: dependencies:
mkdirp "^0.5.1" mkdirp "^0.5.1"
x-xss-protection@1.2.0: x-xss-protection@1.3.0:
version "1.2.0" version "1.3.0"
resolved "https://registry.yarnpkg.com/x-xss-protection/-/x-xss-protection-1.2.0.tgz#3170498ff8e7e8159f4896b27fa4d4810c2ff486" resolved "https://registry.yarnpkg.com/x-xss-protection/-/x-xss-protection-1.3.0.tgz#3e3a8dd638da80421b0e9fff11a2dbe168f6d52c"
integrity sha512-xN0kV+8XfOQM2OPPBdEbGtbvJNNP1pvZR7sE6d44cjJFQG4OiGDdienPg5iOUGswBTiGbBvtYDURd30BMJwwqg== integrity sha512-kpyBI9TlVipZO4diReZMAHWtS0MMa/7Kgx8hwG/EuZLiA6sg4Ah/4TRdASHhRRN3boobzcYgFRUFSgHRge6Qhg==
xtend@^4.0.0: xtend@^4.0.0:
version "4.0.2" version "4.0.2"