2019-09-08 01:56:29 +00:00
|
|
|
const { promisify } = require('util')
|
2019-09-10 16:31:27 +00:00
|
|
|
const { spawn } = require('child_process')
|
2018-09-23 16:28:15 +00:00
|
|
|
const fetch = require('node-fetch')
|
2018-04-13 16:20:57 +00:00
|
|
|
const ffmpeg = require('fluent-ffmpeg')
|
|
|
|
const path = require('path')
|
2018-12-03 07:20:13 +00:00
|
|
|
const sharp = require('sharp')
|
2019-09-08 01:56:29 +00:00
|
|
|
const si = require('systeminformation')
|
Updates (very important to read)
Client-side CSS & JS files will now be processed with Gulp.
Gulp tasks are configured in gulpfile.js file.
CSS files will be optimized with postcss-preset-env, which will
auto-add vendor prefixes and convert any parts necessary for browsers
compatibility.
Afterwards they will be minified with cssnano.
JS files will be optimized with bublé,
likewise for browsers compatibility.
Afterwards they will be minified with terser.
Unprocessed CSS & JS files will now be located at src directory, while
the processed results will be located at dist directory.
Due to bublé, the JS files should now be compatible up to IE 11
at the minimum.
Previously the safe would not work in IE 11 due to extensive usage of
template literals.
Due to that as well, JS files in src directory will now extensively use
arrow functions for my personal comfort (as they will be converted too).
The server will use the processed files at dist directory by default.
If you want to rebuild the files by your own, you can run "yarn build".
Gulp is a development dependency, so make sure you have installed all
development dependencies (i.e. NOT using "yarn install --production").
---
yarn lint -> gulp lint
yarn build -> gulp default
yarn watch -> gulp watch
yarn develop -> env NODE_ENV=development yarn watch
---
Fixed not being able to demote staff into normal users.
/api/token/verify will no longer respond with 401 HTTP error code,
unless an error occurred (which will be 500 HTTP error code).
Fixed /nojs route not displaying file's original name when a duplicate
is found on the server.
Removed is-breeze CSS class name, in favor of Bulma's is-info.
Removed custom styling from auth page, in favor of global styling.
Removed all usage of style HTML attribute in favor of CSS classes.
Renamed js/s/ to js/misc/.
Use loading spinners on dashboard's sidebar menus.
Disable all other sidebar menus when something is loading.
Changed title HTML attribute of disabled control buttons in
uploads & users list.
Hid checkboxes and WIP controls from users list.
Better error messages handling.
Especially homepage will now support CF's HTTP error codes.
Updated various icons.
Also, added fontello config file at public/libs/fontello/config.json.
This should let you edit them more easily with fontello.
Use Gatsby icon for my blog's link in homepage's footer.
A bunch of other improvements here & there.
2019-09-15 06:20:11 +00:00
|
|
|
const paths = require('./pathsController')
|
|
|
|
const perms = require('./permissionController')
|
|
|
|
const config = require('./../config')
|
|
|
|
const logger = require('./../logger')
|
|
|
|
const db = require('knex')(config.database)
|
2017-03-17 04:14:10 +00:00
|
|
|
|
2019-09-08 01:56:29 +00:00
|
|
|
// Shared controller state/helpers; functions are attached to this object below
const self = {
  // ClamAV virus-scan state and configuration
  clamd: {
    scanner: null, // lazily-initialized clamd scanner instance
    timeout: config.uploads.scan.timeout || 5000, // scan timeout in ms
    chunkSize: config.uploads.scan.chunkSize || 64 * 1024, // stream chunk size in bytes
    groupBypass: config.uploads.scan.groupBypass || null // usergroup allowed to skip scans
  },
  gitHash: null, // cached git commit hash, populated elsewhere
  idSet: null, // cached Set of upload identifiers currently in use (null until built)

  idMaxTries: config.uploads.maxTries || 1, // attempts at generating a unique identifier

  // Lower-case extensions eligible for thumbnail generation
  imageExts: ['.webp', '.jpg', '.jpeg', '.gif', '.png', '.tiff', '.tif', '.svg'],
  videoExts: ['.webm', '.mp4', '.wmv', '.avi', '.mov', '.mkv'],

  ffprobe: promisify(ffmpeg.ffprobe), // promise-returning wrapper over ffprobe

  albumsCache: {} // cached rendered album pages, keyed by album id (and `${id}-nojs`)
}
|
|
|
|
|
|
|
|
// Per-category cache for the stats API.
// cache: last generated payload (null until first generation);
// generating: guard flag against concurrent generation;
// generatedAt: timestamp (ms) of the last generation;
// invalidatedAt: timestamp (ms) of the last explicit invalidation
// (only present on categories that support invalidation).
const statsCache = {
  system: {
    cache: null,
    generating: false,
    generatedAt: 0
  },
  disk: {
    cache: null,
    generating: false,
    generatedAt: 0
  },
  albums: {
    cache: null,
    generating: false,
    generatedAt: 0,
    invalidatedAt: 0
  },
  users: {
    cache: null,
    generating: false,
    generatedAt: 0,
    invalidatedAt: 0
  },
  uploads: {
    cache: null,
    generating: false,
    generatedAt: 0,
    invalidatedAt: 0
  }
}
|
2019-04-05 17:32:52 +00:00
|
|
|
|
2019-09-17 04:13:41 +00:00
|
|
|
// Truthy only when every Cloudflare credential needed for cache purging is configured
const cloudflareAuth = config.cloudflare && config.cloudflare.apiKey && config.cloudflare.email && config.cloudflare.zoneId
|
2018-04-29 12:47:24 +00:00
|
|
|
|
2019-09-08 01:56:29 +00:00
|
|
|
self.mayGenerateThumb = extname => {
  // Whether a thumbnail may be generated for a file with this (lower-case)
  // extension, based on the per-category thumbnail config flags.
  const thumbsConfig = config.uploads.generateThumbs
  const asImage = thumbsConfig.image && self.imageExts.includes(extname)
  const asVideo = thumbsConfig.video && self.videoExts.includes(extname)
  return asImage || asVideo
}
|
|
|
|
|
2019-09-08 01:56:29 +00:00
|
|
|
// Compound (multi-dot) extensions that self.extname() must keep intact as a whole.
// Expand if necessary (must be lower case); for now only preserves some known tarballs
const extPreserves = ['.tar.gz', '.tar.z', '.tar.bz2', '.tar.lzma', '.tar.lzo', '.tar.xz']
|
2018-09-17 19:32:27 +00:00
|
|
|
|
2019-09-08 01:56:29 +00:00
|
|
|
self.extname = filename => {
  // Extract a file's extension (always lower case), honoring preserved
  // compound extensions (extPreserves) and multi-archive suffixes.
  // Always return blank string if the filename does not seem to have a valid extension
  // Files such as .DS_Store (anything that starts with a dot, without any extension after) will still be accepted
  if (!/\../.test(filename)) return ''

  let lower = filename.toLowerCase() // due to this, the returned extname will always be lower case
  let multi = ''

  // Peel off multi-archive suffixes (.001, .002, and so on) first
  if (/\.\d{3}$/.test(lower)) {
    const lastDot = lower.lastIndexOf('.')
    multi = lower.slice(lastDot - lower.length)
    lower = lower.slice(0, lastDot)
  }

  // Prefer extensions that must be preserved as a whole
  let extname = extPreserves.find(preserve => lower.endsWith(preserve)) || ''

  // Otherwise fall back to everything after the last dot
  if (!extname)
    extname = lower.slice(lower.lastIndexOf('.') - lower.length) // path.extname(lower)

  return extname + multi
}
|
|
|
|
|
2019-09-08 01:56:29 +00:00
|
|
|
self.escape = (string) => {
  // HTML-escape the characters " & ' < > in a string.
  // Falsy inputs are returned untouched; other inputs are stringified.
  // MIT License
  // Copyright(c) 2012-2013 TJ Holowaychuk
  // Copyright(c) 2015 Andreas Lubbe
  // Copyright(c) 2015 Tiancheng "Timothy" Gu
  if (!string)
    return string

  const str = String(string)

  // Fast path: nothing to escape
  if (!/["'&<>]/.test(str))
    return str

  const entities = {
    '"': '&quot;',
    '&': '&amp;',
    "'": '&#39;',
    '<': '&lt;',
    '>': '&gt;'
  }
  return str.replace(/["'&<>]/g, char => entities[char])
}
|
|
|
|
|
Updates (very important to read)
Client-side CSS & JS files will now be processed with Gulp.
Gulp tasks are configured in gulpfile.js file.
CSS files will be optimized with postcss-preset-env, which will
auto-add vendor prefixes and convert any parts necessary for browsers
compatibility.
Afterwards they will be minified with cssnano.
JS files will be optimized with bublé,
likewise for browsers compatibility.
Afterwards they will be minified with terser.
Unprocessed CSS & JS files will now be located at src directory, while
the processed results will be located at dist directory.
Due to bublé, the JS files should now be compatible up to IE 11
at the minimum.
Previously the safe would not work in IE 11 due to extensive usage of
template literals.
Due to that as well, JS files in src directory will now extensively use
arrow functions for my personal comfort (as they will be converted too).
The server will use the processed files at dist directory by default.
If you want to rebuild the files by your own, you can run "yarn build".
Gulp is a development dependency, so make sure you have installed all
development dependencies (i.e. NOT using "yarn install --production").
---
yarn lint -> gulp lint
yarn build -> gulp default
yarn watch -> gulp watch
yarn develop -> env NODE_ENV=development yarn watch
---
Fixed not being able to demote staff into normal users.
/api/token/verify will no longer respond with 401 HTTP error code,
unless an error occurred (which will be 500 HTTP error code).
Fixed /nojs route not displaying file's original name when a duplicate
is found on the server.
Removed is-breeze CSS class name, in favor of Bulma's is-info.
Removed custom styling from auth page, in favor of global styling.
Removed all usage of style HTML attribute in favor of CSS classes.
Renamed js/s/ to js/misc/.
Use loading spinners on dashboard's sidebar menus.
Disable all other sidebar menus when something is loading.
Changed title HTML attribute of disabled control buttons in
uploads & users list.
Hid checkboxes and WIP controls from users list.
Better error messages handling.
Especially homepage will now support CF's HTTP error codes.
Updated various icons.
Also, added fontello config file at public/libs/fontello/config.json.
This should let you edit them more easily with fontello.
Use Gatsby icon for my blog's link in homepage's footer.
A bunch of other improvements here & there.
2019-09-15 06:20:11 +00:00
|
|
|
self.stripIndents = string => {
  // Remove leading whitespace (indentation) from every line of a
  // multi-line string. Returns undefined for falsy input.
  if (!string) return
  // Strip all leading non-newline whitespace from each line
  const result = string.replace(/^[^\S\n]+/gm, '')
  // NOTE(review): after the replace above no line should retain leading
  // whitespace, so `indent` is expected to always be 0 and the branch
  // below appears unreachable — confirm before relying on it.
  const match = result.match(/^[^\S\n]*(?=\S)/gm)
  const indent = match && Math.min(...match.map(el => el.length))
  if (indent) {
    const regexp = new RegExp(`^.{${indent}}`, 'gm')
    return result.replace(regexp, '')
  }
  return result
}
|
|
|
|
|
2019-09-08 01:56:29 +00:00
|
|
|
self.authorize = async (req, res) => {
  // Authenticate a request via its `token` header.
  // On success: returns the user's db row.
  // On failure: sends the error response itself and returns undefined,
  // so callers must check the return value before proceeding.
  // TODO: Improve usage of this function by the other APIs
  const token = req.headers.token
  if (token === undefined) {
    res.status(401).json({ success: false, description: 'No token provided.' })
    return
  }

  try {
    const user = await db.table('users')
      .where('token', token)
      .first()
    if (user) {
      // Disabled accounts may not authenticate
      if (user.enabled === false || user.enabled === 0) {
        res.json({ success: false, description: 'This account has been disabled.' })
        return
      }
      return user
    }

    res.status(401).json({ success: false, description: 'Invalid token.' })
  } catch (error) {
    logger.error(error)
    res.status(500).json({ success: false, description: 'An unexpected error occurred. Try again?' })
  }
}
|
2017-10-04 00:13:38 +00:00
|
|
|
|
2019-09-08 01:56:29 +00:00
|
|
|
self.generateThumbs = async (name, extname, force) => {
  // Generate a 200x200 PNG thumbnail for an uploaded image or video.
  // name: upload's file name on disk; extname: its lower-case extension
  // (with leading dot); force: regenerate even if a thumbnail exists.
  // Returns true on success (or when generation was skipped, or when the
  // placeholder symlink was created after a failure); false when the
  // extension is not thumbnail-able or the placeholder could not be made.
  const thumbname = path.join(paths.thumbs, name.slice(0, -extname.length) + '.png')

  try {
    // Check if thumbnail already exists
    try {
      const lstat = await paths.lstat(thumbname)
      if (lstat.isSymbolicLink())
        // Unlink if symlink (should be symlink to the placeholder)
        await paths.unlink(thumbname)
      else if (!force)
        // Continue only if it does not exist, unless forced to
        return true
    } catch (error) {
      // Re-throw error (ENOENT simply means no thumbnail yet)
      if (error.code !== 'ENOENT')
        throw error
    }

    // Full path to input file
    const input = path.join(paths.uploads, name)

    // If image extension
    if (self.imageExts.includes(extname)) {
      const resizeOptions = {
        width: 200,
        height: 200,
        fit: 'contain',
        // Transparent background for the 'contain' letterboxing
        background: {
          r: 0,
          g: 0,
          b: 0,
          alpha: 0
        }
      }
      const image = sharp(input)
      const metadata = await image.metadata()
      if (metadata.width > resizeOptions.width || metadata.height > resizeOptions.height) {
        // Larger than 200x200 on some axis: downscale to fit
        await image
          .resize(resizeOptions)
          .toFile(thumbname)
      } else if (metadata.width === resizeOptions.width && metadata.height === resizeOptions.height) {
        // Exactly 200x200 already: just convert to PNG
        await image
          .toFile(thumbname)
      } else {
        // Smaller than 200x200: pad with transparent borders instead of upscaling
        const x = resizeOptions.width - metadata.width
        const y = resizeOptions.height - metadata.height
        await image
          .extend({
            top: Math.floor(y / 2),
            bottom: Math.ceil(y / 2),
            left: Math.floor(x / 2),
            right: Math.ceil(x / 2),
            background: resizeOptions.background
          })
          .toFile(thumbname)
      }
    } else if (self.videoExts.includes(extname)) {
      const metadata = await self.ffprobe(input)
      const duration = parseInt(metadata.format.duration)

      // Skip files that have neither video streams/channels nor valid duration metadata
      // NOTE: the thrown string is intentionally matched by the `suppress`
      // regex list in the catch block below
      if (!metadata.streams || !metadata.streams.some(s => s.codec_type === 'video') || isNaN(duration))
        throw 'File does not have valid required data'

      await new Promise((resolve, reject) => {
        ffmpeg(input)
          .inputOptions([
            // Seek to 20% of the video's duration before grabbing the frame
            `-ss ${duration * 20 / 100}`
          ])
          .output(thumbname)
          .outputOptions([
            // Single frame, scaled to fit within 200x200 keeping aspect ratio
            '-vframes 1',
            '-vf scale=200:200:force_original_aspect_ratio=decrease'
          ])
          .on('error', async error => {
            // Try to unlink thumbnail,
            // since ffmpeg may have created an incomplete thumbnail
            try {
              await paths.unlink(thumbname)
            } catch (err) {
              if (err && err.code !== 'ENOENT')
                logger.error(`[${name}]: ${err.toString()}`)
            }
            return reject(error)
          })
          .on('end', () => resolve(true))
          .run()
      })
    } else {
      // Extension belongs to neither media category: nothing to generate
      return false
    }
  } catch (error) {
    // Suppress error logging for errors matching these patterns
    const errorString = error.toString()
    const suppress = [
      /Input file contains unsupported image format/,
      /Invalid data found when processing input/,
      /File does not have valid required data/
    ]

    if (!suppress.some(t => t.test(errorString)))
      logger.error(`[${name}]: ${errorString}`)

    // Fall back to symlinking the placeholder thumbnail into place
    try {
      await paths.symlink(paths.thumbPlaceholder, thumbname)
      return true
    } catch (err) {
      logger.error(err)
      return false
    }
  }

  return true
}
|
2017-03-17 04:14:10 +00:00
|
|
|
|
2019-11-29 13:42:53 +00:00
|
|
|
self.stripTags = async (name, extname) => {
  // Strip metadata tags (e.g. EXIF) from an uploaded image or video by
  // re-encoding it in place (original is temporarily renamed to tmp-<name>).
  // Returns true, even when the extension matched neither category and
  // nothing was done. Re-throws any processing error after cleaning up.
  const fullpath = path.join(paths.uploads, name)

  // NOTE(review): the image branch does not consult
  // config.uploads.stripTags.image, while the video branch checks
  // config.uploads.stripTags.video — confirm this asymmetry is intentional
  // (i.e. callers gate the image case themselves).
  if (self.imageExts.includes(extname)) {
    // Move the original out of the way, then rewrite it through sharp
    const tmpfile = path.join(paths.uploads, `tmp-${name}`)
    await paths.rename(fullpath, tmpfile)

    try {
      await sharp(tmpfile)
        .toFile(fullpath)
      await paths.unlink(tmpfile)
    } catch (error) {
      // Clean up the temp copy before propagating the failure
      await paths.unlink(tmpfile)
      // Re-throw error
      throw error
    }
  } else if (config.uploads.stripTags.video && self.videoExts.includes(extname)) {
    // Move the original out of the way, then remux it through ffmpeg
    const tmpfile = path.join(paths.uploads, `tmp-${name}`)
    await paths.rename(fullpath, tmpfile)

    try {
      await new Promise((resolve, reject) => {
        ffmpeg(tmpfile)
          .output(fullpath)
          .outputOptions([
            // Experimental.
            // Copy streams without re-encoding; drop global and per-stream metadata
            '-c copy',
            '-map_metadata:g -1:g',
            '-map_metadata:s:v -1:g',
            '-map_metadata:s:a -1:g'
          ])
          .on('error', error => reject(error))
          .on('end', () => resolve(true))
          .run()
      })
      await paths.unlink(tmpfile)
    } catch (error) {
      // Clean up the temp copy before propagating the failure
      await paths.unlink(tmpfile)
      // Re-throw error
      throw error
    }
  }

  return true
}
|
|
|
|
|
2019-09-08 01:56:29 +00:00
|
|
|
self.unlinkFile = async (filename, predb) => {
  // Delete an upload (and its thumbnail, if any) from disk.
  // predb: true when called before the db rows are deleted, in which case
  // the identifiers cache is left alone.
  // A missing file is not an error; any other unlink failure is re-thrown.
  try {
    await paths.unlink(path.join(paths.uploads, filename))
  } catch (error) {
    // Return true if file does not exist
    if (error.code !== 'ENOENT')
      throw error
  }

  const identifier = filename.split('.')[0]

  // Do not remove from identifiers cache on pre-db-deletion
  if (!predb && self.idSet) {
    self.idSet.delete(identifier)
    // logger.log(`Removed ${identifier} from identifiers cache (deleteFile)`)
  }

  // Only media types that may have thumbnails need the extra unlink
  const extname = self.extname(filename)
  const mayHaveThumb = self.imageExts.includes(extname) || self.videoExts.includes(extname)
  if (mayHaveThumb)
    try {
      await paths.unlink(path.join(paths.thumbs, `${identifier}.png`))
    } catch (error) {
      if (error.code !== 'ENOENT')
        throw error
    }
}
|
|
|
|
|
2019-09-08 01:56:29 +00:00
|
|
|
self.bulkDeleteFromDb = async (field, values, user) => {
  // Bulk-delete uploads from disk and database, matched by `field`
  // ('id' or 'name') against `values`, on behalf of `user`.
  // NOTE: `values` is consumed (spliced empty) by the chunking below.
  // Returns the values that could NOT be deleted.
  // Always return an empty array on failure
  if (!user || !['id', 'name'].includes(field) || !values.length)
    return []

  // Chunk the values so each query stays below
  // SQLITE_LIMIT_VARIABLE_NUMBER, which defaults to 999
  // Read more: https://www.sqlite.org/limits.html
  const MAX_VARIABLES_CHUNK_SIZE = 999
  const chunks = []
  while (values.length)
    chunks.push(values.splice(0, MAX_VARIABLES_CHUNK_SIZE))

  let failed = []
  const ismoderator = perms.is(user, 'moderator')

  try {
    let unlinkeds = []
    const albumids = []

    await Promise.all(chunks.map(async chunk => {
      // Moderators may delete anyone's uploads; others only their own
      const files = await db.table('files')
        .whereIn(field, chunk)
        .where(function () {
          if (!ismoderator)
            this.where('userid', user.id)
        })

      // Push files that could not be found in db
      failed = failed.concat(chunk.filter(value => !files.find(file => file[field] === value)))

      // Unlink all found files
      const unlinked = []

      await Promise.all(files.map(async file => {
        try {
          // predb=true: keep identifiers cached until the db rows are gone
          await self.unlinkFile(file.name, true)
          unlinked.push(file)
        } catch (error) {
          logger.error(error)
          failed.push(file[field])
        }
      }))

      if (!unlinked.length) return

      // Delete all unlinked files from db
      await db.table('files')
        .whereIn('id', unlinked.map(file => file.id))
        .del()
      self.invalidateStatsCache('uploads')

      // Now that the rows are gone, free their identifiers for re-use
      if (self.idSet)
        unlinked.forEach(file => {
          const identifier = file.name.split('.')[0]
          self.idSet.delete(identifier)
          // logger.log(`Removed ${identifier} from identifiers cache (bulkDeleteFromDb)`)
        })

      // Push album ids
      unlinked.forEach(file => {
        if (file.albumid && !albumids.includes(file.albumid))
          albumids.push(file.albumid)
      })

      // Push unlinked files
      unlinkeds = unlinkeds.concat(unlinked)
    }))

    if (unlinkeds.length) {
      // Update albums if necessary, but do not wait
      if (albumids.length)
        db.table('albums')
          .whereIn('id', albumids)
          .update('editedAt', Math.floor(Date.now() / 1000))
          .catch(logger.error)

      // Purge Cloudflare's cache if necessary, but do not wait
      if (config.cloudflare.purgeCache)
        self.purgeCloudflareCache(unlinkeds.map(file => file.name), true, true)
          .then(results => {
            for (const result of results)
              if (result.errors.length)
                result.errors.forEach(error => logger.error(`[CF]: ${error}`))
          })
    }
  } catch (error) {
    logger.error(error)
  }

  return failed
}
|
|
|
|
|
2019-09-08 01:56:29 +00:00
|
|
|
self.purgeCloudflareCache = async (names, uploads, thumbs) => {
  // Purge the given names from Cloudflare's cache.
  // names: array of upload file names (when `uploads` is truthy) or page names.
  // uploads: whether names refer to uploaded files (served from config.domain)
  //   as opposed to frontend pages (served from config.homeDomain).
  // thumbs: whether to also purge the files' thumbnail URLs.
  // Returns an array of { success, files, errors } result objects,
  // one per purge request chunk.
  if (!Array.isArray(names) || !names.length || !cloudflareAuth)
    return [{
      success: false,
      files: [],
      // Fixed typo: "occured" -> "occurred"
      errors: ['An unexpected error occurred.']
    }]

  // Uploads are served from the files domain; pages from the home domain
  let domain = config.domain
  if (!uploads) domain = config.homeDomain

  const thumbNames = []
  names = names.map(name => {
    if (uploads) {
      const url = `${domain}/${name}`
      const extname = self.extname(name)
      if (thumbs && self.mayGenerateThumb(extname))
        thumbNames.push(`${domain}/thumbs/${name.slice(0, -extname.length)}.png`)
      return url
    } else {
      // 'home' is a special alias for the root page
      return name === 'home' ? domain : `${domain}/${name}`
    }
  })
  names = names.concat(thumbNames)

  // Split array into multiple arrays with max length of 30 URLs
  // https://api.cloudflare.com/#zone-purge-files-by-url
  // TODO: Handle API rate limits
  const MAX_LENGTH = 30
  const chunks = []
  while (names.length)
    chunks.push(names.splice(0, MAX_LENGTH))

  const url = `https://api.cloudflare.com/client/v4/zones/${config.cloudflare.zoneId}/purge_cache`
  const results = []

  await Promise.all(chunks.map(async chunk => {
    const result = {
      success: false,
      files: chunk,
      errors: []
    }

    try {
      const purge = await fetch(url, {
        method: 'POST',
        body: JSON.stringify({ files: chunk }),
        headers: {
          'Content-Type': 'application/json',
          'X-Auth-Email': config.cloudflare.email,
          'X-Auth-Key': config.cloudflare.apiKey
        }
      })
      const response = await purge.json()
      result.success = response.success
      // Surface Cloudflare's own error list, if any
      if (Array.isArray(response.errors) && response.errors.length)
        result.errors = response.errors.map(error => `${error.code}: ${error.message}`)
    } catch (error) {
      // Network/parse failure: report it in the same result shape
      result.errors = [error.toString()]
    }

    results.push(result)
  }))

  return results
}
|
|
|
|
|
2019-09-08 01:56:29 +00:00
|
|
|
self.bulkDeleteExpired = async (dryrun) => {
  // Find (and, unless dryrun, delete) all uploads whose expiry date has
  // passed. Returns { expired, failed? } where both are arrays of ids.
  const now = Date.now() / 1000

  const rows = await db.table('files')
    .where('expirydate', '<=', now)
    .select('id')
  const result = { expired: rows.map(row => row.id) }

  if (!dryrun) {
    // bulkDeleteFromDb() consumes the array it is given, so pass a copy,
    // acting as the pseudo root user
    const copy = result.expired.slice()
    result.failed = await self.bulkDeleteFromDb('id', copy, { username: 'root' })
  }

  return result
}
|
|
|
|
|
2019-09-17 04:13:41 +00:00
|
|
|
self.invalidateAlbumsCache = albumids => {
  // Drop both the regular and the no-JS cached pages of each album,
  // then invalidate the albums stats cache.
  albumids.forEach(albumid => {
    delete self.albumsCache[albumid]
    delete self.albumsCache[`${albumid}-nojs`]
  })
  self.invalidateStatsCache('albums')
}
|
|
|
|
|
2019-09-08 01:56:29 +00:00
|
|
|
self.invalidateStatsCache = type => {
  // Mark a stats category as stale; only these categories support it.
  const invalidatable = ['albums', 'users', 'uploads']
  if (!invalidatable.includes(type)) return
  statsCache[type].invalidatedAt = Date.now()
}
|
|
|
|
|
2019-09-08 01:56:29 +00:00
|
|
|
self.stats = async (req, res, next) => {
|
|
|
|
const user = await self.authorize(req, res)
|
2019-04-05 17:32:52 +00:00
|
|
|
if (!user) return
|
|
|
|
|
|
|
|
const isadmin = perms.is(user, 'admin')
|
|
|
|
if (!isadmin) return res.status(403).end()
|
|
|
|
|
2019-09-10 16:31:27 +00:00
|
|
|
try {
|
|
|
|
const stats = {}
|
|
|
|
const os = await si.osInfo()
|
2019-09-08 01:56:29 +00:00
|
|
|
|
2019-09-10 16:31:27 +00:00
|
|
|
// System info
|
|
|
|
if (!statsCache.system.cache && statsCache.system.generating) {
|
|
|
|
stats.system = false
|
2019-11-14 08:08:56 +00:00
|
|
|
} else if (((Date.now() - statsCache.system.generatedAt) <= 1000) || statsCache.system.generating) {
|
|
|
|
// Use cache for 1000 ms (1 second)
|
2019-09-10 16:31:27 +00:00
|
|
|
stats.system = statsCache.system.cache
|
|
|
|
} else {
|
|
|
|
statsCache.system.generating = true
|
2019-11-14 08:08:56 +00:00
|
|
|
statsCache.system.generatedAt = Date.now()
|
2019-09-10 16:31:27 +00:00
|
|
|
|
|
|
|
const currentLoad = await si.currentLoad()
|
|
|
|
const mem = await si.mem()
|
|
|
|
|
|
|
|
stats.system = {
|
|
|
|
_types: {
|
|
|
|
byte: ['memoryUsage'],
|
|
|
|
byteUsage: ['systemMemory']
|
|
|
|
},
|
|
|
|
platform: `${os.platform} ${os.arch}`,
|
|
|
|
distro: `${os.distro} ${os.release}`,
|
|
|
|
kernel: os.kernel,
|
|
|
|
cpuLoad: `${currentLoad.currentload.toFixed(1)}%`,
|
|
|
|
cpusLoad: currentLoad.cpus.map(cpu => `${cpu.load.toFixed(1)}%`).join(', '),
|
|
|
|
systemMemory: {
|
|
|
|
used: mem.active,
|
|
|
|
total: mem.total
|
|
|
|
},
|
|
|
|
memoryUsage: process.memoryUsage().rss,
|
|
|
|
nodeVersion: `${process.versions.node}`
|
|
|
|
}
|
2019-04-05 17:32:52 +00:00
|
|
|
|
2019-09-10 16:31:27 +00:00
|
|
|
// Update cache
|
|
|
|
statsCache.system.cache = stats.system
|
|
|
|
statsCache.system.generating = false
|
2019-09-08 01:56:29 +00:00
|
|
|
}
|
2019-04-05 17:32:52 +00:00
|
|
|
|
2019-09-10 16:31:27 +00:00
|
|
|
// Disk usage, only for Linux platform
|
|
|
|
if (os.platform === 'linux')
|
|
|
|
if (!statsCache.disk.cache && statsCache.disk.generating) {
|
|
|
|
stats.disk = false
|
2019-11-14 08:08:56 +00:00
|
|
|
} else if (((Date.now() - statsCache.disk.generatedAt) <= 60000) || statsCache.disk.generating) {
|
|
|
|
// Use cache for 60000 ms (60 seconds)
|
2019-09-10 16:31:27 +00:00
|
|
|
stats.disk = statsCache.disk.cache
|
|
|
|
} else {
|
|
|
|
statsCache.disk.generating = true
|
2019-11-14 08:08:56 +00:00
|
|
|
statsCache.disk.generatedAt = Date.now()
|
2019-09-10 16:31:27 +00:00
|
|
|
|
|
|
|
stats.disk = {
|
|
|
|
_types: {
|
|
|
|
byte: ['uploads', 'thumbs', 'zips', 'chunks'],
|
|
|
|
byteUsage: ['drive']
|
|
|
|
},
|
|
|
|
drive: null,
|
2019-11-13 22:06:59 +00:00
|
|
|
// We pre-assign the keys below to fix their order
|
2019-09-10 16:31:27 +00:00
|
|
|
uploads: 0,
|
|
|
|
thumbs: 0,
|
|
|
|
zips: 0,
|
|
|
|
chunks: 0
|
|
|
|
}
|
|
|
|
|
2019-11-13 22:06:59 +00:00
|
|
|
const subdirs = []
|
|
|
|
|
|
|
|
// Get size of uploads path (excluding sub-directories)
|
2019-09-10 16:31:27 +00:00
|
|
|
await new Promise((resolve, reject) => {
|
|
|
|
const proc = spawn('du', [
|
|
|
|
'--apparent-size',
|
|
|
|
'--block-size=1',
|
|
|
|
'--dereference',
|
2019-09-10 16:47:34 +00:00
|
|
|
'--max-depth=1',
|
2019-09-10 16:31:27 +00:00
|
|
|
'--separate-dirs',
|
|
|
|
paths.uploads
|
|
|
|
])
|
|
|
|
|
|
|
|
proc.stdout.on('data', data => {
|
|
|
|
const formatted = String(data)
|
|
|
|
.trim()
|
|
|
|
.split(/\s+/)
|
2019-11-13 22:06:59 +00:00
|
|
|
for (let i = 0; i < formatted.length; i += 2) {
|
|
|
|
const path = formatted[i + 1]
|
|
|
|
if (!path) return
|
2019-09-10 16:31:27 +00:00
|
|
|
|
2019-11-13 22:06:59 +00:00
|
|
|
if (path !== paths.uploads) {
|
|
|
|
subdirs.push(path)
|
|
|
|
continue
|
|
|
|
}
|
2019-09-10 16:31:27 +00:00
|
|
|
|
2019-11-13 22:06:59 +00:00
|
|
|
stats.disk.uploads = parseInt(formatted[i])
|
|
|
|
}
|
2019-09-10 16:31:27 +00:00
|
|
|
})
|
2019-04-12 00:45:33 +00:00
|
|
|
|
2019-09-10 16:31:27 +00:00
|
|
|
const stderr = []
|
2019-11-14 07:46:49 +00:00
|
|
|
proc.stderr.on('data', data => stderr.push(String(data)))
|
2019-09-10 16:31:27 +00:00
|
|
|
|
|
|
|
proc.on('exit', code => {
|
|
|
|
if (code !== 0) return reject(stderr)
|
|
|
|
resolve()
|
|
|
|
})
|
|
|
|
})
|
|
|
|
|
2019-11-13 22:06:59 +00:00
|
|
|
// Size each sub-directory (e.g. thumbs, zips, chunks) in parallel
await Promise.all(subdirs.map(subdir => {
  return new Promise((resolve, reject) => {
    const proc = spawn('du', [
      '--apparent-size',
      '--block-size=1',
      '--dereference',
      '--summarize',
      subdir
    ])

    proc.stdout.on('data', data => {
      // Expected output: "<bytes> <path>" — exactly two tokens
      const formatted = String(data)
        .trim()
        .split(/\s+/)
      if (formatted.length !== 2) return

      // Keyed by directory basename (e.g. "thumbs", "zips")
      const basename = path.basename(formatted[1])
      stats.disk[basename] = parseInt(formatted[0], 10)

      // Add to types if necessary
      if (!stats.disk._types.byte.includes(basename))
        stats.disk._types.byte.push(basename)
    })

    const stderr = []
    proc.stderr.on('data', data => stderr.push(String(data)))

    proc.on('exit', code => {
      // du exits non-zero on failure; surface captured stderr to the caller
      if (code !== 0) return reject(stderr)
      resolve()
    })
  })
}))
|
|
|
|
|
2019-09-10 16:31:27 +00:00
|
|
|
// Get disk usage of whichever disk uploads path resides on
await new Promise((resolve, reject) => {
  const proc = spawn('df', [
    '--block-size=1',
    '--output=used,size',
    paths.uploads
  ])

  proc.stdout.on('data', data => {
    // Only use the first valid line
    if (stats.disk.drive !== null) return

    // Expected output: a header line followed by a single data line
    const lines = String(data)
      .trim()
      .split('\n')
    if (lines.length !== 2) return

    for (const line of lines) {
      const columns = line.split(/\s+/)
      // Skip lines that have non-number chars (i.e. the header line)
      if (columns.some(w => !/^\d+$/.test(w))) continue

      stats.disk.drive = {
        used: parseInt(columns[0], 10),
        total: parseInt(columns[1], 10)
      }
    }
  })

  const stderr = []
  proc.stderr.on('data', data => stderr.push(String(data)))

  proc.on('exit', code => {
    // df exits non-zero on failure; surface captured stderr to the caller
    if (code !== 0) return reject(stderr)
    resolve()
  })
})

// Update cache
statsCache.disk.cache = stats.disk
statsCache.disk.generating = false
|
|
|
|
}
|
|
|
|
|
|
|
|
// Uploads
if (!statsCache.uploads.cache && statsCache.uploads.generating) {
  // First generation still in flight and nothing cached yet
  stats.uploads = false
} else if ((statsCache.uploads.invalidatedAt < statsCache.uploads.generatedAt) || statsCache.uploads.generating) {
  // Cache still valid (or a refresh is running) — serve cached stats
  stats.uploads = statsCache.uploads.cache
} else {
  statsCache.uploads.generating = true
  statsCache.uploads.generatedAt = Date.now()

  stats.uploads = {
    _types: {
      number: ['total', 'images', 'videos', 'others']
    },
    total: 0,
    images: 0,
    videos: 0,
    others: 0
  }

  // FIX: os.platform is a function; comparing the function reference against
  // 'linux' was always truthy, so the non-Linux fallback branch always ran.
  if (os.platform() !== 'linux') {
    // If not Linux platform, rely on DB for total size
    const uploads = await db.table('files')
      .select('size')
    stats.uploads.total = uploads.length
    stats.uploads.sizeInDb = uploads.reduce((acc, upload) => acc + parseInt(upload.size, 10), 0)
    // Add type information for the new column
    if (!Array.isArray(stats.uploads._types.byte))
      stats.uploads._types.byte = []
    stats.uploads._types.byte.push('sizeInDb')
  } else {
    // On Linux the on-disk size is covered by the du pass; only count rows
    stats.uploads.total = await db.table('files')
      .count('id as count')
      .then(rows => rows[0].count)
  }

  // Classify uploads by file name extension
  stats.uploads.images = await db.table('files')
    .where(function () {
      for (const ext of self.imageExts)
        this.orWhere('name', 'like', `%${ext}`)
    })
    .count('id as count')
    .then(rows => rows[0].count)

  stats.uploads.videos = await db.table('files')
    .where(function () {
      for (const ext of self.videoExts)
        this.orWhere('name', 'like', `%${ext}`)
    })
    .count('id as count')
    .then(rows => rows[0].count)

  // Everything that is neither an image nor a video
  stats.uploads.others = stats.uploads.total - stats.uploads.images - stats.uploads.videos

  // Update cache
  statsCache.uploads.cache = stats.uploads
  statsCache.uploads.generating = false
}
|
|
|
|
|
2019-09-10 16:31:27 +00:00
|
|
|
// Users
if (!statsCache.users.cache && statsCache.users.generating) {
  // First generation still in flight and nothing cached yet
  stats.users = false
} else if ((statsCache.users.invalidatedAt < statsCache.users.generatedAt) || statsCache.users.generating) {
  // Cache still valid (or a refresh is running) — serve cached stats
  stats.users = statsCache.users.cache
} else {
  statsCache.users.generating = true
  statsCache.users.generatedAt = Date.now()

  stats.users = {
    _types: {
      number: ['total', 'disabled']
    },
    total: 0,
    disabled: 0
  }

  // Highest permission first; one counter per permission group
  const permissionKeys = Object.keys(perms.permissions).reverse()
  for (const group of permissionKeys) {
    stats.users[group] = 0
    stats.users._types.number.push(group)
  }

  const users = await db.table('users')
  stats.users.total = users.length
  for (const user of users) {
    if (user.enabled === false || user.enabled === 0)
      stats.users.disabled++

    // This may be inaccurate on installations with customized permissions
    user.permission = user.permission || 0
    const group = permissionKeys.find(p => user.permission === perms.permissions[p])
    if (group !== undefined)
      stats.users[group]++
  }

  // Update cache
  statsCache.users.cache = stats.users
  statsCache.users.generating = false
}
|
|
|
|
|
2019-09-10 16:31:27 +00:00
|
|
|
// Albums
if (!statsCache.albums.cache && statsCache.albums.generating) {
  // First generation still in flight and nothing cached yet
  stats.albums = false
} else if ((statsCache.albums.invalidatedAt < statsCache.albums.generatedAt) || statsCache.albums.generating) {
  // Cache still valid (or a refresh is running) — serve cached stats
  stats.albums = statsCache.albums.cache
} else {
  statsCache.albums.generating = true
  statsCache.albums.generatedAt = Date.now()

  stats.albums = {
    _types: {
      // FIX: previously listed 'active' and 'generatedZip', neither of which
      // exists as a key below; list the actual keys so type info matches.
      number: ['total', 'disabled', 'public', 'downloadable', 'zipGenerated']
    },
    total: 0,
    disabled: 0,
    public: 0,
    downloadable: 0,
    zipGenerated: 0
  }

  const albums = await db.table('albums')
  stats.albums.total = albums.length
  // Identifiers of albums whose ZIP was generated at some point
  const identifiers = []
  for (const album of albums) {
    if (!album.enabled) {
      stats.albums.disabled++
      continue
    }
    if (album.download) stats.albums.downloadable++
    if (album.public) stats.albums.public++
    if (album.zipGeneratedAt) identifiers.push(album.identifier)
  }

  // Only count ZIPs whose files still exist on disk
  await Promise.all(identifiers.map(async identifier => {
    try {
      await paths.access(path.join(paths.zips, `${identifier}.zip`))
      stats.albums.zipGenerated++
    } catch (error) {
      // Missing file just means the ZIP was cleaned up; re-throw anything else
      if (error.code !== 'ENOENT')
        throw error
    }
  }))

  // Update cache
  statsCache.albums.cache = stats.albums
  statsCache.albums.generating = false
}
|
|
|
|
|
2019-09-10 16:31:27 +00:00
|
|
|
// All sections resolved (freshly generated, cached, or still-generating=false)
return res.json({ success: true, stats })
} catch (error) {
  logger.error(error)
  // Reset generating state when encountering any errors
  // so a failed run does not permanently block future regeneration
  Object.keys(statsCache).forEach(key => {
    statsCache[key].generating = false
  })
  return res.status(500).json({ success: false, description: 'An unexpected error occurred. Try again?' })
}
|
|
|
|
}
|
|
|
|
|
2019-09-08 01:56:29 +00:00
|
|
|
// Expose the controller object built throughout this file
module.exports = self
|