const { promisify } = require('util')
const { spawn } = require('child_process')
const fetch = require('node-fetch')
const ffmpeg = require('fluent-ffmpeg')
const path = require('path')
const sharp = require('sharp')
const si = require('systeminformation')

const paths = require('./pathsController')
const perms = require('./permissionController')
const config = require('./../config')
const logger = require('./../logger')
const db = require('knex')(config.database)

const self = {
  clamd: {
    scanner: null,
    timeout: config.uploads.scan.timeout || 5000,
    chunkSize: config.uploads.scan.chunkSize || 64 * 1024,
    groupBypass: config.uploads.scan.groupBypass || null,
    whitelistExtensions: (Array.isArray(config.uploads.scan.whitelistExtensions) &&
      config.uploads.scan.whitelistExtensions.length) ? config.uploads.scan.whitelistExtensions : null,
    maxSize: (parseInt(config.uploads.scan.maxSize) * 1e6) || null
  },

  gitHash: null,
  idSet: null,

  idMaxTries: config.uploads.maxTries || 1,

  imageExts: ['.gif', '.jpeg', '.jpg', '.png', '.svg', '.tif', '.tiff', '.webp'],
  videoExts: ['.3g2', '.3gp', '.asf', '.avchd', '.avi', '.divx', '.evo', '.flv', '.h264', '.h265', '.hevc', '.m2p', '.m2ts', '.m4v', '.mk3d', '.mkv', '.mov', '.mp4', '.mpeg', '.mpg', '.mxf', '.ogg', '.ogv', '.ps', '.qt', '.rmvb', '.ts', '.vob', '.webm', '.wmv'],

  thumbsSize: config.uploads.generateThumbs.size || 200,
  ffprobe: promisify(ffmpeg.ffprobe),

  albumsCache: {},
  timezoneOffset: new Date().getTimezoneOffset()
}

const statsCache = {
  system: {
    cache: null,
    generating: false,
    generatedAt: 0
  },
  disk: {
    cache: null,
    generating: false,
    generatedAt: 0
  },
  albums: {
    cache: null,
    generating: false,
    generatedAt: 0
  },
  users: {
    cache: null,
    generating: false,
    generatedAt: 0
  },
  uploads: {
    cache: null,
    generating: false,
    generatedAt: 0
  }
}

const cloudflareAuth = config.cloudflare && config.cloudflare.zoneId &&
  (config.cloudflare.apiToken || config.cloudflare.userServiceKey ||
  (config.cloudflare.apiKey && config.cloudflare.email))

self.mayGenerateThumb = extname => {
  return (config.uploads.generateThumbs.image && self.imageExts.includes(extname)) ||
    (config.uploads.generateThumbs.video && self.videoExts.includes(extname))
}

// Expand if necessary (must be lower case); for now only preserves some known tarballs
const extPreserves = ['.tar.gz', '.tar.z', '.tar.bz2', '.tar.lzma', '.tar.lzo', '.tar.xz']

self.extname = filename => {
  // Always return blank string if the filename does not seem to have a valid extension
  // Files such as .DS_Store (anything that starts with a dot, without any extension after) will still be accepted
  if (!/\../.test(filename)) return ''

  let lower = filename.toLowerCase() // due to this, the returned extname will always be lower case
  let multi = ''
  let extname = ''

  // check for multi-archive extensions (.001, .002, and so on)
  if (/\.\d{3}$/.test(lower)) {
    multi = lower.slice(lower.lastIndexOf('.') - lower.length)
    lower = lower.slice(0, lower.lastIndexOf('.'))
  }

  // check against extensions that must be preserved
  for (const extPreserve of extPreserves)
    if (lower.endsWith(extPreserve)) {
      extname = extPreserve
      break
    }

  if (!extname)
    extname = lower.slice(lower.lastIndexOf('.') - lower.length) // path.extname(lower)

  return extname + multi
}
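
// Example results (a quick sketch of the rules above; file names are made up):
//   self.extname('Photo.JPG')          // => '.jpg' (always lower case)
//   self.extname('backup.tar.gz')      // => '.tar.gz' (preserved tarball extension)
//   self.extname('archive.tar.gz.001') // => '.tar.gz.001' (multi-archive suffix kept)
//   self.extname('README')             // => '' (no extension at all)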

self.escape = string => {
  // MIT License
  // Copyright(c) 2012-2013 TJ Holowaychuk
  // Copyright(c) 2015 Andreas Lubbe
  // Copyright(c) 2015 Tiancheng "Timothy" Gu

  if (!string)
    return string

  const str = String(string)
  const match = /["'&<>]/.exec(str)

  if (!match)
    return str

  let escape
  let html = ''
  let index = 0
  let lastIndex = 0

  for (index = match.index; index < str.length; index++) {
    switch (str.charCodeAt(index)) {
      case 34: // "
        escape = '&quot;'
        break
      case 38: // &
        escape = '&amp;'
        break
      case 39: // '
        escape = '&#39;'
        break
      case 60: // <
        escape = '&lt;'
        break
      case 62: // >
        escape = '&gt;'
        break
      default:
        continue
    }

    if (lastIndex !== index)
      html += str.substring(lastIndex, index)

    lastIndex = index + 1
    html += escape
  }

  return lastIndex !== index
    ? html + str.substring(lastIndex, index)
    : html
}
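
// Example (a sketch of what the escaper produces for a hypothetical input):
//   self.escape('<a href="/">Tom & Jerry</a>')
//   // => '&lt;a href=&quot;/&quot;&gt;Tom &amp; Jerry&lt;/a&gt;'
// Falsy inputs (null, undefined, '') are returned unchanged.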

self.stripIndents = string => {
  if (!string) return
  const result = string.replace(/^[^\S\n]+/gm, '')
  const match = result.match(/^[^\S\n]*(?=\S)/gm)
  const indent = match && Math.min(...match.map(el => el.length))
  if (indent) {
    const regexp = new RegExp(`^.{${indent}}`, 'gm')
    return result.replace(regexp, '')
  }
  return result
}
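
// Example (a sketch with a made-up multi-line string):
//   self.stripIndents('  line one\n    line two')
//   // => 'line one\nline two' (leading whitespace is stripped from every line)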

self.authorize = async (req, res) => {
  // TODO: Improve usage of this function by the other APIs
  const token = req.headers.token
  if (token === undefined) {
    res.status(401).json({ success: false, description: 'No token provided.' })
    return
  }

  try {
    const user = await db.table('users')
      .where('token', token)
      .first()
    if (user) {
      if (user.enabled === false || user.enabled === 0) {
        res.json({ success: false, description: 'This account has been disabled.' })
        return
      }
      return user
    }

    res.status(401).json({ success: false, description: 'Invalid token.' })
  } catch (error) {
    logger.error(error)
    res.status(500).json({ success: false, description: 'An unexpected error occurred. Try again?' })
  }
}
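
// Usage sketch (hypothetical route handler; the real routes live elsewhere in the codebase):
//   routes.get('/example', async (req, res) => {
//     const user = await self.authorize(req, res)
//     if (!user) return // authorize() has already sent the 401/500 response
//     // ... proceed using user.id, user.username, etc. ...
//   })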

self.generateThumbs = async (name, extname, force) => {
  const thumbname = path.join(paths.thumbs, name.slice(0, -extname.length) + '.png')

  try {
    // Check if thumbnail already exists
    try {
      const lstat = await paths.lstat(thumbname)
      if (lstat.isSymbolicLink())
        // Unlink if symlink (should be symlink to the placeholder)
        await paths.unlink(thumbname)
      else if (!force)
        // Continue only if it does not exist, unless forced to
        return true
    } catch (error) {
      // Re-throw error
      if (error.code !== 'ENOENT')
        throw error
    }

    // Full path to input file
    const input = path.join(paths.uploads, name)

    // If image extension
    if (self.imageExts.includes(extname)) {
      const resizeOptions = {
        width: self.thumbsSize,
        height: self.thumbsSize,
        fit: 'contain',
        background: {
          r: 0,
          g: 0,
          b: 0,
          alpha: 0
        }
      }
      const image = sharp(input)
      const metadata = await image.metadata()
      if (metadata.width > resizeOptions.width || metadata.height > resizeOptions.height) {
        await image
          .resize(resizeOptions)
          .toFile(thumbname)
      } else if (metadata.width === resizeOptions.width && metadata.height === resizeOptions.height) {
        await image
          .toFile(thumbname)
      } else {
        const x = resizeOptions.width - metadata.width
        const y = resizeOptions.height - metadata.height
        await image
          .extend({
            top: Math.floor(y / 2),
            bottom: Math.ceil(y / 2),
            left: Math.floor(x / 2),
            right: Math.ceil(x / 2),
            background: resizeOptions.background
          })
          .toFile(thumbname)
      }
    } else if (self.videoExts.includes(extname)) {
      const metadata = await self.ffprobe(input)

      const duration = parseInt(metadata.format.duration)
      if (isNaN(duration))
        throw 'Warning: File does not have valid duration metadata'

      const videoStream = metadata.streams && metadata.streams.find(s => s.codec_type === 'video')
      if (!videoStream || !videoStream.width || !videoStream.height)
        throw 'Warning: File does not have valid video stream metadata'

      await new Promise((resolve, reject) => {
        ffmpeg(input)
          .on('error', error => reject(error))
          .on('end', () => resolve())
          .screenshots({
            folder: paths.thumbs,
            filename: name.slice(0, -extname.length) + '.png',
            timestamps: ['20%'],
            size: videoStream.width >= videoStream.height
              ? `${self.thumbsSize}x?`
              : `?x${self.thumbsSize}`
          })
      })
        .catch(error => error) // Error passthrough
        .then(async error => {
          // FFMPEG would just warn instead of exiting with errors when dealing with incomplete files
          // Sometimes FFMPEG would throw errors but actually somehow succeeded in making the thumbnails
          // (this could be a fallback mechanism of fluent-ffmpeg library instead)
          // So instead we check if the thumbnail exists to really make sure
          try {
            await paths.lstat(thumbname)
            return true
          } catch (err) {
            if (err.code === 'ENOENT')
              throw error || 'Warning: FFMPEG exited with empty output file'
            else
              throw error || err
          }
        })
    } else {
      return false
    }
  } catch (error) {
    logger.error(`[${name}]: ${error.toString().trim()}`)
    try {
      await paths.symlink(paths.thumbPlaceholder, thumbname)
      return true
    } catch (err) {
      logger.error(err)
      return false
    }
  }

  return true
}
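
// Usage sketch (made-up file name; returns true once a thumbnail or the placeholder
// symlink exists in paths.thumbs, and false for extensions it does not handle):
//   const extname = self.extname('abcd1234.mp4') // => '.mp4'
//   if (self.mayGenerateThumb(extname))
//     await self.generateThumbs('abcd1234.mp4', extname, false)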

self.stripTags = async (name, extname) => {
  const fullpath = path.join(paths.uploads, name)

  if (self.imageExts.includes(extname)) {
    const tmpfile = path.join(paths.uploads, `tmp-${name}`)
    await paths.rename(fullpath, tmpfile)

    try {
      await sharp(tmpfile)
        .toFile(fullpath)
      await paths.unlink(tmpfile)
    } catch (error) {
      await paths.unlink(tmpfile)
      // Re-throw error
      throw error
    }
  } else if (config.uploads.stripTags.video && self.videoExts.includes(extname)) {
    const tmpfile = path.join(paths.uploads, `tmp-${name}`)
    await paths.rename(fullpath, tmpfile)

    try {
      await new Promise((resolve, reject) => {
        ffmpeg(tmpfile)
          .output(fullpath)
          .outputOptions([
            // Experimental.
            '-c copy',
            '-map_metadata:g -1:g',
            '-map_metadata:s:v -1:g',
            '-map_metadata:s:a -1:g'
          ])
          .on('error', error => reject(error))
          .on('end', () => resolve(true))
          .run()
      })
      await paths.unlink(tmpfile)
    } catch (error) {
      await paths.unlink(tmpfile)
      // Re-throw error
      throw error
    }
  }

  return true
}
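
// For reference (an assumption about the equivalent CLI, not something the code shells out to):
// the ffmpeg output options used above are roughly the same as running
//   ffmpeg -i tmp-<name> -map_metadata -1 -c copy <name>
// i.e. copy the streams untouched while dropping global and per-stream metadata tags.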

self.unlinkFile = async (filename, predb) => {
  try {
    await paths.unlink(path.join(paths.uploads, filename))
  } catch (error) {
    // Return true if file does not exist
    if (error.code !== 'ENOENT')
      throw error
  }

  const identifier = filename.split('.')[0]

  // Do not remove from identifiers cache on pre-db-deletion
  // eslint-disable-next-line curly
  if (!predb && self.idSet) {
    self.idSet.delete(identifier)
    // logger.log(`Removed ${identifier} from identifiers cache (deleteFile)`)
  }

  const extname = self.extname(filename)
  if (self.imageExts.includes(extname) || self.videoExts.includes(extname))
    try {
      await paths.unlink(path.join(paths.thumbs, `${identifier}.png`))
    } catch (error) {
      if (error.code !== 'ENOENT')
        throw error
    }
}

self.bulkDeleteFromDb = async (field, values, user) => {
  // Always return an empty array on failure
  if (!user || !['id', 'name'].includes(field) || !values.length)
    return []

  // SQLITE_LIMIT_VARIABLE_NUMBER, which defaults to 999
  // Read more: https://www.sqlite.org/limits.html
  const MAX_VARIABLES_CHUNK_SIZE = 999
  const chunks = []
  while (values.length)
    chunks.push(values.splice(0, MAX_VARIABLES_CHUNK_SIZE))

  let failed = []
  const ismoderator = perms.is(user, 'moderator')

  try {
    let unlinkeds = []
    const albumids = []

    await Promise.all(chunks.map(async chunk => {
      const files = await db.table('files')
        .whereIn(field, chunk)
        .where(function () {
          if (!ismoderator)
            this.where('userid', user.id)
        })

      // Push files that could not be found in db
      failed = failed.concat(chunk.filter(value => !files.find(file => file[field] === value)))

      // Unlink all found files
      const unlinked = []

      await Promise.all(files.map(async file => {
        try {
          await self.unlinkFile(file.name, true)
          unlinked.push(file)
        } catch (error) {
          logger.error(error)
          failed.push(file[field])
        }
      }))

      if (!unlinked.length) return

      // Delete all unlinked files from db
      await db.table('files')
        .whereIn('id', unlinked.map(file => file.id))
        .del()
      self.invalidateStatsCache('uploads')

      if (self.idSet)
        unlinked.forEach(file => {
          const identifier = file.name.split('.')[0]
          self.idSet.delete(identifier)
          // logger.log(`Removed ${identifier} from identifiers cache (bulkDeleteFromDb)`)
        })

      // Push album ids
      unlinked.forEach(file => {
        if (file.albumid && !albumids.includes(file.albumid))
          albumids.push(file.albumid)
      })

      // Push unlinked files
      unlinkeds = unlinkeds.concat(unlinked)
    }))

    if (unlinkeds.length) {
      // Update albums if necessary, but do not wait
      if (albumids.length) {
        db.table('albums')
          .whereIn('id', albumids)
          .update('editedAt', Math.floor(Date.now() / 1000))
          .catch(logger.error)
        self.invalidateAlbumsCache(albumids)
      }

      // Purge Cloudflare's cache if necessary, but do not wait
      if (config.cloudflare.purgeCache)
        self.purgeCloudflareCache(unlinkeds.map(file => file.name), true, true)
          .then(results => {
            for (const result of results)
              if (result.errors.length)
                result.errors.forEach(error => logger.error(`[CF]: ${error}`))
          })
    }
  } catch (error) {
    logger.error(error)
  }

  return failed
}
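
// Chunking sketch: with SQLite's default limit of 999 bound variables per statement,
// deleting e.g. 2500 uploads by id becomes three whereIn() batches of 999 + 999 + 502.
// Hypothetical call (moderators may target other users' files, regular users only their own):
//   const failed = await self.bulkDeleteFromDb('id', [10, 11, 12], user)
//   // => array of ids/names that could not be deleted (empty on full success)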

self.purgeCloudflareCache = async (names, uploads, thumbs) => {
  const errors = []
  if (!cloudflareAuth)
    errors.push('Cloudflare auth is incomplete or missing')
  if (!Array.isArray(names) || !names.length)
    errors.push('Names array is invalid or empty')
  if (errors.length)
    return [{ success: false, files: [], errors }]

  let domain = config.domain
  if (!uploads) domain = config.homeDomain

  const thumbNames = []
  names = names.map(name => {
    if (uploads) {
      const url = `${domain}/${name}`
      const extname = self.extname(name)
      if (thumbs && self.mayGenerateThumb(extname))
        thumbNames.push(`${domain}/thumbs/${name.slice(0, -extname.length)}.png`)
      return url
    } else {
      return name === 'home' ? domain : `${domain}/${name}`
    }
  })
  names = names.concat(thumbNames)

  // Split array into multiple arrays with max length of 30 URLs
  // https://api.cloudflare.com/#zone-purge-files-by-url
  // TODO: Handle API rate limits
  const MAX_LENGTH = 30
  const chunks = []
  while (names.length)
    chunks.push(names.splice(0, MAX_LENGTH))

  const url = `https://api.cloudflare.com/client/v4/zones/${config.cloudflare.zoneId}/purge_cache`
  const results = []

  await Promise.all(chunks.map(async chunk => {
    const result = {
      success: false,
      files: chunk,
      errors: []
    }

    try {
      const headers = {
        'Content-Type': 'application/json'
      }
      if (config.cloudflare.apiToken) {
        headers.Authorization = `Bearer ${config.cloudflare.apiToken}`
      } else if (config.cloudflare.userServiceKey) {
        headers['X-Auth-User-Service-Key'] = config.cloudflare.userServiceKey
      } else if (config.cloudflare.apiKey && config.cloudflare.email) {
        headers['X-Auth-Key'] = config.cloudflare.apiKey
        headers['X-Auth-Email'] = config.cloudflare.email
      }

      const purge = await fetch(url, {
        method: 'POST',
        body: JSON.stringify({ files: chunk }),
        headers
      })

      const response = await purge.json()
      result.success = response.success
      if (Array.isArray(response.errors) && response.errors.length)
        result.errors = response.errors.map(error => `${error.code}: ${error.message}`)
    } catch (error) {
      result.errors = [error.toString()]
    }

    results.push(result)
  }))

  return results
}
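
// Usage sketch (made-up file name; second argument marks the names as upload URLs,
// third also purges their thumbnail URLs). On success, roughly:
//   const results = await self.purgeCloudflareCache(['abcd.jpg'], true, true)
//   // => [{ success: true, files: ['<config.domain>/abcd.jpg', '<config.domain>/thumbs/abcd.png'], errors: [] }]
// Each chunk of up to 30 URLs yields one entry in the returned array.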

self.bulkDeleteExpired = async (dryrun, verbose) => {
  const timestamp = Date.now() / 1000
  const fields = ['id']
  if (verbose) fields.push('name')
  const sudo = { username: 'root' }

  const result = {}
  result.expired = await db.table('files')
    .where('expirydate', '<=', timestamp)
    .select(fields)

  if (!dryrun) {
    // Make a shallow copy
    const field = fields[0]
    const values = result.expired.slice().map(row => row[field])
    result.failed = await self.bulkDeleteFromDb(field, values, sudo)
  }

  return result
}
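
// Usage sketch: with dryrun = true this only reports what would be deleted, e.g.
//   const dry = await self.bulkDeleteExpired(true, true)
//   // => { expired: [{ id: 123, name: 'abcd.jpg' }] } (hypothetical row; no "failed" key on dry runs)
// With dryrun = false, the expired rows are handed to bulkDeleteFromDb() using the
// sudo ({ username: 'root' }) user object above.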

self.invalidateAlbumsCache = albumids => {
  for (const albumid of albumids) {
    delete self.albumsCache[albumid]
    delete self.albumsCache[`${albumid}-nojs`]
  }
}

self.invalidateStatsCache = type => {
  if (!['albums', 'users', 'uploads'].includes(type)) return
  statsCache[type].cache = null
}

self.stats = async (req, res, next) => {
  const user = await self.authorize(req, res)
  if (!user) return

  const isadmin = perms.is(user, 'admin')
  if (!isadmin) return res.status(403).end()

  try {
    const stats = {}
    const os = await si.osInfo()

    // System info
    if (!statsCache.system.cache && statsCache.system.generating) {
      stats.system = false
    } else if (((Date.now() - statsCache.system.generatedAt) <= 1000) || statsCache.system.generating) {
      // Use cache for 1000 ms (1 second)
      stats.system = statsCache.system.cache
    } else {
      statsCache.system.generating = true
      statsCache.system.generatedAt = Date.now()

      const currentLoad = await si.currentLoad()
      const mem = await si.mem()

      stats.system = {
        _types: {
          byte: ['memoryUsage'],
          byteUsage: ['systemMemory']
        },
        platform: `${os.platform} ${os.arch}`,
        distro: `${os.distro} ${os.release}`,
        kernel: os.kernel,
        cpuLoad: `${currentLoad.currentload.toFixed(1)}%`,
        cpusLoad: currentLoad.cpus.map(cpu => `${cpu.load.toFixed(1)}%`).join(', '),
        systemMemory: {
          used: mem.active,
          total: mem.total
        },
        memoryUsage: process.memoryUsage().rss,
        nodeVersion: `${process.versions.node}`
      }

      // Update cache
      statsCache.system.cache = stats.system
      statsCache.system.generating = false
    }

    // Disk usage, only for Linux platform
    if (os.platform === 'linux')
      if (!statsCache.disk.cache && statsCache.disk.generating) {
        stats.disk = false
      } else if (((Date.now() - statsCache.disk.generatedAt) <= 60000) || statsCache.disk.generating) {
        // Use cache for 60000 ms (60 seconds)
        stats.disk = statsCache.disk.cache
      } else {
        statsCache.disk.generating = true
        statsCache.disk.generatedAt = Date.now()

        stats.disk = {
          _types: {
            byteUsage: ['drive']
          },
          drive: null
        }

        // Linux-only extended disk stats
        if (config.linuxDiskStats) {
          // We pre-assign the keys below to fix their order
          stats.disk._types.byte = ['uploads', 'thumbs', 'zips', 'chunks']
          stats.disk.uploads = 0
          stats.disk.thumbs = 0
          stats.disk.zips = 0
          stats.disk.chunks = 0

          const subdirs = []

          // Get size of uploads path (excluding sub-directories)
          await new Promise((resolve, reject) => {
            const proc = spawn('du', [
              '--apparent-size',
              '--block-size=1',
              '--dereference',
              '--max-depth=1',
              '--separate-dirs',
              paths.uploads
            ])

            proc.stdout.on('data', data => {
              const formatted = String(data)
                .trim()
                .split(/\s+/)
              for (let i = 0; i < formatted.length; i += 2) {
                const path = formatted[i + 1]
                if (!path) return

                if (path !== paths.uploads) {
                  subdirs.push(path)
                  continue
                }

                stats.disk.uploads = parseInt(formatted[i])
              }
            })

            const stderr = []
            proc.stderr.on('data', data => stderr.push(String(data)))

            proc.on('exit', code => {
              if (code !== 0) return reject(stderr)
              resolve()
            })
          })

          await Promise.all(subdirs.map(subdir => {
            return new Promise((resolve, reject) => {
              const proc = spawn('du', [
                '--apparent-size',
                '--block-size=1',
                '--dereference',
                '--summarize',
                subdir
              ])

              proc.stdout.on('data', data => {
                const formatted = String(data)
                  .trim()
                  .split(/\s+/)
                if (formatted.length !== 2) return

                const basename = path.basename(formatted[1])
                stats.disk[basename] = parseInt(formatted[0])

                // Add to types if necessary
                if (!stats.disk._types.byte.includes(basename))
                  stats.disk._types.byte.push(basename)
              })

              const stderr = []
              proc.stderr.on('data', data => stderr.push(String(data)))

              proc.on('exit', code => {
                if (code !== 0) return reject(stderr)
                resolve()
              })
            })
          }))
        }

        // Get disk usage of whichever disk uploads path resides on
        await new Promise((resolve, reject) => {
          const proc = spawn('df', [
            '--block-size=1',
            '--output=used,size',
            paths.uploads
          ])

          proc.stdout.on('data', data => {
            // Only use the first valid line
            if (stats.disk.drive !== null) return

            const lines = String(data)
              .trim()
              .split('\n')
            if (lines.length !== 2) return

            for (const line of lines) {
              const columns = line.split(/\s+/)
              // Skip lines that have non-number chars
              if (columns.some(w => !/^\d+$/.test(w))) continue

              stats.disk.drive = {
                used: parseInt(columns[0]),
                total: parseInt(columns[1])
              }
            }
          })

          const stderr = []
          proc.stderr.on('data', data => stderr.push(String(data)))

          proc.on('exit', code => {
            if (code !== 0) return reject(stderr)
            resolve()
          })
        })

        // Update cache
        statsCache.disk.cache = stats.disk
        statsCache.disk.generating = false
      }
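
    // Parsing notes (for reference; both tools are assumed to be GNU coreutils):
    // - `du --block-size=1 --max-depth=1 --separate-dirs <dir>` prints one "<bytes> <path>"
    //   pair per directory, which is why the output is split on whitespace and walked two
    //   tokens at a time above.
    // - `df --block-size=1 --output=used,size <dir>` prints a header row followed by one data
    //   row; the header is skipped by rejecting lines that contain non-numeric columns.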

    // Uploads
    if (!statsCache.uploads.cache && statsCache.uploads.generating) {
      stats.uploads = false
    } else if (statsCache.uploads.cache) {
      stats.uploads = statsCache.uploads.cache
    } else {
      statsCache.uploads.generating = true
      statsCache.uploads.generatedAt = Date.now()

      stats.uploads = {
        _types: {
          number: ['total', 'images', 'videos', 'others']
        },
        total: 0,
        images: 0,
        videos: 0,
        others: 0
      }

      if (!config.linuxDiskStats || os.platform !== 'linux') {
        const uploads = await db.table('files')
          .select('size')
        stats.uploads.total = uploads.length
        stats.uploads.sizeInDb = uploads.reduce((acc, upload) => acc + parseInt(upload.size), 0)
        // Add type information for the new column
        if (!Array.isArray(stats.uploads._types.byte))
          stats.uploads._types.byte = []
        stats.uploads._types.byte.push('sizeInDb')
      } else {
        stats.uploads.total = await db.table('files')
          .count('id as count')
          .then(rows => rows[0].count)
      }

      stats.uploads.images = await db.table('files')
        .where(function () {
          for (const ext of self.imageExts)
            this.orWhere('name', 'like', `%${ext}`)
        })
        .count('id as count')
        .then(rows => rows[0].count)

      stats.uploads.videos = await db.table('files')
        .where(function () {
          for (const ext of self.videoExts)
            this.orWhere('name', 'like', `%${ext}`)
        })
        .count('id as count')
        .then(rows => rows[0].count)

      stats.uploads.others = stats.uploads.total - stats.uploads.images - stats.uploads.videos

      // Update cache
      statsCache.uploads.cache = stats.uploads
      statsCache.uploads.generating = false
    }

    // Users
    if (!statsCache.users.cache && statsCache.users.generating) {
      stats.users = false
    } else if (statsCache.users.cache) {
      stats.users = statsCache.users.cache
    } else {
      statsCache.users.generating = true
      statsCache.users.generatedAt = Date.now()

      stats.users = {
        _types: {
          number: ['total', 'disabled']
        },
        total: 0,
        disabled: 0
      }

      const permissionKeys = Object.keys(perms.permissions).reverse()
      permissionKeys.forEach(p => {
        stats.users[p] = 0
        stats.users._types.number.push(p)
      })

      const users = await db.table('users')
      stats.users.total = users.length
      for (const user of users) {
        if (user.enabled === false || user.enabled === 0)
          stats.users.disabled++

        // This may be inaccurate on installations with customized permissions
        user.permission = user.permission || 0
        for (const p of permissionKeys)
          if (user.permission === perms.permissions[p]) {
            stats.users[p]++
            break
          }
      }

      // Update cache
      statsCache.users.cache = stats.users
      statsCache.users.generating = false
    }

    // Albums
    if (!statsCache.albums.cache && statsCache.albums.generating) {
      stats.albums = false
    } else if (statsCache.albums.cache) {
      stats.albums = statsCache.albums.cache
    } else {
      statsCache.albums.generating = true
      statsCache.albums.generatedAt = Date.now()

      stats.albums = {
        _types: {
          number: ['total', 'active', 'downloadable', 'public', 'generatedZip']
        },
        total: 0,
        disabled: 0,
        public: 0,
        downloadable: 0,
        zipGenerated: 0
      }

      const albums = await db.table('albums')
      stats.albums.total = albums.length
      const identifiers = []
      for (const album of albums) {
        if (!album.enabled) {
          stats.albums.disabled++
          continue
        }
        if (album.download) stats.albums.downloadable++
        if (album.public) stats.albums.public++
        if (album.zipGeneratedAt) identifiers.push(album.identifier)
      }

      await Promise.all(identifiers.map(async identifier => {
        try {
          await paths.access(path.join(paths.zips, `${identifier}.zip`))
          stats.albums.zipGenerated++
        } catch (error) {
          // Re-throw error
          if (error.code !== 'ENOENT')
            throw error
        }
      }))

      // Update cache
      statsCache.albums.cache = stats.albums
      statsCache.albums.generating = false
    }

    return res.json({ success: true, stats })
  } catch (error) {
    logger.error(error)
    // Reset generating state when encountering any errors
    Object.keys(statsCache).forEach(key => {
      statsCache[key].generating = false
    })
    return res.status(500).json({ success: false, description: 'An unexpected error occurred. Try again?' })
  }
}

module.exports = self