Improved code for statistics.

Much more expandable, and should be easier to understand overall.

Make more statistics operations run concurrently to speed them up.

Make the linuxDiskStats config key obsolete by using the systeminformation
package to also query any mounted file systems.
This commit is contained in:
Bobby Wibowo 2020-12-25 21:06:21 +07:00
parent b30d5e4608
commit 346d9864f3
No known key found for this signature in database
GPG Key ID: 51C3A1E1E22D26CF
4 changed files with 274 additions and 372 deletions

View File

@ -581,14 +581,6 @@ module.exports = {
*/ */
cacheControl: false, cacheControl: false,
/*
Enable Linux-only extended disk stats in Dashboard's Statistics.
This will use "du" binary to query disk usage of each directories within uploads directory.
Disabled by default as I personally found it to be very slow with +100k uploads
with my ancient potato server.
*/
linuxDiskStats: false,
/* /*
Folder where to store logs. Folder where to store logs.
NOTE: This is currently unused. NOTE: This is currently unused.

View File

@ -1,5 +1,4 @@
const { promisify } = require('util') const { promisify } = require('util')
const { spawn } = require('child_process')
const fetch = require('node-fetch') const fetch = require('node-fetch')
const ffmpeg = require('fluent-ffmpeg') const ffmpeg = require('fluent-ffmpeg')
const path = require('path') const path = require('path')
@ -38,28 +37,33 @@ const self = {
timezoneOffset: new Date().getTimezoneOffset() timezoneOffset: new Date().getTimezoneOffset()
} }
const statsCache = { const statsData = {
system: { system: {
title: 'System',
cache: null, cache: null,
generating: false, generating: false,
generatedAt: 0 generatedAt: 0
}, },
disk: { fileSystems: {
cache: null, title: 'File Systems',
generating: false,
generatedAt: 0
},
albums: {
cache: null,
generating: false,
generatedAt: 0
},
users: {
cache: null, cache: null,
generating: false, generating: false,
generatedAt: 0 generatedAt: 0
}, },
uploads: { uploads: {
title: 'Uploads',
cache: null,
generating: false,
generatedAt: 0
},
users: {
title: 'Users',
cache: null,
generating: false,
generatedAt: 0
},
albums: {
title: 'Albums',
cache: null, cache: null,
generating: false, generating: false,
generatedAt: 0 generatedAt: 0
@ -602,7 +606,7 @@ self.invalidateAlbumsCache = albumids => {
self.invalidateStatsCache = type => { self.invalidateStatsCache = type => {
if (!['albums', 'users', 'uploads'].includes(type)) return if (!['albums', 'users', 'uploads'].includes(type)) return
statsCache[type].cache = null statsData[type].cache = null
} }
self.stats = async (req, res, next) => { self.stats = async (req, res, next) => {
@ -615,366 +619,266 @@ self.stats = async (req, res, next) => {
try { try {
const hrstart = process.hrtime() const hrstart = process.hrtime()
const stats = {} const stats = {}
Object.keys(statsData).forEach(key => {
// Pre-assign object keys to fix their display order
stats[statsData[key].title] = {}
})
const os = await si.osInfo() const os = await si.osInfo()
await Promise.all([
(async () => {
// System info
const data = statsData.system
// System info if (!data.cache && data.generating) {
if (!statsCache.system.cache && statsCache.system.generating) { stats[data.title] = false
stats.system = false } else if (((Date.now() - data.generatedAt) <= 1000) || data.generating) {
} else if (((Date.now() - statsCache.system.generatedAt) <= 1000) || statsCache.system.generating) { // Use cache for 1000 ms (1 second)
// Use cache for 1000 ms (1 second) stats[data.title] = data.cache
stats.system = statsCache.system.cache } else {
} else { data.generating = true
statsCache.system.generating = true data.generatedAt = Date.now()
statsCache.system.generatedAt = Date.now()
const currentLoad = await si.currentLoad() const currentLoad = await si.currentLoad()
const mem = await si.mem() const mem = await si.mem()
const time = si.time() const time = si.time()
const nodeUptime = process.uptime() const nodeUptime = process.uptime()
if (self.clamscan.instance) { if (self.clamscan.instance) {
try { try {
self.clamscan.version = await self.clamscan.instance.get_version().then(s => s.trim()) self.clamscan.version = await self.clamscan.instance.get_version().then(s => s.trim())
} catch (error) { } catch (error) {
logger.error(error) logger.error(error)
self.clamscan.version = 'Errored when querying version.' self.clamscan.version = 'Errored when querying version.'
}
}
stats[data.title] = {
Platform: `${os.platform} ${os.arch}`,
Distro: `${os.distro} ${os.release}`,
Kernel: os.kernel,
Scanner: self.clamscan.version || 'N/A',
'CPU Load': `${currentLoad.currentload.toFixed(1)}%`,
'CPUs Load': currentLoad.cpus.map(cpu => `${cpu.load.toFixed(1)}%`).join(', '),
'System Memory': {
value: {
used: mem.active,
total: mem.total
},
type: 'byteUsage'
},
'Memory Usage': {
value: process.memoryUsage().rss,
type: 'byte'
},
'System Uptime': {
value: time.uptime,
type: 'uptime'
},
'Node.js': `${process.versions.node}`,
'Service Uptime': {
value: Math.floor(nodeUptime),
type: 'uptime'
}
}
// Update cache
data.cache = stats[data.title]
data.generating = false
} }
} })(),
(async () => {
// File systems
const data = statsData.fileSystems
stats.system = { if (!data.cache && data.generating) {
_types: { stats[data.title] = false
byte: ['memoryUsage'], } else if (((Date.now() - data.generatedAt) <= 60000) || data.generating) {
byteUsage: ['systemMemory'], // Use cache for 60000 ms (60 seconds)
uptime: ['systemUptime', 'nodeUptime'] stats[data.title] = data.cache
}, } else {
platform: `${os.platform} ${os.arch}`, data.generating = true
distro: `${os.distro} ${os.release}`, data.generatedAt = Date.now()
kernel: os.kernel,
scanner: self.clamscan.version || 'N/A',
cpuLoad: `${currentLoad.currentload.toFixed(1)}%`,
cpusLoad: currentLoad.cpus.map(cpu => `${cpu.load.toFixed(1)}%`).join(', '),
systemMemory: {
used: mem.active,
total: mem.total
},
memoryUsage: process.memoryUsage().rss,
systemUptime: time.uptime,
nodeVersion: `${process.versions.node}`,
nodeUptime: Math.floor(nodeUptime)
}
// Update cache stats[data.title] = {}
statsCache.system.cache = stats.system
statsCache.system.generating = false
}
// Disk usage, only for Linux platform const fsSize = await si.fsSize()
if (os.platform === 'linux') { for (const fs of fsSize) {
if (!statsCache.disk.cache && statsCache.disk.generating) { stats[data.title][`${fs.fs} (${fs.type}) on ${fs.mount}`] = {
stats.disk = false value: {
} else if (((Date.now() - statsCache.disk.generatedAt) <= 60000) || statsCache.disk.generating) { total: fs.size,
// Use cache for 60000 ms (60 seconds) used: fs.used
stats.disk = statsCache.disk.cache },
} else { type: 'byteUsage'
statsCache.disk.generating = true }
statsCache.disk.generatedAt = Date.now() }
stats.disk = { // Update cache
_types: { data.cache = stats[data.title]
byteUsage: ['drive'] data.generating = false
},
drive: null
} }
})(),
(async () => {
// Uploads
const data = statsData.uploads
// Linux-only extended disk stats if (!data.cache && data.generating) {
if (config.linuxDiskStats) { stats[data.title] = false
// We pre-assign the keys below to fix their order } else if (data.cache) {
stats.disk._types.byte = ['uploads', 'thumbs', 'zips', 'chunks'] // Cache will be invalidated with self.invalidateStatsCache() after any related operations
stats.disk.uploads = 0 stats[data.title] = data.cache
stats.disk.thumbs = 0 } else {
stats.disk.zips = 0 data.generating = true
stats.disk.chunks = 0 data.generatedAt = Date.now()
const subdirs = [] stats[data.title] = {
Total: 0,
Images: 0,
Videos: 0,
Others: 0,
'Size in DB': {
value: 0,
type: 'byte'
}
}
// Get size of uploads path (excluding sub-directories) await Promise.all([
await new Promise((resolve, reject) => { (async () => {
const proc = spawn('du', [ const uploads = await db.table('files')
'--apparent-size', .select('size')
'--block-size=1', stats[data.title].Total = uploads.length
'--dereference', stats[data.title]['Size in DB'].value = uploads.reduce((acc, upload) => acc + parseInt(upload.size), 0)
'--max-depth=1', })(),
'--separate-dirs', (async () => {
paths.uploads stats[data.title].Images = await db.table('files')
]) .where(function () {
for (const ext of self.imageExts) {
proc.stdout.on('data', data => { this.orWhere('name', 'like', `%${ext}`)
const formatted = String(data) }
.trim() })
.split(/\s+/) .count('id as count')
for (let i = 0; i < formatted.length; i += 2) { .then(rows => rows[0].count)
const path = formatted[i + 1] })(),
if (!path) return (async () => {
stats[data.title].Videos = await db.table('files')
if (path !== paths.uploads) { .where(function () {
subdirs.push(path) for (const ext of self.videoExts) {
continue this.orWhere('name', 'like', `%${ext}`)
} }
})
stats.disk.uploads = parseInt(formatted[i]) .count('id as count')
} .then(rows => rows[0].count)
}) })()
const stderr = []
proc.stderr.on('data', data => stderr.push(String(data)))
proc.on('exit', code => {
if (code !== 0) return reject(stderr)
resolve()
})
})
await Promise.all(subdirs.map(subdir => {
return new Promise((resolve, reject) => {
const proc = spawn('du', [
'--apparent-size',
'--block-size=1',
'--dereference',
'--summarize',
subdir
])
proc.stdout.on('data', data => {
const formatted = String(data)
.trim()
.split(/\s+/)
if (formatted.length !== 2) return
const basename = path.basename(formatted[1])
stats.disk[basename] = parseInt(formatted[0])
// Add to types if necessary
if (!stats.disk._types.byte.includes(basename)) {
stats.disk._types.byte.push(basename)
}
})
const stderr = []
proc.stderr.on('data', data => stderr.push(String(data)))
proc.on('exit', code => {
if (code !== 0) return reject(stderr)
resolve()
})
})
}))
}
// Get disk usage of whichever disk uploads path resides on
await new Promise((resolve, reject) => {
const proc = spawn('df', [
'--block-size=1',
'--output=size,avail',
paths.uploads
]) ])
proc.stdout.on('data', data => { stats[data.title].Others = stats[data.title].Total - stats[data.title].Images - stats[data.title].Videos
// Only use the first valid line
if (stats.disk.drive !== null) return
const lines = String(data) // Update cache
.trim() data.cache = stats[data.title]
.split('\n') data.generating = false
if (lines.length !== 2) return }
})(),
(async () => {
// Users
const data = statsData.users
for (const line of lines) { if (!data.cache && data.generating) {
const columns = line.split(/\s+/) stats[data.title] = false
// Skip lines that have non-number chars } else if (data.cache) {
if (columns.some(w => !/^\d+$/.test(w))) continue // Cache will be invalidated with self.invalidateStatsCache() after any related operations
stats[data.title] = data.cache
} else {
data.generating = true
data.generatedAt = Date.now()
const total = parseInt(columns[0]) stats[data.title] = {
const avail = parseInt(columns[1]) Total: 0,
stats.disk.drive = { Disabled: 0
total, }
used: total - avail
const permissionKeys = Object.keys(perms.permissions).reverse()
permissionKeys.forEach(p => {
stats[data.title][p] = 0
})
const users = await db.table('users')
stats[data.title].Total = users.length
for (const user of users) {
if (user.enabled === false || user.enabled === 0) {
stats[data.title].Disabled++
}
user.permission = user.permission || 0
for (const p of permissionKeys) {
if (user.permission === perms.permissions[p]) {
stats[data.title][p]++
break
} }
} }
})
const stderr = []
proc.stderr.on('data', data => stderr.push(String(data)))
proc.on('exit', code => {
if (code !== 0) return reject(stderr)
resolve()
})
})
// Update cache
statsCache.disk.cache = stats.disk
statsCache.disk.generating = false
}
}
// Uploads
if (!statsCache.uploads.cache && statsCache.uploads.generating) {
stats.uploads = false
} else if (statsCache.uploads.cache) {
stats.uploads = statsCache.uploads.cache
} else {
statsCache.uploads.generating = true
statsCache.uploads.generatedAt = Date.now()
stats.uploads = {
_types: {
number: ['total', 'images', 'videos', 'others']
},
total: 0,
images: 0,
videos: 0,
others: 0
}
if (!config.linuxDiskStats || os.platform !== 'linux') {
const uploads = await db.table('files')
.select('size')
stats.uploads.total = uploads.length
stats.uploads.sizeInDb = uploads.reduce((acc, upload) => acc + parseInt(upload.size), 0)
// Add type information for the new column
if (!Array.isArray(stats.uploads._types.byte)) {
stats.uploads._types.byte = []
}
stats.uploads._types.byte.push('sizeInDb')
} else {
stats.uploads.total = await db.table('files')
.count('id as count')
.then(rows => rows[0].count)
}
stats.uploads.images = await db.table('files')
.where(function () {
for (const ext of self.imageExts) {
this.orWhere('name', 'like', `%${ext}`)
} }
})
.count('id as count')
.then(rows => rows[0].count)
stats.uploads.videos = await db.table('files') // Update cache
.where(function () { data.cache = stats[data.title]
for (const ext of self.videoExts) { data.generating = false
this.orWhere('name', 'like', `%${ext}`) }
})(),
(async () => {
// Albums
const data = statsData.albums
if (!data.cache && data.generating) {
stats[data.title] = false
} else if (data.cache) {
// Cache will be invalidated with self.invalidateStatsCache() after any related operations
stats[data.title] = data.cache
} else {
data.generating = true
data.generatedAt = Date.now()
stats[data.title] = {
Total: 0,
Disabled: 0,
Public: 0,
Downloadable: 0,
'ZIP Generated': 0
} }
})
.count('id as count')
.then(rows => rows[0].count)
stats.uploads.others = stats.uploads.total - stats.uploads.images - stats.uploads.videos const albums = await db.table('albums')
stats[data.title].Total = albums.length
// Update cache const identifiers = []
statsCache.uploads.cache = stats.uploads for (const album of albums) {
statsCache.uploads.generating = false if (!album.enabled) {
} stats[data.title].Disabled++
continue
// Users }
if (!statsCache.users.cache && statsCache.users.generating) { if (album.download) stats[data.title].Downloadable++
stats.users = false if (album.public) stats[data.title].Public++
} else if (statsCache.users.cache) { if (album.zipGeneratedAt) identifiers.push(album.identifier)
stats.users = statsCache.users.cache
} else {
statsCache.users.generating = true
statsCache.users.generatedAt = Date.now()
stats.users = {
_types: {
number: ['total', 'disabled']
},
total: 0,
disabled: 0
}
const permissionKeys = Object.keys(perms.permissions).reverse()
permissionKeys.forEach(p => {
stats.users[p] = 0
stats.users._types.number.push(p)
})
const users = await db.table('users')
stats.users.total = users.length
for (const user of users) {
if (user.enabled === false || user.enabled === 0) {
stats.users.disabled++
}
// This may be inaccurate on installations with customized permissions
user.permission = user.permission || 0
for (const p of permissionKeys) {
if (user.permission === perms.permissions[p]) {
stats.users[p]++
break
} }
await Promise.all(identifiers.map(async identifier => {
try {
await paths.access(path.join(paths.zips, `${identifier}.zip`))
stats[data.title]['ZIP Generated']++
} catch (error) {
// Re-throw error
if (error.code !== 'ENOENT') throw error
}
}))
// Update cache
data.cache = stats[data.title]
data.generating = false
} }
} })()
])
// Update cache
statsCache.users.cache = stats.users
statsCache.users.generating = false
}
// Albums
if (!statsCache.albums.cache && statsCache.albums.generating) {
stats.albums = false
} else if (statsCache.albums.cache) {
stats.albums = statsCache.albums.cache
} else {
statsCache.albums.generating = true
statsCache.albums.generatedAt = Date.now()
stats.albums = {
_types: {
number: ['total', 'active', 'downloadable', 'public', 'generatedZip']
},
total: 0,
disabled: 0,
public: 0,
downloadable: 0,
zipGenerated: 0
}
const albums = await db.table('albums')
stats.albums.total = albums.length
const identifiers = []
for (const album of albums) {
if (!album.enabled) {
stats.albums.disabled++
continue
}
if (album.download) stats.albums.downloadable++
if (album.public) stats.albums.public++
if (album.zipGeneratedAt) identifiers.push(album.identifier)
}
await Promise.all(identifiers.map(async identifier => {
try {
await paths.access(path.join(paths.zips, `${identifier}.zip`))
stats.albums.zipGenerated++
} catch (error) {
// Re-throw error
if (error.code !== 'ENOENT') throw error
}
}))
// Update cache
statsCache.albums.cache = stats.albums
statsCache.albums.generating = false
}
return res.json({ success: true, stats, hrtime: process.hrtime(hrstart) }) return res.json({ success: true, stats, hrtime: process.hrtime(hrstart) })
} catch (error) { } catch (error) {
logger.error(error) logger.error(error)
// Reset generating state when encountering any errors // Reset generating state when encountering any errors
Object.keys(statsCache).forEach(key => { Object.keys(statsData).forEach(key => {
statsCache[key].generating = false statsData[key].generating = false
}) })
return res.status(500).json({ success: false, description: 'An unexpected error occurred. Try again?' }) return res.status(500).json({ success: false, description: 'An unexpected error occurred. Try again?' })
} }

View File

@ -259,11 +259,7 @@ fieldset[disabled] .select select:hover {
th { th {
color: $white-ter; color: $white-ter;
height: 2.25em; height: 2.25em;
font-weight: normal; font-weight: normal
&.capitalize {
text-transform: capitalize
}
} }
thead { thead {

View File

@ -2871,29 +2871,39 @@ page.getStatistics = (params = {}) => {
` `
} else { } else {
try { try {
const types = response.data.stats[keys[i]]._types || {}
const valKeys = Object.keys(response.data.stats[keys[i]]) const valKeys = Object.keys(response.data.stats[keys[i]])
for (let j = 0; j < valKeys.length; j++) { for (let j = 0; j < valKeys.length; j++) {
// Skip keys that starts with an underscore const data = response.data.stats[keys[i]][valKeys[j]]
if (/^_/.test(valKeys[j])) continue const type = typeof data === 'object' ? data.type : 'auto'
const value = typeof data === 'object' ? data.value : data
const value = response.data.stats[keys[i]][valKeys[j]] let parsed
let parsed = value switch (type) {
case 'byte':
// Parse values with some preset formatting parsed = page.getPrettyBytes(value)
if ((types.number || []).includes(valKeys[j])) parsed = value.toLocaleString() break
if ((types.byte || []).includes(valKeys[j])) parsed = page.getPrettyBytes(value) case 'byteUsage':
if ((types.byteUsage || []).includes(valKeys[j])) { parsed = `${page.getPrettyBytes(value.used)} / ${page.getPrettyBytes(value.total)} (${Math.floor(value.used / value.total * 100)}%)`
parsed = `${page.getPrettyBytes(value.used)} / ${page.getPrettyBytes(value.total)} (${Math.floor(value.used / value.total * 100)}%)` break
case 'uptime':
parsed = page.getPrettyUptime(value)
break
case 'auto':
switch (typeof value) {
case 'number':
parsed = value.toLocaleString()
break
default:
parsed = value
}
break
default:
parsed = value
} }
if ((types.uptime || []).includes(valKeys[j])) parsed = page.getPrettyUptime(value)
const string = valKeys[j]
.replace(/([A-Z])/g, ' $1')
.replace(/(^|\s)(cpu|db|zip)/gi, s => s.toUpperCase())
rows += ` rows += `
<tr> <tr>
<th class="capitalize">${string}</th> <th>${valKeys[j]}</th>
<td>${parsed}</td> <td>${parsed}</td>
</tr> </tr>
` `