Updated Linux-only disk stats

Restore disk usage stats even when config.linuxDiskStats is off

Said config now only toggles the 'extended' stats, i.e. the disk usage
of each directory within the uploads directory.
Bobby Wibowo 2020-05-02 19:28:13 +07:00
parent ee443b5108
commit 19b4a5e217
GPG Key ID: 51C3A1E1E22D26CF
2 changed files with 67 additions and 65 deletions
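For context, this is roughly the shape of the stats.disk object the dashboard ends up with after this change, based only on the fields visible in the diff below. It is a sketch: the variable names and the sample byte counts are invented, and the value filled into drive comes from a query that is not part of this diff.

// Basic stats (always present on Linux, regardless of config.linuxDiskStats)
const basicDisk = {
  _types: { byteUsage: ['drive'] },
  drive: null // filled in later by the drive-usage query referenced at the end of the diff
}

// With config.linuxDiskStats enabled, the 'extended' per-directory sizes are added
const extendedDisk = {
  _types: { byte: ['uploads', 'thumbs', 'zips', 'chunks'], byteUsage: ['drive'] },
  drive: null,
  uploads: 104857600, // sample byte counts, not real data
  thumbs: 5242880,
  zips: 1048576,
  chunks: 0
}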


@@ -487,8 +487,8 @@ module.exports = {
   cacheControl: false,

   /*
-    Enable Linux-only disk stats in Dashboard's Statistics.
-    This will use a combination of both "du" and "df" binaries.
+    Enable Linux-only extended disk stats in Dashboard's Statistics.
+    This will use "du" binary to query disk usage of each directories within uploads directory.
     Disabled by default as I personally found it to be very slow with +100k uploads
     with my ancient potato server.
   */
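For reference, the option this comment documents is the linuxDiskStats key in the config file. A minimal sketch of the relevant entry (only this key is shown; the surrounding layout is assumed, not copied from the file):

module.exports = {
  // ...
  // Off by default; when enabled, the dashboard additionally runs "du"
  // against each sub-directory of the uploads directory ('extended' stats).
  linuxDiskStats: false
  // ...
}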


@@ -41,6 +41,11 @@ const statsCache = {
     generating: false,
     generatedAt: 0
   },
+  disk: {
+    cache: null,
+    generating: false,
+    generatedAt: 0
+  },
   albums: {
     cache: null,
     generating: false,
@@ -61,13 +66,6 @@ const statsCache = {
   }
 }

-if (config.linuxDiskStats)
-  statsCache.disk = {
-    cache: null,
-    generating: false,
-    generatedAt: 0
-  }
-
 const cloudflareAuth = config.cloudflare && config.cloudflare.apiKey &&
   config.cloudflare.email && config.cloudflare.zoneId
@@ -631,7 +629,7 @@ self.stats = async (req, res, next) => {
   }

   // Disk usage, only for Linux platform
-  if (config.linuxDiskStats && os.platform === 'linux')
+  if (os.platform === 'linux')
     if (!statsCache.disk.cache && statsCache.disk.generating) {
       stats.disk = false
     } else if (((Date.now() - statsCache.disk.generatedAt) <= 60000) || statsCache.disk.generating) {
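The disk entry added to statsCache above follows the same pattern as the other entries: a generating flag guards against concurrent rebuilds, and generatedAt enables the 60-second reuse window visible in this hunk. A condensed, illustrative sketch of that flow follows (the helper name and the generate callback are not from the source):

const CACHE_TTL_MS = 60000

async function cachedStats (entry, generate) {
  // entry is one statsCache member, e.g. statsCache.disk: { cache, generating, generatedAt }
  if (!entry.cache && entry.generating) return false // first build still in progress
  if (((Date.now() - entry.generatedAt) <= CACHE_TTL_MS) || entry.generating) return entry.cache
  entry.generating = true
  try {
    entry.cache = await generate() // e.g. spawn "du"/"df" and assemble the stats object
    entry.generatedAt = Date.now()
  } finally {
    entry.generating = false
  }
  return entry.cache
}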
@@ -643,78 +641,48 @@ self.stats = async (req, res, next) => {
       stats.disk = {
         _types: {
-          byte: ['uploads', 'thumbs', 'zips', 'chunks'],
           byteUsage: ['drive']
         },
-        drive: null,
-        // We pre-assign the keys below to fix their order
-        uploads: 0,
-        thumbs: 0,
-        zips: 0,
-        chunks: 0
+        drive: null
       }

-      const subdirs = []
-
-      // Get size of uploads path (excluding sub-directories)
-      await new Promise((resolve, reject) => {
-        const proc = spawn('du', [
-          '--apparent-size',
-          '--block-size=1',
-          '--dereference',
-          '--max-depth=1',
-          '--separate-dirs',
-          paths.uploads
-        ])
-
-        proc.stdout.on('data', data => {
-          const formatted = String(data)
-            .trim()
-            .split(/\s+/)
-          for (let i = 0; i < formatted.length; i += 2) {
-            const path = formatted[i + 1]
-            if (!path) return
-
-            if (path !== paths.uploads) {
-              subdirs.push(path)
-              continue
-            }
-
-            stats.disk.uploads = parseInt(formatted[i])
-          }
-        })
-
-        const stderr = []
-        proc.stderr.on('data', data => stderr.push(String(data)))
-
-        proc.on('exit', code => {
-          if (code !== 0) return reject(stderr)
-          resolve()
-        })
-      })
-
-      await Promise.all(subdirs.map(subdir => {
-        return new Promise((resolve, reject) => {
-          const proc = spawn('du', [
-            '--apparent-size',
-            '--block-size=1',
-            '--dereference',
-            '--summarize',
-            subdir
-          ])
-
-          proc.stdout.on('data', data => {
-            const formatted = String(data)
-              .trim()
-              .split(/\s+/)
-            if (formatted.length !== 2) return
-
-            const basename = path.basename(formatted[1])
-            stats.disk[basename] = parseInt(formatted[0])
-
-            // Add to types if necessary
-            if (!stats.disk._types.byte.includes(basename))
-              stats.disk._types.byte.push(basename)
+      // Linux-only extended disk stats
+      if (config.linuxDiskStats) {
+        // We pre-assign the keys below to fix their order
+        stats.disk._types.byte = ['uploads', 'thumbs', 'zips', 'chunks']
+        stats.disk.uploads = 0
+        stats.disk.thumbs = 0
+        stats.disk.zips = 0
+        stats.disk.chunks = 0
+
+        const subdirs = []
+
+        // Get size of uploads path (excluding sub-directories)
+        await new Promise((resolve, reject) => {
+          const proc = spawn('du', [
+            '--apparent-size',
+            '--block-size=1',
+            '--dereference',
+            '--max-depth=1',
+            '--separate-dirs',
+            paths.uploads
+          ])
+
+          proc.stdout.on('data', data => {
+            const formatted = String(data)
+              .trim()
+              .split(/\s+/)
+            for (let i = 0; i < formatted.length; i += 2) {
+              const path = formatted[i + 1]
+              if (!path) return
+
+              if (path !== paths.uploads) {
+                subdirs.push(path)
+                continue
+              }
+
+              stats.disk.uploads = parseInt(formatted[i])
+            }
           })

           const stderr = []
           proc.stderr.on('data', data => stderr.push(String(data)))
@@ -725,7 +693,41 @@ self.stats = async (req, res, next) => {
             resolve()
           })
         })
-      }))
+
+        await Promise.all(subdirs.map(subdir => {
+          return new Promise((resolve, reject) => {
+            const proc = spawn('du', [
+              '--apparent-size',
+              '--block-size=1',
+              '--dereference',
+              '--summarize',
+              subdir
+            ])
+
+            proc.stdout.on('data', data => {
+              const formatted = String(data)
+                .trim()
+                .split(/\s+/)
+              if (formatted.length !== 2) return
+
+              const basename = path.basename(formatted[1])
+              stats.disk[basename] = parseInt(formatted[0])
+
+              // Add to types if necessary
+              if (!stats.disk._types.byte.includes(basename))
+                stats.disk._types.byte.push(basename)
+            })
+
+            const stderr = []
+            proc.stderr.on('data', data => stderr.push(String(data)))
+
+            proc.on('exit', code => {
+              if (code !== 0) return reject(stderr)
+              resolve()
+            })
+          })
+        }))
+      }

       // Get disk usage of whichever disk uploads path resides on
       await new Promise((resolve, reject) => {
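The context lines above lead into the drive-usage query that, after this commit, runs for every Linux install whether or not linuxDiskStats is enabled; its body lies outside this diff. As a rough idea of what such a query can look like in the same spawn style, here is a sketch; the actual flags, parsing, and result shape in lolisafe may differ:

const { spawn } = require('child_process')

// Sketch only: ask "df" for the size and available bytes of the filesystem
// holding the uploads path, then derive used space from the two numbers.
const driveUsage = uploadsPath => new Promise((resolve, reject) => {
  const proc = spawn('df', ['--block-size=1', '--output=size,avail', uploadsPath])
  let stdout = ''
  const stderr = []
  proc.stdout.on('data', data => { stdout += data })
  proc.stderr.on('data', data => stderr.push(String(data)))
  proc.on('exit', code => {
    if (code !== 0) return reject(stderr)
    // First line of "df" output is the header, second line holds the numbers
    const [size, avail] = stdout.trim().split('\n')[1].trim().split(/\s+/).map(Number)
    resolve({ total: size, used: size - avail })
  })
})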