cleaned up some scripts

Author:    Bobby Wibowo
Date:      2020-12-27 18:54:05 +07:00
Parent:    3de44243a2
Commit:    7f3b947eb0
Signature: GPG Key ID 51C3A1E1E22D26CF (no known key found for this signature in database)

4 changed files with 107 additions and 93 deletions

Changed file 1 of 4

@@ -5,19 +5,17 @@ const config = require('./../config')
 const db = require('knex')(config.database)
 
 const self = {
-  mode: null
-}
-
-self.getFiles = async directory => {
-  const names = await paths.readdir(directory)
-  const files = []
-  for (const name of names) {
-    const lstat = await paths.lstat(path.join(directory, name))
-    if (lstat.isFile() && !name.startsWith('.')) {
-      files.push(name)
+  getFiles: async directory => {
+    const names = await paths.readdir(directory)
+    const files = []
+    for (const name of names) {
+      const lstat = await paths.lstat(path.join(directory, name))
+      if (lstat.isFile() && !name.startsWith('.')) {
+        files.push(name)
+      }
     }
+    return files
   }
-  return files
 }
 
 ;(async () => {
@@ -37,8 +35,10 @@ self.getFiles = async directory => {
     `).trim())
   }
 
-  self.mode = parseInt(args[0]) || 0
-  const dryrun = self.mode === 0
+  const mode = parseInt(args[0]) || 0
+  const dryrun = mode === 0
 
+  console.log('Querying uploads\u2026')
   const uploads = await self.getFiles(paths.uploads)
   console.log(`Uploads: ${uploads.length}`)
@@ -49,18 +49,19 @@ self.getFiles = async directory => {
   console.log(`- In DB: ${uploadsDb.length}`)
 
   const uploadsNotInDb = uploads.filter(upload => !uploadsDb.includes(upload))
-  console.log(`- Not in DB: ${uploadsNotInDb.length}`)
+  console.log(`- Stray: ${uploadsNotInDb.length}`)
 
   const thumbs = await self.getFiles(paths.thumbs)
-  console.log(`Thumbs: ${thumbs.length}`)
+  console.log(`Thumbs : ${thumbs.length}`)
 
   const uploadsDbSet = new Set(uploadsDb.map(upload => upload.split('.')[0]))
   const thumbsNotInDb = thumbs.filter(thumb => !uploadsDbSet.has(thumb.slice(0, -4)))
-  console.log(`- Not in DB: ${thumbsNotInDb.length}`)
+  console.log(`- Stray: ${thumbsNotInDb.length}`)
 
   if (dryrun) {
-    console.log('U:', uploadsNotInDb.join(', '))
-    console.log('T:', thumbsNotInDb.join(', '))
+    console.log('Stray uploads:', uploadsNotInDb.join(', '))
+    console.log('Stray thumbs :', thumbsNotInDb.join(', '))
+    console.log('INFO: This was a dry run. No files had been deleted.')
   } else if (!dryrun) {
     for (const upload of uploadsNotInDb) {
       await paths.unlink(path.join(paths.uploads, upload))
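The stray detection above boils down to two set differences between what sits in the uploads/thumbs directories and the names recorded in the database. A standalone sketch of the same comparison, with made-up file names (illustrative only, not part of the commit):

// Stray uploads: files on disk whose full names are not in the DB.
const uploadsDb = ['abcd1234.jpg', 'efgh5678.png']             // names known to the files table
const uploads = ['abcd1234.jpg', 'efgh5678.png', 'orphan.bin'] // names found in the uploads directory
const uploadsNotInDb = uploads.filter(upload => !uploadsDb.includes(upload))
console.log(uploadsNotInDb) // => [ 'orphan.bin' ]

// Stray thumbs: thumbnails are named "<basename>.png", so strip the 4-character
// ".png" suffix and compare against the DB names with their extensions removed.
const thumbs = ['abcd1234.png', 'zzzz9999.png']
const uploadsDbSet = new Set(uploadsDb.map(upload => upload.split('.')[0]))
const thumbsNotInDb = thumbs.filter(thumb => !uploadsDbSet.has(thumb.slice(0, -4)))
console.log(thumbsNotInDb) // => [ 'zzzz9999.png' ]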

Changed file 2 of 4

@@ -1,9 +1,5 @@
 const utils = require('../controllers/utilsController')
 
-const self = {
-  mode: null
-}
-
 ;(async () => {
   const location = process.argv[1].replace(process.cwd() + '/', '')
   const args = process.argv.slice(2)
@@ -22,9 +18,9 @@ const self = {
     `).trim())
   }
 
-  self.mode = parseInt(args[0]) || 0
-  const dryrun = self.mode === 0
-  const quiet = self.mode === 2
+  const mode = parseInt(args[0]) || 0
+  const dryrun = mode === 0
+  const quiet = mode === 2
 
   const result = await utils.bulkDeleteExpired(dryrun, true)
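The parseInt(args[0]) || 0 idiom used here (and in the clean-up script above) is what makes the dry run the default: a missing or non-numeric argument parses to NaN, and NaN || 0 collapses to 0. A standalone illustration of that behaviour (not part of the commit):

// Run as: node parse-mode-example.js [mode]
const args = process.argv.slice(2)
const mode = parseInt(args[0]) || 0 // no argument or a non-number -> NaN -> falls back to 0
const dryrun = mode === 0           // so the safe dry run is the default
const quiet = mode === 2
console.log({ mode, dryrun, quiet })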

Changed file 3 of 4

@@ -6,44 +6,41 @@ const utils = require('../controllers/utilsController')
 const config = require('./../config')
 const db = require('knex')(config.database)
 
-const self = {
-  mode: null
-}
-
 ;(async () => {
   const location = process.argv[1].replace(process.cwd() + '/', '')
   const args = process.argv.slice(2)
 
-  self.mode = parseInt(args[0])
-  if (![0, 1, 2].includes(self.mode) ||
+  const mode = parseInt(args[0])
+  if (![0, 1, 2].includes(mode) ||
     args.includes('--help') ||
     args.includes('-h')) {
     return console.log(utils.stripIndents(`
       Rebuild file hashes.
 
-      Usage :
+      Usage:
       node ${location} <mode=0|1|2> [parallel]
 
       mode:
       0 = Dry run (recalculate hashes, print them, but do NOT store to DB).
-      1 = Recalculate hashes, print them, and store to DB.
-      2 = Quiet (recalculate hashes and store to DB).
+      1 = Recalculate hashes and store to DB.
+      2 = Verbose (recalculate hashes, print them, and store to DB).
 
       parallel:
       Amount of uploads to hash in parallel (not to be confused with multi-threading).
     `).trim())
   }
 
-  const dryrun = self.mode === 0
-  const quiet = self.mode === 2
-  const parallel = Math.min(Math.max(parseInt(args[1]), 0), 32) || 8
+  const dryrun = mode === 0
+  const verbose = [0, 2].includes(mode)
+  const parallel = Math.max(parseInt(args[1]), 1) || 1
+  console.log(`Parallel: ${parallel}`)
 
+  console.log('Querying uploads\u2026')
   const hrstart = process.hrtime()
   const uploads = await db.table('files')
     .select('id', 'name', 'hash')
   console.log(`Uploads : ${uploads.length}`)
-  console.log(`Parallel: ${parallel}`)
 
   let lastProgressOut
   await utils.parallelLimit(uploads.map(upload => {
@@ -53,7 +50,7 @@ const self = {
         .pipe(blake3.createHash())
         .on('data', async hasher => {
           const hash = hasher.toString('hex')
-          if (!quiet) console.log(`${upload.name}: ${hash}`)
+          if (verbose) console.log(`${upload.name}: ${hash}`)
           if (!dryrun && upload.hash !== hash) {
             await db.table('files')
               .update('hash', hash)
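The hashing in this hunk streams each upload through blake3.createHash() and reads the hex digest off the 'data' event. A standalone sketch of that flow, assuming the same blake3 package and stream usage shown above (the hashFile helper and the example path are illustrative, not part of the commit):

const fs = require('fs')
const blake3 = require('blake3')

// Resolve with the hex BLAKE3 digest of a single file, mirroring the stream usage above.
const hashFile = filePath => new Promise((resolve, reject) => {
  fs.createReadStream(filePath)
    .on('error', reject)
    .pipe(blake3.createHash())
    .on('error', reject)
    .on('data', hasher => resolve(hasher.toString('hex')))
})

// hashFile('uploads/example.jpg').then(hash => console.log(hash))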

Changed file 4 of 4

@@ -1,29 +1,26 @@
 const path = require('path')
 const paths = require('../controllers/pathsController')
 const utils = require('../controllers/utilsController')
+const config = require('./../config')
+const db = require('knex')(config.database)
 
 const self = {
   mode: null,
-  force: null,
-  verbose: null,
-  cfcache: null
-}
-
-self.mayGenerateThumb = extname => {
-  return ([1, 3].includes(self.mode) && utils.imageExts.includes(extname)) ||
-    ([2, 3].includes(self.mode) && utils.videoExts.includes(extname))
-}
-
-self.getFiles = async directory => {
-  const names = await paths.readdir(directory)
-  const files = []
-  for (const name of names) {
-    const lstat = await paths.lstat(path.join(directory, name))
-    if (lstat.isFile() && !name.startsWith('.')) {
-      files.push(name)
-    }
-  }
-  return files
-}
+  mayGenerateThumb: extname => {
+    return ([1, 3].includes(self.mode) && utils.imageExts.includes(extname)) ||
+      ([2, 3].includes(self.mode) && utils.videoExts.includes(extname))
+  },
+  getFiles: async directory => {
+    const names = await paths.readdir(directory)
+    const files = []
+    for (const name of names) {
+      const lstat = await paths.lstat(path.join(directory, name))
+      if (lstat.isFile() && !name.startsWith('.')) {
+        files.push(name)
+      }
+    }
+    return files
+  }
+}
 
 ;(async () => {
@@ -31,69 +28,92 @@ self.getFiles = async directory => {
   const args = process.argv.slice(2)
   self.mode = parseInt(args[0])
-  self.force = parseInt(args[1]) || 0
-  self.verbose = parseInt(args[2]) || 0
-  self.cfcache = parseInt(args[3]) || 0
+  const force = parseInt(args[1]) || 0
+  const verbose = parseInt(args[2]) || 0
+  const cfcache = parseInt(args[3]) || 0
+  const parallel = Math.max(parseInt(args[1]), 1) || 1
   if (![1, 2, 3].includes(self.mode) ||
-    ![0, 1].includes(self.force) ||
-    ![0, 1].includes(self.verbose) ||
+    ![0, 1].includes(force) ||
+    ![0, 1, 2].includes(verbose) ||
+    ![0, 1].includes(cfcache) ||
     args.includes('--help') ||
     args.includes('-h')) {
     return console.log(utils.stripIndents(`
       Generate thumbnails.
 
-      Usage :
-      node ${location} <mode=1|2|3> [force=0|1] [verbose=0|1] [cfcache=0|1]
+      Usage:
+      node ${location} <mode=1|2|3> [force=0|1] [verbose=0|1] [cfcache=0|1] [parallel]
 
       mode : 1 = images only, 2 = videos only, 3 = both images and videos
       force : 0 = no force (default), 1 = overwrite existing thumbnails
-      verbose: 0 = only print missing thumbs (default), 1 = print all
-      cfcache: 0 = do not clear cloudflare cache (default), 1 = clear cloudflare cache
+      verbose : 0 = only print missing thumbs (default), 1 = print all, 2 = print nothing
+      cfcache : 0 = do not clear cloudflare cache (default), 1 = clear cloudflare cache
+      parallel: amount of thumbs to generate in parallel (not to be confused with multi-threading).
    `).trim())
   }
 
+  console.log(`Parallel: ${parallel}`)
   console.log('Looking through existing thumbnails\u2026')
-  const uploads = await self.getFiles(paths.uploads)
-  let thumbs = await self.getFiles(paths.thumbs)
-  thumbs = thumbs.map(thumb => {
-    const extname = path.extname(thumb)
-    return thumb.slice(0, -extname.length)
-  })
+  const uploads = await db.table('files')
+    .select('id', 'name')
+  const thumbs = await self.getFiles(paths.thumbs)
+    .then(thumbs => thumbs.map(thumb => {
+      const extname = path.extname(thumb)
+      return thumb.slice(0, -extname.length)
+    }))
 
   console.log(`Found ${thumbs.length} existing thumbnails (may include placeholder symlinks).`)
 
-  if (!self.verbose) {
-    console.log('Verbose logging disabled! Please be patient, this script may appear to be frozen but is actually working in the background.')
-  }
-
   const succeeded = []
   let error = 0
+  let exists = 0
   let skipped = 0
-  for (const upload of uploads) {
-    const extname = utils.extname(upload)
-    const basename = upload.slice(0, -extname.length)
+  let lastProgressOut
+  await utils.parallelLimit(uploads.map(async upload => {
+    const extname = utils.extname(upload.name)
+    const basename = upload.name.slice(0, -extname.length)
 
-    if (thumbs.includes(basename) && !self.force) {
-      if (self.verbose) console.log(`${upload}: thumb exists.`)
-      skipped++
+    if (thumbs.includes(basename) && !force) {
+      if (verbose === 1) {
+        console.log(`${upload.name}: ALREADY EXISTS.`)
+      }
+      exists++
     } else if (!self.mayGenerateThumb(extname)) {
-      if (self.verbose) console.log(`${upload}: extension skipped.`)
+      if (verbose === 1) {
+        console.log(`${upload.name}: EXTENSION SKIPPED.`)
+      }
      skipped++
    } else {
      const start = Date.now()
-      const generated = await utils.generateThumbs(upload, extname, self.force)
-      console.log(`${upload}: ${(Date.now() - start) / 1000}s: ${generated ? 'OK' : 'ERROR'}`)
-      generated ? succeeded.push(upload) : error++
+      const generated = await utils.generateThumbs(upload.name, extname, force)
+      if (verbose !== 2) {
+        console.log(`${upload.name}: ${(Date.now() - start) / 1000}s: ${generated ? 'OK' : 'ERROR'}`)
+      }
+      generated ? succeeded.push({ upload, extname }) : error++
     }
-  }
+  }), parallel, progress => {
+    const now = Date.now()
+    if (!lastProgressOut || (now - lastProgressOut >= 1000) || progress.done === progress.total) {
+      console.log(`Progress: ${progress.done}/${progress.total}`)
+      lastProgressOut = now
+    }
+  })
 
-  console.log(`Success: ${succeeded.length}\nError: ${error}\nSkipped: ${skipped}`)
+  console.log(utils.stripIndents(`
+    ---
+    Success: ${succeeded.length}
+    Error: ${error}
+    Already exists: ${exists}
+    Extension skipped: ${skipped}
+    ---
+  `).trim())
 
-  if (self.cfcache && succeeded.length) {
+  if (cfcache && succeeded.length) {
     console.log('Purging Cloudflare\'s cache...')
-    const results = await utils.purgeCloudflareCache(succeeded.map(name => {
-      const extname = utils.extname(name)
-      return `thumbs/${name.slice(0, -extname.length)}.png`
-    }), true, false)
+    const results = await utils.purgeCloudflareCache(succeeded.map(data =>
+      `thumbs/${data.upload.name.slice(0, -data.extname.length)}.png`
+    ), true, false)
     for (let i = 0; i < results.length; i++) {
       if (results[i].errors.length) {
        results[i].errors.forEach(error => console.error(`CF: ${error}`))
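The progress callback passed to utils.parallelLimit in this hunk throttles logging to roughly one line per second while always reporting the final count. The same pattern isolated as a standalone sketch (illustrative only, not part of the commit):

// Print progress at most once per second, but always print the final one.
let lastProgressOut
const logProgress = progress => {
  const now = Date.now()
  if (!lastProgressOut || (now - lastProgressOut >= 1000) || progress.done === progress.total) {
    console.log(`Progress: ${progress.done}/${progress.total}`)
    lastProgressOut = now
  }
}

// Example: simulate 5 rapid updates; only the first and the last are guaranteed to print.
for (let done = 1; done <= 5; done++) {
  logProgress({ done, total: 5 })
}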