Mirror of https://github.com/BobbyWibowo/lolisafe.git
Synced 2025-01-31 07:11:33 +00:00
removed utils.parallelLimit

Well, that was a pointless endeavor; I wasn't thinking clearly. Also updated all scripts that previously used it to use a combination of a for-loop and setInterval to print progress.
parent ed27ad18b5
commit 0a19b025a0
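The replacement pattern the message describes is a sequential awaited for-loop plus a setInterval timer that prints progress once per second. Below is a minimal runnable sketch of that pattern; `uploads` and `processUpload` are hypothetical stand-ins for the scripts' real data and per-item work, not names from this commit:

const uploads = ['a.jpg', 'b.png', 'c.mp4'] // hypothetical stand-in
const processUpload = upload =>
  new Promise(resolve => setTimeout(resolve, 1500)) // hypothetical stand-in

const main = async () => {
  let done = 0
  const printProgress = () => {
    console.log(`PROGRESS: ${done}/${uploads.length}`)
    // Stop the timer once every item is accounted for.
    if (done >= uploads.length) clearInterval(progressInterval)
  }
  // Print once per second, plus once immediately.
  const progressInterval = setInterval(printProgress, 1000)
  printProgress()

  for (const upload of uploads) {
    // Sequential: each item is awaited before the next begins.
    await processUpload(upload)
    done++
  }

  // Clear the timer and print the final count, in case the
  // loop finished between ticks.
  clearInterval(progressInterval)
  printProgress()
}

main()

Note the trade-off: the loop processes items strictly one at a time, which is simpler but gives up the concurrency the removed helper aimed for.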
@@ -176,36 +176,6 @@ self.stripIndents = string => {
   return result
 }
 
-self.parallelLimit = (promiseFactories, limit, progressCallback) => {
-  // https://stackoverflow.com/a/40377750
-  // CC BY-SA 3.0
-  const hasProgressCallback = typeof progressCallback === 'function'
-  const total = promiseFactories.length
-  const result = []
-  let cnt = 0
-
-  function chain (promiseFactories) {
-    if (!promiseFactories.length) return
-    const i = cnt++ // preserve order in result
-    if (hasProgressCallback) {
-      progressCallback({ done: cnt, total })
-    }
-    return promiseFactories.shift().then((res) => {
-      result[i] = res // save result
-      return chain(promiseFactories) // append next promise
-    })
-  }
-
-  const arrChains = []
-  while (limit-- > 0 && promiseFactories.length > 0) {
-    // create "limit" chains which run in parallel
-    arrChains.push(chain(promiseFactories))
-  }
-
-  // return when all arrChains are finished
-  return Promise.all(arrChains).then(() => result)
-}
-
 self.authorize = async (req, res) => {
   // TODO: Improve usage of this function by the other APIs
   const token = req.headers.token

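For context on what was removed: the helper ran at most `limit` recursive promise chains over a shared queue and reported `{ done, total }` through an optional callback. The old call sites in the hunks below, however, passed it the result of `uploads.map(...)`, and since `map` invokes its callback (and each `new Promise` executor) immediately, all of the per-upload work started eagerly; the limit only throttled how results were awaited. That is likely the "pointless endeavor" the message refers to. The old call shape, reconstructed from the removed lines below (`parallel` was the user-supplied limit):

await utils.parallelLimit(uploads.map(upload => {
  return new Promise((resolve, reject) => {
    // per-upload work started here, eagerly, as soon as map() ran
    resolve()
  })
}), parallel, progress => {
  console.log(`Progress: ${progress.done}/${progress.total}`)
})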
@@ -38,7 +38,7 @@ const self = {
   const mode = parseInt(args[0]) || 0
   const dryrun = mode === 0
 
-  console.log('Querying uploads\u2026')
+  console.log('Querying and mapping uploads\u2026')
 
   const uploads = await self.getFiles(paths.uploads)
   console.log(`Uploads: ${uploads.length}`)

@@ -19,53 +19,57 @@ const db = require('knex')(config.database)
   Rebuild file hashes.
 
   Usage:
-  node ${location} <mode=0|1|2> [parallel]
+  node ${location} <mode=0|1|2>
 
   mode:
     0 = Dry run (recalculate hashes, print them, but do NOT store to DB).
     1 = Recalculate hashes and store to DB.
     2 = Verbose (recalculate hashes, print them, and store to DB).
-
-  parallel:
-    Amount of uploads to hash in parallel (not to be confused with multi-threading).
   `).trim())
 }
 
 const dryrun = mode === 0
 const verbose = [0, 2].includes(mode)
-const parallel = Math.max(parseInt(args[1]), 1) || 1
 
-console.log(`Parallel: ${parallel}`)
 console.log('Querying uploads\u2026')
 const hrstart = process.hrtime()
 const uploads = await db.table('files')
   .select('id', 'name', 'hash')
 console.log(`Uploads : ${uploads.length}`)
 
-let lastProgressOut
-await utils.parallelLimit(uploads.map(upload => {
-  return new Promise((resolve, reject) => {
+let done = 0
+const printProgress = () => {
+  console.log(`PROGRESS: ${done}/${uploads.length}`)
+  if (done >= uploads.length) clearInterval(progressInterval)
+}
+const progressInterval = setInterval(printProgress, 1000)
+printProgress()
+
+for (const upload of uploads) {
+  await new Promise((resolve, reject) => {
     fs.createReadStream(path.join(paths.uploads, upload.name))
-      .on('error', () => reject)
+      .on('error', reject)
       .pipe(blake3.createHash())
-      .on('data', async hasher => {
-        const hash = hasher.toString('hex')
+      .on('error', reject)
+      .on('data', async source => {
+        const hash = source.toString('hex')
         if (verbose) console.log(`${upload.name}: ${hash}`)
         if (!dryrun && upload.hash !== hash) {
           await db.table('files')
             .update('hash', hash)
             .where('id', upload.id)
         }
+        done++
         resolve()
       })
   }).catch(error => {
     console.log(`${upload.name}: ${error.toString()}`)
   })
-}), parallel, progress => {
-  const now = Date.now()
-  if (!lastProgressOut || (now - lastProgressOut >= 1000) || progress.done === progress.total) {
-    console.log(`Progress: ${progress.done}/${progress.total}`)
-    lastProgressOut = now
-  }
-})
+}
+
+clearInterval(progressInterval)
+printProgress()
 
 const hrend = process.hrtime(hrstart)
 console.log(`Done in : ${(hrend[0] + (hrend[1] / 1e9)).toFixed(4)}s`)

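Beyond dropping the parallel option, the hunk above also fixes an error handler: the old `.on('error', () => reject)` passed an arrow that returns `reject` without ever calling it, so a stream error left the promise pending forever. The rewrite passes `reject` itself, and attaches it a second time after `.pipe()` because `pipe` returns the destination stream and does not forward the source's errors. A small self-contained illustration of the handler fix; the `failing` emitter is a hypothetical stand-in for an erroring read stream:

const EventEmitter = require('events')

// Emits an 'error' on the next tick, like a failing read stream.
const failing = () => {
  const emitter = new EventEmitter()
  process.nextTick(() => emitter.emit('error', new Error('boom')))
  return emitter
}

// Old handler: the arrow returns `reject` without calling it,
// so this promise never settles and the catch never runs.
new Promise((resolve, reject) => {
  failing().on('error', () => reject)
}).catch(() => console.log('old style: rejected')) // never prints

// Fixed handler: `reject` is the listener itself, so it receives
// the error and the promise rejects as intended.
new Promise((resolve, reject) => {
  failing().on('error', reject)
}).catch(error => console.log(`fixed: ${error.message}`)) // fixed: boom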
@@ -31,7 +31,6 @@ const self = {
   const force = parseInt(args[1]) || 0
   const verbose = parseInt(args[2]) || 0
   const cfcache = parseInt(args[3]) || 0
-  const parallel = Math.max(parseInt(args[1]), 1) || 1
 
   if (![1, 2, 3].includes(self.mode) ||
     ![0, 1].includes(force) ||

@@ -43,17 +42,15 @@
   Generate thumbnails.
 
   Usage:
-  node ${location} <mode=1|2|3> [force=0|1] [verbose=0|1] [cfcache=0|1] [parallel]
+  node ${location} <mode=1|2|3> [force=0|1] [verbose=0|1] [cfcache=0|1]
 
   mode    : 1 = images only, 2 = videos only, 3 = both images and videos
   force   : 0 = no force (default), 1 = overwrite existing thumbnails
   verbose : 0 = only print missing thumbs (default), 1 = print all, 2 = print nothing
   cfcache : 0 = do not clear cloudflare cache (default), 1 = clear cloudflare cache
-  parallel: amount of thumbs to generate in parallel (not to be confused with multi-threading).
   `).trim())
 }
 
-console.log(`Parallel: ${parallel}`)
 console.log('Looking through existing thumbnails\u2026')
 const uploads = await db.table('files')
   .select('id', 'name')

@@ -69,8 +66,16 @@ const self = {
   let error = 0
   let exists = 0
   let skipped = 0
-  let lastProgressOut
-  await utils.parallelLimit(uploads.map(async upload => {
+
+  const printProgress = () => {
+    const done = succeeded.length + error + exists + skipped
+    console.log(`PROGRESS: ${done}/${uploads.length}`)
+    if (done >= uploads.length) clearInterval(progressInterval)
+  }
+  const progressInterval = setInterval(printProgress, 1000)
+  printProgress()
+
+  for (const upload of uploads) {
     const extname = utils.extname(upload.name)
     const basename = upload.name.slice(0, -extname.length)
 

@@ -92,13 +97,10 @@ const self = {
       }
       generated ? succeeded.push({ upload, extname }) : error++
     }
-  }), parallel, progress => {
-    const now = Date.now()
-    if (!lastProgressOut || (now - lastProgressOut >= 1000) || progress.done === progress.total) {
-      console.log(`Progress: ${progress.done}/${progress.total}`)
-      lastProgressOut = now
-    }
-  })
+  }
+
+  clearInterval(progressInterval)
+  printProgress()
 
   console.log(utils.stripIndents(`
     ---