Init ids-cache branch

Bobby Wibowo 2018-12-03 16:18:52 +07:00
parent 6f03726b9d
commit 6af52341c9
No known key found for this signature in database
GPG Key ID: 51C3A1E1E22D26CF
5 changed files with 101 additions and 30 deletions

@@ -42,7 +42,7 @@ const storage = multer.diskStorage({
if (!chunkedUploads || (req.body.uuid === undefined && req.body.chunkindex === undefined)) {
const extension = utils.extname(file.originalname)
const length = uploadsController.getFileNameLength(req)
return uploadsController.getUniqueRandomName(length, extension)
return uploadsController.getUniqueRandomName(length, extension, req.app.get('uploads-set'))
.then(name => cb(null, name))
.catch(error => cb(error))
}
@@ -111,25 +111,38 @@ uploadsController.getFileNameLength = req => {
return config.uploads.fileLength.default || 32
}
uploadsController.getUniqueRandomName = (length, extension) => {
uploadsController.getUniqueRandomName = (length, extension, set) => {
return new Promise((resolve, reject) => {
const access = i => {
const identifier = randomstring.generate(length)
// Read all files names from uploads directory, then filter matching names (as in the identifier)
fs.readdir(uploadsDir, (error, names) => {
if (error) { return reject(error) }
if (names.length) {
for (const name of names.filter(name => name.startsWith(identifier))) {
if (name.split('.')[0] === identifier) {
console.log(`Identifier ${identifier} is already used (${++i}/${maxTries}).`)
if (i < maxTries) { return access(i) }
// eslint-disable-next-line prefer-promise-reject-errors
return reject('Sorry, we could not allocate a unique random name. Try again?')
if (set) {
// Filter matching names from uploads tree (as in the identifier)
if (set.has(identifier)) {
console.log(`Identifier ${identifier} is already used (${++i}/${maxTries}).`)
if (i < maxTries) { return access(i) }
// eslint-disable-next-line prefer-promise-reject-errors
return reject('Sorry, we could not allocate a unique random name. Try again?')
}
set.add(identifier)
// console.log(`Added ${identifier} to identifiers cache`)
return resolve(identifier + extension)
} else {
// Read all files names from uploads directory, then filter matching names (as in the identifier)
fs.readdir(uploadsDir, (error, names) => {
if (error) { return reject(error) }
if (names.length) {
for (const name of names.filter(name => name.startsWith(identifier))) {
if (name.split('.')[0] === identifier) {
console.log(`Identifier ${identifier} is already used (${++i}/${maxTries}).`)
if (i < maxTries) { return access(i) }
// eslint-disable-next-line prefer-promise-reject-errors
return reject('Sorry, we could not allocate a unique random name. Try again?')
}
}
}
}
return resolve(identifier + extension)
})
return resolve(identifier + extension)
})
}
}
access(0)
})
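
In short, when the new set argument is provided, uniqueness is checked against an in-memory Set of identifiers instead of re-reading the uploads directory on every attempt. A rough standalone sketch of that idea follows; the helper name, the synchronous shape and the default maxTries are illustrative and not part of the commit:

// Illustrative only: membership in a pre-populated Set replaces the
// fs.readdir() scan that the non-cached branch still has to perform.
const randomstring = require('randomstring')

const allocateName = (length, extension, set, maxTries = 3) => {
  for (let i = 0; i < maxTries; i++) {
    const identifier = randomstring.generate(length)
    if (set.has(identifier)) { continue } // collision, roll a new identifier
    set.add(identifier) // reserve it immediately so later calls cannot reuse it
    return identifier + extension
  }
  throw new Error('Sorry, we could not allocate a unique random name. Try again?')
}
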
@@ -196,7 +209,7 @@ uploadsController.actuallyUpload = async (req, res, user, albumid) => {
})
if (config.uploads.scan && config.uploads.scan.enabled) {
const scan = await uploadsController.scanFiles(req, infoMap)
const scan = await uploadsController.scanFiles(req.app.get('clam-scanner'), infoMap)
if (scan) { return erred(scan) }
}
@@ -260,7 +273,7 @@ uploadsController.actuallyUploadByUrl = async (req, res, user, albumid) => {
const file = await fetchFile.buffer()
const length = uploadsController.getFileNameLength(req)
const name = await uploadsController.getUniqueRandomName(length, extension)
const name = await uploadsController.getUniqueRandomName(length, extension, req.app.get('uploads-set'))
const destination = path.join(uploadsDir, name)
fs.writeFile(destination, file, async error => {
@@ -282,7 +295,7 @@ uploadsController.actuallyUploadByUrl = async (req, res, user, albumid) => {
iteration++
if (iteration === urls.length) {
if (config.uploads.scan && config.uploads.scan.enabled) {
const scan = await uploadsController.scanFiles(req, infoMap)
const scan = await uploadsController.scanFiles(req.app.get('clam-scanner'), infoMap)
if (scan) { return erred(scan) }
}
@@ -357,7 +370,7 @@ uploadsController.actuallyFinishChunks = async (req, res, user, albumid) => {
}
const length = uploadsController.getFileNameLength(req)
const name = await uploadsController.getUniqueRandomName(length, extension)
const name = await uploadsController.getUniqueRandomName(length, extension, req.app.get('uploads-set'))
.catch(erred)
if (!name) { return }
@@ -407,7 +420,7 @@ uploadsController.actuallyFinishChunks = async (req, res, user, albumid) => {
iteration++
if (iteration === files.length) {
if (config.uploads.scan && config.uploads.scan.enabled) {
const scan = await uploadsController.scanFiles(req, infoMap)
const scan = await uploadsController.scanFiles(req.app.get('clam-scanner'), infoMap)
if (scan) { return erred(scan) }
}
@@ -534,7 +547,13 @@ uploadsController.formatInfoMap = (req, res, user, infoMap) => {
timestamp: Math.floor(Date.now() / 1000)
})
} else {
const identifier = info.data.filename.split('.')[0]
utils.deleteFile(info.data.filename).catch(console.error)
const set = req.app.get('uploads-set')
if (set) {
set.delete(identifier)
// console.log(`Removed ${identifier} from identifiers cache (formatInfoMap)`)
}
existingFiles.push(dbFile)
}
@@ -547,11 +566,9 @@ uploadsController.formatInfoMap = (req, res, user, infoMap) => {
})
}
uploadsController.scanFiles = (req, infoMap) => {
uploadsController.scanFiles = (scanner, infoMap) => {
return new Promise(async (resolve, reject) => {
let iteration = 0
const scanner = req.app.get('clam-scanner')
for (const info of infoMap) {
scanner.scanFile(info.path).then(reply => {
if (!reply.includes('OK') || reply.includes('FOUND')) {
@@ -645,7 +662,7 @@ uploadsController.bulkDelete = async (req, res) => {
return res.json({ success: false, description: 'No array of files specified.' })
}
const failed = await utils.bulkDeleteFiles(field, values, user)
const failed = await utils.bulkDeleteFiles(field, values, user, req.app.get('uploads-set'))
if (failed.length < values.length) {
return res.json({ success: true, failed })
}

@@ -175,9 +175,10 @@ utilsController.deleteFile = file => {
* @param {string} field
* @param {any} values
* @param {user} user
* @param {Set} set
* @return {any[]} failed
*/
utilsController.bulkDeleteFiles = async (field, values, user) => {
utilsController.bulkDeleteFiles = async (field, values, user, set) => {
if (!user || !['id', 'name'].includes(field)) { return }
const ismoderator = perms.is(user, 'moderator')
@@ -189,14 +190,17 @@ utilsController.bulkDeleteFiles = async (field, values, user) => {
}
})
const deleted = []
// an array of file objects
const deletedFiles = []
// an array of values of the specified field
const failed = values.filter(value => !files.find(file => file[field] === value))
// Delete all files physically
await Promise.all(files.map(file => {
return new Promise(async resolve => {
await utilsController.deleteFile(file.name)
.then(() => deleted.push(file.id))
.then(() => deletedFiles.push(file))
.catch(error => {
failed.push(file[field])
console.error(error)
@@ -205,16 +209,24 @@ utilsController.bulkDeleteFiles = async (field, values, user) => {
})
}))
if (!deleted.length) { return failed }
if (!deletedFiles.length) { return failed }
// Delete all files from database
const deletedIds = deletedFiles.map(file => file.id)
const deleteDb = await db.table('files')
.whereIn('id', deleted)
.whereIn('id', deletedIds)
.del()
.catch(console.error)
if (!deleteDb) { return failed }
const filtered = files.filter(file => deleted.includes(file.id))
if (set) {
deletedFiles.forEach(file => {
const identifier = file.name.split('.')[0]
set.delete(identifier)
// console.log(`Removed ${identifier} from identifiers cache (bulkDeleteFiles)`)
})
}
const filtered = files.filter(file => deletedIds.includes(file.id))
// Update albums if necessary
if (deleteDb) {
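
The reworked bulk delete keeps whole file rows (deletedFiles) instead of bare ids, so after the physical and database deletes it can also evict each file's identifier from the cache. A minimal sketch of that eviction step in isolation; the helper name is made up for illustration, and set is undefined whenever the cache is disabled:

// Illustrative only: release deleted identifiers so they can be issued again.
const evictIdentifiers = (set, deletedFiles) => {
  if (!set) { return } // identifiers cache disabled
  for (const file of deletedFiles) {
    const identifier = file.name.split('.')[0] // stored name minus its extension
    set.delete(identifier)
  }
}
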

@@ -10,6 +10,7 @@ const fs = require('fs')
const helmet = require('helmet')
const nunjucks = require('nunjucks')
const RateLimit = require('express-rate-limit')
const readline = require('readline')
const safe = express()
// It appears to be best to catch these before doing anything else
@@ -130,12 +131,47 @@ const start = async () => {
if (!created) { return process.exit(1) }
}
const IDS_CACHE = false // experimental, set to false to disable
if (IDS_CACHE) {
// Cache tree of uploads directory
process.stdout.write('Caching identifiers in uploads directory ...')
const setSize = await new Promise((resolve, reject) => {
const uploadsDir = `./${config.uploads.folder}`
fs.readdir(uploadsDir, (error, names) => {
if (error) { return reject(error) }
const set = new Set()
names.forEach(name => set.add(name.split('.')[0]))
safe.set('uploads-set', set)
resolve(set.size)
})
}).catch(error => console.error(error.toString()))
if (!setSize) { return process.exit(1) }
process.stdout.write(` ${setSize} OK!\n`)
}
safe.listen(config.port, () => {
console.log(`lolisafe started on port ${config.port}`)
if (process.env.DEV === '1') {
// DEV=1 yarn start
console.log('lolisafe is in development mode, nunjucks caching disabled')
}
// Add readline interface to allow evaluating arbitrary JavaScript from console
readline.createInterface({
input: process.stdin,
output: process.stdout,
prompt: ''
}).on('line', line => {
try {
if (line === '.exit') { process.exit(0) }
// eslint-disable-next-line no-eval
process.stdout.write(`${require('util').inspect(eval(line), { depth: 0 })}\n`)
} catch (error) {
console.error(error.toString())
}
}).on('SIGINT', () => {
process.exit(0)
})
})
}
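
Because the startup code stores the Set with safe.set('uploads-set', set), any request handler can reach it through Express's settings store as req.app.get('uploads-set'), which is what the uploadsController hunks above rely on. A minimal sketch of that round trip; the route below is invented for illustration and is not part of the commit:

// Illustrative only: a handler mounted on `safe` reading the cached Set.
safe.get('/api/ids-cache/size', (req, res) => {
  const set = req.app.get('uploads-set') // undefined when IDS_CACHE is false
  res.json({ size: set ? set.size : 0 })
})

The new readline interface evaluates each typed line in this module's scope, so the cache can also be inspected interactively, for example by entering safe.get('uploads-set') at the console, or .exit to quit.
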

@@ -35,6 +35,7 @@
"node-fetch": "^2.2.0",
"nunjucks": "^3.1.2",
"randomstring": "^1.1.5",
"readline": "^1.3.0",
"sharp": "^0.21.0",
"sqlite3": "^4.0.0"
},

@@ -2928,6 +2928,11 @@ readdirp@^2.0.0:
readable-stream "^2.0.2"
set-immediate-shim "^1.0.1"
readline@^1.3.0:
version "1.3.0"
resolved "https://registry.yarnpkg.com/readline/-/readline-1.3.0.tgz#c580d77ef2cfc8752b132498060dc9793a7ac01c"
integrity sha1-xYDXfvLPyHUrEySYBg3JeTp6wBw=
rechoir@^0.6.2:
version "0.6.2"
resolved "https://registry.yarnpkg.com/rechoir/-/rechoir-0.6.2.tgz#85204b54dba82d5742e28c96756ef43af50e3384"