mirror of https://github.com/BobbyWibowo/lolisafe.git
synced 2025-02-20 20:29:04 +00:00
Updated controllers to use Promise.all (concurrent processing) wherever applicable. Added 2 new entries to todo.md. Don't check "Select all" checkbox in dashboard when there are no uploads. Bumped v1 version string.
This commit is contained in:
parent a233dd6bba
commit 98a8d03a7f
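The core change is one pattern applied throughout the controllers: sequential `await` inside a `for` loop is replaced with a single `await Promise.all(...)` over mapped promises, so independent tasks run concurrently instead of one after another. A minimal, self-contained sketch of the before/after (the file list and unlink calls here are illustrative stand-ins, not the project's actual helpers):

const { promises: fsPromises } = require('fs')

const files = [{ name: 'a.tmp' }, { name: 'b.tmp' }, { name: 'c.tmp' }]

async function unlinkSequentially () {
  // Before: each unlink waits for the previous one to finish.
  for (const file of files)
    // Continue even when encountering errors
    await fsPromises.unlink(file.name).catch(console.error)
}

async function unlinkConcurrently () {
  // After: all unlinks start at once and are awaited together.
  // The per-item .catch() keeps one failure from rejecting the whole batch.
  await Promise.all(files.map(file =>
    fsPromises.unlink(file.name).catch(console.error)
  ))
}

Note that this only helps when iterations are independent; loops whose later iterations depend on earlier results (as the todo.md entry further down acknowledges) still need sequential awaits.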
@@ -394,14 +394,20 @@ self.generateZip = async (req, res, next) => {
if ((isNaN(versionString) || versionString <= 0) && album.editedAt)
return res.redirect(`${album.identifier}?v=${album.editedAt}`)

if (album.zipGeneratedAt > album.editedAt) {
const filePath = path.join(paths.zips, `${identifier}.zip`)
const exists = await new Promise(resolve => fs.access(filePath, error => resolve(!error)))
if (exists) {
const fileName = `${album.name}.zip`
return download(filePath, fileName)
// TODO: editedAt column will now be updated whenever
// a user is simply editing the album's name/description.
// Perhaps add a new timestamp column that will only be updated
// when the files in the album are actually modified?
if (album.zipGeneratedAt > album.editedAt)
try {
const filePath = path.join(paths.zips, `${identifier}.zip`)
await paths.access(filePath)
return download(filePath, `${album.name}.zip`)
} catch (error) {
// Re-throw error
if (error.code !== 'ENOENT')
throw error
}
}

if (self.zipEmitters.has(identifier)) {
logger.log(`Waiting previous zip task for album: ${identifier}.`)

@@ -447,10 +453,13 @@ self.generateZip = async (req, res, next) => {
const archive = new Zip()

try {
for (const file of files) {
// Since we are adding all files concurrently,
// their order in the ZIP file may not be in alphabetical order.
// However, ZIP viewers in general should sort the files themselves.
await Promise.all(files.map(async file => {
const data = await paths.readFile(path.join(paths.uploads, file.name))
archive.file(file.name, data)
}
}))
await new Promise((resolve, reject) => {
archive.generateNodeStream(zipOptions)
.pipe(fs.createWriteStream(zipPath))
@@ -51,7 +51,7 @@ const verify = [

self.init = async () => {
// Check & create directories
for (const p of verify)
await Promise.all(verify.map(async p => {
try {
await self.access(p)
} catch (err) {

@@ -63,16 +63,18 @@ self.init = async () => {
logger.log(`Created directory: ${p}`)
}
}
}))

// Purge any leftover in chunks directory
const uuidDirs = await self.readdir(self.chunks)
for (const uuid of uuidDirs) {
await Promise.all(uuidDirs.map(async uuid => {
const root = path.join(self.chunks, uuid)
const chunks = await self.readdir(root)
for (const chunk of chunks)
await self.unlink(path.join(root, chunk))
await Promise.all(chunks.map(async chunk =>
self.unlink(path.join(root, chunk))
))
await self.rmdir(root)
}
}))
}

module.exports = self
@@ -22,6 +22,8 @@ const maxSize = parseInt(config.uploads.maxSize)
const maxSizeBytes = maxSize * 1e6
const urlMaxSizeBytes = parseInt(config.uploads.urlMaxSize) * 1e6

const maxFilesPerUpload = 20

const chunkedUploads = Boolean(config.uploads.chunkSize)
const chunksData = {}
// Hard-coded min chunk size of 1 MB (i.e. 50 MB = max 50 chunks)

@@ -62,7 +64,7 @@ const executeMulter = multer({
// Chunked uploads still need to provide only 1 file field.
// Otherwise, only one of the files will end up being properly stored,
// and that will also be as a chunk.
files: 20
files: maxFilesPerUpload
},
fileFilter (req, file, cb) {
file.extname = utils.extname(file.originalname)

@@ -258,9 +260,10 @@ self.actuallyUploadFiles = async (req, res, user, albumid, age) => {

if (config.filterEmptyFile && infoMap.some(file => file.data.size === 0)) {
// Unlink all files when at least one file is an empty file
for (const info of infoMap)
// Continue even when encountering errors
await utils.unlinkFile(info.data.filename).catch(logger.error)
// Should continue even when encountering errors
await Promise.all(infoMap.map(info =>
utils.unlinkFile(info.data.filename).catch(logger.error)
))

throw 'Empty files are not allowed.'
}
@@ -282,10 +285,13 @@ self.actuallyUploadUrls = async (req, res, user, albumid, age) => {
if (!urls || !(urls instanceof Array))
throw 'Missing "urls" property (array).'

if (urls.length > maxFilesPerUpload)
throw `Maximum ${maxFilesPerUpload} URLs at a time.`

const downloaded = []
const infoMap = []
try {
for (let url of urls) {
await Promise.all(urls.map(async url => {
const original = path.basename(url).split(/[?#]/)[0]
const extname = utils.extname(original)

@@ -337,9 +343,9 @@ self.actuallyUploadUrls = async (req, res, user, albumid, age) => {
age
}
})
}
}))

// If no errors found, clear cache of downloaded files
// If no errors encountered, clear cache of downloaded files
downloaded.length = 0

if (utils.clamd.scanner) {

@@ -351,10 +357,11 @@ self.actuallyUploadUrls = async (req, res, user, albumid, age) => {
await self.sendUploadResponse(req, res, result)
} catch (error) {
// Unlink all downloaded files when at least one file threw an error from the for-loop
// Should continue even when encountering errors
if (downloaded.length)
for (const file of downloaded)
// Continue even when encountering errors
await utils.unlinkFile(file).catch(logger.error)
await Promise.all(downloaded.map(file =>
utils.unlinkFile(file).catch(logger.error)
))

// Re-throw error
throw error
@@ -400,7 +407,7 @@ self.actuallyFinishChunks = async (req, res, user) => {

const infoMap = []
try {
for (const file of files) {
await Promise.all(files.map(async file => {
if (chunksData[file.uuid].chunks.length > maxChunksCount)
throw 'Too many chunks.'

@@ -451,7 +458,7 @@ self.actuallyFinishChunks = async (req, res, user) => {
}

infoMap.push({ path: destination, data })
}
}))

if (utils.clamd.scanner) {
const scanResult = await self.scanFiles(req, infoMap)

@@ -462,10 +469,12 @@ self.actuallyFinishChunks = async (req, res, user) => {
await self.sendUploadResponse(req, res, result)
} catch (error) {
// Clean up leftover chunks
for (const file of files)
// Should continue even when encountering errors
await Promise.all(files.map(async file => {
if (chunksData[file.uuid] !== undefined)
// Continue even when encountering errors
await self.cleanUpChunks(file.uuid).catch(logger.error)
}))

// Re-throw error
throw error
}

@@ -497,8 +506,9 @@ self.combineChunks = async (destination, uuid) => {

self.cleanUpChunks = async (uuid) => {
// Unlink chunks
for (const chunk of chunksData[uuid].chunks)
await paths.unlink(path.join(chunksData[uuid].root, chunk))
await Promise.all(chunksData[uuid].chunks.map(chunk =>
paths.unlink(path.join(chunksData[uuid].root, chunk))
))
// Remove UUID dir
await paths.rmdir(chunksData[uuid].root)
// Delete cached date
@@ -509,6 +519,8 @@ self.scanFiles = async (req, infoMap) => {
let foundThreat
let lastIteration
let errorString
// TODO: Should these be processed concurrently?
// Not sure if it'll be too much load on ClamAV.
for (let i = 0; i < infoMap.length; i++) {
let reply
try {

@@ -518,6 +530,7 @@ self.scanFiles = async (req, infoMap) => {
errorString = `[ClamAV]: ${error.code !== undefined ? `${error.code}, p` : 'P'}lease contact the site owner.`
break
}

if (!reply.includes('OK') || reply.includes('FOUND')) {
// eslint-disable-next-line no-control-regex
foundThreat = reply.replace(/^stream: /, '').replace(/ FOUND\u0000$/, '')

@@ -531,9 +544,10 @@ self.scanFiles = async (req, infoMap) => {
return false

// Unlink all files when at least one threat is found
for (const info of infoMap)
// Continue even when encountering errors
await utils.unlinkFile(info.data.filename).catch(logger.error)
// Should continue even when encountering errors
await Promise.all(infoMap.map(info =>
utils.unlinkFile(info.data.filename).catch(logger.error)
))

return errorString ||
`Threat found: ${foundThreat}${lastIteration ? '' : ', and maybe more'}.`
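The hunks above leave the actual scanning sequential and only add a TODO about whether ClamAV could handle concurrent scans. If that is revisited, one hedged option is bounded concurrency: each batch runs through Promise.all while the batch size caps the load. A rough sketch under that assumption (`scanFile` is a hypothetical stand-in for whatever the scanner exposes, not the project's API):

// Hedged sketch: scan in fixed-size batches to cap concurrent load on ClamAV.
// `scanFile` is a hypothetical stand-in, not an actual lolisafe/clamd API.
async function scanInBatches (filenames, scanFile, batchSize = 4) {
  const replies = []
  for (let i = 0; i < filenames.length; i += batchSize) {
    const batch = filenames.slice(i, i + batchSize)
    // Each batch is scanned concurrently; batches themselves run sequentially.
    replies.push(...await Promise.all(batch.map(name => scanFile(name))))
  }
  return replies
}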
@@ -543,7 +557,7 @@ self.storeFilesToDb = async (req, res, user, infoMap) => {
const files = []
const exists = []
const albumids = []
for (const info of infoMap) {
await Promise.all(infoMap.map(async info => {
// Create hash of the file
const hash = await new Promise((resolve, reject) => {
const result = crypto.createHash('md5')

@@ -579,7 +593,7 @@ self.storeFilesToDb = async (req, res, user, infoMap) => {
dbFile.original = info.data.originalname

exists.push(dbFile)
continue
return
}

const timestamp = Math.floor(Date.now() / 1000)

@@ -609,7 +623,7 @@ self.storeFilesToDb = async (req, res, user, infoMap) => {
// Generate thumbs, but do not wait
if (utils.mayGenerateThumb(info.data.extname))
utils.generateThumbs(info.data.filename, info.data.extname).catch(logger.error)
}
}))

if (files.length) {
let authorizedIds = []
@@ -333,7 +333,9 @@ self.unlinkFile = async (filename, predb) => {
}

self.bulkDeleteFromDb = async (field, values, user) => {
if (!user || !['id', 'name'].includes(field)) return
// Always return an empty array on failure
if (!user || !['id', 'name'].includes(field) || !values.length)
return []

// SQLITE_LIMIT_VARIABLE_NUMBER, which defaults to 999
// Read more: https://www.sqlite.org/limits.html
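The chunking referenced here (and the 30-URL chunks in purgeCloudflareCache further down) follows the same idea: split a large array into fixed-size slices before issuing queries or requests. An illustrative helper, not the exact code used by the controller:

// Illustrative sketch: split `values` into chunks of at most `size` items,
// e.g. to stay under SQLITE_LIMIT_VARIABLE_NUMBER (999 by default).
function chunkArray (values, size) {
  const chunks = []
  const copy = values.slice()
  while (copy.length)
    chunks.push(copy.splice(0, size))
  return chunks
}

// chunkArray([1, 2, 3, 4, 5], 2) => [[1, 2], [3, 4], [5]]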
@@ -349,20 +351,21 @@ self.bulkDeleteFromDb = async (field, values, user) => {
let unlinkeds = []
const albumids = []

for (let i = 0; i < chunks.length; i++) {
await Promise.all(chunks.map(async chunk => {
const files = await db.table('files')
.whereIn(field, chunks[i])
.whereIn(field, chunk)
.where(function () {
if (!ismoderator)
this.where('userid', user.id)
})

// Push files that could not be found in db
failed = failed.concat(chunks[i].filter(value => !files.find(file => file[field] === value)))
failed = failed.concat(chunk.filter(value => !files.find(file => file[field] === value)))

// Unlink all found files
const unlinked = []
for (const file of files)

await Promise.all(files.map(async file => {
try {
await self.unlinkFile(file.name, true)
unlinked.push(file)

@@ -370,9 +373,9 @@ self.bulkDeleteFromDb = async (field, values, user) => {
logger.error(error)
failed.push(file[field])
}
}))

if (!unlinked.length)
continue
if (!unlinked.length) return

// Delete all unlinked files from db
await db.table('files')

@@ -395,7 +398,7 @@ self.bulkDeleteFromDb = async (field, values, user) => {

// Push unlinked files
unlinkeds = unlinkeds.concat(unlinked)
}
}))

if (unlinkeds.length) {
// Update albums if necessary, but do not wait
@@ -448,6 +451,7 @@ self.purgeCloudflareCache = async (names, uploads, thumbs) => {

// Split array into multiple arrays with max length of 30 URLs
// https://api.cloudflare.com/#zone-purge-files-by-url
// TODO: Handle API rate limits
const MAX_LENGTH = 30
const chunks = []
while (names.length)

@@ -456,7 +460,7 @@ self.purgeCloudflareCache = async (names, uploads, thumbs) => {
const url = `https://api.cloudflare.com/client/v4/zones/${config.cloudflare.zoneId}/purge_cache`
const results = []

for (const chunk of chunks) {
await Promise.all(chunks.map(async chunk => {
const result = {
success: false,
files: chunk,

@@ -482,7 +486,7 @@ self.purgeCloudflareCache = async (names, uploads, thumbs) => {
}

results.push(result)
}
}))

return results
}
@@ -791,7 +795,7 @@ self.stats = async (req, res, next) => {
if (album.zipGeneratedAt) identifiers.push(album.identifier)
}

for (const identifier of identifiers)
await Promise.all(identifiers.map(async identifier => {
try {
await paths.access(path.join(paths.zips, `${identifier}.zip`))
stats.albums.zipGenerated++

@@ -800,6 +804,7 @@ self.stats = async (req, res, next) => {
if (error.code !== 'ENOENT')
throw error
}
}))

// Update cache
statsCache.albums.cache = stats.albums
@@ -2,9 +2,6 @@ const randomstring = require('randomstring')
const perms = require('./../controllers/permissionController')
const logger = require('./../logger')

// TODO: Auto-detect missing columns here
// That way we will no longer need the migration script

const init = function (db) {
// Create the tables we need to store galleries and files
db.schema.hasTable('albums').then(exists => {
dist/js/dashboard.js (vendored)
File diff suppressed because one or more lines are too long

dist/js/dashboard.js.map (vendored)
File diff suppressed because one or more lines are too long
@@ -721,7 +721,7 @@ page.getUploads = (params = {}) => {
}

const selectAll = document.querySelector('#selectAll')
if (selectAll && !unselected) {
if (selectAll && !unselected && files.length) {
selectAll.checked = true
selectAll.title = 'Unselect all'
}
@@ -1,5 +1,5 @@
{
"1": "1569126344",
"1": "1569225931",
"2": "1568894058",
"3": "1568894058",
"4": "1568894058",
todo.md

@@ -12,7 +12,7 @@ Normal priority:
* [ ] Collapsible dashboard's sidebar albums menus.
* [x] Change `title` attribute of disabled control buttons in uploads & users lists.
* [x] Use Gatsby logo for link to [blog.fiery.me](https://blog.fiery.me/) on the homepage.
* [ ] Auto-detect missing columns in `database/db.js`.
* [ ] Automatically create missing columns in `database/db.js`. That way we will no longer need the migration script.
* [x] Better error message when server is down.
* [x] Show expiry date in thumbs view.
* [ ] Add Select all checkbox somewhere in thumbs view.

@@ -21,7 +21,9 @@ Normal priority:
* [ ] Add a copy all links to clipboard when there are more than 2 uploads in history.
* [x] Update fb_share.png.
* [ ] Support [fragments](https://developer.mozilla.org/en-US/docs/Web/HTTP/Basics_of_HTTP/Identifying_resources_on_the_Web#Fragment) for dashboard sidebar menus.
* [ ] I forsook all `Promise.all()` in favor of `await-in-for-loop` a while back. I personally think it was fine, considering a lot of them were tasks that required serial processing (continuation being dependent on previous iterations), but maybe I should review the current code to find any sections that would do just fine, or maybe even better, with `Promise.all()`.
* [x] I forsook all `Promise.all()` in favor of `await-in-for-loop` a while back. I personally think it was fine, considering a lot of them were tasks that required serial processing (continuation being dependent on previous iterations), but maybe I should review the current code to find any sections that would do just fine, or maybe even better, with `Promise.all()`.
* [ ] Find a way to detect whether a user has disabled their browser's built-in smooth scrolling capability. We will then use that to decide whether we should use smooth scrolling when auto-scrolling during navigation (for now smooth scrolling is always enabled; and polyfilled if applicable).
* [ ] Parallel URL uploads.

Low priority: