Mirror of https://github.com/BobbyWibowo/lolisafe.git, synced 2025-02-21 20:59:04 +00:00

Commit 98a8d03a7f (parent a233dd6bba)

Updated controllers to use Promise.all (concurrent processing) wherever applicable. Added 2 new entries to todo.md. Don't check "Select all" checkbox in dashboard when there are no uploads. Bumped v1 version string.
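The recurring pattern in this commit is converting sequential await-in-for loops into concurrent processing with Promise.all over Array.prototype.map. A minimal sketch of the before/after (generic names, not code from this repository):

const processItem = item =>
  new Promise(resolve => setTimeout(() => resolve(item * 2), 100))

const items = [1, 2, 3, 4]

const sequential = async () => {
  // One item at a time: total time is roughly items.length * 100 ms
  const results = []
  for (const item of items)
    results.push(await processItem(item))
  return results
}

const concurrent = async () =>
  // All items at once: total time is roughly 100 ms,
  // but Promise.all rejects as soon as any single promise rejects
  Promise.all(items.map(item => processItem(item)))

sequential().then(results => console.log('sequential:', results)) // [ 2, 4, 6, 8 ]
concurrent().then(results => console.log('concurrent:', results)) // [ 2, 4, 6, 8 ], same order as the input

The fail-fast behavior of Promise.all is why several cleanup paths in the hunks below attach .catch(logger.error) to each individual promise instead of relying on the combined promise.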
@@ -394,14 +394,20 @@ self.generateZip = async (req, res, next) => {
   if ((isNaN(versionString) || versionString <= 0) && album.editedAt)
     return res.redirect(`${album.identifier}?v=${album.editedAt}`)

-  if (album.zipGeneratedAt > album.editedAt) {
-    const filePath = path.join(paths.zips, `${identifier}.zip`)
-    const exists = await new Promise(resolve => fs.access(filePath, error => resolve(!error)))
-    if (exists) {
-      const fileName = `${album.name}.zip`
-      return download(filePath, fileName)
-    }
-  }
+  // TODO: editedAt column will now be updated whenever
+  // a user is simply editing the album's name/description.
+  // Perhaps add a new timestamp column that will only be updated
+  // when the files in the album are actually modified?
+  if (album.zipGeneratedAt > album.editedAt)
+    try {
+      const filePath = path.join(paths.zips, `${identifier}.zip`)
+      await paths.access(filePath)
+      return download(filePath, `${album.name}.zip`)
+    } catch (error) {
+      // Re-throw error
+      if (error.code !== 'ENOENT')
+        throw error
+    }

   if (self.zipEmitters.has(identifier)) {
     logger.log(`Waiting previous zip task for album: ${identifier}.`)
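The existence check above now goes through paths.access, assumed here to be a promisified fs.access, instead of hand-wrapping the callback API in a Promise. A standalone sketch of the same pattern using Node's built-in fs.promises:

const fs = require('fs')
const path = require('path')

// Sketch: return the path of a pre-generated file if it exists, otherwise null.
// ENOENT ("no such file") just means it has not been generated yet;
// any other error is unexpected and is re-thrown for the caller to handle.
const findGenerated = async (dir, name) => {
  const filePath = path.join(dir, name)
  try {
    await fs.promises.access(filePath)
    return filePath
  } catch (error) {
    if (error.code !== 'ENOENT')
      throw error
    return null
  }
}

findGenerated('./zips', 'example.zip')
  .then(result => console.log(result || 'not generated yet'))
  .catch(console.error)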
@@ -447,10 +453,13 @@ self.generateZip = async (req, res, next) => {
     const archive = new Zip()

     try {
-      for (const file of files) {
+      // Since we are adding all files concurrently,
+      // their order in the ZIP file may not be in alphabetical order.
+      // However, ZIP viewers in general should sort the files themselves.
+      await Promise.all(files.map(async file => {
         const data = await paths.readFile(path.join(paths.uploads, file.name))
         archive.file(file.name, data)
-      }
+      }))
       await new Promise((resolve, reject) => {
         archive.generateNodeStream(zipOptions)
           .pipe(fs.createWriteStream(zipPath))
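As the new comment in this hunk notes, adding entries from concurrently resolving reads means the archive's entry order follows completion order rather than input order. If a deterministic order were ever wanted, one option (a sketch only; archive is assumed to be a JSZip-like object exposing file(name, data)) is to await all reads first, since Promise.all preserves input order in its result array, and only then add the entries:

const fs = require('fs')

// Sketch: read concurrently, then add entries sequentially in the original order
const addFilesInOrder = async (archive, fileNames) => {
  const buffers = await Promise.all(fileNames.map(name => fs.promises.readFile(name)))
  // buffers[i] always corresponds to fileNames[i], regardless of completion order
  fileNames.forEach((name, i) => archive.file(name, buffers[i]))
}

// Usage sketch: await addFilesInOrder(archive, ['a.txt', 'b.txt'])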
@@ -51,7 +51,7 @@ const verify = [

 self.init = async () => {
   // Check & create directories
-  for (const p of verify)
+  await Promise.all(verify.map(async p => {
     try {
       await self.access(p)
     } catch (err) {
@@ -63,16 +63,18 @@ self.init = async () => {
         logger.log(`Created directory: ${p}`)
       }
     }
+  }))

   // Purge any leftover in chunks directory
   const uuidDirs = await self.readdir(self.chunks)
-  for (const uuid of uuidDirs) {
+  await Promise.all(uuidDirs.map(async uuid => {
     const root = path.join(self.chunks, uuid)
     const chunks = await self.readdir(root)
-    for (const chunk of chunks)
-      await self.unlink(path.join(root, chunk))
+    await Promise.all(chunks.map(async chunk =>
+      self.unlink(path.join(root, chunk))
+    ))
     await self.rmdir(root)
-  }
+  }))
 }

 module.exports = self
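For the directory checks in the two hunks above, a further simplification (a sketch only, not what this commit does) would be fs.promises.mkdir with { recursive: true }, which resolves without error when the directory already exists; the directory names below are placeholders:

const fs = require('fs')

const verify = ['./uploads', './uploads/chunks', './uploads/thumbs', './uploads/zips']

const init = async () => {
  // mkdir with { recursive: true } is a no-op for directories that already exist,
  // so the access()/catch/mkdir() round-trip collapses into one call per directory
  await Promise.all(verify.map(p => fs.promises.mkdir(p, { recursive: true })))
}

init().catch(console.error)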
@@ -22,6 +22,8 @@ const maxSize = parseInt(config.uploads.maxSize)
 const maxSizeBytes = maxSize * 1e6
 const urlMaxSizeBytes = parseInt(config.uploads.urlMaxSize) * 1e6

+const maxFilesPerUpload = 20
+
 const chunkedUploads = Boolean(config.uploads.chunkSize)
 const chunksData = {}
 // Hard-coded min chunk size of 1 MB (e.i. 50 MB = max 50 chunks)
@@ -62,7 +64,7 @@ const executeMulter = multer({
     // Chunked uploads still need to provide only 1 file field.
     // Otherwise, only one of the files will end up being properly stored,
     // and that will also be as a chunk.
-    files: 20
+    files: maxFilesPerUpload
   },
   fileFilter (req, file, cb) {
     file.extname = utils.extname(file.originalname)
@@ -258,9 +260,10 @@ self.actuallyUploadFiles = async (req, res, user, albumid, age) => {

   if (config.filterEmptyFile && infoMap.some(file => file.data.size === 0)) {
     // Unlink all files when at least one file is an empty file
-    for (const info of infoMap)
-      // Continue even when encountering errors
-      await utils.unlinkFile(info.data.filename).catch(logger.error)
+    // Should continue even when encountering errors
+    await Promise.all(infoMap.map(info =>
+      utils.unlinkFile(info.data.filename).catch(logger.error)
+    ))

     throw 'Empty files are not allowed.'
   }
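The per-item .catch(logger.error) above is what lets Promise.all "continue even when encountering errors": each rejected unlink is converted into a resolved promise before Promise.all sees it. Promise.allSettled (Node 12.9+) expresses the same intent more directly; a sketch for comparison, not code from the repository:

const fs = require('fs')

// Option A (the commit's approach): give every promise its own .catch so that
// a single failed unlink cannot reject the surrounding Promise.all
const unlinkAllCatch = filenames =>
  Promise.all(filenames.map(name =>
    fs.promises.unlink(name).catch(console.error)
  ))

// Option B (alternative): Promise.allSettled never rejects;
// failures can be inspected afterwards
const unlinkAllSettled = async filenames => {
  const results = await Promise.allSettled(filenames.map(name => fs.promises.unlink(name)))
  for (const [i, result] of results.entries())
    if (result.status === 'rejected')
      console.error(`Failed to unlink ${filenames[i]}: ${result.reason.message}`)
}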
@@ -282,10 +285,13 @@ self.actuallyUploadUrls = async (req, res, user, albumid, age) => {
   if (!urls || !(urls instanceof Array))
     throw 'Missing "urls" property (array).'

+  if (urls.length > maxFilesPerUpload)
+    throw `Maximum ${maxFilesPerUpload} URLs at a time.`
+
   const downloaded = []
   const infoMap = []
   try {
-    for (let url of urls) {
+    await Promise.all(urls.map(async url => {
       const original = path.basename(url).split(/[?#]/)[0]
       const extname = utils.extname(original)

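With this hunk the URL upload path starts every download at once, bounded only by the new maxFilesPerUpload check. If fully parallel downloads ever turn out to be too aggressive, a small concurrency limiter can cap how many are in flight; a sketch, not part of this commit (downloadToTemp in the usage comment is hypothetical):

// Sketch: run async tasks over `items`, with at most `limit` of them in flight at once
const mapWithLimit = async (items, limit, task) => {
  const results = new Array(items.length)
  let next = 0
  const worker = async () => {
    while (next < items.length) {
      const i = next++
      results[i] = await task(items[i], i)
    }
  }
  const workerCount = Math.min(limit, items.length)
  await Promise.all(Array.from({ length: workerCount }, () => worker()))
  return results
}

// Usage sketch: at most 5 concurrent downloads
// const infoMap = await mapWithLimit(urls, 5, url => downloadToTemp(url))
mapWithLimit([250, 100, 300, 50], 2, ms =>
  new Promise(resolve => setTimeout(() => resolve(ms), ms))
).then(results => console.log(results)) // [ 250, 100, 300, 50 ], input order preserved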
@@ -337,9 +343,9 @@ self.actuallyUploadUrls = async (req, res, user, albumid, age) => {
           age
         }
       })
-    }
+    }))

-    // If no errors found, clear cache of downloaded files
+    // If no errors encountered, clear cache of downloaded files
     downloaded.length = 0

     if (utils.clamd.scanner) {
@@ -351,10 +357,11 @@ self.actuallyUploadUrls = async (req, res, user, albumid, age) => {
     await self.sendUploadResponse(req, res, result)
   } catch (error) {
     // Unlink all downloaded files when at least one file threw an error from the for-loop
+    // Should continue even when encountering errors
     if (downloaded.length)
-      for (const file of downloaded)
-        // Continue even when encountering errors
-        await utils.unlinkFile(file).catch(logger.error)
+      await Promise.all(downloaded.map(file =>
+        utils.unlinkFile(file).catch(logger.error)
+      ))

     // Re-throw error
     throw error
@@ -400,7 +407,7 @@ self.actuallyFinishChunks = async (req, res, user) => {

   const infoMap = []
   try {
-    for (const file of files) {
+    await Promise.all(files.map(async file => {
       if (chunksData[file.uuid].chunks.length > maxChunksCount)
         throw 'Too many chunks.'

@@ -451,7 +458,7 @@ self.actuallyFinishChunks = async (req, res, user) => {
       }

       infoMap.push({ path: destination, data })
-    }
+    }))

     if (utils.clamd.scanner) {
       const scanResult = await self.scanFiles(req, infoMap)
@@ -462,10 +469,12 @@ self.actuallyFinishChunks = async (req, res, user) => {
     await self.sendUploadResponse(req, res, result)
   } catch (error) {
     // Clean up leftover chunks
-    for (const file of files)
+    // Should continue even when encountering errors
+    await Promise.all(files.map(async file => {
       if (chunksData[file.uuid] !== undefined)
-        // Continue even when encountering errors
         await self.cleanUpChunks(file.uuid).catch(logger.error)
+    }))

     // Re-throw error
     throw error
   }
@@ -497,8 +506,9 @@ self.combineChunks = async (destination, uuid) => {

 self.cleanUpChunks = async (uuid) => {
   // Unlink chunks
-  for (const chunk of chunksData[uuid].chunks)
-    await paths.unlink(path.join(chunksData[uuid].root, chunk))
+  await Promise.all(chunksData[uuid].chunks.map(chunk =>
+    paths.unlink(path.join(chunksData[uuid].root, chunk))
+  ))
   // Remove UUID dir
   await paths.rmdir(chunksData[uuid].root)
   // Delete cached date
@@ -509,6 +519,8 @@ self.scanFiles = async (req, infoMap) => {
   let foundThreat
   let lastIteration
   let errorString
+  // TODO: Should these be processed concurrently?
+  // Not sure if it'll be too much load on ClamAV.
   for (let i = 0; i < infoMap.length; i++) {
     let reply
     try {
@@ -518,6 +530,7 @@ self.scanFiles = async (req, infoMap) => {
       errorString = `[ClamAV]: ${error.code !== undefined ? `${error.code}, p` : 'P'}lease contact the site owner.`
       break
     }

     if (!reply.includes('OK') || reply.includes('FOUND')) {
       // eslint-disable-next-line no-control-regex
       foundThreat = reply.replace(/^stream: /, '').replace(/ FOUND\u0000$/, '')
@@ -531,9 +544,10 @@ self.scanFiles = async (req, infoMap) => {
     return false

   // Unlink all files when at least one threat is found
-  for (const info of infoMap)
-    // Continue even when encountering errors
-    await utils.unlinkFile(info.data.filename).catch(logger.error)
+  // Should ontinue even when encountering errors
+  await Promise.all(infoMap.map(info =>
+    utils.unlinkFile(info.data.filename).catch(logger.error)
+  ))

   return errorString ||
     `Threat found: ${foundThreat}${lastIteration ? '' : ', and maybe more'}.`
@@ -543,7 +557,7 @@ self.storeFilesToDb = async (req, res, user, infoMap) => {
   const files = []
   const exists = []
   const albumids = []
-  for (const info of infoMap) {
+  await Promise.all(infoMap.map(async info => {
     // Create hash of the file
     const hash = await new Promise((resolve, reject) => {
       const result = crypto.createHash('md5')
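The hash shown in the trailing context above is produced by wrapping a streaming MD5 computation in a Promise so it can be awaited inside the map callback. A standalone sketch of that wrapper:

const crypto = require('crypto')
const fs = require('fs')

// Sketch: stream a file through an MD5 hash and resolve with the hex digest
const md5OfFile = filePath =>
  new Promise((resolve, reject) => {
    const hash = crypto.createHash('md5')
    fs.createReadStream(filePath)
      .on('error', reject)
      .on('data', chunk => hash.update(chunk))
      .on('end', () => resolve(hash.digest('hex')))
  })

// Usage sketch: md5OfFile('./some-upload.bin').then(console.log)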
@@ -579,7 +593,7 @@ self.storeFilesToDb = async (req, res, user, infoMap) => {
       dbFile.original = info.data.originalname

       exists.push(dbFile)
-      continue
+      return
     }

     const timestamp = Math.floor(Date.now() / 1000)
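The continue-to-return swap above is a side effect of the loop-to-map conversion: there is no enclosing loop to continue inside a .map() callback, so an early return from the callback skips the rest of the work for that item only. A small sketch:

const doWork = item =>
  new Promise(resolve => setTimeout(() => resolve(console.log('processed', item.id)), 10))

const processAll = items =>
  Promise.all(items.map(async item => {
    // `return` here plays the role `continue` played in the old for-loop:
    // it ends this callback without affecting the other items
    if (item.skip)
      return
    await doWork(item)
  }))

processAll([{ id: 1 }, { id: 2, skip: true }, { id: 3 }])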
@@ -609,7 +623,7 @@ self.storeFilesToDb = async (req, res, user, infoMap) => {
     // Generate thumbs, but do not wait
     if (utils.mayGenerateThumb(info.data.extname))
       utils.generateThumbs(info.data.filename, info.data.extname).catch(logger.error)
-  }
+  }))

   if (files.length) {
     let authorizedIds = []
@@ -333,7 +333,9 @@ self.unlinkFile = async (filename, predb) => {
 }

 self.bulkDeleteFromDb = async (field, values, user) => {
-  if (!user || !['id', 'name'].includes(field)) return
+  // Always return an empty array on failure
+  if (!user || !['id', 'name'].includes(field) || !values.length)
+    return []

   // SQLITE_LIMIT_VARIABLE_NUMBER, which defaults to 999
   // Read more: https://www.sqlite.org/limits.html
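The SQLITE_LIMIT_VARIABLE_NUMBER comment above is why values is split into chunks before the whereIn queries in the next hunk. A generic splitter looks something like this (a sketch, not necessarily the repository's exact helper):

// Sketch: keep each whereIn() under SQLite's default limit of 999 bound variables
const chunkify = (values, size = 999) => {
  const chunks = []
  for (let i = 0; i < values.length; i += size)
    chunks.push(values.slice(i, i + size))
  return chunks
}

console.log(chunkify([1, 2, 3, 4, 5], 2)) // [ [ 1, 2 ], [ 3, 4 ], [ 5 ] ]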
@@ -349,20 +351,21 @@ self.bulkDeleteFromDb = async (field, values, user) => {
   let unlinkeds = []
   const albumids = []

-  for (let i = 0; i < chunks.length; i++) {
+  await Promise.all(chunks.map(async chunk => {
     const files = await db.table('files')
-      .whereIn(field, chunks[i])
+      .whereIn(field, chunk)
       .where(function () {
         if (!ismoderator)
           this.where('userid', user.id)
       })

     // Push files that could not be found in db
-    failed = failed.concat(chunks[i].filter(value => !files.find(file => file[field] === value)))
+    failed = failed.concat(chunk.filter(value => !files.find(file => file[field] === value)))

     // Unlink all found files
     const unlinked = []
-    for (const file of files)
+
+    await Promise.all(files.map(async file => {
       try {
         await self.unlinkFile(file.name, true)
         unlinked.push(file)
@@ -370,9 +373,9 @@ self.bulkDeleteFromDb = async (field, values, user) => {
         logger.error(error)
         failed.push(file[field])
       }
+    }))

-    if (!unlinked.length)
-      continue
+    if (!unlinked.length) return

     // Delete all unlinked files from db
     await db.table('files')
@@ -395,7 +398,7 @@ self.bulkDeleteFromDb = async (field, values, user) => {

     // Push unlinked files
     unlinkeds = unlinkeds.concat(unlinked)
-  }
+  }))

   if (unlinkeds.length) {
     // Update albums if necessary, but do not wait
@@ -448,6 +451,7 @@ self.purgeCloudflareCache = async (names, uploads, thumbs) => {

   // Split array into multiple arrays with max length of 30 URLs
   // https://api.cloudflare.com/#zone-purge-files-by-url
+  // TODO: Handle API rate limits
   const MAX_LENGTH = 30
   const chunks = []
   while (names.length)
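The splitting referenced above shows up in the while (names.length) context line: Cloudflare's purge-by-URL endpoint accepts at most 30 URLs per request, so the list is consumed 30 entries at a time. A standalone sketch of that splitting (note that splice mutates the input array, matching the loop shown):

const MAX_LENGTH = 30

// Sketch: consume `names` 30 entries at a time
const splitForPurge = names => {
  const chunks = []
  while (names.length)
    chunks.push(names.splice(0, MAX_LENGTH))
  return chunks
}

const names = Array.from({ length: 65 }, (_, i) => `file${i}.png`)
console.log(splitForPurge(names).map(chunk => chunk.length)) // [ 30, 30, 5 ]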
@@ -456,7 +460,7 @@ self.purgeCloudflareCache = async (names, uploads, thumbs) => {
   const url = `https://api.cloudflare.com/client/v4/zones/${config.cloudflare.zoneId}/purge_cache`
   const results = []

-  for (const chunk of chunks) {
+  await Promise.all(chunks.map(async chunk => {
     const result = {
       success: false,
       files: chunk,
@@ -482,7 +486,7 @@ self.purgeCloudflareCache = async (names, uploads, thumbs) => {
     }

     results.push(result)
-  }
+  }))

   return results
 }
@@ -791,7 +795,7 @@ self.stats = async (req, res, next) => {
       if (album.zipGeneratedAt) identifiers.push(album.identifier)
     }

-    for (const identifier of identifiers)
+    await Promise.all(identifiers.map(async identifier => {
       try {
         await paths.access(path.join(paths.zips, `${identifier}.zip`))
         stats.albums.zipGenerated++
@@ -800,6 +804,7 @@ self.stats = async (req, res, next) => {
         if (error.code !== 'ENOENT')
           throw error
       }
+    }))

     // Update cache
     statsCache.albums.cache = stats.albums
@@ -2,9 +2,6 @@ const randomstring = require('randomstring')
 const perms = require('./../controllers/permissionController')
 const logger = require('./../logger')

-// TODO: Auto-detect missing columns here
-// That way we will no longer need the migration script
-
 const init = function (db) {
   // Create the tables we need to store galleries and files
   db.schema.hasTable('albums').then(exists => {
dist/js/dashboard.js (vendored, 2 changes): file diff suppressed because one or more lines are too long
dist/js/dashboard.js.map (vendored, 2 changes): file diff suppressed because one or more lines are too long
@@ -721,7 +721,7 @@ page.getUploads = (params = {}) => {
   }

   const selectAll = document.querySelector('#selectAll')
-  if (selectAll && !unselected) {
+  if (selectAll && !unselected && files.length) {
     selectAll.checked = true
     selectAll.title = 'Unselect all'
   }
@@ -1,5 +1,5 @@
 {
-  "1": "1569126344",
+  "1": "1569225931",
   "2": "1568894058",
   "3": "1568894058",
   "4": "1568894058",
todo.md (6 changes)
@@ -12,7 +12,7 @@ Normal priority:
 * [ ] Collapsible dashboard's sidebar albums menus.
 * [x] Change `title` attribute of disabled control buttons in uploads & users lists.
 * [x] Use Gatsby logo for link to [blog.fiery.me](https://blog.fiery.me/) on the homepage.
-* [ ] Auto-detect missing columns in `database/db.js`.
+* [ ] Automatically create missing columns in `database/db.js`. That way we will no longer need the migration script.
 * [x] Better error message when server is down.
 * [x] Show expiry date in thumbs view.
 * [ ] Add Select all checkbox somewhere in thumbs view.
@@ -21,7 +21,9 @@ Normal priority:
 * [ ] Add a copy all links to clipboard when there are more than 2 uploads in history.
 * [x] Update fb_share.png.
 * [ ] Support [fragments](https://developer.mozilla.org/en-US/docs/Web/HTTP/Basics_of_HTTP/Identifying_resources_on_the_Web#Fragment) for dashboard sidebar menus.
-* [ ] I forsaked all `Promise.all()` in favor of `await-in-for-loop` a while back. I personally think it was fine, considering a lot of them were tasks that required serial processing (continuation be dependant on previous iterations), but maybe I should review the current codes to find any sections that would do just fine, or maybe even great, with `Promise.all()`.
+* [x] I forsaked all `Promise.all()` in favor of `await-in-for-loop` a while back. I personally think it was fine, considering a lot of them were tasks that required serial processing (continuation be dependant on previous iterations), but maybe I should review the current codes to find any sections that would do just fine, or maybe even great, with `Promise.all()`.
+* [ ] Find a way to detect whether a user had disabled their browser's built-in smooth scrolling capability. We will then use that to decide whether we should use smooth scrolling when auto-scrolling during navigation (for now smooth scrolling is always enabled; and polified if applicable).
+* [ ] Parallel URL uploads.

 Low priority:
