Merge branch 'ids-cache' into safe.fiery.me

This commit is contained in:
Bobby Wibowo 2018-12-04 18:59:47 +07:00
commit 446cacea7e
No known key found for this signature in database
GPG Key ID: 51C3A1E1E22D26CF
11 changed files with 247 additions and 46 deletions

View File

@ -103,7 +103,9 @@ module.exports = {
/*
Max file size allowed. Needs to be in MB.
Note: When maxSize is greater than 1 MiB, you must set the client_max_body_size to the same as maxSize.
Note: When maxSize is greater than 1 MiB and using nginx as reverse proxy,
you must set client_max_body_size to the same as maxSize.
https://nginx.org/en/docs/http/ngx_http_core_module.html#client_max_body_size
*/
maxSize: '512MB',
@ -123,7 +125,7 @@ module.exports = {
},
/*
Use DuckDuckGo's proxy when fetching any URL uploads.
Use DuckDuckGo's proxy when fetching URL uploads.
This may be considered a hack and not supported by DuckDuckGo, so USE AT YOUR OWN RISK.
This should work with any type of URLs, but they have to be direct links,
since DuckDuckGo's proxy will not follow redirects.
@ -141,13 +143,14 @@ module.exports = {
chunkSize: '10MB',
/*
The length of the randomly generated name for uploaded files.
The length of the randomly generated identifier for uploaded files.
If "userChangeable" is set to true, registered users will be able to change
their preferred file name length from the dashboard. The allowed range will
be set by "min" and "max". Otherwise it will use "default".
Technically it's possible to have "default" outside of the "min" and "max" range,
but please not. Otherwise, once a user has changed to a value within the range,
the user will no longer be able to use the default value.
their preferred length from the dashboard. The allowed range will be set
by "min" and "max". Otherwise it will use "default".
It's possible to have "default" be outside of the "min" and "max" range,
but be aware that once a user has changed their preferred length to be somewhere
within the range, they will no longer be able to restore it back to "default".
*/
fileLength: {
min: 4,
@ -156,6 +159,28 @@ module.exports = {
userChangeable: false
},
/*
Cache file identifiers.
They will be used for a stricter collision check, such that a single identifier
may not be used by more than a single file (i.e. if "abcd.jpg" already exists, a new PNG
file may not be named as "abcd.png").
If this is enabled, the safe will attempt to read file list of the uploads directory
during first launch, parse the names, then cache the identifiers into memory.
Its downside is that it will use a bit more memory, generally a few MBs increase
on a safe with more than 10k uploads.
If this is disabled, the safe will instead read the file list of the uploads directory EVERY TIME
there is a new upload.
Its downside will be slower upload handling and memory usage "spikes",
since reading the file list will still consume memory,
just that it will only stay in memory for a few moments.
In most cases, leaving this enabled is recommended.
*/
cacheFileIdentifiers: true,
/*
The length of the randomly generated identifier for albums.
*/
@ -165,6 +190,7 @@ module.exports = {
This option will limit how many times it will try to
generate a new random name when a collision occurs.
The shorter the length is, the higher the chance for a collision to occur.
This applies to both file name and album identifier.
*/
maxTries: 1,
@ -178,7 +204,7 @@ module.exports = {
},
/*
Allows users to download a ZIP file of all files in an album.
Allow users to download a ZIP archive of all files in an album.
The file is generated when the user clicks the download button in the view
and is re-used if the album has not changed between download requests.
*/
@ -198,8 +224,8 @@ module.exports = {
noJsMaxSize: '100MB',
/*
If you have a Page Rule in Cloudflare to cache everything in the album zipping
API route (HOME_DOMAIN/api/album/zip/*), with this option you can limit the
If you have a Page Rule in Cloudflare to cache everything in the album zip
API route (homeDomain/api/album/zip/*), with this option you can limit the
maximum total size of files in an album that can be zipped.
Cloudflare will not cache files bigger than 512MB.
NOTE: Set to falsy value (false, null, etc.) to disable max total size.
@ -207,8 +233,8 @@ module.exports = {
zipMaxTotalSize: '512MB',
/*
If you want to make it automatically calls Cloudflare's API to purge cache on file delete,
fill your api key, email and your site's zone id below, then set "purgeCache" to true.
If you want to make it automatically call Cloudflare's API to purge cache on file delete,
fill your API key, email and your site's zone ID below, then set "purgeCache" to true.
This will only purge cache of the deleted file and its associated thumb.
*/
apiKey: '',
@ -219,6 +245,7 @@ module.exports = {
/*
Folder where to store logs.
NOTE: This is currently unused.
*/
logsFolder: 'logs',

View File

@ -42,7 +42,7 @@ const storage = multer.diskStorage({
if (!chunkedUploads || (req.body.uuid === undefined && req.body.chunkindex === undefined)) {
const extension = utils.extname(file.originalname)
const length = uploadsController.getFileNameLength(req)
return uploadsController.getUniqueRandomName(length, extension)
return uploadsController.getUniqueRandomName(length, extension, req.app.get('uploads-set'))
.then(name => cb(null, name))
.catch(error => cb(error))
}
@ -111,10 +111,22 @@ uploadsController.getFileNameLength = req => {
return config.uploads.fileLength.default || 32
}
uploadsController.getUniqueRandomName = (length, extension) => {
uploadsController.getUniqueRandomName = (length, extension, set) => {
return new Promise((resolve, reject) => {
const access = i => {
const identifier = randomstring.generate(length)
if (config.uploads.cacheFileIdentifiers) {
// Filter matching names from uploads tree (as in the identifier)
if (set.has(identifier)) {
console.log(`Identifier ${identifier} is already used (${++i}/${maxTries}).`)
if (i < maxTries) { return access(i) }
// eslint-disable-next-line prefer-promise-reject-errors
return reject('Sorry, we could not allocate a unique random name. Try again?')
}
set.add(identifier)
// console.log(`Added ${identifier} to identifiers cache`)
return resolve(identifier + extension)
} else {
// Read all files names from uploads directory, then filter matching names (as in the identifier)
fs.readdir(uploadsDir, (error, names) => {
if (error) { return reject(error) }
@ -131,6 +143,7 @@ uploadsController.getUniqueRandomName = (length, extension) => {
return resolve(identifier + extension)
})
}
}
access(0)
})
}
@ -196,7 +209,7 @@ uploadsController.actuallyUpload = async (req, res, user, albumid) => {
})
if (config.uploads.scan && config.uploads.scan.enabled) {
const scan = await uploadsController.scanFiles(req, infoMap)
const scan = await uploadsController.scanFiles(req.app.get('clam-scanner'), infoMap)
if (scan) { return erred(scan) }
}
@ -260,7 +273,7 @@ uploadsController.actuallyUploadByUrl = async (req, res, user, albumid) => {
const file = await fetchFile.buffer()
const length = uploadsController.getFileNameLength(req)
const name = await uploadsController.getUniqueRandomName(length, extension)
const name = await uploadsController.getUniqueRandomName(length, extension, req.app.get('uploads-set'))
const destination = path.join(uploadsDir, name)
fs.writeFile(destination, file, async error => {
@ -282,7 +295,7 @@ uploadsController.actuallyUploadByUrl = async (req, res, user, albumid) => {
iteration++
if (iteration === urls.length) {
if (config.uploads.scan && config.uploads.scan.enabled) {
const scan = await uploadsController.scanFiles(req, infoMap)
const scan = await uploadsController.scanFiles(req.app.get('clam-scanner'), infoMap)
if (scan) { return erred(scan) }
}
@ -357,7 +370,7 @@ uploadsController.actuallyFinishChunks = async (req, res, user, albumid) => {
}
const length = uploadsController.getFileNameLength(req)
const name = await uploadsController.getUniqueRandomName(length, extension)
const name = await uploadsController.getUniqueRandomName(length, extension, req.app.get('uploads-set'))
.catch(erred)
if (!name) { return }
@ -407,7 +420,7 @@ uploadsController.actuallyFinishChunks = async (req, res, user, albumid) => {
iteration++
if (iteration === files.length) {
if (config.uploads.scan && config.uploads.scan.enabled) {
const scan = await uploadsController.scanFiles(req, infoMap)
const scan = await uploadsController.scanFiles(req.app.get('clam-scanner'), infoMap)
if (scan) { return erred(scan) }
}
@ -534,7 +547,13 @@ uploadsController.formatInfoMap = (req, res, user, infoMap) => {
timestamp: Math.floor(Date.now() / 1000)
})
} else {
const identifier = info.data.filename.split('.')[0]
utils.deleteFile(info.data.filename).catch(console.error)
const set = req.app.get('uploads-set')
if (set) {
set.delete(identifier)
// console.log(`Removed ${identifier} from identifiers cache (formatInfoMap)`)
}
existingFiles.push(dbFile)
}
@ -547,11 +566,9 @@ uploadsController.formatInfoMap = (req, res, user, infoMap) => {
})
}
uploadsController.scanFiles = (req, infoMap) => {
uploadsController.scanFiles = (scanner, infoMap) => {
return new Promise(async (resolve, reject) => {
let iteration = 0
const scanner = req.app.get('clam-scanner')
for (const info of infoMap) {
scanner.scanFile(info.path).then(reply => {
if (!reply.includes('OK') || reply.includes('FOUND')) {
@ -645,7 +662,7 @@ uploadsController.bulkDelete = async (req, res) => {
return res.json({ success: false, description: 'No array of files specified.' })
}
const failed = await utils.bulkDeleteFiles(field, values, user)
const failed = await utils.bulkDeleteFiles(field, values, user, req.app.get('uploads-set'))
if (failed.length < values.length) {
return res.json({ success: true, failed })
}

View File

@ -175,9 +175,10 @@ utilsController.deleteFile = file => {
* @param {string} field
* @param {any} values
* @param {user} user
* @param {Set} set
* @return {any[]} failed
*/
utilsController.bulkDeleteFiles = async (field, values, user) => {
utilsController.bulkDeleteFiles = async (field, values, user, set) => {
if (!user || !['id', 'name'].includes(field)) { return }
const ismoderator = perms.is(user, 'moderator')
@ -189,14 +190,17 @@ utilsController.bulkDeleteFiles = async (field, values, user) => {
}
})
const deleted = []
// an array of file object
const deletedFiles = []
// an array of value of the specified field
const failed = values.filter(value => !files.find(file => file[field] === value))
// Delete all files physically
await Promise.all(files.map(file => {
return new Promise(async resolve => {
await utilsController.deleteFile(file.name)
.then(() => deleted.push(file.id))
.then(() => deletedFiles.push(file))
.catch(error => {
failed.push(file[field])
console.error(error)
@ -205,16 +209,24 @@ utilsController.bulkDeleteFiles = async (field, values, user) => {
})
}))
if (!deleted.length) { return failed }
if (!deletedFiles.length) { return failed }
// Delete all files from database
const deletedIds = deletedFiles.map(file => file.id)
const deleteDb = await db.table('files')
.whereIn('id', deleted)
.whereIn('id', deletedIds)
.del()
.catch(console.error)
if (!deleteDb) { return failed }
const filtered = files.filter(file => deleted.includes(file.id))
if (set) {
deletedFiles.forEach(file => {
const identifier = file.name.split('.')[0]
set.delete(identifier)
// console.log(`Removed ${identifier} from identifiers cache (bulkDeleteFiles)`)
})
}
const filtered = files.filter(file => deletedIds.includes(file.id))
// Update albums if necessary
if (deleteDb) {

View File

@ -10,6 +10,7 @@ const fs = require('fs')
const helmet = require('helmet')
const nunjucks = require('nunjucks')
const RateLimit = require('express-rate-limit')
const readline = require('readline')
const safe = express()
// It appears to be best to catch these before doing anything else
@ -130,12 +131,46 @@ const start = async () => {
if (!created) { return process.exit(1) }
}
if (config.uploads.cacheIdentifiers) {
// Cache tree of uploads directory
process.stdout.write('Caching identifiers in uploads directory ...')
const setSize = await new Promise((resolve, reject) => {
const uploadsDir = `./${config.uploads.folder}`
fs.readdir(uploadsDir, (error, names) => {
if (error) { return reject(error) }
const set = new Set()
names.forEach(name => set.add(name.split('.')[0]))
safe.set('uploads-set', set)
resolve(set.size)
})
}).catch(error => console.error(error.toString()))
if (!setSize) { return process.exit(1) }
process.stdout.write(` ${setSize} OK!\n`)
}
safe.listen(config.port, () => {
console.log(`lolisafe started on port ${config.port}`)
if (process.env.DEV === '1') {
// DEV=1 yarn start
console.log('lolisafe is in development mode, nunjucks caching disabled')
}
// Add readline interface to allow evaluating arbitrary JavaScript from console
readline.createInterface({
input: process.stdin,
output: process.stdout,
prompt: ''
}).on('line', line => {
try {
if (line === '.exit') { process.exit(0) }
// eslint-disable-next-line no-eval
process.stdout.write(`${require('util').inspect(eval(line), { depth: 0 })}\n`)
} catch (error) {
console.error(error.toString())
}
}).on('SIGINT', () => {
process.exit(0)
})
})
}

View File

@ -35,6 +35,7 @@
"node-fetch": "^2.2.0",
"nunjucks": "^3.1.2",
"randomstring": "^1.1.5",
"readline": "^1.3.0",
"sharp": "^0.21.0",
"sqlite3": "^4.0.0"
},

View File

@ -1,6 +1,13 @@
body {
-webkit-animation: none;
animation: none;
}
#auth,
#dashboard {
display: none
display: none;
-webkit-animation: fadeInOpacity .5s;
animation: fadeInOpacity .5s;
}
.section {
@ -35,6 +42,23 @@
background: none;
}
ul#albumsContainer {
border-left: 0;
padding-left: 0;
}
ul#albumsContainer li {
border-left: 1px solid #898b8d;
padding-left: .75em;
-webkit-animation: animateAlbum 1s;
animation: animateAlbum 1s
}
#page.fade-in {
-webkit-animation: fadeInOpacity .5s;
animation: fadeInOpacity .5s
}
.pagination a {
color: #eff0f1;
border-color: #4d4d4d;
@ -197,3 +221,25 @@
.is-linethrough {
text-decoration: line-through
}
/* Entrance animation for album list items: grow from zero height while
   fading in. "from"/"to" are the standard aliases for 0%/100%. */
@-webkit-keyframes animateAlbum {
  from {
    opacity: 0;
    max-height: 0;
  }
  to {
    opacity: 1;
    max-height: 36px;
  }
}
@keyframes animateAlbum {
  from {
    opacity: 0;
    max-height: 0;
  }
  to {
    opacity: 1;
    max-height: 36px;
  }
}

View File

@ -111,6 +111,11 @@
margin-bottom: 1rem;
}
.uploads>div {
-webkit-animation: fadeInOpacity .5s;
animation: fadeInOpacity .5s;
}
.uploads.nojs {
margin-bottom: 0;
}
@ -138,6 +143,11 @@
margin-top: 5px;
}
#albumDiv {
-webkit-animation: fadeInOpacity .5s;
animation: fadeInOpacity .5s;
}
#albumDiv .control {
text-align: inherit;
}
@ -145,6 +155,8 @@
#linksColumn {
margin-left: -0.25rem;
margin-right: -0.25rem;
-webkit-animation: fadeInOpacity .5s;
animation: fadeInOpacity .5s;
}
#linksColumn .column {
@ -159,6 +171,8 @@
#tabs {
margin-bottom: 1rem;
-webkit-animation: fadeInOpacity .5s;
animation: fadeInOpacity .5s;
}
#tabs ul {
@ -181,4 +195,6 @@
.tab-content {
margin-bottom: -.75rem;
-webkit-animation: fadeInOpacity .5s;
animation: fadeInOpacity .5s;
}

View File

@ -5,6 +5,8 @@ html {
body {
color: #eff0f1;
-webkit-animation: fadeInOpacity .5s;
animation: fadeInOpacity .5s;
}
.title {
@ -115,9 +117,28 @@ hr {
border-bottom-right-radius: 0;
right: 1%;
opacity: .25;
-webkit-transition: opacity .25s;
transition: opacity .25s;
}
.render.button:hover {
opacity: 1;
}
/* Simple fade-in used across the dashboard and home page.
   "from"/"to" are the standard aliases for 0%/100%. */
@-webkit-keyframes fadeInOpacity {
  from {
    opacity: 0;
  }
  to {
    opacity: 1;
  }
}
@keyframes fadeInOpacity {
  from {
    opacity: 0;
  }
  to {
    opacity: 1;
  }
}

View File

@ -68,7 +68,9 @@ const page = {
imageExtensions: ['.webp', '.jpg', '.jpeg', '.bmp', '.gif', '.png'],
// byte units for getPrettyBytes()
byteUnits: ['B', 'kB', 'MB', 'GB', 'TB', 'PB', 'EB', 'ZB', 'YB']
byteUnits: ['B', 'kB', 'MB', 'GB', 'TB', 'PB', 'EB', 'ZB', 'YB'],
fadingIn: null
}
page.preparePage = function () {
@ -267,6 +269,17 @@ page.isLoading = function (element, state) {
element.classList.remove('is-loading')
}
// (Re)start the .5s fade-in animation on the page container.
// All call sites invoke this with no arguments, so the previously unused
// "content" parameter has been dropped (backward compatible in JS).
page.fadeIn = function () {
  if (page.fadingIn) {
    // A previous fade is still running: cancel its cleanup timer and strip
    // the class. Reading offsetWidth forces a reflow so that re-adding the
    // class below actually restarts the CSS animation — removing and adding
    // a class within the same tick is otherwise coalesced by the browser
    // and the animation would not replay.
    clearTimeout(page.fadingIn)
    page.dom.classList.remove('fade-in')
    void page.dom.offsetWidth
  }
  page.dom.classList.add('fade-in')
  // Drop the class once the animation has finished, and clear the handle so
  // the restart branch above is skipped on the next (non-overlapping) call.
  page.fadingIn = setTimeout(function () {
    page.fadingIn = null
    page.dom.classList.remove('fade-in')
  }, 500)
}
page.getUploads = function ({ album, pageNum, all } = {}, element) {
if (element) { page.isLoading(element, true) }
if (pageNum === undefined) { pageNum = 0 }
@ -354,6 +367,7 @@ page.getUploads = function ({ album, pageNum, all } = {}, element) {
</div>
${pagination}
`
page.fadeIn()
const table = document.getElementById('table')
@ -446,6 +460,7 @@ page.getUploads = function ({ album, pageNum, all } = {}, element) {
<hr>
${pagination}
`
page.fadeIn()
const table = document.getElementById('table')
@ -823,6 +838,7 @@ page.deleteByNames = function () {
</div>
</div>
`
page.fadeIn()
}
page.deleteFileByNames = function () {
@ -1059,6 +1075,7 @@ page.getAlbums = function () {
</table>
</div>
`
page.fadeIn()
const homeDomain = response.data.homeDomain
const table = document.getElementById('table')
@ -1351,6 +1368,7 @@ page.changeFileLength = function () {
<div>
</div>
`
page.fadeIn()
document.getElementById('setFileLength').addEventListener('click', function () {
page.setFileLength(document.getElementById('fileLength').value, this)
@ -1420,6 +1438,7 @@ page.changeToken = function () {
</div>
</div>
`
page.fadeIn()
}).catch(function (error) {
console.log(error)
return swal('An error occurred!', 'There was an error with the request, please check the console for more information.', 'error')
@ -1483,6 +1502,7 @@ page.changePassword = function () {
</div>
</div>
`
page.fadeIn()
document.getElementById('sendChangePassword').addEventListener('click', function () {
if (document.getElementById('password').value === document.getElementById('passwordConfirm').value) {
@ -1651,6 +1671,7 @@ page.getUsers = ({ pageNum } = {}, element) => {
<hr>
${pagination}
`
page.fadeIn()
const table = document.getElementById('table')

View File

@ -15,7 +15,7 @@
v2: Images and config files (manifest.json, browserconfig.xml, etc).
v3: CSS and JS files (libs such as bulma, lazyload, etc).
#}
{% set v1 = "Me9KhOnP5M" %}
{% set v1 = "pt1vlNTrbG" %}
{% set v2 = "Ii3JYKIhb0" %}
{% set v3 = "8xbKOM7u3w" %}

View File

@ -2928,6 +2928,11 @@ readdirp@^2.0.0:
readable-stream "^2.0.2"
set-immediate-shim "^1.0.1"
readline@^1.3.0:
version "1.3.0"
resolved "https://registry.yarnpkg.com/readline/-/readline-1.3.0.tgz#c580d77ef2cfc8752b132498060dc9793a7ac01c"
integrity sha1-xYDXfvLPyHUrEySYBg3JeTp6wBw=
rechoir@^0.6.2:
version "0.6.2"
resolved "https://registry.yarnpkg.com/rechoir/-/rechoir-0.6.2.tgz#85204b54dba82d5742e28c96756ef43af50e3384"