Mirror of https://github.com/BobbyWibowo/lolisafe.git (synced 2025-01-18 09:21:32 +00:00)
Updates (please update your config.js)
NOTICE: Please update your config.js. Use config.sample.js as the template; there were a couple of renames and restructures.

* The album zipper API route now internally saves its state while it is generating zip files, and any subsequent requests are silently "postponed" until the first spawned task is finished. This guarantees that there are never multiple zipping tasks for the same album. The method may seem a bit hackish, though.
* All instances of console.log(error) were replaced with console.error(error), so any error goes to stderr instead of stdout.
* Deleting files by names now properly removes successfully deleted files from the textarea. There was a logic flaw.
* Failure to generate a thumbnail no longer prints the full stack, only the error message. The thumbnail is then symlinked to a template image at /public/images/unavailable.png (a simple image saying that thumbnail generation failed). This hasn't been tested on Windows machines, but it will probably work fine. I thought of adding a new column to the files table to store whether thumbnail generation succeeded, but for now I'll go with this method.
commit 7991a63315 (parent d86dfc9b0e)
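The core of the album-zipper change below is a per-album EventEmitter: the first request for an album's zip spawns the actual task, and any request that arrives while that task is still running simply waits for its 'done' event instead of starting a second one. A minimal sketch of the idea, with the Express plumbing stripped out (buildZip and respond are hypothetical stand-ins for the real zipping and response code in albumsController.generateZip):

const EventEmitter = require('events')

// One emitter per album identifier that currently has a zip task running.
const zipEmitters = new Map()

class ZipEmitter extends EventEmitter {
  constructor (identifier) {
    super()
    this.identifier = identifier
    // Drop the emitter once the task finishes, so a later request can start a fresh task.
    this.once('done', () => zipEmitters.delete(this.identifier))
  }
}

async function generateZip (identifier, buildZip, respond) {
  if (zipEmitters.has(identifier)) {
    // A zip task for this album is already running: postpone this request
    // until the first task emits 'done', then reuse its result.
    return zipEmitters.get(identifier).once('done', respond)
  }

  zipEmitters.set(identifier, new ZipEmitter(identifier))
  const result = await buildZip(identifier) // hypothetical helper doing the actual zipping
  zipEmitters.get(identifier).emit('done', result) // wakes up every postponed request
  return respond(result)
}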
@@ -1,5 +1,4 @@
module.exports = {
/*
If set to true the user will need to specify the auto-generated token
on each API call, meaning random strangers won't be able to use the service

@@ -8,7 +7,9 @@ module.exports = {
*/
private: true,
// If true, users will be able to create accounts and access their uploaded files
/*
If true, users will be able to create accounts and access their uploaded files.
*/
enableUserAccounts: true,
/*

@@ -34,10 +35,14 @@ module.exports = {
*/
homeDomain: null,
// Port on which to run the server
/*
Port on which to run the server.
*/
port: 9999,
// Pages to process for the frontend
/*
Pages to process for the frontend.
*/
pages: ['home', 'auth', 'dashboard', 'faq'],
/*

@@ -58,34 +63,30 @@ module.exports = {
'.sh'
],
// Uploads config
/*
Uploads config.
*/
uploads: {
// Folder where images should be stored
/*
Folder where images should be stored.
*/
folder: 'uploads',
/*
Max file size allowed. Needs to be in MB
Max file size allowed. Needs to be in MB.
Note: When maxSize is greater than 1 MiB, you must set the client_max_body_size to the same as maxSize.
*/
maxSize: '512MB',
/*
Chunked uploads.
Chunk size for chunk uploads. Needs to be in MB.
If this is enabled, every files uploaded from the homepage uploader will forcibly be chunked
by the size specified in "chunkSize". People will still be able to upload bigger files with
the API as long as they don't surpass the limit specified in the "maxSize" option above.
Total size of the whole chunks will also later be checked against the "maxSize" option.
No-JS uploader page will not have chunked uploads support, if you want to change the maximum
file size 'displayed' on it, you can change the value of "noJsMaxSize".
You can also set it to null (or other falsy values) to inherit the value of "maxSize" option.
"chunkSize", and "noJsMaxSize" if set, need to be in MB.
NOTE: Set to falsy value (false, null, etc.) to disable.
*/
chunkedUploads: {
enabled: true,
chunkSize: '10MB',
noJsMaxSize: null
},
chunkSize: '10MB',
/*
The length of the randomly generated name for uploaded files.

@@ -103,7 +104,9 @@ module.exports = {
userChangeable: false
},
// The length of the randomly generated identifier for albums.
/*
The length of the randomly generated identifier for albums.
*/
albumIdentifierLength: 8,
/*

@@ -114,11 +117,11 @@ module.exports = {
maxTries: 1,
/*
NOTE: Thumbnails are only for the admin panel and they require you
to install a separate binary called graphicsmagick (http://www.graphicsmagick.org)
for images and ffmpeg (https://ffmpeg.org/) for video files
Thumbnails are only for the admin panel and they require you to install
a separate binary called graphicsmagick (http://www.graphicsmagick.org) for images
and ffmpeg (https://ffmpeg.org/) for video files.
*/
generateThumbnails: {
generateThumbs: {
image: true,
video: false
},

@@ -127,21 +130,40 @@ module.exports = {
Allows users to download a ZIP file of all files in an album.
The file is generated when the user clicks the download button in the view
and is re-used if the album has not changed between download requests.
If "maxTotalSize" is set (needs to be in MB), generating ZIP file will be disabled
if the total size of all the files in the album exceeds the set limit.
If you have CloudFlare properly caching the zipping API route, it's recommended to
set this to '512MB' as CloudFlare will not cache files bigger than that.
*/
generateZips: {
enabled: true,
maxTotalSize: null
}
generateZips: true
},
// Folder where to store logs
/*
Cloudflare support.
*/
cloudflare: {
/*
No-JS uploader page will not chunk the uploads, so it's recommended to change this
into the maximum upload size you have in Cloudflare.
This limit will only be applied to the subtitle in the page.
NOTE: Set to falsy value (false, null, etc.) to disable.
*/
noJsMaxSize: '100MB',
/*
If you have a Page Rule in Cloudflare to cache everything in the album zippping
API route (HOME_DOMAIN/api/album/zip/*), with this option you can limit the
maximum total size of files in an album that can be zipped.
Cloudflare will not cache files bigger than 512MB.
NOTE: Set to falsy value (false, null, etc.) to disable.
*/
zipMaxTotalSize: '512MB'
},
/*
Folder where to store logs.
*/
logsFolder: 'logs',
// The following values shouldn't be touched
/*
The following values shouldn't be touched, unless you know what you are doing.
*/
database: {
client: 'sqlite3',
connection: { filename: './database/db' },
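Based on the config.sample.js changes above, migrating an existing config.js means collapsing the old uploads.chunkedUploads and uploads.generateZips objects and adding the new top-level cloudflare block. A sketch of the before/after (values are the sample defaults; keep your own):

// Old config.js keys (before this commit)
uploads: {
  chunkedUploads: { enabled: true, chunkSize: '10MB', noJsMaxSize: null },
  generateZips: { enabled: true, maxTotalSize: null }
},

// New config.js keys (after this commit)
uploads: {
  chunkSize: '10MB', // set to a falsy value to disable chunked uploads
  generateZips: true
},
cloudflare: {
  noJsMaxSize: '100MB', // only affects the size shown on the no-JS uploader page
  zipMaxTotalSize: '512MB' // albums whose total size exceeds this will not be zipped
},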
@@ -1,5 +1,6 @@
const config = require('./../config')
const db = require('knex')(config.database)
const EventEmitter = require('events')
const fs = require('fs')
const path = require('path')
const randomstring = require('randomstring')

@@ -8,13 +9,24 @@ const Zip = require('jszip')
const albumsController = {}
// Let's default it to only 1 try (for missing config key)
const maxTries = config.uploads.maxTries || 1
const homeDomain = config.homeDomain || config.domain
const uploadsDir = path.join(__dirname, '..', config.uploads.folder)
const zipsDir = path.join(uploadsDir, 'zips')
const maxTotalSize = config.uploads.generateZips.maxTotalSize
const maxTotalSizeBytes = parseInt(maxTotalSize) * 1000000
const zipMaxTotalSize = config.cloudflare.zipMaxTotalSize
const zipMaxTotalSizeBytes = parseInt(config.cloudflare.zipMaxTotalSize) * 1000000
albumsController.zipEmitters = new Map()
class ZipEmitter extends EventEmitter {
constructor (identifier) {
super()
this.identifier = identifier
this.once('done', () => {
albumsController.zipEmitters.delete(this.identifier)
})
}
}
albumsController.list = async (req, res, next) => {
const user = await utils.authorize(req, res)

@@ -165,7 +177,7 @@ albumsController.delete = async (req, res, next) => {
const zipPath = path.join(zipsDir, `${identifier}.zip`)
fs.unlink(zipPath, error => {
if (error && error.code !== 'ENOENT') {
console.log(error)
console.error(error)
return res.json({ success: false, description: error.toString(), failed })
}
res.json({ success: true, failed })

@@ -240,7 +252,7 @@ albumsController.edit = async (req, res, next) => {
if (error) { return res.json({ success: true, identifier }) }
fs.rename(zipPath, path.join(zipsDir, `${identifier}.zip`), error => {
if (!error) { return res.json({ success: true, identifier }) }
console.log(error)
console.error(error)
res.json({ success: false, description: error.toString() })
})
})

@@ -288,7 +300,7 @@ albumsController.get = async (req, res, next) => {
file.file = `${config.domain}/${file.name}`
const ext = path.extname(file.name).toLowerCase()
if ((config.uploads.generateThumbnails.image && utils.imageExtensions.includes(ext)) || (config.uploads.generateThumbnails.video && utils.videoExtensions.includes(ext))) {
if ((config.uploads.generateThumbs.image && utils.imageExtensions.includes(ext)) || (config.uploads.generateThumbs.video && utils.videoExtensions.includes(ext))) {
file.thumb = `${config.domain}/thumbs/${file.name.slice(0, -ext.length)}.png`
}
}

@@ -320,7 +332,7 @@ albumsController.generateZip = async (req, res, next) => {
})
}
if (!config.uploads.generateZips || !config.uploads.generateZips.enabled) {
if (!config.uploads.generateZips) {
return res.status(401).json({ success: false, description: 'Zip generation disabled.' })
}

@@ -350,21 +362,40 @@ albumsController.generateZip = async (req, res, next) => {
}
}
console.log(`Generating zip for album identifier: ${identifier}`)
if (albumsController.zipEmitters.has(identifier)) {
console.log(`Waiting previous zip task for album: ${identifier}.`)
return albumsController.zipEmitters.get(identifier).once('done', (filePath, fileName, json) => {
if (filePath && fileName) {
download(filePath, fileName)
} else if (json) {
res.json(json)
}
})
}
albumsController.zipEmitters.set(identifier, new ZipEmitter(identifier))
console.log(`Starting zip task for album: ${identifier}.`)
const files = await db.table('files')
.select('name', 'size')
.where('albumid', album.id)
if (files.length === 0) {
return res.json({ success: false, description: 'There are no files in the album.' })
console.log(`Finished zip task for album: ${identifier} (no files).`)
const json = { success: false, description: 'There are no files in the album.' }
albumsController.zipEmitters.get(identifier).emit('done', null, null, json)
return res.json(json)
}
if (maxTotalSize) {
if (zipMaxTotalSize) {
const totalSizeBytes = files.reduce((accumulator, file) => accumulator + parseInt(file.size), 0)
if (totalSizeBytes > maxTotalSizeBytes) {
return res.json({
if (totalSizeBytes > zipMaxTotalSizeBytes) {
console.log(`Finished zip task for album: ${identifier} (size exceeds).`)
const json = {
success: false,
description: `Total size of all files in the album exceeds the configured limit (${maxTotalSize}).`
})
description: `Total size of all files in the album exceeds the configured limit (${zipMaxTotalSize}).`
}
albumsController.zipEmitters.get(identifier).emit('done', null, null, json)
return res.json(json)
}
}

@@ -375,7 +406,7 @@ albumsController.generateZip = async (req, res, next) => {
for (const file of files) {
fs.readFile(path.join(uploadsDir, file.name), (error, data) => {
if (error) {
console.log(error)
console.error(error)
} else {
archive.file(file.name, data)
}

@@ -391,13 +422,15 @@ albumsController.generateZip = async (req, res, next) => {
})
.pipe(fs.createWriteStream(zipPath))
.on('finish', async () => {
console.log(`Generated zip for album identifier: ${identifier}`)
console.log(`Finished zip task for album: ${identifier} (success).`)
await db.table('albums')
.where('id', album.id)
.update('zipGeneratedAt', Math.floor(Date.now() / 1000))
const filePath = path.join(zipsDir, `${identifier}.zip`)
const fileName = `${album.name}.zip`
albumsController.zipEmitters.get(identifier).emit('done', filePath, fileName)
return download(filePath, fileName)
})
}
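A small note on the size math used in the controller above (and again later in the upload controller and the homepage uploader): the 'MB' strings from the config are run through parseInt(), which keeps only the leading digits, and one MB is counted as 1000000 bytes:

// How the 'MB' strings from the config become byte counts:
parseInt('512MB')           // 512
parseInt('512MB') * 1000000 // 512000000, compared against file/album sizes in bytes
parseInt('10MB') * 1000000  // 10000000, later used as the Dropzone chunk size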
@@ -21,7 +21,7 @@ authController.verify = async (req, res, next) => {
bcrypt.compare(password, user.password, (error, result) => {
if (error) {
console.log(error)
console.error(error)
return res.json({ success: false, description: 'There was an error.' })
}
if (result === false) { return res.json({ success: false, description: 'Wrong password.' }) }

@@ -52,7 +52,7 @@ authController.register = async (req, res, next) => {
bcrypt.hash(password, 10, async (error, hash) => {
if (error) {
console.log(error)
console.error(error)
return res.json({ success: false, description: 'Error generating password hash (╯°□°)╯︵ ┻━┻.' })
}
const token = randomstring.generate(64)

@@ -79,7 +79,7 @@ authController.changePassword = async (req, res, next) => {
bcrypt.hash(password, 10, async (error, hash) => {
if (error) {
console.log(error)
console.error(error)
return res.json({ success: false, description: 'Error generating password hash (╯°□°)╯︵ ┻━┻.' })
}
@@ -9,10 +9,9 @@ const utils = require('./utilsController')
const uploadsController = {}
// Let's default it to only 1 try (for missing config key)
const maxTries = config.uploads.maxTries || 1
const uploadsDir = path.join(__dirname, '..', config.uploads.folder)
const chunkedUploads = config.uploads.chunkedUploads && config.uploads.chunkedUploads.enabled
const chunkedUploads = Boolean(config.uploads.chunkSize)
const chunksDir = path.join(uploadsDir, 'chunks')
const maxSizeBytes = parseInt(config.uploads.maxSize) * 1000000

@@ -28,7 +27,7 @@ const storage = multer.diskStorage({
if (!error) { return cb(null, uuidDir) }
fs.mkdir(uuidDir, error => {
if (!error) { return cb(null, uuidDir) }
console.log(error)
console.error(error)
// eslint-disable-next-line standard/no-callback-literal
return cb('Could not process the chunked upload. Try again?')
})

@@ -69,18 +68,21 @@ const upload = multer({
}
}
if (chunkedUploads) {
// Re-map Dropzone keys so people can manually use the API without prepending 'dz'
for (const key in req.body) {
if (!/^dz/.test(key)) { continue }
req.body[key.replace(/^dz/, '')] = req.body[key]
delete req.body[key]
}
// Re-map Dropzone keys so people can manually use the API without prepending 'dz'
for (const key in req.body) {
if (!/^dz/.test(key)) { continue }
req.body[key.replace(/^dz/, '')] = req.body[key]
delete req.body[key]
}
const totalFileSize = parseInt(req.body.totalfilesize)
if (totalFileSize > maxSizeBytes) {
if (req.body.chunkindex) {
if (chunkedUploads && parseInt(req.body.totalfilesize) > maxSizeBytes) {
// This will not be true if "totalfilesize" key does not exist, since "NaN > number" is false.
// eslint-disable-next-line standard/no-callback-literal
return cb('Chunk error occurred. Total file size is larger than the maximum file size.')
} else if (!chunkedUploads) {
// eslint-disable-next-line standard/no-callback-literal
return cb('Chunked uploads is disabled at the moment.')
}
}

@@ -139,7 +141,7 @@ uploadsController.upload = async (req, res, next) => {
uploadsController.actuallyUpload = async (req, res, user, albumid) => {
const erred = error => {
const isError = error instanceof Error
if (isError) { console.log(error) }
if (isError) { console.error(error) }
res.json({
success: false,
description: isError ? error.toString() : `Error: ${error}`

@@ -200,7 +202,7 @@ uploadsController.finishChunks = async (req, res, next) => {
uploadsController.actuallyFinishChunks = async (req, res, user, albumid) => {
const erred = error => {
const isError = error instanceof Error
if (isError) { console.log(error) }
if (isError) { console.error(error) }
res.json({
success: false,
description: isError ? error.toString() : `Error: ${error}`

@@ -299,7 +301,7 @@ uploadsController.appendToStream = (destFileStream, uuidDr, chunkNames) => {
append(++i)
})
.on('error', error => {
console.log(error)
console.erred(error)
destFileStream.end()
return reject(error)
})

@@ -398,7 +400,7 @@ uploadsController.processFilesForDisplay = async (req, res, files, existingFiles
const albumids = []
for (const file of files) {
const ext = path.extname(file.name).toLowerCase()
if ((config.uploads.generateThumbnails.image && utils.imageExtensions.includes(ext)) || (config.uploads.generateThumbnails.video && utils.videoExtensions.includes(ext))) {
if ((config.uploads.generateThumbs.image && utils.imageExtensions.includes(ext)) || (config.uploads.generateThumbs.video && utils.videoExtensions.includes(ext))) {
file.thumb = `${basedomain}/thumbs/${file.name.slice(0, -ext.length)}.png`
utils.generateThumbs(file)
}

@@ -415,7 +417,7 @@ uploadsController.processFilesForDisplay = async (req, res, files, existingFiles
.update('editedAt', editedAt)
.then(() => {})
.catch(error => {
console.log(error)
console.erred(error)
albumSuccess = false
})
}))

@@ -560,8 +562,8 @@ uploadsController.list = async (req, res) => {
const isImageExt = utils.imageExtensions.includes(file.extname)
if ((!isVideoExt && !isImageExt) ||
(isVideoExt && config.uploads.generateThumbnails.video !== true) ||
(isImageExt && config.uploads.generateThumbnails.image !== true)) {
(isVideoExt && config.uploads.generateThumbs.video !== true) ||
(isImageExt && config.uploads.generateThumbs.image !== true)) {
continue
}
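The 'dz' remap above is what lets people call the chunked-upload API manually without copying Dropzone's field names. The same loop applied to a standalone object (the dz-prefixed keys are what Dropzone sends per chunk; the values here are made up):

const body = { dzuuid: '1f0e47a2', dzchunkindex: '0', dztotalfilesize: '15728640' }
for (const key in body) {
  if (!/^dz/.test(key)) { continue }
  body[key.replace(/^dz/, '')] = body[key]
  delete body[key]
}
console.log(body) // { uuid: '1f0e47a2', chunkindex: '0', totalfilesize: '15728640' }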
@@ -15,8 +15,8 @@ utilsController.imageExtensions = ['.webp', '.jpg', '.jpeg', '.bmp', '.gif', '.p
utilsController.videoExtensions = ['.webm', '.mp4', '.wmv', '.avi', '.mov', '.mkv']
utilsController.mayGenerateThumb = extname => {
return (config.uploads.generateThumbnails.image && utilsController.imageExtensions.includes(extname)) ||
(config.uploads.generateThumbnails.video && utilsController.videoExtensions.includes(extname))
return (config.uploads.generateThumbs.image && utilsController.imageExtensions.includes(extname)) ||
(config.uploads.generateThumbs.video && utilsController.videoExtensions.includes(extname))
}
utilsController.getPrettyDate = date => {

@@ -78,7 +78,13 @@ utilsController.generateThumbs = (file, basedomain) => {
.extent(size.width, size.height)
.background('transparent')
.write(thumbname, error => {
if (error) { console.log('Error - ', error) }
if (error) {
console.error(`${file.name}: ${error.message.trim()}`)
const placeholder = path.join(__dirname, '../public/images/unavailable.png')
fs.symlink(placeholder, thumbname, error => {
if (error) { console.error(error) }
})
}
})
}

@@ -90,7 +96,7 @@ utilsController.generateThumbs = (file, basedomain) => {
folder: path.join(__dirname, '..', config.uploads.folder, 'thumbs'),
size: '200x?'
})
.on('error', error => console.log('Error - ', error.message))
.on('error', error => console.log(`${file.name}: ${error.message}`))
})
}

@@ -142,7 +148,7 @@ utilsController.bulkDeleteFiles = async (field, values, user) => {
return new Promise(async resolve => {
const deleteFile = await utilsController.deleteFile(file.name)
.catch(error => {
console.log(error)
console.error(error)
failed.push(file[field])
})
if (!deleteFile) { return resolve() }

@@ -73,9 +73,9 @@ safe.use((error, req, res, next) => {
safe.listen(config.port, () => console.log(`lolisafe started on port ${config.port}`))
process.on('uncaughtException', error => {
console.error(`Uncaught Exception:\n${error.stack}`)
console.error(`Uncaught Exception:\n${error}`)
})
process.on('unhandledRejection', error => {
console.error(`Unhandled Rejection (Promise):\n${error.stack}`)
console.error(`Unhandled Rejection (Promise):\n${error}`)
})
Binary file not shown (Before: 63 KiB).
BIN public/icons/192pxr.png (new file, binary not shown; After: 64 KiB).
Binary file not shown (Before: 225 KiB).
BIN public/icons/384pxr.png (new file, binary not shown; After: 220 KiB).

@@ -10,12 +10,12 @@
"start_url": "/",
"icons": [
{
"src": "/icons/192px.png",
"src": "/icons/192pxr.png",
"sizes": "192x192",
"type": "image/png"
},
{
"src": "/icons/384px.png",
"src": "/icons/384pxr.png",
"sizes": "384x384",
"type": "image/png"
},

BIN public/images/unavailable.png (new file, binary not shown; After: 12 KiB).
@@ -22,7 +22,7 @@ page.do = async dest => {
password: pass
})
.catch(error => {
console.log(error)
console.error(error)
return swal('An error occurred!', 'There was an error with the request, please check the console for more information.', 'error')
})
if (!response) { return }

@@ -42,7 +42,7 @@ page.verify = async () => {
token: page.token
})
.catch(error => {
console.log(error)
console.error(error)
swal('An error occurred!', 'There was an error with the request, please check the console for more information.', 'error')
})
if (!response) { return }

@@ -619,9 +619,9 @@ page.deleteFileByNames = async () => {
let deleted = count
if (bulkdelete.data.failed && bulkdelete.data.failed.length) {
deleted -= bulkdelete.data.failed.length
document.getElementById('names').value = bulkdelete.data.failed.join('\n')
}
document.getElementById('names').value = bulkdelete.data.failed.join('\n')
swal('Deleted!', `${deleted} file${deleted === 1 ? ' has' : 's have'} been deleted.`, 'success')
}
@@ -8,7 +8,7 @@ const page = {
private: null,
enableUserAccounts: null,
maxFileSize: null,
chunkedUploads: null, // chunked uploads config
chunkSize: null,
// store album id that will be used with upload requests
album: null,

@@ -36,7 +36,7 @@ page.checkIfPublic = async () => {
page.private = response.data.private
page.enableUserAccounts = response.data.enableUserAccounts
page.maxFileSize = response.data.maxFileSize
page.chunkedUploads = response.data.chunkedUploads
page.chunkSize = response.data.chunkSize
page.preparePage()
}

@@ -178,8 +178,8 @@ page.prepareDropzone = () => {
maxFiles: 1000,
autoProcessQueue: true,
headers: { token: page.token },
chunking: page.chunkedUploads.enabled,
chunkSize: parseInt(page.chunkedUploads.chunkSize) * 1000000, // 1000000 B = 1 MB,
chunking: Boolean(page.chunkSize),
chunkSize: parseInt(page.chunkSize) * 1000000, // 1000000 B = 1 MB,
parallelChunkUploads: false, // when set to true, sometimes it often hangs with hundreds of parallel uploads
chunksUploaded: async (file, done) => {
file.previewElement.querySelector('.progress').setAttribute('value', 100)
@@ -61,7 +61,7 @@ routes.get('/a/:identifier', async (req, res, next) => {
thumb,
files,
identifier,
generateZips: config.uploads.generateZips && config.uploads.generateZips.enabled,
generateZips: config.uploads.generateZips,
downloadLink: album.download === 0 ? null : `../api/album/zip/${album.identifier}?v=${album.editedAt}`,
editedAt: album.editedAt,
url: `${homeDomain}/a/${album.identifier}`

@@ -10,7 +10,7 @@ routes.get('/check', (req, res, next) => {
private: config.private,
enableUserAccounts: config.enableUserAccounts,
maxFileSize: config.uploads.maxSize,
chunkedUploads: config.uploads.chunkedUploads
chunkSize: config.uploads.chunkSize
})
})
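For API consumers, the /check route above now reports chunkSize instead of the old chunkedUploads object; the homepage uploader shown earlier derives its Dropzone chunking settings from it. A hypothetical client check (axios and the domain are assumptions here, and the values depend on the server's config):

const { data } = await axios.get('https://safe.example.com/api/check')
// data: { private: true, enableUserAccounts: true, maxFileSize: '512MB', chunkSize: '10MB' }
const chunkingEnabled = Boolean(data.chunkSize) // same test the uploader performs
const chunkSizeBytes = parseInt(data.chunkSize) * 1000000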
@@ -4,7 +4,7 @@ const uploadController = require('./../controllers/uploadController')
const renderOptions = {
uploadDisabled: false,
maxFileSize: config.uploads.chunkedUploads.noJsMaxSize || config.uploads.maxSize
maxFileSize: config.cloudflare.noJsMaxSize || config.uploads.maxSize
}
if (config.private) {

@@ -12,8 +12,8 @@
v1: CSS and JS files.
v2: Images and config files (manifest.json, browserconfig.xml, etcetera).
#}
{% set v1 = "eB7LXsb2eH" %}
{% set v2 = "MSEpgpfFIQ" %}
{% set v1 = "Ii3JYKIhb0" %}
{% set v2 = "Ii3JYKIhb0" %}
{#
These will be the links in the homepage and the No-JS uploader.