Mirror of https://github.com/BobbyWibowo/lolisafe.git, synced 2024-12-14 08:26:22 +00:00

perf: improve uploads flow
lessen creation of temporary objects/variables, and refactor some variable names to be more obvious

commit 29b16edc04
parent ac63f8b76d

@@ -54,8 +54,8 @@ class ChunksData {
     this.root = root
     this.filename = 'tmp'
     this.chunks = 0
-    this.stream = null
-    this.hasher = null
+    this.writeStream = null
+    this.hashStream = null
   }

   onTimeout () {
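The rename above is the heart of the commit's readability goal: `stream` and `hasher` become `writeStream` and `hashStream`, so the names say what the objects are. A minimal standalone sketch of what such a chunked-upload session holds (a hypothetical trimmed-down class, not the full lolisafe implementation; it assumes the `blake3` npm package used elsewhere in this diff):

```js
const fs = require('fs')
const path = require('path')
const blake3 = require('blake3')

// Hypothetical, trimmed-down session object for one chunked upload.
class ChunksSession {
  constructor (root) {
    this.root = root          // temp directory dedicated to this upload
    this.filename = 'tmp'     // every chunk appends into this single file
    this.chunks = 0           // chunks received so far
    this.writeStream = null   // fs.WriteStream, opened with { flags: 'a' }
    this.hashStream = null    // incremental blake3 hasher, fed chunk by chunk
  }

  // Lazily open both streams on the first chunk, as the diff further below does.
  ensureStreams () {
    if (!this.writeStream) {
      this.writeStream = fs.createWriteStream(path.join(this.root, this.filename), { flags: 'a' })
    }
    if (!this.hashStream) {
      this.hashStream = blake3.createHash()
    }
  }
}
```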
@@ -219,7 +219,8 @@ self.upload = async (req, res) => {
   req.body = {}

   // Initially try to parse as multipart
-  const multipartErrors = []
+  const multipartFieldErrors = []
+  let hasMultipartField = false
   await req.multipart({
     // https://github.com/mscdex/busboy/tree/v1.6.0#exports
     limits: {
@@ -235,8 +236,11 @@ self.upload = async (req, res) => {
       files: maxFilesPerUpload
     }
   }, async field => {
-    // Wrap this to capture errors within this callback
+    // Wrap this to capture errors within this handler function,
+    // since it will be spawned in a separate context
     try {
+      hasMultipartField = true
+
       // Keep non-files fields in Request.body
       // Since fields get processed in sequence depending on the order at which they were defined,
       // chunked uploads data must be set before the files[] field which contain the actual file
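The expanded comment spells out why the `try` block exists: the field handler runs in its own async context, so a bare `throw` there would not reject the outer `await req.multipart(...)`. A hedged sketch of the collect-then-rethrow pattern (generic names, not the actual hyper-express/busboy API):

```js
const fieldErrors = []

// Hypothetical stand-in for the per-field handler passed to req.multipart().
async function onField (field) {
  try {
    if (!field.file) {
      throw new Error(`Unexpected field: "${field.name}"`)
    }
    // ... process the file stream ...
  } catch (error) {
    // A throw here would otherwise surface as an unhandled rejection,
    // so stash it and let the request handler re-throw it later.
    fieldErrors.push(error)
  }
}

// After parsing completes:
// if (fieldErrors.length) throw fieldErrors[0]
```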
@@ -251,6 +255,7 @@ self.upload = async (req, res) => {
         return
       }

+      // NOTE: This will probably never happen, but just in case
       if (!field.file) {
         throw new ClientError(`Unexpected field: "${field.name}"`)
       }
@@ -260,120 +265,120 @@ self.upload = async (req, res) => {
         req.files = []
       }

+      // Push immediately as we will only be adding props into the file object down the line
+      const file = {
+        albumid,
+        age,
+        mimetype: field.mime_type,
+        isChunk: req.body.uuid !== undefined &&
+          req.body.chunkindex !== undefined
+      }
+      req.files.push(file)
+
+      if (file.isChunk) {
+        if (!chunkedUploads) {
+          throw new ClientError('Chunked uploads are disabled at the moment.')
+        } else if (req.files.length > 1) {
+          throw new ClientError('Chunked uploads may only be uploaded 1 chunk at a time.')
+        }
+      }
+
       // NOTE: Since busboy@1, filenames no longer get automatically parsed as UTF-8, so we force it here
-      const originalname = field.file.name &&
+      file.originalname = field.file.name &&
         Buffer.from(field.file.name, 'latin1').toString('utf8')

-      const extname = utils.extname(originalname)
-      if (self.isExtensionFiltered(extname)) {
-        throw new ClientError(`${extname ? `${extname.substr(1).toUpperCase()} files` : 'Files with no extension'} are not permitted.`)
+      file.extname = utils.extname(file.originalname)
+      if (self.isExtensionFiltered(file.extname)) {
+        throw new ClientError(`${file.extname ? `${file.extname.substr(1).toUpperCase()} files` : 'Files with no extension'} are not permitted.`)
       }

-      if (req.body.chunkindex !== undefined && !chunkedUploads) {
-        throw new ClientError('Chunked uploads are disabled at the moment.')
-      }
-
-      // Is it a chunk file?
-      const isChunk = chunkedUploads &&
-        req.body.uuid !== undefined &&
-        req.body.chunkindex !== undefined
-
-      let chunksData
       let destination
-      let filename
-      if (isChunk) {
-        // Calling this will also reset its timeout
-        chunksData = await initChunks(req.body.uuid)
-        destination = chunksData.root
-        filename = chunksData.filename
+      if (file.isChunk) {
+        // Calling this will also reset this chunked uploads' timeout
+        file.chunksData = await initChunks(req.body.uuid)
+        file.filename = file.chunksData.filename
+        destination = file.chunksData.root
       } else {
         const length = self.parseFileIdentifierLength(req.headers.filelength)
+        file.filename = await self.getUniqueRandomName(length, file.extname)
         destination = paths.uploads
-        filename = await self.getUniqueRandomName(length, extname)
       }

-      // Write the file into disk, and return an object containing the required file information
-      const file = await new Promise((resolve, reject) => {
-        const finalPath = path.join(destination, filename)
+      file.path = path.join(destination, file.filename)

+      // Write the file into disk, and supply required props into file object
+      await new Promise((resolve, reject) => {
         // "weighted" resolve function, to be able to "await" multiple callbacks
         const REQUIRED_WEIGHT = 2
-        let _result = {
-          destination,
-          extname,
-          filename,
-          mimetype: field.mime_type,
-          originalname,
-          path: finalPath
-        }
         let _weight = 0
-        const _resolve = (result = {}, weight = 2) => {
+        const _resolve = (props = {}, weight = 2) => {
+          Object.assign(file, props)
           _weight += weight
-          _result = Object.assign(result, _result)
-          if (_weight >= REQUIRED_WEIGHT) {
-            resolve(_result)
-          }
+          if (_weight >= REQUIRED_WEIGHT) resolve()
         }

-        let outStream
-        let hash
+        const readStream = field.file.stream
+        let writeStream
+        let hashStream
         let scanStream
         const onerror = error => {
-          hash.dispose()
+          hashStream.dispose()
           reject(error)
         }

-        if (isChunk) {
-          if (!chunksData.stream) {
-            chunksData.stream = fs.createWriteStream(finalPath, { flags: 'a' })
-            chunksData.stream.on('error', onerror)
+        if (file.isChunk) {
+          if (!file.chunksData.writeStream) {
+            file.chunksData.writeStream = fs.createWriteStream(file.path, { flags: 'a' })
+            file.chunksData.writeStream.on('error', onerror)
           }
-          if (!chunksData.hasher) {
-            chunksData.hasher = blake3.createHash()
+          if (!file.chunksData.hashStream) {
+            file.chunksData.hashStream = blake3.createHash()
           }

-          outStream = chunksData.stream
-          hash = chunksData.hasher
+          writeStream = file.chunksData.writeStream
+          hashStream = file.chunksData.hashStream
         } else {
-          outStream = fs.createWriteStream(finalPath)
-          outStream.on('error', onerror)
-          hash = blake3.createHash()
+          writeStream = fs.createWriteStream(file.path)
+          writeStream.on('error', onerror)
+          hashStream = blake3.createHash()

           if (utils.scan.passthrough &&
-            !self.scanHelpers.assertUserBypass(req._user, filename) &&
-            !self.scanHelpers.assertFileBypass({ filename })) {
+            !self.scanHelpers.assertUserBypass(req._user, file.filename) &&
+            !self.scanHelpers.assertFileBypass({ filename: file.filename })) {
             scanStream = utils.scan.instance.passthrough()
           }
         }

-        field.file.stream.on('error', onerror)
-        field.file.stream.on('data', d => hash.update(d))
+        readStream.on('error', onerror)
+        readStream.on('data', d => hashStream.update(d))

-        if (isChunk) {
-          field.file.stream.on('end', () => _resolve())
-          field.file.stream.pipe(outStream, { end: false })
+        if (file.isChunk) {
+          readStream.on('end', () => _resolve())
+          readStream.pipe(writeStream, { end: false })
         } else {
-          outStream.on('finish', () => _resolve({
-            size: outStream.bytesWritten,
-            hash: hash.digest('hex')
-          }, scanStream ? 1 : 2)
-          )
+          // Callback's weight is 1 when passthrough scanning is enabled,
+          // so that the Promise will be resolved only after
+          // both writeStream and scanStream finish
+          writeStream.on('finish', () => _resolve({
+            size: writeStream.bytesWritten,
+            hash: hashStream.digest('hex')
+          }, scanStream ? 1 : 2))

           if (scanStream) {
-            logger.debug(`[ClamAV]: ${filename}: Passthrough scanning\u2026`)
+            logger.debug(`[ClamAV]: ${file.filename}: Passthrough scanning\u2026`)
             scanStream.on('error', onerror)
             scanStream.on('scan-complete', scan => _resolve({ scan }, 1))
-            field.file.stream.pipe(scanStream).pipe(outStream)
+            readStream
+              .pipe(scanStream)
+              .pipe(writeStream)
           } else {
-            field.file.stream.pipe(outStream)
+            readStream
+              .pipe(writeStream)
           }
         }
       })
-
-      // Push file to Request.files array
-      req.files.push(file)
     } catch (error) {
-      multipartErrors.push(error)
+      multipartFieldErrors.push(error)
     }
   }).catch(error => {
     // res.multipart() itself may throw string errors
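The most interesting device in this hunk survives the refactor but now writes straight into the shared `file` object: the "weighted" resolver lets one `await` wait for either a single callback carrying the full weight of 2, or two independent callbacks (write finish plus scan completion) carrying weight 1 each. A self-contained sketch of the same idea, with hypothetical helper and variable names:

```js
// Build a resolver that merges partial results into `target` and only
// settles the surrounding Promise once enough weight has accumulated.
function makeWeightedResolver (target, resolve, requiredWeight = 2) {
  let weight = 0
  return (props = {}, w = 2) => {
    Object.assign(target, props)
    weight += w
    if (weight >= requiredWeight) resolve()
  }
}

async function demo () {
  const file = {}
  await new Promise(resolve => {
    const _resolve = makeWeightedResolver(file, resolve)
    // With passthrough scanning, two emitters each contribute weight 1:
    setImmediate(() => _resolve({ size: 1024, hash: 'abc' }, 1))      // write finished
    setImmediate(() => _resolve({ scan: { isInfected: false } }, 1))  // scan finished
  })
  console.log(file) // { size: 1024, hash: 'abc', scan: { isInfected: false } }
}

demo()
```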
@@ -384,29 +389,29 @@ self.upload = async (req, res) => {
     }
   })

-  if (multipartErrors.length) {
-    for (let i = 1; i < multipartErrors.length; i++) {
-      if (multipartErrors[i] instanceof ClientError ||
-        multipartErrors[i] instanceof ServerError) {
+  if (multipartFieldErrors.length) {
+    for (let i = 1; i < multipartFieldErrors.length; i++) {
+      if (multipartFieldErrors[i] instanceof ClientError ||
+        multipartFieldErrors[i] instanceof ServerError) {
         continue
       }
       // Log additional errors to console if they are not generic ClientError or ServeError
-      logger.error(multipartErrors[i])
+      logger.error(multipartFieldErrors[i])
     }
     // Re-throw the first multipart error into global error handler
-    throw multipartErrors[0]
+    throw multipartFieldErrors[0]
   }

-  if (Array.isArray(req.files)) {
-    return self.actuallyUpload(req, res, user, albumid, age)
+  if (hasMultipartField) {
+    return self.actuallyUpload(req, res, user)
   } else {
     // Parse POST body
     req.body = await req.json()
-    return self.actuallyUploadUrls(req, res, user, albumid, age)
+    return self.actuallyUploadUrls(req, res, user, { albumid, age })
   }
 }

-self.actuallyUpload = async (req, res, user, albumid, age) => {
+self.actuallyUpload = async (req, res, user, data = {}) => {
   if (!req.files || !req.files.length) {
     throw new ClientError('No files.')
   }
@@ -420,20 +425,12 @@ self.actuallyUpload = async (req, res, user, albumid, age) => {
     return res.json({ success: true })
   }

-  const infoMap = req.files.map(file => {
-    file.albumid = albumid
-    file.age = age
-    return {
-      path: path.join(paths.uploads, file.filename),
-      data: file
-    }
-  })
-
-  if (config.filterEmptyFile && infoMap.some(file => file.data.size === 0)) {
+  const filesData = req.files
+  if (config.filterEmptyFile && filesData.some(file => file.size === 0)) {
     // Unlink all files when at least one file is an empty file
     // Should continue even when encountering errors
-    await Promise.all(infoMap.map(info =>
-      utils.unlinkFile(info.data.filename).catch(logger.error)
+    await Promise.all(filesData.map(file =>
+      utils.unlinkFile(file.filename).catch(logger.error)
     ))

     throw new ClientError('Empty files are not allowed.')
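This hunk is the commit's core data-shape change in miniature: instead of mapping `req.files` into `{ path, data }` wrappers, the flat file objects are used as-is. A before/after sketch of the shape consumers now see (values hypothetical):

```js
// Before: every consumer had to reach through the wrapper.
const infoEntry = {
  path: '/uploads/aBcD1234.png', // duplicated from data
  data: { filename: 'aBcD1234.png', size: 1024 }
}
console.log(infoEntry.data.filename)

// After: one flat object, with `path` assigned once during parsing.
const file = { filename: 'aBcD1234.png', path: '/uploads/aBcD1234.png', size: 1024 }
console.log(file.filename)
```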
@@ -442,24 +439,24 @@ self.actuallyUpload = async (req, res, user, albumid, age) => {
   if (utils.scan.instance) {
     let scanResult
     if (utils.scan.passthrough) {
-      scanResult = await self.assertPassthroughScans(req, user, infoMap)
+      scanResult = await self.assertPassthroughScans(req, user, filesData)
     } else {
-      scanResult = await self.scanFiles(req, user, infoMap)
+      scanResult = await self.scanFiles(req, user, filesData)
     }
     if (scanResult) {
       throw new ClientError(scanResult)
     }
   }

-  await self.stripTags(req, infoMap)
+  await self.stripTags(req, filesData)

-  const result = await self.storeFilesToDb(req, res, user, infoMap)
+  const result = await self.storeFilesToDb(req, res, user, filesData)
   return self.sendUploadResponse(req, res, user, result)
 }

 /** URL uploads */

-self.actuallyUploadUrls = async (req, res, user, albumid, age) => {
+self.actuallyUploadUrls = async (req, res, user, data = {}) => {
   if (!config.uploads.urlMaxSize) {
     throw new ClientError('Upload by URLs is disabled at the moment.', { statusCode: 403 })
   }
@@ -486,23 +483,31 @@ self.actuallyUploadUrls = async (req, res, user, albumid, age) => {
   }

   const downloaded = []
-  const infoMap = []
+  const filesData = []
   await Promise.all(urls.map(async url => {
-    const original = path.basename(url).split(/[?#]/)[0]
-    const extname = utils.extname(original)
+    // Push immediately as we will only be adding props into the file object down the line
+    const file = {
+      url,
+      albumid: data.albumid,
+      age: data.age
+    }
+    filesData.push(file)
+
+    file.originalname = path.basename(url).split(/[?#]/)[0]
+    file.extname = utils.extname(file.originalname)

     // Extensions filter
     let filtered = false
     if (urlExtensionsFilter && ['blacklist', 'whitelist'].includes(config.uploads.urlExtensionsFilterMode)) {
-      const match = config.uploads.urlExtensionsFilter.includes(extname.toLowerCase())
+      const match = config.uploads.urlExtensionsFilter.includes(file.extname.toLowerCase())
       const whitelist = config.uploads.urlExtensionsFilterMode === 'whitelist'
       filtered = ((!whitelist && match) || (whitelist && !match))
     } else {
-      filtered = self.isExtensionFiltered(extname)
+      filtered = self.isExtensionFiltered(file.extname)
     }

     if (filtered) {
-      throw new ClientError(`${extname ? `${extname.substr(1).toUpperCase()} files` : 'Files with no extension'} are not permitted.`)
+      throw new ClientError(`${file.extname ? `${file.extname.substr(1).toUpperCase()} files` : 'Files with no extension'} are not permitted.`)
     }

     if (config.uploads.urlProxy) {
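The new "Push immediately" comment describes a deliberate ordering: the file object is registered in `filesData` before any download work begins, so every error path can still find and clean it up. A small sketch of the pattern with a hypothetical helper name:

```js
const path = require('path')

const filesData = []

// Hypothetical helper: register first, enrich later.
function registerUrlUpload (url, data = {}) {
  const file = { url, albumid: data.albumid, age: data.age }
  filesData.push(file) // visible to cleanup code from this point on
  file.originalname = path.basename(url).split(/[?#]/)[0]
  return file
}

const file = registerUrlUpload('https://example.com/image.png?raw=1')
console.log(file.originalname) // 'image.png'
```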
@@ -529,29 +534,28 @@ self.actuallyUploadUrls = async (req, res, user, albumid, age) => {
     }

     const length = self.parseFileIdentifierLength(req.headers.filelength)
-    const name = await self.getUniqueRandomName(length, extname)
+    file.filename = await self.getUniqueRandomName(length, file.extname)
+    file.path = path.join(paths.uploads, file.filename)

-    const destination = path.join(paths.uploads, name)
-    let outStream
-    let hasher
+    let writeStream
+    let hashStream
     return Promise.resolve().then(async () => {
-      outStream = fs.createWriteStream(destination)
-      hasher = blake3.createHash()
+      writeStream = fs.createWriteStream(file.path)
+      hashStream = blake3.createHash()

       // Push to array early, so regardless of its progress it will be deleted on errors
-      downloaded.push(destination)
+      downloaded.push(file.path)

       // Limit max response body size with maximum allowed size
       const fetchFile = await fetch(url, { method: 'GET', size: urlMaxSizeBytes })
         .then(res => new Promise((resolve, reject) => {
           if (res.status === 200) {
-            outStream.on('error', reject)
+            writeStream.on('error', reject)
             res.body.on('error', reject)
-            res.body.on('data', d => hasher.update(d))
+            res.body.on('data', d => hashStream.update(d))

-            res.body.pipe(outStream)
-            outStream.on('finish', () => resolve(res))
+            res.body.pipe(writeStream)
+            writeStream.on('finish', () => resolve(res))
           } else {
             resolve(res)
           }
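Condensing this hunk into one function: the download is capped by node-fetch's `size` option, bytes are hashed as they stream past, and the result reports what actually hit disk. A hedged sketch (assumes a node-fetch v2 style API and the `blake3` package, as used in the diff; names are illustrative):

```js
const fs = require('fs')
const fetch = require('node-fetch') // v2-style API assumed
const blake3 = require('blake3')

async function downloadToDisk (url, destPath, maxBytes) {
  const writeStream = fs.createWriteStream(destPath)
  const hashStream = blake3.createHash()

  // node-fetch's `size` option rejects response bodies over the limit.
  const res = await fetch(url, { method: 'GET', size: maxBytes })
  if (res.status !== 200) throw new Error(`${res.status} ${res.statusText}`)

  await new Promise((resolve, reject) => {
    writeStream.on('error', reject)
    res.body.on('error', reject)
    res.body.on('data', d => hashStream.update(d)) // hash while streaming
    writeStream.on('finish', resolve)
    res.body.pipe(writeStream)
  })

  return { size: writeStream.bytesWritten, hash: hashStream.digest('hex') }
}
```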
@@ -561,31 +565,22 @@ self.actuallyUploadUrls = async (req, res, user, albumid, age) => {
         throw new ServerError(`${fetchFile.status} ${fetchFile.statusText}`)
       }

-      const contentType = fetchFile.headers.get('content-type')
       // Re-test size via actual bytes written to physical file
-      assertSize(outStream.bytesWritten)
+      assertSize(writeStream.bytesWritten)

-      infoMap.push({
-        path: destination,
-        data: {
-          filename: name,
-          originalname: original,
-          extname,
-          mimetype: contentType ? contentType.split(';')[0] : '',
-          size: outStream.bytesWritten,
-          hash: hasher.digest('hex'),
-          albumid,
-          age
-        }
-      })
+      // Finalize file props
+      const contentType = fetchFile.headers.get('content-type')
+      file.mimetype = contentType ? contentType.split(';')[0] : 'application/octet-stream'
+      file.size = writeStream.bytesWritten
+      file.hash = hashStream.digest('hex')
     }).catch(err => {
       // Dispose of unfinished write & hasher streams
-      if (outStream && !outStream.destroyed) {
-        outStream.destroy()
+      if (writeStream && !writeStream.destroyed) {
+        writeStream.destroy()
       }
       try {
-        if (hasher) {
-          hasher.dispose()
+        if (hashStream) {
+          hashStream.dispose()
         }
       } catch (_) {}

@@ -614,11 +609,11 @@ self.actuallyUploadUrls = async (req, res, user, albumid, age) => {
   })

   if (utils.scan.instance) {
-    const scanResult = await self.scanFiles(req, user, infoMap)
+    const scanResult = await self.scanFiles(req, user, filesData)
     if (scanResult) throw new ClientError(scanResult)
   }

-  const result = await self.storeFilesToDb(req, res, user, infoMap)
+  const result = await self.storeFilesToDb(req, res, user, filesData)
   return self.sendUploadResponse(req, res, user, result)
 }

@@ -659,7 +654,7 @@ self.finishChunks = async (req, res) => {
 }

 self.actuallyFinishChunks = async (req, res, user, files) => {
-  const infoMap = []
+  const filesData = []
   await Promise.all(files.map(async file => {
     if (!file.uuid || typeof chunksData[file.uuid] === 'undefined') {
       throw new ClientError('Invalid file UUID, or chunks data had already timed out. Try again?')
@@ -670,8 +665,8 @@ self.actuallyFinishChunks = async (req, res, user, files) => {
     chunksData[file.uuid].clearTimeout()

     // Conclude write and hasher streams
-    chunksData[file.uuid].stream.end()
-    const hash = chunksData[file.uuid].hasher.digest('hex')
+    chunksData[file.uuid].writeStream.end()
+    const hash = chunksData[file.uuid].hashStream.digest('hex')

     if (chunksData[file.uuid].chunks < 2 || chunksData[file.uuid].chunks > maxChunksCount) {
       throw new ClientError('Invalid chunks count.')
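With the renamed props, concluding a chunked upload reads naturally: end the append stream, then take the final digest from the hasher that has already seen every chunk in order. A sketch using a session shaped like the `ChunksData` class above (hypothetical helper name):

```js
function concludeChunkedUpload (session) {
  session.writeStream.end()                      // no more appends
  const hash = session.hashStream.digest('hex')  // digest over all chunks
  const size = session.writeStream.bytesWritten  // actual bytes on disk
  return { hash, size }
}
```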
@@ -684,7 +679,7 @@ self.actuallyFinishChunks = async (req, res, user, files) => {

     file.age = self.assertRetentionPeriod(user, file.age)

-    file.size = chunksData[file.uuid].stream.bytesWritten
+    file.size = chunksData[file.uuid].writeStream.bytesWritten
     if (config.filterEmptyFile && file.size === 0) {
       throw new ClientError('Empty files are not allowed.')
     } else if (file.size > maxSizeBytes) {
@@ -717,39 +712,38 @@ self.actuallyFinishChunks = async (req, res, user, files) => {
     let albumid = parseInt(file.albumid)
     if (isNaN(albumid)) albumid = null

-    const data = {
+    filesData.push({
       filename: name,
       originalname: file.original || '',
       extname: file.extname,
       mimetype: file.type || '',
+      path: destination,
       size: file.size,
       hash,
       albumid,
       age: file.age
-    }
-
-    infoMap.push({ path: destination, data })
+    })
   }))

   if (utils.scan.instance) {
-    const scanResult = await self.scanFiles(req, user, infoMap)
+    const scanResult = await self.scanFiles(req, user, filesData)
     if (scanResult) throw new ClientError(scanResult)
   }

-  await self.stripTags(req, infoMap)
+  await self.stripTags(req, filesData)

-  const result = await self.storeFilesToDb(req, res, user, infoMap)
+  const result = await self.storeFilesToDb(req, res, user, filesData)
   return self.sendUploadResponse(req, res, user, result)
 }

 self.cleanUpChunks = async uuid => {
   // Dispose of unfinished write & hasher streams
-  if (chunksData[uuid].stream && !chunksData[uuid].stream.destroyed) {
-    chunksData[uuid].stream.destroy()
+  if (chunksData[uuid].writeStream && !chunksData[uuid].writeStream.destroyed) {
+    chunksData[uuid].writeStream.destroy()
   }
   try {
-    if (chunksData[uuid].hasher) {
-      chunksData[uuid].hasher.dispose()
+    if (chunksData[uuid].hashStream) {
+      chunksData[uuid].hashStream.dispose()
     }
   } catch (_) {}

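The cleanup path mirrors the happy path with the new names: destroy a write stream that never finished, and dispose the hasher (the `blake3` package's hashers hold resources that `dispose()` frees; errors there are deliberately swallowed). A sketch with a hypothetical helper name:

```js
function disposeChunkStreams (session) {
  // Abort an unfinished append stream without flushing it.
  if (session.writeStream && !session.writeStream.destroyed) {
    session.writeStream.destroy()
  }
  // dispose() failures must not mask the cleanup itself.
  try {
    if (session.hashStream) session.hashStream.dispose()
  } catch (_) {}
}
```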
@@ -793,20 +787,20 @@ self.scanHelpers.assertFileBypass = data => {
   return false
 }

-self.assertPassthroughScans = async (req, user, infoMap) => {
+self.assertPassthroughScans = async (req, user, filesData) => {
   const foundThreats = []
   const unableToScan = []

-  for (const info of infoMap) {
-    if (info.data.scan) {
-      if (info.data.scan.isInfected) {
-        logger.log(`[ClamAV]: ${info.data.filename}: ${info.data.scan.viruses.join(', ')}`)
-        foundThreats.push(...info.data.scan.viruses)
-      } else if (info.data.scan.isInfected === null) {
-        logger.log(`[ClamAV]: ${info.data.filename}: Unable to scan`)
-        unableToScan.push(info.data.filename)
+  for (const file of filesData) {
+    if (file.scan) {
+      if (file.scan.isInfected) {
+        logger.log(`[ClamAV]: ${file.filename}: ${file.scan.viruses.join(', ')}`)
+        foundThreats.push(...file.scan.viruses)
+      } else if (file.scan.isInfected === null) {
+        logger.log(`[ClamAV]: ${file.filename}: Unable to scan`)
+        unableToScan.push(file.filename)
       } else {
-        logger.debug(`[ClamAV]: ${info.data.filename}: File is clean`)
+        logger.debug(`[ClamAV]: ${file.filename}: File is clean`)
       }
     }
   }
@@ -823,39 +817,39 @@ self.assertPassthroughScans = async (req, user, infoMap) => {
   if (result) {
     // Unlink all files when at least one threat is found
     // Should continue even when encountering errors
-    await Promise.all(infoMap.map(info =>
-      utils.unlinkFile(info.data.filename).catch(logger.error)
+    await Promise.all(filesData.map(file =>
+      utils.unlinkFile(file.filename).catch(logger.error)
     ))
   }

   return result
 }

-self.scanFiles = async (req, user, infoMap) => {
-  const filenames = infoMap.map(info => info.data.filename)
+self.scanFiles = async (req, user, filesData) => {
+  const filenames = filesData.map(file => file.filename)
   if (self.scanHelpers.assertUserBypass(user, filenames)) {
     return false
   }

   const foundThreats = []
   const unableToScan = []
-  const result = await Promise.all(infoMap.map(async info => {
+  const result = await Promise.all(filesData.map(async file => {
     if (self.scanHelpers.assertFileBypass({
-      filename: info.data.filename,
-      extname: info.data.extname,
-      size: info.data.size
+      filename: file.filename,
+      extname: file.extname,
+      size: file.size
     })) return

-    logger.debug(`[ClamAV]: ${info.data.filename}: Scanning\u2026`)
-    const response = await utils.scan.instance.isInfected(info.path)
+    logger.debug(`[ClamAV]: ${file.filename}: Scanning\u2026`)
+    const response = await utils.scan.instance.isInfected(file.path)
     if (response.isInfected) {
-      logger.log(`[ClamAV]: ${info.data.filename}: ${response.viruses.join(', ')}`)
+      logger.log(`[ClamAV]: ${file.filename}: ${response.viruses.join(', ')}`)
       foundThreats.push(...response.viruses)
     } else if (response.isInfected === null) {
-      logger.log(`[ClamAV]: ${info.data.filename}: Unable to scan`)
-      unableToScan.push(info.data.filename)
+      logger.log(`[ClamAV]: ${file.filename}: Unable to scan`)
+      unableToScan.push(file.filename)
     } else {
-      logger.debug(`[ClamAV]: ${info.data.filename}: File is clean`)
+      logger.debug(`[ClamAV]: ${file.filename}: File is clean`)
     }
   })).then(() => {
     if (foundThreats.length) {
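`self.scanFiles` now reads each flat file object directly. Judging by how the diff branches, the scanner behind `utils.scan.instance` resolves `isInfected()` with a tri-state result; a hedged sketch of the per-file branch (generic names, and the response shape is an assumption drawn from this diff rather than a documented contract):

```js
// `scanner.isInfected(path)` is assumed to resolve with
// { isInfected: true | false | null, viruses: string[] }.
async function scanOne (scanner, file, foundThreats, unableToScan) {
  const response = await scanner.isInfected(file.path)
  if (response.isInfected) {
    foundThreats.push(...response.viruses) // named threats
  } else if (response.isInfected === null) {
    unableToScan.push(file.filename)       // scanner could not decide
  }
  // else: clean, nothing to record
}
```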
@@ -873,8 +867,8 @@ self.scanFiles = async (req, user, infoMap) => {
   if (result) {
     // Unlink all files when at least one threat is found OR any errors occurred
     // Should continue even when encountering errors
-    await Promise.all(infoMap.map(info =>
-      utils.unlinkFile(info.data.filename).catch(logger.error)
+    await Promise.all(filesData.map(file =>
+      utils.unlinkFile(file.filename).catch(logger.error)
     ))
   }

@@ -883,18 +877,18 @@ self.scanFiles = async (req, user, infoMap) => {

 /** Strip tags (EXIF, etc.) */

-self.stripTags = async (req, infoMap) => {
+self.stripTags = async (req, filesData) => {
   if (!self.parseStripTags(req.headers.striptags)) return

   try {
-    await Promise.all(infoMap.map(info =>
-      utils.stripTags(info.data.filename, info.data.extname)
+    await Promise.all(filesData.map(file =>
+      utils.stripTags(file.filename, file.extname)
     ))
   } catch (error) {
     // Unlink all files when at least one threat is found OR any errors occurred
     // Should continue even when encountering errors
-    await Promise.all(infoMap.map(info =>
-      utils.unlinkFile(info.data.filename).catch(logger.error)
+    await Promise.all(filesData.map(file =>
+      utils.unlinkFile(file.filename).catch(logger.error)
     ))

     // Re-throw error
@@ -904,12 +898,12 @@ self.stripTags = async (req, infoMap) => {

 /** Database functions */

-self.storeFilesToDb = async (req, res, user, infoMap) => {
+self.storeFilesToDb = async (req, res, user, filesData) => {
   const files = []
   const exists = []
   const albumids = []

-  await Promise.all(infoMap.map(async info => {
+  await Promise.all(filesData.map(async file => {
     // Check if the file exists by checking its hash and size
     const dbFile = await utils.db.table('files')
       .where(function () {
@@ -920,8 +914,8 @@ self.storeFilesToDb = async (req, res, user, infoMap) => {
         }
       })
       .where({
-        hash: info.data.hash,
-        size: String(info.data.size)
+        hash: file.hash,
+        size: String(file.size)
       })
       // Select expirydate to display expiration date of existing files as well
       .select('name', 'expirydate')
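Deduplication now reads `file.hash` and `file.size` directly: a file is a duplicate when an existing row matches both the hash and the stringified size. A hedged knex-style sketch (the predicate inside `.where(function () { ... })` is elided in this diff, so the user scoping below is a hypothetical reconstruction, as is the trailing `.first()`):

```js
// Assumes a knex instance like lolisafe's utils.db.
async function findDuplicate (db, file, userid) {
  return db.table('files')
    .where(function () {
      // Hypothetical scoping; the real predicate is outside this hunk.
      if (userid === undefined) this.whereNull('userid')
      else this.where('userid', userid)
    })
    .where({
      hash: file.hash,
      size: String(file.size) // sizes are stored as strings
    })
    .select('name', 'expirydate')
    .first() // assumption: one matching row is enough
}
```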
@@ -929,12 +923,13 @@ self.storeFilesToDb = async (req, res, user, infoMap) => {

     if (dbFile) {
       // Continue even when encountering errors
-      await utils.unlinkFile(info.data.filename).catch(logger.error)
-      logger.debug(`Unlinked ${info.data.filename} since a duplicate named ${dbFile.name} exists`)
+      await utils.unlinkFile(file.filename).catch(logger.error)
+      logger.debug(`Unlinked ${file.filename} since a duplicate named ${dbFile.name} exists`)

-      // If on /nojs route, append original file name reported by client
+      // If on /nojs route, append original name reported by client,
+      // instead of the actual original name from database
       if (req.path === '/nojs') {
-        dbFile.original = info.data.originalname
+        dbFile.original = file.originalname
       }

       exists.push(dbFile)
@@ -943,11 +938,11 @@ self.storeFilesToDb = async (req, res, user, infoMap) => {

     const timestamp = Math.floor(Date.now() / 1000)
     const data = {
-      name: info.data.filename,
-      original: info.data.originalname,
-      type: info.data.mimetype,
-      size: String(info.data.size),
-      hash: info.data.hash,
+      name: file.filename,
+      original: file.originalname,
+      type: file.mimetype,
+      size: String(file.size),
+      hash: file.hash,
       // Only disable if explicitly set to false in config
       ip: config.uploads.storeIP !== false ? req.ip : null,
       timestamp
@@ -955,21 +950,21 @@ self.storeFilesToDb = async (req, res, user, infoMap) => {

     if (user) {
       data.userid = user.id
-      data.albumid = info.data.albumid
+      data.albumid = file.albumid
       if (data.albumid !== null && !albumids.includes(data.albumid)) {
         albumids.push(data.albumid)
       }
     }

-    if (info.data.age) {
-      data.expirydate = data.timestamp + (info.data.age * 3600) // Hours to seconds
+    if (file.age) {
+      data.expirydate = data.timestamp + (file.age * 3600) // Hours to seconds
     }

     files.push(data)

     // Generate thumbs, but do not wait
-    if (utils.mayGenerateThumb(info.data.extname)) {
-      utils.generateThumbs(info.data.filename, info.data.extname, true).catch(logger.error)
+    if (utils.mayGenerateThumb(file.extname)) {
+      utils.generateThumbs(file.filename, file.extname, true).catch(logger.error)
     }
   }))

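A quick worked example of the expiry arithmetic above, since `age` is in hours while timestamps are in Unix seconds (values hypothetical):

```js
const timestamp = 1700000000 // upload time, Unix seconds
const age = 24               // retention in hours
const expirydate = timestamp + age * 3600
console.log(expirydate)      // 1700086400, exactly one day later
```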