feat: multer -> hyper-express multipartfield

get outta here multer, lmao
Bobby Wibowo 2022-07-12 13:07:13 +07:00
parent 8f3d6b1557
commit 7f9d05da26
4 changed files with 153 additions and 254 deletions
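
Editor's note: the diff below drops multer's Express-style middleware in favor of hyper-express's native req.multipart(), which awaits one handler call per form field. The following is a minimal sketch of that consumption pattern, assuming only the MultipartField surface actually used in the diff (field.name, field.value, field.file.name, field.file.stream); the route, destination path and response shape are illustrative, not part of the commit.

    const fs = require('fs')
    const path = require('path')
    const HyperExpress = require('hyper-express')

    const app = new HyperExpress.Server()

    app.post('/upload', async (req, res) => {
      req.body = {}
      const saved = []

      // req.multipart() calls the handler once per field; when the handler
      // returns a Promise, it is awaited before the next field is read
      await req.multipart(async field => {
        if (field.file) {
          // File field: stream straight to disk, no intermediate buffering
          const dest = path.join('/tmp/uploads', field.file.name)
          await new Promise((resolve, reject) => {
            const out = fs.createWriteStream(dest)
            out.on('finish', resolve)
            out.on('error', reject)
            field.file.stream.pipe(out)
          })
          saved.push(dest)
        } else {
          // Non-file field: value arrives already buffered
          req.body[field.name] = field.value
        }
      })

      return res.json({ body: req.body, saved })
    })

    // app.listen(8080)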

View File

@@ -1,7 +1,6 @@
 const blake3 = require('blake3')
 const fetch = require('node-fetch')
 const fs = require('fs')
-const multer = require('multer')
 const path = require('path')
 const randomstring = require('randomstring')
 const searchQuery = require('search-query-parser')
@@ -9,7 +8,6 @@ const paths = require('./pathsController')
 const perms = require('./permissionController')
 const utils = require('./utilsController')
 const ClientError = require('./utils/ClientError')
-const multerStorage = require('./utils/multerStorage')
 const ServerError = require('./utils/ServerError')
 const config = require('./../config')
 const logger = require('./../logger')
@@ -92,84 +90,6 @@ const initChunks = async uuid => {
   return chunksData[uuid]
 }
 
-/** Multer */
-
-const executeMulter = multer({
-  // Guide: https://github.com/expressjs/multer/tree/v1.4.4#limits
-  limits: {
-    fileSize: maxSizeBytes,
-    // Maximum number of non-file fields.
-    // Dropzone.js will add 6 extra fields for chunked uploads.
-    // We don't use them for anything else.
-    fields: 6,
-    // Maximum number of file fields.
-    // Chunked uploads still need to provide ONLY 1 file field.
-    // Otherwise, only one of the files will end up being properly stored,
-    // and that will also be as a chunk.
-    files: maxFilesPerUpload
-  },
-  fileFilter (req, file, cb) {
-    // BUG: Since multer@1.4.5-lts.1, UTF-8 filenames are not handled properly, so we force it
-    file.originalname = file.originalname &&
-      Buffer.from(file.originalname, 'latin1').toString('utf8')
-
-    file.extname = utils.extname(file.originalname)
-    if (self.isExtensionFiltered(file.extname)) {
-      return cb(new ClientError(`${file.extname ? `${file.extname.substr(1).toUpperCase()} files` : 'Files with no extension'} are not permitted.`))
-    }
-
-    // Re-map Dropzone chunked uploads keys so people can manually use the API without prepending 'dz'
-    for (const key in req.body) {
-      if (!key.startsWith('dz')) continue
-      req.body[key.replace(/^dz/, '')] = req.body[key]
-      delete req.body[key]
-    }
-
-    if (req.body.chunkindex !== undefined && !chunkedUploads) {
-      return cb(new ClientError('Chunked uploads are disabled at the moment.'))
-    } else {
-      return cb(null, true)
-    }
-  },
-  storage: multerStorage({
-    destination (req, file, cb) {
-      // Is file a chunk!?
-      file._isChunk = chunkedUploads && req.body.uuid !== undefined && req.body.chunkindex !== undefined
-
-      if (file._isChunk) {
-        // Calling this will also reset its timeout
-        initChunks(req.body.uuid)
-          .then(chunksData => {
-            file._chunksData = chunksData
-            cb(null, chunksData.root)
-          })
-          .catch(error => {
-            logger.error(error)
-            return cb(new ServerError('Could not process the chunked upload. Try again?'))
-          })
-      } else {
-        return cb(null, paths.uploads)
-      }
-    },
-    filename (req, file, cb) {
-      if (file._isChunk) {
-        return cb(null, chunksData[req.body.uuid].filename)
-      } else {
-        const length = self.parseFileIdentifierLength(req.headers.filelength)
-        return self.getUniqueRandomName(length, file.extname)
-          .then(name => cb(null, name))
-          .catch(error => cb(error))
-      }
-    },
-    scan: utils.scan,
-    scanHelpers: self.scanHelpers
-  })
-}).array('files[]')
 
 /** Helper functions */
 
 self.isExtensionFiltered = extname => {
@@ -295,30 +215,162 @@ self.upload = async (req, res) => {
   const age = self.assertRetentionPeriod(user, req.headers.age)
 
-  const multerError = await new Promise(resolve => {
-    req._user = user
-    return executeMulter(req, res, err => resolve(err))
-  }).finally(() => delete req._user)
+  // Init empty Request.body
+  req.body = {}
 
-  if (multerError) {
-    const suppress = [
-      'LIMIT_FILE_SIZE',
-      'LIMIT_UNEXPECTED_FILE'
-    ]
-    if (suppress.includes(multerError.code)) {
-      throw new ClientError(multerError.toString())
-    } else {
-      throw multerError
-    }
-  }
+  // Initially try to parse as multipart
+  await req.multipart(async field => {
+    // Keep non-file fields in Request.body
+    if (field.truncated) {
+      // Re-map Dropzone chunked uploads keys so people can manually use the API without prepending 'dz'
+      let name = field.name
+      if (name.startsWith('dz')) name = name.replace(/^dz/, '')
+      req.body[name] = field.value
+      return
+    }
 
-  // If req.files is unset, then Multer did not encounter any files
-  if (req.files === undefined) {
+    if (!field.file) {
+      throw new Error(`Unexpected non-truncated and non-file field: ${field.name}`)
+    }
+
+    // Init Request.files array if not previously set
+    if (req.files === undefined) {
+      req.files = []
+    }
+
+    // NOTE: Since busboy@1, filenames are not automatically parsed as UTF-8, so we force it here
+    const originalname = field.file.name &&
+      Buffer.from(field.file.name, 'latin1').toString('utf8')
+
+    const extname = utils.extname(originalname)
+    if (self.isExtensionFiltered(extname)) {
+      throw new ClientError(`${extname ? `${extname.substr(1).toUpperCase()} files` : 'Files with no extension'} are not permitted.`)
+    }
+
+    if (req.body.chunkindex !== undefined && !chunkedUploads) {
+      throw new ClientError('Chunked uploads are disabled at the moment.')
+    }
+
+    // Is it a chunk file?
+    const isChunk = chunkedUploads &&
+      req.body.uuid !== undefined &&
+      req.body.chunkindex !== undefined
+
+    let chunksData
+    let destination
+    let filename
+    if (isChunk) {
+      // Calling this will also reset its timeout
+      chunksData = await initChunks(req.body.uuid)
+      destination = chunksData.root
+      filename = chunksData.filename
+    } else {
+      const length = self.parseFileIdentifierLength(req.headers.filelength)
+      destination = paths.uploads
+      filename = await self.getUniqueRandomName(length, extname)
+    }
+
+    // Write the file into disk, and return an object containing the required file information
+    const file = await new Promise((resolve, reject) => {
+      // "weighted" resolve function, to be able to "await" multiple callbacks
+      const REQUIRED_WEIGHT = 2
+      let tempObject = { originalname, extname }
+      let tempWeight = 0
+      const _resolve = (result = {}, weight = 2) => {
+        tempWeight += weight
+        tempObject = Object.assign(result, tempObject)
+        if (tempWeight >= REQUIRED_WEIGHT) {
+          resolve(tempObject)
+        }
+      }
+
+      let outStream
+      let hash
+      let scanStream
+      const onerror = error => {
+        hash.dispose()
+        reject(error)
+      }
+
+      const finalPath = path.join(destination, filename)
+      if (isChunk) {
+        if (!chunksData.stream) {
+          chunksData.stream = fs.createWriteStream(finalPath, { flags: 'a' })
+          chunksData.stream.on('error', onerror)
+        }
+        if (!chunksData.hasher) {
+          chunksData.hasher = blake3.createHash()
+        }
+
+        outStream = chunksData.stream
+        hash = chunksData.hasher
+      } else {
+        outStream = fs.createWriteStream(finalPath)
+        outStream.on('error', onerror)
+        hash = blake3.createHash()
+
+        if (utils.scan.passthrough &&
+          !self.scanHelpers.assertUserBypass(req._user, filename) &&
+          !self.scanHelpers.assertFileBypass({ filename })) {
+          scanStream = utils.scan.instance.passthrough()
+        }
+      }
+
+      field.file.stream.on('error', onerror)
+      field.file.stream.on('data', d => hash.update(d))
+
+      if (isChunk) {
+        field.file.stream.on('end', () => {
+          _resolve({
+            destination,
+            filename,
+            path: finalPath
+          })
+        })
+        field.file.stream.pipe(outStream, { end: false })
+      } else {
+        outStream.on('finish', () => {
+          _resolve({
+            destination,
+            filename,
+            path: finalPath,
+            size: outStream.bytesWritten,
+            hash: hash.digest('hex')
+          }, scanStream ? 1 : 2)
+        })
+
+        if (scanStream) {
+          logger.debug(`[ClamAV]: ${filename}: Passthrough scanning\u2026`)
+          scanStream.on('error', onerror)
+          scanStream.on('scan-complete', scan => {
+            _resolve({ scan }, 1)
+          })
+          field.file.stream.pipe(scanStream).pipe(outStream)
+        } else {
+          field.file.stream.pipe(outStream)
+        }
+      }
+    })
+
+    // Push file to Request.files array
+    req.files.push(file)
+  }).catch(error => {
+    // MultipartField may throw string errors
+    if (typeof error === 'string') {
+      throw new ServerError(error)
+    } else {
+      throw error
+    }
+  })
+
+  if (Array.isArray(req.files)) {
+    return self.actuallyUpload(req, res, user, albumid, age)
+  } else {
+    // Parse POST body
+    req.body = await req.json()
     return self.actuallyUploadUrls(req, res, user, albumid, age)
-  } else {
-    return self.actuallyUpload(req, res, user, albumid, age)
   }
 }

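Editor's note: the _resolve helper above replaces the deleted storage engine's weighted _cb. The idea is a single Promise that settles only once independent callbacks have contributed enough combined weight, with Object.assign letting earlier results win key conflicts. A standalone sketch of the pattern follows; REQUIRED_WEIGHT = 2 and the event names match the diff, while weightedPromise/report are illustrative names.

    function weightedPromise (requiredWeight = 2) {
      let weight = 0
      let merged = {}
      let resolveFn
      const promise = new Promise(resolve => { resolveFn = resolve })
      const report = (result = {}, w = requiredWeight) => {
        weight += w
        // Earlier reports win key conflicts, mirroring Object.assign(result, tempObject)
        merged = Object.assign(result, merged)
        if (weight >= requiredWeight) resolveFn(merged)
      }
      return { promise, report }
    }

    // Usage mirroring the upload path: with a scan stream active, the write
    // stream's 'finish' and the scanner's 'scan-complete' each carry weight 1,
    // so the promise resolves only after both have fired; without a scanner,
    // 'finish' alone carries the full weight of 2.
    //   const { promise, report } = weightedPromise(2)
    //   outStream.on('finish', () => report({ size: outStream.bytesWritten }, scanStream ? 1 : 2))
    //   scanStream.on('scan-complete', scan => report({ scan }, 1))
    //   const file = await promise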
View File

@@ -1,127 +0,0 @@
-const fs = require('fs')
-const path = require('path')
-const blake3 = require('blake3')
-const mkdirp = require('mkdirp')
-const logger = require('./../../logger')
-
-const REQUIRED_WEIGHT = 2
-
-function DiskStorage (opts) {
-  this.getFilename = opts.filename
-
-  if (typeof opts.destination === 'string') {
-    mkdirp.sync(opts.destination)
-    this.getDestination = function ($0, $1, cb) { cb(null, opts.destination) }
-  } else {
-    this.getDestination = opts.destination
-  }
-
-  this.scan = opts.scan
-  this.scanHelpers = opts.scanHelpers
-}
-
-DiskStorage.prototype._handleFile = function _handleFile (req, file, cb) {
-  const that = this
-
-  // "weighted" callback, to be able to "await" multiple callbacks
-  let tempError = null
-  let tempObject = {}
-  let tempWeight = 0
-  const _cb = (err = null, result = {}, weight = 2) => {
-    tempError = err
-    tempWeight += weight
-    tempObject = Object.assign(result, tempObject)
-    if (tempError || tempWeight >= REQUIRED_WEIGHT) {
-      cb(tempError, tempObject)
-    }
-  }
-
-  that.getDestination(req, file, function (err, destination) {
-    if (err) return _cb(err)
-
-    that.getFilename(req, file, function (err, filename) {
-      if (err) return _cb(err)
-
-      const finalPath = path.join(destination, filename)
-      const onerror = err => {
-        hash.dispose()
-        _cb(err)
-      }
-
-      let outStream
-      let hash
-      let scanStream
-      if (file._isChunk) {
-        if (!file._chunksData.stream) {
-          file._chunksData.stream = fs.createWriteStream(finalPath, { flags: 'a' })
-          file._chunksData.stream.on('error', onerror)
-        }
-        if (!file._chunksData.hasher) {
-          file._chunksData.hasher = blake3.createHash()
-        }
-
-        outStream = file._chunksData.stream
-        hash = file._chunksData.hasher
-      } else {
-        outStream = fs.createWriteStream(finalPath)
-        outStream.on('error', onerror)
-        hash = blake3.createHash()
-
-        if (that.scan.passthrough &&
-          !that.scanHelpers.assertUserBypass(req._user, filename) &&
-          !that.scanHelpers.assertFileBypass({ filename })) {
-          scanStream = that.scan.instance.passthrough()
-        }
-      }
-
-      file.stream.on('error', onerror)
-      file.stream.on('data', d => hash.update(d))
-
-      if (file._isChunk) {
-        file.stream.on('end', () => {
-          _cb(null, {
-            destination,
-            filename,
-            path: finalPath
-          })
-        })
-        file.stream.pipe(outStream, { end: false })
-      } else {
-        outStream.on('finish', () => {
-          _cb(null, {
-            destination,
-            filename,
-            path: finalPath,
-            size: outStream.bytesWritten,
-            hash: hash.digest('hex')
-          }, scanStream ? 1 : 2)
-        })
-
-        if (scanStream) {
-          logger.debug(`[ClamAV]: ${filename}: Passthrough scanning\u2026`)
-          scanStream.on('error', onerror)
-          scanStream.on('scan-complete', scan => {
-            _cb(null, { scan }, 1)
-          })
-          file.stream.pipe(scanStream).pipe(outStream)
-        } else {
-          file.stream.pipe(outStream)
-        }
-      }
-    })
-  })
-}
-
-DiskStorage.prototype._removeFile = function _removeFile (req, file, cb) {
-  const path = file.path
-
-  delete file.destination
-  delete file.filename
-  delete file.path
-
-  fs.unlink(path, cb)
-}
-
-module.exports = function (opts) {
-  return new DiskStorage(opts)
-}

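Editor's note: the chunk-handling pattern the deleted engine used, and which the new inline writer keeps, is one append-mode write stream reused across chunks ({ flags: 'a' }, piped with { end: false } so the sink stays open) plus one blake3 hasher fed incrementally, so the final digest covers the whole reassembled file. A condensed sketch under those assumptions; makeChunkSink, write and finalize are illustrative names, not from this commit.

    const fs = require('fs')
    const blake3 = require('blake3')

    function makeChunkSink (finalPath) {
      const stream = fs.createWriteStream(finalPath, { flags: 'a' }) // append across chunks
      const hasher = blake3.createHash()
      return {
        // Pipe one chunk in; { end: false } keeps the sink open for the next chunk
        write (chunkStream) {
          return new Promise((resolve, reject) => {
            chunkStream.on('data', d => hasher.update(d))
            chunkStream.on('end', resolve)
            chunkStream.on('error', reject)
            chunkStream.pipe(stream, { end: false })
          })
        },
        // Close the sink and take the whole-file digest once all chunks arrived
        finalize () {
          return new Promise(resolve => stream.end(() => resolve(hasher.digest('hex'))))
        }
      }
    }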
View File

@@ -47,7 +47,6 @@
     "knex": "~2.1.0",
     "live-directory": "~2.3.2",
     "markdown-it": "~13.0.1",
-    "multer": "~1.4.5-lts.1",
     "node-fetch": "~2.6.7",
     "nunjucks": "~3.2.3",
     "randomstring": "~1.2.2",

View File

@@ -561,11 +561,6 @@ append-buffer@^1.0.2:
   dependencies:
     buffer-equal "^1.0.0"
 
-append-field@^1.0.0:
-  version "1.0.0"
-  resolved "https://registry.yarnpkg.com/append-field/-/append-field-1.0.0.tgz#1e3440e915f0b1203d23748e78edd7b9b5b43e56"
-  integrity sha1-HjRA6RXwsSA9I3SOeO3XubW0PlY=
-
 aproba@^1.0.3:
   version "1.2.0"
   resolved "https://registry.yarnpkg.com/aproba/-/aproba-1.2.0.tgz#6802e6264efd18c790a1b0d517f0f2627bf2c94a"
@@ -1358,7 +1353,7 @@ concat-map@0.0.1:
   resolved "https://registry.yarnpkg.com/concat-map/-/concat-map-0.0.1.tgz#d8a96bd77fd68df7793a73036a3ba0d5405d477b"
   integrity sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=
 
-concat-stream@^1.5.2, concat-stream@^1.6.0:
+concat-stream@^1.6.0:
   version "1.6.2"
   resolved "https://registry.yarnpkg.com/concat-stream/-/concat-stream-1.6.2.tgz#904bdf194cd3122fc675c77fc4ac3d4ff0fd1a34"
   integrity sha512-27HBghJxjiZtIk3Ycvn/4kbJk/1uZuJFfuPEns6LaEvpvG1f0hTea8lilrouyo9mVc2GWdcEZ8OLoGmSADlrCw==
@@ -4120,13 +4115,6 @@ mkdirp-classic@^0.5.2, mkdirp-classic@^0.5.3:
   resolved "https://registry.yarnpkg.com/mkdirp-classic/-/mkdirp-classic-0.5.3.tgz#fa10c9115cc6d8865be221ba47ee9bed78601113"
   integrity sha512-gKLcREMhtuZRwRAfqP3RFW+TK4JqApVBtOIftVgjuABpAtpxhPGaDcfvbhNvD0B8iD1oUr/txX35NjcaY6Ns/A==
 
-mkdirp@^0.5.4:
-  version "0.5.6"
-  resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-0.5.6.tgz#7def03d2432dcae4ba1d611445c48396062255f6"
-  integrity sha512-FP+p8RB8OWpF3YZBCrP5gtADmtXApB5AMLn+vdyA+PyxCjrCs00mjyUozssO33cwDeT3wNGdLxJ5M//YqtHAJw==
-  dependencies:
-    minimist "^1.2.6"
-
 mkdirp@^1.0.3:
   version "1.0.4"
   resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-1.0.4.tgz#3eb5ed62622756d79a5f0e2a221dfebad75c2f7e"
@@ -4147,19 +4135,6 @@ ms@^2.1.1:
   resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.3.tgz#574c8138ce1d2b5861f0b44579dbadd60c6615b2"
   integrity sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==
 
-multer@~1.4.5-lts.1:
-  version "1.4.5-lts.1"
-  resolved "https://registry.yarnpkg.com/multer/-/multer-1.4.5-lts.1.tgz#803e24ad1984f58edffbc79f56e305aec5cfd1ac"
-  integrity sha512-ywPWvcDMeH+z9gQq5qYHCCy+ethsk4goepZ45GLD63fOu0YcNecQxi64nDs3qluZB+murG3/D4dJ7+dGctcCQQ==
-  dependencies:
-    append-field "^1.0.0"
-    busboy "^1.0.0"
-    concat-stream "^1.5.2"
-    mkdirp "^0.5.4"
-    object-assign "^4.1.1"
-    type-is "^1.6.4"
-    xtend "^4.0.0"
-
 mute-stdout@^1.0.0:
   version "1.0.1"
   resolved "https://registry.yarnpkg.com/mute-stdout/-/mute-stdout-1.0.1.tgz#acb0300eb4de23a7ddeec014e3e96044b3472331"
@@ -6469,7 +6444,7 @@ type-fest@^0.8.1:
   resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.8.1.tgz#09e249ebde851d3b1e48d27c105444667f17b83d"
   integrity sha512-4dbzIzqvjtgiM5rw1k5rEHtBANKmdudhGyBEajN01fEyhaAIhsoKNy6y7+IN93IfpFtwY9iqi7kD+xwKhQsNJA==
 
-type-is@^1.6.18, type-is@^1.6.4:
+type-is@^1.6.18:
   version "1.6.18"
   resolved "https://registry.yarnpkg.com/type-is/-/type-is-1.6.18.tgz#4e552cd05df09467dcbc4ef739de89f2cf37c131"
   integrity sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g==
@@ -6844,7 +6819,7 @@ xdg-basedir@^4.0.0:
   resolved "https://registry.yarnpkg.com/xdg-basedir/-/xdg-basedir-4.0.0.tgz#4bc8d9984403696225ef83a1573cbbcb4e79db13"
   integrity sha512-PSNhEJDejZYV7h50BohL09Er9VaIefr2LMAf3OEmpCkjOi34eYyQYAXUTjEQtZJTKcF0E2UKTh+osDLsgNim9Q==
 
-xtend@^4.0.0, xtend@~4.0.0, xtend@~4.0.1:
+xtend@~4.0.0, xtend@~4.0.1:
   version "4.0.2"
   resolved "https://registry.yarnpkg.com/xtend/-/xtend-4.0.2.tgz#bb72779f5fa465186b1f438f674fa347fdb5db54"
   integrity sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==