// filesafe/controllers/multerStorageController.js
//
// Custom multer storage engine with support for chunked uploads.
// Inspired by the switch to blake3 for file hashing, chunks are now
// written directly to a temporary file as they are uploaded, so there
// is no longer a long wait for "rebuilding chunks" at the end.
// There is still some delay at the start of each subsequent chunk
// upload; the exact cause is unclear, since the write stream is
// already being reused.

const fs = require('fs')
const os = require('os')
const path = require('path')
const crypto = require('crypto')
const blake3 = require('blake3')
const mkdirp = require('mkdirp')

function getFilename (req, file, cb) {
  // Default filename generator: a random 16-byte hex string.
  // This won't be used since we use our own filename function.
  crypto.randomBytes(16, function (err, raw) {
    cb(err, err ? undefined : raw.toString('hex'))
  })
}

function getDestination (req, file, cb) {
  // Default destination: the OS temporary directory.
  cb(null, os.tmpdir())
}

function DiskStorage (opts) {
  this.getFilename = (opts.filename || getFilename)

  if (typeof opts.destination === 'string') {
    // Fixed destination directory: make sure it exists up front.
    mkdirp.sync(opts.destination)
    this.getDestination = function ($0, $1, cb) { cb(null, opts.destination) }
  } else {
    this.getDestination = (opts.destination || getDestination)
  }
}

DiskStorage.prototype._handleFile = function _handleFile (req, file, cb) {
  const that = this

  that.getDestination(req, file, function (err, destination) {
    if (err) return cb(err)

    that.getFilename(req, file, function (err, filename) {
      if (err) return cb(err)

      const finalPath = path.join(destination, filename)

      const onerror = err => {
        hash.dispose()
        cb(err)
      }

      let outStream
      let hash
      if (file._isChunk) {
        // Chunked upload: lazily create a single append-mode write stream
        // and a single blake3 hasher, then reuse both for every chunk of
        // the same file (note the digest is not taken here for chunks).
        if (!file._chunksData.stream) {
          file._chunksData.stream = fs.createWriteStream(finalPath, { flags: 'a' })
          file._chunksData.stream.on('error', onerror)
        }
        if (!file._chunksData.hasher) {
          file._chunksData.hasher = blake3.createHash()
        }

        outStream = file._chunksData.stream
        hash = file._chunksData.hasher
      } else {
        // Regular upload: one write stream and one hasher per file.
        outStream = fs.createWriteStream(finalPath)
        outStream.on('error', onerror)
        hash = blake3.createHash()
      }
      file.stream.on('error', onerror)
      file.stream.on('data', d => hash.update(d))

      if (file._isChunk) {
        // For chunks, don't end the shared stream; later chunks append to it.
        file.stream.on('end', () => {
          cb(null, {
            destination,
            filename,
            path: finalPath
          })
        })
        file.stream.pipe(outStream, { end: false })
      } else {
        outStream.on('finish', () => {
          cb(null, {
            destination,
            filename,
            path: finalPath,
            size: outStream.bytesWritten,
            hash: hash.digest('hex')
          })
        })
        file.stream.pipe(outStream)
      }
    })
  })
}
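
// Sketch of the chunk state _handleFile expects (assumption: an upstream
// handler marks chunk requests with file._isChunk and attaches a shared
// file._chunksData object that persists across requests for the same file;
// only the fields this module touches are shown):
//
//   file._isChunk = true
//   file._chunksData = {
//     stream: null, // lazily created in _handleFile, reused for every chunk
//     hasher: null  // lazily created blake3 hasher, reused likewise
//   }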

DiskStorage.prototype._removeFile = function _removeFile (req, file, cb) {
  const path = file.path
  delete file.destination
  delete file.filename
  delete file.path
  fs.unlink(path, cb)
}

module.exports = function (opts) {
  return new DiskStorage(opts)
}
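
// Usage sketch (a minimal example, assuming multer's standard custom
// storage engine API; the './multerStorageController' path and the
// Express route are illustrative):
//
//   const express = require('express')
//   const multer = require('multer')
//
//   const storage = require('./multerStorageController')({
//     destination: './uploads' // or a (req, file, cb) function
//   })
//   const upload = multer({ storage })
//
//   const app = express()
//   app.post('/upload', upload.single('file'), (req, res) => {
//     // req.file.path comes from _handleFile; for non-chunked uploads,
//     // req.file.size and req.file.hash are set as well
//     res.json({ path: req.file.path })
//   })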