Use fs.copyFile() for chunks data on a non-default path

Closes #314
Author: Bobby Wibowo
Date: 2020-11-21 06:31:36 +07:00
parent 95247daa80
commit 51ab9a6fc5
2 changed files with 11 additions and 3 deletions
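The rationale, inferred from the commit title and the new code comment: Node's fs.rename() cannot move a file across devices and rejects with an EXDEV error, so when the chunks directory is configured outside the uploads directory (and thus possibly on another filesystem), the finished file has to be copied instead. A minimal sketch of that general fallback pattern, with a hypothetical moveFile() helper that is not part of this codebase:

    const fs = require('fs')
    const { promisify } = require('util')

    const rename = promisify(fs.rename)
    const copyFile = promisify(fs.copyFile)
    const unlink = promisify(fs.unlink)

    // Hypothetical helper: rename when possible, copy + unlink across devices
    async function moveFile (source, destination) {
      try {
        await rename(source, destination)
      } catch (error) {
        // rename(2) cannot cross mount points
        if (error.code !== 'EXDEV') throw error
        await copyFile(source, destination)
        await unlink(source)
      }
    }

The commit itself avoids the try/catch probe: it decides up front via a path check (the chunksCopyFile flag below) and leaves the unlink step to the existing chunk cleanup.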

@@ -9,6 +9,7 @@ const self = {}
 // Promisify these fs functions
 const fsFuncs = [
   'access',
+  'copyFile',
   'lstat',
   'mkdir',
   'readdir',
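This hunk only registers 'copyFile' in the list of fs functions to promisify. A minimal sketch of how such a list is typically consumed, assuming util.promisify and that the wrappers are exposed on the module's self object (the actual loop sits outside this hunk):

    const fs = require('fs')
    const { promisify } = require('util')

    const self = {}
    const fsFuncs = [
      'access',
      'copyFile',
      'lstat',
      'mkdir',
      'readdir'
      // ...remaining entries truncated in the hunk above
    ]
    for (const fsFunc of fsFuncs) {
      // e.g. self.copyFile(src, dest) now returns a Promise
      self[fsFunc] = promisify(fs[fsFunc])
    }

This matches how the second changed file awaits paths.rename() and paths.copyFile() below.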

@@ -33,8 +33,10 @@ const chunkedUploads = config.uploads.chunkSize &&
   config.uploads.chunkSize.default
 const chunkedUploadsTimeout = config.uploads.chunkSize.timeout || 1800000
 const chunksData = {}
 // Hard-coded min chunk size of 1 MB (e.g. 50 MB = max 50 chunks)
 const maxChunksCount = maxSize
+// Use fs.copyFile() instead of fs.rename() if chunks dir is NOT inside uploads dir
+const chunksCopyFile = !paths.chunks.startsWith(paths.uploads)
 
 const extensionsFilter = Array.isArray(config.extensionsFilter) &&
   config.extensionsFilter.length
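The new chunksCopyFile flag is a plain path-prefix test: a chunks directory nested inside the uploads directory is guaranteed to live on the same filesystem, so fs.rename() is safe there; anything outside might be a different mount. An illustration with hypothetical config values, not taken from this repo:

    const path = require('path')

    // hypothetical resolved paths, e.g. chunks moved to a scratch disk
    const paths = {
      uploads: path.resolve('./uploads'),
      chunks: path.resolve('/mnt/scratch/chunks')
    }

    const chunksCopyFile = !paths.chunks.startsWith(paths.uploads)
    console.log(chunksCopyFile) // true -> finished files are copied, not renamed

    // with the default layout the flag stays false and rename is used:
    // paths.chunks = path.resolve('./uploads/chunks') -> startsWith(...) === true

Strictly speaking, a prefix test can misfire on sibling directories such as uploads2; appending a trailing path separator before comparing would tighten it, but for deciding between two safe fallbacks it is good enough.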
@@ -553,8 +555,13 @@ self.actuallyFinishChunks = async (req, res, user) => {
       const name = await self.getUniqueRandomName(length, file.extname)
 
       // Move tmp file to final destination
+      // For fs.copyFile(), tmpfile will eventually be unlinked by self.cleanUpChunks()
       const destination = path.join(paths.uploads, name)
-      await paths.rename(tmpfile, destination)
+      if (chunksCopyFile) {
+        await paths.copyFile(tmpfile, destination)
+      } else {
+        await paths.rename(tmpfile, destination)
+      }
 
       const hash = chunksData[file.uuid].hasher.digest('hex')
       // Continue even when encountering errors
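The branch matters because the two calls clean up differently: fs.rename() removes the source as part of the move, while fs.copyFile() leaves tmpfile behind, which is why the added comment defers its removal to self.cleanUpChunks(). A tiny demo of that difference, with hypothetical paths:

    const fs = require('fs')
    const { promisify } = require('util')

    const copyFile = promisify(fs.copyFile)
    const access = promisify(fs.access)

    async function demo () {
      // hypothetical paths for illustration only
      await copyFile('/mnt/scratch/chunks/uuid/tmpfile', '/srv/uploads/abcd.bin')
      // the source still exists after the copy; it must be unlinked separately
      await access('/mnt/scratch/chunks/uuid/tmpfile') // resolves
    }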
@@ -595,7 +602,7 @@ self.actuallyFinishChunks = async (req, res, user) => {
 
       if (chunksData[file.uuid].hasher) {
         chunksData[file.uuid].hasher.dispose()
       }
-    } catch (error) {}
+    } catch (_) {}
     self.cleanUpChunks(file.uuid).catch(logger.error)
   })
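self.cleanUpChunks() is what finally unlinks tmpfile on the fs.copyFile() path. Its body is not part of this diff; the following is a hypothetical sketch of the contract it has to fulfil, reusing the promisified paths helpers and assuming unlink and rmdir are wrapped the same way as the functions shown in the first hunk (the real implementation may differ):

    // hypothetical sketch, not the real self.cleanUpChunks()
    self.cleanUpChunks = async uuid => {
      const root = path.join(paths.chunks, uuid)
      // remove every leftover file, including the merged tmpfile
      const files = await paths.readdir(root)
      await Promise.all(files.map(file => paths.unlink(path.join(root, file))))
      await paths.rmdir(root)
      delete chunksData[uuid]
    }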