mirror of https://github.com/BobbyWibowo/lolisafe.git
parent 95247daa80
commit 51ab9a6fc5
@@ -9,6 +9,7 @@ const self = {}
 // Promisify these fs functions
 const fsFuncs = [
   'access',
+  'copyFile',
   'lstat',
   'mkdir',
   'readdir',
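For context, a minimal sketch of how the listed names could be promisified onto a shared helper with util.promisify; the `paths` object and the exact function list below are assumptions for illustration, not necessarily how the repo's own helper module is laid out. Adding 'copyFile' here is what makes paths.copyFile() available in the later hunks.

const fs = require('fs')
const { promisify } = require('util')

// Hypothetical container for the promisified fs functions
const paths = {}

const fsFuncs = [
  'access',
  'copyFile', // newly added so chunk merging can copy instead of rename
  'lstat',
  'mkdir',
  'readdir',
  'rename' // assumed to be in the full list, since paths.rename() is used below
]

for (const fsFunc of fsFuncs) {
  paths[fsFunc] = promisify(fs[fsFunc])
}

// Usage: await paths.copyFile(source, destination)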
@@ -33,8 +33,10 @@ const chunkedUploads = config.uploads.chunkSize &&
   config.uploads.chunkSize.default
 const chunkedUploadsTimeout = config.uploads.chunkSize.timeout || 1800000
 const chunksData = {}
 // Hard-coded min chunk size of 1 MB (e.g. 50 MB = max 50 chunks)
 const maxChunksCount = maxSize
+// Use fs.copyFile() instead of fs.rename() if chunks dir is NOT inside uploads dir
+const chunksCopyFile = !paths.chunks.startsWith(paths.uploads)
 
 const extensionsFilter = Array.isArray(config.extensionsFilter) &&
   config.extensionsFilter.length
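The new flag matters because fs.rename() cannot move a file across filesystems: if the chunks directory is mounted somewhere outside the uploads directory, a rename fails with EXDEV, so the merged file has to be copied instead. A rough illustration of how the prefix check behaves, using made-up directories:

// Illustration only; these paths are hypothetical examples
const uploads = '/srv/lolisafe/uploads'
const chunksInside = '/srv/lolisafe/uploads/chunks'
const chunksOutside = '/mnt/scratch/chunks'

console.log(!chunksInside.startsWith(uploads))  // false -> keep using fs.rename()
console.log(!chunksOutside.startsWith(uploads)) // true  -> fall back to fs.copyFile()

The prefix check is a heuristic: a chunks directory nested inside the uploads directory is assumed to live on the same filesystem, where a rename is cheap and atomic.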
@@ -553,8 +555,13 @@ self.actuallyFinishChunks = async (req, res, user) => {
       const name = await self.getUniqueRandomName(length, file.extname)
 
       // Move tmp file to final destination
+      // For fs.copyFile(), tmpfile will eventually be unlinked by self.cleanUpChunks()
       const destination = path.join(paths.uploads, name)
-      await paths.rename(tmpfile, destination)
+      if (chunksCopyFile) {
+        await paths.copyFile(tmpfile, destination)
+      } else {
+        await paths.rename(tmpfile, destination)
+      }
       const hash = chunksData[file.uuid].hasher.digest('hex')
 
       // Continue even when encountering errors
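An alternative to deciding upfront would be to attempt the rename and only fall back to a copy when the OS reports a cross-device move. A hedged sketch of that pattern, using a hypothetical moveChunkedFile helper that is not part of this commit:

// Sketch (assumption): try a cheap rename first, copy only on EXDEV
const moveChunkedFile = async (tmpfile, destination) => {
  try {
    await paths.rename(tmpfile, destination)
  } catch (error) {
    if (error.code !== 'EXDEV') throw error
    // Cross-device move: copy instead and leave tmpfile for cleanUpChunks()
    await paths.copyFile(tmpfile, destination)
  }
}

The committed approach instead resolves the decision once at startup via chunksCopyFile, which avoids depending on error codes at the cost of always copying whenever the chunks directory sits outside the uploads directory.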
@@ -595,7 +602,7 @@ self.actuallyFinishChunks = async (req, res, user) => {
       if (chunksData[file.uuid].hasher) {
         chunksData[file.uuid].hasher.dispose()
       }
-    } catch (error) {}
+    } catch (_) {}
     self.cleanUpChunks(file.uuid).catch(logger.error)
   })
 