2018-01-23 20:06:30 +00:00
const crypto = require ( 'crypto' )
2018-09-23 16:28:15 +00:00
const fetch = require ( 'node-fetch' )
2018-01-23 20:06:30 +00:00
const fs = require ( 'fs' )
2018-09-01 20:37:26 +00:00
const multer = require ( 'multer' )
const path = require ( 'path' )
Updates (very important to read)
Client-side CSS & JS files will now be processed with Gulp.
Gulp tasks are configured in gulpfile.js file.
CSS files will be optimized with postcss-preset-env, which will
auto-add vendor prefixes and convert any parts necessary for browsers
compatibility.
Afterwards they will be minified with cssnano.
JS files will be optimized with bublé,
likewise for browsers compatibility.
Afterwards they will be minified with terser.
Unprocessed CSS & JS files will now be located at src directory, while
the processed results will be located at dist directory.
Due to bublé, the JS files should now be compatible up to IE 11
at the minimum.
Previously the safe would not work in IE 11 due to extensive usage of
template literals.
Due to that as well, JS files in src directory will now extensively use
arrow functions for my personal comfort (as they will be converted too).
The server will use the processed files at dist directory by default.
If you want to rebuild the files on your own, you can run "yarn build".
Gulp is a development dependency, so make sure you have installed all
development dependencies (i.e. NOT using "yarn install --production").
---
yarn lint -> gulp lint
yarn build -> gulp default
yarn watch -> gulp watch
yarn develop -> env NODE_ENV=development yarn watch
---
Fixed not being able to demote staff into normal users.
/api/token/verify will no longer respond with 401 HTTP error code,
unless an error occurred (which will be 500 HTTP error code).
Fixed /nojs route not displaying file's original name when a duplicate
is found on the server.
Removed is-breeze CSS class name, in favor of Bulma's is-info.
Removed custom styling from auth page, in favor of global styling.
Removed all usage of style HTML attribute in favor of CSS classes.
Renamed js/s/ to js/misc/.
Use loading spinners on dashboard's sidebar menus.
Disable all other sidebar menus when something is loading.
Changed title HTML attribute of disabled control buttons in
uploads & users list.
Hid checkboxes and WIP controls from users list.
Better error messages handling.
Especially homepage will now support CF's HTTP error codes.
Updated various icons.
Also, added fontello config file at public/libs/fontello/config.json.
This should let you edit them more easily with fontello.
Use Gatsby icon for my blog's link in homepage's footer.
A bunch of other improvements here & there.
2019-09-15 06:20:11 +00:00
const randomstring = require ( 'randomstring' )
2019-09-08 01:56:29 +00:00
const paths = require ( './pathsController' )
2018-10-13 11:06:58 +00:00
const perms = require ( './permissionController' )
2018-04-13 16:20:57 +00:00
const utils = require ( './utilsController' )
Updates (very important to read)
Client-side CSS & JS files will now be processed with Gulp.
Gulp tasks are configured in gulpfile.js file.
CSS files will be optimized with postcss-preset-env, which will
auto-add vendor prefixes and convert any parts necessary for browsers
compatibility.
Afterwards they will be minified with cssnano.
JS files will be optimized with bublé,
likewise for browsers compatibility.
Afterwards they will be minified with terser.
Unprocessed CSS & JS files will now be located at src directory, while
the processed results will be located at dist directory.
Due to bublé, the JS files should now be compatible up to IE 11
at the minimum.
Previously the safe would not work in IE 11 due to extensive usage of
template literals.
Due to that as well, JS files in src directory will now extensively use
arrow functions for my personal comfort (as they will be converted too).
The server will use the processed files at dist directory by default.
If you want to rebuild the files on your own, you can run "yarn build".
Gulp is a development dependency, so make sure you have installed all
development dependencies (i.e. NOT using "yarn install --production").
---
yarn lint -> gulp lint
yarn build -> gulp default
yarn watch -> gulp watch
yarn develop -> env NODE_ENV=development yarn watch
---
Fixed not being able to demote staff into normal users.
/api/token/verify will no longer respond with 401 HTTP error code,
unless an error occurred (which will be 500 HTTP error code).
Fixed /nojs route not displaying file's original name when a duplicate
is found on the server.
Removed is-breeze CSS class name, in favor of Bulma's is-info.
Removed custom styling from auth page, in favor of global styling.
Removed all usage of style HTML attribute in favor of CSS classes.
Renamed js/s/ to js/misc/.
Use loading spinners on dashboard's sidebar menus.
Disable all other sidebar menus when something is loading.
Changed title HTML attribute of disabled control buttons in
uploads & users list.
Hid checkboxes and WIP controls from users list.
Better error messages handling.
Especially homepage will now support CF's HTTP error codes.
Updated various icons.
Also, added fontello config file at public/libs/fontello/config.json.
This should let you edit them more easily with fontello.
Use Gatsby icon for my blog's link in homepage's footer.
A bunch of other improvements here & there.
2019-09-15 06:20:11 +00:00
const config = require ( './../config' )
const logger = require ( './../logger' )
const db = require ( 'knex' ) ( config . database )
2017-01-13 07:34:21 +00:00
2019-09-08 01:56:29 +00:00
const self = {}

// Fallback identifier length when the config does not specify one
const fileIdentifierLengthFallback = 32
// Clients may override the identifier length per-request only when it is
// not forced AND numeric min/max bounds are configured
const fileIdentifierLengthChangeable = !config.uploads.fileIdentifierLength.force &&
  typeof config.uploads.fileIdentifierLength.min === 'number' &&
  typeof config.uploads.fileIdentifierLength.max === 'number'

// Maximum upload size in MB (config value may be a string), and its byte equivalent
const maxSize = parseInt(config.uploads.maxSize)
const maxSizeBytes = maxSize * 1e6
const urlMaxSizeBytes = parseInt(config.uploads.urlMaxSize) * 1e6

// Maximum number of files (or URLs) per single upload request
const maxFilesPerUpload = 20

const chunkedUploads = Boolean(config.uploads.chunkSize)
// In-memory registry of in-progress chunked uploads, keyed by session UUID
const chunksData = {}
// Hard-coded min chunk size of 1 MB (e.g. 50 MB = max 50 chunks)
const maxChunksCount = maxSize

// Truthy when the corresponding filter arrays exist and are non-empty
const extensionsFilter = Array.isArray(config.extensionsFilter) &&
  config.extensionsFilter.length
const urlExtensionsFilter = Array.isArray(config.uploads.urlExtensionsFilter) &&
  config.uploads.urlExtensionsFilter.length
const temporaryUploads = Array.isArray(config.uploads.temporaryUploadAges) &&
  config.uploads.temporaryUploadAges.length
/**
 * Ensure the working directory and in-memory bookkeeping for a chunked
 * upload session exist, creating both on first use.
 * @param {string} uuid - Chunked upload session identifier.
 * @returns {Promise<string>} Absolute path of the session's chunk directory.
 */
const initChunks = async uuid => {
  const existing = chunksData[uuid]
  if (existing !== undefined) return existing.root

  const root = path.join(paths.chunks, uuid)
  try {
    await paths.access(root)
  } catch (err) {
    // A missing directory is expected on first use; re-throw anything else
    if (err && err.code !== 'ENOENT') throw err
    await paths.mkdir(root)
  }

  chunksData[uuid] = { root, chunks: [], size: 0 }
  return root
}
2017-01-13 07:34:21 +00:00
2019-09-08 01:56:29 +00:00
// Multer middleware handling multipart uploads under the "files[]" field.
// Regular files land directly in the uploads directory with a unique random
// name; chunks land in a per-UUID sub-directory named by zero-padded index.
const executeMulter = multer({
  // Guide: https://github.com/expressjs/multer#limits
  limits: {
    fileSize: maxSizeBytes,
    // Maximum number of non-file fields.
    // Dropzone.js will add 6 extra fields for chunked uploads.
    // We don't use them for anything else.
    fields: 6,
    // Maximum number of file fields.
    // Chunked uploads still need to provide only 1 file field.
    // Otherwise, only one of the files will end up being properly stored,
    // and that will also be as a chunk.
    files: maxFilesPerUpload
  },
  fileFilter (req, file, cb) {
    // Reject filtered extensions early, before anything is written to disk
    file.extname = utils.extname(file.originalname)
    if (self.isExtensionFiltered(file.extname))
      return cb(`${file.extname ? `${file.extname.substr(1).toUpperCase()} files` : 'Files with no extension'} are not permitted.`)

    // Re-map Dropzone keys so people can manually use the API without prepending 'dz'
    for (const key in req.body) {
      if (!/^dz/.test(key)) continue
      req.body[key.replace(/^dz/, '')] = req.body[key]
      delete req.body[key]
    }

    if (req.body.chunkindex !== undefined && !chunkedUploads)
      return cb('Chunked uploads are disabled at the moment.')
    else
      return cb(null, true)
  },
  storage: multer.diskStorage({
    destination (req, file, cb) {
      // If chunked uploads is disabled or the uploaded file is not a chunk
      if (!chunkedUploads || (req.body.uuid === undefined && req.body.chunkindex === undefined))
        return cb(null, paths.uploads)

      // Chunks go into a per-UUID sub-directory, created on demand
      initChunks(req.body.uuid)
        .then(uuidDir => cb(null, uuidDir))
        .catch(error => {
          logger.error(error)
          return cb('Could not process the chunked upload. Try again?')
        })
    },
    filename (req, file, cb) {
      // If chunked uploads is disabled or the uploaded file is not a chunk
      if (!chunkedUploads || (req.body.uuid === undefined && req.body.chunkindex === undefined)) {
        const length = self.parseFileIdentifierLength(req.headers.filelength)
        return self.getUniqueRandomName(length, file.extname)
          .then(name => cb(null, name))
          .catch(error => cb(error))
      }

      // index.extension (i.e. 0, 1, ..., n - will prepend zeros depending on the amount of chunks)
      // Zero-padding makes a plain lexicographic sort restore upload order
      const digits = req.body.totalchunkcount !== undefined ? `${req.body.totalchunkcount - 1}`.length : 1
      const zeros = new Array(digits + 1).join('0')
      const name = (zeros + req.body.chunkindex).slice(-digits)
      return cb(null, name)
    }
  })
}).array('files[]')
2017-01-13 07:34:21 +00:00
2019-09-08 01:56:29 +00:00
/**
 * Decide whether a file extension is rejected by the configured filter.
 * @param {string} extname - Lower-cased extension including the dot, or ''.
 * @returns {boolean} true when the file must be rejected.
 */
self.isExtensionFiltered = extname => {
  // Files without an extension may be rejected outright via config
  if (!extname) return Boolean(config.filterNoExtension)

  // No filter list configured: nothing to reject
  if (!extensionsFilter) return false

  const listed = config.extensionsFilter
    .some(extension => extname === extension.toLowerCase())
  // Whitelist mode rejects anything NOT listed; blacklist rejects what IS listed
  return config.extensionsFilterMode === 'whitelist'
    ? !listed
    : listed
}
2019-09-08 01:56:29 +00:00
/**
 * Resolve the identifier length to use for a generated file name.
 * Falls back to the configured default (or the hard-coded fallback) when
 * the client-supplied value is missing, non-numeric, out of bounds, or
 * overriding is not allowed.
 * @param {string|number} fileLength - Client-requested length (header value).
 * @returns {number} Identifier length to use.
 */
self.parseFileIdentifierLength = fileLength => {
  if (!config.uploads.fileIdentifierLength)
    return fileIdentifierLengthFallback

  // Radix 10: the header value is a decimal integer; without an explicit
  // radix, strings such as "0x20" would be parsed as hexadecimal
  const parsed = parseInt(fileLength, 10)
  if (isNaN(parsed) ||
    !fileIdentifierLengthChangeable ||
    parsed < config.uploads.fileIdentifierLength.min ||
    parsed > config.uploads.fileIdentifierLength.max)
    return config.uploads.fileIdentifierLength.default || fileIdentifierLengthFallback
  else
    return parsed
}
2019-09-08 01:56:29 +00:00
/**
 * Generate a random file name that is not already in use, retrying up to
 * utils.idMaxTries times. Uniqueness is checked either against the
 * in-memory identifiers cache or by probing the uploads directory on disk.
 * @param {number} length - Identifier length.
 * @param {string} extension - Extension including the dot (may be '').
 * @returns {Promise<string>} The unique file name (identifier + extension).
 * @throws {string} User-facing message when allocation keeps failing.
 */
self.getUniqueRandomName = async (length, extension) => {
  for (let i = 0; i < utils.idMaxTries; i++) {
    const identifier = randomstring.generate(length)
    const name = identifier + extension
    if (config.uploads.cacheFileIdentifiers) {
      // Fast path: consult the in-memory identifiers cache
      if (utils.idSet.has(identifier)) {
        logger.log(`Identifier ${identifier} is already in use (${i + 1}/${utils.idMaxTries}).`)
        continue
      }
      utils.idSet.add(identifier)
      // logger.log(`Added ${identifier} to identifiers cache`)
    } else {
      // Slow path: probe the uploads directory on disk
      try {
        await paths.access(path.join(paths.uploads, name))
        logger.log(`${name} is already in use (${i + 1}/${utils.idMaxTries}).`)
        continue
      } catch (error) {
        // FIX: was `error & error.code !== 'ENOENT'` — the bitwise AND always
        // evaluated falsy, so unexpected fs errors were silently swallowed.
        // Re-throw anything other than "file does not exist".
        if (error && error.code !== 'ENOENT')
          throw error
      }
    }
    return name
  }
  // Thrown as a string on purpose: upstream handlers treat non-Error throws
  // as user-facing messages (no stack trace logged)
  throw 'Sorry, we could not allocate a unique random name. Try again?'
}
2019-09-08 01:56:29 +00:00
/**
 * Map a client-supplied upload age onto one of the configured ages.
 * Missing values default to the first configured age; values not in the
 * configured list yield null.
 * @param {string|number|undefined} age - Client-requested age.
 * @returns {?number} A configured age, or null when unrecognized.
 */
self.parseUploadAge = age => {
  const ages = config.uploads.temporaryUploadAges
  // No age supplied (undefined or null): default to the first configured age
  if (age == null) return ages[0]
  const parsed = parseFloat(age)
  return ages.includes(parsed) ? parsed : null
}
2019-11-29 13:42:53 +00:00
/**
 * Decide whether metadata tags should be stripped for this request.
 * @param {string|undefined} stripTags - Client-supplied 0/1 flag (header value).
 * @returns {boolean} true when tags must be stripped.
 */
self.parseStripTags = stripTags => {
  const conf = config.uploads.stripTags
  // Feature disabled entirely
  if (!conf) return false
  // Forced by config, or no client preference: use the configured default
  if (conf.force || stripTags === undefined) return conf.default
  // Otherwise interpret the client value as a 0/1 flag
  return Boolean(parseInt(stripTags))
}
2019-09-08 01:56:29 +00:00
// Entry point for POST uploads: authenticates the requester, validates the
// album id and temporary-upload age, then delegates to the file or URL
// handler. Validation failures thrown by the handlers become 400 responses.
self.upload = async (req, res, next) => {
  let user
  if (config.private === true) {
    // Private mode: a valid token is mandatory (authorize handles the response)
    user = await utils.authorize(req, res)
    if (!user) return
  } else if (req.headers.token) {
    // Public mode: token is optional, but when present it must belong to an
    // enabled account
    user = await db.table('users')
      .where('token', req.headers.token)
      .first()
    if (user && (user.enabled === false || user.enabled === 0))
      return res.json({ success: false, description: 'This account has been disabled.' })
  }

  // Album id may come from a header or the route parameter
  let albumid = parseInt(req.headers.albumid || req.params.albumid)
  if (isNaN(albumid))
    albumid = null

  let age = null
  if (temporaryUploads) {
    age = self.parseUploadAge(req.headers.age)
    // A falsy age means permanent; only allowed when 0 is a configured age
    if (!age && !config.uploads.temporaryUploadAges.includes(0))
      return res.json({ success: false, description: 'Permanent uploads are not permitted.' })
  }

  try {
    // URL uploads are signalled by a non-empty "urls" body property
    const func = req.body.urls ? self.actuallyUploadUrls : self.actuallyUploadFiles
    await func(req, res, user, albumid, age)
  } catch (error) {
    const isError = error instanceof Error
    // Only log genuine Error objects; string throws are user-facing messages
    if (isError) logger.error(error)
    return res.status(400).json({
      success: false,
      description: isError ? error.toString() : error
    })
  }
}
2018-03-28 11:36:28 +00:00
2019-09-08 01:56:29 +00:00
// Handle regular multipart uploads: run Multer, then either record chunk
// progress (for chunked uploads) or scan/strip/store the finished files.
// Throws strings for user-facing validation failures.
self.actuallyUploadFiles = async (req, res, user, albumid, age) => {
  // Adapt Multer's callback API to await; resolves with the error, if any
  const error = await new Promise(resolve => {
    return executeMulter(req, res, err => resolve(err))
  })

  if (error) {
    // These Multer error codes are expected user mistakes: re-throw as plain
    // strings so they are reported without a stack trace being logged
    const suppress = [
      'LIMIT_FILE_SIZE',
      'LIMIT_UNEXPECTED_FILE'
    ]
    if (suppress.includes(error.code))
      throw error.toString()
    else
      throw error
  }

  if (!req.files || !req.files.length)
    throw 'No files.'

  // If chunked uploads is enabled and the uploaded file is a chunk, then just say that it was a success
  const uuid = req.body.uuid
  if (chunkedUploads && chunksData[uuid] !== undefined) {
    // Record each received chunk's name and size for later assembly
    req.files.forEach(file => {
      chunksData[uuid].chunks.push(file.filename)
      chunksData[uuid].size += file.size
    })
    return res.json({ success: true })
  }

  // Attach album & age to each file and remember its on-disk path
  const infoMap = req.files.map(file => {
    file.albumid = albumid
    file.age = age
    return {
      path: path.join(paths.uploads, file.filename),
      data: file
    }
  })

  if (config.filterEmptyFile && infoMap.some(file => file.data.size === 0)) {
    // Unlink all files when at least one file is an empty file
    // Should continue even when encountering errors
    await Promise.all(infoMap.map(info =>
      utils.unlinkFile(info.data.filename).catch(logger.error)
    ))

    throw 'Empty files are not allowed.'
  }

  if (utils.clamd.scanner) {
    // scanFiles returns a user-facing string when a threat/error occurred
    const scanResult = await self.scanFiles(req, user, infoMap)
    if (scanResult) throw scanResult
  }

  await self.stripTags(req, infoMap)

  const result = await self.storeFilesToDb(req, res, user, infoMap)
  await self.sendUploadResponse(req, res, result)
}
/**
 * Handle upload-by-URLs requests: fetch each remote file, apply extension
 * and size restrictions, write it to the uploads directory, then scan and
 * register the batch in the database. On any failure, every file already
 * written to disk is unlinked. Throws strings for user-facing failures.
 * @param {object} req - Express request.
 * @param {object} res - Express response.
 * @param {object} [user] - Authenticated user, if any.
 * @param {?number} albumid - Album to attach uploads to, or null.
 * @param {?number} age - Temporary upload age, or null for permanent.
 */
self.actuallyUploadUrls = async (req, res, user, albumid, age) => {
  if (!config.uploads.urlMaxSize)
    throw 'Upload by URLs is disabled at the moment.'

  const urls = req.body.urls
  if (!urls || !(urls instanceof Array))
    throw 'Missing "urls" property (array).'

  if (urls.length > maxFilesPerUpload)
    throw `Maximum ${maxFilesPerUpload} URLs at a time.`

  // Files already written to disk, tracked so they can be cleaned up
  // if any URL in the batch fails
  const downloaded = []
  const infoMap = []
  try {
    await Promise.all(urls.map(async url => {
      // Original file name is the URL's basename, sans query/fragment
      const original = path.basename(url).split(/[?#]/)[0]
      const extname = utils.extname(original)

      // Extensions filter: URL uploads may use their own list/mode,
      // otherwise fall back to the regular uploads filter
      let filtered = false
      if (['blacklist', 'whitelist'].includes(config.uploads.urlExtensionsFilterMode))
        if (urlExtensionsFilter) {
          const match = config.uploads.urlExtensionsFilter.some(extension => extname === extension.toLowerCase())
          const whitelist = config.uploads.urlExtensionsFilterMode === 'whitelist'
          filtered = ((!whitelist && match) || (whitelist && !match))
        } else {
          throw 'Invalid extensions filter, please contact the site owner.'
        }
      else
        filtered = self.isExtensionFiltered(extname)

      if (filtered)
        throw `${extname ? `${extname.substr(1).toUpperCase()} files` : 'Files with no extension'} are not permitted.`

      // Optionally route the download through a configured proxy template
      if (config.uploads.urlProxy)
        url = config.uploads.urlProxy
          .replace(/{url}/g, encodeURIComponent(url))
          .replace(/{url-noprot}/g, encodeURIComponent(url.replace(/^https?:\/\//, '')))

      // Limit max response body size with maximum allowed size
      const fetchFile = await fetch(url, { size: urlMaxSizeBytes })
      if (fetchFile.status !== 200)
        throw `${fetchFile.status} ${fetchFile.statusText}`

      const headers = fetchFile.headers
      const file = await fetchFile.buffer()

      const length = self.parseFileIdentifierLength(req.headers.filelength)
      const name = await self.getUniqueRandomName(length, extname)

      const destination = path.join(paths.uploads, name)
      await paths.writeFile(destination, file)
      downloaded.push(destination)

      infoMap.push({
        path: destination,
        data: {
          filename: name,
          originalname: original,
          extname,
          // FIX: the Content-Type header may be absent, in which case
          // headers.get() returns null and .split() used to throw
          mimetype: (headers.get('content-type') || '').split(';')[0],
          size: file.byteLength,
          albumid,
          age
        }
      })
    }))

    // If no errors encountered, clear cache of downloaded files
    downloaded.length = 0

    if (utils.clamd.scanner) {
      const scanResult = await self.scanFiles(req, user, infoMap)
      if (scanResult) throw scanResult
    }

    const result = await self.storeFilesToDb(req, res, user, infoMap)
    await self.sendUploadResponse(req, res, result)
  } catch (error) {
    // Unlink all downloaded files when at least one file threw an error from the for-loop
    // Should continue even when encountering errors
    if (downloaded.length)
      await Promise.all(downloaded.map(file =>
        utils.unlinkFile(file).catch(logger.error)
      ))

    // node-fetch's size-limit rejections are re-thrown as strings so they
    // are reported as user-facing messages without a stack trace
    const errorString = error.toString()
    const suppress = [
      / over limit:/
    ]
    if (!suppress.some(t => t.test(errorString)))
      throw error
    else
      throw errorString
  }
}
2019-09-08 01:56:29 +00:00
/**
 * Entry point for finalizing chunked uploads. Authenticates the requester
 * the same way self.upload does, then delegates to actuallyFinishChunks;
 * failures thrown by it become 400 responses.
 */
self.finishChunks = async (req, res, next) => {
  if (!chunkedUploads)
    return res.json({ success: false, description: 'Chunked upload is disabled at the moment.' })

  let user
  if (config.private === true) {
    // Private mode: a valid token is mandatory
    user = await utils.authorize(req, res)
    if (!user) return
  } else if (req.headers.token) {
    // Public mode: a supplied token must belong to an enabled account
    user = await db.table('users')
      .where('token', req.headers.token)
      .first()
    const disabled = user && (user.enabled === false || user.enabled === 0)
    if (disabled)
      return res.json({ success: false, description: 'This account has been disabled.' })
  }

  try {
    await self.actuallyFinishChunks(req, res, user)
  } catch (error) {
    const isError = error instanceof Error
    // Only log genuine Error objects; string throws are user-facing messages
    if (isError) logger.error(error)
    const description = isError ? error.toString() : error
    return res.status(400).json({ success: false, description })
  }
}
// Validate and assemble finished chunked uploads: re-run all per-file checks
// (extension, age, size), combine each session's chunks into a final file,
// then scan/strip/store the batch. On any failure, leftover chunk sessions
// are cleaned up. Throws strings for user-facing validation failures.
self.actuallyFinishChunks = async (req, res, user) => {
  // A file entry is invalid when it has no known chunk session, or fewer
  // than 2 recorded chunks (a single chunk is not a chunked upload)
  const check = file => typeof file.uuid !== 'string' ||
    !chunksData[file.uuid] ||
    chunksData[file.uuid].chunks.length < 2

  const files = req.body.files
  if (!Array.isArray(files) || !files.length || files.some(check))
    throw 'An unexpected error occurred.'

  const infoMap = []
  try {
    await Promise.all(files.map(async file => {
      if (chunksData[file.uuid].chunks.length > maxChunksCount)
        throw 'Too many chunks.'

      file.extname = typeof file.original === 'string' ? utils.extname(file.original) : ''
      if (self.isExtensionFiltered(file.extname))
        throw `${file.extname ? `${file.extname.substr(1).toUpperCase()} files` : 'Files with no extension'} are not permitted.`

      if (temporaryUploads) {
        file.age = self.parseUploadAge(file.age)
        // A falsy age means permanent; only allowed when 0 is a configured age
        if (!file.age && !config.uploads.temporaryUploadAges.includes(0))
          throw 'Permanent uploads are not permitted.'
      }

      // Size is the sum of all chunk sizes recorded during upload
      file.size = chunksData[file.uuid].size
      if (config.filterEmptyFile && file.size === 0)
        throw 'Empty files are not allowed.'
      else if (file.size > maxSizeBytes)
        throw `File too large. Chunks are bigger than ${maxSize} MB.`

      // Generate name
      const length = self.parseFileIdentifierLength(file.filelength)
      const name = await self.getUniqueRandomName(length, file.extname)

      // Combine chunks
      const destination = path.join(paths.uploads, name)
      await self.combineChunks(destination, file.uuid)

      // Continue even when encountering errors
      await self.cleanUpChunks(file.uuid).catch(logger.error)

      // Double-check file size
      const lstat = await paths.lstat(destination)
      if (lstat.size !== file.size)
        throw 'Chunks size mismatched.'

      let albumid = parseInt(file.albumid)
      if (isNaN(albumid))
        albumid = null

      const data = {
        filename: name,
        originalname: file.original || '',
        extname: file.extname,
        mimetype: file.type || '',
        size: file.size,
        albumid,
        age: file.age
      }

      infoMap.push({ path: destination, data })
    }))

    if (utils.clamd.scanner) {
      // scanFiles returns a user-facing string when a threat/error occurred
      const scanResult = await self.scanFiles(req, user, infoMap)
      if (scanResult) throw scanResult
    }

    await self.stripTags(req, infoMap)

    const result = await self.storeFilesToDb(req, res, user, infoMap)
    await self.sendUploadResponse(req, res, result)
  } catch (error) {
    // Clean up leftover chunks
    // Should continue even when encountering errors
    await Promise.all(files.map(file => {
      if (chunksData[file.uuid] !== undefined)
        return self.cleanUpChunks(file.uuid).catch(logger.error)
    }))

    // Re-throw error
    throw error
  }
}
2019-09-08 01:56:29 +00:00
// Append all chunks of a session, in order, into a single destination file.
// The write stream is always closed, and any stream error is re-thrown
// after cleanup.
self.combineChunks = async (destination, uuid) => {
  let errorObj
  // 'a' flag: each chunk is appended after the previous one
  const writeStream = fs.createWriteStream(destination, { flags: 'a' })

  try {
    // Chunk file names are zero-padded indices, so a plain lexicographic
    // sort restores their original order
    chunksData[uuid].chunks.sort()
    // Chunks must be piped sequentially to preserve byte order
    for (const chunk of chunksData[uuid].chunks)
      await new Promise((resolve, reject) => {
        fs.createReadStream(path.join(chunksData[uuid].root, chunk))
          .on('error', error => reject(error))
          .on('end', () => resolve())
          // end: false keeps the write stream open for the next chunk
          .pipe(writeStream, { end: false })
      })
  } catch (error) {
    errorObj = error
  }

  // Close stream
  writeStream.end()
  // Re-throw error
  if (errorObj) throw errorObj
}
2019-09-08 01:56:29 +00:00
/**
 * Remove a chunked upload session's files, directory, and cached state.
 * @param {string} uuid - Chunked upload session identifier.
 */
self.cleanUpChunks = async uuid => {
  const { root, chunks } = chunksData[uuid]
  // Unlink every chunk file in parallel
  await Promise.all(chunks.map(chunk =>
    paths.unlink(path.join(root, chunk))
  ))
  // Remove the now-empty session directory
  await paths.rmdir(root)
  // Drop the in-memory bookkeeping for this session
  delete chunksData[uuid]
}
2019-11-05 20:35:04 +00:00
// Scan uploaded files with ClamAV (clamd). Returns a user-facing string when
// a threat or scanner error occurred (after unlinking every file in the
// batch), or a falsy value when everything is clean or the scan is skipped.
self.scanFiles = async (req, user, infoMap) => {
  // eslint-disable-next-line curly
  if (user && utils.clamd.groupBypass && perms.is(user, utils.clamd.groupBypass)) {
    // logger.log(`[ClamAV]: Skipping ${infoMap.length} file(s), ${utils.clamd.groupBypass} group bypass`)
    return false
  }

  const foundThreats = []
  const results = await Promise.all(infoMap.map(async info => {
    // Skip whitelisted extensions and files above the configured scan size
    if (utils.clamd.whitelistExtensions && utils.clamd.whitelistExtensions.includes(info.data.extname))
      return // logger.log(`[ClamAV]: Skipping ${info.data.filename}, extension whitelisted`)
    if (utils.clamd.maxSize && info.data.size > utils.clamd.maxSize)
      return // logger.log(`[ClamAV]: Skipping ${info.data.filename}, size ${info.data.size} > ${utils.clamd.maxSize}`)

    const reply = await utils.clamd.scanner.scanFile(info.path, utils.clamd.timeout, utils.clamd.chunkSize)
    if (!reply.includes('OK') || reply.includes('FOUND')) {
      // eslint-disable-next-line no-control-regex
      const foundThreat = reply.replace(/^stream: /, '').replace(/ FOUND\u0000$/, '')
      logger.log(`[ClamAV]: ${info.data.filename}: ${foundThreat} FOUND.`)
      foundThreats.push(foundThreat)
    }
  })).then(() => {
    // Report only the first threat; hint when there are more
    if (foundThreats.length)
      return `Threat found: ${foundThreats[0]}${foundThreats.length > 1 ? ', and more' : ''}.`
  }).catch(error => {
    logger.error(`[ClamAV]: ${error.toString()}`)
    return 'An unexpected error occurred with ClamAV, please contact the site owner.'
  })

  if (results)
    // Unlink all files when at least one threat is found OR any errors occurred
    // Should continue even when encountering errors
    await Promise.all(infoMap.map(info =>
      utils.unlinkFile(info.data.filename).catch(logger.error)
    ))

  return results
}
2019-11-29 13:42:53 +00:00
/**
 * Strip metadata tags from the uploaded batch when requested/configured.
 * If stripping any file fails, the whole batch is unlinked and the error
 * is re-thrown.
 * @param {object} req - Express request (striptags header is consulted).
 * @param {Array<{path: string, data: object}>} infoMap - Uploaded files.
 */
self.stripTags = async (req, infoMap) => {
  const enabled = self.parseStripTags(req.headers.striptags)
  if (!enabled) return

  try {
    await Promise.all(infoMap.map(info =>
      utils.stripTags(info.data.filename, info.data.extname)
    ))
  } catch (error) {
    // Unlink the whole batch when stripping any file failed;
    // continue past individual unlink errors
    await Promise.all(infoMap.map(info =>
      utils.unlinkFile(info.data.filename).catch(logger.error)
    ))
    // Re-throw so callers can report the failure
    throw error
  }
}
2019-09-08 01:56:29 +00:00
/**
 * Persists uploaded files into the database, de-duplicating by hash + size.
 * Duplicates are unlinked from disk and the existing DB row is reported back.
 * New files are inserted, attached to authorized albums, and thumbnailed.
 * @param {object} req - Express request (reads `req.path` and `req.ip`).
 * @param {object} res - Express response (unused here; kept for interface parity).
 * @param {object|undefined} user - Authenticated user, or undefined for anonymous uploads.
 * @param {object[]} infoMap - Per-file info objects ({ path, data }).
 * @returns {Promise<object[]>} Newly inserted rows concatenated with pre-existing duplicates.
 */
self.storeFilesToDb = async (req, res, user, infoMap) => {
  const files = []
  const exists = []
  const albumids = []

  await Promise.all(infoMap.map(async info => {
    // Create hash of the file
    // NOTE: MD5 is used for de-duplication only, not for security purposes
    const hash = await new Promise((resolve, reject) => {
      const result = crypto.createHash('md5')
      fs.createReadStream(info.path)
        .on('error', error => reject(error))
        .on('end', () => resolve(result.digest('hex')))
        .on('data', data => result.update(data, 'utf8'))
    })

    // Check if the file exists by checking its hash and size,
    // scoped to the same uploader (or to anonymous uploads)
    const dbFile = await db.table('files')
      .where(function () {
        if (user === undefined)
          this.whereNull('userid')
        else
          this.where('userid', user.id)
      })
      .where({
        hash,
        size: info.data.size
      })
      // Select expirydate to display expiration date of existing files as well
      .select('name', 'expirydate')
      .first()

    if (dbFile) {
      // Duplicate found: remove the freshly written file from disk
      // Continue even when encountering errors
      await utils.unlinkFile(info.data.filename).catch(logger.error)
      // If on /nojs route, append original file name reported by client
      if (req.path === '/nojs')
        dbFile.original = info.data.originalname
      exists.push(dbFile)
      return
    }

    const timestamp = Math.floor(Date.now() / 1000)
    const data = {
      name: info.data.filename,
      original: info.data.originalname,
      type: info.data.mimetype,
      size: info.data.size,
      hash,
      // Only disable if explicitly set to false in config
      ip: config.uploads.storeIP !== false ? req.ip : null,
      timestamp
    }

    if (user) {
      data.userid = user.id
      data.albumid = info.data.albumid
      if (data.albumid !== null && !albumids.includes(data.albumid))
        albumids.push(data.albumid)
    }

    if (info.data.age)
      data.expirydate = data.timestamp + (info.data.age * 3600) // Hours to seconds

    files.push(data)

    // Generate thumbs, but do not wait
    if (utils.mayGenerateThumb(info.data.extname))
      utils.generateThumbs(info.data.filename, info.data.extname).catch(logger.error)
  }))

  if (files.length) {
    let authorizedIds = []
    if (albumids.length) {
      authorizedIds = await db.table('albums')
        .where({ userid: user.id })
        .whereIn('id', albumids)
        .select('id')
        .then(rows => rows.map(row => row.id))
      // Remove albumid if user does not own the album
      for (const file of files)
        if (file.albumid !== null && !authorizedIds.includes(file.albumid))
          file.albumid = null
    }

    // Insert new files to DB
    await db.table('files').insert(files)
    utils.invalidateStatsCache('uploads')

    // Update albums' timestamp
    if (authorizedIds.length) {
      await db.table('albums')
        .whereIn('id', authorizedIds)
        .update('editedAt', Math.floor(Date.now() / 1000))
      utils.invalidateAlbumsCache(authorizedIds)
    }
  }

  return files.concat(exists)
}
2018-04-04 17:38:15 +00:00
2019-09-08 01:56:29 +00:00
/**
 * Sends the final JSON response for an upload request.
 * Each file entry gets its public URL; temporary uploads also report their
 * expiry date, and /nojs requests also echo the client-reported original name.
 * @param {object} req - Express request (reads `req.path`).
 * @param {object} res - Express response.
 * @param {object[]} result - DB rows / duplicate entries from storeFilesToDb().
 */
self.sendUploadResponse = async (req, res, result) => {
  // Send response
  res.json({
    success: true,
    files: result.map(file => {
      const map = {
        name: file.name,
        url: `${config.domain}/${file.name}`
      }

      // If a temporary upload, add expiry date
      if (file.expirydate)
        map.expirydate = file.expirydate

      // If on /nojs route, add original name
      if (req.path === '/nojs')
        map.original = file.original

      return map
    })
  })
}
2017-03-17 00:53:29 +00:00
2019-09-08 01:56:29 +00:00
/**
 * Deletes a single upload by ID.
 * Thin wrapper that maps /delete requests onto the /bulkdelete handler.
 * @param {object} req - Express request (reads `req.body.id`; body is rewritten).
 * @param {object} res - Express response.
 */
self.delete = async (req, res) => {
  // Map /delete requests to /bulkdelete route
  // Always pass an explicit radix to parseInt
  const id = parseInt(req.body.id, 10)
  const body = {
    field: 'id',
    // An unparsable ID yields undefined, which bulkDelete rejects
    values: Number.isNaN(id) ? undefined : [id]
  }
  req.body = body
  return self.bulkDelete(req, res)
}
2019-09-08 01:56:29 +00:00
/**
 * Deletes multiple uploads matched by an arbitrary column.
 * @param {object} req - Express request (reads `req.body.field`, `req.body.values`).
 * @param {object} res - Express response; JSON includes `failed` values that
 *   could not be deleted.
 */
self.bulkDelete = async (req, res) => {
  const user = await utils.authorize(req, res)
  if (!user) return

  // Default to matching by primary key
  const field = req.body.field || 'id'
  const values = req.body.values

  if (!Array.isArray(values) || !values.length)
    return res.json({ success: false, description: 'No array of files specified.' })

  try {
    const failed = await utils.bulkDeleteFromDb(field, values, user)
    return res.json({ success: true, failed })
  } catch (error) {
    logger.error(error)
    return res.status(500).json({ success: false, description: 'An unexpected error occurred. Try again?' })
  }
}
2017-10-04 00:13:38 +00:00
2019-09-08 01:56:29 +00:00
/**
 * Lists uploads for the dashboard, paginated 25 per page.
 * Regular users see their own uploads (optionally scoped to an album via
 * `req.params.id`); moderators may list ALL uploads (`all` header) and use
 * the `filters` header (user:/name:/ip:/orderby: keys, -user/-ip flags).
 * @param {object} req - Express request (headers: all, filters; params: id, page).
 * @param {object} res - Express response.
 */
self.list = async (req, res) => {
  const user = await utils.authorize(req, res)
  if (!user) return

  const all = Boolean(req.headers.all)
  const filters = req.headers.filters
  const ismoderator = perms.is(user, 'moderator')
  // Only moderators may list everything or use filters
  if ((all || filters) && !ismoderator)
    return res.status(403).end()

  const basedomain = config.domain

  // For filtering uploads
  const _filters = {
    uploaders: [],
    names: [],
    ips: [],
    flags: {
      nouser: false,
      noip: false
    },
    keywords: []
  }

  // Cast column(s) to specific type if they're stored differently
  const _orderByCasts = {
    size: 'integer'
  }
  // Columns with which to use SQLite's NULLS LAST option
  const _orderByNullsLast = [
    'userid',
    'expirydate',
    'ip'
  ]
  const _orderBy = []

  // Perhaps this can be simplified even further?
  if (filters) {
    const usernames = []
    filters
      .split(' ')
      .map((v, i, a) => {
        // A trailing unescaped backslash joins this token with the next one
        // (allows spaces in filter values)
        if (/[^\\]\\$/.test(v) && a[i + 1]) {
          const tmp = `${v.slice(0, -1)} ${a[i + 1]}`
          a[i + 1] = ''
          return tmp
        }
        return v.replace(/\\\\/, '\\')
      })
      .map(v => {
        // Split "key:value" tokens into [key, value] pairs
        const x = v.indexOf(':')
        if (x >= 0 && v.substring(x + 1))
          return [v.substring(0, x), v.substring(x + 1)]
        else
          return v
      })
      .forEach(v => {
        if (Array.isArray(v)) {
          if (v[0] === 'user') {
            usernames.push(v[1])
          } else if (v[0] === 'name') {
            _filters.names.push(v[1])
          } else if (v[0] === 'ip') {
            _filters.ips.push(v[1])
          } else if (v[0] === 'orderby') {
            const tmp = v[1].split(':')
            let col = tmp[0]
            let dir = 'asc'
            if (_orderByCasts[col])
              col = `cast (\`${col}\` as ${_orderByCasts[col]})`
            if (tmp[1] && /^d/i.test(tmp[1]))
              dir = 'desc'
            _orderBy.push(`${col} ${dir}${_orderByNullsLast.includes(col) ? ' nulls last' : ''}`)
          }
        } else {
          if (v === '-user')
            _filters.flags.nouser = true
          else if (v === '-ip')
            _filters.flags.noip = true
          else
            // Push the whole keyword token (v[0] would only push its first character)
            _filters.keywords.push(v)
        }
      })

    _filters.uploaders = await db.table('users')
      .whereIn('username', usernames)
      .select('id', 'username')
  }

  if (filters && !(_filters.uploaders.length || _filters.names.length || _filters.ips.length || _filters.flags.nouser || _filters.flags.noip || _orderBy.length))
    if (_filters.keywords.length)
      // TODO: Support filtering using keywords only
      return res.json({ success: false, description: 'Filtering using keywords only is still work in progress. Please confirm valid filtering keys through the Help? button!' })
    else
      return res.json({ success: false, description: 'No valid filter or sort keys were used. Please confirm the valid keys through the Help? button!' })

  // Shared knex where-builder for both the count and the select queries
  function filter () {
    if (req.params.id !== undefined)
      this.where('albumid', req.params.id)
    else if (!all)
      this.where('userid', user.id)
    else
      // First, look for uploads matching ANY of the supplied 'user' OR 'ip' filters
      // Then, refine the matches using the supplied 'name' filters
      this.where(function () {
        if (_filters.uploaders.length)
          this.orWhereIn('userid', _filters.uploaders.map(v => v.id))
        if (_filters.ips.length)
          this.orWhereIn('ip', _filters.ips)
        if (_filters.flags.nouser)
          this.orWhereNull('userid')
        if (_filters.flags.noip)
          this.orWhereNull('ip')
      }).andWhere(function () {
        for (const name of _filters.names)
          if (name.includes('*'))
            // Asterisk wildcards map to SQL LIKE patterns
            this.orWhere('name', 'like', name.replace(/\*/g, '%'))
          else
            this.orWhere('name', name)
      })
  }

  try {
    // Query uploads count for pagination
    const count = await db.table('files')
      .where(filter)
      .count('id as count')
      .then(rows => rows[0].count)
    if (!count)
      return res.json({ success: true, files: [], count })

    let offset = req.params.page
    if (offset === undefined) offset = 0

    const columns = ['id', 'name', 'userid', 'size', 'timestamp']

    if (temporaryUploads)
      columns.push('expirydate')

    // Only select IPs if we are listing all uploads
    columns.push(all ? 'ip' : 'albumid')

    const files = await db.table('files')
      .where(filter)
      .orderByRaw(_orderBy.length ? _orderBy.join(', ') : '`id` desc')
      .limit(25)
      .offset(25 * offset)
      .select(columns)

    if (!files.length)
      return res.json({ success: true, files, count, basedomain })

    for (const file of files) {
      file.extname = utils.extname(file.name)
      if (utils.mayGenerateThumb(file.extname))
        file.thumb = `thumbs/${file.name.slice(0, -file.extname.length)}.png`
    }

    // If we are not listing all uploads, query album names
    let albums = {}
    if (!all) {
      const albumids = files
        .map(file => file.albumid)
        .filter((v, i, a) => {
          // Keep unique, non-empty album IDs only
          return v !== null && v !== undefined && v !== '' && a.indexOf(v) === i
        })
      albums = await db.table('albums')
        .whereIn('id', albumids)
        .where('enabled', 1)
        .where('userid', user.id)
        .select('id', 'name')
        .then(rows => {
          // Build Object indexed by their IDs
          const obj = {}
          for (const row of rows)
            obj[row.id] = row.name
          return obj
        })
    }

    // If we are not listing all uploads, send response
    if (!all)
      return res.json({ success: true, files, count, albums, basedomain })

    // Otherwise proceed to querying usernames
    let _users = _filters.uploaders
    if (!_users.length) {
      const userids = files
        .map(file => file.userid)
        .filter((v, i, a) => {
          return v !== null && v !== undefined && v !== '' && a.indexOf(v) === i
        })

      // If there are no uploads attached to a registered user, send response
      if (userids.length === 0)
        return res.json({ success: true, files, count, basedomain })

      // Query usernames of user IDs from currently selected files
      _users = await db.table('users')
        .whereIn('id', userids)
        .select('id', 'username')
    }

    const users = {}
    for (const uploader of _users)
      users[uploader.id] = uploader.username

    return res.json({ success: true, files, count, users, basedomain })
  } catch (error) {
    // If moderator, capture SQLITE_ERROR and use its error message for the response's description
    let errorString
    if (ismoderator && error.code === 'SQLITE_ERROR') {
      const match = error.message.match(/SQLITE_ERROR: .*$/)
      errorString = match && match[0]
    }
    // If not proper SQLITE_ERROR, log to console
    if (!errorString) {
      logger.error(error)
      res.status(500) // Use 500 status code
    }
    return res.json({
      success: false,
      description: errorString || 'An unexpected error occurred. Try again?'
    })
  }
}
2019-09-08 01:56:29 +00:00
module . exports = self