Updates: now supports uploading by URLs!

* Added upload by URLs. It has its own max size option (config.uploads.urlMaxSize); make sure your config matches config.sample.js.
Here's a brief video showing it in action: https://i.fiery.me/CUhQ.mp4.
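
For reference, a sketch of the relevant part of config.sample.js after this change; only urlMaxSize is new, and the other values shown are the existing sample defaults:

module.exports = {
  // ...other options...
  uploads: {
    // ...other upload options...
    maxSize: '512MB',
    /*
      Max file size allowed for upload by URLs. Needs to be in MB.
      NOTE: Set to falsy value (false, null, etc.) to disable upload by URLs.
    */
    urlMaxSize: '32MB'
  }
}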

* /api/upload now supports uploading by URLs. Devs only need to POST a JSON request containing a key named "urls", an array of the URLs to upload.
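
For illustration, a minimal sketch of calling the new endpoint with axios (the same client the homepage uploader uses); the domain, token and target URL below are placeholders, not part of this commit:

const axios = require('axios')

// POST a JSON body with an "urls" array. The optional "token" and "albumid"
// headers behave the same way as for regular file uploads.
axios.post('https://safe.example.com/api/upload', {
  urls: ['https://example.com/image.png']
}, {
  headers: { token: 'YOUR_TOKEN' }
})
  .then(res => console.log(res.data)) // e.g. { success: true, files: [{ name, url, ... }] }
  .catch(console.error)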

* Added a file extension filter to /api/upload/finishchunks.

* Added a proper total chunks size check to /api/upload/finishchunks.
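
For reference, a sketch of a /api/upload/finishchunks request body that passes the new checks; every value below is a placeholder:

// POSTed as JSON to /api/upload/finishchunks, with the usual "token" header.
// Each entry needs a string "uuid" and a numeric "count"; "original", "type",
// "size" and "albumid" remain optional. The combined size of the uploaded
// chunks must not exceed config.uploads.maxSize.
const body = {
  files: [{
    uuid: 'PLACEHOLDER-UUID', // the uuid that was used when uploading the chunks
    original: 'video.mp4',
    count: 5,                 // number of chunks that were uploaded
    type: 'video/mp4',
    size: 52428800
  }]
}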

* Various code improvements.
Bobby Wibowo 2018-05-11 21:34:13 +07:00
parent 479db54cd3
commit bd722129de
9 changed files with 416 additions and 175 deletions

View File

@ -78,13 +78,19 @@ module.exports = {
*/
maxSize: '512MB',
/*
Max file size allowed for upload by URLs. Needs to be in MB.
NOTE: Set to falsy value (false, null, etc.) to disable upload by URLs.
*/
urlMaxSize: '32MB',
/*
Chunk size for chunk uploads. Needs to be in MB.
If this is enabled, every files uploaded from the homepage uploader will forcibly be chunked
by the size specified in "chunkSize". People will still be able to upload bigger files with
the API as long as they don't surpass the limit specified in the "maxSize" option above.
Total size of the whole chunks will also later be checked against the "maxSize" option.
NOTE: Set to falsy value (false, null, etc.) to disable.
NOTE: Set to falsy value (false, null, etc.) to disable chunked uploads.
*/
chunkSize: '10MB',
@ -142,7 +148,7 @@ module.exports = {
No-JS uploader page will not chunk the uploads, so it's recommended to change this
into the maximum upload size you have in Cloudflare.
This limit will only be applied to the subtitle in the page.
NOTE: Set to falsy value (false, null, etc.) to disable.
NOTE: Set to falsy value (false, null, etc.) to inherit "maxSize" option.
*/
noJsMaxSize: '100MB',
@ -151,7 +157,7 @@ module.exports = {
API route (HOME_DOMAIN/api/album/zip/*), with this option you can limit the
maximum total size of files in an album that can be zipped.
Cloudflare will not cache files bigger than 512MB.
NOTE: Set to falsy value (false, null, etc.) to disable.
NOTE: Set to falsy value (false, null, etc.) to disable max total size.
*/
zipMaxTotalSize: '512MB',

View File

@ -5,6 +5,7 @@ const randomstring = require('randomstring')
const db = require('knex')(config.database)
const crypto = require('crypto')
const fs = require('fs')
const snekfetch = require('snekfetch')
const utils = require('./utilsController')
const uploadsController = {}
@ -13,7 +14,9 @@ const maxTries = config.uploads.maxTries || 1
const uploadsDir = path.join(__dirname, '..', config.uploads.folder)
const chunkedUploads = Boolean(config.uploads.chunkSize)
const chunksDir = path.join(uploadsDir, 'chunks')
const maxSizeBytes = parseInt(config.uploads.maxSize) * 1000000
const maxSize = config.uploads.maxSize
const maxSizeBytes = parseInt(maxSize) * 1000000
const urlMaxSizeBytes = parseInt(config.uploads.urlMaxSize) * 1000000
const storage = multer.diskStorage({
destination (req, file, cb) {
@ -54,18 +57,13 @@ const storage = multer.diskStorage({
const upload = multer({
storage,
limits: {
fileSize: config.uploads.maxSize
fileSize: maxSizeBytes
},
fileFilter (req, file, cb) {
// If there are extensions that have to be filtered
if (config.extensionsFilter && config.extensionsFilter.length) {
const extname = path.extname(file.originalname).toLowerCase()
const match = config.extensionsFilter.some(extension => extname === extension.toLowerCase())
if ((config.filterBlacklist && match) || (!config.filterBlacklist && !match)) {
// eslint-disable-next-line standard/no-callback-literal
return cb(`Sorry, ${extname.substr(1).toUpperCase()} files are not permitted for security reasons.`)
}
const extname = path.extname(file.originalname).toLowerCase()
if (uploadsController.isExtensionFiltered(extname)) {
// eslint-disable-next-line standard/no-callback-literal
cb(`${extname.substr(1).toUpperCase()} files are not permitted for security reasons.`)
}
// Re-map Dropzone keys so people can manually use the API without prepending 'dz'
@ -90,6 +88,17 @@ const upload = multer({
}
}).array('files[]')
uploadsController.isExtensionFiltered = extname => {
// If there are extensions that have to be filtered
if (config.extensionsFilter && config.extensionsFilter.length) {
const match = config.extensionsFilter.some(extension => extname === extension.toLowerCase())
if ((config.filterBlacklist && match) || (!config.filterBlacklist && !match)) {
return true
}
}
return false
}
uploadsController.getFileNameLength = req => {
// If the user has a preferred file length, make sure it is within the allowed range
if (req.headers.filelength) {
@ -135,7 +144,12 @@ uploadsController.upload = async (req, res, next) => {
let albumid = parseInt(req.headers.albumid || req.params.albumid)
if (isNaN(albumid)) { albumid = null }
return uploadsController.actuallyUpload(req, res, user, albumid)
if (req.body.urls) {
return uploadsController.actuallyUploadByUrl(req, res, user, albumid)
} else {
return uploadsController.actuallyUpload(req, res, user, albumid)
}
}
uploadsController.actuallyUpload = async (req, res, user, albumid) => {
@ -144,14 +158,14 @@ uploadsController.actuallyUpload = async (req, res, user, albumid) => {
if (isError) { console.error(error) }
res.json({
success: false,
description: isError ? error.toString() : `Error: ${error}`
description: isError ? error.toString() : error
})
}
upload(req, res, async error => {
if (error) { return erred(error) }
if (error) { return erred(error.message) }
if (req.files.length === 0) { return erred('No files.') }
if (!req.files || !req.files.length) { return erred('No files.') }
// If chunked uploads is enabled and the uploaded file is a chunk, then just say that it was a success
if (chunkedUploads && req.body.uuid) { return res.json({ success: true }) }
@ -173,9 +187,82 @@ uploadsController.actuallyUpload = async (req, res, user, albumid) => {
})
}
uploadsController.actuallyUploadByUrl = async (req, res, user, albumid) => {
const erred = error => {
const isError = error instanceof Error
if (isError) { console.error(error) }
res.json({
success: false,
description: isError ? error.toString() : error
})
}
if (!config.uploads.urlMaxSize) { return erred('Upload by URLs is disabled at the moment.') }
const urls = req.body.urls
if (!urls || !(urls instanceof Array)) { return erred('Missing "urls" property (Array).') }
let iteration = 0
const infoMap = []
for (const url of urls) {
const original = path.basename(url).split(/[?#]/)[0]
const extension = path.extname(original)
if (uploadsController.isExtensionFiltered(extension)) {
return erred(`${extension.substr(1).toUpperCase()} files are not permitted for security reasons.`)
}
const head = await snekfetch.head(url)
.catch(erred)
const size = parseInt(head.headers['content-length'])
if (isNaN(size)) {
return erred('URLs with missing Content-Length HTTP header are not supported.')
}
if (size > urlMaxSizeBytes) {
return erred('File too large.')
}
const download = await snekfetch.get(url)
.catch(erred)
const length = uploadsController.getFileNameLength(req)
const name = await uploadsController.getUniqueRandomName(length, extension)
.catch(erred)
if (!name) { return }
const destination = path.join(uploadsDir, name)
fs.writeFile(destination, download.body, async error => {
if (error) { return erred(error) }
const data = {
filename: name,
originalname: original,
mimetype: download.headers['content-type'].split(';')[0] || '',
size,
albumid
}
infoMap.push({
path: destination,
data
})
iteration++
if (iteration === urls.length) {
const result = await uploadsController.formatInfoMap(req, res, user, infoMap)
.catch(erred)
if (result) {
return uploadsController.processFilesForDisplay(req, res, result.files, result.existingFiles)
}
}
})
}
}
uploadsController.finishChunks = async (req, res, next) => {
if (!chunkedUploads) {
return res.json({ success: false, description: 'Chunked uploads is disabled at the moment.' })
return res.json({ success: false, description: 'Chunked upload is disabled at the moment.' })
}
let user
@ -196,6 +283,7 @@ uploadsController.finishChunks = async (req, res, next) => {
let albumid = parseInt(req.headers.albumid || req.params.albumid)
if (isNaN(albumid)) { albumid = null }
return uploadsController.actuallyFinishChunks(req, res, user, albumid)
}
@ -205,19 +293,19 @@ uploadsController.actuallyFinishChunks = async (req, res, user, albumid) => {
if (isError) { console.error(error) }
res.json({
success: false,
description: isError ? error.toString() : `Error: ${error}`
description: isError ? error.toString() : error
})
}
const files = req.body.files
if (!files) { return erred('Missing files array.') }
if (!files || !(files instanceof Array)) { return erred('Missing "files" property (Array).') }
let iteration = 0
const infoMap = []
for (const file of files) {
const { uuid, original, count } = file
if (!uuid) { return erred('Missing UUID.') }
if (!count) { return erred('Missing chunks count.') }
if (!uuid || typeof uuid !== 'string') { return erred('Missing "uuid" property (string).') }
if (!count || typeof count !== 'number') { return erred('Missing "count" property (number).') }
const uuidDir = path.join(chunksDir, uuid)
fs.readdir(uuidDir, async (error, chunkNames) => {
@ -225,94 +313,128 @@ uploadsController.actuallyFinishChunks = async (req, res, user, albumid) => {
if (count < chunkNames.length) { return erred('Chunks count mismatch.') }
const extension = typeof original === 'string' ? path.extname(original) : ''
const length = uploadsController.getFileNameLength(req)
if (uploadsController.isExtensionFiltered(extension)) {
return erred(`${extension.substr(1).toUpperCase()} files are not permitted for security reasons.`)
}
const length = uploadsController.getFileNameLength(req)
const name = await uploadsController.getUniqueRandomName(length, extension)
.catch(erred)
if (!name) { return }
const destination = path.join(uploadsDir, name)
const destFileStream = fs.createWriteStream(destination, { flags: 'a' })
// Sort chunk names
chunkNames.sort()
// Get total chunks size
const chunksTotalSize = await uploadsController.getTotalSize(uuidDir, chunkNames)
.catch(erred)
if (chunksTotalSize > maxSizeBytes) {
// Delete all chunks and remove chunks dir
const chunksCleaned = await uploadsController.cleanUpChunks(uuidDir, chunkNames)
.catch(erred)
if (!chunksCleaned) { return }
return erred(`Total chunks size is bigger than ${maxSize}.`)
}
// Append all chunks
const destFileStream = fs.createWriteStream(destination, { flags: 'a' })
const chunksAppended = await uploadsController.appendToStream(destFileStream, uuidDir, chunkNames)
.then(() => true)
.catch(erred)
if (!chunksAppended) { return }
// Delete all chunks
const chunksDeleted = await Promise.all(chunkNames.map(chunkName => {
return new Promise((resolve, reject) => {
const chunkPath = path.join(uuidDir, chunkName)
fs.unlink(chunkPath, error => {
if (error && error.code !== 'ENOENT') {
return reject(error)
}
resolve()
})
})
})).catch(erred)
if (!chunksDeleted) { return }
// Delete all chunks and remove chunks dir
const chunksCleaned = await uploadsController.cleanUpChunks(uuidDir, chunkNames)
.catch(erred)
if (!chunksCleaned) { return }
// Delete UUID dir
fs.rmdir(uuidDir, async error => {
if (error) { return erred(error) }
const data = {
filename: name,
originalname: file.original || '',
mimetype: file.type || '',
size: file.size || 0
}
const data = {
filename: name,
originalname: file.original || '',
mimetype: file.type || '',
size: file.size || 0
}
data.albumid = parseInt(file.albumid)
if (isNaN(data.albumid)) { data.albumid = albumid }
data.albumid = parseInt(file.albumid)
if (isNaN(data.albumid)) { data.albumid = albumid }
infoMap.push({
path: destination,
data
})
iteration++
if (iteration === files.length) {
const result = await uploadsController.formatInfoMap(req, res, user, infoMap)
.catch(erred)
if (result) {
return uploadsController.processFilesForDisplay(req, res, result.files, result.existingFiles)
}
}
infoMap.push({
path: destination,
data
})
iteration++
if (iteration === files.length) {
const result = await uploadsController.formatInfoMap(req, res, user, infoMap)
.catch(erred)
if (result) {
return uploadsController.processFilesForDisplay(req, res, result.files, result.existingFiles)
}
}
})
}
}
uploadsController.getTotalSize = (uuidDir, chunkNames) => {
return new Promise((resolve, reject) => {
let size = 0
const stat = i => {
if (i === chunkNames.length) { return resolve(size) }
fs.stat(path.join(uuidDir, chunkNames[i]), (error, stats) => {
if (error) { return reject(error) }
size += stats.size
stat(i + 1)
})
}
stat(0)
})
}
uploadsController.appendToStream = (destFileStream, uuidDr, chunkNames) => {
return new Promise((resolve, reject) => {
const append = i => {
if (i < chunkNames.length) {
fs.createReadStream(path.join(uuidDr, chunkNames[i]))
.on('end', () => {
append(++i)
})
.on('error', error => {
console.error(error)
destFileStream.end()
return reject(error)
})
.pipe(destFileStream, { end: false })
} else {
if (i === chunkNames.length) {
destFileStream.end()
return resolve()
return resolve(true)
}
fs.createReadStream(path.join(uuidDr, chunkNames[i]))
.on('end', () => {
append(i + 1)
})
.on('error', error => {
console.error(error)
destFileStream.end()
return reject(error)
})
.pipe(destFileStream, { end: false })
}
append(0)
})
}
uploadsController.cleanUpChunks = (uuidDir, chunkNames) => {
return new Promise(async (resolve, reject) => {
await Promise.all(chunkNames.map(chunkName => {
return new Promise((resolve, reject) => {
const chunkPath = path.join(uuidDir, chunkName)
fs.unlink(chunkPath, error => {
if (error && error.code !== 'ENOENT') {
console.error(error)
return reject(error)
}
resolve()
})
})
})).catch(reject)
fs.rmdir(uuidDir, error => {
if (error) { return reject(error) }
resolve(true)
})
})
}
uploadsController.formatInfoMap = (req, res, user, infoMap) => {
return new Promise((resolve, reject) => {
let iteration = 0
@ -374,7 +496,7 @@ uploadsController.formatInfoMap = (req, res, user, infoMap) => {
iteration++
if (iteration === infoMap.length) {
return resolve({ files, existingFiles })
resolve({ files, existingFiles })
}
})
}

View File

@ -159,7 +159,7 @@ utilsController.bulkDeleteFiles = async (field, values, user) => {
failed.push(file[field])
console.error(error)
})
return resolve()
resolve()
})
}))

View File

@ -56,7 +56,7 @@ for (const page of config.pages) {
if (fs.existsSync(`./pages/custom/${page}.html`)) {
safe.get(`/${page}`, (req, res, next) => res.sendFile(`${page}.html`, { root: './pages/custom/' }))
} else if (page === 'home') {
safe.get('/', (req, res, next) => res.render('home'))
safe.get('/', (req, res, next) => res.render('home', { urlMaxSize: config.uploads.urlMaxSize }))
} else {
safe.get(`/${page}`, (req, res, next) => res.render(page))
}

View File

@ -24,47 +24,15 @@
max-height: 200px;
}
/*
#dropzone {
border: 1px solid #dbdbdb;
background-color: rgba(0, 0, 0, 0);
border-color: #ff3860;
color: #ff3860;
display: none;
width: 100%;
border-radius: 3px;
-webkit-box-shadow: none;
box-shadow: none;
height: 2.5em;
-webkit-box-align: center;
-ms-flex-align: center;
align-items: center;
-webkit-box-pack: center;
-ms-flex-pack: center;
justify-content: center;
padding-left: .75em;
padding-right: .75em;
text-align: center;
cursor: pointer;
}
*/
#dropzone * {
pointer-events: none;
}
#uploads,
#tokenContainer,
#panel {
display: none;
}
#dropzone:hover {
background-color: #ff3860;
border-color: #ff3860;
color: #fff;
}
#maxFileSize {
font-size: 1rem;
}
@ -88,10 +56,6 @@
display: none;
}
#uploads {
margin-bottom: 25px;
}
@-webkit-keyframes floatUp {
0% {
opacity: 0;
@ -142,11 +106,16 @@
}
}
#uploads progress {
.uploads {
margin-bottom: .75rem;
}
.uploads progress {
margin-top: .5rem;
margin-bottom: 1rem;
}
#uploads img {
.uploads img {
max-width: 200px;
}
@ -182,3 +151,24 @@
padding: 0 0.3rem;
color: #7f8c8d;
}
#tabs {
margin-bottom: 1rem;
}
#tabs ul {
border-bottom: 1px solid #bdc3c7;
}
#tabs li:not(.is-active) a {
color: #bdc3c7;
}
#tabs li:not(.is-active) a:hover {
color: #7f8c8d;
border-bottom-color: #7f8c8d;
}
.tab-content {
margin-bottom: -.75rem;
}

View File

@ -14,6 +14,7 @@ const page = {
album: null,
albumSelect: null,
previewTemplate: null,
dropzone: null,
clipboardJS: null,
@ -103,17 +104,6 @@ page.prepareUpload = () => {
document.getElementById('albumDiv').style.display = 'flex'
}
const div = document.createElement('div')
div.id = 'dropzone'
div.className = 'button is-danger is-unselectable'
div.innerHTML = `
<span class="icon">
<i class="icon-upload-cloud"></i>
</span>
<span>Click here or drag and drop files</span>
`
div.style.display = 'flex'
document.getElementById('maxFileSize').innerHTML = `Maximum upload size per file is ${page.maxFileSize}`
document.getElementById('loginToUpload').style.display = 'none'
@ -121,9 +111,28 @@ page.prepareUpload = () => {
document.getElementById('loginLinkText').innerHTML = 'Create an account and keep track of your uploads'
}
document.getElementById('uploadContainer').appendChild(div)
const previewNode = document.querySelector('#tpl')
page.previewTemplate = previewNode.innerHTML
previewNode.parentNode.removeChild(previewNode)
page.prepareDropzone()
const tabs = document.getElementById('tabs')
if (tabs) {
tabs.style.display = 'flex'
const items = tabs.getElementsByTagName('li')
for (const item of items) {
item.addEventListener('click', function () {
page.setActiveTab(this.dataset.id)
})
}
document.getElementById('uploadUrls').addEventListener('click', function () {
page.uploadUrls(this)
})
page.setActiveTab('tab-files')
} else {
document.getElementById('tab-files').style.display = 'block'
}
}
page.prepareAlbums = async () => {
@ -160,20 +169,43 @@ page.prepareAlbums = async () => {
}
}
page.prepareDropzone = () => {
const previewNode = document.querySelector('#template')
previewNode.id = ''
const previewTemplate = previewNode.parentNode.innerHTML
previewNode.parentNode.removeChild(previewNode)
page.setActiveTab = activeId => {
const items = document.getElementById('tabs').getElementsByTagName('li')
for (const item of items) {
const tabId = item.dataset.id
if (tabId === activeId) {
item.classList.add('is-active')
document.getElementById(tabId).style.display = 'block'
} else {
item.classList.remove('is-active')
document.getElementById(tabId).style.display = 'none'
}
}
}
page.prepareDropzone = () => {
const tabDiv = document.getElementById('tab-files')
const div = document.createElement('div')
div.className = 'control is-expanded'
div.innerHTML = `
<div id="dropzone" class="button is-danger is-fullwidth is-unselectable">
<span class="icon">
<i class="icon-upload-cloud"></i>
</span>
<span>Click here or drag and drop files</span>
</div>
`
tabDiv.getElementsByClassName('dz-container')[0].appendChild(div)
const previewsContainer = tabDiv.getElementsByClassName('uploads')[0]
page.dropzone = new Dropzone('#dropzone', {
url: 'api/upload',
paramName: 'files[]',
maxFilesize: parseInt(page.maxFileSize),
parallelUploads: 2,
uploadMultiple: false,
previewsContainer: '#uploads',
previewTemplate,
previewsContainer,
previewTemplate: page.previewTemplate,
createImageThumbnails: false,
maxFiles: 1000,
autoProcessQueue: true,
@ -206,10 +238,10 @@ page.prepareDropzone = () => {
}
})
file.previewTemplate.querySelector('.progress').style.display = 'none'
file.previewElement.querySelector('.progress').style.display = 'none'
if (response.success === false) {
file.previewTemplate.querySelector('.error').innerHTML = response.description
file.previewElement.querySelector('.error').innerHTML = response.description
}
if (response.files && response.files[0]) {
@ -220,7 +252,7 @@ page.prepareDropzone = () => {
})
page.dropzone.on('addedfile', file => {
document.getElementById('uploads').style.display = 'block'
file.previewElement.querySelector('.name').innerHTML = file.name
})
// Add the selected albumid, if an album is selected, as a header
@ -238,10 +270,11 @@ page.prepareDropzone = () => {
page.dropzone.on('success', (file, response) => {
if (!response) { return }
file.previewTemplate.querySelector('.progress').style.display = 'none'
file.previewElement.querySelector('.progress').style.display = 'none'
// file.previewElement.querySelector('.name').innerHTML = file.name
if (response.success === false) {
file.previewTemplate.querySelector('.error').innerHTML = response.description
file.previewElement.querySelector('.error').innerHTML = response.description
}
if (response.files && response.files[0]) {
@ -250,31 +283,89 @@ page.prepareDropzone = () => {
})
page.dropzone.on('error', (file, error) => {
file.previewTemplate.querySelector('.progress').style.display = 'none'
file.previewTemplate.querySelector('.error').innerHTML = error
file.previewElement.querySelector('.progress').style.display = 'none'
file.previewElement.querySelector('.name').innerHTML = file.name
file.previewElement.querySelector('.error').innerHTML = error
})
page.prepareShareX()
}
page.uploadUrls = async button => {
const tabDiv = document.getElementById('tab-urls')
if (!tabDiv) { return }
if (button.classList.contains('is-loading')) { return }
button.classList.add('is-loading')
const previewsContainer = tabDiv.getElementsByClassName('uploads')[0]
const files = document.getElementById('urls').value
.split(/\r?\n/)
.map(url => {
const previewTemplate = document.createElement('template')
previewTemplate.innerHTML = page.previewTemplate.trim()
const previewElement = previewTemplate.content.firstChild
return {
name: url,
url,
previewElement
}
})
await new Promise(resolve => {
const post = async i => {
if (i === files.length) { return resolve() }
const file = files[i]
file.previewElement.querySelector('.name').innerHTML = file.name
previewsContainer.appendChild(file.previewElement)
const response = await axios.post('api/upload',
{
urls: [file.url]
},
{
headers: {
token: page.token,
albumid: page.album
}
})
.then(response => response.data)
.catch(error => {
return {
success: false,
description: error.toString()
}
})
file.previewElement.querySelector('.progress').style.display = 'none'
if (response.success) {
page.updateTemplate(file, response.files[0])
} else {
file.previewElement.querySelector('.error').innerHTML = response.description
}
post(i + 1)
}
post(0)
})
button.classList.remove('is-loading')
}
page.updateTemplate = (file, response) => {
if (!response.url) { return }
const a = file.previewTemplate.querySelector('.link > a')
const clipboard = file.previewTemplate.querySelector('.clipboard-mobile > .clipboard-js')
const a = file.previewElement.querySelector('.link > a')
const clipboard = file.previewElement.querySelector('.clipboard-mobile > .clipboard-js')
a.href = a.innerHTML = clipboard.dataset['clipboardText'] = response.url
clipboard.parentElement.style.display = 'block'
const name = file.previewTemplate.querySelector('.name')
name.innerHTML = file.name
const exec = /.[\w]+(\?|$)/.exec(response.url)
if (exec && exec[0] && imageExtensions.includes(exec[0].toLowerCase())) {
const img = file.previewTemplate.querySelector('img')
const img = file.previewElement.querySelector('img')
img.setAttribute('alt', response.name || '')
img.dataset['src'] = response.url
img.onerror = function () { this.style.display = 'none' } // hide webp in firefox and ie
page.lazyLoad.update(file.previewTemplate.querySelectorAll('img'))
page.lazyLoad.update(file.previewElement.querySelectorAll('img'))
}
}

View File

@ -20,6 +20,7 @@ routes.get('/nojs', async (req, res, next) => {
})
routes.post('/nojs', (req, res, next) => {
// TODO: Support upload by URLs.
res._json = res.json
res.json = (...args) => {
const result = args[0]

View File

@ -12,7 +12,7 @@
v1: CSS and JS files.
v2: Images and config files (manifest.json, browserconfig.xml, etcetera).
#}
{% set v1 = "Ii3JYKIhb0" %}
{% set v1 = "qbU8fyQa3a" %}
{% set v2 = "Ii3JYKIhb0" %}
{#

View File

@ -32,7 +32,7 @@
<div class="columns is-gapless">
<div class="column is-hidden-mobile"></div>
<div id="uploadContainer" class="column">
<div class="column">
<a id="loginToUpload" class="button is-danger is-loading" style="display: flex"></a>
<div id="albumDiv" class="field has-addons" style="display: none">
<div class="control is-expanded">
@ -46,31 +46,62 @@
</a>
</div>
</div>
<div class="field">
{% if urlMaxSize -%}
<div id="tabs" class="tabs is-centered" style="display: none">
<ul>
<li data-id="tab-files" class="is-active"><a>Files</a></li>
<li data-id="tab-urls"><a>URLs</a></li>
</ul>
</div>
{%- endif %}
<div id="tab-files" class="tab-content" style="display: none">
<div class="field dz-container"></div>
<div class="field uploads"></div>
</div>
{% if urlMaxSize -%}
<div id="tab-urls" class="tab-content" style="display: none">
<div class="field">
<div class="control">
<textarea id="urls" class="textarea" rows="2"></textarea>
</div>
<p class="help">Maximum file size for URL upload is {{ urlMaxSize }}.</p>
</div>
<div class="field">
<div class="control is-expanded">
<a id="uploadUrls" class="button is-danger is-fullwidth is-unselectable">
<span class="icon">
<i class="icon-upload-cloud"></i>
</span>
<span>Upload URLs</span>
</a>
</div>
</div>
<div class="field uploads"></div>
</div>
{%- endif %}
</div>
</div>
<div class="column is-hidden-mobile"></div>
</div>
<div id="uploads">
<div id="template" class="columns is-gapless">
<div class="column is-hidden-mobile"></div>
<div class="column">
<progress class="progress is-small is-danger" value="0" max="100"></progress>
<img class="is-unselectable">
<p class="name is-unselectable"></p>
<p class="error"></p>
<p class="link">
<a target="_blank" rel="noopener"></a>
</p>
<p class="clipboard-mobile is-hidden-desktop" style="display: none">
<a class="button is-info is-outlined clipboard-js" style="display: flex">
<span class="icon">
<i class="icon-clipboard-1"></i>
</span>
<span>Copy link to clipboard</span>
</a>
</p>
</div>
<div class="column is-hidden-mobile"></div>
<div id="tpl" style="display: none">
<div class="field">
<img class="is-unselectable">
<p class="name is-unselectable"></p>
<progress class="progress is-small is-danger" value="0" max="100">0%</progress>
<p class="error"></p>
<p class="link">
<a target="_blank" rel="noopener"></a>
</p>
<p class="clipboard-mobile is-hidden-desktop" style="display: none">
<a class="button is-info is-outlined clipboard-js" style="display: flex">
<span class="icon">
<i class="icon-clipboard-1"></i>
</span>
<span>Copy link to clipboard</span>
</a>
</p>
</div>
</div>