abort uploads over maxfilesize
parent 34f26fc017
commit 55d3d1a792
11 changed files with 415 additions and 2438 deletions
@@ -1,5 +1,6 @@
 const FileReceiver = require('./fileReceiver');
 const { notify } = require('./utils');
+const bytes = require('bytes');
 const $ = require('jquery');
 require('jquery-circle-progress');
 
@@ -29,20 +30,7 @@ $(document).ready(function() {
       // update progress bar
       $('#dl-progress').circleProgress('value', percent);
       $('.percent-number').html(`${Math.floor(percent * 100)}`);
-      if (progress[1] < 1000000) {
-        $('.progress-text').html(
-          `${filename} (${(progress[0] / 1000).toFixed(1)}KB of
-          ${(progress[1] / 1000).toFixed(1)}KB)`
-        );
-      } else if (progress[1] < 1000000000) {
-        $('.progress-text').html(
-          `${filename} (${(progress[0] / 1000000).toFixed(1)}MB of ${(progress[1] / 1000000).toFixed(1)}MB)`
-        );
-      } else {
-        $('.progress-text').html(
-          `${filename} (${(progress[0] / 1000000).toFixed(1)}MB of ${(progress[1] / 1000000000).toFixed(1)}GB)`
-        );
-      }
+      $('.progress-text').text(`${filename} (${bytes(progress[0])} of ${bytes(progress[1])})`);
       //on complete
       if (percent === 1) {
         fileReceiver.removeAllListeners('progress');
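The hand-rolled KB/MB/GB branches above are replaced with the bytes package, which picks a unit automatically. A minimal sketch of the formatting it provides (values illustrative):

    const bytes = require('bytes');
    bytes(1024);       // '1KB'
    bytes(2621440);    // '2.5MB'
    bytes(1073741824); // '1GB'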
@@ -118,14 +118,16 @@ class FileSender extends EventEmitter {
 
       xhr.onreadystatechange = () => {
         if (xhr.readyState === XMLHttpRequest.DONE) {
-          // uuid field and url field
-          const responseObj = JSON.parse(xhr.responseText);
-          resolve({
-            url: responseObj.url,
-            fileId: responseObj.id,
-            secretKey: keydata.k,
-            deleteToken: responseObj.delete
-          });
+          if (xhr.status === 200) {
+            const responseObj = JSON.parse(xhr.responseText);
+            return resolve({
+              url: responseObj.url,
+              fileId: responseObj.id,
+              secretKey: keydata.k,
+              deleteToken: responseObj.delete
+            });
+          }
+          reject(xhr.status);
         }
       };
 
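With this change the upload promise rejects with the raw HTTP status instead of resolving on any completed request, so a caller can tell the server-side 413 (added below) apart from other failures. A hedged sketch of consuming it (the handler is illustrative, not part of this commit):

    // assumes upload() returns the promise built above
    fileSender.upload()
      .then(info => console.log('uploaded to', info.url))
      .catch(status => {
        if (status === 413) {
          console.error('file exceeds the server-side size limit');
        } else {
          console.error('upload failed with HTTP status', status);
        }
      });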
@@ -1,5 +1,7 @@
+/* global MAXFILESIZE */
 const FileSender = require('./fileSender');
 const { notify, gcmCompliant } = require('./utils');
+const bytes = require('bytes');
 const $ = require('jquery');
 require('jquery-circle-progress');
 
@@ -84,6 +86,10 @@ $(document).ready(function() {
       file = event.target.files[0];
     }
 
+    if (file.size > MAXFILESIZE) {
+      return document.l10n.formatValue('fileTooBig', {size: bytes(MAXFILESIZE)}).then(alert);
+    }
+
     $('#page-one').attr('hidden', true);
     $('#upload-error').attr('hidden', true);
     $('#upload-progress').removeAttr('hidden');
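MAXFILESIZE is a page global rendered into /jsconfig.js (see the template hunk at the end), so oversized files are rejected before any encryption or network work starts. The same guard in isolation, as a sketch (the helper name is illustrative):

    /* global MAXFILESIZE */
    const bytes = require('bytes');
    // returns true when the file may be uploaded
    function withinLimit(file) {
      if (file.size > MAXFILESIZE) {
        alert(`That file is too big to upload. It should be less than ${bytes(MAXFILESIZE)}.`);
        return false;
      }
      return true;
    }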
@@ -108,19 +114,7 @@ $(document).ready(function() {
       $('#ul-progress').circleProgress().on('circle-animation-end', function() {
         $('.percent-number').html(`${Math.floor(percent * 100)}`);
       });
-      if (progress[1] < 1000000) {
-        $('.progress-text').text(
-          `${file.name} (${(progress[0] / 1000).toFixed(1)}KB of ${(progress[1] / 1000).toFixed(1)}KB)`
-        );
-      } else if (progress[1] < 1000000000) {
-        $('.progress-text').text(
-          `${file.name} (${(progress[0] / 1000000).toFixed(1)}MB of ${(progress[1] / 1000000).toFixed(1)}MB)`
-        );
-      } else {
-        $('.progress-text').text(
-          `${file.name} (${(progress[0] / 1000000).toFixed(1)}MB of ${(progress[1] / 1000000000).toFixed(1)}GB)`
-        );
-      }
+      $('.progress-text').text(`${file.name} (${bytes(progress[0])} of ${bytes(progress[1])})`);
     });
 
     fileSender.on('loading', isStillLoading => {
package-lock.json (generated): 2723 lines changed; diff suppressed because it is too large.
@@ -63,6 +63,7 @@ errorPageHeader = Something went wrong!
 errorPageMessage = There has been an error uploading the file.
 errorPageLink = Send another file
 
+fileTooBig = That file is too big to upload. It should be less than { $size }.
 
 linkExpiredAlt.alt = Link expired
 expiredPageHeader = This link has expired or never existed in the first place!
@@ -36,6 +36,11 @@ const conf = convict({
     format: ['production', 'development', 'test'],
     default: 'development',
     env: 'NODE_ENV'
   },
+  max_file_size: {
+    format: Number,
+    default: (1024 * 1024 * 1024) * 2,
+    env: 'P2P_MAX_FILE_SIZE'
+  }
 });
 
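convict wires the new key to an environment override, so the 2 GiB default can be changed without a code edit. A minimal self-contained sketch of the same block:

    const convict = require('convict');
    const conf = convict({
      max_file_size: {
        format: Number,
        default: 1024 * 1024 * 1024 * 2, // 2 GiB
        env: 'P2P_MAX_FILE_SIZE'
      }
    });
    // P2P_MAX_FILE_SIZE=104857600 node server.js  -> 104857600
    console.log(conf.get('max_file_size'));        // 2147483648 by default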
@@ -62,7 +62,11 @@ app.use(
     }
   })
 );
-app.use(busboy());
+app.use(busboy({
+  limits: {
+    fileSize: conf.max_file_size
+  }
+}));
 app.use(bodyParser.json());
 app.use(express.static(STATIC_PATH));
 app.use('/l20n', express.static(L20N));
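Passing limits.fileSize makes busboy truncate any part that grows past the cap and emit 'limit' on that file stream; the storage layer below listens for it. A hedged sketch of the behavior, assuming the connect-busboy middleware style used here:

    const express = require('express');
    const busboy = require('connect-busboy');

    const app = express();
    app.use(busboy({ limits: { fileSize: 1024 * 1024 } })); // 1 MiB for the sketch

    app.post('/upload', (req, res) => {
      req.pipe(req.busboy);
      req.busboy.on('file', (fieldname, file) => {
        // fires once the stream has been truncated at the limit
        file.on('limit', () => res.sendStatus(413));
        file.on('end', () => {
          if (!res.headersSent) res.sendStatus(200);
        });
        file.resume(); // drain whatever busboy lets through
      });
    });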
@@ -77,6 +81,7 @@ app.get('/jsconfig.js', (req, res) => {
   res.render('jsconfig', {
     trackerId: conf.analytics_id,
     dsn: conf.sentry_id,
+    maxFileSize: conf.max_file_size,
     layout: false
   });
 });
@@ -227,6 +232,12 @@ app.post('/upload', (req, res, next) => {
         delete: meta.delete,
         id: newId
       });
-    });
+    },
+    err => {
+      if (err.message === 'limit') {
+        return res.sendStatus(413);
+      }
+      res.sendStatus(500);
+    });
 });
 
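The rejection carries a sentinel: storage (next file) rejects with an Error whose message is 'limit' when the size cap caused the failure, and the route maps exactly that to 413, everything else to 500. The same mapping in isolation:

    // illustrative helper, not part of this commit
    function statusForUploadError(err) {
      return err && err.message === 'limit' ? 413 : 500;
    }
    console.log(statusForUploadError(new Error('limit'))); // 413
    console.log(statusForUploadError(new Error('boom')));  // 500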
@@ -129,20 +129,24 @@ function localGet(id) {
 
 function localSet(newId, file, filename, meta) {
   return new Promise((resolve, reject) => {
-    const fstream = fs.createWriteStream(
-      path.join(__dirname, '../static', newId)
-    );
+    const filepath = path.join(__dirname, '../static', newId);
+    const fstream = fs.createWriteStream(filepath);
     file.pipe(fstream);
-    fstream.on('close', () => {
+    file.on('limit', () => {
+      file.unpipe(fstream);
+      fstream.destroy(new Error('limit'));
+    });
+    fstream.on('finish', () => {
       redis_client.hmset(newId, meta);
       redis_client.expire(newId, 86400000);
       log.info('localSet:', 'Upload Finished of ' + newId);
       resolve(meta.delete);
     });
 
-    fstream.on('error', () => {
+    fstream.on('error', err => {
       log.error('localSet:', 'Failed upload of ' + newId);
-      reject();
+      fs.unlinkSync(filepath);
+      reject(err);
     });
   });
 }
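For local storage the abort path is: stop piping, destroy the write stream with the sentinel error, then delete the partial file inside the 'error' handler ('finish' can no longer fire once the stream is destroyed). The pattern in isolation, as a sketch (function name illustrative):

    const fs = require('fs');

    function abortableWrite(source, filepath) {
      return new Promise((resolve, reject) => {
        const fstream = fs.createWriteStream(filepath);
        source.pipe(fstream);
        // busboy emits 'limit' on a truncated file stream
        source.on('limit', () => {
          source.unpipe(fstream);
          fstream.destroy(new Error('limit'));
        });
        fstream.on('finish', resolve);
        fstream.on('error', err => {
          fs.unlinkSync(filepath); // drop the partial upload
          reject(err);             // err.message === 'limit' when the cap was hit
        });
      });
    }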
@@ -211,21 +215,25 @@ function awsSet(newId, file, filename, meta) {
     Key: newId,
     Body: file
   };
 
-  return new Promise((resolve, reject) => {
-    s3.upload(params, function(err, _data) {
-      if (err) {
-        log.info('awsUploadError:', err.stack); // an error occurred
-        reject();
-      } else {
-        redis_client.hmset(newId, meta);
-        redis_client.expire(newId, 86400000);
-        log.info('awsUploadFinish', 'Upload Finished of ' + filename);
-        resolve(meta.delete);
-      }
-    });
-  });
+  let hitLimit = false;
+  const upload = s3.upload(params);
+  file.on('limit', () => {
+    hitLimit = true;
+    upload.abort();
+  });
+  return upload.promise()
+    .then(() => {
+      redis_client.hmset(newId, meta);
+      redis_client.expire(newId, 86400000);
+      log.info('awsUploadFinish', 'Upload Finished of ' + filename);
+    },
+    err => {
+      if (hitLimit) {
+        throw new Error('limit');
+      } else {
+        throw err;
+      }
+    });
 }
 
 function awsDelete(id, delete_token) {
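On S3 the same idea uses the SDK's managed upload: s3.upload() returns a ManagedUpload exposing .abort() and .promise(); aborting rejects the promise, and the hitLimit flag rewrites that rejection into the sentinel 'limit' error. A condensed sketch (helper name illustrative):

    function abortableS3Upload(s3, params, file) {
      let hitLimit = false;
      const upload = s3.upload(params);
      file.on('limit', () => {
        hitLimit = true;
        upload.abort();
      });
      return upload.promise().catch(err => {
        throw hitLimit ? new Error('limit') : err;
      });
    }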
@@ -110,9 +110,9 @@ describe('Testing Set using aws', function() {
   it('Should pass when the file is successfully uploaded', function() {
     const buf = Buffer.alloc(10);
     sinon.stub(crypto, 'randomBytes').returns(buf);
-    s3Stub.upload.callsArgWith(1, null, {});
+    s3Stub.upload.returns({promise: () => Promise.resolve()});
     return storage
-      .set('123', {}, 'Filename.moz', {})
+      .set('123', {on: sinon.stub()}, 'Filename.moz', {})
       .then(() => {
         assert(expire.calledOnce);
         assert(expire.calledWith('123', 86400000));
@@ -121,9 +121,9 @@ describe('Testing Set using aws', function() {
   });
 
   it('Should fail if there was an error during uploading', function() {
-    s3Stub.upload.callsArgWith(1, new Error(), null);
+    s3Stub.upload.returns({promise: () => Promise.reject()});
     return storage
-      .set('123', {}, 'Filename.moz', 'url.com')
+      .set('123', {on: sinon.stub()}, 'Filename.moz', 'url.com')
       .then(_reply => assert.fail())
       .catch(err => assert(1));
   });
@@ -117,12 +117,12 @@ describe('Testing Get from local filesystem', function() {
 describe('Testing Set to local filesystem', function() {
   it('Successfully writes the file to the local filesystem', function() {
     const stub = sinon.stub();
-    stub.withArgs('close', sinon.match.any).callsArgWithAsync(1);
+    stub.withArgs('finish', sinon.match.any).callsArgWithAsync(1);
     stub.withArgs('error', sinon.match.any).returns(1);
     fsStub.createWriteStream.returns({ on: stub });
 
     return storage
-      .set('test', { pipe: sinon.stub() }, 'Filename.moz', {})
+      .set('test', { pipe: sinon.stub(), on: sinon.stub() }, 'Filename.moz', {})
       .then(() => {
         assert(1);
       })
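The stub changes track the new call shapes: s3.upload no longer takes a callback but returns an object exposing .promise(), the local write stream now resolves on 'finish' rather than 'close', and the file argument must look like an event emitter (hence { on: sinon.stub() }). A sketch of the S3 stub on its own (the .abort stub is added here for completeness; the tests above don't exercise it):

    const sinon = require('sinon');
    const s3Stub = { upload: sinon.stub() };
    s3Stub.upload.returns({
      promise: () => Promise.resolve(),
      abort: sinon.stub()
    });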
@@ -4,3 +4,4 @@ window.dsn = '{{{dsn}}}';
 {{#if trackerId}}
 window.trackerId = '{{{trackerId}}}';
 {{/if}}
+const MAXFILESIZE = {{{maxFileSize}}};
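The triple-stash {{{maxFileSize}}} renders the number unescaped, so /jsconfig.js ends with a page-level constant that the /* global MAXFILESIZE */ guard in upload.js reads. With the default config the rendered line would be, for example:

    const MAXFILESIZE = 2147483648;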