Fixed the size limit on the server to include crypto overhead

This commit is contained in:
Danny Coates 2019-03-06 10:31:50 -08:00
parent dce8b6e525
commit 7f9674f494
No known key found for this signature in database
GPG key ID: 4C442633C62E00CB
7 changed files with 24 additions and 11 deletions

View file

@@ -281,11 +281,6 @@ class StreamSlicer {
} }
} }
/**
 * Size in bytes of the ciphertext produced by encryptStream for a
 * plaintext of `size` bytes (RFC 8188 aes128gcm encoding).
 *
 * @param {number} size - Plaintext length in bytes.
 * @param {number} [rs=ECE_RECORD_SIZE] - Record size used by the encoder.
 * @param {number} [tagLength=TAG_LENGTH] - AES-GCM auth tag length; accepted as
 *   a parameter for consistency with the copy of this helper in app/utils.
 * @returns {number} Total encrypted size in bytes.
 */
export function encryptedSize(size, rs = ECE_RECORD_SIZE, tagLength = TAG_LENGTH) {
  // Per-record overhead: the GCM auth tag plus the 1-byte padding delimiter.
  const chunkMeta = tagLength + 1;
  // 21 = aes128gcm header: 16-byte salt + 4-byte record size + 1-byte keyid length.
  return 21 + size + chunkMeta * Math.ceil(size / (rs - chunkMeta));
}
/* /*
input: a ReadableStream containing data to be transformed input: a ReadableStream containing data to be transformed
key: Uint8Array containing key of size KEY_LENGTH key: Uint8Array containing key of size KEY_LENGTH

View file

@ -3,7 +3,7 @@ import OwnedFile from './ownedFile';
import Keychain from './keychain'; import Keychain from './keychain';
import { arrayToB64, bytes } from './utils'; import { arrayToB64, bytes } from './utils';
import { uploadWs } from './api'; import { uploadWs } from './api';
import { encryptedSize } from './ece'; import { encryptedSize } from './utils';
export default class FileSender extends Nanobus { export default class FileSender extends Nanobus {
constructor() { constructor() {

View file

@ -249,6 +249,13 @@ function platform() {
return 'web'; return 'web';
} }
// Defaults mirroring app/ece.js so the server can size upload limits
// without importing the crypto module.
const ECE_RECORD_SIZE = 1024 * 64;
const TAG_LENGTH = 16;

/**
 * Total ciphertext size for a plaintext of `size` bytes encoded with
 * RFC 8188 aes128gcm: a 21-byte header plus, per record, a
 * tagLength-byte auth tag and a 1-byte padding delimiter.
 * NOTE(review): yields 21 for size === 0, i.e. assumes no records are
 * emitted for empty input — confirm against the encoder if that case matters.
 *
 * @param {number} size - Plaintext length in bytes.
 * @param {number} [rs] - Record size used by the encoder.
 * @param {number} [tagLength] - AES-GCM auth tag length.
 * @returns {number} Total encrypted size in bytes.
 */
function encryptedSize(size, rs = ECE_RECORD_SIZE, tagLength = TAG_LENGTH) {
  const overheadPerRecord = tagLength + 1; // tag + delimiter
  const recordCount = Math.ceil(size / (rs - overheadPerRecord));
  return 21 + size + recordCount * overheadPerRecord;
}
module.exports = { module.exports = {
fadeOut, fadeOut,
delay, delay,
@ -267,5 +274,6 @@ module.exports = {
list, list,
secondsToL10nId, secondsToL10nId,
timeLeft, timeLeft,
platform platform,
encryptedSize
}; };

View file

@ -11,6 +11,7 @@ class Limiter extends Transform {
this.length += chunk.length; this.length += chunk.length;
this.push(chunk); this.push(chunk);
if (this.length > this.limit) { if (this.length > this.limit) {
console.error('LIMIT', this.length, this.limit);
return callback(new Error('limit')); return callback(new Error('limit'));
} }
callback(); callback();

View file

@ -3,6 +3,7 @@ const storage = require('../storage');
const config = require('../config'); const config = require('../config');
const mozlog = require('../log'); const mozlog = require('../log');
const Limiter = require('../limiter'); const Limiter = require('../limiter');
const { encryptedSize } = require('../../app/utils');
const log = mozlog('send.upload'); const log = mozlog('send.upload');
@ -22,7 +23,7 @@ module.exports = async function(req, res) {
}; };
try { try {
const limiter = new Limiter(config.max_file_size); const limiter = new Limiter(encryptedSize(config.max_file_size));
const fileStream = req.pipe(limiter); const fileStream = req.pipe(limiter);
//this hasn't been updated to expiration time setting yet //this hasn't been updated to expiration time setting yet
//if you want to fallback to this code add this //if you want to fallback to this code add this

View file

@ -6,6 +6,7 @@ const Limiter = require('../limiter');
const wsStream = require('websocket-stream/stream'); const wsStream = require('websocket-stream/stream');
const fxa = require('../fxa'); const fxa = require('../fxa');
const { statUploadEvent } = require('../amplitude'); const { statUploadEvent } = require('../amplitude');
const { encryptedSize } = require('../../app/utils');
const { Duplex } = require('stream'); const { Duplex } = require('stream');
@ -74,7 +75,7 @@ module.exports = function(ws, req) {
id: newId id: newId
}) })
); );
const limiter = new Limiter(maxFileSize); const limiter = new Limiter(encryptedSize(maxFileSize));
const flowControl = new Duplex({ const flowControl = new Duplex({
read() { read() {
ws.resume(); ws.resume();
@ -92,8 +93,8 @@ module.exports = function(ws, req) {
}); });
fileStream = wsStream(ws, { binary: true }) fileStream = wsStream(ws, { binary: true })
.pipe(limiter) .pipe(flowControl)
.pipe(flowControl); .pipe(limiter); // limiter needs to be the last in the chain
await storage.set(newId, fileStream, meta, timeLimit); await storage.set(newId, fileStream, meta, timeLimit);

View file

@ -6,6 +6,7 @@ import Archive from '../../../app/archive';
import { b64ToArray } from '../../../app/utils'; import { b64ToArray } from '../../../app/utils';
import { blobStream, concatStream } from '../../../app/streams'; import { blobStream, concatStream } from '../../../app/streams';
import { decryptStream, encryptStream } from '../../../app/ece.js'; import { decryptStream, encryptStream } from '../../../app/ece.js';
import { encryptedSize } from '../../../app/utils';
const rs = 36; const rs = 36;
@ -101,4 +102,10 @@ describe('Streaming', function() {
assert.deepEqual(result, decrypted); assert.deepEqual(result, decrypted);
}); });
}); });
describe('encryptedSize', function() {
it('matches the size of an encrypted buffer', function() {
assert.equal(encryptedSize(buffer.length, rs), encrypted.length);
});
});
}); });