fix download workflow tests

This commit is contained in:
Emily 2018-07-11 16:52:46 -07:00
parent 6db3009e5f
commit ff7969a7ef
8 changed files with 327 additions and 99 deletions

View file

@ -1,7 +1,4 @@
import { arrayToB64, b64ToArray, delay } from './utils';
import { ReadableStream as PolyRS } from 'web-streams-polyfill';
import { createReadableStreamWrapper } from '@mattiasbuelens/web-streams-adapter';
const RS = createReadableStreamWrapper(PolyRS);
function post(obj) {
return {
@ -58,10 +55,12 @@ export async function setParams(id, owner_token, params) {
// Fetch the stored metadata for file `id`. `owner_token` is posted to the
// API to prove ownership. Resolves with the parsed JSON info object; on any
// non-2xx response, rejects with an Error whose message is the HTTP status
// (callers match on e.message, e.g. '404').
export async function fileInfo(id, owner_token) {
  const response = await fetch(`/api/info/${id}`, post({ owner_token }));
  if (!response.ok) {
    throw new Error(response.status);
  }
  return response.json();
}
@ -211,23 +210,17 @@ async function downloadS(id, keychain, signal) {
headers: { Authorization: auth }
});
if (response.status !== 200) {
throw new Error(response.status);
}
const authHeader = response.headers.get('WWW-Authenticate');
if (authHeader) {
keychain.nonce = parseNonce(authHeader);
}
const fileSize = response.headers.get('Content-Length');
//right now only chrome allows obtaining a stream from fetch
//for other browsers we fetch as a blob and convert to polyfill stream later
if (response.body) {
return RS(response.body);
if (response.status !== 200) {
throw new Error(response.status);
}
return response.blob();
//const fileSize = response.headers.get('Content-Length');
return response.body;
}
async function tryDownloadStream(id, keychain, signal, tries = 1) {

View file

@ -1,16 +1,4 @@
require('buffer');
/*
import {
TransformStream as PolyTS,
ReadableStream as PolyRS
} from 'web-streams-polyfill';
import {
createReadableStreamWrapper,
createTransformStreamWrapper
} from '@mattiasbuelens/web-streams-adapter';
const toTS = createTransformStreamWrapper(PolyTS);
const toRS = createReadableStreamWrapper(PolyRS);
*/
const NONCE_LENGTH = 12;
const TAG_LENGTH = 16;
@ -362,17 +350,19 @@ export default class ECE {
if (this.input instanceof Blob) {
inputStream = new ReadableStream(
new BlobSlicer(this.input, this.rs, this.mode)
); //inputStream = toRS(new ReadableStream(new BlobSlicer(this.input, this.rs, this.mode)));
);
} else {
// eslint-disable-next-line no-undef
const sliceStream = new TransformStream(
new StreamSlicer(this.rs, this.mode)
); //const sliceStream = toTS(new TransformStream(new StreamSlicer(this.rs, this.mode)));
);
inputStream = this.input.pipeThrough(sliceStream);
}
// eslint-disable-next-line no-undef
const cryptoStream = new TransformStream(
new ECETransformer(this.mode, this.key, this.rs, this.salt)
); //const cryptoStream = toTS(new TransformStream(new ECETransformer(this.mode, this.key, this.rs, this.salt)));
return inputStream.pipeThrough(cryptoStream); //return toRS(inputStream.pipeThrough(cryptoStream));
);
return inputStream.pipeThrough(cryptoStream);
}
}

View file

@ -1,7 +1,7 @@
import Nanobus from 'nanobus';
import Keychain from './keychain';
import { delay, bytes } from './utils';
import { parseNonce, metadata } from './api';
import { metadata } from './api';
export default class FileReceiver extends Nanobus {
constructor(fileInfo) {
@ -72,12 +72,15 @@ export default class FileReceiver extends Nanobus {
const channel = new MessageChannel();
channel.port1.onmessage = function(event) {
if (event.data.error !== undefined) {
if (event.data === undefined) {
reject('bad response from serviceWorker');
} else if (event.data.error !== undefined) {
reject(event.data.error);
} else {
resolve(event.data);
}
};
navigator.serviceWorker.controller.postMessage(msg, [channel.port2]);
});
}
@ -90,26 +93,34 @@ export default class FileReceiver extends Nanobus {
this.downloadRequest = {
cancel: () => {
this.sendMessageToSw('cancel');
//throw new Error(0);
this.sendMessageToSw({ request: 'cancel', id: this.fileInfo.id });
throw new Error(0);
}
};
try {
this.state = 'downloading';
const auth = await this.keychain.authHeader();
const info = {
key: this.fileInfo.secretKey,
nonce: this.fileInfo.nonce,
request: 'init',
id: this.fileInfo.id,
filename: this.fileInfo.name,
auth: auth
key: this.fileInfo.secretKey,
requiresPassword: this.fileInfo.requiresPassword,
password: this.fileInfo.password,
url: this.fileInfo.url,
noSave
};
await this.sendMessageToSw(info);
console.log('SENDING REQUEST FROM PAGE ONCE');
onprogress([0, this.fileInfo.size]);
if (!noSave) {
if (noSave) {
const res = await fetch(`/api/download/${this.fileInfo.id}`);
if (res.status !== 200) {
throw new Error(res.status);
}
} else {
const downloadUrl = `${location.protocol}//${
location.host
}/api/download/${this.fileInfo.id}`;
@ -119,14 +130,13 @@ export default class FileReceiver extends Nanobus {
a.click();
URL.revokeObjectURL(downloadUrl);
const auth = await this.sendMessageToSw('authHeader');
if (auth) {
this.keychain.nonce = parseNonce(auth);
}
let prog = 0;
while (prog < this.fileInfo.size) {
prog = await this.sendMessageToSw('progress');
const msg = await this.sendMessageToSw({
request: 'progress',
id: this.fileInfo.id
});
prog = msg.progress;
onprogress([prog, this.fileInfo.size]);
await delay();
}
@ -137,9 +147,6 @@ export default class FileReceiver extends Nanobus {
this.state = 'complete';
} catch (e) {
this.downloadRequest = null;
if (e === 'cancelled') {
throw new Error(0);
}
throw e;
}
}

View file

@ -1,44 +1,50 @@
import Keychain from './keychain';
import { downloadStream } from './api';
let noSave = false;
const map = new Map();
self.addEventListener('install', event => {
self.skipWaiting();
});
self.addEventListener('activate', event => {
self.clients.claim();
});
async function decryptStream(request) {
self.controller = new AbortController();
//console.log('SW INTERCEPTED DOWNLOAD');
const id = request.url.split('/')[5];
try {
const file = map.get(id);
const response = await fetch(request.url, {
method: 'GET',
headers: { Authorization: self.auth },
signal: controller.signal
});
file.download = downloadStream(id, file.keychain);
if (response.status !== 200) {
return response;
const stream = await file.download.result;
// eslint-disable-next-line no-undef
const progStream = new TransformStream({
transform: (chunk, controller) => {
file.progress += chunk.length;
controller.enqueue(chunk);
}
});
const readStream = stream.pipeThrough(progStream);
const decrypted = file.keychain.decryptStream(readStream);
const headers = {
'Content-Disposition': 'attachment; filename=' + file.filename
};
return new Response(decrypted, { headers });
} catch (e) {
if (noSave) {
return new Response(null, { status: e.message });
}
const redirectRes = await fetch(`/download/${id}`);
return new Response(redirectRes.body, { status: 302 });
}
self.authHeader = response.headers.get('WWW-Authenticate');
const body = response.body; //stream
const progStream = new TransformStream({
transform: (chunk, controller) => {
self.progress += chunk.length;
controller.enqueue(chunk);
}
});
const decrypted = self.keychain.decryptStream(body.pipeThrough(progStream));
const headers = {
headers: {
'Content-Disposition': 'attachment; filename=' + self.filename
}
};
const newRes = new Response(decrypted, headers);
return newRes;
}
self.onfetch = event => {
@ -49,25 +55,32 @@ self.onfetch = event => {
};
self.onmessage = event => {
if (event.data.key) {
self.keychain = new Keychain(event.data.key, event.data.nonce);
self.filename = event.data.filename;
self.auth = event.data.auth;
self.progress = 0;
self.cancelled = false;
if (event.data.request === 'init') {
noSave = event.data.noSave;
const info = {
keychain: new Keychain(event.data.key),
filename: event.data.filename,
progress: 0,
cancelled: false
};
if (event.data.requiresPassword) {
info.keychain.setPassword(event.data.password, event.data.url);
}
map.set(event.data.id, info);
event.ports[0].postMessage('file info received');
} else if (event.data === 'progress') {
if (self.cancelled) {
} else if (event.data.request === 'progress') {
const file = map.get(event.data.id);
if (file.cancelled) {
event.ports[0].postMessage({ error: 'cancelled' });
} else {
event.ports[0].postMessage(self.progress);
event.ports[0].postMessage({ progress: file.progress });
}
} else if (event.data === 'authHeader') {
event.ports[0].postMessage(self.authHeader);
} else if (event.data === 'cancel') {
self.cancelled = true;
if (self.controller) {
self.controller.abort();
} else if (event.data.request === 'cancel') {
const file = map.get(event.data.id);
file.cancelled = true;
if (file.download) {
file.download.cancel();
}
event.ports[0].postMessage('download cancelled');
}

View file

@ -30,7 +30,7 @@
"test:frontend": "cross-env NODE_ENV=development node test/frontend/runner.js && nyc report --reporter=html",
"test-integration": "docker-compose up --abort-on-container-exit --exit-code-from integration-tests --build --remove-orphans --quiet-pull && docker-compose down",
"test-integration-stage": "cross-env BASE_URL=https://send.stage.mozaws.net npm run test-integration",
"start": "npm run clean && cross-env NODE_ENV=development webpack-dev-server",
"start": "npm run clean && cross-env NODE_ENV=development webpack-dev-server --config webpack.dev.config.js",
"prod": "node server/bin/prod.js"
},
"lint-staged": {

View file

@ -1,5 +1,4 @@
const config = require('../config');
const assets = require('../../common/assets');
let sentry = '';
if (config.sentry_id) {
@ -37,7 +36,6 @@ if (isIE && !isUnsupportedPage) {
}
var MAXFILESIZE = ${config.max_file_size};
var EXPIRE_SECONDS = ${config.expire_seconds};
var SERVICEWORKER = '${assets.get('serviceWorker.js')}';
${ga}
${sentry}
`;

View file

@ -8,6 +8,7 @@ const noSave = !headless; // only run the saveFile code if headless
// FileSender uses a File in real life but a Blob works for testing
const blob = new Blob(['hello world!'], { type: 'text/plain' });
blob.name = 'test.txt';
navigator.serviceWorker.register('/serviceWorker.js');
describe('Upload / Download flow', function() {
it('can only download once by default', async function() {
@ -67,7 +68,7 @@ describe('Upload / Download flow', function() {
try {
// We can't decrypt without IV from metadata
// but let's try to download anyway
await fr.download();
await fr.download(noSave);
assert.fail('downloaded file with bad password');
} catch (e) {
assert.equal(e.message, '401');

226
webpack.dev.config.js Normal file
View file

@ -0,0 +1,226 @@
// Development-only webpack config. NOTE(review): this appears to largely
// mirror the main webpack config, with a `serviceWorker` bundle added as its
// own entry point — confirm against webpack.config.js before changing either.
const path = require('path');
const webpack = require('webpack');
const CopyPlugin = require('copy-webpack-plugin');
const ManifestPlugin = require('webpack-manifest-plugin');
const ExtractTextPlugin = require('extract-text-webpack-plugin');

const IS_DEV = process.env.NODE_ENV === 'development';

// Shared babel-loader options for app JS.
const regularJSOptions = {
  babelrc: false,
  presets: [['env', { modules: false }], 'stage-2'],
  // yo-yoify converts html template strings to direct dom api calls
  plugins: ['yo-yoify']
};

const entry = {
  // babel-polyfill and fluent are directly included in vendor
  // because they are not explicitly referenced by app
  vendor: ['babel-polyfill', 'fluent'],
  app: ['./app/main.js'],
  style: ['./app/main.css'],
  // serviceWorker is built as a standalone bundle so the browser can
  // register it at its own URL.
  serviceWorker: ['./app/serviceWorker.js']
};

if (IS_DEV) {
  entry.tests = ['./test/frontend/index.js'];
  // istanbul instruments the source for code coverage
  regularJSOptions.plugins.push('istanbul');
}

module.exports = {
  entry,
  output: {
    filename: '[name].js',
    path: path.resolve(__dirname, 'dist'),
    publicPath: '/'
  },
  // `false` (no source map) outside development.
  devtool: IS_DEV && 'inline-source-map',
  module: {
    rules: [
      {
        test: /\.js$/,
        // oneOf: the first matching branch wins for each .js file.
        oneOf: [
          {
            // The crypto polyfill is shipped as-is as a hashed static asset.
            include: [require.resolve('./assets/cryptofill')],
            use: [
              {
                loader: 'file-loader',
                options: {
                  name: '[name].[hash:8].[ext]'
                }
              }
            ]
          },
          {
            // inlines version from package.json into header/index.js
            include: require.resolve('./app/templates/header'),
            use: [
              {
                loader: 'babel-loader',
                options: regularJSOptions
              },
              './build/version_loader'
            ]
          },
          {
            // fluent gets exposed as a global so that each language script
            // can load independently and share it.
            include: [path.dirname(require.resolve('fluent'))],
            use: [
              {
                loader: 'expose-loader',
                options: 'fluent'
              },
              {
                loader: 'babel-loader',
                options: {
                  presets: [['env', { modules: false }], 'stage-3']
                }
              }
            ]
          },
          {
            // NOTE(review): no loader here — app/ and common/ match this
            // branch and are emitted untranspiled in the dev build.
            // Presumably deliberate (native streams / faster rebuilds);
            // confirm before relying on regularJSOptions applying to app code.
            include: [
              path.resolve(__dirname, 'app'),
              path.resolve(__dirname, 'common')
            ]
          },
          {
            loader: 'babel-loader',
            include: [
              // some dependencies need to get re-babeled because we
              // have different targets than their default configs
              path.resolve(__dirname, 'node_modules/testpilot-ga/src'),
              path.resolve(__dirname, 'node_modules/fluent-intl-polyfill'),
              path.resolve(__dirname, 'node_modules/intl-pluralrules')
            ],
            options: regularJSOptions
          },
          {
            // Strip asserts from our deps, mainly choojs family
            include: [path.resolve(__dirname, 'node_modules')],
            loader: 'webpack-unassert-loader'
          }
        ]
      },
      {
        test: /\.(png|jpg)$/,
        loader: 'file-loader',
        options: {
          name: '[name].[hash:8].[ext]'
        }
      },
      {
        test: /\.svg$/,
        use: [
          {
            loader: 'file-loader',
            options: {
              name: '[name].[hash:8].[ext]'
            }
          },
          {
            loader: 'svgo-loader',
            options: {
              plugins: [
                { removeViewBox: false }, // true causes stretched images
                { convertStyleToAttrs: true }, // for CSP, no unsafe-eval
                { removeTitle: true } // for smallness
              ]
            }
          }
        ]
      },
      {
        // creates style.css with all styles
        test: /\.css$/,
        use: ExtractTextPlugin.extract({
          use: [
            {
              loader: 'css-loader',
              options: { modules: false, importLoaders: 1 }
            },
            'postcss-loader'
          ]
        })
      },
      {
        // creates version.json for /__version__ from package.json
        test: require.resolve('./package.json'),
        use: [
          {
            loader: 'file-loader',
            options: {
              name: 'version.json'
            }
          },
          'extract-loader',
          './build/package_json_loader'
        ]
      },
      {
        // creates a js script for each ftl
        test: /\.ftl$/,
        use: [
          {
            loader: 'file-loader',
            options: {
              name: '[path][name].[hash:8].js'
            }
          },
          'extract-loader',
          './build/fluent_loader'
        ]
      },
      {
        // creates test.js for /test
        test: require.resolve('./test/frontend/index.js'),
        use: ['babel-loader', 'val-loader']
      },
      {
        // loads all assets from assets/ for use by common/assets.js
        test: require.resolve('./build/generate_asset_map.js'),
        use: ['babel-loader', 'val-loader']
      },
      {
        // loads all the ftl from public/locales for use by common/locales.js
        test: require.resolve('./build/generate_l10n_map.js'),
        use: ['babel-loader', 'val-loader']
      }
    ]
  },
  plugins: [
    // Copy static files from public/ into the output root.
    new CopyPlugin([
      {
        context: 'public',
        from: '*.*'
      }
    ]),
    new webpack.IgnorePlugin(/dist/), // used in common/*.js
    new webpack.IgnorePlugin(/require-from-string/), // used in common/locales.js
    // Keep module ids stable across builds for long-term caching.
    new webpack.HashedModuleIdsPlugin(),
    // new webpack.optimize.CommonsChunkPlugin({
    //   name: 'vendor',
    //   minChunks: ({ resource }) => /node_modules/.test(resource)
    // }),
    // new webpack.optimize.CommonsChunkPlugin({
    //   name: 'runtime'
    // }),
    new ExtractTextPlugin({
      filename: 'style.[contenthash:8].css'
    }),
    new ManifestPlugin() // used by server side to resolve hashed assets
  ],
  devServer: {
    compress: true,
    host: '0.0.0.0',
    // Mount the app's express routes onto the dev server in development.
    before: IS_DEV ? require('./server/bin/dev') : undefined,
    proxy: {
      // Forward websocket traffic to the local backend.
      '/api/ws': {
        target: 'ws://localhost:8081',
        ws: true,
        secure: false
      }
    }
  }
};