Implemented multi-file upload/download

This commit is contained in:
Danny Coates 2018-07-25 22:26:11 -07:00
parent b2aed06328
commit 7bf104960e
No known key found for this signature in database
GPG Key ID: 4C442633C62E00CB
18 changed files with 475 additions and 183 deletions

View File

@ -1,4 +1,5 @@
import { arrayToB64, b64ToArray, delay } from './utils';
import { ECE_RECORD_SIZE } from './ece';
function post(obj) {
return {
@ -78,7 +79,8 @@ export async function metadata(id, keychain) {
ttl: data.ttl,
iv: meta.iv,
name: meta.name,
type: meta.type
type: meta.type,
manifest: meta.manifest
};
}
throw new Error(result.response.status);
@ -126,14 +128,7 @@ function listenForResponse(ws, canceller) {
});
}
async function upload(
stream,
streamInfo,
metadata,
verifierB64,
onprogress,
canceller
) {
async function upload(stream, metadata, verifierB64, onprogress, canceller) {
const host = window.location.hostname;
const port = window.location.port;
const protocol = window.location.protocol === 'https:' ? 'wss:' : 'ws:';
@ -166,10 +161,10 @@ async function upload(
ws.send(buf);
onprogress([size, streamInfo.fileSize]);
onprogress(size);
size += buf.length;
state = await reader.read();
while (ws.bufferedAmount > streamInfo.recordSize * 2) {
while (ws.bufferedAmount > ECE_RECORD_SIZE * 2) {
await delay();
}
}
@ -185,7 +180,7 @@ async function upload(
}
}
export function uploadWs(encrypted, info, metadata, verifierB64, onprogress) {
export function uploadWs(encrypted, metadata, verifierB64, onprogress) {
const canceller = { cancelled: false };
return {
@ -193,14 +188,7 @@ export function uploadWs(encrypted, info, metadata, verifierB64, onprogress) {
canceller.error = new Error(0);
canceller.cancelled = true;
},
result: upload(
encrypted,
info,
metadata,
verifierB64,
onprogress,
canceller
)
result: upload(encrypted, metadata, verifierB64, onprogress, canceller)
};
}

33
app/archive.js Normal file
View File

@ -0,0 +1,33 @@
import { blobStream, concatStream } from './streams';
// Groups one or more File/Blob objects into a single logical upload.
// A single file keeps its own name/type; multiple files are presented
// as one zip archive.
export default class Archive {
  constructor(files) {
    this.files = Array.from(files);
  }

  get name() {
    if (this.files.length > 1) {
      return 'Send-Archive.zip';
    }
    return this.files[0].name;
  }

  get type() {
    if (this.files.length > 1) {
      return 'send-archive';
    }
    return this.files[0].type;
  }

  // Combined byte size of every file in the archive.
  get size() {
    let total = 0;
    for (const file of this.files) {
      total += file.size;
    }
    return total;
  }

  // Plain-object listing (name/size/type per file) stored in the
  // encrypted metadata so the receiver can rebuild the zip structure.
  get manifest() {
    const files = this.files.map(({ name, size, type }) => ({
      name,
      size,
      type
    }));
    return { files };
  }

  // Single ReadableStream of all files' bytes, back to back.
  get stream() {
    const streams = this.files.map(file => blobStream(file));
    return concatStream(streams);
  }
}

View File

@ -1,27 +0,0 @@
// Pull-based underlying source that reads a Blob in fixed-size chunks,
// enqueuing each chunk as a Uint8Array via FileReader.
export default class BlobSlicer {
  constructor(blob, size) {
    this.blob = blob;
    this.index = 0; // next byte offset to read
    this.chunkSize = size;
  }

  pull(controller) {
    return new Promise((resolve, reject) => {
      const remaining = this.blob.size - this.index;
      if (remaining <= 0) {
        controller.close();
        resolve();
        return;
      }
      const length = Math.min(this.chunkSize, remaining);
      const slice = this.blob.slice(this.index, this.index + length);
      this.index += length;
      const reader = new FileReader();
      reader.onload = () => {
        controller.enqueue(new Uint8Array(reader.result));
        resolve();
      };
      reader.onerror = reject;
      reader.readAsArrayBuffer(slice);
    });
  }
}

View File

@ -1,5 +1,6 @@
/* global MAXFILESIZE */
const { bytes } = require('./utils');
import Archive from './archive';
import { bytes } from './utils';
export default function(state, emitter) {
emitter.on('DOMContentLoaded', () => {
@ -18,11 +19,8 @@ export default function(state, emitter) {
if (target.files.length === 0) {
return;
}
if (target.files.length > 1) {
// eslint-disable-next-line no-alert
return alert(state.translate('uploadPageMultipleFilesAlert'));
}
const file = target.files[0];
const file = new Archive(target.files);
if (file.size === 0) {
return;
}

View File

@ -1,5 +1,4 @@
import 'buffer';
import BlobSlicer from './blobSlicer';
import { transformStream } from './streams';
const NONCE_LENGTH = 12;
@ -7,7 +6,7 @@ const TAG_LENGTH = 16;
const KEY_LENGTH = 16;
const MODE_ENCRYPT = 'encrypt';
const MODE_DECRYPT = 'decrypt';
const RS = 1024 * 64;
export const ECE_RECORD_SIZE = 1024 * 64;
const encoder = new TextEncoder();
@ -282,52 +281,34 @@ class StreamSlicer {
}
}
// Ciphertext length for a `size`-byte plaintext: a 21-byte prefix plus
// 16 bytes of per-record overhead for each full record boundary crossed
// (each record carries 17 bytes of ECE overhead).
export function encryptedSize(size, rs = ECE_RECORD_SIZE) {
  const fullRecords = Math.floor(size / (rs - 17));
  return 21 + size + fullRecords * 16;
}
/*
input: a blob or a ReadableStream containing data to be transformed
input: a ReadableStream containing data to be transformed
key: Uint8Array containing key of size KEY_LENGTH
mode: string, either 'encrypt' or 'decrypt'
rs: int containing record size, optional
salt: ArrayBuffer containing salt of KEY_LENGTH length, optional
*/
export default class ECE {
constructor(input, key, mode, rs, salt) {
this.input = input;
this.key = key;
this.mode = mode;
this.rs = rs;
this.salt = salt;
if (rs === undefined) {
this.rs = RS;
}
if (salt === undefined) {
this.salt = generateSalt(KEY_LENGTH);
}
}
info() {
return {
recordSize: this.rs,
fileSize:
21 + this.input.size + 16 * Math.floor(this.input.size / (this.rs - 17))
};
}
transform() {
let inputStream;
if (this.input instanceof Blob) {
inputStream = new ReadableStream(
new BlobSlicer(this.input, this.rs - 17)
);
} else {
inputStream = transformStream(
this.input,
new StreamSlicer(this.rs, this.mode)
);
}
return transformStream(
inputStream,
new ECETransformer(this.mode, this.key, this.rs, this.salt)
);
}
/*
Encrypts a ReadableStream with ECE.
input: a ReadableStream containing data to be transformed
key: Uint8Array containing key of size KEY_LENGTH
rs: int containing record size, optional
salt: ArrayBuffer containing salt of KEY_LENGTH length, optional
*/
export function encryptStream(
  input,
  key,
  rs = ECE_RECORD_SIZE,
  salt = generateSalt(KEY_LENGTH)
) {
  // First cut the input into record-sized chunks, then encrypt each record.
  const slicer = new StreamSlicer(rs, 'encrypt');
  const records = transformStream(input, slicer);
  const cipher = new ECETransformer('encrypt', key, rs, salt);
  return transformStream(records, cipher);
}
/*
input: a ReadableStream containing data to be transformed
key: Uint8Array containing key of size KEY_LENGTH
rs: int containing record size, optional
*/
export function decryptStream(input, key, rs = ECE_RECORD_SIZE) {
  // Re-chunk the ciphertext into records, then decrypt record by record.
  const slicer = new StreamSlicer(rs, 'decrypt');
  const records = transformStream(input, slicer);
  return transformStream(records, new ECETransformer('decrypt', key, rs));
}

View File

@ -48,6 +48,7 @@ export default class FileReceiver extends Nanobus {
this.fileInfo.type = meta.type;
this.fileInfo.iv = meta.iv;
this.fileInfo.size = meta.size;
this.fileInfo.manifest = meta.manifest;
this.state = 'ready';
}
@ -105,6 +106,7 @@ export default class FileReceiver extends Nanobus {
id: this.fileInfo.id,
filename: this.fileInfo.name,
type: this.fileInfo.type,
manifest: this.fileInfo.manifest,
key: this.fileInfo.secretKey,
requiresPassword: this.fileInfo.requiresPassword,
password: this.fileInfo.password,

View File

@ -4,6 +4,7 @@ import OwnedFile from './ownedFile';
import Keychain from './keychain';
import { arrayToB64, bytes } from './utils';
import { uploadWs } from './api';
import { encryptedSize } from './ece';
export default class FileSender extends Nanobus {
constructor(file) {
@ -64,21 +65,15 @@ export default class FileSender extends Nanobus {
}
this.msg = 'encryptingFile';
this.emit('encrypting');
const enc = await this.keychain.encryptStream(this.file);
const totalSize = encryptedSize(this.file.size);
const encStream = await this.keychain.encryptStream(this.file.stream);
const metadata = await this.keychain.encryptMetadata(this.file);
const authKeyB64 = await this.keychain.authKeyB64();
this.uploadRequest = uploadWs(
enc.stream,
enc.streamInfo,
metadata,
authKeyB64,
p => {
this.progress = p;
this.emit('progress');
}
);
this.uploadRequest = uploadWs(encStream, metadata, authKeyB64, p => {
this.progress = [p, totalSize];
this.emit('progress');
});
if (this.cancelled) {
throw new Error(0);

View File

@ -1,5 +1,5 @@
import { arrayToB64, b64ToArray } from './utils';
import ECE from './ece.js';
import { decryptStream, encryptStream } from './ece.js';
const encoder = new TextEncoder();
const decoder = new TextDecoder();
@ -173,24 +173,20 @@ export default class Keychain {
iv: arrayToB64(this.iv),
name: metadata.name,
size: metadata.size,
type: metadata.type || 'application/octet-stream'
type: metadata.type || 'application/octet-stream',
manifest: metadata.manifest || {}
})
)
);
return ciphertext;
}
encryptStream(plaintext) {
const ece = new ECE(plaintext, this.rawSecret, 'encrypt');
return {
stream: ece.transform(),
streamInfo: ece.info()
};
encryptStream(plainStream) {
return encryptStream(plainStream, this.rawSecret);
}
decryptStream(cryptotext) {
const ece = new ECE(cryptotext, this.rawSecret, 'decrypt');
return ece.transform();
return decryptStream(cryptotext, this.rawSecret);
}
async decryptFile(ciphertext) {

View File

@ -33,6 +33,7 @@ module.exports = function(state, emit) {
<input id="file-upload"
class="inputFile"
type="file"
multiple
name="fileUploaded"
onfocus=${onfocus}
onblur=${onblur}
@ -67,8 +68,10 @@ module.exports = function(state, emit) {
async function upload(event) {
event.preventDefault();
const Archive = require('../../archive').default;
const target = event.target;
const file = target.files[0];
const file = new Archive(target.files);
if (file.size === 0) {
return;
}

View File

@ -1,6 +1,7 @@
import Keychain from './keychain';
import { downloadStream } from './api';
import { transformStream } from './streams';
import Zip from './zip';
import contentDisposition from 'content-disposition';
let noSave = false;
@ -20,6 +21,8 @@ async function decryptStream(id) {
return new Response(null, { status: 400 });
}
try {
let size = file.size;
let type = file.type;
const keychain = new Keychain(file.key, file.nonce);
if (file.requiresPassword) {
keychain.setPassword(file.password, file.url);
@ -30,8 +33,16 @@ async function decryptStream(id) {
const body = await file.download.result;
const decrypted = keychain.decryptStream(body);
let zipStream = null;
if (file.type === 'send-archive') {
const zip = new Zip(file.manifest, decrypted);
zipStream = zip.stream;
type = 'application/zip';
size = zip.size;
}
const readStream = transformStream(
decrypted,
zipStream || decrypted,
{
transform(chunk, controller) {
file.progress += chunk.length;
@ -48,8 +59,8 @@ async function decryptStream(id) {
const headers = {
'Content-Disposition': contentDisposition(file.filename),
'Content-Type': file.type,
'Content-Length': file.size
'Content-Type': type,
'Content-Length': size
};
return new Response(readStream, { headers });
} catch (e) {
@ -81,6 +92,7 @@ self.onmessage = event => {
password: event.data.password,
url: event.data.url,
type: event.data.type,
manifest: event.data.manifest,
size: event.data.size,
progress: 0
};

View File

@ -1,40 +1,103 @@
/* global ReadableStream TransformStream */
export function transformStream(readable, transformer, oncancel) {
if (typeof TransformStream === 'function') {
try {
return readable.pipeThrough(new TransformStream(transformer));
}
const reader = readable.getReader();
return new ReadableStream({
start(controller) {
if (transformer.start) {
return transformer.start(controller);
}
},
async pull(controller) {
let enqueued = false;
const wrappedController = {
enqueue(d) {
enqueued = true;
controller.enqueue(d);
} catch (e) {
const reader = readable.getReader();
return new ReadableStream({
start(controller) {
if (transformer.start) {
return transformer.start(controller);
}
};
while (!enqueued) {
const data = await reader.read();
if (data.done) {
if (transformer.flush) {
await transformer.flush(controller);
},
async pull(controller) {
let enqueued = false;
const wrappedController = {
enqueue(d) {
enqueued = true;
controller.enqueue(d);
}
return controller.close();
};
while (!enqueued) {
const data = await reader.read();
if (data.done) {
if (transformer.flush) {
await transformer.flush(controller);
}
return controller.close();
}
await transformer.transform(data.value, wrappedController);
}
},
cancel(reason) {
readable.cancel(reason);
if (oncancel) {
oncancel(reason);
}
await transformer.transform(data.value, wrappedController);
}
},
cancel(reason) {
readable.cancel(reason);
if (oncancel) {
oncancel(reason);
}
}
});
});
}
}
// Pull-based underlying source that reads a Blob in fixed-size chunks
// (default 64 KiB) using FileReader, enqueuing Uint8Arrays.
class BlobStreamController {
  constructor(blob, size) {
    this.blob = blob;
    this.index = 0; // next byte offset to read
    this.chunkSize = size || 1024 * 64;
  }

  pull(controller) {
    return new Promise((resolve, reject) => {
      const remaining = this.blob.size - this.index;
      if (remaining <= 0) {
        controller.close();
        resolve();
        return;
      }
      const length = Math.min(this.chunkSize, remaining);
      const chunk = this.blob.slice(this.index, this.index + length);
      this.index += length;
      const reader = new FileReader();
      reader.onload = () => {
        controller.enqueue(new Uint8Array(reader.result));
        resolve();
      };
      reader.onerror = reject;
      reader.readAsArrayBuffer(chunk);
    });
  }
}
// Wraps a Blob in a ReadableStream of Uint8Array chunks.
export function blobStream(blob, size) {
  const source = new BlobStreamController(blob, size);
  return new ReadableStream(source);
}
// Pull-based underlying source that drains several ReadableStreams in
// order, presenting them as one continuous stream.
class ConcatStreamController {
  constructor(streams) {
    this.streams = streams;
    this.index = 0;
    this.reader = null;
    this.nextReader();
  }

  // Acquire a reader on the next stream; `reader` becomes falsy when
  // every stream has been consumed.
  nextReader() {
    const stream = this.streams[this.index];
    this.index += 1;
    this.reader = stream ? stream.getReader() : stream;
  }

  async pull(controller) {
    while (this.reader) {
      const { done, value } = await this.reader.read();
      if (!done) {
        controller.enqueue(value);
        return;
      }
      // current stream finished; fall through to the next one
      this.nextReader();
    }
    controller.close();
  }
}
// Concatenates an array of ReadableStreams into a single ReadableStream.
export function concatStream(streams) {
  const source = new ConcatStreamController(streams);
  return new ReadableStream(source);
}

186
app/zip.js Normal file
View File

@ -0,0 +1,186 @@
import crc32 from 'crc/crc32';
const encoder = new TextEncoder();

// Converts a JS Date to the packed MS-DOS date/time fields used by zip.
// date: bits 15-9 years since 1980, 8-5 month (1-12), 4-0 day.
// time: bits 15-11 hours, 10-5 minutes, 4-0 seconds / 2.
function dosDateTime(dateTime = new Date()) {
  const year = (dateTime.getFullYear() - 1980) << 9;
  const month = (dateTime.getMonth() + 1) << 5;
  const day = dateTime.getDate();
  const date = year | month | day;
  const hour = dateTime.getHours() << 11;
  const minute = dateTime.getMinutes() << 5;
  const second = Math.floor(dateTime.getSeconds() / 2);
  const time = hour | minute | second;
  return { date, time };
}

// One entry in the zip archive. Tracks how many of its bytes have been
// consumed from the source stream and accumulates its CRC-32 as data
// flows through `append`.
class File {
  constructor(info) {
    this.name = encoder.encode(info.name);
    this.size = info.size;
    this.bytesRead = 0;
    this.crc = null; // accumulated by append(); written in the descriptor
    this.dateTime = dosDateTime();
  }

  // Local file header (30 bytes + name). CRC and sizes are zero here
  // because bit flag 3 (value 8) defers them to the data descriptor.
  get header() {
    const h = new ArrayBuffer(30 + this.name.byteLength);
    const v = new DataView(h);
    v.setUint32(0, 0x04034b50, true); // sig
    v.setUint16(4, 20, true); // version
    v.setUint16(6, 8, true); // bit flags (8 = use data descriptor)
    v.setUint16(8, 0, true); // compression (0 = stored)
    v.setUint16(10, this.dateTime.time, true); // modified time
    v.setUint16(12, this.dateTime.date, true); // modified date
    v.setUint32(14, 0, true); // crc32 (in descriptor)
    v.setUint32(18, 0, true); // compressed size (in descriptor)
    v.setUint32(22, 0, true); // uncompressed size (in descriptor)
    v.setUint16(26, this.name.byteLength, true); // name length
    v.setUint16(28, 0, true); // extra field length
    new Uint8Array(h, 30).set(this.name);
    return new Uint8Array(h);
  }

  // Data descriptor emitted after the file data: sig, crc, both sizes.
  get dataDescriptor() {
    const dd = new ArrayBuffer(16);
    const v = new DataView(dd);
    v.setUint32(0, 0x08074b50, true); // sig
    v.setUint32(4, this.crc, true); // crc32
    v.setUint32(8, this.size, true); // compressed size
    // BUGFIX: this is a 4-byte field; the original wrote it with
    // setUint16, truncating uncompressed sizes >= 64 KiB.
    v.setUint32(12, this.size, true); // uncompressed size
    return new Uint8Array(dd);
  }

  // Central directory record (46 bytes + name) for this entry, placed at
  // `offset` bytes from the start of the archive.
  // NOTE(review): bit flags here are 0 while the local header sets 8;
  // per APPNOTE they should normally match — confirm intentional.
  directoryRecord(offset) {
    const dr = new ArrayBuffer(46 + this.name.byteLength);
    const v = new DataView(dr);
    v.setUint32(0, 0x02014b50, true); // sig
    v.setUint16(4, 20, true); // version made
    v.setUint16(6, 20, true); // version required
    v.setUint16(8, 0, true); // bit flags
    v.setUint16(10, 0, true); // compression
    v.setUint16(12, this.dateTime.time, true); // modified time
    v.setUint16(14, this.dateTime.date, true); // modified date
    v.setUint32(16, this.crc, true); // crc
    v.setUint32(20, this.size, true); // compressed size
    v.setUint32(24, this.size, true); // uncompressed size
    v.setUint16(28, this.name.byteLength, true); // name length
    v.setUint16(30, 0, true); // extra length
    v.setUint16(32, 0, true); // comment length
    v.setUint16(34, 0, true); // disk number
    v.setUint16(36, 0, true); // internal file attrs
    v.setUint32(38, 0, true); // external file attrs
    v.setUint32(42, offset, true); // file offset
    new Uint8Array(dr, 46).set(this.name);
    return new Uint8Array(dr);
  }

  // Bytes this entry occupies in the archive body: header + data + descriptor.
  get byteLength() {
    return this.size + this.name.byteLength + 30 + 16;
  }

  // Folds `data` into the running CRC and enqueues the portion belonging
  // to this file; returns any overflow bytes for the next file.
  append(data, controller) {
    this.bytesRead += data.byteLength;
    const endIndex = data.byteLength - Math.max(this.bytesRead - this.size, 0);
    const buf = data.slice(0, endIndex);
    this.crc = crc32(buf, this.crc);
    controller.enqueue(buf);
    if (endIndex < data.byteLength) {
      return data.slice(endIndex, data.byteLength);
    }
  }
}
// Emits every file's central directory record followed by the
// end-of-central-directory record.
function centralDirectory(files, controller) {
  let offset = 0; // running offset of each entry's local header
  let size = 0; // total bytes of directory records emitted
  for (const file of files) {
    const record = file.directoryRecord(offset);
    offset += file.byteLength;
    size += record.byteLength;
    controller.enqueue(record);
  }
  controller.enqueue(eod(files.length, size, offset));
}

// End-of-central-directory record: fixed 22 bytes, no archive comment.
function eod(fileCount, directorySize, directoryOffset) {
  const buf = new ArrayBuffer(22);
  const v = new DataView(buf);
  v.setUint32(0, 0x06054b50, true); // sig
  v.setUint16(4, 0, true); // disk number
  v.setUint16(6, 0, true); // directory disk
  v.setUint16(8, fileCount, true); // number of records
  v.setUint16(10, fileCount, true); // total records
  v.setUint32(12, directorySize, true); // size of directory
  v.setUint32(16, directoryOffset, true); // offset of directory
  v.setUint16(20, 0, true); // comment length
  return new Uint8Array(buf);
}
// Pull source that zips `files` on the fly: it reads the files'
// concatenated bytes from a single `source` stream and interleaves the
// zip structures (local headers, data descriptors, central directory),
// using the manifest sizes to find file boundaries.
class ZipStreamController {
  constructor(files, source) {
    this.files = files;
    this.fileIndex = 0;
    this.file = null; // current File entry being written
    this.reader = source.getReader();
    this.nextFile();
    // Leftover bytes from a source chunk that crossed a file boundary;
    // they belong to the next file and are replayed when it starts.
    this.extra = null;
  }
  // Advance to the next entry; `file` becomes undefined past the last one.
  nextFile() {
    this.file = this.files[this.fileIndex++];
  }
  async pull(controller) {
    if (!this.file) {
      // end of archive
      centralDirectory(this.files, controller);
      return controller.close();
    }
    if (this.file.bytesRead === 0) {
      // beginning of file
      controller.enqueue(this.file.header);
      if (this.extra) {
        // replay carried-over bytes that already belong to this file
        this.extra = this.file.append(this.extra, controller);
      }
    }
    if (this.file.bytesRead >= this.file.size) {
      // end of file
      controller.enqueue(this.file.dataDescriptor);
      this.nextFile();
      return this.pull(controller);
    }
    const data = await this.reader.read();
    if (data.done) {
      // source ended; advance to finalize remaining entries
      // NOTE(review): assumes manifest sizes match the stream — confirm
      this.nextFile();
      return this.pull(controller);
    }
    // append may hand back overflow bytes belonging to the next file
    this.extra = this.file.append(data.value, controller);
  }
}
// Builds a (stored, uncompressed) zip archive from a manifest of file
// metadata and a single source stream of the files' concatenated bytes.
export default class Zip {
  constructor(manifest, source) {
    this.files = manifest.files.map(info => new File(info));
    this.source = source;
  }

  // ReadableStream of the complete archive's bytes.
  get stream() {
    const controller = new ZipStreamController(this.files, this.source);
    return new ReadableStream(controller);
  }

  // Exact archive size: each entry contributes its local header + data +
  // descriptor (byteLength) plus a central directory record
  // (byteLength - size more bytes), then the 22-byte end record.
  get size() {
    let entries = 0;
    for (const file of this.files) {
      entries += file.byteLength * 2 - file.size;
    }
    const eodLength = 22;
    return entries + eodLength;
  }
}

21
package-lock.json generated
View File

@ -2788,6 +2788,27 @@
}
}
},
"crc": {
"version": "3.7.0",
"resolved": "https://registry.npmjs.org/crc/-/crc-3.7.0.tgz",
"integrity": "sha512-ZwmUex488OBjSVOMxnR/dIa1yxisBMJNEi+UxzXpKhax8MPsQtoRQtl5Qgo+W7pcSVkRXa3BEVjaniaWKtvKvw==",
"dev": true,
"requires": {
"buffer": "5.1.0"
},
"dependencies": {
"buffer": {
"version": "5.1.0",
"resolved": "https://registry.npmjs.org/buffer/-/buffer-5.1.0.tgz",
"integrity": "sha512-YkIRgwsZwJWTnyQrsBTWefizHh+8GYj3kbL1BTiAQ/9pwpino0G7B2gp5tx/FUBqUlvtxV85KNR3mwfAtv15Yw==",
"dev": true,
"requires": {
"base64-js": "1.3.0",
"ieee754": "1.1.8"
}
}
}
},
"create-ecdh": {
"version": "4.0.3",
"resolved": "https://registry.npmjs.org/create-ecdh/-/create-ecdh-4.0.3.tgz",

View File

@ -68,6 +68,7 @@
"base64-js": "^1.3.0",
"content-disposition": "^0.5.2",
"copy-webpack-plugin": "^4.5.2",
"crc": "^3.7.0",
"cross-env": "^5.2.0",
"css-loader": "^1.0.0",
"css-mqpacker": "^6.0.2",

View File

@ -1,9 +1,10 @@
import assert from 'assert';
import Archive from '../../../app/archive';
import * as api from '../../../app/api';
import Keychain from '../../../app/keychain';
const encoder = new TextEncoder();
const plaintext = new Blob([encoder.encode('hello world!')]);
const plaintext = new Archive([new Blob([encoder.encode('hello world!')])]);
const metadata = {
name: 'test.txt',
type: 'text/plain'
@ -13,11 +14,11 @@ describe('API', function() {
describe('websocket upload', function() {
it('returns file info on success', async function() {
const keychain = new Keychain();
const enc = await keychain.encryptStream(plaintext);
const enc = await keychain.encryptStream(plaintext.stream);
const meta = await keychain.encryptMetadata(metadata);
const verifierB64 = await keychain.authKeyB64();
const p = function() {};
const up = api.uploadWs(enc.stream, enc.streamInfo, meta, verifierB64, p);
const up = api.uploadWs(enc, meta, verifierB64, p);
const result = await up.result;
assert.ok(result.url);
@ -27,11 +28,11 @@ describe('API', function() {
it('can be cancelled', async function() {
const keychain = new Keychain();
const enc = await keychain.encryptStream(plaintext);
const enc = await keychain.encryptStream(plaintext.stream);
const meta = await keychain.encryptMetadata(metadata);
const verifierB64 = await keychain.authKeyB64();
const p = function() {};
const up = api.uploadWs(enc.stream, enc.streamInfo, meta, verifierB64, p);
const up = api.uploadWs(enc, meta, verifierB64, p);
up.cancel();
try {
await up.result;

View File

@ -1,17 +1,19 @@
import assert from 'assert';
import FileSender from '../../../app/fileSender';
import Archive from '../../../app/archive';
// FileSender uses a File in real life but a Blob works for testing
const blob = new Blob(['hello world!'], { type: 'text/plain' });
blob.name = 'text.txt';
const archive = new Archive([blob]);
describe('FileSender', function() {
describe('upload', function() {
it('returns an OwnedFile on success', async function() {
const fs = new FileSender(blob);
const fs = new FileSender(archive);
const file = await fs.upload();
assert.ok(file.id);
assert.equal(file.name, blob.name);
assert.equal(file.name, archive.name);
});
});
});

View File

@ -2,9 +2,10 @@ const ece = require('http_ece');
require('buffer');
import assert from 'assert';
import Archive from '../../../app/archive';
import { b64ToArray } from '../../../app/utils';
import BlobSlicer from '../../../app/blobSlicer';
import ECE from '../../../app/ece.js';
import { blobStream, concatStream } from '../../../app/streams';
import { decryptStream, encryptStream } from '../../../app/ece.js';
const rs = 36;
@ -25,15 +26,52 @@ const encrypted = ece.encrypt(buffer, params);
const decrypted = ece.decrypt(encrypted, params);
describe('Streaming', function() {
describe('blobStream', function() {
it('reads the entire blob', async function() {
const len = 12345;
const chunkSize = 1024;
const blob = new Blob([new Uint8Array(len)]);
const stream = blobStream(blob, chunkSize);
const reader = stream.getReader();
let bytes = 0;
let data = await reader.read();
while (!data.done) {
bytes += data.value.byteLength;
assert.ok(data.value.byteLength <= chunkSize, 'chunk too big');
data = await reader.read();
}
assert.equal(bytes, len);
});
});
describe('concatStream', function() {
it('reads all the streams', async function() {
const count = 5;
const len = 12345;
const streams = Array.from({ length: count }, () =>
blobStream(new Blob([new Uint8Array(len)]))
);
const concat = concatStream(streams);
const reader = concat.getReader();
let bytes = 0;
let data = await reader.read();
while (!data.done) {
bytes += data.value.byteLength;
data = await reader.read();
}
assert.equal(bytes, len * count);
});
});
//testing against http_ece's implementation
describe('ECE', function() {
const key = b64ToArray(keystr);
const salt = b64ToArray(testSalt).buffer;
const blob = new Blob([str], { type: 'text/plain' });
it('can encrypt', async function() {
const ece = new ECE(blob, key, 'encrypt', rs, salt);
const encStream = await ece.transform();
const stream = new Archive([new Blob([str], { type: 'text/plain' })])
.stream;
const encStream = encryptStream(stream, key, rs, salt);
const reader = encStream.getReader();
let result = Buffer.from([]);
@ -48,11 +86,8 @@ describe('Streaming', function() {
});
it('can decrypt', async function() {
const blobStream = new ReadableStream(
new BlobSlicer(new Blob([encrypted]), rs)
);
const ece = new ECE(blobStream, key, 'decrypt', rs);
const decStream = await ece.transform();
const stream = new Archive([new Blob([encrypted])]).stream;
const decStream = decryptStream(stream, key, rs);
const reader = decStream.getReader();
let result = Buffer.from([]);

View File

@ -1,4 +1,5 @@
import assert from 'assert';
import Archive from '../../../app/archive';
import FileSender from '../../../app/fileSender';
import FileReceiver from '../../../app/fileReceiver';
@ -11,12 +12,13 @@ const noSave = true || !headless; // only run the saveFile code if headless
// FileSender uses a File in real life but a Blob works for testing
const blob = new Blob([new ArrayBuffer(1024 * 128)], { type: 'text/plain' });
blob.name = 'test.txt';
const archive = new Archive([blob]);
navigator.serviceWorker.register('/serviceWorker.js');
describe('Upload / Download flow', function() {
this.timeout(0);
it('can only download once by default', async function() {
const fs = new FileSender(blob);
const fs = new FileSender(archive);
const file = await fs.upload();
const fr = new FileReceiver({
secretKey: file.toJSON().secretKey,
@ -36,7 +38,7 @@ describe('Upload / Download flow', function() {
});
it('downloads with the correct password', async function() {
const fs = new FileSender(blob);
const fs = new FileSender(archive);
const file = await fs.upload();
await file.setPassword('magic');
const fr = new FileReceiver({
@ -53,7 +55,7 @@ describe('Upload / Download flow', function() {
});
it('blocks invalid passwords from downloading', async function() {
const fs = new FileSender(blob);
const fs = new FileSender(archive);
const file = await fs.upload();
await file.setPassword('magic');
const fr = new FileReceiver({
@ -81,7 +83,7 @@ describe('Upload / Download flow', function() {
});
it('retries a bad nonce', async function() {
const fs = new FileSender(blob);
const fs = new FileSender(archive);
const file = await fs.upload();
const fr = new FileReceiver({
secretKey: file.toJSON().secretKey,
@ -90,11 +92,11 @@ describe('Upload / Download flow', function() {
requiresPassword: false
});
await fr.getMetadata();
assert.equal(fr.fileInfo.name, blob.name);
assert.equal(fr.fileInfo.name, archive.name);
});
it('can cancel the upload', async function() {
const fs = new FileSender(blob);
const fs = new FileSender(archive);
const up = fs.upload();
fs.cancel(); // before encrypting
try {
@ -122,7 +124,7 @@ describe('Upload / Download flow', function() {
});
it('can cancel the download', async function() {
const fs = new FileSender(blob);
const fs = new FileSender(archive);
const file = await fs.upload();
const fr = new FileReceiver({
secretKey: file.toJSON().secretKey,
@ -142,7 +144,7 @@ describe('Upload / Download flow', function() {
it('can increase download count on download', async function() {
this.timeout(0);
const fs = new FileSender(blob);
const fs = new FileSender(archive);
const file = await fs.upload();
const fr = new FileReceiver({
secretKey: file.toJSON().secretKey,
@ -157,7 +159,7 @@ describe('Upload / Download flow', function() {
});
it('does not increase download count when download cancelled', async function() {
const fs = new FileSender(blob);
const fs = new FileSender(archive);
const file = await fs.upload();
const fr = new FileReceiver({
secretKey: file.toJSON().secretKey,
@ -178,7 +180,7 @@ describe('Upload / Download flow', function() {
});
it('can allow multiple downloads', async function() {
const fs = new FileSender(blob);
const fs = new FileSender(archive);
const file = await fs.upload();
const fr = new FileReceiver({
secretKey: file.toJSON().secretKey,
@ -204,7 +206,7 @@ describe('Upload / Download flow', function() {
});
it('can delete the file before download', async function() {
const fs = new FileSender(blob);
const fs = new FileSender(archive);
const file = await fs.upload();
const fr = new FileReceiver({
secretKey: file.toJSON().secretKey,