
fixed size limit on server to include crypto overhead

Danny Coates 2019-03-06 10:31:50 -08:00
parent dce8b6e525
commit 7f9674f494
No known key found for this signature in database
GPG Key ID: 4C442633C62E00CB
7 changed files with 24 additions and 11 deletions

View File

@@ -281,11 +281,6 @@ class StreamSlicer {
}
}
export function encryptedSize(size, rs = ECE_RECORD_SIZE) {
const chunk_meta = TAG_LENGTH + 1; // Chunk metadata, tag and delimiter
return 21 + size + chunk_meta * Math.ceil(size / (rs - chunk_meta));
}
/*
input: a ReadableStream containing data to be transformed
key: Uint8Array containing key of size KEY_LENGTH

View File

@@ -3,7 +3,7 @@ import OwnedFile from './ownedFile';
import Keychain from './keychain';
import { arrayToB64, bytes } from './utils';
import { uploadWs } from './api';
import { encryptedSize } from './ece';
import { encryptedSize } from './utils';
export default class FileSender extends Nanobus {
constructor() {

View File

@@ -249,6 +249,13 @@ function platform() {
return 'web';
}
const ECE_RECORD_SIZE = 1024 * 64;
const TAG_LENGTH = 16;
function encryptedSize(size, rs = ECE_RECORD_SIZE, tagLength = TAG_LENGTH) {
const chunk_meta = tagLength + 1; // Chunk metadata, tag and delimiter
return 21 + size + chunk_meta * Math.ceil(size / (rs - chunk_meta));
}
module.exports = {
fadeOut,
delay,
@@ -267,5 +274,6 @@ module.exports = {
list,
secondsToL10nId,
timeLeft,
platform
platform,
encryptedSize
};
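encryptedSize now lives in utils.js, a CommonJS module, so the server routes further down can require the same helper the client uses. The constants match the aes128gcm record layout (RFC 8188): the fixed 21 bytes are the header (16-byte salt, 4-byte record size, 1-byte key-id length, empty key id), and each record adds chunk_meta = 17 bytes for the 16-byte AES-GCM tag plus a 1-byte padding delimiter. A quick sanity check of the arithmetic with the defaults above (64 KiB records); the require path is illustrative:

// Illustrative check of the formula, run standalone.
const { encryptedSize } = require('./app/utils');

const size = 1024 * 1024;                        // 1 MiB of plaintext
// each record holds 65536 - 17 = 65519 plaintext bytes, so 17 records are needed
console.log(Math.ceil(size / (65536 - 17)));     // 17
console.log(encryptedSize(size));                // 21 + 1048576 + 17 * 17 === 1048886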

View File

@@ -11,6 +11,7 @@ class Limiter extends Transform {
this.length += chunk.length;
this.push(chunk);
if (this.length > this.limit) {
console.error('LIMIT', this.length, this.limit);
return callback(new Error('limit'));
}
callback();
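For context, Limiter is a Transform stream that counts the bytes flowing through it and fails once the running total passes the limit it was constructed with; this change only adds a log line before the error. A minimal sketch of such a limiter, assuming a constructor that takes the byte limit (matching how it is called in the routes below):

const { Transform } = require('stream');

// Sketch of a byte-counting limiter: passes data through untouched and
// errors the stream as soon as more than `limit` bytes have been seen.
class Limiter extends Transform {
  constructor(limit) {
    super();
    this.limit = limit;
    this.length = 0;
  }

  _transform(chunk, encoding, callback) {
    this.length += chunk.length;
    this.push(chunk);
    if (this.length > this.limit) {
      console.error('LIMIT', this.length, this.limit);
      return callback(new Error('limit'));
    }
    callback();
  }
}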

View File

@@ -3,6 +3,7 @@ const storage = require('../storage');
const config = require('../config');
const mozlog = require('../log');
const Limiter = require('../limiter');
const { encryptedSize } = require('../../app/utils');
const log = mozlog('send.upload');
@@ -22,7 +23,7 @@ module.exports = async function(req, res) {
};
try {
const limiter = new Limiter(config.max_file_size);
const limiter = new Limiter(encryptedSize(config.max_file_size));
const fileStream = req.pipe(limiter);
//this hasn't been updated to expiration time setting yet
//if you want to fallback to this code add this
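The body this route receives is ciphertext, so a limiter sized to config.max_file_size would reject plaintexts near the maximum once the ECE header and per-record tags were counted; sizing it with encryptedSize(config.max_file_size) admits exactly that overhead. The difference is small, about 17 bytes per 64 KiB record. For a hypothetical 1 GiB limit:

// Numbers are illustrative; the real limit comes from config.max_file_size.
const maxFileSize = 1024 * 1024 * 1024;                 // 1 GiB of plaintext
const records = Math.ceil(maxFileSize / (65536 - 17));  // 16389 records
const overhead = 21 + records * 17;                     // 278634 bytes (~272 KiB)
// encryptedSize(maxFileSize) === maxFileSize + overhead === 1074020458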

View File

@@ -6,6 +6,7 @@ const Limiter = require('../limiter');
const wsStream = require('websocket-stream/stream');
const fxa = require('../fxa');
const { statUploadEvent } = require('../amplitude');
const { encryptedSize } = require('../../app/utils');
const { Duplex } = require('stream');
@@ -74,7 +75,7 @@ module.exports = function(ws, req) {
id: newId
})
);
const limiter = new Limiter(maxFileSize);
const limiter = new Limiter(encryptedSize(maxFileSize));
const flowControl = new Duplex({
read() {
ws.resume();
@@ -92,8 +93,8 @@ module.exports = function(ws, req) {
});
fileStream = wsStream(ws, { binary: true })
.pipe(limiter)
.pipe(flowControl);
.pipe(flowControl)
.pipe(limiter); // limiter needs to be the last in the chain
await storage.set(newId, fileStream, meta, timeLimit);
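Node's pipe() forwards data but not 'error' events, so in the old order (limiter before flowControl) the limiter's failure stayed buried in the middle of the chain while storage.set kept consuming the flowControl stream. With the limiter as the last link, fileStream itself is the stream that errors, which is presumably what lets the over-limit upload abort. A sketch of where the error now appears (handling details assumed):

// The 'limit' error from the Limiter now surfaces on fileStream, the stream
// passed to storage.set, rather than on an intermediate stream nobody watches.
fileStream.on('error', err => {
  if (err.message === 'limit') {
    // upload exceeded encryptedSize(maxFileSize); abort and notify the client
  }
});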

View File

@@ -6,6 +6,7 @@ import Archive from '../../../app/archive';
import { b64ToArray } from '../../../app/utils';
import { blobStream, concatStream } from '../../../app/streams';
import { decryptStream, encryptStream } from '../../../app/ece.js';
import { encryptedSize } from '../../../app/utils';
const rs = 36;
@@ -101,4 +102,10 @@ describe('Streaming', function() {
assert.deepEqual(result, decrypted);
});
});
describe('encryptedSize', function() {
it('matches the size of an encrypted buffer', function() {
assert.equal(encryptedSize(buffer.length, rs), encrypted.length);
});
});
});
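The new test checks the formula against a buffer/ciphertext pair set up earlier in this suite (the imports above include encryptStream), using the small test record size rs = 36. With chunk_meta = 17, each record holds only 36 - 17 = 19 plaintext bytes, so the expected value works out as in this sketch (the length 100 is assumed purely for illustration):

// encryptedSize(n, 36) === 21 + n + 17 * Math.ceil(n / 19)
const n = 100;                                       // hypothetical buffer.length
const chunkMeta = 16 + 1;                            // TAG_LENGTH + delimiter
const expected = 21 + n + chunkMeta * Math.ceil(n / (36 - chunkMeta));
console.log(expected);                               // 223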