implemented download tokens

Danny Coates 2020-07-27 11:18:52 -07:00
parent 87d46f7ef5
commit 81e9d81dab
GPG Key ID: 4C442633C62E00CB
26 changed files with 271 additions and 126 deletions


@@ -292,20 +292,13 @@ export function uploadWs(
////////////////////////
async function downloadS(id, keychain, signal) {
const auth = await keychain.authHeader();
async function _downloadStream(id, dlToken, signal) {
const response = await fetch(getApiUrl(`/api/download/${id}`), {
signal: signal,
method: 'GET',
headers: { Authorization: auth }
headers: { Authorization: `Bearer ${dlToken}` }
});
const authHeader = response.headers.get('WWW-Authenticate');
if (authHeader) {
keychain.nonce = parseNonce(authHeader);
}
if (response.status !== 200) {
throw new Error(response.status);
}
@@ -313,13 +306,13 @@ async function downloadS(id, keychain, signal) {
return response.body;
}
async function tryDownloadStream(id, keychain, signal, tries = 2) {
async function tryDownloadStream(id, dlToken, signal, tries = 2) {
try {
const result = await downloadS(id, keychain, signal);
const result = await _downloadStream(id, dlToken, signal);
return result;
} catch (e) {
if (e.message === '401' && --tries > 0) {
return tryDownloadStream(id, keychain, signal, tries);
return tryDownloadStream(id, dlToken, signal, tries);
}
if (e.name === 'AbortError') {
throw new Error('0');
@@ -328,21 +321,20 @@ async function tryDownloadStream(id, keychain, signal, tries = 2) {
}
}
export function downloadStream(id, keychain) {
export function downloadStream(id, dlToken) {
const controller = new AbortController();
function cancel() {
controller.abort();
}
return {
cancel,
result: tryDownloadStream(id, keychain, controller.signal)
result: tryDownloadStream(id, dlToken, controller.signal)
};
}
//////////////////
async function download(id, keychain, onprogress, canceller) {
const auth = await keychain.authHeader();
async function download(id, dlToken, onprogress, canceller) {
const xhr = new XMLHttpRequest();
canceller.oncancel = function() {
xhr.abort();
@@ -350,10 +342,6 @@ async function download(id, keychain, onprogress, canceller) {
return new Promise(function(resolve, reject) {
xhr.addEventListener('loadend', function() {
canceller.oncancel = function() {};
const authHeader = xhr.getResponseHeader('WWW-Authenticate');
if (authHeader) {
keychain.nonce = parseNonce(authHeader);
}
if (xhr.status !== 200) {
return reject(new Error(xhr.status));
}
@@ -368,26 +356,26 @@ async function download(id, keychain, onprogress, canceller) {
}
});
xhr.open('get', getApiUrl(`/api/download/blob/${id}`));
xhr.setRequestHeader('Authorization', auth);
xhr.setRequestHeader('Authorization', `Bearer ${dlToken}`);
xhr.responseType = 'blob';
xhr.send();
onprogress(0);
});
}
async function tryDownload(id, keychain, onprogress, canceller, tries = 2) {
async function tryDownload(id, dlToken, onprogress, canceller, tries = 2) {
try {
const result = await download(id, keychain, onprogress, canceller);
const result = await download(id, dlToken, onprogress, canceller);
return result;
} catch (e) {
if (e.message === '401' && --tries > 0) {
return tryDownload(id, keychain, onprogress, canceller, tries);
return tryDownload(id, dlToken, onprogress, canceller, tries);
}
throw e;
}
}
export function downloadFile(id, keychain, onprogress) {
export function downloadFile(id, dlToken, onprogress) {
const canceller = {
oncancel: function() {} // download() sets this
};
@@ -396,7 +384,7 @@ export function downloadFile(id, keychain, onprogress) {
}
return {
cancel,
result: tryDownload(id, keychain, onprogress, canceller)
result: tryDownload(id, dlToken, onprogress, canceller)
};
}
@@ -458,3 +446,27 @@ export async function reportLink(id, keychain, reason) {
throw new Error(result.response.status);
}
export async function getDownloadToken(id, keychain) {
const result = await fetchWithAuthAndRetry(
getApiUrl(`/api/download/token/${id}`),
{
method: 'GET'
},
keychain
);
if (result.ok) {
return (await result.response.json()).token;
}
throw new Error(result.response.status);
}
export async function downloadDone(id, dlToken) {
const headers = new Headers({ Authorization: `Bearer ${dlToken}` });
const response = await fetch(getApiUrl(`/api/download/done/${id}`), {
headers,
method: 'POST'
});
return response.ok;
}
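Taken together, the new api.js functions give the client a three-step token flow. A minimal usage sketch, assuming a fileInfo object and keychain from the surrounding app code:

const dlToken = await getDownloadToken(fileInfo.id, keychain); // HMAC-authenticated request
const dl = downloadFile(fileInfo.id, dlToken, bytes => {
  // progress callback
});
const encrypted = await dl.result;        // Bearer-authenticated blob download
await downloadDone(fileInfo.id, dlToken); // lets the server record the download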


@@ -250,7 +250,8 @@ export default function(state, emitter) {
const start = Date.now();
try {
const dl = state.transfer.download({
stream: state.capabilities.streamDownload
stream: state.capabilities.streamDownload,
storage: state.storage
});
render();
await dl;
@@ -269,7 +270,9 @@ export default function(state, emitter) {
} else {
// eslint-disable-next-line no-console
state.transfer = null;
const location = err.message === '404' ? '/404' : '/error';
const location = ['404', '403'].includes(err.message)
? '/404'
: '/error';
if (location === '/error') {
state.sentry.withScope(scope => {
scope.setExtra('duration', err.duration);


@@ -1,7 +1,14 @@
import Nanobus from 'nanobus';
import Keychain from './keychain';
import { delay, bytes, streamToArrayBuffer } from './utils';
import { downloadFile, metadata, getApiUrl, reportLink } from './api';
import {
downloadFile,
downloadDone,
metadata,
getApiUrl,
reportLink,
getDownloadToken
} from './api';
import { blobStream } from './streams';
import Zip from './zip';
@@ -13,9 +20,14 @@ export default class FileReceiver extends Nanobus {
this.keychain.setPassword(fileInfo.password, fileInfo.url);
}
this.fileInfo = fileInfo;
this.dlToken = null;
this.reset();
}
get id() {
return this.fileInfo.id;
}
get progressRatio() {
return this.progress[0] / this.progress[1];
}
@@ -79,7 +91,7 @@ export default class FileReceiver extends Nanobus {
this.state = 'downloading';
this.downloadRequest = await downloadFile(
this.fileInfo.id,
this.keychain,
this.dlToken,
p => {
this.progress = [p, this.fileInfo.size];
this.emit('progress');
@@ -143,6 +155,7 @@ export default class FileReceiver extends Nanobus {
url: this.fileInfo.url,
size: this.fileInfo.size,
nonce: this.keychain.nonce,
dlToken: this.dlToken,
noSave
};
await this.sendMessageToSw(info);
@@ -208,11 +221,19 @@ export default class FileReceiver extends Nanobus {
}
}
download(options) {
if (options.stream) {
return this.downloadStream(options.noSave);
async download({ stream, storage, noSave }) {
this.dlToken = storage.getDownloadToken(this.id);
if (!this.dlToken) {
this.dlToken = await getDownloadToken(this.id, this.keychain);
storage.setDownloadToken(this.id, this.dlToken);
}
return this.downloadBlob(options.noSave);
if (stream) {
await this.downloadStream(noSave);
} else {
await this.downloadBlob(noSave);
}
await downloadDone(this.id, this.dlToken);
storage.setDownloadToken(this.id);
}
}
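A sketch of how the reworked download() is driven, mirroring the controller change above (fileInfo is assumed to come from the metadata step):

const receiver = new FileReceiver(fileInfo);
await receiver.download({
  stream: state.capabilities.streamDownload, // service-worker stream path vs. blob path
  storage,                                   // app/storage singleton; caches the token
  noSave: false
});
// download() reuses a cached token when one exists, otherwise fetches a fresh one,
// then POSTs /api/download/done and clears the cached token.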


@@ -34,7 +34,7 @@ async function decryptStream(id) {
keychain.setPassword(file.password, file.url);
}
file.download = downloadStream(id, keychain);
file.download = downloadStream(id, file.dlToken);
const body = await file.download.result;
@@ -146,6 +146,7 @@ self.onmessage = event => {
type: event.data.type,
manifest: event.data.manifest,
size: event.data.size,
dlToken: event.data.dlToken,
progress: 0
};
map.set(event.data.id, info);
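The token simply rides along in the message the page already posts to the service worker, and downloadStream() reads it from the stored file info instead of using the keychain. Roughly:

// event.data ≈ { id, type, manifest, size, nonce, dlToken, noSave, ... }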


@@ -35,6 +35,7 @@ class Storage {
this.engine = new Mem();
}
this._files = this.loadFiles();
this.pruneTokens();
}
loadFiles() {
@@ -180,6 +181,48 @@ class Storage {
downloadCount
};
}
setDownloadToken(id, token) {
let otherTokens = {};
try {
otherTokens = JSON.parse(this.get('dlTokens'));
} catch (e) {
//
}
if (token) {
const record = { token, ts: Date.now() };
this.set('dlTokens', JSON.stringify({ ...otherTokens, [id]: record }));
} else {
this.set('dlTokens', JSON.stringify({ ...otherTokens, [id]: undefined }));
}
}
getDownloadToken(id) {
try {
return JSON.parse(this.get('dlTokens'))[id].token;
} catch (e) {
return undefined;
}
}
pruneTokens() {
try {
const now = Date.now();
const tokens = JSON.parse(this.get('dlTokens'));
const keep = {};
for (const id of Object.keys(tokens)) {
const t = tokens[id];
if (t.ts > now - 7 * 86400 * 1000) {
keep[id] = t;
}
}
if (Object.keys(keep).length < Object.keys(tokens).length) {
this.set('dlTokens', JSON.stringify(keep));
}
} catch (e) {
console.error(e);
}
}
}
export default new Storage();
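For reference, the dlTokens blob these methods maintain looks roughly like this when the localStorage engine is in use (illustrative id and values):

// storage.get('dlTokens') === '{"8c2d7f3a1b4e5d6f":{"token":"9f86d081…","ts":1595874000000}}'
storage.setDownloadToken(id, token); // adds or refreshes an entry
storage.getDownloadToken(id);        // -> the token string, or undefined
storage.setDownloadToken(id);        // called after /done: clears the entry
// pruneTokens() runs at startup and drops entries older than seven days.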


@@ -113,7 +113,7 @@ module.exports = function(state, emit) {
<main class="main">
${state.modal && modal(state, emit)}
<section
class="relative h-full w-full p-6 md:p-8 md:rounded-xl md:shadow-big md:flex md:flex-col"
class="relative overflow-hidden h-full w-full p-6 md:p-8 md:rounded-xl md:shadow-big md:flex md:flex-col"
>
${content}
</section>


@@ -71,7 +71,7 @@ const conf = convict({
},
redis_host: {
format: String,
default: 'localhost',
default: 'mock',
env: 'REDIS_HOST'
},
redis_event_expire: {


@@ -1,15 +1,45 @@
const crypto = require('crypto');
function makeToken(secret, counter) {
const hmac = crypto.createHmac('sha256', secret);
hmac.update(String(counter));
return hmac.digest('hex');
}
class Metadata {
constructor(obj) {
constructor(obj, storage) {
this.id = obj.id;
this.dl = +obj.dl || 0;
this.dlToken = +obj.dlToken || 0;
this.dlimit = +obj.dlimit || 1;
this.pwd = String(obj.pwd) === 'true';
this.pwd = !!+obj.pwd;
this.owner = obj.owner;
this.metadata = obj.metadata;
this.auth = obj.auth;
this.nonce = obj.nonce;
this.flagged = !!obj.flagged;
this.dead = !!obj.dead;
this.key = obj.key;
this.fxa = !!+obj.fxa;
this.storage = storage;
}
async getDownloadToken() {
if (this.dlToken >= this.dlimit) {
throw new Error('limit');
}
this.dlToken = await this.storage.incrementField(this.id, 'dlToken');
// another request could have also incremented
if (this.dlToken > this.dlimit) {
throw new Error('limit');
}
return makeToken(this.owner, this.dlToken);
}
async verifyDownloadToken(token) {
const validTokens = Array.from({ length: this.dlToken }, (_, i) =>
makeToken(this.owner, i + 1)
);
return validTokens.includes(token);
}
}
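Because token n is just an HMAC-SHA256 of the counter keyed by the file's owner secret, the server never stores issued tokens; it recomputes them on demand. A small sketch of what getDownloadToken/verifyDownloadToken amount to (ownerSecret and candidate are placeholder values):

const crypto = require('crypto');
const makeToken = (secret, counter) =>
  crypto.createHmac('sha256', secret).update(String(counter)).digest('hex');

const ownerSecret = 'b64-owner-token';       // stands in for meta.owner
const candidate = makeToken(ownerSecret, 2); // a token handed out earlier
// with two tokens issued (dlToken === 2), verification checks:
const valid = [1, 2].map(n => makeToken(ownerSecret, n)).includes(candidate); // true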


@@ -75,5 +75,22 @@ module.exports = {
} else {
res.sendStatus(401);
}
},
dlToken: async function(req, res, next) {
const authHeader = req.header('Authorization');
if (authHeader && /^Bearer\s/i.test(authHeader)) {
const token = authHeader.split(' ')[1];
const id = req.params.id;
req.meta = await storage.metadata(id);
if (!req.meta || req.meta.dead) {
return res.sendStatus(404);
}
req.authorized = await req.meta.verifyDownloadToken(token);
}
if (req.authorized) {
next();
} else {
res.sendStatus(401);
}
}
};
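A hedged sketch of the request shape the new dlToken middleware accepts; a missing or invalid token produces 401, while an unknown or dead id produces 404:

await fetch(`/api/download/${id}`, {
  headers: { Authorization: `Bearer ${dlToken}` } // token obtained from /api/download/token/:id
});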

server/routes/done.js (new file, 25 lines)

@@ -0,0 +1,25 @@
const storage = require('../storage');
const { statDownloadEvent } = require('../amplitude');
module.exports = async function(req, res) {
try {
const id = req.params.id;
const meta = req.meta;
const ttl = await storage.ttl(id);
statDownloadEvent({
id,
ip: req.ip,
owner: meta.owner,
download_count: meta.dl,
ttl,
agent: req.ua.browser.name || req.ua.ua.substring(0, 6)
});
await storage.incrementField(id, 'dl');
if (meta.dlToken >= meta.dlimit) {
await storage.kill(id);
}
res.sendStatus(200);
} catch (e) {
res.sendStatus(404);
}
};


@@ -1,53 +1,14 @@
const storage = require('../storage');
const mozlog = require('../log');
const log = mozlog('send.download');
const { statDownloadEvent } = require('../amplitude');
module.exports = async function(req, res) {
const id = req.params.id;
try {
const meta = req.meta;
const contentLength = await storage.length(id);
const fileStream = await storage.get(id);
let cancelled = false;
req.on('aborted', () => {
cancelled = true;
fileStream.destroy();
});
const { length, stream } = await storage.get(id);
res.writeHead(200, {
'Content-Type': 'application/octet-stream',
'Content-Length': contentLength
});
fileStream.pipe(res).on('finish', async () => {
if (cancelled) {
return;
}
const dl = meta.dl + 1;
const dlimit = meta.dlimit;
const ttl = await storage.ttl(id);
statDownloadEvent({
id,
ip: req.ip,
country: req.geo.country,
state: req.geo.state,
owner: meta.owner,
download_count: dl,
ttl,
agent: req.ua.browser.name || req.ua.ua.substring(0, 6)
});
try {
if (dl >= dlimit) {
await storage.kill(id);
} else {
await storage.incrementField(id, 'dl');
}
} catch (e) {
log.info('StorageError:', id);
}
'Content-Length': length
});
stream.pipe(res);
} catch (e) {
res.sendStatus(404);
}
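For context on the simplification above, a short summary in comment form:

// The streaming route now only pipes bytes to the response. Incrementing 'dl',
// emitting the analytics event, and killing the file at the download limit all
// moved to the new POST /api/download/done handler earlier in this commit.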


@@ -16,13 +16,12 @@ module.exports = {
const kid = req.params.id;
try {
const fileId = id(req.user, kid);
const contentLength = await storage.length(fileId);
const fileStream = await storage.get(fileId);
const { length, stream } = await storage.get(fileId);
res.writeHead(200, {
'Content-Type': 'application/octet-stream',
'Content-Length': contentLength
'Content-Length': length
});
fileStream.pipe(res);
stream.pipe(res);
} catch (e) {
res.sendStatus(404);
}


@@ -120,12 +120,18 @@ module.exports = function(app) {
app.get('/app.webmanifest', language, require('./webmanifest'));
app.get(`/download/:id${ID_REGEX}`, language, pages.download);
app.get('/unsupported/:reason', language, pages.unsupported);
app.get(`/api/download/:id${ID_REGEX}`, auth.hmac, require('./download'));
app.get(`/api/download/token/:id${ID_REGEX}`, auth.hmac, require('./token'));
app.get(`/api/download/:id${ID_REGEX}`, auth.dlToken, require('./download'));
app.get(
`/api/download/blob/:id${ID_REGEX}`,
auth.hmac,
auth.dlToken,
require('./download')
);
app.post(
`/api/download/done/:id${ID_REGEX}`,
auth.dlToken,
require('./done.js')
);
app.get(`/api/exists/:id${ID_REGEX}`, require('./exists'));
app.get(`/api/metadata/:id${ID_REGEX}`, auth.hmac, require('./metadata'));
app.get('/api/filelist/:id([\\w-]{16})', auth.fxa, filelist.get);
@@ -133,12 +139,7 @@
// app.post('/api/upload', auth.fxa, require('./upload'));
app.post(`/api/delete/:id${ID_REGEX}`, auth.owner, require('./delete'));
app.post(`/api/password/:id${ID_REGEX}`, auth.owner, require('./password'));
app.post(
`/api/params/:id${ID_REGEX}`,
auth.owner,
auth.fxa,
require('./params')
);
app.post(`/api/params/:id${ID_REGEX}`, auth.owner, require('./params'));
app.post(`/api/info/:id${ID_REGEX}`, auth.owner, require('./info'));
app.post(`/api/report/:id${ID_REGEX}`, auth.hmac, require('./report'));
app.post('/api/metrics', require('./metrics'));
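Put together, the rewired routes give the following download lifecycle (sketch; the auth middleware for each endpoint is shown in parentheses):

// 1. GET  /api/download/token/:id  (auth.hmac)    -> { token }
// 2. GET  /api/download/:id        (auth.dlToken) -> encrypted stream
//    GET  /api/download/blob/:id   (auth.dlToken) -> same payload via the XHR/blob fallback
// 3. POST /api/download/done/:id   (auth.dlToken) -> records the download and
//         kills the file once dlToken >= dlimit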


@@ -11,7 +11,7 @@ module.exports = async function(req, res) {
res.send({
metadata: meta.metadata,
flagged: !!meta.flagged,
finalDownload: meta.dl + 1 === meta.dlimit,
finalDownload: meta.dlToken + 1 === meta.dlimit,
ttl
});
} catch (e) {


@@ -2,7 +2,7 @@ const config = require('../config');
const storage = require('../storage');
module.exports = function(req, res) {
const max = req.user ? config.max_downloads : config.anon_max_downloads;
const max = req.meta.fxa ? config.max_downloads : config.anon_max_downloads;
const dlimit = req.body.dlimit;
if (!dlimit || dlimit > max) {
return res.sendStatus(400);


@@ -9,7 +9,7 @@ module.exports = function(req, res) {
try {
storage.setField(id, 'auth', auth);
storage.setField(id, 'pwd', true);
storage.setField(id, 'pwd', 1);
res.sendStatus(200);
} catch (e) {
return res.sendStatus(404);

server/routes/token.js (new file, 17 lines)

@@ -0,0 +1,17 @@
module.exports = async function(req, res) {
const meta = req.meta;
try {
if (meta.dead || meta.flagged) {
return res.sendStatus(404);
}
const token = await meta.getDownloadToken();
res.send({
token
});
} catch (e) {
if (e.message === 'limit') {
return res.sendStatus(403);
}
res.sendStatus(404);
}
};


@@ -66,6 +66,7 @@ module.exports = function(ws, req) {
const meta = {
owner,
fxa: user ? 1 : 0,
metadata,
dlimit,
auth: auth.split(' ')[1],


@@ -1,10 +1,8 @@
const fs = require('fs');
const fss = require('fs');
const fs = fss.promises;
const path = require('path');
const promisify = require('util').promisify;
const mkdirp = require('mkdirp');
const stat = promisify(fs.stat);
class FSStorage {
constructor(config, log) {
this.log = log;
@@ -13,32 +11,36 @@
}
async length(id) {
const result = await stat(path.join(this.dir, id));
const result = await fs.stat(path.join(this.dir, id));
return result.size;
}
getStream(id) {
return fs.createReadStream(path.join(this.dir, id));
return fss.createReadStream(path.join(this.dir, id));
}
set(id, file) {
return new Promise((resolve, reject) => {
const filepath = path.join(this.dir, id);
const fstream = fs.createWriteStream(filepath);
const fstream = fss.createWriteStream(filepath);
file.pipe(fstream);
file.on('error', err => {
fstream.destroy(err);
});
fstream.on('error', err => {
fs.unlinkSync(filepath);
this.del(id);
reject(err);
});
fstream.on('finish', resolve);
});
}
del(id) {
return Promise.resolve(fs.unlinkSync(path.join(this.dir, id)));
async del(id) {
try {
await fs.unlink(path.join(this.dir, id));
} catch (e) {
// ignore local fs issues
}
}
ping() {


@@ -56,7 +56,8 @@ class DB {
if (info.dead || info.flagged) {
throw new Error(info.flagged ? 'flagged' : 'dead');
}
return this.storage.getStream(info.filePath);
const length = await this.storage.length(info.filePath);
return { length, stream: this.storage.getStream(info.filePath) };
}
async set(id, file, meta, expireSeconds = config.default_expire_seconds) {
@@ -75,15 +76,15 @@
this.redis.hset(id, key, value);
}
incrementField(id, key, increment = 1) {
this.redis.hincrby(id, key, increment);
async incrementField(id, key, increment = 1) {
return await this.redis.hincrbyAsync(id, key, increment);
}
async kill(id) {
const { filePath, dead } = await this.getPrefixedInfo(id);
if (!dead) {
this.storage.del(filePath);
this.redis.hset(id, 'dead', 1);
this.storage.del(filePath);
}
}
@@ -94,8 +95,8 @@ class DB {
async del(id) {
const { filePath } = await this.getPrefixedInfo(id);
this.storage.del(filePath);
this.redis.del(id);
this.storage.del(filePath);
}
async ping() {
@@ -105,7 +106,7 @@
async metadata(id) {
const result = await this.redis.hgetallAsync(id);
return result && new Metadata(result);
return result && new Metadata({ id, ...result }, this);
}
}
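A sketch of the two storage API changes the routes above rely on:

const { length, stream } = await storage.get(id);          // previously returned only the stream
const count = await storage.incrementField(id, 'dlToken'); // now resolves to the incremented value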


@@ -2,7 +2,7 @@ const promisify = require('util').promisify;
module.exports = function(config) {
const redis_lib =
config.env === 'development' && config.redis_host === 'localhost'
config.env === 'development' && config.redis_host === 'mock'
? 'redis-mock'
: 'redis';
@@ -23,6 +23,7 @@ module.exports = function(config) {
client.ttlAsync = promisify(client.ttl);
client.hgetallAsync = promisify(client.hgetall);
client.hgetAsync = promisify(client.hget);
client.hincrbyAsync = promisify(client.hincrby);
client.hmgetAsync = promisify(client.hmget);
client.pingAsync = promisify(client.ping);
client.existsAsync = promisify(client.exists);


@@ -48,7 +48,7 @@ describe('/api/metadata', function() {
storage.ttl.returns(Promise.resolve(123));
const meta = {
dlimit: 1,
dl: 0,
dlToken: 0,
metadata: 'foo'
};
const res = response();


@@ -8,6 +8,7 @@ const storage = {
function request(id) {
return {
params: { id },
meta: { fxa: false },
body: {}
};
}


@@ -32,7 +32,7 @@ describe('/api/password', function() {
const res = response();
passwordRoute(req, res);
sinon.assert.calledWith(storage.setField, 'x', 'auth', 'z');
sinon.assert.calledWith(storage.setField, 'x', 'pwd', true);
sinon.assert.calledWith(storage.setField, 'x', 'pwd', 1);
sinon.assert.calledWith(res.sendStatus, 200);
});


@@ -25,7 +25,7 @@ const config = {
default_expire_seconds: 20,
expire_times_seconds: [10, 20, 30],
env: 'development',
redis_host: 'localhost'
redis_host: 'mock'
};
const storage = proxyquire('../../server/storage', {
@@ -54,7 +54,7 @@ describe('Storage', function() {
describe('get', function() {
it('returns a stream', async function() {
const s = await storage.get('x');
const { stream: s } = await storage.get('x');
assert.equal(s, stream);
});
});
@@ -123,9 +123,11 @@ describe('Storage', function() {
describe('metadata', function() {
it('returns all metadata fields', async function() {
const m = {
pwd: true,
id: 'a1',
pwd: 0,
dl: 1,
dlimit: 1,
fxa: 1,
auth: 'foo',
metadata: 'bar',
nonce: 'baz',
@@ -133,12 +135,18 @@
};
await storage.set('x', null, m);
const meta = await storage.metadata('x');
assert.deepEqual(meta, {
assert.deepEqual(
{ ...meta, storage: 'excluded' },
{
...m,
dead: false,
flagged: false,
key: undefined
});
dlToken: 0,
fxa: true,
pwd: false,
storage: 'excluded'
}
);
});
});
});


@@ -2,12 +2,13 @@ import assert from 'assert';
import Archive from '../../../app/archive';
import FileSender from '../../../app/fileSender';
import FileReceiver from '../../../app/fileReceiver';
import storage from '../../../app/storage';
const headless = /Headless/.test(navigator.userAgent);
// TODO: save on headless doesn't work as it used to since it now
// follows a link instead of fetch. Maybe there's a way to make it
// work? For now always set noSave.
const options = { noSave: true || !headless, stream: true }; // only run the saveFile code if headless
const options = { noSave: true || !headless, stream: true, storage }; // only run the saveFile code if headless
// FileSender uses a File in real life but a Blob works for testing
const blob = new Blob([new ArrayBuffer(1024 * 128)], { type: 'text/plain' });