Compare commits

...

90 Commits

Author SHA1 Message Date
timvisee
dee1e84e58
Mention Docker page in deployment section 2021-05-19 16:49:13 +02:00
timvisee
214191e743
Replace links from mozilla/send to timvisee/send 2021-05-19 16:09:01 +02:00
timvisee
84da34169d
Create static robots.txt file, remove dynamic route
Fixes https://gitlab.com/timvisee/send/-/issues/17
2021-05-19 16:00:58 +02:00
Tim Visée
93e1d2f41a Merge branch 'deploy' into 'master'
Fix chmod command

See merge request timvisee/send!17
2021-05-19 12:34:35 +00:00
David Dumas
43e1845d28 Fix chmod command 2021-05-19 12:34:35 +00:00
timvisee
c01d6b73ea
Bump version to 3.4.11 2021-05-19 12:04:52 +02:00
timvisee
a394fd995e
Update dependencies 2021-05-19 12:04:21 +02:00
timvisee
175712cfbd
Add REDIS_USER and REDIS_DB configuration variables
See https://github.com/timvisee/send/issues/23#issuecomment-843925819
2021-05-19 12:01:01 +02:00
timvisee
e5d7378fd9
Merge branch 'pirate-patch-3' into master
See https://github.com/timvisee/send/pull/36
2021-05-19 11:50:54 +02:00
timvisee
20cf722b54
Correctly parse config string values from int array 2021-05-19 11:48:20 +02:00
timvisee
1d6872e279
Merge branch 'master' into pirate-patch-3 2021-05-19 11:31:14 +02:00
Tim Visée
a1ca355771 Merge branch 'deploy' into 'master'
Documentation : full deployment example in AWS with Ubuntu 20.04

See merge request timvisee/send!16
2021-05-19 09:17:13 +00:00
David Dumas
dc816d0e59 Documentation: full deployment example in AWS with Ubuntu 20.04 2021-05-19 09:17:13 +00:00
Nick Sweeting
d6ac469e1a
remove signup-cta and tweak console log wording to remove anon user references 2021-05-19 05:13:47 -04:00
timvisee
62cfecd618
Merge branch 'pirate-patch-2' into master
See https://github.com/timvisee/send/pull/35
2021-05-19 10:18:44 +02:00
timvisee
9152d22913
Merge branch 'patch-2' of https://github.com/pirate/send into pirate-patch-2 2021-05-19 10:18:26 +02:00
timvisee
21b198fbd5
Merge branch 'pirate-patch-1' into master
See https://github.com/timvisee/send/pull/34
2021-05-19 10:13:25 +02:00
Nick Sweeting
0ffc960523
add comments 2021-05-19 01:52:37 -04:00
Nick Sweeting
77ea05a233
also handle arrays of strings 2021-05-19 01:46:12 -04:00
Nick Sweeting
a6162f7142
fix indentation 2021-05-19 01:41:22 -04:00
Nick Sweeting
4a6a3dfc36
coerce DOWNLOAD_COUNTS and EXPIRE_TIMES_SECONDS into positive integer arrays 2021-05-19 01:39:14 -04:00
Nick Sweeting
1e7efe3d98
fix signup-ctas blocking render 2021-05-19 00:43:08 -04:00
Nick Sweeting
46381fd516
Fix glitchy UI dropdown select for max downloads and expiration 2021-05-19 00:35:53 -04:00
Nick Sweeting
1fe74f2be0
deny search engines to limit discoverability of public instances 2021-05-18 22:49:16 -04:00
Nick Sweeting
35da83bf2a
improve README configuration list, example usage, and quickstart 2021-05-18 22:13:57 -04:00
timvisee
bcfb9c5d09
Update dependencies 2021-05-17 11:48:48 +02:00
timvisee
4df2578bb1
Merge branch 'dependabot/npm_and_yarn/hosted-git-info-2.8.9' into master 2021-05-16 15:58:54 +02:00
dependabot[bot]
e4f2955eae
Bump hosted-git-info from 2.8.8 to 2.8.9
Bumps [hosted-git-info](https://github.com/npm/hosted-git-info) from 2.8.8 to 2.8.9.
- [Release notes](https://github.com/npm/hosted-git-info/releases)
- [Changelog](https://github.com/npm/hosted-git-info/blob/v2.8.9/CHANGELOG.md)
- [Commits](https://github.com/npm/hosted-git-info/compare/v2.8.8...v2.8.9)

Signed-off-by: dependabot[bot] <support@github.com>
2021-05-11 22:45:30 +00:00
timvisee
72377d3438
Bump version to 3.4.10 2021-05-07 13:09:37 +02:00
timvisee
512c9803bd
Enable base URL detection by default with npm start, remove FXA_CLIENT_ID 2021-05-07 13:07:26 +02:00
timvisee
4c45d6217d
Properly derive base URL as configured in file upload logic
Fixes https://github.com/timvisee/send/issues/29
2021-05-07 13:07:17 +02:00
timvisee
b4b8060a78
Update dependencies 2021-05-07 12:40:16 +02:00
timvisee
ed042b8515
Merge branch 'ckwalsh-detect_base_url' into master 2021-05-07 12:38:24 +02:00
timvisee
06bc58c93c
Merge branch 'detect_base_url' of https://github.com/ckwalsh/send into ckwalsh-detect_base_url 2021-05-07 12:30:06 +02:00
timvisee
b58caed44f
Merge branch 'dependabot/npm_and_yarn/url-parse-1.5.1' into master 2021-05-06 18:45:36 +02:00
timvisee
174ade1c2e
Merge branch 'master' into dependabot/npm_and_yarn/url-parse-1.5.1 2021-05-06 18:44:28 +02:00
timvisee
31ce8c048b
Merge branch 'dependabot/npm_and_yarn/lodash-4.17.21' into master 2021-05-06 18:38:29 +02:00
dependabot[bot]
ce401881d7
Bump url-parse from 1.4.7 to 1.5.1
Bumps [url-parse](https://github.com/unshiftio/url-parse) from 1.4.7 to 1.5.1.
- [Release notes](https://github.com/unshiftio/url-parse/releases)
- [Commits](https://github.com/unshiftio/url-parse/compare/1.4.7...1.5.1)

Signed-off-by: dependabot[bot] <support@github.com>
2021-05-06 16:35:49 +00:00
dependabot[bot]
c49e8e1062
Bump lodash from 4.17.20 to 4.17.21
Bumps [lodash](https://github.com/lodash/lodash) from 4.17.20 to 4.17.21.
- [Release notes](https://github.com/lodash/lodash/releases)
- [Commits](https://github.com/lodash/lodash/compare/4.17.20...4.17.21)

Signed-off-by: dependabot[bot] <support@github.com>
2021-05-06 16:35:08 +00:00
timvisee
15648157c9
Update dependencies 2021-05-06 18:31:34 +02:00
timvisee
4280edd5af
Merge branch 'tjeerdhans-patch-1' into master
See https://github.com/timvisee/send/pull/26
2021-05-06 18:25:58 +02:00
Tjeerd Hans
a3d4e2c502
Some dutch grammar fixes 2021-05-06 17:24:31 +02:00
timvisee
bed5443685
Merge branch 'abhijitnathwani-patch-1' into master
See https://github.com/timvisee/send/pull/25
2021-05-06 11:31:27 +02:00
timvisee
f9f5d77cd0
Merge branch 'abhijitnathwani-patch-1' into master
See https://github.com/timvisee/send/pull/25
2021-05-06 11:30:59 +02:00
Abhijit Nathwani
0f8a6a107a
Update git url in deployment.md 2021-05-06 12:21:55 +05:30
Cullen Walsh
02e8cb264f Add detect_base_url config
This diff adds the detect_base_url config, controlled by the
DETECT_BASE_URL env variable. When set to true, the BASE_URL setting is
ignored, and the base_url is derived from the request protocol and host
header.

Test Plan: Started up a local instance in my homelab, running docker
node:15 image with a nginx reverse proxy. Configured nginx to use the
same backend with multiple hostnames on https. Opened in browser and
confirmed og:url meta tag uses correct url.
2021-05-05 22:19:11 -07:00
timvisee
385ac595b9
Fix linguist documentation marker for locale files
Thanks https://news.ycombinator.com/item?id=27055526
2021-05-05 22:46:55 +02:00
timvisee
6df0876286
Merge branch 'whalehub-patch-1' into master 2021-05-03 00:16:41 +02:00
Aaron
827a35f73e
main.css: Use ::marker to avoid browser console warning
Signed-off-by: Aaron <admin@datahoarder.dev>
2021-05-03 00:13:24 +02:00
timvisee
eb3a9e8c89
Bump version to 3.4.9 2021-04-21 21:52:18 +02:00
timvisee
6c3ac403f6
Update dependencies 2021-04-21 21:51:12 +02:00
timvisee
1ce2a60dd5
Merge branch 'whalehub-patch-1' into master
https://github.com/timvisee/send/pull/19
2021-04-21 21:49:21 +02:00
Aaron
f5bb74e921
index.js: Add "data:" as an allowed image source in CSP
Signed-off-by: Aaron <admin@datahoarder.dev>
2021-04-21 21:40:15 +02:00
timvisee
352fba6302
Update dependencies 2021-04-20 20:37:16 +02:00
timvisee
ace2aa5d73
Merge branch 'dependabot/npm_and_yarn/ssri-6.0.2' into master
See https://github.com/timvisee/send/pull/18
2021-04-20 20:36:17 +02:00
timvisee
3256b01276
Merge branch 'master' into dependabot/npm_and_yarn/ssri-6.0.2
See https://github.com/timvisee/send/pull/18
2021-04-20 20:35:35 +02:00
timvisee
96244132c6
Bump version to 3.4.8 2021-04-20 18:52:45 +02:00
timvisee
a9cdd13543
Update dependencies 2021-04-20 18:50:12 +02:00
timvisee
1b6c5b8f97
Only set Redis client password if password is specified
This attempts to fix a Redis connection issue when the Redis password
is an empty string.

See https://github.com/timvisee/send-docker-compose/issues/3#issuecomment-822885578
2021-04-20 18:37:19 +02:00
Tim Visée
27e6606516 Merge branch 'simao-silva-master-patch-09841' into 'master'
Update Alpine images to current tag

See merge request timvisee/send!15
2021-04-19 19:37:59 +00:00
Simão Silva
4902d304b6 Update Alpine images to current tag 2021-04-19 19:32:48 +00:00
timvisee
a182ff2dd1
Bump version to 3.4.7 2021-04-18 11:38:05 +02:00
timvisee
0361e3ce1c
Update dependencies 2021-04-18 11:35:16 +02:00
dependabot[bot]
32539e58ac
Bump ssri from 6.0.1 to 6.0.2
Bumps [ssri](https://github.com/npm/ssri) from 6.0.1 to 6.0.2.
- [Release notes](https://github.com/npm/ssri/releases)
- [Changelog](https://github.com/npm/ssri/blob/v6.0.2/CHANGELOG.md)
- [Commits](https://github.com/npm/ssri/compare/v6.0.1...v6.0.2)

Signed-off-by: dependabot[bot] <support@github.com>
2021-04-18 09:33:53 +00:00
Tim Visée
eeb1359d90 Merge branch 'moreopts' into 'master'
add env for redis pwd and port

See merge request timvisee/send!14
2021-04-18 09:32:39 +00:00
piaoger gong
e2dde364eb add env for redis pwd and port 2021-04-18 11:08:35 +08:00
timvisee
0c1ce9f598
Bump version to 3.4.6 2021-04-12 15:21:08 +02:00
timvisee
15d37da667
Remove obsolete anonymous limits
Related to https://gitlab.com/timvisee/send/-/issues/3
2021-04-12 15:19:02 +02:00
timvisee
9e4c063749
Update dependencies 2021-04-12 11:33:22 +02:00
timvisee
398d044ca2
Update dependencies 2021-04-09 12:52:30 +02:00
Tim Visée
d576003dd1 Merge branch 'thomassth-master-patch-74555' into 'master'
remove mozilla affiliation

See merge request timvisee/send!13
2021-04-02 09:20:08 +00:00
Thomas
fc0e8708b9 Merge branch 'thomassth-master-patch-60115' into 'thomassth-master-patch-74555'
Update localization.md

See merge request thomassth/send!1
2021-04-02 07:03:19 +00:00
Thomas
d05eb3e882 Update localization.md 2021-04-02 06:58:32 +00:00
Thomas
db2a55115a Update README.md 2021-04-02 06:55:18 +00:00
timvisee
c6316f2dad
Merge branch 'dependabot/npm_and_yarn/y18n-4.0.1' into 'master'
Fixes https://github.com/timvisee/send/pull/13
2021-04-01 13:17:40 +02:00
dependabot[bot]
3d6611455a
Bump y18n from 4.0.0 to 4.0.1
Bumps [y18n](https://github.com/yargs/y18n) from 4.0.0 to 4.0.1.
- [Release notes](https://github.com/yargs/y18n/releases)
- [Changelog](https://github.com/yargs/y18n/blob/master/CHANGELOG.md)
- [Commits](https://github.com/yargs/y18n/commits)

Signed-off-by: dependabot[bot] <support@github.com>
2021-04-01 13:17:25 +02:00
timvisee
5b37d2fc87
Update dependencies 2021-04-01 13:10:37 +02:00
timvisee
2fa214ea6f
Add link to public instances list 2021-03-26 12:23:27 +01:00
timvisee
6dafdcdebd
Bump version to 3.4.5 2021-03-15 21:17:44 +01:00
timvisee
dc03b42b96
Update dependencies 2021-03-15 21:16:57 +01:00
Tim Visée
3e07f648b3 Merge branch 'remove-fxa-dialog-on-big-file' into 'master'
Remove FxA dialog on file too big error

See merge request timvisee/send!12
2021-03-15 20:15:27 +00:00
f58597cece Remove FxA dialog on file too big error 2021-03-15 20:58:47 +01:00
Tim Visée
d3f9b82672 Merge branch 'remove-metrics' into 'master'
Remove metrics

Closes #4

See merge request timvisee/send!11
2021-03-15 19:49:26 +00:00
a0bc20aeb6 Remove metrics #4 2021-03-15 19:56:51 +01:00
timvisee
d03e83dd66
Merge branch 'dependabot/npm_and_yarn/elliptic-6.5.4'
Fixes https://github.com/timvisee/send/pull/8
2021-03-11 16:22:08 +01:00
dependabot[bot]
94e80ccee9
Bump elliptic from 6.5.3 to 6.5.4
Bumps [elliptic](https://github.com/indutny/elliptic) from 6.5.3 to 6.5.4.
- [Release notes](https://github.com/indutny/elliptic/releases)
- [Commits](https://github.com/indutny/elliptic/compare/v6.5.3...v6.5.4)

Signed-off-by: dependabot[bot] <support@github.com>
2021-03-11 16:21:38 +01:00
Tim Visée
f8358c4dac Merge branch 'help-review' into 'master'
Improvement of the reverse proxy guidelines, the given configuration example...

See merge request timvisee/send!10
2021-03-07 13:27:11 +00:00
Florian HEGRON
ec3cff63a2 Improvement of the reverse proxy guidelines, the given configuration example requires to have the apache rewrite mod enabled. 2021-03-07 14:24:47 +01:00
Tim Visée
8f192482b5 Merge branch 'help-review' into 'master'
Delete dead links about testing environments in the README.md

See merge request timvisee/send!9
2021-03-07 09:07:17 +00:00
Florian HEGRON
808a04b669 Delete dead links about testing environments in the README.md 2021-03-07 09:03:20 +00:00
41 changed files with 2244 additions and 1790 deletions

4
.gitattributes vendored
View File

@ -1,2 +1,2 @@
public/locales/* linguist-documentation
docs/* linguist-documentation
public/locales/*/*.ftl linguist-documentation
docs/** linguist-documentation

1
.gitignore vendored
View File

@ -1,6 +1,7 @@
node_modules
coverage
dist
.env
.idea
.DS_Store
.nyc_output

View File

@ -49,6 +49,7 @@ Cynthia Pereira
Daniel Thorn
Daniela Arcese
Danny Coates
David Dumas
Davide
Derek Tamsen
Dhyey Thakore

View File

@ -6,7 +6,7 @@
# Build project
FROM node:15.5.1-alpine AS builder
FROM node:current-alpine AS builder
RUN set -x \
# Add user
&& addgroup --gid 10001 app \
@ -26,7 +26,7 @@ RUN set -x \
# Main image
FROM node:15.5.1-alpine
FROM node:current-alpine
RUN set -x \
# Add user
&& addgroup --gid 10001 app \

View File

@ -53,7 +53,7 @@ Thanks [Mozilla][mozilla] for building this amazing tool!
---
**Docs:** [FAQ](docs/faq.md), [Encryption](docs/encryption.md), [Build](docs/build.md), [Docker](docs/docker.md), [Metrics](docs/metrics.md), [More](docs/)
**Docs:** [FAQ](docs/faq.md), [Encryption](docs/encryption.md), [Build](docs/build.md), [Docker](docs/docker.md), [More](docs/)
---
@ -66,7 +66,7 @@ Thanks [Mozilla][mozilla] for building this amazing tool!
* [Configuration](#configuration)
* [Localization](#localization)
* [Contributing](#contributing)
* [Testing](#testing)
* [Instances](#instances)
* [Deployment](#deployment)
* [Clients](#clients)
* [License](#license)
@ -81,7 +81,7 @@ A file sharing experiment which allows you to send encrypted files to other user
## Requirements
- [Node.js 12.x](https://nodejs.org/)
- [Node.js 15.x](https://nodejs.org/)
- [Redis server](https://redis.io/) (optional for development)
- [AWS S3](https://aws.amazon.com/s3/) or compatible service (optional)
@ -121,31 +121,29 @@ The server is configured with environment variables. See [server/config.js](serv
## Localization
Send localization is managed via [Pontoon](https://pontoon.mozilla.org/projects/test-pilot-firefox-send/), not direct pull requests to the repository. If you want to fix a typo, add a new language, or simply know more about localization, please get in touch with the [existing localization team](https://pontoon.mozilla.org/teams/) for your language or Mozilla's [l10n-drivers](https://wiki.mozilla.org/L10n:Mozilla_Team#Mozilla_Corporation) for guidance.
see also [docs/localization.md](docs/localization.md)
See: [docs/localization.md](docs/localization.md)
---
## Contributing
Pull requests are always welcome! Feel free to check out the list of ["good first issues"](https://github.com/mozilla/send/issues?q=is%3Aopen+is%3Aissue+label%3A%22good+first+issue%22).
Pull requests are always welcome! Feel free to check out the list of "good first issues" (to be implemented).
---
## Testing
## Instances
| ENVIRONMENT | URL
|-------------|-----
| Production | <https://send.firefox.com/>
| Stage | <https://stage.send.nonprod.cloudops.mozgcp.net/>
| Development | <https://send2.dev.lcip.org/>
Find a list of public instances here: https://github.com/timvisee/send-instances/
---
## Deployment
see also [docs/deployment.md](docs/deployment.md)
See: [docs/deployment.md](docs/deployment.md)
Docker quickstart: [docs/docker.md](docs/docker.md)
AWS example using Ubuntu Server `20.04`: [docs/AWS.md](docs/AWS.md)
---

View File

@ -4,7 +4,6 @@ import html from 'choo/html';
import * as Sentry from '@sentry/browser';
import { setApiUrlPrefix, getConstants } from '../app/api';
import metrics from '../app/metrics';
//import assets from '../common/assets';
import Archive from '../app/archive';
import Header from '../app/ui/header';
@ -83,7 +82,6 @@ function body(main) {
state.user = new User(storage, LIMITS);
state.sentry = Sentry;
});
app.use(metrics);
app.route('/', body(home));
app.route('/upload', upload);
app.route('/share/:id', share);

View File

@ -420,17 +420,6 @@ export async function setFileList(bearerToken, kid, data) {
return response.ok;
}
export function sendMetrics(blob) {
if (!navigator.sendBeacon) {
return;
}
try {
navigator.sendBeacon(getApiUrl('/api/metrics'), blob);
} catch (e) {
console.error(e);
}
}
export async function getConstants() {
const response = await fetch(getApiUrl('/config'));

View File

@ -1,4 +1,3 @@
import * as metrics from './metrics';
import FileReceiver from './fileReceiver';
import FileSender from './fileSender';
import copyDialog from './ui/copyDialog';
@ -54,7 +53,6 @@ export default function(state, emitter) {
emitter.on('logout', async () => {
await state.user.logout();
metrics.loggedOut({ trigger: 'button' });
emitter.emit('pushState', '/');
});
@ -68,14 +66,6 @@ export default function(state, emitter) {
emitter.on('delete', async ownedFile => {
try {
metrics.deletedUpload({
size: ownedFile.size,
time: ownedFile.time,
speed: ownedFile.speed,
type: ownedFile.type,
ttl: ownedFile.expiresAt - Date.now(),
location
});
state.storage.remove(ownedFile.id);
await ownedFile.del();
} catch (e) {
@ -101,9 +91,6 @@ export default function(state, emitter) {
state.LIMITS.MAX_FILES_PER_ARCHIVE
);
} catch (e) {
if (e.message === 'fileTooBig' && maxSize < state.LIMITS.MAX_FILE_SIZE) {
return emitter.emit('signup-cta', 'size');
}
state.modal = okDialog(
state.translate(e.message, {
size: bytes(maxSize),
@ -123,7 +110,7 @@ export default function(state, emitter) {
source: query.utm_source,
term: query.utm_term
});
state.modal = signupDialog(source);
state.modal = signupDialog();
render();
});
@ -159,12 +146,9 @@ export default function(state, emitter) {
const links = openLinksInNewTab();
await delay(200);
const start = Date.now();
try {
const ownedFile = await sender.upload(archive, state.user.bearerToken);
state.storage.totalUploads += 1;
const duration = Date.now() - start;
metrics.completedUpload(archive, duration);
faviconProgressbar.updateFavicon(0);
state.storage.addFile(ownedFile);
@ -181,7 +165,6 @@ export default function(state, emitter) {
} catch (err) {
if (err.message === '0') {
//cancelled. do nothing
metrics.cancelledUpload(archive, err.duration);
render();
} else if (err.message === '401') {
const refreshed = await state.user.refresh();
@ -197,7 +180,6 @@ export default function(state, emitter) {
scope.setExtra('size', err.size);
state.sentry.captureException(err);
});
metrics.stoppedUpload(archive, err.duration);
emitter.emit('pushState', '/error');
}
} finally {
@ -249,13 +231,11 @@ export default function(state, emitter) {
render();
});
emitter.on('download', async file => {
emitter.on('download', async () => {
state.transfer.on('progress', updateProgress);
state.transfer.on('decrypting', render);
state.transfer.on('complete', render);
const links = openLinksInNewTab();
const size = file.size;
const start = Date.now();
try {
const dl = state.transfer.download({
stream: state.capabilities.streamDownload
@ -263,12 +243,6 @@ export default function(state, emitter) {
render();
await dl;
state.storage.totalDownloads += 1;
const duration = Date.now() - start;
metrics.completedDownload({
size,
duration,
password_protected: file.requiresPassword
});
faviconProgressbar.updateFavicon(0);
} catch (err) {
if (err.message === '0') {
@ -286,12 +260,6 @@ export default function(state, emitter) {
scope.setExtra('progress', err.progress);
state.sentry.captureException(err);
});
const duration = Date.now() - start;
metrics.stoppedDownload({
size,
duration,
password_protected: file.requiresPassword
});
}
emitter.emit('pushState', location);
}
@ -302,7 +270,6 @@ export default function(state, emitter) {
emitter.on('copy', ({ url }) => {
copyToClipboard(url);
// metrics.copiedLink({ location });
});
emitter.on('closeModal', () => {

View File

@ -118,7 +118,7 @@ details {
overflow: hidden;
}
details > summary::-webkit-details-marker {
details > summary::marker {
display: none;
}

View File

@ -10,7 +10,6 @@ import controller from './controller';
import dragManager from './dragManager';
import pasteManager from './pasteManager';
import storage from './storage';
import metrics from './metrics';
import experiments from './experiments';
import * as Sentry from '@sentry/browser';
import './main.css';
@ -68,7 +67,6 @@ if (process.env.NODE_ENV === 'production') {
// eslint-disable-next-line require-atomic-updates
window.app = app;
app.use(experiments);
app.use(metrics);
app.use(controller);
app.use(dragManager);
app.use(pasteManager);

View File

@ -1,186 +0,0 @@
import storage from './storage';
import { platform, locale } from './utils';
import { sendMetrics } from './api';
let appState = null;
let experiment = null;
const HOUR = 1000 * 60 * 60;
const events = [];
let session_id = Date.now();
const lang = locale();
export default function initialize(state, emitter) {
appState = state;
emitter.on('DOMContentLoaded', () => {
experiment = storage.enrolled;
if (!appState.user.firstAction) {
appState.user.firstAction =
appState.route === '/' ? 'upload' : 'download';
}
const query = appState.query;
addEvent('client_visit', {
entrypoint: appState.route === '/' ? 'upload' : 'download',
referrer: document.referrer,
utm_campaign: query.utm_campaign,
utm_content: query.utm_content,
utm_medium: query.utm_medium,
utm_source: query.utm_source,
utm_term: query.utm_term
});
});
emitter.on('experiment', experimentEvent);
window.addEventListener('unload', submitEvents);
}
function sizeOrder(n) {
return Math.floor(Math.log10(n));
}
function submitEvents() {
if (navigator.doNotTrack === '1') {
return;
}
sendMetrics(
new Blob(
[
JSON.stringify({
now: Date.now(),
session_id,
lang,
platform: platform(),
events
})
],
{ type: 'text/plain' } // see http://crbug.com/490015
)
);
events.splice(0);
}
async function addEvent(event_type, event_properties) {
const user_id = await appState.user.metricId();
const device_id = await appState.user.deviceId();
const ab_id = Object.keys(experiment)[0];
if (ab_id) {
event_properties.experiment = ab_id;
event_properties.variant = experiment[ab_id];
}
events.push({
device_id,
event_properties,
event_type,
time: Date.now(),
user_id,
user_properties: {
anonymous: !appState.user.loggedIn,
first_action: appState.user.firstAction,
active_count: storage.files.length
}
});
if (events.length === 25) {
submitEvents();
}
}
function cancelledUpload(archive, duration) {
return addEvent('client_upload', {
download_limit: archive.dlimit,
duration: sizeOrder(duration),
file_count: archive.numFiles,
password_protected: !!archive.password,
size: sizeOrder(archive.size),
status: 'cancel',
time_limit: archive.timeLimit
});
}
function completedUpload(archive, duration) {
return addEvent('client_upload', {
download_limit: archive.dlimit,
duration: sizeOrder(duration),
file_count: archive.numFiles,
password_protected: !!archive.password,
size: sizeOrder(archive.size),
status: 'ok',
time_limit: archive.timeLimit
});
}
function stoppedUpload(archive, duration = 0) {
return addEvent('client_upload', {
download_limit: archive.dlimit,
duration: sizeOrder(duration),
file_count: archive.numFiles,
password_protected: !!archive.password,
size: sizeOrder(archive.size),
status: 'error',
time_limit: archive.timeLimit
});
}
function stoppedDownload(params) {
return addEvent('client_download', {
duration: sizeOrder(params.duration),
password_protected: params.password_protected,
size: sizeOrder(params.size),
status: 'error'
});
}
function completedDownload(params) {
return addEvent('client_download', {
duration: sizeOrder(params.duration),
password_protected: params.password_protected,
size: sizeOrder(params.size),
status: 'ok'
});
}
function deletedUpload(ownedFile) {
return addEvent('client_delete', {
age: Math.floor((Date.now() - ownedFile.createdAt) / HOUR),
downloaded: ownedFile.dtotal > 0,
status: 'ok'
});
}
function experimentEvent(params) {
return addEvent('client_experiment', params);
}
function submittedSignup(params) {
return addEvent('client_login', {
status: 'ok',
trigger: params.trigger
});
}
function canceledSignup(params) {
return addEvent('client_login', {
status: 'cancel',
trigger: params.trigger
});
}
function loggedOut(params) {
addEvent('client_logout', {
status: 'ok',
trigger: params.trigger
});
// flush events and start new anon session
submitEvents();
session_id = Date.now();
}
export {
cancelledUpload,
stoppedUpload,
completedUpload,
deletedUpload,
stoppedDownload,
completedDownload,
submittedSignup,
canceledSignup,
loggedOut
};

View File

@ -580,7 +580,7 @@ module.exports.preview = function(state, emit) {
function download(event) {
event.preventDefault();
event.target.disabled = true;
emit('download', archive);
emit('download');
}
};

View File

@ -31,12 +31,11 @@ module.exports = function(state, emit) {
counts,
num => state.translate('downloadCount', { num }),
value => {
const max = state.user.maxDownloads;
state.archive.dlimit = Math.min(value, max);
if (value > max) {
emit('signup-cta', 'count');
} else {
emit('render');
const selected = parseInt(value);
state.archive.dlimit = selected;
emit('render');
if (selected > parseInt(state.user.maxDownloads || '0')) {
console.log('Chosen max download count is larger than the allowed limit', selected)
}
},
'expire-after-dl-count-select'
@ -58,12 +57,11 @@ module.exports = function(state, emit) {
return state.translate(l10n.id, l10n);
},
value => {
const max = state.user.maxExpireSeconds;
state.archive.timeLimit = Math.min(value, max);
if (value > max) {
emit('signup-cta', 'time');
} else {
emit('render');
const selected = parseInt(value);
state.archive.timeLimit = selected;
emit('render');
if (selected > parseInt(state.user.maxExpireSeconds || '0')) {
console.log('Chosen download expiration is larger than the allowed limit', selected)
}
},
'expire-after-time-select'

View File

@ -100,7 +100,7 @@ module.exports = function(state, emit) {
);
break;
case 'download':
emit('download', archive);
emit('download');
break;
}
return false;

View File

@ -1,32 +1,28 @@
const html = require('choo/html');
module.exports = function(selected, options, translate, changed, htmlId) {
let x = selected;
function choose(event) {
if (event.target.value != selected) {
console.log('Selected new value from dropdown', htmlId, ':', selected, '->', event.target.value)
changed(event.target.value);
}
}
return html`
<select
id="${htmlId}"
class="appearance-none cursor-pointer border rounded bg-grey-10 hover:border-blue-50 focus:border-blue-50 pl-1 pr-8 py-1 my-1 h-8 dark:bg-grey-80"
data-selected="${selected}"
onchange="${choose}"
>
${options.map(
i =>
value =>
html`
<option value="${i}" ${i === selected ? 'selected' : ''}
>${translate(i)}</option
>
<option value="${value}" ${value == selected ? 'selected' : ''}>
${translate(value)}
</option>
`
)}
</select>
`;
function choose(event) {
const target = event.target;
const value = +target.value;
if (x !== value) {
x = value;
changed(value);
}
}
};

View File

@ -1,9 +1,8 @@
const html = require('choo/html');
const assets = require('../../common/assets');
const { bytes } = require('../utils');
const { canceledSignup, submittedSignup } = require('../metrics');
module.exports = function(trigger) {
module.exports = function() {
return function(state, emit, close) {
const DAYS = Math.floor(state.LIMITS.MAX_EXPIRE_SECONDS / 86400);
let submitting = false;
@ -72,7 +71,6 @@ module.exports = function(trigger) {
}
function cancel(event) {
canceledSignup({ trigger });
close(event);
}
@ -85,7 +83,6 @@ module.exports = function(trigger) {
const el = document.getElementById('email-input');
const email = el.value;
submittedSignup({ trigger });
emit('login', emailish(email) ? email : null);
}
};

View File

@ -11,7 +11,7 @@ module.exports = function(state, emit) {
why = html`
<a
class="text-blue"
href="https://github.com/mozilla/send/blob/master/docs/faq.md#why-is-my-browser-not-supported"
href="https://github.com/timvisee/send/blob/master/docs/faq.md#why-is-my-browser-not-supported"
>
${state.translate('notSupportedLink')}
</a>

View File

@ -81,21 +81,15 @@ export default class User {
}
get maxSize() {
return this.loggedIn
? this.limits.MAX_FILE_SIZE
: this.limits.ANON.MAX_FILE_SIZE;
return this.limits.MAX_FILE_SIZE;
}
get maxExpireSeconds() {
return this.loggedIn
? this.limits.MAX_EXPIRE_SECONDS
: this.limits.ANON.MAX_EXPIRE_SECONDS;
return this.limits.MAX_EXPIRE_SECONDS;
}
get maxDownloads() {
return this.loggedIn
? this.limits.MAX_DOWNLOADS
: this.limits.ANON.MAX_DOWNLOADS;
return this.limits.MAX_DOWNLOADS;
}
async metricId() {
@ -109,27 +103,8 @@ export default class User {
async startAuthFlow(trigger, utms = {}) {
this.utms = utms;
this.trigger = trigger;
try {
const params = new URLSearchParams({
entrypoint: `send-${trigger}`,
form_type: 'email',
utm_source: utms.source || 'send',
utm_campaign: utms.campaign || 'none'
});
const res = await fetch(
`${this.authConfig.issuer}/metrics-flow?${params.toString()}`,
{
mode: 'cors'
}
);
const { flowId, flowBeginTime } = await res.json();
this.flowId = flowId;
this.flowBeginTime = flowBeginTime;
} catch (e) {
console.error(e);
this.flowId = null;
this.flowBeginTime = null;
}
this.flowId = null;
this.flowBeginTime = null;
}
async login(email) {

236
docs/AWS.md Normal file
View File

@ -0,0 +1,236 @@
# Deployment to AWS
This document describes how to deploy Send on AWS.
## AWS requirements
### Security groups (2)
* ALB:
- inbound: allow traffic from anywhere on port 80 and 443
- outbound: allow traffic to the instance security group on port `8080`
* Instance:
- inbound: allow SSH from your public IP or a bastion (changing the default SSH port is a good idea)
- inbound: allow traffic from the ALB security group on port `8080`
- outbound: allow all traffic to anywhere
### Resources
* An S3 bucket (block all public access)
* A private EC2 instance running Ubuntu `20.04` (you can use the [Amazon EC2 AMI Locator](https://cloud-images.ubuntu.com/locator/ec2/) to find the latest)
Attach an IAM role to the instance with the following inline policy (an optional AWS CLI sketch for creating and attaching the role follows this list):
```json
{
"Version": "2012-10-17",
"Statement": [
{
"Action": [
"s3:ListAllMyBuckets"
],
"Resource": [
"*"
],
"Effect": "Allow"
},
{
"Action": [
"s3:ListBucket",
"s3:GetBucketLocation",
"s3:ListBucketMultipartUploads"
],
"Resource": [
"arn:aws:s3:::<s3_bucket_name>"
],
"Effect": "Allow"
},
{
"Action": [
"s3:GetObject",
"s3:GetObjectVersion",
"s3:ListMultipartUploadParts",
"s3:PutObject",
"s3:AbortMultipartUpload",
"s3:DeleteObject",
"s3:DeleteObjectVersion"
],
"Resource": [
"arn:aws:s3:::<s3_bucket_name>/*"
],
"Effect": "Allow"
}
]
}
```
* A public ALB:
- Create a target group with the instance registered (HTTP on port `8080` and path `/`)
- Configure HTTP (port 80) to redirect to HTTPS (port 443)
- HTTPS (port 443) using the latest security policy and an ACM certificate like `send.mydomain.com`
* A Route53 public record, alias from `send.mydomain.com` to the ALB
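If you prefer the AWS CLI over the console, the inline policy above can be created and attached roughly as follows. This is a sketch: the role, profile, and instance-ID values are placeholders, and the trust policy is the standard EC2 one.
```bash
# Save the inline policy above as send-s3-policy.json, then:
aws iam create-role --role-name send-s3-role \
  --assume-role-policy-document '{"Version":"2012-10-17","Statement":[{"Effect":"Allow","Principal":{"Service":"ec2.amazonaws.com"},"Action":"sts:AssumeRole"}]}'
aws iam put-role-policy --role-name send-s3-role \
  --policy-name send-s3-access --policy-document file://send-s3-policy.json
aws iam create-instance-profile --instance-profile-name send-s3-profile
aws iam add-role-to-instance-profile --instance-profile-name send-s3-profile --role-name send-s3-role
# Attach the profile to the (already running) EC2 instance:
aws ec2 associate-iam-instance-profile --instance-id i-0123456789abcdef0 \
  --iam-instance-profile Name=send-s3-profile
```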
## Software requirements
* Git
* NodeJS `15.x`
* Local Redis server
### Prerequisite packages
```bash
sudo apt update
sudo apt install -y apt-transport-https ca-certificates curl software-properties-common
```
### Add repositories
* NodeJS `15.x` (see [package.json](../package.json) for the required version):
```bash
curl -fsSL https://deb.nodesource.com/gpgkey/nodesource.gpg.key | sudo apt-key add -
echo 'deb [arch=amd64] https://deb.nodesource.com/node_15.x focal main' | sudo tee /etc/apt/sources.list.d/nodejs.list
```
* Git (latest)
```bash
sudo add-apt-repository ppa:git-core/ppa
```
* Redis (latest)
```bash
sudo add-apt-repository ppa:redislabs/redis
```
### Install required packages
```bash
sudo apt update
sudo apt install git nodejs redis-server telnet
```
### Redis server
#### Password (optional)
Generate a strong password:
```bash
makepasswd --chars=100
```
Edit Redis configuration file `/etc/redis/redis.conf`:
```bash
requirepass <redis_password>
```
_Note: documentation on securing Redis https://redis.io/topics/security_
#### Systemd
Enable and (re)start the Redis server service:
```bash
sudo systemctl enable redis-server
sudo systemctl restart redis-server
sudo systemctl status redis-server
```
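If you set a password, it's worth confirming that authentication works before moving on. A quick check (assumes `redis-cli`, which is installed alongside `redis-server` on Ubuntu):
```bash
# Should reply PONG when the password is accepted
redis-cli -a '<redis_password>' ping
```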
## Website directory
Set up a directory for the data:
```
sudo mkdir -pv /var/www/send
sudo chown www-data:www-data /var/www/send
sudo chmod 750 /var/www/send
```
### NodeJS
Update npm:
```bash
sudo npm install -g npm
```
Checkout current NodeJS and npm versions:
```bash
node --version
npm --version
```
Clone the repository, install the JavaScript packages, and compile the assets:
```bash
sudo su -l www-data -s /bin/bash
cd /var/www/send
git clone https://gitlab.com/timvisee/send.git .
npm install
npm run build
exit
```
Create the file `/var/www/send/.env`, used by Systemd, with your environment variables
(see [config.js](../server/config.js) for more configuration variables):
```
BASE_URL='https://send.mydomain.com'
NODE_ENV='production'
PORT='8080'
REDIS_PASSWORD='<redis_password>'
S3_BUCKET='<s3_bucket_name>'
```
Lower file and folder permissions for user and group `www-data`:
```
sudo find /var/www/send -type d -exec chmod 750 {} \;
sudo find /var/www/send -type f -exec chmod 640 {} \;
sudo find -L /var/www/send/node_modules/.bin/ -exec chmod 750 {} \;
```
### Systemd
Create the file `/etc/systemd/system/send.service` with `root` user and `644` mode:
```
[Unit]
Description=Send
After=network.target
Requires=redis-server.service
Documentation=https://gitlab.com/timvisee/send
[Service]
Type=simple
ExecStart=/usr/bin/npm run prod
EnvironmentFile=/var/www/send/.env
WorkingDirectory=/var/www/send
User=www-data
Group=www-data
Restart=on-failure
[Install]
WantedBy=multi-user.target
```
_Note: the service could be further secured by restricting its system permissions;
check the exposure score with `systemd-analyze security send`_
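As one possible hardening sketch (these directives are illustrative options, not part of the documented setup; re-check the score with `systemd-analyze security send` afterwards), a drop-in file can restrict what the service may access:
```bash
sudo mkdir -p /etc/systemd/system/send.service.d
sudo tee /etc/systemd/system/send.service.d/hardening.conf > /dev/null <<'EOF'
[Service]
NoNewPrivileges=true
PrivateTmp=true
ProtectSystem=strict
ProtectHome=true
ReadWritePaths=/var/www/send
EOF
```
The `daemon-reload` in the next step picks this drop-in up.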
Enable and start the Send service, check logs:
```
sudo systemctl daemon-reload
sudo systemctl enable send
sudo systemctl start send
sudo systemctl status send
journalctl -fu send
```
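Before registering the instance in the ALB target group, a quick local check confirms the backend answers on the configured port (a sketch; `8080` matches the `PORT` value in the `.env` above):
```bash
curl -sI http://localhost:8080/ | head -n 1
```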

View File

@ -1,68 +1,96 @@
## Requirements
This document describes how to do a full deployment of Send on your own Linux server. You will need:
* A working (and ideally somewhat recent) installation of NodeJS and NPM
* GIT
* An Apache webserver
* A working (and ideally somewhat recent) installation of NodeJS and npm
* Git
* Apache webserver
* Optionally telnet, to be able to quickly check your installation
For Debian/Ubuntu systems this probably just means something like this:
For example in Debian/Ubuntu systems:
* apt install git apache2 nodejs npm telnet
```bash
sudo apt install git apache2 nodejs npm telnet
```
## Building
* We assume an already configured virtual-host on your webserver with an existing empty htdocs folder
* First, remove that htdocs folder - we will replace it with Send's version now
* git clone https://github.com/mozilla/send.git htdocs
* git clone https://github.com/timvisee/send.git htdocs
* Now make sure you are NOT root, but rather the user your webserver serves files as (e.g. "su www-data", or whoever owns your htdocs folder)
* npm install
* npm run build
## Running
To have a permanently running version of Send as a background process:
* Create a file "run.sh" with:
```
* Create a file `run.sh` with:
```bash
#!/bin/bash
nohup su www-data -c "npm run prod" 2>/dev/null &
```
* chmod +x run.sh
* ./run.sh
* Execute the script:
```bash
chmod +x run.sh
./run.sh
```
Now the Send backend should be running on port 1443. You can check with:
* telnet localhost 1443
```bash
telnet localhost 1443
```
## Reverse Proxy
Of course, we don't want to expose the service on port 1443. Instead we want our normal webserver to forward all requests to Send ("Reverse proxy").
# Apache webserver
* a2enmod proxy
* a2enmod proxy_http
* a2enmod proxy_wstunnel
* Enable the required Apache modules:
In your Apache virtual host configuration file, insert this:
```bash
sudo a2enmod headers
sudo a2enmod proxy
sudo a2enmod proxy_http
sudo a2enmod proxy_wstunnel
sudo a2enmod rewrite
```
* Edit your Apache virtual host configuration file, insert this:
```
# Enable rewrite engine
RewriteEngine on
# Enable rewrite engine
RewriteEngine on
# Make sure the original domain name is forwarded to Send
# Otherwise the generated URLs will be wrong
ProxyPreserveHost on
# Make sure the original domain name is forwarded to Send
# Otherwise the generated URLs will be wrong
ProxyPreserveHost on
# Make sure the generated URL is https://
RequestHeader set X-Forwarded-Proto https
# Make sure the generated URL is https://
RequestHeader set X-Forwarded-Proto https
# If it's a normal file (e.g. PNG, CSS) just return it
RewriteCond %{REQUEST_FILENAME} -f
RewriteRule .* - [L]
# If it's a normal file (e.g. PNG, CSS) just return it
RewriteCond %{REQUEST_FILENAME} -f
RewriteRule .* - [L]
# If it's a websocket connection, redirect it to a Send WS connection
RewriteCond %{HTTP:Upgrade} =websocket [NC]
RewriteRule /(.*) ws://127.0.0.1:1443/$1 [P,L]
# If it's a websocket connection, redirect it to a Send WS connection
RewriteCond %{HTTP:Upgrade} =websocket [NC]
RewriteRule /(.*) ws://127.0.0.1:1443/$1 [P,L]
# Otherwise redirect it to a normal HTTP connection
RewriteRule ^/(.*)$ http://127.0.0.1:1443/$1 [P,QSA]
ProxyPassReverse "/" "http://127.0.0.1:1443"
# Otherwise redirect it to a normal HTTP connection
RewriteRule ^/(.*)$ http://127.0.0.1:1443/$1 [P,QSA]
ProxyPassReverse "/" "http://127.0.0.1:1443"
```
* Test configuration and restart Apache:
```bash
sudo apache2ctl configtest
sudo systemctl restart apache2
```
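After the restart, a quick request through Apache confirms the rewrite and proxy rules are active (a sketch; substitute your own domain):
```bash
curl -sI https://send.example.com/ | head -n 1
```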

View File

@ -1,45 +1,119 @@
## Setup
## Docker Quickstart
Use `registry.gitlab.com/timvisee/send:latest` from [`timvisee/send`'s registry](https://gitlab.com/timvisee/send/container_registry) for the latest Docker image.
Use `registry.gitlab.com/timvisee/send:latest` from [`timvisee/send`'s GitLab image registry](https://gitlab.com/timvisee/send/container_registry) for the latest Docker image.
```bash
docker pull registry.gitlab.com/timvisee/send:latest
# example quickstart (point REDIS_HOST to an already-running redis server)
docker run -v $PWD/uploads:/uploads -p 1443:1443 \
-e 'DETECT_BASE_URL=true' \
-e 'REDIS_HOST=localhost' \
registry.gitlab.com/timvisee/send:latest
```
Or run `docker build -t send:latest .` to create an image locally or `docker-compose up` to run a full testable stack. *We don't recommend using docker-compose for production.*
Or clone this repo and run `docker build -t send:latest .` to build an image locally.
## Environment variables:
*Note: for Docker Compose, see: https://github.com/timvisee/send-docker-compose*
| Name | Description
## Environment Variables
All the available config options and their defaults can be found here: https://github.com/timvisee/send/blob/master/server/config.js
Config options should be set as unquoted environment variables. Boolean options should be `true`/`false`, time/duration should be integers (seconds), and filesize values should be integers (bytes).
Config options expecting array values (e.g. `EXPIRE_TIMES_SECONDS`, `DOWNLOAD_COUNTS`) should be in unquoted CSV format. UI dropdowns will default to the first value in the CSV, e.g. `DOWNLOAD_COUNTS=5,1,10,100` will show four dropdown options, with `5` selected by default.
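For example, a minimal sketch passing each kind of option (the values are placeholders, not recommendations):
```bash
# add REDIS_HOST etc. as in the quickstart above; these lines only illustrate the value formats
docker run -p 1443:1443 \
  -e 'DETECT_BASE_URL=true' \
  -e 'MAX_EXPIRE_SECONDS=86400' \
  -e 'MAX_FILE_SIZE=1073741824' \
  -e 'DOWNLOAD_COUNTS=5,1,10,100' \
  registry.gitlab.com/timvisee/send:latest
```
Here `86400` is one day in seconds, `1073741824` is 1 GB in bytes, and `5` becomes the pre-selected value in the downloads dropdown.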
#### Server Configuration
| Name | Description |
|------------------|-------------|
| `BASE_URL` | The HTTPS URL where traffic will be served (e.g. `https://send.firefox.com`)
| `PORT` | Port the server will listen on (defaults to 1443).
| `NODE_ENV` | `"production"`
| `FILE_DIR` | Uploads directory for local storage
| `S3_BUCKET` | The S3 bucket name.
| `S3_ENDPOINT`| Optional custom S3 endpoint host.
| `S3_USE_PATH_STYLE_ENDPOINTS`| `true` or `false`
| `AWS_ACCESS_KEY_ID` | S3 access key ID
| `AWS_SECRET_ACCESS_KEY` | S3 secret access key ID
| `MAX_FILE_SIZE` | Maximum upload file size in bytes (defaults to 2147483648)
| `MAX_EXPIRE_SECONDS` | Maximum upload expiry time in seconds (defaults to 604800)
| `REDIS_HOST` | Host name of the Redis server.
| `SENTRY_CLIENT` | Sentry Client ID
| `SENTRY_DSN` | Sentry DSN
| `DETECT_BASE_URL` | Autodetect the base URL from the request (protocol and host header) if `BASE_URL` is unset (defaults to `false`)
| `PORT` | Port the server will listen on (defaults to `1443`)
| `NODE_ENV` | Run in `development` mode (unsafe) or `production` mode (the default)
| `SEND_FOOTER_DMCA_URL` | A URL to a contact page for DMCA requests (empty / not shown by default)
| `SENTRY_CLIENT`, `SENTRY_DSN` | Sentry client ID and DSN for error tracking (optional, disabled by default)
## Example:
*Note: more options can be found here: https://github.com/timvisee/send/blob/master/server/config.js*
#### Upload and Download Limits
Configure the limits for uploads and downloads. Long expiration times are risky on public servers as people may use you as free hosting for copyrighted content or malware (which is why Mozilla shut down their `send` service). It's advised to only expose your service on a LAN/intranet, password protect it with a proxy/gateway, or make sure to set `SEND_FOOTER_DMCA_URL` above so you can respond to takedown requests.
| Name | Description |
|------------------|-------------|
| `MAX_FILE_SIZE` | Maximum upload file size in bytes (defaults to `2147483648` aka 2GB)
| `MAX_FILES_PER_ARCHIVE` | Maximum number of files per archive (defaults to `64`)
| `MAX_EXPIRE_SECONDS` | Maximum upload expiry time in seconds (defaults to `604800` aka 7 days)
| `MAX_DOWNLOADS` | Maximum number of downloads (defaults to `100`)
| `DOWNLOAD_COUNTS` | Download limit options to show in UI dropdown, e.g. `10,1,2,5,10,15,25,50,100,1000`
| `EXPIRE_TIMES_SECONDS` | Expire time options to show in UI dropdown, e.g. `3600,86400,604800,2592000,31536000`
*Note: more options can be found here: https://github.com/timvisee/send/blob/master/server/config.js*
#### Storage Backend Options
Pick how you want to store uploaded files and set these config options accordingly:
- Local filesystem (the default): set `FILE_DIR` to the local path used inside the container for storage (or leave the default)
- S3-compatible object store: set `S3_BUCKET`, `AWS_ACCESS_KEY_ID`, `AWS_SECRET_ACCESS_KEY` (and `S3_ENDPOINT` if using something other than AWS)
- Google Cloud Storage: set `GCS_BUCKET` to the name of a GCS bucket (auth should be set up using [Application Default Credentials](https://cloud.google.com/docs/authentication/production#auth-cloud-implicit-nodejs))
Redis is used as the metadata database for the backend and is required no matter which storage method you use.
| Name | Description |
|------------------|-------------|
| `REDIS_HOST`, `REDIS_PORT`, `REDIS_USER`, `REDIS_PASSWORD`, `REDIS_DB` | Host name, port, user, password, and database of the Redis server (defaults to `localhost`, `6379`, and no auth)
| `FILE_DIR` | Directory for storage inside the Docker container (defaults to `/uploads`)
| `S3_BUCKET` | The S3 bucket name to use (only set if using S3 for storage)
| `S3_ENDPOINT` | An optional custom endpoint to use for S3 (defaults to AWS)
| `S3_USE_PATH_STYLE_ENDPOINT`| Whether to force [path style URLs](https://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/Config.html#s3ForcePathStyle-property) for S3 objects (defaults to `false`)
| `AWS_ACCESS_KEY_ID` | S3 access key ID (only set if using S3 for storage)
| `AWS_SECRET_ACCESS_KEY` | S3 secret access key (only set if using S3 for storage)
| `GCS_BUCKET` | Google Cloud Storage bucket (only set if using GCP for storage)
*Note: more options can be found here: https://github.com/timvisee/send/blob/master/server/config.js*
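As a rough sketch of the Google Cloud Storage variant (bucket name, key path, and Redis host are placeholders; `GOOGLE_APPLICATION_CREDENTIALS` is the standard Application Default Credentials variable):
```bash
docker run -p 1443:1443 \
  -v $PWD/gcs-key.json:/gcs-key.json:ro \
  -e 'GOOGLE_APPLICATION_CREDENTIALS=/gcs-key.json' \
  -e 'GCS_BUCKET=my-send-bucket' \
  -e 'REDIS_HOST=my-redis-host' \
  registry.gitlab.com/timvisee/send:latest
```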
## Examples
**Run using Amazon ElastiCache for the Redis DB, Amazon S3 for the storage backend, and Sentry for error reporting.**
```bash
$ docker run --net=host -e 'NODE_ENV=production' \
$ docker run -p 1443:1443 \
-e 'S3_BUCKET=testpilot-p2p-dev' \
-e 'REDIS_HOST=dyf9s2r4vo3.bolxr4.0001.usw2.cache.amazonaws.com' \
-e 'SENTRY_CLIENT=https://51e23d7263e348a7a3b90a5357c61cb2@sentry.prod.mozaws.net/168' \
-e 'SENTRY_DSN=https://51e23d7263e348a7a3b90a5357c61cb2:65e23d7263e348a7a3b90a5357c61c44@sentry.prod.mozaws.net/168' \
-e 'BASE_URL=https://send.firefox.com' \
-e 'BASE_URL=https://send.example.com' \
registry.gitlab.com/timvisee/send:latest
```
## Docker compose
*Note: make sure to replace the example values above with your real values before running.*
**Run totally self-hosted using the current filesystem directory (`$PWD`) to store the Redis data and file uploads, with a `5GB` upload limit, a 1-month expiry, and a contact URL set.**
```bash
# create a network for the send backend and redis containers to talk to each other
$ docker network create timviseesend
# start the redis container (named so the send container can reach it by host name)
$ docker run --net=timviseesend --name send_redis -v $PWD/redis:/data redis redis-server --appendonly yes
# start the send backend container, pointing it at the redis container above
$ docker run --net=timviseesend -v $PWD/uploads:/uploads -p 1443:1443 \
-e 'REDIS_HOST=send_redis' \
-e 'BASE_URL=http://localhost:1443' \
-e 'MAX_FILE_SIZE=5368709120' \
-e 'MAX_EXPIRE_SECONDS=2592000' \
-e 'SEND_FOOTER_DMCA_URL=https://example.com/dmca-contact-info' \
registry.gitlab.com/timvisee/send:latest
```
Then open http://localhost:1443 to view the UI. (change the `localhost` to your IP or hostname above to serve the UI to others)
To run with HTTPS, you will need to set up a reverse proxy with SSL termination in front of the backend. See Docker Compose below for an example setup.
## Docker Compose
For a Docker compose configuration example, see:

View File

@ -23,10 +23,10 @@ Send uses JavaScript to:
- Encrypt and decrypt files locally on the client instead of the server.
- Render the user interface.
- Manage translations on the website into [various different languages](https://github.com/mozilla/send#localization).
- Manage translations on the website into [various different languages](https://github.com/timvisee/send#localization).
- Collect data to help us improve Send in accordance with our [Terms & Privacy](https://send.firefox.com/legal).
Since Send is an open source project, you can see all of the cool ways we use JavaScript by [examining our code](https://github.com/mozilla/send/).
Since Send is an open source project, you can see all of the cool ways we use JavaScript by [examining our code](https://github.com/timvisee/send/).
## How long are files available for?

View File

@ -1,6 +1,6 @@
# Localization
Send is localized in over 50 languages. We use the [fluent](http://projectfluent.org/) library and store our translations in [FTL](http://projectfluent.org/fluent/guide/) files in `public/locales/`. `en-US` is our base language, and other languages are managed by [pontoon](https://pontoon.mozilla.org/projects/test-pilot-firefox-send/).
Send is localized in over 50 languages. We use the [fluent](http://projectfluent.org/) library and store our translations in [FTL](http://projectfluent.org/fluent/guide/) files in `public/locales/`. `en-US` is our base language.
## Process

View File

@ -1,128 +0,0 @@
# Send V2 Metrics Definitions
## Key Value Prop
Quickly and privately transfer large files from any device to any device.
## Key Business Question to Answer
Is the value proposition of a large encrypted file transfer service enough to drive Firefox Account relationships for non-Firefox users.
## Hypotheses to Test
### Primary - In support of Relationships KPI
We believe that a privacy-respecting file transfer service can drive Firefox Accounts beyond the Firefox Browser.
We will know this to be true when we see 250k Firefox Account creations from non-Firefox contexts w/in six months of launch.
### Secondary - In support of Revenue KPI
We believe that a privacy respecting service accessible beyond the reach of Firefox will provide a valuable platform to research, communicate with, and market to conscious choosers we have traditionally found hard to reach.
We will know this to be true when we can conduct six research tasks (surveys, A/B tests, fake doors, etc) in support of premium services KPIs in the first six months after launch.
## Overview of Key Measures
* Number of people using the service to send and receive files
* Why: measure of service size. Important for understanding addressable market size
* Percent of users who have or create an FxAccount via Send
* Why: representation of % of any service users who might be amenable to an upsell
* % of downloaders who convert into uploaders
* Why: represents a measure of our key growth-loop potential
* Count of uploads and size
* Why: Represents cost of service on a running basis
## Key Funnels
* App Open or Visit `--- DESIRED OUTCOME --->` Successful Upload
* Download UI Visit `--- DESIRED OUTCOME --->` Successful Download
* FxA UI Engagement `--- DESIRED OUTCOME --->` Authenticate
* **STRETCH** App Open or Visit `--- DESIRED OUTCOME --->` Successful Download
## Amplitude Schema
Please see, **See Amplitude HTTP API**(https://amplitude.zendesk.com/hc/en-us/articles/204771828) for HTTP API reference.
## Metric Events
In support of our KPIs we collect events from two separate contexts, server and client. The events are designed to have minimal correlation between contexts.
Server events collect lifecycle information about individual uploads but no user information; also time precision is truncated to hour increments. Client events collect information about how users interact with the UI but no upload identifiers.
### Server Events
Server events allow us to aggregate data about file lifecycle without collecting data about individual users. In this context `user_id` and `user_properties` describe the uploaded archive.
* `session_id` -1 (not part of a session)
* `user_id` hash of (archive_id + owner_id)
* `app_version` package.json version
* `time` timestamp truncated to hour precision
* `country`
* `region`
* `event_type` [server_upload | server_download | server_delete]
* `user_properties`
* `download_limit` set number of downloads
* `time_limit` set expiry duration
* `size` approximate size (log10)
* `anonymous` true if anonymous, false if fxa
* `event_properties`
* `download_count` downloads completed
* `ttl` time remaining before expiry truncated to hour
* `agent` the browser name or first 6 characters of the user agent that made the request
### Client Events
Client events allow us to aggregate data about how the user interface is being used without tracking the lifecycle of individual files. In this context `user_id` and `user_properties` describe the user. The `user_id` and `device_id` change for all users at the beginning of each month.
* `session_id` timestamp
* `user_id` hash of (fxa_id + Date.year + Date.month)
* `device_id` hash of (localStorage random id + Date.year + Date.month)
* `platform` [web | android]
* `country`
* `region`
* `language`
* `time` timestamp
* `os_name`
* `event_type` [client_visit | client_upload | client_download | client_delete | client_login | client_logout]
* `event_properties`
* `browser`
* `browser_version`
* `status` [ ok | error | cancel ]
* Event specific properties (see below)
* `user_properties`
* `active_count` number of active uploads
* `anonymous` true if anonymous, false if fxa
* `experiments` list of experiment ids the user is participating in
* `first_action` how this use came to Send the first time [ upload | download ]
#### Visit Event
* `entrypoint` [ upload | download ]
#### Upload Event
* `download_limit` download limit
* `file_count` number of files
* `password_protected` boolean
* `size` approximate size (log10)
* `time_limit` time limit
* `duration` approximate transfer duration (log10)
#### Download Event
* `password_protected` boolean
* `size` approximate size (log10)
* `duration` approximate transfer duration (log10)
#### Delete Event
* `age` hours since uploaded
* `downloaded` downloaded at least once
#### Login Event
* `trigger` [button | time | count | size]
#### Logout Event
* `trigger` [button | timeout]

2652
package-lock.json generated

File diff suppressed because it is too large

View File

@ -1,7 +1,7 @@
{
"name": "send",
"description": "File Sharing Experiment",
"version": "3.4.4",
"version": "3.4.11",
"author": "Mozilla (https://mozilla.org)",
"contributors": [
"Tim Visee <3a4fb3964f@sinenomine.email> (https://timvisee.com)"
@ -30,7 +30,7 @@
"test:report": "nyc report --reporter=html",
"test-integration": "cross-env NODE_ENV=development wdio test/wdio.docker.conf.js",
"circleci-test-integration": "echo 'webdriverio tests need to be updated to node 12'",
"start": "npm run clean && cross-env NODE_ENV=development L10N_DEV=true FXA_CLIENT_ID=fced6b5e3f4c66b9 BASE_URL=http://localhost:8080 webpack-dev-server --mode=development",
"start": "npm run clean && cross-env NODE_ENV=development L10N_DEV=true BASE_URL=http://localhost:8080 DETECT_BASE_URL=true webpack-dev-server --mode=development",
"android": "cross-env ANDROID=1 npm start",
"prod": "node server/bin/prod.js"
},
@ -64,10 +64,10 @@
"node": "^15.5.1"
},
"devDependencies": {
"@babel/core": "^7.12.16",
"@babel/plugin-proposal-class-properties": "^7.12.13",
"@babel/core": "^7.14.3",
"@babel/plugin-proposal-class-properties": "^7.13.0",
"@babel/plugin-syntax-dynamic-import": "^7.2.0",
"@babel/preset-env": "^7.12.16",
"@babel/preset-env": "^7.14.2",
"@dannycoates/webcrypto-liner": "^0.1.37",
"@fullhuman/postcss-purgecss": "^1.3.0",
"@mattiasbuelens/web-streams-polyfill": "0.2.1",
@ -78,12 +78,12 @@
"base64-js": "^1.5.1",
"content-disposition": "^0.5.3",
"copy-webpack-plugin": "^5.1.2",
"core-js": "^3.8.3",
"core-js": "^3.12.1",
"crc": "^3.8.0",
"cross-env": "^6.0.3",
"css-loader": "^3.6.0",
"css-mqpacker": "^7.0.0",
"cssnano": "^4.1.10",
"cssnano": "^4.1.11",
"eslint": "^6.6.0",
"eslint-config-prettier": "^6.15.0",
"eslint-plugin-mocha": "^6.2.1",
@ -102,7 +102,7 @@
"lint-staged": "^9.4.2",
"mocha": "^6.2.2",
"morgan": "^1.9.1",
"nanobus": "^4.4.0",
"nanobus": "^4.5.0",
"nanohtml": "^1.9.0",
"nanotiming": "^7.3.1",
"npm-run-all": "^4.1.5",
@ -117,11 +117,11 @@
"script-loader": "^0.7.2",
"sinon": "^7.5.0",
"string-hash": "^1.1.3",
"stylelint": "^13.10.0",
"stylelint": "^13.13.1",
"stylelint-config-standard": "^19.0.0",
"stylelint-no-unsupported-browser-features": "^4.1.4",
"svgo": "^1.3.2",
"svgo-loader": "^2.2.1",
"svgo-loader": "^2.2.2",
"tailwindcss": "^1.9.6",
"val-loader": "^1.1.1",
"webpack": "4.38.0",
@ -135,9 +135,9 @@
"@dannycoates/express-ws": "^5.0.3",
"@fluent/bundle": "^0.13.0",
"@fluent/langneg": "^0.3.0",
"@google-cloud/storage": "^5.7.4",
"@google-cloud/storage": "^5.8.5",
"@sentry/node": "^5.30.0",
"aws-sdk": "^2.844.0",
"aws-sdk": "^2.909.0",
"body-parser": "^1.19.0",
"choo": "^7.0.0",
"cldr-core": "^35.1.0",
@ -151,7 +151,7 @@
"redis": "^2.8.0",
"redis-mock": "^0.47.0",
"selenium-standalone": "^6.23.0",
"ua-parser-js": "^0.7.24"
"ua-parser-js": "^0.7.28"
},
"availableLanguages": [
"en-US",

View File

@ -2,22 +2,17 @@
"name": "firefox-send",
"description": "File Sharing Experiment",
"repository": {
"url": "https://github.com/mozilla/send/",
"url": "https://github.com/send/send/",
"license": "MPL-2.0"
},
"participate": {
"home": "https://github.com/mozilla/send/blob/master/README.md",
"docs": "https://github.com/mozilla/send/blob/master/README.md"
},
"bugs": {
"list": "https://github.com/mozilla/send/issues",
"report": "https://github.com/mozilla/send/issues/new"
},
"urls": {
"prod": "https://send.firefox.com/",
"stage": "https://stage.send.nonprod.cloudops.mozgcp.net/",
"dev": "https://send2.dev.lcip.org/"
},
"participate": {
"home": "https://github.com/send/send/blob/master/README.md",
"docs": "https://github.com/send/send/blob/master/README.md"
},
"bugs": {
"list": "https://gitlab.com/send/send/issues",
"report": "https://gitlab.com/send/send/issues/new"
},
"keywords": [
"JavaScript",
"jQuery",

View File

@ -28,7 +28,7 @@ notSupportedOutdatedDetail = Helaas ondersteunt deze versie van Firefox de webte
updateFirefox = Firefox bijwerken
deletePopupCancel = Annuleren
deleteButtonHover = Verwijderen
footerText = Niet aangesloten aan Mozilla of Firefox.
footerText = Niet gelieerd aan Mozilla of Firefox.
footerLinkDonate = Doneren
footerLinkCli = CLI
footerLinkDmca = DMCA
@ -52,7 +52,7 @@ passwordSetError = Dit wachtwoord kon niet worden ingesteld
-send-short-brand = Send
-firefox = Firefox
-mozilla = Mozilla
introTitle = Eenvoudig, privé bestanden delen
introTitle = Bestanden delen, eenvoudig en privé
introDescription = Met { -send-brand } kunt u bestanden delen met end-to-endversleuteling en een koppeling die automatisch verloopt. Hierdoor kunt u privé houden wat u wilt delen en er zeker van zijn dat uw zaken niet voor altijd online blijven.
notifyUploadEncryptDone = Uw bestand is versleuteld en klaar voor verzending
# downloadCount is from the downloadCount string and timespan is a timespanMinutes string. ex. 'Expires after 2 downloads or 25 minutes'

2
public/robots.txt Normal file
View File

@ -0,0 +1,2 @@
User-agent: *
Disallow: /

View File

@ -1,171 +0,0 @@
const crypto = require('crypto');
const fetch = require('node-fetch');
const config = require('./config');
const pkg = require('../package.json');
const HOUR = 1000 * 60 * 60;
function truncateToHour(timestamp) {
return Math.floor(timestamp / HOUR) * HOUR;
}
function orderOfMagnitude(n) {
return Math.floor(Math.log10(n));
}
function userId(fileId, ownerId) {
const hash = crypto.createHash('sha256');
hash.update(fileId);
hash.update(ownerId);
return hash.digest('hex').substring(32);
}
function statUploadEvent(data) {
const event = {
session_id: -1,
country: data.country,
region: data.state,
user_id: userId(data.id, data.owner),
app_version: pkg.version,
time: truncateToHour(Date.now()),
event_type: 'server_upload',
user_properties: {
download_limit: data.dlimit,
time_limit: data.timeLimit,
size: orderOfMagnitude(data.size),
anonymous: data.anonymous
},
event_properties: {
agent: data.agent
},
event_id: 0
};
return sendBatch([event]);
}
function statDownloadEvent(data) {
const event = {
session_id: -1,
country: data.country,
region: data.state,
user_id: userId(data.id, data.owner),
app_version: pkg.version,
time: truncateToHour(Date.now()),
event_type: 'server_download',
event_properties: {
agent: data.agent,
download_count: data.download_count,
ttl: data.ttl
},
event_id: data.download_count
};
return sendBatch([event]);
}
function statDeleteEvent(data) {
const event = {
session_id: -1,
country: data.country,
region: data.state,
user_id: userId(data.id, data.owner),
app_version: pkg.version,
time: truncateToHour(Date.now()),
event_type: 'server_delete',
event_properties: {
agent: data.agent,
download_count: data.download_count,
ttl: data.ttl
},
event_id: data.download_count + 1
};
return sendBatch([event]);
}
function clientEvent(
event,
ua,
language,
session_id,
deltaT,
platform,
country,
state
) {
const ep = event.event_properties || {};
const up = event.user_properties || {};
const event_properties = {
browser: ua.browser.name,
browser_version: ua.browser.version,
status: ep.status,
age: ep.age,
downloaded: ep.downloaded,
download_limit: ep.download_limit,
duration: ep.duration,
entrypoint: ep.entrypoint,
file_count: ep.file_count,
password_protected: ep.password_protected,
referrer: ep.referrer,
size: ep.size,
time_limit: ep.time_limit,
trigger: ep.trigger,
ttl: ep.ttl,
utm_campaign: ep.utm_campaign,
utm_content: ep.utm_content,
utm_medium: ep.utm_medium,
utm_source: ep.utm_source,
utm_term: ep.utm_term,
experiment: ep.experiment,
variant: ep.variant
};
const user_properties = {
active_count: up.active_count,
anonymous: up.anonymous,
experiments: up.experiments,
first_action: up.first_action
};
return {
app_version: pkg.version,
country: country,
device_id: event.device_id,
event_properties,
event_type: event.event_type,
language,
os_name: ua.os.name,
os_version: ua.os.version,
platform,
region: state,
session_id,
time: event.time + deltaT,
user_id: event.user_id,
user_properties
};
}
async function sendBatch(events, timeout = 1000) {
if (!config.amplitude_id) {
return 200;
}
try {
const result = await fetch('https://api.amplitude.com/batch', {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({
api_key: config.amplitude_id,
events
}),
timeout
});
return result.status;
} catch (e) {
return 500;
}
}
module.exports = {
statUploadEvent,
statDownloadEvent,
statDeleteEvent,
clientEvent,
sendBatch
};

View File

@ -2,11 +2,6 @@ const config = require('./config');
module.exports = {
LIMITS: {
ANON: {
MAX_FILE_SIZE: config.anon_max_file_size,
MAX_DOWNLOADS: config.anon_max_downloads,
MAX_EXPIRE_SECONDS: config.anon_max_expire_seconds
},
MAX_FILE_SIZE: config.max_file_size,
MAX_DOWNLOADS: config.max_downloads,
MAX_EXPIRE_SECONDS: config.max_expire_seconds,

View File

@ -3,6 +3,26 @@ const { tmpdir } = require('os');
const path = require('path');
const { randomBytes } = require('crypto');
convict.addFormat({
name: 'positive-int-array',
coerce: ints => {
// can take: int[] | string[] | string (csv), returns -> int[]
const ints_arr = Array.isArray(ints) ? ints : ints.trim().split(',');
return ints_arr.map(int =>
typeof int === 'number'
? int
: parseInt(int.replace(/['"]+/g, '').trim(), 10)
);
},
validate: ints => {
// takes: int[], errors if any NaNs, negatives, or floats present
for (const int of ints) {
if (typeof int !== 'number' || isNaN(int) || int < 0 || int % 1 > 0)
throw new Error('must be a comma-separated list of positive integers');
}
}
});
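// Illustration only (not part of the commit): with EXPIRE_TIMES_SECONDS="300, '3600', 86400"
// in the environment, coerce() splits the string on commas, strips quotes and whitespace,
// and yields [300, 3600, 86400]; a negative entry such as "300,-5" is rejected by
// validate() with "must be a comma-separated list of positive integers".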
const conf = convict({
s3_bucket: {
format: String,
@ -25,7 +45,7 @@ const conf = convict({
env: 'GCS_BUCKET'
},
expire_times_seconds: {
format: Array,
format: 'positive-int-array',
default: [300, 3600, 86400, 604800],
env: 'EXPIRE_TIMES_SECONDS'
},
@ -39,13 +59,8 @@ const conf = convict({
default: 86400 * 7,
env: 'MAX_EXPIRE_SECONDS'
},
anon_max_expire_seconds: {
format: Number,
default: 86400,
env: 'ANON_MAX_EXPIRE_SECONDS'
},
download_counts: {
format: Array,
format: 'positive-int-array',
default: [1, 2, 3, 4, 5, 20, 50, 100],
env: 'DOWNLOAD_COUNTS'
},
@ -54,11 +69,6 @@ const conf = convict({
default: 100,
env: 'MAX_DOWNLOADS'
},
anon_max_downloads: {
format: Number,
default: 5,
env: 'ANON_MAX_DOWNLOADS'
},
max_files_per_archive: {
format: Number,
default: 64,
@ -74,6 +84,26 @@ const conf = convict({
default: 'localhost',
env: 'REDIS_HOST'
},
redis_port: {
format: Number,
default: 6379,
env: 'REDIS_PORT'
},
redis_user: {
format: String,
default: '',
env: 'REDIS_USER'
},
redis_password: {
format: String,
default: '',
env: 'REDIS_PASSWORD'
},
redis_db: {
format: String,
default: '',
env: 'REDIS_DB'
},
redis_event_expire: {
format: Boolean,
default: false,
@ -100,16 +130,6 @@ const conf = convict({
arg: 'port',
env: 'PORT'
},
amplitude_id: {
format: String,
default: '',
env: 'AMPLITUDE_ID'
},
analytics_id: {
format: String,
default: '',
env: 'GOOGLE_ANALYTICS_ID'
},
sentry_id: {
format: String,
default: '',
@ -130,11 +150,6 @@ const conf = convict({
default: 1024 * 1024 * 1024 * 2.5,
env: 'MAX_FILE_SIZE'
},
anon_max_file_size: {
format: Number,
default: 1024 * 1024 * 1024,
env: 'ANON_MAX_FILE_SIZE'
},
l10n_dev: {
format: Boolean,
default: false,
@ -145,6 +160,11 @@ const conf = convict({
default: 'https://send.firefox.com',
env: 'BASE_URL'
},
detect_base_url: {
format: Boolean,
default: false,
env: 'DETECT_BASE_URL'
},
file_dir: {
format: 'String',
default: `${tmpdir()}${path.sep}send-${randomBytes(4).toString('hex')}`,
@ -221,4 +241,17 @@ const conf = convict({
conf.validate({ allowed: 'strict' });
const props = conf.getProperties();
module.exports = props;
const deriveBaseUrl = req => {
if (!props.detect_base_url) {
return props.base_url;
}
const protocol = req.secure ? 'https://' : 'http://';
return `${protocol}${req.headers.host}`;
};
module.exports = {
...props,
deriveBaseUrl
};
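The exported deriveBaseUrl(req) helper, used by the routes further down, either returns the static base_url or, when DETECT_BASE_URL is enabled, rebuilds the origin from the incoming request. A minimal sketch of how a route uses it; the hosts and IDs below are made up:

// Sketch, not part of the commit.
const config = require('../config'); // as loaded from server/routes/*

function downloadUrl(req, id) {
  // DETECT_BASE_URL=false (default): always config.base_url
  // DETECT_BASE_URL=true: '<http|https>://' + req.headers.host
  return `${config.deriveBaseUrl(req)}/download/${id}/`;
}

// e.g. with DETECT_BASE_URL=true and a TLS request arriving for send.example.org:
//   downloadUrl(req, 'abc123')  ->  'https://send.example.org/download/abc123/'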

View File

@ -1,23 +1,10 @@
const storage = require('../storage');
const { statDeleteEvent } = require('../amplitude');
module.exports = async function(req, res) {
try {
const id = req.params.id;
const meta = req.meta;
const ttl = await storage.ttl(id);
await storage.del(id);
res.sendStatus(200);
statDeleteEvent({
id,
ip: req.ip,
country: req.geo.country,
state: req.geo.state,
owner: meta.owner,
download_count: meta.dl,
ttl,
agent: req.ua.browser.name || req.ua.ua.substring(0, 6)
});
} catch (e) {
res.sendStatus(404);
}

View File

@ -1,7 +1,6 @@
const storage = require('../storage');
const mozlog = require('../log');
const log = mozlog('send.download');
const { statDownloadEvent } = require('../amplitude');
module.exports = async function(req, res) {
const id = req.params.id;
@ -27,17 +26,6 @@ module.exports = async function(req, res) {
const dl = meta.dl + 1;
const dlimit = meta.dlimit;
const ttl = await storage.ttl(id);
statDownloadEvent({
id,
ip: req.ip,
country: req.geo.country,
state: req.geo.state,
owner: meta.owner,
download_count: dl,
ttl,
agent: req.ua.browser.name || req.ua.ua.substring(0, 6)
});
try {
if (dl >= dlimit) {
await storage.del(id);

View File

@ -36,9 +36,14 @@ module.exports = function(app) {
defaultSrc: ["'self'"],
connectSrc: [
"'self'",
config.base_url.replace(/^https:\/\//, 'wss://')
function(req) {
const baseUrl = config.deriveBaseUrl(req);
const r = baseUrl.replace(/^http(s?):\/\//, 'ws$1://');
console.log([baseUrl, r]);
return r;
}
],
imgSrc: ["'self'"],
imgSrc: ["'self'", 'data:'],
scriptSrc: [
"'self'",
function(req) {
@ -52,10 +57,6 @@ module.exports = function(app) {
}
};
csp.directives.connectSrc.push(
config.base_url.replace(/^https:\/\//, 'wss://')
);
app.use(helmet.contentSecurityPolicy(csp));
}
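// Example of the connect-src rewrite above (hypothetical hosts): a derived base URL
// of 'https://send.example.org' becomes 'wss://send.example.org', and
// 'http://localhost:1443' becomes 'ws://localhost:1443', so the WebSocket upload
// endpoint is allowed under whichever scheme the page was served over.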
@ -112,7 +113,6 @@ module.exports = function(app) {
require('./params')
);
app.post(`/api/info/:id${ID_REGEX}`, auth.owner, require('./info'));
app.post('/api/metrics', require('./metrics'));
app.get('/__version__', function(req, res) {
// eslint-disable-next-line node/no-missing-require
res.sendFile(require.resolve('../../dist/version.json'));

View File

@ -1,24 +0,0 @@
const { sendBatch, clientEvent } = require('../amplitude');
module.exports = async function(req, res) {
try {
const data = JSON.parse(req.body); // see http://crbug.com/490015
const deltaT = Date.now() - data.now;
const events = data.events.map(e =>
clientEvent(
e,
req.ua,
data.lang,
data.session_id + deltaT,
deltaT,
data.platform,
req.geo.country,
req.geo.state
)
);
const status = await sendBatch(events);
res.sendStatus(status);
} catch (e) {
res.sendStatus(500);
}
};

View File

@ -2,7 +2,7 @@ const config = require('../config');
const storage = require('../storage');
module.exports = function(req, res) {
const max = req.user ? config.max_downloads : config.anon_max_downloads;
const max = config.max_downloads;
const dlimit = req.body.dlimit;
if (!dlimit || dlimit > max) {
return res.sendStatus(400);

View File

@ -28,8 +28,7 @@ module.exports = async function(req, res) {
//this hasn't been updated to expiration time setting yet
//if you want to fallback to this code add this
await storage.set(newId, fileStream, meta, config.default_expire_seconds);
const protocol = config.env === 'production' ? 'https' : req.protocol;
const url = `${protocol}://${req.get('host')}/download/${newId}/`;
const url = `${config.deriveBaseUrl(req)}/download/${newId}/`;
res.set('WWW-Authenticate', `send-v1 ${meta.nonce}`);
res.json({
url,

View File

@ -4,7 +4,6 @@ const config = require('../config');
const mozlog = require('../log');
const Limiter = require('../limiter');
const fxa = require('../fxa');
const { statUploadEvent } = require('../amplitude');
const { encryptedSize } = require('../../app/utils');
const { Transform } = require('stream');
@ -31,15 +30,9 @@ module.exports = function(ws, req) {
const metadata = fileInfo.fileMetadata;
const auth = fileInfo.authorization;
const user = await fxa.verify(fileInfo.bearer);
const maxFileSize = user
? config.max_file_size
: config.anon_max_file_size;
const maxExpireSeconds = user
? config.max_expire_seconds
: config.anon_max_expire_seconds;
const maxDownloads = user
? config.max_downloads
: config.anon_max_downloads;
const maxFileSize = config.max_file_size;
const maxExpireSeconds = config.max_expire_seconds;
const maxDownloads = config.max_downloads;
if (config.fxa_required && !user) {
ws.send(
@ -72,8 +65,7 @@ module.exports = function(ws, req) {
nonce: crypto.randomBytes(16).toString('base64')
};
const protocol = config.env === 'production' ? 'https' : req.protocol;
const url = `${protocol}://${req.get('host')}/download/${newId}/`;
const url = `${config.deriveBaseUrl(req)}/download/${newId}/`;
ws.send(
JSON.stringify({
@ -108,18 +100,6 @@ module.exports = function(ws, req) {
// in order to avoid having to check socket state and clean
// up storage, possibly with an exception that we can catch.
ws.send(JSON.stringify({ ok: true }));
statUploadEvent({
id: newId,
ip: req.ip,
country: req.geo.country,
state: req.geo.state,
owner,
dlimit,
timeLimit,
anonymous: !user,
size: limiter.length,
agent: req.ua.browser.name || req.ua.ua.substring(0, 6)
});
}
} catch (e) {
log.error('upload', e);

View File

@ -23,6 +23,7 @@ module.exports = async function(req) {
if (config.survey_url) {
prefs.surveyUrl = config.survey_url;
}
const baseUrl = config.deriveBaseUrl(req);
return {
archive: {
numFiles: 0
@ -33,7 +34,7 @@ module.exports = async function(req) {
title: 'Send',
description:
'Encrypt and send files with a link that automatically expires to ensure your important documents dont stay online forever.',
baseUrl: config.base_url,
baseUrl,
ui: {},
storage: {
files: []

View File

@ -8,8 +8,10 @@ module.exports = function(config) {
//eslint-disable-next-line security/detect-non-literal-require
const redis = require(redis_lib);
const client = redis.createClient({
var client_config = {
host: config.redis_host,
port: config.redis_port,
retry_strategy: options => {
if (options.total_retry_time > config.redis_retry_time) {
client.emit('error', 'Retry time exhausted');
@ -18,7 +20,14 @@ module.exports = function(config) {
return config.redis_retry_delay;
}
});
};
if (config.redis_user != null && config.redis_user.length > 0)
client_config.user = config.redis_user;
if (config.redis_password != null && config.redis_password.length > 0)
client_config.password = config.redis_password;
if (config.redis_db != null && config.redis_db.length > 0)
client_config.db = config.redis_db;
const client = redis.createClient(client_config);
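// Illustration (values are made up): with REDIS_HOST=redis.internal REDIS_PORT=6380
// REDIS_USER=send REDIS_PASSWORD=s3cret REDIS_DB=2, client_config ends up with
// host, port and retry_strategy plus user, password and db; each of the three
// optional keys is simply omitted when its variable is empty or unset.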
client.ttlAsync = promisify(client.ttl);
client.hgetallAsync = promisify(client.hgetall);