Create static robots.txt file, remove dynamic route
Fixes https://gitlab.com/timvisee/send/-/issues/17
commit 84da34169d
parent 93e1d2f41a
@@ -9,7 +9,6 @@ module.exports = function(app = choo({ hash: true })) {
   app.route('/unsupported/:reason', body(require('./ui/unsupported')));
   app.route('/error', body(require('./ui/error')));
   app.route('/blank', body(require('./ui/blank')));
-  app.route('/robots.txt', function() {return 'User-agent: * Disallow: /'});
   app.route('/oauth', function(state, emit) {
     emit('authenticate', state.query.code, state.query.state);
   });
public/robots.txt  Normal file  (+2)
@@ -0,0 +1,2 @@
+User-agent: *
+Disallow: /
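
Why the static file is a sufficient replacement (a sketch, not code from this repository): the removed choo route answered /robots.txt from the client-side router, whereas a file placed in public/ can be served directly by the server's static-asset handling. Assuming an Express-style server with public/ as its static directory (an assumption for illustration, not taken from this commit), the server-side wiring would look roughly like this:

// Sketch only: assumes an Express server and that public/ holds static assets.
const path = require('path');
const express = require('express');

const app = express();

// Files in public/ are served as-is, so GET /robots.txt now returns the
// plain-text file added in this commit instead of a client-rendered route.
app.use(express.static(path.join(__dirname, 'public')));

app.listen(8080);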