implemented download tokens

This commit is contained in:
Danny Coates
2020-07-27 11:18:52 -07:00
parent 87d46f7ef5
commit 81e9d81dab
26 changed files with 271 additions and 126 deletions

View File

@@ -71,7 +71,7 @@ const conf = convict({
},
redis_host: {
format: String,
default: 'localhost',
default: 'mock',
env: 'REDIS_HOST'
},
redis_event_expire: {

View File

@@ -1,15 +1,45 @@
const crypto = require('crypto');
// Derive a deterministic download token: hex-encoded HMAC-SHA256 of the
// download counter, keyed by the file's owner secret. Token N can be
// re-derived later for verification without storing tokens in redis.
function makeToken(secret, counter) {
  const hmac = crypto.createHmac('sha256', secret);
  hmac.update(String(counter));
  return hmac.digest('hex');
}

/**
 * In-memory view of a file's redis hash, plus the storage handle needed
 * to atomically reserve download tokens.
 *
 * NOTE: this block contained leftover pre-change lines from the diff
 * (a second `constructor(obj)` signature and a string-based `pwd` parse),
 * which made it syntactically invalid; only the post-change version is kept.
 */
class Metadata {
  /**
   * @param {Object} obj - raw redis hash fields (values arrive as strings)
   * @param {Object} storage - storage layer exposing incrementField(id, key)
   */
  constructor(obj, storage) {
    this.id = obj.id;
    this.dl = +obj.dl || 0;
    this.dlToken = +obj.dlToken || 0;
    this.dlimit = +obj.dlimit || 1;
    // pwd/fxa are stored as '0'/'1' in redis; numeric coercion → boolean
    this.pwd = !!+obj.pwd;
    this.owner = obj.owner;
    this.metadata = obj.metadata;
    this.auth = obj.auth;
    this.nonce = obj.nonce;
    this.flagged = !!obj.flagged;
    this.dead = !!obj.dead;
    this.key = obj.key;
    this.fxa = !!+obj.fxa;
    this.storage = storage;
  }

  /**
   * Reserve the next download slot and return its token.
   * The counter is bumped with an atomic redis increment, then re-checked,
   * so two concurrent requests cannot both slip past the limit.
   * @returns {Promise<string>} hex token for the reserved download slot
   * @throws {Error} with message 'limit' when the download limit is exhausted
   */
  async getDownloadToken() {
    if (this.dlToken >= this.dlimit) {
      throw new Error('limit');
    }
    this.dlToken = await this.storage.incrementField(this.id, 'dlToken');
    // another request could have also incremented
    if (this.dlToken > this.dlimit) {
      throw new Error('limit');
    }
    return makeToken(this.owner, this.dlToken);
  }

  /**
   * Check a client-supplied token against every token issued so far
   * (counters 1..dlToken are re-derived and compared).
   * @param {string} token - hex token presented by the client
   * @returns {Promise<boolean>} true when the token matches an issued one
   */
  async verifyDownloadToken(token) {
    const validTokens = Array.from({ length: this.dlToken }, (_, i) =>
      makeToken(this.owner, i + 1)
    );
    return validTokens.includes(token);
  }
}

View File

@@ -75,5 +75,22 @@ module.exports = {
} else {
res.sendStatus(401);
}
},
// Express middleware: authorize a request using a bearer download token
// (issued by Metadata.getDownloadToken). Loads the file's metadata,
// attaches it as req.meta, and verifies the presented token against the
// tokens issued so far. 404 for unknown/dead files, 401 otherwise.
dlToken: async function(req, res, next) {
const authHeader = req.header('Authorization');
// Only attempt verification when a Bearer scheme header is present;
// otherwise req.authorized stays falsy and we fall through to 401.
if (authHeader && /^Bearer\s/i.test(authHeader)) {
// token is everything after the scheme, split on a single space
const token = authHeader.split(' ')[1];
const id = req.params.id;
req.meta = await storage.metadata(id);
if (!req.meta || req.meta.dead) {
return res.sendStatus(404);
}
req.authorized = await req.meta.verifyDownloadToken(token);
}
if (req.authorized) {
next();
} else {
res.sendStatus(401);
}
}
};

25
server/routes/done.js Normal file
View File

@@ -0,0 +1,25 @@
const storage = require('../storage');
const { statDownloadEvent } = require('../amplitude');
// POST /api/download/done/:id
// Called by the client after it finishes downloading. Records a download
// analytics event, bumps the completed-download counter, and deletes the
// file once every issued token slot has been used.
// Expects auth.dlToken middleware to have populated req.meta.
module.exports = async function(req, res) {
try {
const id = req.params.id;
const meta = req.meta;
const ttl = await storage.ttl(id);
statDownloadEvent({
id,
ip: req.ip,
owner: meta.owner,
// NOTE(review): this is the pre-increment count (incremented below) —
// confirm whether analytics expects the count before or after this download
download_count: meta.dl,
ttl,
// fall back to a short raw UA prefix when the browser name is unknown
agent: req.ua.browser.name || req.ua.ua.substring(0, 6)
});
await storage.incrementField(id, 'dl');
// dlToken counts issued tokens; once it reaches the limit no further
// downloads can start, so the stored file can be removed
if (meta.dlToken >= meta.dlimit) {
await storage.kill(id);
}
res.sendStatus(200);
} catch (e) {
// any storage failure (missing id, redis error) is reported as not-found
res.sendStatus(404);
}
};

View File

@@ -1,53 +1,14 @@
const storage = require('../storage');
const mozlog = require('../log');
const log = mozlog('send.download');
const { statDownloadEvent } = require('../amplitude');
module.exports = async function(req, res) {
const id = req.params.id;
try {
const meta = req.meta;
const contentLength = await storage.length(id);
const fileStream = await storage.get(id);
let cancelled = false;
req.on('aborted', () => {
cancelled = true;
fileStream.destroy();
});
const { length, stream } = await storage.get(id);
res.writeHead(200, {
'Content-Type': 'application/octet-stream',
'Content-Length': contentLength
});
fileStream.pipe(res).on('finish', async () => {
if (cancelled) {
return;
}
const dl = meta.dl + 1;
const dlimit = meta.dlimit;
const ttl = await storage.ttl(id);
statDownloadEvent({
id,
ip: req.ip,
country: req.geo.country,
state: req.geo.state,
owner: meta.owner,
download_count: dl,
ttl,
agent: req.ua.browser.name || req.ua.ua.substring(0, 6)
});
try {
if (dl >= dlimit) {
await storage.kill(id);
} else {
await storage.incrementField(id, 'dl');
}
} catch (e) {
log.info('StorageError:', id);
}
'Content-Length': length
});
stream.pipe(res);
} catch (e) {
res.sendStatus(404);
}

View File

@@ -16,13 +16,12 @@ module.exports = {
const kid = req.params.id;
try {
const fileId = id(req.user, kid);
const contentLength = await storage.length(fileId);
const fileStream = await storage.get(fileId);
const { length, stream } = await storage.get(fileId);
res.writeHead(200, {
'Content-Type': 'application/octet-stream',
'Content-Length': contentLength
'Content-Length': length
});
fileStream.pipe(res);
stream.pipe(res);
} catch (e) {
res.sendStatus(404);
}

View File

@@ -120,12 +120,18 @@ module.exports = function(app) {
app.get('/app.webmanifest', language, require('./webmanifest'));
app.get(`/download/:id${ID_REGEX}`, language, pages.download);
app.get('/unsupported/:reason', language, pages.unsupported);
app.get(`/api/download/:id${ID_REGEX}`, auth.hmac, require('./download'));
app.get(`/api/download/token/:id${ID_REGEX}`, auth.hmac, require('./token'));
app.get(`/api/download/:id${ID_REGEX}`, auth.dlToken, require('./download'));
app.get(
`/api/download/blob/:id${ID_REGEX}`,
auth.hmac,
auth.dlToken,
require('./download')
);
app.post(
`/api/download/done/:id${ID_REGEX}`,
auth.dlToken,
require('./done.js')
);
app.get(`/api/exists/:id${ID_REGEX}`, require('./exists'));
app.get(`/api/metadata/:id${ID_REGEX}`, auth.hmac, require('./metadata'));
app.get('/api/filelist/:id([\\w-]{16})', auth.fxa, filelist.get);
@@ -133,12 +139,7 @@ module.exports = function(app) {
// app.post('/api/upload', auth.fxa, require('./upload'));
app.post(`/api/delete/:id${ID_REGEX}`, auth.owner, require('./delete'));
app.post(`/api/password/:id${ID_REGEX}`, auth.owner, require('./password'));
app.post(
`/api/params/:id${ID_REGEX}`,
auth.owner,
auth.fxa,
require('./params')
);
app.post(`/api/params/:id${ID_REGEX}`, auth.owner, require('./params'));
app.post(`/api/info/:id${ID_REGEX}`, auth.owner, require('./info'));
app.post(`/api/report/:id${ID_REGEX}`, auth.hmac, require('./report'));
app.post('/api/metrics', require('./metrics'));

View File

@@ -11,7 +11,7 @@ module.exports = async function(req, res) {
res.send({
metadata: meta.metadata,
flagged: !!meta.flagged,
finalDownload: meta.dl + 1 === meta.dlimit,
finalDownload: meta.dlToken + 1 === meta.dlimit,
ttl
});
} catch (e) {

View File

@@ -2,7 +2,7 @@ const config = require('../config');
const storage = require('../storage');
module.exports = function(req, res) {
const max = req.user ? config.max_downloads : config.anon_max_downloads;
const max = req.meta.fxa ? config.max_downloads : config.anon_max_downloads;
const dlimit = req.body.dlimit;
if (!dlimit || dlimit > max) {
return res.sendStatus(400);

View File

@@ -9,7 +9,7 @@ module.exports = function(req, res) {
try {
storage.setField(id, 'auth', auth);
storage.setField(id, 'pwd', true);
storage.setField(id, 'pwd', 1);
res.sendStatus(200);
} catch (e) {
return res.sendStatus(404);

17
server/routes/token.js Normal file
View File

@@ -0,0 +1,17 @@
module.exports = async function(req, res) {
const meta = req.meta;
try {
if (meta.dead || meta.flagged) {
return res.sendStatus(404);
}
const token = await meta.getDownloadToken();
res.send({
token
});
} catch (e) {
if (e.message === 'limit') {
return res.sendStatus(403);
}
res.sendStatus(404);
}
};

View File

@@ -66,6 +66,7 @@ module.exports = function(ws, req) {
const meta = {
owner,
fxa: user ? 1 : 0,
metadata,
dlimit,
auth: auth.split(' ')[1],

View File

@@ -1,10 +1,8 @@
const fs = require('fs');
const fss = require('fs');
const fs = fss.promises;
const path = require('path');
const promisify = require('util').promisify;
const mkdirp = require('mkdirp');
const stat = promisify(fs.stat);
class FSStorage {
constructor(config, log) {
this.log = log;
@@ -13,32 +11,36 @@ class FSStorage {
}
async length(id) {
const result = await stat(path.join(this.dir, id));
const result = await fs.stat(path.join(this.dir, id));
return result.size;
}
getStream(id) {
return fs.createReadStream(path.join(this.dir, id));
return fss.createReadStream(path.join(this.dir, id));
}
set(id, file) {
return new Promise((resolve, reject) => {
const filepath = path.join(this.dir, id);
const fstream = fs.createWriteStream(filepath);
const fstream = fss.createWriteStream(filepath);
file.pipe(fstream);
file.on('error', err => {
fstream.destroy(err);
});
fstream.on('error', err => {
fs.unlinkSync(filepath);
this.del(id);
reject(err);
});
fstream.on('finish', resolve);
});
}
del(id) {
return Promise.resolve(fs.unlinkSync(path.join(this.dir, id)));
// Best-effort delete of the stored file at <dir>/<id> using the
// fs.promises API (`fs` here is fs.promises from the module top).
// Errors such as the file already being gone are deliberately swallowed.
async del(id) {
try {
await fs.unlink(path.join(this.dir, id));
} catch (e) {
// ignore local fs issues
}
}
ping() {

View File

@@ -56,7 +56,8 @@ class DB {
if (info.dead || info.flagged) {
throw new Error(info.flagged ? 'flagged' : 'dead');
}
return this.storage.getStream(info.filePath);
const length = await this.storage.length(info.filePath);
return { length, stream: this.storage.getStream(info.filePath) };
}
async set(id, file, meta, expireSeconds = config.default_expire_seconds) {
@@ -75,15 +76,15 @@ class DB {
this.redis.hset(id, key, value);
}
incrementField(id, key, increment = 1) {
this.redis.hincrby(id, key, increment);
async incrementField(id, key, increment = 1) {
return await this.redis.hincrbyAsync(id, key, increment);
}
async kill(id) {
const { filePath, dead } = await this.getPrefixedInfo(id);
if (!dead) {
this.storage.del(filePath);
this.redis.hset(id, 'dead', 1);
this.storage.del(filePath);
}
}
@@ -94,8 +95,8 @@ class DB {
async del(id) {
const { filePath } = await this.getPrefixedInfo(id);
this.storage.del(filePath);
this.redis.del(id);
this.storage.del(filePath);
}
async ping() {
@@ -105,7 +106,7 @@ class DB {
async metadata(id) {
const result = await this.redis.hgetallAsync(id);
return result && new Metadata(result);
return result && new Metadata({ id, ...result }, this);
}
}

View File

@@ -2,7 +2,7 @@ const promisify = require('util').promisify;
module.exports = function(config) {
const redis_lib =
config.env === 'development' && config.redis_host === 'localhost'
config.env === 'development' && config.redis_host === 'mock'
? 'redis-mock'
: 'redis';
@@ -23,6 +23,7 @@ module.exports = function(config) {
client.ttlAsync = promisify(client.ttl);
client.hgetallAsync = promisify(client.hgetall);
client.hgetAsync = promisify(client.hget);
client.hincrbyAsync = promisify(client.hincrby);
client.hmgetAsync = promisify(client.hmget);
client.pingAsync = promisify(client.ping);
client.existsAsync = promisify(client.exists);