2016-10-25 03:49:45 +00:00
|
|
|
/* jslint node: true */
|
|
|
|
'use strict';
|
|
|
|
|
|
|
|
// ENiGMA½
|
2018-06-21 01:57:06 +00:00
|
|
|
const Config = require('./config.js').get;
|
2016-10-25 03:49:45 +00:00
|
|
|
const FileDb = require('./database.js').dbs.file;
|
|
|
|
const getISOTimestampString = require('./database.js').getISOTimestampString;
|
|
|
|
const FileEntry = require('./file_entry.js');
|
|
|
|
const getServer = require('./listening_server.js').getServer;
|
|
|
|
const Errors = require('./enig_error.js').Errors;
|
2017-02-18 16:56:23 +00:00
|
|
|
const ErrNotEnabled = require('./enig_error.js').ErrorReasons.NotEnabled;
|
2017-02-18 20:21:18 +00:00
|
|
|
const StatLog = require('./stat_log.js');
|
|
|
|
const User = require('./user.js');
|
|
|
|
const Log = require('./logger.js').log;
|
|
|
|
const getConnectionByUserId = require('./client_connections.js').getConnectionByUserId;
|
2017-02-27 04:28:05 +00:00
|
|
|
const webServerPackageName = require('./servers/content/web.js').moduleInfo.packageName;
|
2018-06-03 23:02:28 +00:00
|
|
|
const Events = require('./events.js');
|
2016-10-25 03:49:45 +00:00
|
|
|
|
|
|
|
// deps
|
|
|
|
const hashids = require('hashids');
|
|
|
|
const moment = require('moment');
|
|
|
|
const paths = require('path');
|
|
|
|
const async = require('async');
|
2017-05-20 03:20:19 +00:00
|
|
|
const fs = require('graceful-fs');
|
2016-10-25 03:49:45 +00:00
|
|
|
const mimeTypes = require('mime-types');
|
2017-09-26 16:44:15 +00:00
|
|
|
const yazl = require('yazl');
|
2016-10-25 03:49:45 +00:00
|
|
|
|
2017-02-18 16:56:23 +00:00
|
|
|
// Build the standard error returned whenever the content web server
// is not enabled; carries the NotEnabled reason so callers can distinguish it.
function notEnabledError() {
    return Errors.General('Web server is not enabled', ErrNotEnabled);
}
|
|
|
|
|
2016-10-25 03:49:45 +00:00
|
|
|
class FileAreaWebAccess {
|
2018-06-22 05:15:04 +00:00
|
|
|
constructor() {
|
|
|
|
this.hashids = new hashids(Config().general.boardName);
|
|
|
|
this.expireTimers = {}; // hashId->timer
|
|
|
|
}
|
|
|
|
|
|
|
|
startup(cb) {
|
|
|
|
const self = this;
|
|
|
|
|
|
|
|
async.series(
|
|
|
|
[
|
|
|
|
function initFromDb(callback) {
|
|
|
|
return self.load(callback);
|
|
|
|
},
|
|
|
|
function addWebRoute(callback) {
|
|
|
|
self.webServer = getServer(webServerPackageName);
|
|
|
|
if(!self.webServer) {
|
|
|
|
return callback(Errors.DoesNotExist(`Server with package name "${webServerPackageName}" does not exist`));
|
|
|
|
}
|
|
|
|
|
|
|
|
if(self.isEnabled()) {
|
|
|
|
const routeAdded = self.webServer.instance.addRoute({
|
|
|
|
method : 'GET',
|
|
|
|
path : Config().fileBase.web.routePath,
|
|
|
|
handler : self.routeWebRequest.bind(self),
|
|
|
|
});
|
|
|
|
return callback(routeAdded ? null : Errors.General('Failed adding route'));
|
|
|
|
} else {
|
|
|
|
return callback(null); // not enabled, but no error
|
|
|
|
}
|
|
|
|
}
|
|
|
|
],
|
|
|
|
err => {
|
|
|
|
return cb(err);
|
|
|
|
}
|
|
|
|
);
|
|
|
|
}
|
|
|
|
|
|
|
|
shutdown(cb) {
|
|
|
|
return cb(null);
|
|
|
|
}
|
|
|
|
|
|
|
|
isEnabled() {
|
|
|
|
return this.webServer.instance.isEnabled();
|
|
|
|
}
|
|
|
|
|
|
|
|
static getHashIdTypes() {
|
|
|
|
return {
|
|
|
|
SingleFile : 0,
|
|
|
|
BatchArchive : 1,
|
|
|
|
};
|
|
|
|
}
|
|
|
|
|
|
|
|
load(cb) {
|
|
|
|
//
|
|
|
|
// Load entries, register expiration timers
|
|
|
|
//
|
|
|
|
FileDb.each(
|
|
|
|
`SELECT hash_id, expire_timestamp
|
2016-12-07 01:58:56 +00:00
|
|
|
FROM file_web_serve;`,
|
2018-06-22 05:15:04 +00:00
|
|
|
(err, row) => {
|
|
|
|
if(row) {
|
|
|
|
this.scheduleExpire(row.hash_id, moment(row.expire_timestamp));
|
|
|
|
}
|
|
|
|
},
|
|
|
|
err => {
|
|
|
|
return cb(err);
|
|
|
|
}
|
|
|
|
);
|
|
|
|
}
|
|
|
|
|
|
|
|
removeEntry(hashId) {
|
|
|
|
//
|
|
|
|
// Delete record from DB, and our timer
|
|
|
|
//
|
|
|
|
FileDb.run(
|
|
|
|
`DELETE FROM file_web_serve
|
2016-12-07 01:58:56 +00:00
|
|
|
WHERE hash_id = ?;`,
|
2018-06-22 05:15:04 +00:00
|
|
|
[ hashId ]
|
|
|
|
);
|
|
|
|
|
|
|
|
delete this.expireTimers[hashId];
|
|
|
|
}
|
|
|
|
|
|
|
|
scheduleExpire(hashId, expireTime) {
|
|
|
|
|
|
|
|
// remove any previous entry for this hashId
|
|
|
|
const previous = this.expireTimers[hashId];
|
|
|
|
if(previous) {
|
|
|
|
clearTimeout(previous);
|
|
|
|
delete this.expireTimers[hashId];
|
|
|
|
}
|
|
|
|
|
|
|
|
const timeoutMs = expireTime.diff(moment());
|
|
|
|
|
|
|
|
if(timeoutMs <= 0) {
|
|
|
|
setImmediate( () => {
|
|
|
|
this.removeEntry(hashId);
|
|
|
|
});
|
|
|
|
} else {
|
|
|
|
this.expireTimers[hashId] = setTimeout( () => {
|
|
|
|
this.removeEntry(hashId);
|
|
|
|
}, timeoutMs);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
loadServedHashId(hashId, cb) {
|
|
|
|
FileDb.get(
|
|
|
|
`SELECT expire_timestamp FROM
|
2016-10-25 03:49:45 +00:00
|
|
|
file_web_serve
|
|
|
|
WHERE hash_id = ?`,
|
2018-06-22 05:15:04 +00:00
|
|
|
[ hashId ],
|
|
|
|
(err, result) => {
|
|
|
|
if(err || !result) {
|
|
|
|
return cb(err ? err : Errors.DoesNotExist('Invalid or missing hash ID'));
|
|
|
|
}
|
|
|
|
|
|
|
|
const decoded = this.hashids.decode(hashId);
|
|
|
|
|
|
|
|
// decode() should provide an array of [ userId, hashIdType, id, ... ]
|
|
|
|
if(!Array.isArray(decoded) || decoded.length < 3) {
|
|
|
|
return cb(Errors.Invalid('Invalid or unknown hash ID'));
|
|
|
|
}
|
|
|
|
|
|
|
|
const servedItem = {
|
|
|
|
hashId : hashId,
|
|
|
|
userId : decoded[0],
|
|
|
|
hashIdType : decoded[1],
|
|
|
|
expireTimestamp : moment(result.expire_timestamp),
|
|
|
|
};
|
|
|
|
|
|
|
|
if(FileAreaWebAccess.getHashIdTypes().SingleFile === servedItem.hashIdType) {
|
|
|
|
servedItem.fileIds = decoded.slice(2);
|
|
|
|
}
|
|
|
|
|
|
|
|
return cb(null, servedItem);
|
|
|
|
}
|
|
|
|
);
|
|
|
|
}
|
|
|
|
|
|
|
|
getSingleFileHashId(client, fileEntry) {
|
|
|
|
return this.getHashId(client, FileAreaWebAccess.getHashIdTypes().SingleFile, [ fileEntry.fileId ] );
|
|
|
|
}
|
|
|
|
|
|
|
|
getBatchArchiveHashId(client, batchId) {
|
|
|
|
return this.getHashId(client, FileAreaWebAccess.getHashIdTypes().BatchArchive, batchId);
|
|
|
|
}
|
|
|
|
|
|
|
|
getHashId(client, hashIdType, identifier) {
|
|
|
|
return this.hashids.encode(client.user.userId, hashIdType, identifier);
|
|
|
|
}
|
|
|
|
|
|
|
|
buildSingleFileTempDownloadLink(client, fileEntry, hashId) {
|
|
|
|
hashId = hashId || this.getSingleFileHashId(client, fileEntry);
|
|
|
|
|
|
|
|
return this.webServer.instance.buildUrl(`${Config().fileBase.web.path}${hashId}`);
|
|
|
|
}
|
|
|
|
|
|
|
|
buildBatchArchiveTempDownloadLink(client, hashId) {
|
|
|
|
return this.webServer.instance.buildUrl(`${Config().fileBase.web.path}${hashId}`);
|
|
|
|
}
|
|
|
|
|
|
|
|
getExistingTempDownloadServeItem(client, fileEntry, cb) {
|
|
|
|
if(!this.isEnabled()) {
|
|
|
|
return cb(notEnabledError());
|
|
|
|
}
|
|
|
|
|
|
|
|
const hashId = this.getSingleFileHashId(client, fileEntry);
|
|
|
|
this.loadServedHashId(hashId, (err, servedItem) => {
|
|
|
|
if(err) {
|
|
|
|
return cb(err);
|
|
|
|
}
|
|
|
|
|
|
|
|
servedItem.url = this.buildSingleFileTempDownloadLink(client, fileEntry);
|
|
|
|
|
|
|
|
return cb(null, servedItem);
|
|
|
|
});
|
|
|
|
}
|
|
|
|
|
|
|
|
_addOrUpdateHashIdRecord(dbOrTrans, hashId, expireTime, cb) {
|
|
|
|
// add/update rec with hash id and (latest) timestamp
|
|
|
|
dbOrTrans.run(
|
|
|
|
`REPLACE INTO file_web_serve (hash_id, expire_timestamp)
|
2016-10-25 03:49:45 +00:00
|
|
|
VALUES (?, ?);`,
|
2018-06-22 05:15:04 +00:00
|
|
|
[ hashId, getISOTimestampString(expireTime) ],
|
|
|
|
err => {
|
|
|
|
if(err) {
|
|
|
|
return cb(err);
|
|
|
|
}
|
|
|
|
|
|
|
|
this.scheduleExpire(hashId, expireTime);
|
|
|
|
|
|
|
|
return cb(null);
|
|
|
|
}
|
|
|
|
);
|
|
|
|
}
|
|
|
|
|
|
|
|
createAndServeTempDownload(client, fileEntry, options, cb) {
|
|
|
|
if(!this.isEnabled()) {
|
|
|
|
return cb(notEnabledError());
|
|
|
|
}
|
|
|
|
|
|
|
|
const hashId = this.getSingleFileHashId(client, fileEntry);
|
|
|
|
const url = this.buildSingleFileTempDownloadLink(client, fileEntry, hashId);
|
|
|
|
options.expireTime = options.expireTime || moment().add(2, 'days');
|
|
|
|
|
|
|
|
this._addOrUpdateHashIdRecord(FileDb, hashId, options.expireTime, err => {
|
|
|
|
return cb(err, url);
|
|
|
|
});
|
|
|
|
}
|
|
|
|
|
|
|
|
createAndServeTempBatchDownload(client, fileEntries, options, cb) {
|
|
|
|
if(!this.isEnabled()) {
|
|
|
|
return cb(notEnabledError());
|
|
|
|
}
|
|
|
|
|
|
|
|
const batchId = moment().utc().unix();
|
|
|
|
const hashId = this.getBatchArchiveHashId(client, batchId);
|
|
|
|
const url = this.buildBatchArchiveTempDownloadLink(client, hashId);
|
|
|
|
options.expireTime = options.expireTime || moment().add(2, 'days');
|
|
|
|
|
|
|
|
FileDb.beginTransaction( (err, trans) => {
|
|
|
|
if(err) {
|
|
|
|
return cb(err);
|
|
|
|
}
|
|
|
|
|
|
|
|
this._addOrUpdateHashIdRecord(trans, hashId, options.expireTime, err => {
|
|
|
|
if(err) {
|
|
|
|
return trans.rollback( () => {
|
|
|
|
return cb(err);
|
|
|
|
});
|
|
|
|
}
|
|
|
|
|
|
|
|
async.eachSeries(fileEntries, (entry, nextEntry) => {
|
|
|
|
trans.run(
|
|
|
|
`INSERT INTO file_web_serve_batch (hash_id, file_id)
|
2017-09-26 16:44:15 +00:00
|
|
|
VALUES (?, ?);`,
|
2018-06-22 05:15:04 +00:00
|
|
|
[ hashId, entry.fileId ],
|
|
|
|
err => {
|
|
|
|
return nextEntry(err);
|
|
|
|
}
|
|
|
|
);
|
|
|
|
}, err => {
|
|
|
|
trans[err ? 'rollback' : 'commit']( () => {
|
|
|
|
return cb(err, url);
|
|
|
|
});
|
|
|
|
});
|
|
|
|
});
|
|
|
|
});
|
|
|
|
}
|
|
|
|
|
|
|
|
fileNotFound(resp) {
|
|
|
|
return this.webServer.instance.fileNotFound(resp);
|
|
|
|
}
|
|
|
|
|
|
|
|
routeWebRequest(req, resp) {
|
|
|
|
const hashId = paths.basename(req.url);
|
|
|
|
|
|
|
|
Log.debug( { hashId : hashId, url : req.url }, 'File area web request');
|
|
|
|
|
|
|
|
this.loadServedHashId(hashId, (err, servedItem) => {
|
|
|
|
|
|
|
|
if(err) {
|
|
|
|
return this.fileNotFound(resp);
|
|
|
|
}
|
|
|
|
|
|
|
|
const hashIdTypes = FileAreaWebAccess.getHashIdTypes();
|
|
|
|
switch(servedItem.hashIdType) {
|
|
|
|
case hashIdTypes.SingleFile :
|
|
|
|
return this.routeWebRequestForSingleFile(servedItem, req, resp);
|
|
|
|
|
|
|
|
case hashIdTypes.BatchArchive :
|
|
|
|
return this.routeWebRequestForBatchArchive(servedItem, req, resp);
|
|
|
|
|
|
|
|
default :
|
|
|
|
return this.fileNotFound(resp);
|
|
|
|
}
|
|
|
|
});
|
|
|
|
}
|
|
|
|
|
|
|
|
routeWebRequestForSingleFile(servedItem, req, resp) {
|
|
|
|
Log.debug( { servedItem : servedItem }, 'Single file web request');
|
|
|
|
|
|
|
|
const fileEntry = new FileEntry();
|
|
|
|
|
|
|
|
servedItem.fileId = servedItem.fileIds[0];
|
|
|
|
|
|
|
|
fileEntry.load(servedItem.fileId, err => {
|
|
|
|
if(err) {
|
|
|
|
return this.fileNotFound(resp);
|
|
|
|
}
|
|
|
|
|
|
|
|
const filePath = fileEntry.filePath;
|
|
|
|
if(!filePath) {
|
|
|
|
return this.fileNotFound(resp);
|
|
|
|
}
|
|
|
|
|
|
|
|
fs.stat(filePath, (err, stats) => {
|
|
|
|
if(err) {
|
|
|
|
return this.fileNotFound(resp);
|
|
|
|
}
|
|
|
|
|
|
|
|
resp.on('close', () => {
|
|
|
|
// connection closed *before* the response was fully sent
|
|
|
|
// :TODO: Log and such
|
|
|
|
});
|
|
|
|
|
|
|
|
resp.on('finish', () => {
|
|
|
|
// transfer completed fully
|
|
|
|
this.updateDownloadStatsForUserIdAndSystem(servedItem.userId, stats.size, [ fileEntry ]);
|
|
|
|
});
|
|
|
|
|
|
|
|
const headers = {
|
|
|
|
'Content-Type' : mimeTypes.contentType(filePath) || mimeTypes.contentType('.bin'),
|
|
|
|
'Content-Length' : stats.size,
|
|
|
|
'Content-Disposition' : `attachment; filename="${fileEntry.fileName}"`,
|
|
|
|
};
|
|
|
|
|
|
|
|
const readStream = fs.createReadStream(filePath);
|
|
|
|
resp.writeHead(200, headers);
|
|
|
|
return readStream.pipe(resp);
|
|
|
|
});
|
|
|
|
});
|
|
|
|
}
|
|
|
|
|
|
|
|
routeWebRequestForBatchArchive(servedItem, req, resp) {
|
|
|
|
Log.debug( { servedItem : servedItem }, 'Batch file web request');
|
|
|
|
|
|
|
|
//
|
|
|
|
// We are going to build an on-the-fly zip file stream of 1:n
|
|
|
|
// files in the batch.
|
|
|
|
//
|
|
|
|
// First, collect all file IDs
|
|
|
|
//
|
|
|
|
const self = this;
|
|
|
|
|
|
|
|
async.waterfall(
|
|
|
|
[
|
|
|
|
function fetchFileIds(callback) {
|
|
|
|
FileDb.all(
|
|
|
|
`SELECT file_id
|
2017-09-26 16:44:15 +00:00
|
|
|
FROM file_web_serve_batch
|
|
|
|
WHERE hash_id = ?;`,
|
2018-06-22 05:15:04 +00:00
|
|
|
[ servedItem.hashId ],
|
|
|
|
(err, fileIdRows) => {
|
|
|
|
if(err || !Array.isArray(fileIdRows) || 0 === fileIdRows.length) {
|
|
|
|
return callback(Errors.DoesNotExist('Could not get file IDs for batch'));
|
|
|
|
}
|
|
|
|
|
|
|
|
return callback(null, fileIdRows.map(r => r.file_id));
|
|
|
|
}
|
|
|
|
);
|
|
|
|
},
|
|
|
|
function loadFileEntries(fileIds, callback) {
|
|
|
|
async.map(fileIds, (fileId, nextFileId) => {
|
|
|
|
const fileEntry = new FileEntry();
|
|
|
|
fileEntry.load(fileId, err => {
|
|
|
|
return nextFileId(err, fileEntry);
|
|
|
|
});
|
|
|
|
}, (err, fileEntries) => {
|
|
|
|
if(err) {
|
|
|
|
return callback(Errors.DoesNotExist('Could not load file IDs for batch'));
|
|
|
|
}
|
|
|
|
|
|
|
|
return callback(null, fileEntries);
|
|
|
|
});
|
|
|
|
},
|
|
|
|
function createAndServeStream(fileEntries, callback) {
|
|
|
|
const filePaths = fileEntries.map(fe => fe.filePath);
|
|
|
|
Log.trace( { filePaths : filePaths }, 'Creating zip archive for batch web request');
|
|
|
|
|
|
|
|
const zipFile = new yazl.ZipFile();
|
|
|
|
|
|
|
|
zipFile.on('error', err => {
|
|
|
|
Log.warn( { error : err.message }, 'Error adding file to batch web request archive');
|
|
|
|
});
|
|
|
|
|
|
|
|
filePaths.forEach(fp => {
|
|
|
|
zipFile.addFile(
|
|
|
|
fp, // path to physical file
|
|
|
|
paths.basename(fp), // filename/path *stored in archive*
|
|
|
|
{
|
|
|
|
compress : false, // :TODO: do this smartly - if ext is in set = false, else true via isArchive() or such... mimeDB has this for us.
|
|
|
|
}
|
|
|
|
);
|
|
|
|
});
|
|
|
|
|
|
|
|
zipFile.end( finalZipSize => {
|
|
|
|
if(-1 === finalZipSize) {
|
|
|
|
return callback(Errors.UnexpectedState('Unable to acquire final zip size'));
|
|
|
|
}
|
|
|
|
|
|
|
|
resp.on('close', () => {
|
|
|
|
// connection closed *before* the response was fully sent
|
|
|
|
// :TODO: Log and such
|
|
|
|
});
|
|
|
|
|
|
|
|
resp.on('finish', () => {
|
|
|
|
// transfer completed fully
|
|
|
|
self.updateDownloadStatsForUserIdAndSystem(servedItem.userId, finalZipSize, fileEntries);
|
|
|
|
});
|
|
|
|
|
|
|
|
const batchFileName = `batch_${servedItem.hashId}.zip`;
|
|
|
|
|
|
|
|
const headers = {
|
|
|
|
'Content-Type' : mimeTypes.contentType(batchFileName) || mimeTypes.contentType('.bin'),
|
|
|
|
'Content-Length' : finalZipSize,
|
|
|
|
'Content-Disposition' : `attachment; filename="${batchFileName}"`,
|
|
|
|
};
|
|
|
|
|
|
|
|
resp.writeHead(200, headers);
|
|
|
|
return zipFile.outputStream.pipe(resp);
|
|
|
|
});
|
|
|
|
}
|
|
|
|
],
|
|
|
|
err => {
|
|
|
|
if(err) {
|
|
|
|
// :TODO: Log me!
|
|
|
|
return this.fileNotFound(resp);
|
|
|
|
}
|
|
|
|
|
|
|
|
// ...otherwise, we would have called resp() already.
|
|
|
|
}
|
|
|
|
);
|
|
|
|
}
|
|
|
|
|
|
|
|
updateDownloadStatsForUserIdAndSystem(userId, dlBytes, fileEntries) {
|
|
|
|
async.waterfall(
|
|
|
|
[
|
|
|
|
function fetchActiveUser(callback) {
|
|
|
|
const clientForUserId = getConnectionByUserId(userId);
|
|
|
|
if(clientForUserId) {
|
|
|
|
return callback(null, clientForUserId.user);
|
|
|
|
}
|
|
|
|
|
|
|
|
// not online now - look 'em up
|
|
|
|
User.getUser(userId, (err, assocUser) => {
|
|
|
|
return callback(err, assocUser);
|
|
|
|
});
|
|
|
|
},
|
|
|
|
function updateStats(user, callback) {
|
|
|
|
StatLog.incrementUserStat(user, 'dl_total_count', 1);
|
|
|
|
StatLog.incrementUserStat(user, 'dl_total_bytes', dlBytes);
|
|
|
|
StatLog.incrementSystemStat('dl_total_count', 1);
|
|
|
|
StatLog.incrementSystemStat('dl_total_bytes', dlBytes);
|
|
|
|
|
|
|
|
return callback(null, user);
|
|
|
|
},
|
|
|
|
function sendEvent(user, callback) {
|
|
|
|
Events.emit(
|
|
|
|
Events.getSystemEvents().UserDownload,
|
|
|
|
{
|
|
|
|
user : user,
|
|
|
|
files : fileEntries,
|
|
|
|
}
|
|
|
|
);
|
|
|
|
return callback(null);
|
|
|
|
}
|
|
|
|
]
|
|
|
|
);
|
|
|
|
}
|
2016-10-25 03:49:45 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
// Singleton: the whole system shares this one FileAreaWebAccess instance
module.exports = new FileAreaWebAccess();
|