/* jslint node: true */
'use strict';

//  ENiGMA½
const Config = require('./config.js').config;
const FileDb = require('./database.js').dbs.file;
const getISOTimestampString = require('./database.js').getISOTimestampString;
const FileEntry = require('./file_entry.js');
const getServer = require('./listening_server.js').getServer;
const Errors = require('./enig_error.js').Errors;
const ErrNotEnabled = require('./enig_error.js').ErrorReasons.NotEnabled;
const StatLog = require('./stat_log.js');
const User = require('./user.js');
const Log = require('./logger.js').log;
const getConnectionByUserId = require('./client_connections.js').getConnectionByUserId;
const webServerPackageName = require('./servers/content/web.js').moduleInfo.packageName;

//  deps
const hashids = require('hashids');
const moment = require('moment');
const paths = require('path');
const async = require('async');
const fs = require('graceful-fs');
const mimeTypes = require('mime-types');
const yazl = require('yazl');

function notEnabledError() {
    return Errors.General('Web server is not enabled', ErrNotEnabled);
}
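
//
//  FileAreaWebAccess
//
//  Provides temporary HTTP(S) download links for file base entries via the
//  content web server: individual files as well as on-the-fly batch (zip)
//  archives. Served hash IDs are persisted in the file_web_serve table and
//  removed again when their expiration timers fire.
//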
class FileAreaWebAccess {
    constructor() {
        this.hashids = new hashids(Config.general.boardName);
        this.expireTimers = {}; //  hashId->timer
    }

    startup(cb) {
        const self = this;

        async.series(
            [
                function initFromDb(callback) {
                    return self.load(callback);
                },
                function addWebRoute(callback) {
                    self.webServer = getServer(webServerPackageName);
                    if(!self.webServer) {
                        return callback(Errors.DoesNotExist(`Server with package name "${webServerPackageName}" does not exist`));
                    }

                    if(self.isEnabled()) {
                        const routeAdded = self.webServer.instance.addRoute({
                            method  : 'GET',
                            path    : Config.fileBase.web.routePath,
                            handler : self.routeWebRequest.bind(self),
                        });
                        return callback(routeAdded ? null : Errors.General('Failed adding route'));
                    } else {
                        return callback(null); //  not enabled, but no error
                    }
                }
            ],
            err => {
                return cb(err);
            }
        );
    }

    shutdown(cb) {
        return cb(null);
    }

    isEnabled() {
        return this.webServer.instance.isEnabled();
    }
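
    //
    //  Served hash IDs are hashids-encoded payloads of the form
    //  [ userId, hashIdType, id, ... ] (see getHashId() / loadServedHashId()).
    //  Illustrative only - actual strings depend on the board name used as salt:
    //
    //      this.hashids.encode(1, 0, 1234);    //  e.g. 'r4Nd0mW' for user 1, SingleFile, file ID 1234
    //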
    static getHashIdTypes() {
        return {
            SingleFile   : 0,
            BatchArchive : 1,
        };
    }

    load(cb) {
        //
        //  Load entries, register expiration timers
        //
        FileDb.each(
            `SELECT hash_id, expire_timestamp
            FROM file_web_serve;`,
            (err, row) => {
                if(row) {
                    this.scheduleExpire(row.hash_id, moment(row.expire_timestamp));
                }
            },
            err => {
                return cb(err);
            }
        );
    }

    removeEntry(hashId) {
        //
        //  Delete record from DB, and our timer
        //
        FileDb.run(
            `DELETE FROM file_web_serve
            WHERE hash_id = ?;`,
            [ hashId ]
        );

        delete this.expireTimers[hashId];
    }
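
    //
    //  Schedule removal of a served hash ID at |expireTime|. Any existing timer
    //  for the same hash ID is replaced; entries whose expiration has already
    //  passed are removed on the next tick.
    //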
    scheduleExpire(hashId, expireTime) {
        //  remove any previous entry for this hashId
        const previous = this.expireTimers[hashId];
        if(previous) {
            clearTimeout(previous);
            delete this.expireTimers[hashId];
        }

        const timeoutMs = expireTime.diff(moment());

        if(timeoutMs <= 0) {
            setImmediate( () => {
                this.removeEntry(hashId);
            });
        } else {
            this.expireTimers[hashId] = setTimeout( () => {
                this.removeEntry(hashId);
            }, timeoutMs);
        }
    }
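
    //
    //  Resolve a hash ID to a served item record of the shape
    //  { hashId, userId, hashIdType, expireTimestamp[, fileIds] };
    //  fileIds is only populated for SingleFile hash IDs.
    //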
    loadServedHashId(hashId, cb) {
        FileDb.get(
            `SELECT expire_timestamp FROM
            file_web_serve
            WHERE hash_id = ?`,
            [ hashId ],
            (err, result) => {
                if(err || !result) {
                    return cb(err ? err : Errors.DoesNotExist('Invalid or missing hash ID'));
                }

                const decoded = this.hashids.decode(hashId);

                //  decode() should provide an array of [ userId, hashIdType, id, ... ]
                if(!Array.isArray(decoded) || decoded.length < 3) {
                    return cb(Errors.Invalid('Invalid or unknown hash ID'));
                }

                const servedItem = {
                    hashId          : hashId,
                    userId          : decoded[0],
                    hashIdType      : decoded[1],
                    expireTimestamp : moment(result.expire_timestamp),
                };

                if(FileAreaWebAccess.getHashIdTypes().SingleFile === servedItem.hashIdType) {
                    servedItem.fileIds = decoded.slice(2);
                }

                return cb(null, servedItem);
            }
        );
    }

    getSingleFileHashId(client, fileEntry) {
        return this.getHashId(client, FileAreaWebAccess.getHashIdTypes().SingleFile, [ fileEntry.fileId ]);
    }

    getBatchArchiveHashId(client, batchId) {
        return this.getHashId(client, FileAreaWebAccess.getHashIdTypes().BatchArchive, batchId);
    }

    getHashId(client, hashIdType, identifier) {
        return this.hashids.encode(client.user.userId, hashIdType, identifier);
    }

    buildSingleFileTempDownloadLink(client, fileEntry, hashId) {
        hashId = hashId || this.getSingleFileHashId(client, fileEntry);

        return this.webServer.instance.buildUrl(`${Config.fileBase.web.path}${hashId}`);
    }

    buildBatchArchiveTempDownloadLink(client, hashId) {
        return this.webServer.instance.buildUrl(`${Config.fileBase.web.path}${hashId}`);
    }

    getExistingTempDownloadServeItem(client, fileEntry, cb) {
        if(!this.isEnabled()) {
            return cb(notEnabledError());
        }

        const hashId = this.getSingleFileHashId(client, fileEntry);
        this.loadServedHashId(hashId, (err, servedItem) => {
            if(err) {
                return cb(err);
            }

            servedItem.url = this.buildSingleFileTempDownloadLink(client, fileEntry);

            return cb(null, servedItem);
        });
    }

    _addOrUpdateHashIdRecord(dbOrTrans, hashId, expireTime, cb) {
        //  add/update rec with hash id and (latest) timestamp
        dbOrTrans.run(
            `REPLACE INTO file_web_serve (hash_id, expire_timestamp)
            VALUES (?, ?);`,
            [ hashId, getISOTimestampString(expireTime) ],
            err => {
                if(err) {
                    return cb(err);
                }

                this.scheduleExpire(hashId, expireTime);

                return cb(null);
            }
        );
    }
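
    //
    //  Create (or refresh) a temporary download for a single file entry and
    //  hand back its URL. Illustrative call only (hypothetical caller; assumes
    //  an active |client| and a loaded |fileEntry|):
    //
    //      this.createAndServeTempDownload(
    //          client, fileEntry,
    //          { expireTime : moment().add(1, 'days') },
    //          (err, url) => { /* present |url| to the user */ }
    //      );
    //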
    createAndServeTempDownload(client, fileEntry, options, cb) {
        if(!this.isEnabled()) {
            return cb(notEnabledError());
        }

        const hashId = this.getSingleFileHashId(client, fileEntry);
        const url = this.buildSingleFileTempDownloadLink(client, fileEntry, hashId);
        options.expireTime = options.expireTime || moment().add(2, 'days');

        this._addOrUpdateHashIdRecord(FileDb, hashId, options.expireTime, err => {
            return cb(err, url);
        });
    }
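
    //
    //  Create a temporary batch (zip) download covering one or more file
    //  entries and hand back its URL. Illustrative call only (hypothetical
    //  caller; assumes an array of loaded |fileEntries|):
    //
    //      this.createAndServeTempBatchDownload(
    //          client, fileEntries, {},
    //          (err, url) => { /* present |url| to the user */ }
    //      );
    //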
    createAndServeTempBatchDownload(client, fileEntries, options, cb) {
        if(!this.isEnabled()) {
            return cb(notEnabledError());
        }

        const batchId = moment().utc().unix();
        const hashId = this.getBatchArchiveHashId(client, batchId);
        const url = this.buildBatchArchiveTempDownloadLink(client, hashId);
        options.expireTime = options.expireTime || moment().add(2, 'days');

        FileDb.beginTransaction( (err, trans) => {
            if(err) {
                return cb(err);
            }

            this._addOrUpdateHashIdRecord(trans, hashId, options.expireTime, err => {
                if(err) {
                    return trans.rollback( () => {
                        return cb(err);
                    });
                }

                async.eachSeries(fileEntries, (entry, nextEntry) => {
                    trans.run(
                        `INSERT INTO file_web_serve_batch (hash_id, file_id)
                        VALUES (?, ?);`,
                        [ hashId, entry.fileId ],
                        err => {
                            return nextEntry(err);
                        }
                    );
                }, err => {
                    trans[err ? 'rollback' : 'commit']( () => {
                        return cb(err, url);
                    });
                });
            });
        });
    }

    fileNotFound(resp) {
        return this.webServer.instance.fileNotFound(resp);
    }
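
    //
    //  Handle an incoming GET on the configured file base route
    //  (config fileBase.web.routePath). The final path component of the request
    //  URL is treated as the hash ID. Illustrative request only - the actual
    //  prefix comes from configuration:
    //
    //      GET /f/h7gX2  ->  hashId 'h7gX2'
    //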
    routeWebRequest(req, resp) {
        const hashId = paths.basename(req.url);

        Log.debug( { hashId : hashId, url : req.url }, 'File area web request');

        this.loadServedHashId(hashId, (err, servedItem) => {
            if(err) {
                return this.fileNotFound(resp);
            }

            const hashIdTypes = FileAreaWebAccess.getHashIdTypes();
            switch(servedItem.hashIdType) {
                case hashIdTypes.SingleFile :
                    return this.routeWebRequestForSingleFile(servedItem, req, resp);

                case hashIdTypes.BatchArchive :
                    return this.routeWebRequestForBatchArchive(servedItem, req, resp);

                default :
                    return this.fileNotFound(resp);
            }
        });
    }

    routeWebRequestForSingleFile(servedItem, req, resp) {
        Log.debug( { servedItem : servedItem }, 'Single file web request');

        const fileEntry = new FileEntry();

        servedItem.fileId = servedItem.fileIds[0];

        fileEntry.load(servedItem.fileId, err => {
            if(err) {
                return this.fileNotFound(resp);
            }

            const filePath = fileEntry.filePath;
            if(!filePath) {
                return this.fileNotFound(resp);
            }

            fs.stat(filePath, (err, stats) => {
                if(err) {
                    return this.fileNotFound(resp);
                }

                resp.on('close', () => {
                    //  connection closed *before* the response was fully sent
                    //  :TODO: Log and such
                });

                resp.on('finish', () => {
                    //  transfer completed fully
                    this.updateDownloadStatsForUserIdAndSystem(servedItem.userId, stats.size);
                });

                const headers = {
                    //  note: look up by basename only; contentType() treats strings
                    //  containing '/' as an already-formed content type
                    'Content-Type'        : mimeTypes.contentType(paths.basename(filePath)) || mimeTypes.contentType('.bin'),
                    'Content-Length'      : stats.size,
                    'Content-Disposition' : `attachment; filename="${fileEntry.fileName}"`,
                };

                const readStream = fs.createReadStream(filePath);
                resp.writeHead(200, headers);
                return readStream.pipe(resp);
            });
        });
    }

    routeWebRequestForBatchArchive(servedItem, req, resp) {
        Log.debug( { servedItem : servedItem }, 'Batch file web request');

        //
        //  We are going to build an on-the-fly zip file stream of 1:n
        //  files in the batch.
        //
        //  First, collect all file IDs
        //
        const self = this;

        async.waterfall(
            [
                function fetchFileIds(callback) {
                    FileDb.all(
                        `SELECT file_id
                        FROM file_web_serve_batch
                        WHERE hash_id = ?;`,
                        [ servedItem.hashId ],
                        (err, fileIdRows) => {
                            if(err || !Array.isArray(fileIdRows) || 0 === fileIdRows.length) {
                                return callback(Errors.DoesNotExist('Could not get file IDs for batch'));
                            }

                            return callback(null, fileIdRows.map(r => r.file_id));
                        }
                    );
                },
                function loadFileEntries(fileIds, callback) {
                    const filePaths = [];
                    async.eachSeries(fileIds, (fileId, nextFileId) => {
                        const fileEntry = new FileEntry();
                        fileEntry.load(fileId, err => {
                            if(!err) {
                                filePaths.push(fileEntry.filePath);
                            }
                            return nextFileId(err);
                        });
                    }, err => {
                        if(err) {
                            return callback(Errors.DoesNotExist('Could not load file IDs for batch'));
                        }

                        return callback(null, filePaths);
                    });
                },
                function createAndServeStream(filePaths, callback) {
                    Log.trace( { filePaths : filePaths }, 'Creating zip archive for batch web request');

                    const zipFile = new yazl.ZipFile();

                    zipFile.on('error', err => {
                        Log.warn( { error : err.message }, 'Error adding file to batch web request archive');
                    });

                    filePaths.forEach(fp => {
                        zipFile.addFile(
                            fp,                 //  path to physical file
                            paths.basename(fp), //  filename/path *stored in archive*
                            {
                                compress : false,   //  :TODO: do this smartly - if ext is in set = false, else true via isArchive() or such... mimeDB has this for us.
                            }
                        );
                    });

                    zipFile.end( finalZipSize => {
                        if(-1 === finalZipSize) {
                            return callback(Errors.UnexpectedState('Unable to acquire final zip size'));
                        }

                        resp.on('close', () => {
                            //  connection closed *before* the response was fully sent
                            //  :TODO: Log and such
                        });

                        resp.on('finish', () => {
                            //  transfer completed fully
                            self.updateDownloadStatsForUserIdAndSystem(servedItem.userId, finalZipSize);
                        });

                        const batchFileName = `batch_${servedItem.hashId}.zip`;

                        const headers = {
                            'Content-Type'        : mimeTypes.contentType(batchFileName) || mimeTypes.contentType('.bin'),
                            'Content-Length'      : finalZipSize,
                            'Content-Disposition' : `attachment; filename="${batchFileName}"`,
                        };

                        resp.writeHead(200, headers);
                        return zipFile.outputStream.pipe(resp);
                    });
                }
            ],
            err => {
                if(err) {
                    //  :TODO: Log me!
                    return this.fileNotFound(resp);
                }

                //  ...otherwise, the response has already been written above.
            }
        );
    }
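
    //
    //  Credit a completed download to both user and system statistics. The user
    //  is taken from an active connection when available, otherwise loaded from
    //  the user database. |cb| is optional.
    //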
    updateDownloadStatsForUserIdAndSystem(userId, dlBytes, cb) {
        async.waterfall(
            [
                function fetchActiveUser(callback) {
                    const clientForUserId = getConnectionByUserId(userId);
                    if(clientForUserId) {
                        return callback(null, clientForUserId.user);
                    }

                    //  not online now - look 'em up
                    User.getUser(userId, (err, assocUser) => {
                        return callback(err, assocUser);
                    });
                },
                function updateStats(user, callback) {
                    StatLog.incrementUserStat(user, 'dl_total_count', 1);
                    StatLog.incrementUserStat(user, 'dl_total_bytes', dlBytes);
                    StatLog.incrementSystemStat('dl_total_count', 1);
                    StatLog.incrementSystemStat('dl_total_bytes', dlBytes);

                    return callback(null);
                }
            ],
            err => {
                if(cb) {
                    return cb(err);
                }
            }
        );
    }
}

module.exports = new FileAreaWebAccess();
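
//
//  Illustrative consumer (hypothetical - the require path depends on where this
//  module lives within the tree):
//
//      const fileAreaWeb = require('./file_area_web.js');
//
//      fileAreaWeb.createAndServeTempDownload(client, fileEntry, {}, (err, url) => {
//          //  hand |url| to the user
//      });
//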