Merge branch 'master' of ssh://numinibsd/git/base/enigma-bbs

This commit is contained in:
Bryan Ashby 2017-05-31 21:43:46 -06:00
commit dfadc147df
11 changed files with 496 additions and 204 deletions

View File

@ -225,13 +225,12 @@ function getDefaultConfig() {
firstMenuNewUser : 'sshConnectedNewUser', firstMenuNewUser : 'sshConnectedNewUser',
}, },
webSocket : { webSocket : {
port : 8810, port : 8810, // ws://
enabled : true, // :TODO: default to false
},
secureWebSocket : {
port : 8811,
enabled : false, enabled : false,
} securePort : 8811, // wss:// - must provide certPem and keyPem
certPem : paths.join(__dirname, './../misc/https_cert.pem'),
keyPem : paths.join(__dirname, './../misc/https_cert_key.pem'),
},
}, },
contentServers : { contentServers : {
@ -280,7 +279,10 @@ function getDefaultConfig() {
cmd : 'exiftool', cmd : 'exiftool',
args : [ args : [
'-charset', 'utf8', '{filePath}', '-charset', 'utf8', '{filePath}',
'--directory', '--filepermissions', '--exiftoolversion', '--filename', '--filesize', '--filemodifydate', '--fileaccessdate', '--fileinodechangedate' // exclude the following:
'--directory', '--filepermissions', '--exiftoolversion', '--filename', '--filesize',
'--filemodifydate', '--fileaccessdate', '--fileinodechangedate', '--createdate', '--modifydate',
'--metadatadate', '--xmptoolkit'
] ]
} }
}, },

View File

@ -22,7 +22,7 @@ const crypto = require('crypto');
const paths = require('path'); const paths = require('path');
const temptmp = require('temptmp').createTrackedSession('file_area'); const temptmp = require('temptmp').createTrackedSession('file_area');
const iconv = require('iconv-lite'); const iconv = require('iconv-lite');
const exec = require('child_process').exec; const execFile = require('child_process').execFile;
const moment = require('moment'); const moment = require('moment');
exports.isInternalArea = isInternalArea; exports.isInternalArea = isInternalArea;
@ -234,6 +234,7 @@ function attemptSetEstimatedReleaseDate(fileEntry) {
// //
const maxYear = moment().add(2, 'year').year(); const maxYear = moment().add(2, 'year').year();
const match = getMatch(fileEntry.desc) || getMatch(fileEntry.descLong); const match = getMatch(fileEntry.desc) || getMatch(fileEntry.descLong);
if(match && match[1]) { if(match && match[1]) {
let year; let year;
if(2 === match[1].length) { if(2 === match[1].length) {
@ -262,48 +263,16 @@ function logDebug(obj, msg) {
} }
} }
function populateFileEntryWithArchive(fileEntry, filePath, stepInfo, iterator, cb) { function extractAndProcessDescFiles(fileEntry, filePath, archiveEntries, cb) {
const archiveUtil = ArchiveUtil.getInstance();
const archiveType = fileEntry.meta.archive_type; // we set this previous to populateFileEntryWithArchive()
async.waterfall( async.waterfall(
[ [
function getArchiveFileList(callback) { function extractDescFiles(callback) {
stepInfo.step = 'archive_list_start';
iterator(err => {
if(err) {
return callback(err);
}
archiveUtil.listEntries(filePath, archiveType, (err, entries) => {
if(err) {
stepInfo.step = 'archive_list_failed';
} else {
stepInfo.step = 'archive_list_finish';
stepInfo.archiveEntries = entries || [];
}
iterator(iterErr => {
return callback( iterErr, entries || [] ); // ignore original |err| here
});
});
});
},
function processDescFilesStart(entries, callback) {
stepInfo.step = 'desc_files_start';
iterator(err => {
return callback(err, entries);
});
},
function extractDescFiles(entries, callback) {
// :TODO: would be nice if these RegExp's were cached // :TODO: would be nice if these RegExp's were cached
// :TODO: this is long winded... // :TODO: this is long winded...
const extractList = []; const extractList = [];
const shortDescFile = entries.find( e => { const shortDescFile = archiveEntries.find( e => {
return Config.fileBase.fileNamePatterns.desc.find( pat => new RegExp(pat, 'i').test(e.fileName) ); return Config.fileBase.fileNamePatterns.desc.find( pat => new RegExp(pat, 'i').test(e.fileName) );
}); });
@ -311,7 +280,7 @@ function populateFileEntryWithArchive(fileEntry, filePath, stepInfo, iterator, c
extractList.push(shortDescFile.fileName); extractList.push(shortDescFile.fileName);
} }
const longDescFile = entries.find( e => { const longDescFile = archiveEntries.find( e => {
return Config.fileBase.fileNamePatterns.descLong.find( pat => new RegExp(pat, 'i').test(e.fileName) ); return Config.fileBase.fileNamePatterns.descLong.find( pat => new RegExp(pat, 'i').test(e.fileName) );
}); });
@ -328,7 +297,8 @@ function populateFileEntryWithArchive(fileEntry, filePath, stepInfo, iterator, c
return callback(err); return callback(err);
} }
archiveUtil.extractTo(filePath, tempDir, archiveType, extractList, err => { const archiveUtil = ArchiveUtil.getInstance();
archiveUtil.extractTo(filePath, tempDir, fileEntry.meta.archive_type, extractList, err => {
if(err) { if(err) {
return callback(err); return callback(err);
} }
@ -384,6 +354,101 @@ function populateFileEntryWithArchive(fileEntry, filePath, stepInfo, iterator, c
return callback(null); return callback(null);
}); });
}, },
],
err => {
return cb(err);
}
);
}
//
//  Handles the case of an archive containing exactly one entry: extract
//  that entry to a temporary directory and attempt to gather description
//  info from the extracted file itself (e.g. a zipped up .pdf).
//
//  Calls back with (err) when finished.
//
function extractAndProcessSingleArchiveEntry(fileEntry, filePath, archiveEntries, cb) {
    async.waterfall(
        [
            function extractToTemp(callback) {
                // :TODO: we may want to skip this if the compressed file is too large...
                temptmp.mkdir( { prefix : 'enigextract-' }, (err, tempDir) => {
                    if(err) {
                        return callback(err);
                    }

                    //  ensure we only extract one - there should only be one anyway;
                    //  all we really need is its fileName
                    const entryNames = archiveEntries.slice(0, 1).map(entry => entry.fileName);

                    ArchiveUtil.getInstance().extractTo(
                        filePath, tempDir, fileEntry.meta.archive_type, entryNames,
                        err => {
                            if(err) {
                                return callback(err);
                            }
                            return callback(null, paths.join(tempDir, entryNames[0]));
                        }
                    );
                });
            },
            function processSingleExtractedFile(extractedFile, callback) {
                populateFileEntryInfoFromFile(fileEntry, extractedFile, err => {
                    //  fall back to deriving a description from the file name
                    if(!fileEntry.desc) {
                        fileEntry.desc = getDescFromFileName(filePath);
                    }
                    return callback(err);
                });
            }
        ],
        err => {
            return cb(err);
        }
    );
}
function populateFileEntryWithArchive(fileEntry, filePath, stepInfo, iterator, cb) {
const archiveUtil = ArchiveUtil.getInstance();
const archiveType = fileEntry.meta.archive_type; // we set this previous to populateFileEntryWithArchive()
async.waterfall(
[
function getArchiveFileList(callback) {
stepInfo.step = 'archive_list_start';
iterator(err => {
if(err) {
return callback(err);
}
archiveUtil.listEntries(filePath, archiveType, (err, entries) => {
if(err) {
stepInfo.step = 'archive_list_failed';
} else {
stepInfo.step = 'archive_list_finish';
stepInfo.archiveEntries = entries || [];
}
iterator(iterErr => {
return callback( iterErr, entries || [] ); // ignore original |err| here
});
});
});
},
function processDescFilesStart(entries, callback) {
stepInfo.step = 'desc_files_start';
iterator(err => {
return callback(err, entries);
});
},
function extractDescFromArchive(entries, callback) {
//
// If we have a -single- entry in the archive, extract that file
// and try retrieving info in the non-archive manner. This should
// work for things like zipped up .pdf files.
//
// Otherwise, try to find particular desc files such as FILE_ID.DIZ
// and README.1ST
//
const archDescHandler = (1 === entries.length) ? extractAndProcessSingleArchiveEntry : extractAndProcessDescFiles;
archDescHandler(fileEntry, filePath, entries, err => {
return callback(err);
});
},
function attemptReleaseYearEstimation(callback) { function attemptReleaseYearEstimation(callback) {
attemptSetEstimatedReleaseDate(fileEntry); attemptSetEstimatedReleaseDate(fileEntry);
return callback(null); return callback(null);
@ -413,18 +478,10 @@ function getInfoExtractUtilForDesc(mimeType, descType) {
return util; return util;
} }
function populateFileEntryNonArchive(fileEntry, filePath, stepInfo, iterator, cb) { function populateFileEntryInfoFromFile(fileEntry, filePath, cb) {
async.series(
[
function processDescFilesStart(callback) {
stepInfo.step = 'desc_files_start';
return iterator(callback);
},
function getDescriptions(callback) {
const mimeType = resolveMimeType(filePath); const mimeType = resolveMimeType(filePath);
if(!mimeType) { if(!mimeType) {
return callback(null); return cb(null);
} }
async.eachSeries( [ 'short', 'long' ], (descType, nextDesc) => { async.eachSeries( [ 'short', 'long' ], (descType, nextDesc) => {
@ -435,10 +492,11 @@ function populateFileEntryNonArchive(fileEntry, filePath, stepInfo, iterator, cb
const args = (util.args || [ '{filePath}'] ).map( arg => stringFormat(arg, { filePath : filePath } ) ); const args = (util.args || [ '{filePath}'] ).map( arg => stringFormat(arg, { filePath : filePath } ) );
exec(`${util.cmd} ${args.join(' ')}`, (err, stdout) => { execFile(util.cmd, args, { timeout : 1000 * 30 }, (err, stdout) => {
if(err) { if(err || !stdout) {
const reason = err ? err.message : 'No description produced';
logDebug( logDebug(
{ error : err.message, cmd : util.cmd, args : args }, { reason : reason, cmd : util.cmd, args : args },
`${_.upperFirst(descType)} description command failed` `${_.upperFirst(descType)} description command failed`
); );
} else { } else {
@ -463,7 +521,24 @@ function populateFileEntryNonArchive(fileEntry, filePath, stepInfo, iterator, cb
return nextDesc(null); return nextDesc(null);
}); });
}, () => { }, () => {
return callback(null); return cb(null);
});
}
function populateFileEntryNonArchive(fileEntry, filePath, stepInfo, iterator, cb) {
async.series(
[
function processDescFilesStart(callback) {
stepInfo.step = 'desc_files_start';
return iterator(callback);
},
function getDescriptions(callback) {
populateFileEntryInfoFromFile(fileEntry, filePath, err => {
if(!fileEntry.desc) {
fileEntry.desc = getDescFromFileName(filePath);
}
return callback(err);
}); });
}, },
function processDescFilesFinish(callback) { function processDescFilesFinish(callback) {

View File

@ -353,6 +353,41 @@ module.exports = class FileEntry {
); );
} }
static findByFileNameWildcard(wc, cb) {
// convert any * -> % and ? -> _ for SQLite syntax - see https://www.sqlite.org/lang_expr.html
wc = wc.replace(/\*/g, '%').replace(/\?/g, '_');
fileDb.all(
`SELECT file_id
FROM file
WHERE file_name LIKE "${wc}"
`,
(err, fileIdRows) => {
if(err) {
return cb(err);
}
if(!fileIdRows || 0 === fileIdRows.length) {
return cb(Errors.DoesNotExist('No matches'));
}
const entries = [];
async.each(fileIdRows, (row, nextRow) => {
const fileEntry = new FileEntry();
fileEntry.load(row.file_id, err => {
if(!err) {
entries.push(fileEntry);
}
return nextRow(err);
});
},
err => {
return cb(err, entries);
});
}
);
}
static findFiles(filter, cb) { static findFiles(filter, cb) {
filter = filter || {}; filter = filter || {};

View File

@ -43,7 +43,7 @@ module.exports = class LoginServerModule extends ServerModule {
} }
client.session.serverName = modInfo.name; client.session.serverName = modInfo.name;
client.session.isSecure = modInfo.isSecure || false; client.session.isSecure = _.isBoolean(client.isSecure) ? client.isSecure : (modInfo.isSecure || false);
clientConns.addNewClient(client, clientSock); clientConns.addNewClient(client, clientSock);

View File

@ -171,35 +171,42 @@ function dumpAreaInfo(areaInfo, areaAndStorageInfo, cb) {
return cb(null); return cb(null);
} }
function getSpecificFileEntry(pattern, cb) { function getFileEntries(pattern, cb) {
// spec: FILE_ID|SHA|PARTIAL_SHA // spec: FILENAME_WC|FILE_ID|SHA|PARTIAL_SHA
const FileEntry = require('../../core/file_entry.js'); const FileEntry = require('../../core/file_entry.js');
async.waterfall( async.waterfall(
[ [
function getByFileId(callback) { function tryByFileId(callback) {
const fileId = parseInt(pattern); const fileId = parseInt(pattern);
if(!/^[0-9]+$/.test(pattern) || isNaN(fileId)) { if(!/^[0-9]+$/.test(pattern) || isNaN(fileId)) {
return callback(null, null); return callback(null, null); // try SHA
} }
const fileEntry = new FileEntry(); const fileEntry = new FileEntry();
fileEntry.load(fileId, () => { fileEntry.load(fileId, err => {
return callback(null, fileEntry); // try sha return callback(null, err ? null : [ fileEntry ] );
}); });
}, },
function getBySha(fileEntry, callback) { function tryByShaOrPartialSha(entries, callback) {
if(fileEntry) { if(entries) {
return callback(null, fileEntry); // already got it by sha return callback(null, entries); // already got it by FILE_ID
} }
FileEntry.findFileBySha(pattern, (err, fileEntry) => { FileEntry.findFileBySha(pattern, (err, fileEntry) => {
return callback(err, fileEntry); return callback(null, fileEntry ? [ fileEntry ] : null );
}); });
}, },
function tryByFileNameWildcard(entries, callback) {
if(entries) {
return callback(null, entries); // already got by FILE_ID|SHA
}
return FileEntry.findByFileNameWildcard(pattern, callback);
}
], ],
(err, fileEntry) => { (err, entries) => {
return cb(err, fileEntry); return cb(err, entries);
} }
); );
} }
@ -208,8 +215,12 @@ function dumpFileInfo(shaOrFileId, cb) {
async.waterfall( async.waterfall(
[ [
function getEntry(callback) { function getEntry(callback) {
getSpecificFileEntry(shaOrFileId, (err, fileEntry) => { getFileEntries(shaOrFileId, (err, entries) => {
return callback(err, fileEntry); if(err) {
return callback(err);
}
return callback(null, entries[0]);
}); });
}, },
function dumpInfo(fileEntry, callback) { function dumpInfo(fileEntry, callback) {
@ -332,7 +343,7 @@ function moveFiles() {
// //
// oputil fb move SRC [SRC2 ...] DST // oputil fb move SRC [SRC2 ...] DST
// //
// SRC: PATH|FILE_ID|SHA|AREA_TAG[@STORAGE_TAG] // SRC: FILENAME_WC|FILE_ID|SHA|AREA_TAG[@STORAGE_TAG]
// DST: AREA_TAG[@STORAGE_TAG] // DST: AREA_TAG[@STORAGE_TAG]
// //
if(argv._.length < 4) { if(argv._.length < 4) {
@ -368,11 +379,10 @@ function moveFiles() {
FileEntry = require('../../core/file_entry.js'); FileEntry = require('../../core/file_entry.js');
async.eachSeries(src, (areaAndStorage, next) => { async.eachSeries(src, (areaAndStorage, next) => {
//
// If this entry represents a area tag, it means *all files* in that area
//
const areaInfo = fileArea.getFileAreaByTag(areaAndStorage.areaTag); const areaInfo = fileArea.getFileAreaByTag(areaAndStorage.areaTag);
if(areaInfo) { if(areaInfo) {
// AREA_TAG[@STORAGE_TAG] - all files in area@tag
src.areaInfo = areaInfo; src.areaInfo = areaInfo;
const findFilter = { const findFilter = {
@ -403,12 +413,14 @@ function moveFiles() {
}); });
} else { } else {
// PATH|FILE_ID|SHA|PARTIAL_SHA // FILENAME_WC|FILE_ID|SHA|PARTIAL_SHA
getSpecificFileEntry(areaAndStorage.pattern, (err, fileEntry) => { // :TODO: FULL_PATH -> entries
getFileEntries(areaAndStorage.pattern, (err, entries) => {
if(err) { if(err) {
return next(err); return next(err);
} }
srcEntries.push(fileEntry);
srcEntries = srcEntries.concat(entries);
return next(null); return next(null);
}); });
} }
@ -448,18 +460,30 @@ function moveFiles() {
); );
} }
//  Placeholder handler for the `fb remove` action (dispatched from
//  handleFileBaseCommand()). Intended syntax:
//      REMOVE SHA|FILE_ID [SHA|FILE_ID ...]
//  :TODO: currently a no-op — implementation pending
function removeFiles() {
    //
    // REMOVE SHA|FILE_ID [SHA|FILE_ID ...]
}
function handleFileBaseCommand() { function handleFileBaseCommand() {
function errUsage() {
return printUsageAndSetExitCode(
getHelpFor('FileBase') + getHelpFor('FileOpsInfo'),
ExitCodes.ERROR
);
}
if(true === argv.help) { if(true === argv.help) {
return printUsageAndSetExitCode(getHelpFor('FileBase'), ExitCodes.ERROR); return errUsage();
} }
const action = argv._[1]; const action = argv._[1];
switch(action) { return ({
case 'info' : return displayFileAreaInfo(); info : displayFileAreaInfo,
case 'scan' : return scanFileAreas(); scan : scanFileAreas,
case 'move' : return moveFiles(); move : moveFiles,
remove : removeFiles,
default : return printUsageAndSetExitCode(getHelpFor('FileBase'), ExitCodes.ERROR); }[action] || errUsage)();
}
} }

View File

@ -12,60 +12,71 @@ const usageHelp = exports.USAGE_HELP = {
<command> [<args>] <command> [<args>]
global args: global args:
-c, --config PATH : specify config path (${getDefaultConfigPath()}) -c, --config PATH specify config path (${getDefaultConfigPath()})
-n, --no-prompt : assume defaults/don't prompt for input where possible -n, --no-prompt assume defaults/don't prompt for input where possible
where <command> is one of:
user : user utilities
config : config file management
fb : file base management
commands:
user user utilities
config config file management
fb file base management
`, `,
User : User :
`usage: optutil.js user --user USERNAME <args> `usage: optutil.js user --user USERNAME <args>
valid args: valid args:
--user USERNAME : specify username for further actions --user USERNAME specify username for further actions
--password PASS : set new password --password PASS set new password
--delete : delete user --delete delete user
--activate : activate user --activate activate user
--deactivate : deactivate user --deactivate deactivate user
`, `,
Config : Config :
`usage: optutil.js config <action> [<args>] `usage: optutil.js config <action> [<args>]
where <action> is one of: actions:
new : generate a new/initial configuration new generate a new/initial configuration
import-areas PATH : import areas using fidonet *.NA or AREAS.BBS file from PATH import-areas PATH import areas using fidonet *.NA or AREAS.BBS file from PATH
valid import-areas <args>: import-areas args:
--conf CONF_TAG : specify conference tag in which to import areas --conf CONF_TAG specify conference tag in which to import areas
--network NETWORK : specify network name/key to associate FTN areas --network NETWORK specify network name/key to associate FTN areas
--uplinks UL1,UL2,... : specify one or more comma separated uplinks --uplinks UL1,UL2,... specify one or more comma separated uplinks
--type TYPE : specifies area import type. valid options are "bbs" and "na" --type TYPE specifies area import type. valid options are "bbs" and "na"
`, `,
FileBase : FileBase :
`usage: oputil.js fb <action> [<args>] <AREA_TAG|SHA|FILE_ID[@STORAGE_TAG] ...> [<args>] `usage: oputil.js fb <action> [<args>] <AREA_TAG|SHA|FILE_ID[@STORAGE_TAG] ...> [<args>]
where <action> is one of: actions:
scan AREA_TAG : scan specified areas scan AREA_TAG[@STORAGE_TAG] scan specified area
AREA_TAG may be suffixed with @STORAGE_TAG; for example: retro@bbs
info AREA_TAG|SHA|FILE_ID : display information about areas and/or files info AREA_TAG|SHA|FILE_ID display information about areas and/or files
SHA may be a full or partial SHA-256 SHA may be a full or partial SHA-256
move SRC DST : move entry(s) from SRC to DST where: move SRC [SRC ...] DST move entry(s) from SRC to DST
SRC may be FILE_ID|SHA|AREA_TAG * SRC: FILENAME_WC|SHA|FILE_ID|AREA_TAG[@STORAGE_TAG]
DST may be AREA_TAG, optionally suffixed with @STORAGE_TAG; for example: retro@bbs * DST: AREA_TAG[@STORAGE_TAG]
SHA may be a full or partial SHA-256
multiple instances of SRC may exist: SRC1 SRC2 ...
valid scan <args>: remove SHA|FILE_ID removes an entry from the system
--tags TAG1,TAG2,... : specify tag(s) to assign to discovered entries
valid info <args>: scan args:
--show-desc : display short description, if any --tags TAG1,TAG2,... specify tag(s) to assign to discovered entries
info args:
--show-desc display short description, if any
remove args:
--delete also remove underlying physical file
`,
FileOpsInfo :
`
general information:
AREA_TAG[@STORAGE_TAG] can specify an area tag and optionally, a storage specific tag
example: retro@bbs
FILENAME_WC filename with * and ? wildcard support. may match 0:n entries
SHA full or partial SHA-256
FILE_ID a file identifier. see file.sqlite3
` `
}; };

View File

@ -95,7 +95,7 @@ const PREDEFINED_MCI_GENERATORS = {
const byteSize = StatLog.getUserStatNum(client.user, 'dl_total_bytes'); const byteSize = StatLog.getUserStatNum(client.user, 'dl_total_bytes');
return formatByteSize(byteSize, true); // true=withAbbr return formatByteSize(byteSize, true); // true=withAbbr
}, },
UP : function userNumUploadsclient(client) { return userStatAsString(client, 'ul_total_count', 0); }, // Obv/2 UP : function userNumUploads(client) { return userStatAsString(client, 'ul_total_count', 0); }, // Obv/2
UK : function userByteUpload(client) { // Obv/2 uses UK=uploaded Kbytes UK : function userByteUpload(client) { // Obv/2 uses UK=uploaded Kbytes
const byteSize = StatLog.getUserStatNum(client.user, 'ul_total_bytes'); const byteSize = StatLog.getUserStatNum(client.user, 'ul_total_bytes');
return formatByteSize(byteSize, true); // true=withAbbr return formatByteSize(byteSize, true); // true=withAbbr

View File

@ -24,6 +24,8 @@ const ModuleInfo = exports.moduleInfo = {
packageName : 'codes.l33t.enigma.telnet.server', packageName : 'codes.l33t.enigma.telnet.server',
}; };
exports.TelnetClient = TelnetClient;
// //
// Telnet Protocol Resources // Telnet Protocol Resources
// * http://pcmicro.com/netfoss/telnet.html // * http://pcmicro.com/netfoss/telnet.html
@ -498,54 +500,6 @@ function TelnetClient(input, output) {
this.input.on('data', this.dataHandler); this.input.on('data', this.dataHandler);
/*
this.input.on('data', b => {
bufs.push(b);
let i;
while((i = bufs.indexOf(IAC_BUF)) >= 0) {
//
// Some clients will send even IAC separate from data
//
if(bufs.length <= (i + 1)) {
i = MORE_DATA_REQUIRED;
break;
}
assert(bufs.length > (i + 1));
if(i > 0) {
self.emit('data', bufs.splice(0, i).toBuffer());
}
i = parseBufs(bufs);
if(MORE_DATA_REQUIRED === i) {
break;
} else {
if(i.option) {
self.emit(i.option, i); // "transmit binary", "echo", ...
}
self.handleTelnetEvent(i);
if(i.data) {
self.emit('data', i.data);
}
}
}
if(MORE_DATA_REQUIRED !== i && bufs.length > 0) {
//
// Standard data payload. This can still be "non-user" data
// such as ANSI control, but we don't handle that here.
//
self.emit('data', bufs.splice(0).toBuffer());
}
});
*/
this.input.on('end', () => { this.input.on('end', () => {
self.emit('end'); self.emit('end');
}); });

View File

@ -0,0 +1,175 @@
/* jslint node: true */
'use strict';
// ENiGMA½
const Config = require('../../config.js').config;
const TelnetClient = require('./telnet.js').TelnetClient;
const Log = require('../../logger.js').log;
const LoginServerModule = require('../../login_server_module.js');
// deps
const _ = require('lodash');
const WebSocketServer = require('ws').Server;
const http = require('http');
const https = require('https');
const fs = require('graceful-fs');
const EventEmitter = require('events');
//  Module metadata exposed to the ENiGMA½ module loader
const ModuleInfo = exports.moduleInfo = {
    name        : 'WebSocket',
    desc        : 'WebSocket Server',
    author      : 'NuSkooler',
    packageName : 'codes.l33t.enigma.websocket.server',
};
//
//  Adapts a 'ws' WebSocket connection to the TelnetClient interface so the
//  rest of the system can treat it like any other telnet-style client.
//
//  |ws|         : the 'ws' WebSocket instance
//  |req|        : the originating HTTP upgrade request (used for remoteAddress)
//  |serverType| : one of WSS_SERVER_TYPES; 'secure' indicates wss:// (TLS)
//
function WebSocketClient(ws, req, serverType) {
    //  true when connected over wss://
    Object.defineProperty(this, 'isSecure', {
        get : () => 'secure' === serverType ? true : false,
    });

    //
    //  This bridge makes accessible various calls that client sub classes
    //  want to access on I/O socket
    //
    this.socketBridge = new class SocketBridge extends EventEmitter {
        constructor(ws) {
            super();
            this.ws = ws;
        }

        end() {
            return ws.terminate();
        }

        write(data, cb) {
            //  always send as binary frames; |cb| fires once sent
            return this.ws.send(data, { binary : true }, cb);
        }

        get remoteAddress() {
            return req.connection.remoteAddress;
        }
    }(ws);

    //  surface raw WebSocket payloads as 'data' events, like a net.Socket
    ws.on('message', data => {
        this.socketBridge.emit('data', data);
    });

    ws.on('close', () => {
        this.end();
    });

    //
    //  Monitor connection status with ping/pong
    //  (pings are sent from WebSocketLoginServer.listen(); pong marks alive)
    //
    ws.on('pong', () => {
        Log.trace(`Pong from ${this.socketBridge.remoteAddress}`);
        ws.isConnectionAlive = true;
    });

    //  initialize the TelnetClient base using the bridge for both input & output
    TelnetClient.call(this, this.socketBridge, this.socketBridge);

    //  start handshake process
    this.banner();
}

require('util').inherits(WebSocketClient, TelnetClient);

//  the (up to) two server flavors we may create; see createServer()
const WSS_SERVER_TYPES = [ 'insecure', 'secure' ];
exports.getModule = class WebSocketLoginServer extends LoginServerModule {
constructor() {
super();
}
createServer() {
//
// We will actually create up to two servers:
// * insecure websocket (ws://)
// * secure (tls) websocket (wss://)
//
const config = _.get(Config, 'loginServers.webSocket') || { enabled : false };
if(!config || true !== config.enabled || !(config.port || config.securePort)) {
return;
}
if(config.port) {
const httpServer = http.createServer( (req, resp) => {
// dummy handler
resp.writeHead(200);
return resp.end('ENiGMA½ BBS WebSocket Server!');
});
this.insecure = {
httpServer : httpServer,
wsServer : new WebSocketServer( { server : httpServer } ),
};
}
if(config.securePort) {
const httpServer = https.createServer({
key : fs.readFileSync(Config.loginServers.webSocket.keyPem),
cert : fs.readFileSync(Config.loginServers.webSocket.certPem),
});
this.secure = {
httpServer : httpServer,
wsServer : new WebSocketServer( { server : httpServer } ),
};
}
}
listen() {
WSS_SERVER_TYPES.forEach(serverType => {
const server = this[serverType];
if(!server) {
return;
}
const serverName = `${ModuleInfo.name} (${serverType})`;
const port = parseInt(_.get(Config, [ 'loginServers', 'webSocket', 'secure' === serverType ? 'securePort' : 'port' ] ));
if(isNaN(port)) {
Log.error( { server : serverName, port : port }, 'Cannot load server (invalid port)' );
return;
}
server.httpServer.listen(port);
server.wsServer.on('connection', (ws, req) => {
const webSocketClient = new WebSocketClient(ws, req, serverType);
this.handleNewClient(webSocketClient, webSocketClient.socketBridge, ModuleInfo);
});
Log.info( { server : serverName, port : port }, 'Listening for connections' );
});
//
// Send pings every 30s
//
setInterval( () => {
WSS_SERVER_TYPES.forEach(serverType => {
if(this[serverType]) {
this[serverType].wsServer.clients.forEach(ws => {
if(false === ws.isConnectionAlive) {
Log.debug('WebSocket connection seems inactive. Terminating.');
return ws.terminate();
}
ws.isConnectionAlive = false; // pong will reset this
Log.trace('Ping to remote WebSocket client');
return ws.ping('', false, true);
});
}
});
}, 30000);
return true;
}
webSocketConnection(conn) {
const webSocketClient = new WebSocketClient(conn);
this.handleNewClient(webSocketClient, webSocketClient.socketShim, ModuleInfo);
}
};

View File

@ -43,8 +43,9 @@
"temptmp": "^1.0.0", "temptmp": "^1.0.0",
"uuid": "^3.0.1", "uuid": "^3.0.1",
"uuid-parse": "^1.0.0", "uuid-parse": "^1.0.0",
"ws" : "^2.3.1", "ws" : "^3.0.0",
"graceful-fs" : "^4.1.11" "graceful-fs" : "^4.1.11",
"exiftool" : "^0.0.3"
}, },
"devDependencies": {}, "devDependencies": {},
"engines": { "engines": {

View File

@ -19,6 +19,11 @@ const FILETYPE_HANDLERS = {};
[ 'PNG', 'JPEG', 'GIF', 'WEBP', 'XCF' ].forEach(ext => FILETYPE_HANDLERS[ext] = imageFile); [ 'PNG', 'JPEG', 'GIF', 'WEBP', 'XCF' ].forEach(ext => FILETYPE_HANDLERS[ext] = imageFile);
function audioFile(metadata) { function audioFile(metadata) {
// nothing if we don't know at least the author or title
if(!metadata.author && !metadata.title) {
return;
}
let desc = `${metadata.artist||'Unknown Artist'} - ${metadata.title||'Unknown'} (`; let desc = `${metadata.artist||'Unknown Artist'} - ${metadata.title||'Unknown'} (`;
if(metadata.year) { if(metadata.year) {
desc += `${metadata.year}, `; desc += `${metadata.year}, `;
@ -28,6 +33,11 @@ function audioFile(metadata) {
} }
function documentFile(metadata) { function documentFile(metadata) {
// nothing if we don't know at least the author or title
if(!metadata.author && !metadata.title) {
return;
}
let desc = `${metadata.author||'Unknown Author'} - ${metadata.title||'Unknown'}`; let desc = `${metadata.author||'Unknown Author'} - ${metadata.title||'Unknown'}`;
const created = moment(metadata.createdate); const created = moment(metadata.createdate);
if(created.isValid()) { if(created.isValid()) {
@ -86,7 +96,12 @@ function main() {
return -1; return -1;
} }
console.info(handler(metadata)); const info = handler(metadata);
if(!info) {
return -1;
}
console.info(info);
return 0; return 0;
}); });
}); });