2016-02-10 05:30:59 +00:00
|
|
|
/* jslint node: true */
|
|
|
|
'use strict';
|
|
|
|
|
2018-06-23 03:26:46 +00:00
|
|
|
// ENiGMA½
|
|
|
|
const MessageScanTossModule = require('../msg_scan_toss_module.js').MessageScanTossModule;
|
|
|
|
const Config = require('../config.js').get;
|
|
|
|
const ftnMailPacket = require('../ftn_mail_packet.js');
|
|
|
|
const ftnUtil = require('../ftn_util.js');
|
|
|
|
const Address = require('../ftn_address.js');
|
|
|
|
const Log = require('../logger.js').log;
|
|
|
|
const ArchiveUtil = require('../archive_util.js');
|
|
|
|
const msgDb = require('../database.js').dbs.message;
|
|
|
|
const Message = require('../message.js');
|
|
|
|
const TicFileInfo = require('../tic_file_info.js');
|
|
|
|
const Errors = require('../enig_error.js').Errors;
|
|
|
|
const FileEntry = require('../file_entry.js');
|
|
|
|
const scanFile = require('../file_base_area.js').scanFile;
|
|
|
|
const getFileAreaByTag = require('../file_base_area.js').getFileAreaByTag;
|
|
|
|
const getDescFromFileName = require('../file_base_area.js').getDescFromFileName;
|
|
|
|
const copyFileWithCollisionHandling = require('../file_util.js').copyFileWithCollisionHandling;
|
|
|
|
const getAreaStorageDirectoryByTag = require('../file_base_area.js').getAreaStorageDirectoryByTag;
|
|
|
|
const isValidStorageTag = require('../file_base_area.js').isValidStorageTag;
|
|
|
|
const User = require('../user.js');
|
2018-11-28 04:21:00 +00:00
|
|
|
const StatLog = require('../stat_log.js');
|
|
|
|
const SysProps = require('../system_property.js');
|
2018-06-23 03:26:46 +00:00
|
|
|
|
|
|
|
// deps
|
|
|
|
const moment = require('moment');
|
|
|
|
const _ = require('lodash');
|
|
|
|
const paths = require('path');
|
|
|
|
const async = require('async');
|
|
|
|
const fs = require('graceful-fs');
|
|
|
|
const later = require('later');
|
|
|
|
const temptmp = require('temptmp').createTrackedSession('ftn_bso');
|
|
|
|
const assert = require('assert');
|
|
|
|
const sane = require('sane');
|
|
|
|
const fse = require('fs-extra');
|
|
|
|
const iconv = require('iconv-lite');
|
|
|
|
const uuidV4 = require('uuid/v4');
|
2016-02-10 05:30:59 +00:00
|
|
|
|
|
|
|
exports.moduleInfo = {
|
2018-06-23 03:26:46 +00:00
|
|
|
name : 'FTN BSO',
|
|
|
|
desc : 'BSO style message scanner/tosser for FTN networks',
|
|
|
|
author : 'NuSkooler',
|
2016-02-10 05:30:59 +00:00
|
|
|
};
|
|
|
|
|
2016-02-21 00:57:38 +00:00
|
|
|
/*
|
2018-06-23 03:26:46 +00:00
|
|
|
:TODO:
|
|
|
|
* Support (approx) max bundle size
|
|
|
|
* Validate packet passwords!!!!
|
|
|
|
=> secure vs insecure landing areas
|
2018-01-01 00:54:11 +00:00
|
|
|
*/
|
2016-02-21 00:57:38 +00:00
|
|
|
|
2016-02-10 05:30:59 +00:00
|
|
|
exports.getModule = FTNMessageScanTossModule;
|
|
|
|
|
2018-06-23 03:26:46 +00:00
|
|
|
const SCHEDULE_REGEXP = /(?:^|or )?(@watch:|@immediate)([^\0]+)?$/;
|
2016-02-29 05:04:03 +00:00
|
|
|
|
2016-02-10 05:30:59 +00:00
|
|
|
function FTNMessageScanTossModule() {
|
2018-06-22 05:15:04 +00:00
|
|
|
MessageScanTossModule.call(this);
|
|
|
|
|
|
|
|
const self = this;
|
|
|
|
|
|
|
|
this.archUtil = ArchiveUtil.getInstance();
|
|
|
|
|
|
|
|
const config = Config();
|
|
|
|
if(_.has(config, 'scannerTossers.ftn_bso')) {
|
|
|
|
this.moduleConfig = config.scannerTossers.ftn_bso;
|
|
|
|
}
|
|
|
|
|
|
|
|
//
//  Resolve the default FTN network name:
//  1) explicit scannerTossers.ftn_bso.defaultNetwork, else
//  2) the single configured network when exactly one exists.
//  Returns a lowercased name, or undefined when ambiguous.
//
this.getDefaultNetworkName = function() {
    const explicitDefault = this.moduleConfig.defaultNetwork;
    if(explicitDefault) {
        return explicitDefault.toLowerCase();
    }

    const availableNetworks = Object.keys(config.messageNetworks.ftn.networks);
    return (1 === availableNetworks.length) ? availableNetworks[0].toLowerCase() : undefined;
};
|
|
|
|
|
|
|
|
//
//  Determine the default zone for |networkName|:
//  1) explicit config messageNetworks.ftn.networks.<name>.defaultZone, else
//  2) the zone of the network's configured localAddress.
//  Returns undefined when the network is unknown or no zone can be determined.
//
this.getDefaultZone = function(networkName) {
    const config = Config();

    //  Guard against unknown/missing network entries: callers (e.g.
    //  getOutgoingEchoMailPacketDir) pass a lowercased name which may not
    //  match a configured key; previously this threw a TypeError.
    const network = _.get(config, [ 'messageNetworks', 'ftn', 'networks', networkName ]);
    if(!network) {
        return;
    }

    if(_.isNumber(network.defaultZone)) {
        return network.defaultZone;
    }

    //  non-explicit: default to local address zone
    if(network.localAddress) {
        return Address.fromString(network.localAddress).zone;
    }
};
|
|
|
|
|
|
|
|
/*
|
2018-06-23 03:26:46 +00:00
|
|
|
this.isDefaultDomainZone = function(networkName, address) {
|
|
|
|
const defaultNetworkName = this.getDefaultNetworkName();
|
|
|
|
return(networkName === defaultNetworkName && address.zone === this.moduleConfig.defaultZone);
|
|
|
|
};
|
|
|
|
*/
|
2018-01-01 00:54:11 +00:00
|
|
|
|
2018-06-22 05:15:04 +00:00
|
|
|
//
//  Find the configured network name whose localAddress is an exact
//  match for |remoteAddress|. Returns undefined when none match.
//
this.getNetworkNameByAddress = function(remoteAddress) {
    const networks = Config().messageNetworks.ftn.networks;
    return _.findKey(networks, network => {
        const parsedLocal = Address.fromString(network.localAddress);
        return !_.isUndefined(parsedLocal) && parsedLocal.isEqual(remoteAddress);
    });
};
|
|
|
|
|
|
|
|
//
//  Find the configured network name whose localAddress matches the
//  supplied FTN address wildcard/pattern. Returns undefined when none match.
//
this.getNetworkNameByAddressPattern = function(remoteAddressPattern) {
    const networks = Config().messageNetworks.ftn.networks;
    return _.findKey(networks, network => {
        const parsedLocal = Address.fromString(network.localAddress);
        return !_.isUndefined(parsedLocal) && parsedLocal.isPatternMatch(remoteAddressPattern);
    });
};
|
|
|
|
|
|
|
|
//
//  Map a remote FTN area tag to the local area tag configured under
//  messageNetworks.ftn.areas. Comparison is case-insensitive.
//
this.getLocalAreaTagByFtnAreaTag = function(ftnAreaTag) {
    const wantedTag = ftnAreaTag.toUpperCase();  //  always compare upper
    return _.findKey(
        Config().messageNetworks.ftn.areas,
        areaConf => _.isString(areaConf.tag) && wantedTag === areaConf.tag.toUpperCase()
    );
};
|
|
|
|
|
|
|
|
//  Return the node's export type ('crash', 'hold', ...) lowercased;
//  defaults to 'crash' when not configured as a string.
this.getExportType = function(nodeConfig) {
    const { exportType } = nodeConfig;
    return _.isString(exportType) ? exportType.toLowerCase() : 'crash';
};
|
|
|
|
|
|
|
|
/*
|
2018-06-23 03:26:46 +00:00
|
|
|
this.getSeenByAddresses = function(messageSeenBy) {
|
|
|
|
if(!_.isArray(messageSeenBy)) {
|
|
|
|
messageSeenBy = [ messageSeenBy ];
|
|
|
|
}
|
|
|
|
|
|
|
|
let seenByAddrs = [];
|
|
|
|
messageSeenBy.forEach(sb => {
|
|
|
|
seenByAddrs = seenByAddrs.concat(ftnUtil.parseAbbreviatedNetNodeList(sb));
|
|
|
|
});
|
|
|
|
return seenByAddrs;
|
|
|
|
};
|
|
|
|
*/
|
2018-01-01 00:54:11 +00:00
|
|
|
|
2018-06-22 05:15:04 +00:00
|
|
|
//  True when |msg| carries a non-empty FTN MSGID kludge string.
this.messageHasValidMSGID = function(msg) {
    const msgId = msg.meta.FtnKludge.MSGID;
    return _.isString(msgId) && msgId.length > 0;
};
|
2018-01-01 00:54:11 +00:00
|
|
|
|
2018-06-22 05:15:04 +00:00
|
|
|
/*
|
2018-06-23 03:26:46 +00:00
|
|
|
this.getOutgoingEchoMailPacketDir = function(networkName, destAddress) {
|
|
|
|
let dir = this.moduleConfig.paths.outbound;
|
|
|
|
if(!this.isDefaultDomainZone(networkName, destAddress)) {
|
|
|
|
const hexZone = `000${destAddress.zone.toString(16)}`.substr(-3);
|
|
|
|
dir = paths.join(dir, `${networkName.toLowerCase()}.${hexZone}`);
|
|
|
|
}
|
|
|
|
return dir;
|
|
|
|
};
|
|
|
|
*/
|
2018-01-01 00:54:11 +00:00
|
|
|
|
2018-06-22 05:15:04 +00:00
|
|
|
//
//  Build the BSO outbound directory for EchoMail packets destined to
//  |destAddress| on |networkName|. The default network uses "outbound";
//  other networks use their (lowercased) name. Destinations outside the
//  network's default zone get a ".zzz" 3-digit hex zone suffix.
//
this.getOutgoingEchoMailPacketDir = function(networkName, destAddress) {
    networkName = networkName.toLowerCase();

    const defaultNetworkName    = this.getDefaultNetworkName();
    const defaultZone           = this.getDefaultZone(networkName);

    const zoneExt = (defaultZone === destAddress.zone) ?
        '' :
        '.' + `000${destAddress.zone.toString(16)}`.substr(-3);

    const subDir = (defaultNetworkName === networkName) ?
        `outbound${zoneExt}` :
        `${networkName}${zoneExt}`;

    return paths.join(this.moduleConfig.paths.outbound, subDir);
};
|
|
|
|
|
|
|
|
//
//  Build a full path for an outgoing packet file.
//
//  Constraints & background:
//  * DOS 8.3 names are required — legacy receivers don't understand LFNs.
//  * The name must be unique-ish, even after bundling and on remote systems.
//
//  Other software uses various schemes (CRC16/32 hex, hex UNIX timestamps,
//  Mystic's Hex8(day + seconds past midnight + hundredths), SBBSEcho's
//  DDHHMMSS — see https://github.com/ftnapps/pkg-sbbs/blob/master/docs/fidonet.txt
//  and https://groups.google.com/forum/#!searchin/alt.bbs.mystic/netmail$20filename/alt.bbs.mystic/m1xLnY8i1pU/YnG2excdl6MJ ).
//  We reuse our existing 8-character serial number generation, as used for
//  FTS-0009.001 MSGIDs.
//
//  Extension is "pk_" for temp/working packets, else "pkt".
//
this.getOutgoingPacketFileName = function(basePath, messageId, isTemp, fileCase) {
    const serial    = ftnUtil.getMessageSerialNumber(messageId);
    const extension = (true === isTemp) ? 'pk_' : 'pkt';

    let fileName = `${serial}.${extension}`;
    if('upper' === fileCase) {
        fileName = fileName.toUpperCase();
    }

    return paths.join(basePath, fileName);
};
|
|
|
|
|
|
|
|
//
//  Map a flow type to its BSO flow-file extension.
//  'mail'/'ref' derive their first letter from |exportType| (e.g.
//  crash -> cut/clo, hold -> hut/hlo); 'busy'/'request'/'requests' are fixed.
//  Returns undefined for an unknown |flowType|.
//
this.getOutgoingFlowFileExtension = function(destAddress, flowType, exportType, fileCase) {
    let ext;

    switch(flowType) {
        case 'mail'     : ext = `${exportType.toLowerCase()[0]}ut`; break;
        case 'ref'      : ext = `${exportType.toLowerCase()[0]}lo`; break;
        case 'busy'     : ext = 'bsy'; break;
        case 'request'  : ext = 'req'; break;
        case 'requests' : ext = 'hrq'; break;
    }

    //  Guard |ext|: an unknown flowType previously caused a TypeError
    //  here when fileCase === 'upper'; now we consistently return undefined.
    if(ext && 'upper' === fileCase) {
        ext = ext.toUpperCase();
    }

    return ext;
};
|
|
|
|
|
|
|
|
//
//  Build a FTS-5005.003 style flow file path for |destAddress|.
//
//  Refs:
//  * http://ftsc.org/docs/fts-5005.003
//  * http://wiki.synchro.net/ref:fidonet_files#flow_files
//
this.getOutgoingFlowFileName = function(basePath, destAddress, flowType, exportType, fileCase) {
    const ext = self.getOutgoingFlowFileExtension(
        destAddress,
        flowType,
        exportType,
        fileCase
    );

    const netComponent  = `0000${destAddress.net.toString(16)}`.substr(-4);
    const nodeComponent = `0000${destAddress.node.toString(16)}`.substr(-4);

    let pointDir;
    let controlFileBaseName;
    if(destAddress.point) {
        //  point's go in an extra subdir, e.g. outbound/NNNNnnnn.pnt/00000001.pnt (for a point of 1)
        pointDir            = `${netComponent}${nodeComponent}.pnt`;
        controlFileBaseName = `00000000${destAddress.point.toString(16)}`.substr(-8);
    } else {
        //
        //  Use |destAddress| nnnnNNNN.??? where nnnn is dest net and NNNN is dest
        //  node. This seems to match what Mystic does
        //
        pointDir            = '';
        controlFileBaseName = `${netComponent}${nodeComponent}`;
    }

    //
    //  From FTS-5005.003: "Lower case filenames are prefered if supported by the file system."
    //  ...but we let the user override.
    //
    if('upper' === fileCase) {
        pointDir            = pointDir.toUpperCase();
        controlFileBaseName = controlFileBaseName.toUpperCase();
    }

    return paths.join(basePath, pointDir, `${controlFileBaseName}.${ext}`);
};
|
|
|
|
|
|
|
|
//
//  Append |fileRefs| to the flow file at |filePath|, each prefixed with
//  |directive| (e.g. '^' or '~'). Calls back with any appendFile error.
//
this.flowFileAppendRefs = function(filePath, fileRefs, directive, cb) {
    //
    //  The *directory* of |filePath| must exist first; point destinations
    //  may include a subdir that has not been created yet.
    //
    const flowFileDir = paths.dirname(filePath);
    fse.mkdirs(flowFileDir, () => {  //  note not checking err; let's try appendFile
        let appendLines = '';
        for(const ref of fileRefs) {
            appendLines += `${directive}${ref}\n`;
        }

        fs.appendFile(filePath, appendLines, err => {
            return cb(err);
        });
    });
};
|
|
|
|
|
|
|
|
//
//  Find the first non-existing outgoing bundle filename under |basePath|
//  and call back with its full path, or an Error when all suffixes are taken.
//
//  Base filename:
//  * Non-point |destAddress|: NNNNnnnn, where NNNN is 0 padded hex of
//    (dest net - source net) and nnnn is 0 padded hex of (dest node - source node).
//  * Point |destAddress|: '0000p' + 3 digit 0 padded point.
//    NOTE(review): despite the original "hex" wording, the point value is
//    formatted in *decimal* here; existing remotes may depend on the current
//    names, so this is intentionally left as-is — confirm before changing.
//
//  Extension: dd? where dd is Su...Mo and ? walks 0...z as collisions arise.
//
this.getOutgoingBundleFileName = function(basePath, sourceAddress, destAddress, cb) {
    let basename;
    if(destAddress.point) {
        const paddedPoint = `000${destAddress.point}`.substr(-3);
        basename = `0000p${paddedPoint}`;
    } else {
        const netPart   = `0000${Math.abs(sourceAddress.net - destAddress.net).toString(16)}`.substr(-4);
        const nodePart  = `0000${Math.abs(sourceAddress.node - destAddress.node).toString(16)}`.substr(-4);
        basename = netPart + nodePart;
    }

    //
    //  Probe dd0...ddz for the first entry that does not yet exist
    //
    const EXT_SUFFIXES  = '0123456789abcdefghijklmnopqrstuvwxyz'.split('');
    const fileName      = `${basename}.${moment().format('dd').toLowerCase()}`;

    async.detectSeries(
        EXT_SUFFIXES,
        (suffix, callback) => {
            fs.stat(paths.join(basePath, fileName + suffix), err => {
                //  a ENOENT stat failure means the name is free
                callback(null, (err && 'ENOENT' === err.code) ? true : false);
            });
        },
        (err, finalSuffix) => {
            if(finalSuffix) {
                return cb(null, paths.join(basePath, fileName + finalSuffix));
            }

            return cb(new Error('Could not acquire a bundle filename!'));
        }
    );
};
|
|
|
|
|
|
|
|
//
//  Prepare |message| for FTN export by populating FtnProperty/FtnKludge meta:
//  origin/destination node info, attribute flags, tear/origin lines,
//  NetMail (INTL/FMPT/TOPT) or EchoMail (AREA/SEEN-BY/PATH) specifics,
//  MSGID/REPLY/TZUTC/TID kludges, and the CHRS/encoding to use.
//
//  Mutates |message.meta| in place and stores the chosen encoding on
//  |options.encoding| for later packet serialization.
//
//  |options| is expected to carry: network (with localAddress), destAddress,
//  optionally routeAddress, and nodeConfig — assumed from the visible reads;
//  confirm against callers.
//
this.prepareMessage = function(message, options) {
    //
    //  Set various FTN kludges/etc.
    //
    const localAddress = new Address(options.network.localAddress);  //  ensure we have an Address obj not a string version

    //  :TODO: create Address.toMeta() / similar
    message.meta.FtnProperty = message.meta.FtnProperty || {};
    message.meta.FtnKludge = message.meta.FtnKludge || {};

    message.meta.FtnProperty.ftn_orig_node = localAddress.node;
    message.meta.FtnProperty.ftn_orig_network = localAddress.net;
    message.meta.FtnProperty.ftn_cost = 0;
    message.meta.FtnProperty.ftn_msg_orig_node = localAddress.node;
    message.meta.FtnProperty.ftn_msg_orig_net = localAddress.net;

    //  route (uplink) address wins over final destination when present
    const destAddress = options.routeAddress || options.destAddress;
    message.meta.FtnProperty.ftn_dest_node = destAddress.node;
    message.meta.FtnProperty.ftn_dest_network = destAddress.net;

    if(destAddress.zone) {
        message.meta.FtnProperty.ftn_dest_zone = destAddress.zone;
    }
    if(destAddress.point) {
        message.meta.FtnProperty.ftn_dest_point = destAddress.point;
    }

    //  tear line and origin can both go in EchoMail & NetMail
    message.meta.FtnProperty.ftn_tear_line = ftnUtil.getTearLine();
    message.meta.FtnProperty.ftn_origin = ftnUtil.getOrigin(localAddress);

    let ftnAttribute = ftnMailPacket.Packet.Attribute.Local;  //  message from our system

    const config = Config();
    if(self.isNetMailMessage(message)) {
        //
        //  Set route and message destination properties -- they may differ
        //
        message.meta.FtnProperty.ftn_msg_dest_node = options.destAddress.node;
        message.meta.FtnProperty.ftn_msg_dest_net = options.destAddress.net;

        ftnAttribute |= ftnMailPacket.Packet.Attribute.Private;

        //
        //  NetMail messages need a FRL-1005.001 "Via" line
        //  http://ftsc.org/docs/frl-1005.001
        //
        //  :TODO: We need to do this when FORWARDING NetMail
        /*
        if(_.isString(message.meta.FtnKludge.Via)) {
            message.meta.FtnKludge.Via = [ message.meta.FtnKludge.Via ];
        }
        message.meta.FtnKludge.Via = message.meta.FtnKludge.Via || [];
        message.meta.FtnKludge.Via.push(ftnUtil.getVia(options.network.localAddress));
        */

        //
        //  We need to set INTL, and possibly FMPT and/or TOPT
        //  See http://retro.fidoweb.ru/docs/index=ftsc&doc=FTS-4001&enc=mac
        //
        message.meta.FtnKludge.INTL = ftnUtil.getIntl(options.destAddress, localAddress);

        //  FMPT: our point number, when we are a point system
        if(_.isNumber(localAddress.point) && localAddress.point > 0) {
            message.meta.FtnKludge.FMPT = localAddress.point;
        }

        //  TOPT: destination point number, when the destination is a point
        if(_.isNumber(options.destAddress.point) && options.destAddress.point > 0) {
            message.meta.FtnKludge.TOPT = options.destAddress.point;
        }
    } else {
        //
        //  Set appropriate attribute flag for export type
        //
        switch(this.getExportType(options.nodeConfig)) {
            case 'crash' : ftnAttribute |= ftnMailPacket.Packet.Attribute.Crash; break;
            case 'hold' : ftnAttribute |= ftnMailPacket.Packet.Attribute.Hold; break;
            //  :TODO: Others?
        }

        //
        //  EchoMail requires some additional properties & kludges
        //
        message.meta.FtnProperty.ftn_area = config.messageNetworks.ftn.areas[message.areaTag].tag;

        //
        //  When exporting messages, we should create/update SEEN-BY
        //  with remote address(s) we are exporting to.
        //
        const seenByAdditions =
            [ `${localAddress.net}/${localAddress.node}` ].concat(config.messageNetworks.ftn.areas[message.areaTag].uplinks);
        message.meta.FtnProperty.ftn_seen_by =
            ftnUtil.getUpdatedSeenByEntries(message.meta.FtnProperty.ftn_seen_by, seenByAdditions);

        //
        //  And create/update PATH for ourself
        //
        message.meta.FtnKludge.PATH = ftnUtil.getUpdatedPathEntries(message.meta.FtnKludge.PATH, localAddress);
    }

    message.meta.FtnProperty.ftn_attr_flags = ftnAttribute;

    //
    //  Additional kludges
    //
    //  Check for existence of MSGID as we may already have stored it from a previous
    //  export that failed to finish
    //
    if(!message.meta.FtnKludge.MSGID) {
        message.meta.FtnKludge.MSGID = ftnUtil.getMessageIdentifier(
            message,
            localAddress,
            message.isPrivate()  //  true = isNetMail
        );
    }

    message.meta.FtnKludge.TZUTC = ftnUtil.getUTCTimeZoneOffset();

    //
    //  According to FSC-0046:
    //
    //  "When a Conference Mail processor adds a TID to a message, it may not
    //  add a PID. An existing TID should, however, be replaced. TIDs follow
    //  the same format used for PIDs, as explained above."
    //
    message.meta.FtnKludge.TID = ftnUtil.getProductIdentifier();

    //
    //  Determine CHRS and actual internal encoding name. If the message has an
    //  explicit encoding set, use it. Otherwise, try to preserve any CHRS/encoding already set.
    //
    let encoding = options.nodeConfig.encoding || config.scannerTossers.ftn_bso.packetMsgEncoding || 'utf8';
    const explicitEncoding = _.get(message.meta, 'System.explicit_encoding');
    if(explicitEncoding) {
        encoding = explicitEncoding;
    } else if(message.meta.FtnKludge.CHRS) {
        const encFromChars = ftnUtil.getEncodingFromCharacterSetIdentifier(message.meta.FtnKludge.CHRS);
        if(encFromChars) {
            encoding = encFromChars;
        }
    }

    //
    //  Ensure we ended up with something useable. If not, back to utf8!
    //
    if(!iconv.encodingExists(encoding)) {
        Log.debug( { encoding : encoding }, 'Unknown encoding. Falling back to utf8');
        encoding = 'utf8';
    }

    options.encoding = encoding;  //  save for later
    message.meta.FtnKludge.CHRS = ftnUtil.getCharacterSetIdentifierByEncoding(encoding);
};
|
|
|
|
|
|
|
|
//
//  Look up the MSGID kludge of the message referenced by
//  |message.replyToMsgId| and, when found, record it as this message's
//  REPLY kludge. Best-effort: always calls back with null.
//
this.setReplyKludgeFromReplyToMsgId = function(message, cb) {
    if(0 === message.replyToMsgId) {
        return cb(null);  //  not a reply; nothing to do
    }

    Message.getMetaValuesByMessageId(message.replyToMsgId, 'FtnKludge', 'MSGID', (err, msgIdVal) => {
        if(err) {
            return cb(null);  //  no stored MSGID; this method always passes
        }

        assert(_.isString(msgIdVal), 'Expected string but got ' + (typeof msgIdVal) + ' (' + msgIdVal + ')');

        //  got a MSGID - create a REPLY
        message.meta.FtnKludge.REPLY = msgIdVal;

        return cb(null);
    });
};
|
|
|
|
|
2018-06-23 03:26:46 +00:00
|
|
|
// check paths, Addresses, etc.
|
2018-06-22 05:15:04 +00:00
|
|
|
//
//  Validate a messageNetworks.ftn.areas entry: requires string |tag| and
//  |network|, plus an uplinks array. A space-separated uplinks string is
//  normalized to an array in place (intentional config mutation).
//
this.isAreaConfigValid = function(areaConfig) {
    if(!areaConfig || !_.isString(areaConfig.tag) || !_.isString(areaConfig.network)) {
        return false;
    }

    if(_.isString(areaConfig.uplinks)) {
        areaConfig.uplinks = areaConfig.uplinks.split(' ');
    }

    return _.isArray(areaConfig.uplinks);
};
|
|
|
|
|
|
|
|
|
|
|
|
//
//  Sanity check minimum configuration: node entries under this module's
//  config plus the messageNetworks.ftn.areas block.
//  :TODO: need to check more!
//
this.hasValidConfiguration = function() {
    const haveNodes = _.has(this, 'moduleConfig.nodes');
    const haveAreas = _.has(Config(), 'messageNetworks.ftn.areas');
    return haveNodes && haveAreas;
};
|
|
|
|
|
|
|
|
//
//  Parse a schedule string such as "every 1 hours or @watch:/path/file"
//  into { sched?, watchFile?, immediate? }. Returns undefined when
//  nothing useful could be extracted.
//
this.parseScheduleString = function(schedStr) {
    if(!schedStr) {
        return;  //  nothing to parse!
    }

    const schedule = {};

    //  strip any trailing @watch:/@immediate directive & record it
    const m = SCHEDULE_REGEXP.exec(schedStr);
    if(m) {
        schedStr = schedStr.substr(0, m.index).trim();

        switch(m[1]) {
            case '@watch:'      : schedule.watchFile = m[2]; break;
            case '@immediate'   : schedule.immediate = true; break;
        }
    }

    //  whatever remains should be a later.js text schedule
    if(schedStr.length > 0) {
        const sched = later.parse.text(schedStr);
        if(-1 === sched.error) {
            schedule.sched = sched;
        }
    }

    //  return undefined if we couldn't parse out anything useful
    return _.isEmpty(schedule) ? undefined : schedule;
};
|
|
|
|
|
|
|
|
//
//  Fetch the last scanned/exported message ID for |areaTag| from
//  message_area_last_scan (rows scoped to this module via scan_toss).
//  Calls back (err, lastScanId) — 0 when the area has never been scanned.
//
this.getAreaLastScanId = function(areaTag, cb) {
    const sql =
        `SELECT area_tag, message_id
        FROM message_area_last_scan
        WHERE scan_toss = "ftn_bso" AND area_tag = ?
        LIMIT 1;`;

    msgDb.get(sql, [ areaTag ], (err, row) => {
        //  no row -> never scanned; report 0 so everything gets exported
        return cb(err, row ? row.message_id : 0);
    });
};
|
2018-01-01 00:54:11 +00:00
|
|
|
|
2018-06-22 05:15:04 +00:00
|
|
|
//
//  Persist |lastScanId| as the last scanned/exported message ID for
//  |areaTag|. REPLACE INTO upserts the per-module (scan_toss) row.
//  Calls back (err).
//
this.setAreaLastScanId = function(areaTag, lastScanId, cb) {
    const sql =
        `REPLACE INTO message_area_last_scan (scan_toss, area_tag, message_id)
        VALUES ("ftn_bso", ?, ?);`;

    msgDb.run(sql, [ areaTag, lastScanId ], err => {
        return cb(err);
    });
};
|
|
|
|
|
|
|
|
//
//  Find the scannerTossers.ftn_bso.nodes entry whose wildcard key
//  matches |addr| (string or Address). Returns undefined when none match.
//
this.getNodeConfigByAddress = function(addr) {
    const address = _.isString(addr) ? Address.fromString(addr) : addr;

    //  :TODO: sort wildcard nodes{} entries by most->least explicit according to FTN hierarchy
    return _.find(
        this.moduleConfig.nodes,
        (node, nodeAddrWildcard) => address.isPatternMatch(nodeAddrWildcard)
    );
};
|
|
|
|
|
|
|
|
//
//  Export a single NetMail |message| to its own packet file.
//  For NetMail, we always create a *single* packet per message.
//
//  Side effects: mutates |message.meta| via prepareMessage/REPLY lookup and
//  stores the generated packet path on |exportOpts.pktFileName|.
//  Calls back (err) once the packet file has been fully flushed.
//
//  NOTE(review): no 'error' listener is attached to the write stream; a
//  stream-level write error would leave |callback| waiting on 'finish' —
//  confirm whether this can occur in practice.
//
this.exportNetMailMessagePacket = function(message, exportOpts, cb) {
    async.series(
        [
            function generalPrep(callback) {
                //  fill in FTN kludges/properties before serialization
                self.prepareMessage(message, exportOpts);

                return self.setReplyKludgeFromReplyToMsgId(message, callback);
            },
            function createPacket(callback) {
                const packet = new ftnMailPacket.Packet();

                const packetHeader = new ftnMailPacket.PacketHeader(
                    exportOpts.network.localAddress,
                    exportOpts.routeAddress,
                    exportOpts.nodeConfig.packetType
                );

                packetHeader.password = exportOpts.nodeConfig.packetPassword || '';

                //  use current message ID for filename seed
                exportOpts.pktFileName = self.getOutgoingPacketFileName(
                    self.exportTempDir,
                    message.messageId,
                    false,  //  createTempPacket=false
                    exportOpts.fileCase
                );

                const ws = fs.createWriteStream(exportOpts.pktFileName);

                packet.writeHeader(ws, packetHeader);

                packet.getMessageEntryBuffer(message, exportOpts, (err, msgBuf) => {
                    if(err) {
                        return callback(err);
                    }

                    ws.write(msgBuf);

                    packet.writeTerminator(ws);

                    //  wait for 'finish' so data is not cut off before
                    //  the file is consumed downstream
                    ws.end();
                    ws.once('finish', () => {
                        return callback(null);
                    });
                });
            }
        ],
        err => {
            return cb(err);
        }
    );
};
|
|
|
|
|
|
|
|
this.exportMessagesByUuid = function(messageUuids, exportOpts, cb) {
|
|
|
|
//
|
2018-06-23 03:26:46 +00:00
|
|
|
// This method has a lot of madness going on:
|
|
|
|
// - Try to stuff messages into packets until we've hit the target size
|
|
|
|
// - We need to wait for write streams to finish before proceeding in many cases
|
|
|
|
// or data will be cut off when closing and creating a new stream
|
2018-06-22 05:15:04 +00:00
|
|
|
//
|
2018-06-23 03:26:46 +00:00
|
|
|
let exportedFiles = [];
|
|
|
|
let currPacketSize = self.moduleConfig.packetTargetByteSize;
|
2018-06-22 05:15:04 +00:00
|
|
|
let packet;
|
|
|
|
let ws;
|
|
|
|
let remainMessageBuf;
|
|
|
|
let remainMessageId;
|
|
|
|
const createTempPacket = !_.isString(exportOpts.nodeConfig.archiveType) || 0 === exportOpts.nodeConfig.archiveType.length;
|
|
|
|
|
|
|
|
function finalizePacket(cb) {
|
|
|
|
packet.writeTerminator(ws);
|
|
|
|
ws.end();
|
|
|
|
ws.once('finish', () => {
|
|
|
|
return cb(null);
|
|
|
|
});
|
|
|
|
}
|
|
|
|
|
|
|
|
async.each(messageUuids, (msgUuid, nextUuid) => {
|
|
|
|
let message = new Message();
|
|
|
|
|
|
|
|
async.series(
|
|
|
|
[
|
|
|
|
function finalizePrevious(callback) {
|
|
|
|
if(packet && currPacketSize >= self.moduleConfig.packetTargetByteSize) {
|
|
|
|
return finalizePacket(callback);
|
|
|
|
} else {
|
|
|
|
callback(null);
|
|
|
|
}
|
|
|
|
},
|
|
|
|
function loadMessage(callback) {
|
|
|
|
message.load( { uuid : msgUuid }, err => {
|
|
|
|
if(err) {
|
|
|
|
return callback(err);
|
|
|
|
}
|
|
|
|
|
2018-06-23 03:26:46 +00:00
|
|
|
// General preperation
|
2018-06-22 05:15:04 +00:00
|
|
|
self.prepareMessage(message, exportOpts);
|
|
|
|
|
|
|
|
self.setReplyKludgeFromReplyToMsgId(message, err => {
|
|
|
|
callback(err);
|
|
|
|
});
|
|
|
|
});
|
|
|
|
},
|
|
|
|
function createNewPacket(callback) {
|
|
|
|
if(currPacketSize >= self.moduleConfig.packetTargetByteSize) {
|
|
|
|
packet = new ftnMailPacket.Packet();
|
|
|
|
|
|
|
|
const packetHeader = new ftnMailPacket.PacketHeader(
|
|
|
|
exportOpts.network.localAddress,
|
|
|
|
exportOpts.destAddress,
|
|
|
|
exportOpts.nodeConfig.packetType);
|
|
|
|
|
|
|
|
packetHeader.password = exportOpts.nodeConfig.packetPassword || '';
|
|
|
|
|
2018-06-23 03:26:46 +00:00
|
|
|
// use current message ID for filename seed
|
2018-06-22 05:15:04 +00:00
|
|
|
const pktFileName = self.getOutgoingPacketFileName(
|
|
|
|
self.exportTempDir,
|
|
|
|
message.messageId,
|
|
|
|
createTempPacket,
|
|
|
|
exportOpts.fileCase
|
|
|
|
);
|
|
|
|
|
|
|
|
exportedFiles.push(pktFileName);
|
|
|
|
|
|
|
|
ws = fs.createWriteStream(pktFileName);
|
|
|
|
|
|
|
|
currPacketSize = packet.writeHeader(ws, packetHeader);
|
|
|
|
|
|
|
|
if(remainMessageBuf) {
|
|
|
|
currPacketSize += packet.writeMessageEntry(ws, remainMessageBuf);
|
|
|
|
remainMessageBuf = null;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
callback(null);
|
|
|
|
},
|
|
|
|
function appendMessage(callback) {
|
|
|
|
packet.getMessageEntryBuffer(message, exportOpts, (err, msgBuf) => {
|
|
|
|
if(err) {
|
|
|
|
return callback(err);
|
|
|
|
}
|
|
|
|
|
2018-06-23 03:26:46 +00:00
|
|
|
currPacketSize += msgBuf.length;
|
2018-06-22 05:15:04 +00:00
|
|
|
|
|
|
|
if(currPacketSize >= self.moduleConfig.packetTargetByteSize) {
|
2018-06-23 03:26:46 +00:00
|
|
|
remainMessageBuf = msgBuf; // save for next packet
|
|
|
|
remainMessageId = message.messageId;
|
2018-06-22 05:15:04 +00:00
|
|
|
} else {
|
|
|
|
ws.write(msgBuf);
|
|
|
|
}
|
|
|
|
|
|
|
|
return callback(null);
|
|
|
|
});
|
|
|
|
},
|
|
|
|
function storeStateFlags0Meta(callback) {
|
|
|
|
message.persistMetaValue('System', 'state_flags0', Message.StateFlags0.Exported.toString(), err => {
|
|
|
|
callback(err);
|
|
|
|
});
|
|
|
|
},
|
|
|
|
function storeMsgIdMeta(callback) {
|
|
|
|
//
|
2018-06-23 03:26:46 +00:00
|
|
|
// We want to store some meta as if we had imported
|
|
|
|
// this message for later reference
|
2018-06-22 05:15:04 +00:00
|
|
|
//
|
|
|
|
if(message.meta.FtnKludge.MSGID) {
|
|
|
|
message.persistMetaValue('FtnKludge', 'MSGID', message.meta.FtnKludge.MSGID, err => {
|
|
|
|
callback(err);
|
|
|
|
});
|
|
|
|
} else {
|
|
|
|
callback(null);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
],
|
|
|
|
err => {
|
|
|
|
nextUuid(err);
|
|
|
|
}
|
|
|
|
);
|
|
|
|
}, err => {
|
|
|
|
if(err) {
|
|
|
|
cb(err);
|
|
|
|
} else {
|
|
|
|
async.series(
|
|
|
|
[
|
|
|
|
function terminateLast(callback) {
|
|
|
|
if(packet) {
|
|
|
|
return finalizePacket(callback);
|
|
|
|
} else {
|
|
|
|
callback(null);
|
|
|
|
}
|
|
|
|
},
|
|
|
|
function writeRemainPacket(callback) {
    //
    //  If a message buffer spilled over the previous packet's target size,
    //  start one final packet containing just that remaining message,
    //  then finalize it.
    //
    if(remainMessageBuf) {
        //  :TODO: DRY this with the code above -- they are basically identical
        packet = new ftnMailPacket.Packet();

        const packetHeader = new ftnMailPacket.PacketHeader(
            exportOpts.network.localAddress,
            exportOpts.destAddress,
            exportOpts.nodeConfig.packetType);

        packetHeader.password = exportOpts.nodeConfig.packetPassword || '';

        //  use current message ID for filename seed
        const pktFileName = self.getOutgoingPacketFileName(
            self.exportTempDir,
            remainMessageId,
            createTempPacket,
            exportOpts.fileCase //  fix: was |exportOpts.filleCase| (typo -> undefined; see parallel code in createNewPacket)
        );

        exportedFiles.push(pktFileName);

        ws = fs.createWriteStream(pktFileName);

        packet.writeHeader(ws, packetHeader);
        ws.write(remainMessageBuf);
        return finalizePacket(callback);
    } else {
        callback(null);
    }
}
|
|
|
|
],
|
|
|
|
err => {
|
|
|
|
cb(err, exportedFiles);
|
|
|
|
}
|
|
|
|
);
|
|
|
|
}
|
|
|
|
});
|
|
|
|
};
|
|
|
|
|
|
|
|
this.getNetMailRoute = function(dstAddr) {
    //
    //  Route full|wildcard -> full adddress/network lookup:
    //  return the first route entry under scannerTossers.ftn_bso.netMail.routes
    //  whose address wildcard key matches |dstAddr|, else undefined.
    //
    const routes = _.get(Config(), 'scannerTossers.ftn_bso.netMail.routes');

    if(!routes) {
        return; //  no routes configured at all
    }

    return _.find(routes, (routeEntry, addrWildcard) => dstAddr.isPatternMatch(addrWildcard));
};
|
|
|
|
|
|
|
|
this.getNetMailRouteInfoFromAddress = function(destAddress, cb) {
    //
    //  Attempt to find route information for |destAddress|:
    //
    //  1) Routes: scannerTossers.ftn_bso.netMail.routes{} -> scannerTossers.ftn_bso.nodes{} -> config
    //     - Where we send may not be where destAddress is (it's routed!)
    //  2) Direct to nodes: scannerTossers.ftn_bso.nodes{} -> config
    //     - Where we send is direct to destAddress
    //
    //  In both cases, attempt to look up Zone:Net/* to discover local "from" network/address
    //  falling back to Config.scannerTossers.ftn_bso.defaultNetwork
    //
    //  |destAddress|   : Address instance of the final destination
    //  |cb|            : cb(err, { destAddress, routeAddress, networkName, config, isRouted })
    //
    const route = this.getNetMailRoute(destAddress);

    //  routeAddress: where we actually deliver; may differ from destAddress when routed
    let routeAddress;
    let networkName;
    let isRouted;
    if(route) {
        routeAddress    = Address.fromString(route.address);
        networkName     = route.network;
        isRouted        = true;
    } else {
        routeAddress    = destAddress;
        isRouted        = false;
    }

    //  route entry may omit |network|; fall back to Zone:Net lookup
    networkName = networkName || this.getNetworkNameByAddress(routeAddress);

    //  node config by wildcard match, else sane defaults
    const config = _.find(this.moduleConfig.nodes, (node, nodeAddrWildcard) => {
        return routeAddress.isPatternMatch(nodeAddrWildcard);
    }) || { packetType : '2+', encoding : Config().scannerTossers.ftn_bso.packetMsgEncoding };

    //  we should never be failing here; we may just be using defaults.
    return cb(
        networkName ? null : Errors.DoesNotExist(`No NetMail route for ${destAddress.toString()}`),
        { destAddress, routeAddress, networkName, config, isRouted }
    );
};
|
|
|
|
|
|
|
|
this.exportNetMailMessagesToUplinks = function(messagesOrMessageUuids, cb) {
    //
    //  Export NetMail messages (Message instances or UUID strings) one at a
    //  time: discover the routed uplink, build a packet, move it to the BSO
    //  outgoing dir, append a flow-file ref, and persist export meta.
    //  Per-message errors are logged but do not abort the batch.
    //
    //  for each message/UUID, find where to send the thing
    async.each(messagesOrMessageUuids, (msgOrUuid, nextMessageOrUuid) => {

        const exportOpts = {};
        const message = new Message();

        async.series(
            [
                function loadMessage(callback) {
                    //  UUIDs are loaded from the DB; Message instances pass through
                    if(_.isString(msgOrUuid)) {
                        message.load( { uuid : msgOrUuid }, err => {
                            return callback(err, message);
                        });
                    } else {
                        return callback(null, msgOrUuid);
                    }
                },
                function discoverUplink(callback) {
                    //  destination comes from the message's remote "to" meta
                    const dstAddr = new Address(message.meta.System[Message.SystemMetaNames.RemoteToUser]);

                    self.getNetMailRouteInfoFromAddress(dstAddr, (err, routeInfo) => {
                        if(err) {
                            return callback(err);
                        }

                        exportOpts.nodeConfig       = routeInfo.config;
                        exportOpts.destAddress      = dstAddr;
                        exportOpts.routeAddress     = routeInfo.routeAddress;
                        exportOpts.fileCase         = routeInfo.config.fileCase || 'lower';
                        exportOpts.network          = Config().messageNetworks.ftn.networks[routeInfo.networkName];
                        exportOpts.networkName      = routeInfo.networkName;
                        exportOpts.outgoingDir      = self.getOutgoingEchoMailPacketDir(exportOpts.networkName, exportOpts.destAddress);
                        exportOpts.exportType       = self.getExportType(routeInfo.config);

                        if(!exportOpts.network) {
                            return callback(Errors.DoesNotExist(`No configuration found for network ${routeInfo.networkName}`));
                        }

                        return callback(null);
                    });
                },
                function createOutgoingDir(callback) {
                    //  ensure outgoing NetMail directory exists
                    return fse.mkdirs(exportOpts.outgoingDir, callback);
                },
                function exportPacket(callback) {
                    //  NOTE(review): presumably sets exportOpts.pktFileName (used below) -- confirm in exportNetMailMessagePacket
                    return self.exportNetMailMessagePacket(message, exportOpts, callback);
                },
                function moveToOutgoing(callback) {
                    //  rename temp packet with proper-case .pkt extension in outgoing dir
                    const newExt = exportOpts.fileCase === 'lower' ? '.pkt' : '.PKT';
                    exportOpts.exportedToPath = paths.join(
                        exportOpts.outgoingDir,
                        `${paths.basename(exportOpts.pktFileName, paths.extname(exportOpts.pktFileName))}${newExt}`
                    );

                    return fse.move(exportOpts.pktFileName, exportOpts.exportedToPath, callback);
                },
                function prepareFloFile(callback) {
                    const flowFilePath = self.getOutgoingFlowFileName(
                        exportOpts.outgoingDir,
                        exportOpts.routeAddress,
                        'ref',
                        exportOpts.exportType,
                        exportOpts.fileCase
                    );

                    //  '^' directive = delete packet after transfer
                    return self.flowFileAppendRefs(flowFilePath, [ exportOpts.exportedToPath ], '^', callback);
                },
                function storeStateFlags0Meta(callback) {
                    //  mark message exported
                    return message.persistMetaValue('System', 'state_flags0', Message.StateFlags0.Exported.toString(), callback);
                },
                function storeMsgIdMeta(callback) {
                    //  Store meta as if we had imported this message -- for later reference
                    if(message.meta.FtnKludge.MSGID) {
                        return message.persistMetaValue('FtnKludge', 'MSGID', message.meta.FtnKludge.MSGID, callback);
                    }

                    return callback(null);
                }
            ],
            err => {
                if(err) {
                    Log.warn( { error : err.message }, 'Error exporting message' );
                }
                //  per-message errors are non-fatal to the batch
                return nextMessageOrUuid(null);
            }
        );
    }, err => {
        if(err) {
            Log.warn( { error : err.message }, 'Error(s) during NetMail export');
        }
        return cb(err);
    });
};
|
|
|
|
|
|
|
|
this.exportEchoMailMessagesToUplinks = function(messageUuids, areaConfig, cb) {
    //
    //  Export EchoMail messages (by UUID) to each uplink configured for the
    //  area: export packets to a temp dir, optionally compress into an
    //  ArcMail bundle, then move results to the BSO outgoing directory
    //  (appending flow-file refs for bundles).
    //
    const config = Config();
    async.each(areaConfig.uplinks, (uplink, nextUplink) => {
        const nodeConfig = self.getNodeConfigByAddress(uplink);
        if(!nodeConfig) {
            //  no node config for this uplink; skip it
            return nextUplink();
        }

        const exportOpts = {
            nodeConfig,
            network     : config.messageNetworks.ftn.networks[areaConfig.network],
            destAddress : Address.fromString(uplink),
            networkName : areaConfig.network,
            fileCase    : nodeConfig.fileCase || 'lower',
        };

        //  normalize localAddress config string -> Address instance
        if(_.isString(exportOpts.network.localAddress)) {
            exportOpts.network.localAddress = Address.fromString(exportOpts.network.localAddress);
        }

        const outgoingDir   = self.getOutgoingEchoMailPacketDir(exportOpts.networkName, exportOpts.destAddress);
        const exportType    = self.getExportType(exportOpts.nodeConfig);

        async.waterfall(
            [
                function createOutgoingDir(callback) {
                    fse.mkdirs(outgoingDir, err => {
                        callback(err);
                    });
                },
                function exportToTempArea(callback) {
                    //  yields exportedFileNames (temp .pk_ packet paths)
                    self.exportMessagesByUuid(messageUuids, exportOpts, callback);
                },
                function createArcMailBundle(exportedFileNames, callback) {
                    if(self.archUtil.haveArchiver(exportOpts.nodeConfig.archiveType)) {
                        //  :TODO: support bundleTargetByteSize:
                        //
                        //  Compress to a temp location then we'll move it in the next step
                        //
                        //  Note that we must use the *final* output dir for getOutgoingBundleFileName()
                        //  as it checks for collisions in bundle names!
                        //
                        self.getOutgoingBundleFileName(outgoingDir, exportOpts.network.localAddress, exportOpts.destAddress, (err, bundlePath) => {
                            if(err) {
                                return callback(err);
                            }

                            //  adjust back to temp path
                            const tempBundlePath = paths.join(self.exportTempDir, paths.basename(bundlePath));

                            self.archUtil.compressTo(
                                exportOpts.nodeConfig.archiveType,
                                tempBundlePath,
                                exportedFileNames, err => {
                                    callback(err, [ tempBundlePath ] );
                                }
                            );
                        });
                    } else {
                        //  no archiver available; ship raw packets
                        callback(null, exportedFileNames);
                    }
                },
                function moveFilesToOutgoing(exportedFileNames, callback) {
                    async.each(exportedFileNames, (oldPath, nextFile) => {
                        const ext = paths.extname(oldPath).toLowerCase();
                        //  NOTE(review): |ext| is already lower-cased above; the second toLowerCase() is redundant
                        if('.pk_' === ext.toLowerCase()) {
                            //
                            //  For a given temporary .pk_ file, we need to move it to the outoing
                            //  directory with the appropriate BSO style filename.
                            //
                            const newExt = self.getOutgoingFlowFileExtension(
                                exportOpts.destAddress,
                                'mail',
                                exportType,
                                exportOpts.fileCase
                            );

                            const newPath = paths.join(
                                outgoingDir,
                                `${paths.basename(oldPath, ext)}${newExt}`);

                            fse.move(oldPath, newPath, nextFile);
                        } else {
                            //  bundle file: move as-is, then reference it from a flow file
                            const newPath = paths.join(outgoingDir, paths.basename(oldPath));
                            fse.move(oldPath, newPath, err => {
                                if(err) {
                                    Log.warn(
                                        { oldPath : oldPath, newPath : newPath, error : err.toString() },
                                        'Failed moving temporary bundle file!');

                                    return nextFile();
                                }

                                //
                                //  For bundles, we need to append to the appropriate flow file
                                //
                                const flowFilePath = self.getOutgoingFlowFileName(
                                    outgoingDir,
                                    exportOpts.destAddress,
                                    'ref',
                                    exportType,
                                    exportOpts.fileCase
                                );

                                //  directive of '^' = delete file after transfer
                                self.flowFileAppendRefs(flowFilePath, [ newPath ], '^', err => {
                                    if(err) {
                                        Log.warn( { path : flowFilePath }, 'Failed appending flow reference record!');
                                    }
                                    nextFile();
                                });
                            });
                        }
                    }, callback);
                }
            ],
            err => {
                //  :TODO: do something with |err| ?
                if(err) {
                    Log.warn(err.message);
                }
                //  uplink errors are non-fatal; continue with remaining uplinks
                nextUplink();
            }
        );
    }, cb); //  complete
};
|
|
|
|
|
|
|
|
this.setReplyToMsgIdFtnReplyKludge = function(message, cb) {
    //
    //  Given a FTN REPLY kludge, set |message.replyToMsgId|, if possible,
    //  by looking up an associated MSGID kludge meta.
    //
    //  See also: http://ftsc.org/docs/fts-0009.001
    //
    const replyKludge = message.meta.FtnKludge.REPLY;

    if(!_.isString(replyKludge)) {
        return cb();    //  no REPLY kludge present; nothing to do
    }

    Message.getMessageIdsByMetaValue('FtnKludge', 'MSGID', replyKludge, (err, matchingIds) => {
        if(matchingIds && matchingIds.length > 0) {
            //  expect a single match, but dupe checking is not perfect - warn otherwise
            if(matchingIds.length === 1) {
                message.replyToMsgId = matchingIds[0];
            } else {
                Log.warn( { msgIds : matchingIds, replyKludge : replyKludge }, 'Found 2:n MSGIDs matching REPLY kludge!');
            }
        }
        cb();
    });
};
|
|
|
|
|
|
|
|
this.getLocalUserNameFromAlias = function(lookup) {
    //
    //  Resolve a NetMail alias (messageNetworks.ftn.netMail.aliases) to a
    //  local username. Matching is case-insensitive; with no alias table or
    //  no match, the lower-cased |lookup| is returned unchanged.
    //
    const lookupLower = lookup.toLowerCase();

    const aliases = _.get(Config(), 'messageNetworks.ftn.netMail.aliases');
    if(!aliases) {
        return lookupLower; //  keep orig
    }

    const localName = _.find(aliases, (userName, aliasName) => aliasName.toLowerCase() === lookupLower);

    return localName || lookupLower;
};
|
|
|
|
|
|
|
|
this.getAddressesFromNetMailMessage = function(message) {
    //
    //  Extract { to, from } Address instances from a NetMail message's
    //  INTL kludge, applying TOPT/FMPT point kludges when present.
    //  Returns {} when no usable INTL line exists.
    //
    const intlKludge = _.get(message, 'meta.FtnKludge.INTL');
    if(!intlKludge) {
        return {};
    }

    //  INTL line carries "<dest> <orig>"
    let [ to, from ] = intlKludge.split(' ');
    if(!to || !from) {
        return {};
    }

    //  FMPT/TOPT supply the point portion of each address, if any
    const fromPoint = _.get(message, 'meta.FtnKludge.FMPT');
    if(fromPoint) {
        from += `.${fromPoint}`;
    }

    const toPoint = _.get(message, 'meta.FtnKludge.TOPT');
    if(toPoint) {
        to += `.${toPoint}`;
    }

    return { to : Address.fromString(to), from : Address.fromString(from) };
};
|
|
|
|
|
|
|
|
this.importMailToArea = function(config, header, message, cb) {
    //
    //  Import a single parsed FTN message into the local area given by
    //  |config.localAreaTag|: validate destination, reject dupe MSGIDs,
    //  resolve the local "to" user for NetMail, and persist.
    //
    //  |config|    : { localAreaTag }
    //  |header|    : packet header the message arrived in (currently unused here)
    //  |message|   : Message instance built from the packet
    //  |cb|        : cb(err); err.code may be 'DUPE_MSGID' / 'SQLITE_CONSTRAINT'
    //
    async.series(
        [
            function validateDestinationAddress(callback) {
                const localNetworkPattern = `${message.meta.FtnProperty.ftn_dest_network}/${message.meta.FtnProperty.ftn_dest_node}`;
                const localNetworkName = self.getNetworkNameByAddressPattern(localNetworkPattern);

                return callback(_.isString(localNetworkName) ? null : new Error('Packet destination is not us'));
            },
            function checkForDupeMSGID(callback) {
                //
                //  If we have a MSGID, don't allow a dupe
                //
                if(!_.has(message.meta, 'FtnKludge.MSGID')) {
                    return callback(null);
                }

                Message.getMessageIdsByMetaValue('FtnKludge', 'MSGID', message.meta.FtnKludge.MSGID, (err, msgIds) => {
                    if(msgIds && msgIds.length > 0) {
                        const err = new Error('Duplicate MSGID');
                        err.code = 'DUPE_MSGID';
                        return callback(err);
                    }

                    return callback(null);
                });
            },
            function basicSetup(callback) {
                message.areaTag = config.localAreaTag;

                //  indicate this was imported from FTN
                message.meta.System[Message.SystemMetaNames.ExternalFlavor] = Message.AddressFlavor.FTN;

                //
                //  If we *allow* dupes (disabled by default), then just generate
                //  a random UUID. Otherwise, don't assign the UUID just yet. It will be
                //  generated at persist() time and should be consistent across import/exports
                //
                if(true === _.get(Config(), [ 'messageNetworks', 'ftn', 'areas', config.localAreaTag, 'allowDupes' ], false)) {
                    //  just generate a UUID & therefor always allow for dupes
                    message.uuid = uuidV4();
                }

                return callback(null);
            },
            function setReplyToMessageId(callback) {
                //  best effort; REPLY-kludge lookup failures are ignored
                self.setReplyToMsgIdFtnReplyKludge(message, () => {
                    return callback(null);
                });
            },
            function setupPrivateMessage(callback) {
                //
                //  If this is a private message (e.g. NetMail) we set the local user ID
                //
                if(Message.WellKnownAreaTags.Private !== config.localAreaTag) {
                    return callback(null);
                }

                //
                //  Create a meta value for the *remote* from user. In the case here with FTN,
                //  their fully qualified FTN from address
                //
                const { from } = self.getAddressesFromNetMailMessage(message);

                if(!from) {
                    return callback(Errors.Invalid('Cannot import FTN NetMail without valid INTL line'));
                }

                message.meta.System[Message.SystemMetaNames.RemoteFromUser] = from.toString();

                const lookupName = self.getLocalUserNameFromAlias(message.toUserName);

                User.getUserIdAndNameByLookup(lookupName, (err, localToUserId, localUserName) => {
                    if(err) {
                        //
                        //  Couldn't find a local username. If the toUserName itself is a FTN address
                        //  we can only assume the message is to the +op, else we'll have to fail.
                        //
                        const toUserNameAsAddress = Address.fromString(message.toUserName);
                        if(toUserNameAsAddress && toUserNameAsAddress.isValid()) {

                            Log.info(
                                { toUserName : message.toUserName, fromUserName : message.fromUserName },
                                'No local "to" username for FTN message. Appears to be a FTN address only; assuming addressed to SysOp'
                            );

                            //  fix: |return| added -- without it control fell through below,
                            //  clobbering toUserName with undefined and invoking |callback| twice
                            return User.getUserName(User.RootUserID, (err, sysOpUserName) => {
                                if(err) {
                                    return callback(Errors.UnexpectedState('Failed to get SysOp user information'));
                                }

                                message.meta.System[Message.SystemMetaNames.LocalToUserID] = User.RootUserID;
                                message.toUserName = sysOpUserName;
                                return callback(null);
                            });
                        } else {
                            return callback(Errors.DoesNotExist(`Could not get local user ID for "${message.toUserName}": ${err.message}`));
                        }
                    }

                    //  we do this after such that error cases can be preserved above
                    if(lookupName !== message.toUserName) {
                        message.toUserName = localUserName;
                    }

                    //  set the meta information - used elsewhere for retrieval
                    message.meta.System[Message.SystemMetaNames.LocalToUserID] = localToUserId;
                    return callback(null);
                });
            },
            function persistImport(callback) {
                //  mark as imported
                message.meta.System.state_flags0 = Message.StateFlags0.Imported.toString();

                //  save to disc
                message.persist(err => {
                    if(!message.isPrivate()) {
                        StatLog.incrementNonPersistentSystemStat(SysProps.MessageTotalCount, 1);
                        StatLog.incrementNonPersistentSystemStat(SysProps.MessagesToday, 1);
                    }
                    return callback(err);
                });
            }
        ],
        err => {
            cb(err);
        }
    );
};
|
|
|
|
|
|
|
|
this.appendTearAndOrigin = function(message) {
    //
    //  Append the FTN tear line and origin line (when present in
    //  message.meta.FtnProperty) to the message body, CRLF-delimited.
    //
    const ftnProp = message.meta.FtnProperty;

    if(ftnProp.ftn_tear_line) {
        message.message += `\r\n${ftnProp.ftn_tear_line}\r\n`;
    }

    if(ftnProp.ftn_origin) {
        message.message += `${ftnProp.ftn_origin}\r\n`;
    }
};
|
|
|
|
|
|
|
|
//
|
2018-06-23 03:26:46 +00:00
|
|
|
// Ref. implementations on import:
|
|
|
|
// * https://github.com/larsks/crashmail/blob/26e5374710c7868dab3d834be14bf4041041aae5/crashmail/pkt.c
|
|
|
|
// https://github.com/larsks/crashmail/blob/26e5374710c7868dab3d834be14bf4041041aae5/crashmail/handle.c
|
2018-06-22 05:15:04 +00:00
|
|
|
//
|
|
|
|
this.importMessagesFromPacketFile = function(packetPath, password, cb) {
    //
    //  Stream-read a FTN .pkt file and import each message into its local
    //  area (EchoMail by AREA tag, NetMail by Private attribute). Collects
    //  per-area success/fail counts and logs a summary.
    //
    //  NOTE(review): |password| is currently unvalidated (see :TODO: below).
    //
    let packetHeader;

    const packetOpts = { keepTearAndOrigin : false };   //  needed so we can calc message UUID without these; we'll add later

    let importStats = {
        areaSuccess : {},   //  areaTag->count
        areaFail    : {},   //  areaTag->count
        otherFail   : 0,
    };

    new ftnMailPacket.Packet(packetOpts).read(packetPath, (entryType, entryData, next) => {
        if('header' === entryType) {
            packetHeader = entryData;

            //  the packet must be addressed to one of our configured networks
            const localNetworkName = self.getNetworkNameByAddress(packetHeader.destAddress);
            if(!_.isString(localNetworkName)) {
                const addrString = new Address(packetHeader.destAddress).toString();
                return next(new Error(`No local configuration for packet addressed to ${addrString}`));
            } else {
                //  :TODO: password needs validated - need to determine if it will use the same node config (which can have wildcards) or something else?!
                return next(null);
            }

        } else if('message' === entryType) {
            const message = entryData;
            const areaTag = message.meta.FtnProperty.ftn_area;

            let localAreaTag;
            if(areaTag) {
                //  EchoMail: map FTN area tag -> local area tag
                localAreaTag = self.getLocalAreaTagByFtnAreaTag(areaTag);

                if(!localAreaTag) {
                    //
                    //  No local area configured for this import
                    //
                    //  :TODO: Handle the "catch all" area bucket case if configured
                    Log.warn( { areaTag : areaTag }, 'No local area configured for this packet file!');

                    //  bump generic failure
                    importStats.otherFail += 1;

                    return next(null);
                }
            } else {
                //
                //  No area tag: If marked private in attributes, this is a NetMail
                //
                if(message.meta.FtnProperty.ftn_attr_flags & ftnMailPacket.Packet.Attribute.Private) {
                    localAreaTag = Message.WellKnownAreaTags.Private;
                } else {
                    Log.warn('Non-private message without area tag');
                    importStats.otherFail += 1;
                    return next(null);
                }
            }

            //  UUID is derived pre tear/origin so it matches across import/export
            message.uuid = Message.createMessageUUID(
                localAreaTag,
                message.modTimestamp,
                message.subject,
                message.message);

            self.appendTearAndOrigin(message);

            const importConfig = {
                localAreaTag : localAreaTag,
            };

            self.importMailToArea(importConfig, packetHeader, message, err => {
                if(err) {
                    //  bump area fail stats
                    importStats.areaFail[localAreaTag] = (importStats.areaFail[localAreaTag] || 0) + 1;

                    //  dupes are expected noise; log & keep importing
                    if('SQLITE_CONSTRAINT' === err.code || 'DUPE_MSGID' === err.code) {
                        const msgId = _.has(message.meta, 'FtnKludge.MSGID') ? message.meta.FtnKludge.MSGID : 'N/A';
                        Log.info(
                            { area : localAreaTag, subject : message.subject, uuid : message.uuid, MSGID : msgId },
                            'Not importing non-unique message');

                        return next(null);
                    }
                } else {
                    //  bump area success
                    importStats.areaSuccess[localAreaTag] = (importStats.areaSuccess[localAreaTag] || 0) + 1;
                }

                return next(err);
            });
        }
    }, err => {
        //
        //  try to produce something helpful in the log
        //
        const finalStats = Object.assign(importStats, { packetPath : packetPath } );
        if(err || Object.keys(finalStats.areaFail).length > 0) {
            if(err) {
                Object.assign(finalStats, { error : err.message } );
            }

            Log.warn(finalStats, 'Import completed with error(s)');
        } else {
            Log.info(finalStats, 'Import complete');
        }

        cb(err);
    });
};
|
|
|
|
|
|
|
|
this.maybeArchiveImportFile = function(origPath, type, status, cb) {
    //
    //  type    : pkt|tic|bundle
    //  status  : good|reject
    //
    //  Status of "good" is only applied to pkt files & placed
    //  in |retain| if set. This is generally used for debugging only.
    //
    const ts = moment().format('YYYY-MM-DDTHH.mm.ss.SSS');
    const fn = paths.basename(origPath);

    let archivePath;
    if('good' !== status) {
        //  rejects of any type go to the |reject| path
        archivePath = paths.join(self.moduleConfig.paths.reject, `${status}-${type}--${ts}-${fn}`);
    } else if('pkt' === type) {
        //  good pkt files are retained only when |retain| is configured
        if(!_.isString(self.moduleConfig.paths.retain)) {
            return cb(null);
        }

        archivePath = paths.join(self.moduleConfig.paths.retain, `good-pkt-${ts}--${fn}`);
    } else {
        return cb(null);    //  don't archive non-good/pkt files
    }

    Log.debug( { origPath : origPath, archivePath : archivePath, type : type, status : status }, 'Archiving import file');

    fse.copy(origPath, archivePath, err => {
        if(err) {
            Log.warn( { error : err.message, origPath : origPath, archivePath : archivePath, type : type, status : status }, 'Failed to archive packet file');
        }

        return cb(null);    //  never fatal
    });
};
|
|
|
|
|
|
|
|
this.importPacketFilesFromDirectory = function(importDir, password, cb) {
    //
    //  Import every *.pkt file found in |importDir| (serially), then
    //  archive (retain/reject) and delete the originals.
    //
    //  NOTE(review): |password| is accepted but '' is passed to
    //  importMessagesFromPacketFile below -- confirm intent.
    //
    async.waterfall(
        [
            function getPacketFiles(callback) {
                fs.readdir(importDir, (err, files) => {
                    if(err) {
                        return callback(err);
                    }
                    //  case-insensitive .pkt match
                    callback(null, files.filter(f => '.pkt' === paths.extname(f).toLowerCase()));
                });
            },
            function importPacketFiles(packetFiles, callback) {
                let rejects = [];
                async.eachSeries(packetFiles, (packetFile, nextFile) => {
                    self.importMessagesFromPacketFile(paths.join(importDir, packetFile), '', err => {
                        if(err) {
                            Log.debug(
                                { path : paths.join(importDir, packetFile), error : err.toString() },
                                'Failed to import packet file');

                            //  remember failures for the reject archive below
                            rejects.push(packetFile);
                        }
                        nextFile();
                    });
                }, err => {
                    //  :TODO: Handle err! we should try to keep going though...
                    callback(err, packetFiles, rejects);
                });
            },
            function handleProcessedFiles(packetFiles, rejects, callback) {
                async.each(packetFiles, (packetFile, nextFile) => {
                    //  possibly archive, then remove original
                    const fullPath = paths.join(importDir, packetFile);
                    self.maybeArchiveImportFile(
                        fullPath,
                        'pkt',
                        rejects.includes(packetFile) ? 'reject' : 'good',
                        () => {
                            //  unlink errors intentionally ignored (best effort cleanup)
                            fs.unlink(fullPath, () => {
                                return nextFile(null);
                            });
                        }
                    );
                }, err => {
                    callback(err);
                });
            }
        ],
        err => {
            cb(err);
        }
    );
};
|
|
|
|
|
|
|
|
//  Imports all FTN artifacts found in |importDir| for the given inbound
//  (|inboundType| is 'inbound' or 'secInbound'):
//  1) loose .pkt packet files,
//  2) compressed mail bundles (day-of-week extensions such as .su0, .mo1, ...),
//     which are extracted to |importTempDir| and then imported as packets,
//  3) TIC files + their attachments.
//
//  |cb| receives any fatal error; per-file problems are logged and the file
//  is tracked in |rejects| for archival by handleProcessedBundleFiles().
this.importFromDirectory = function(inboundType, importDir, cb) {
    async.waterfall(
        [
            // start with .pkt files
            function importPacketFiles(callback) {
                self.importPacketFilesFromDirectory(importDir, '', err => {
                    callback(err);
                });
            },
            function discoverBundles(callback) {
                fs.readdir(importDir, (err, files) => {
                    //  fix: a readdir failure previously fell through and
                    //  crashed on |files.filter| with |files| undefined
                    if(err) {
                        return callback(err);
                    }

                    // :TODO: if we do much more of this, probably just use the glob module
                    const bundleRegExp = /\.(su|mo|tu|we|th|fr|sa)[0-9a-z]/i;
                    files = files.filter(f => {
                        const fext = paths.extname(f);
                        return bundleRegExp.test(fext);
                    });

                    async.map(files, (file, transform) => {
                        const fullPath = paths.join(importDir, file);
                        self.archUtil.detectType(fullPath, (err, archName) => {
                            //  |archName| stays undefined for unknown archive
                            //  types; such entries are rejected in importBundles
                            transform(null, { path : fullPath, archName : archName } );
                        });
                    }, (err, bundleFiles) => {
                        callback(err, bundleFiles);
                    });
                });
            },
            function importBundles(bundleFiles, callback) {
                let rejects = [];

                async.each(bundleFiles, (bundleFile, nextFile) => {
                    if(_.isUndefined(bundleFile.archName)) {
                        Log.warn(
                            { fileName : bundleFile.path },
                            'Unknown bundle archive type');

                        rejects.push(bundleFile.path);

                        return nextFile(); // unknown archive type
                    }

                    Log.debug( { bundleFile : bundleFile }, 'Processing bundle' );

                    self.archUtil.extractTo(
                        bundleFile.path,
                        self.importTempDir,
                        bundleFile.archName,
                        err => {
                            if(err) {
                                Log.warn(
                                    { path : bundleFile.path, error : err.message },
                                    'Failed to extract bundle');

                                rejects.push(bundleFile.path);
                            }

                            nextFile();
                        }
                    );
                }, err => {
                    if(err) {
                        return callback(err);
                    }

                    //
                    // All extracted - import .pkt's
                    //
                    self.importPacketFilesFromDirectory(self.importTempDir, '', () => {
                        // :TODO: handle |err|
                        callback(null, bundleFiles, rejects);
                    });
                });
            },
            //  archive (good vs reject) then remove each processed bundle
            function handleProcessedBundleFiles(bundleFiles, rejects, callback) {
                async.each(bundleFiles, (bundleFile, nextFile) => {
                    self.maybeArchiveImportFile(
                        bundleFile.path,
                        'bundle',
                        rejects.includes(bundleFile.path) ? 'reject' : 'good',
                        () => {
                            fs.unlink(bundleFile.path, err => {
                                if(err) {
                                    Log.error( { path : bundleFile.path, error : err.message }, 'Failed unlinking bundle');
                                }
                                return nextFile(null);
                            });
                        }
                    );
                }, err => {
                    callback(err);
                });
            },
            function importTicFiles(callback) {
                self.processTicFilesInDirectory(importDir, err => {
                    return callback(err);
                });
            }
        ],
        err => {
            cb(err);
        }
    );
};
|
|
|
|
|
|
|
|
//  Creates the per-session export and import temporary directories via
//  temptmp (tracked for cleanup at shutdown). On success the paths are
//  stored on |self.exportTempDir| / |self.importTempDir|; |cb| receives
//  any mkdir error.
this.createTempDirectories = function(cb) {
    temptmp.mkdir( { prefix : 'enigftnexport-' }, (exportErr, exportDir) => {
        if(exportErr) {
            return cb(exportErr);
        }

        self.exportTempDir = exportDir;

        temptmp.mkdir( { prefix : 'enigftnimport-' }, (importErr, importDir) => {
            self.importTempDir = importDir;
            return cb(importErr);
        });
    });
};
|
|
|
|
|
2018-06-23 03:26:46 +00:00
|
|
|
// Starts an export block - returns true if we can proceed
|
2018-06-22 05:15:04 +00:00
|
|
|
//  Attempts to enter an export block. Returns true if the caller acquired
//  the single export slot and may proceed; false when an export is already
//  in progress.
this.exportingStart = function() {
    if(this.exportRunning) {
        return false;
    }

    this.exportRunning = true;
    return true;
};
|
|
|
|
|
2018-06-23 03:26:46 +00:00
|
|
|
// ends an export block
|
2018-06-22 05:15:04 +00:00
|
|
|
//  Ends an export block previously entered via exportingStart().
//  |cb| is optional; when supplied it is always invoked with null.
this.exportingEnd = function(cb) {
    this.exportRunning = false;
    return cb ? cb(null) : undefined;
};
|
|
|
|
|
|
|
|
//  Copies a TIC-announced attachment |src| into area storage at |dst|.
//
//  - |isUpdate| true (replacing an existing entry): overwrite |dst| in
//    place; |cb| receives (err, dst).
//  - otherwise: copy with collision handling, which may choose an alternate
//    name; |cb| receives (err, finalPath).
//
//  fix: previously called fse.copy() from fs-extra, which is NOT required
//  by this module -- a guaranteed ReferenceError on the update path.
//  fs.copyFile() overwrites the destination by default, matching the
//  intended { overwrite : true } semantics.
this.copyTicAttachment = function(src, dst, isUpdate, cb) {
    if(isUpdate) {
        fs.copyFile(src, dst, err => {
            return cb(err, dst);
        });
    } else {
        copyFileWithCollisionHandling(src, dst, (err, finalPath) => {
            return cb(err, finalPath);
        });
    }
};
|
|
|
|
|
|
|
|
//  Returns the set (deduplicated union) of local area tags eligible for TIC
//  processing: explicit scannerTossers.ftn_bso.ticAreas mappings plus every
//  configured file base area.
this.getLocalAreaTagsForTic = function() {
    const config = Config();
    const ticAreaTags = Object.keys(config.scannerTossers.ftn_bso.ticAreas || {});
    const fileBaseAreaTags = Object.keys(config.fileBase.areas);
    return _.union(ticAreaTags, fileBaseAreaTags);
};
|
|
|
|
|
|
|
|
//  Fully processes a single parsed TIC file:
//    1) validate it against node/password/area configuration,
//    2) optionally locate an existing entry to replace ("Replaces" support),
//    3) scan the attachment into a FileEntry (hashes, metadata, hash tags),
//    4) copy it into area storage and persist the entry,
//    5) remove the old physical file when this was a replacement.
//  |cb| receives the first fatal error, or null on success.
this.processSingleTicFile = function(ticFileInfo, cb) {
    Log.debug( { tic : ticFileInfo.path, file : ticFileInfo.getAsString('File') }, 'Processing TIC file');

    async.waterfall(
        [
            //  validate node, password, and that the announced area is local
            function generalValidation(callback) {
                const sysConfig = Config();
                const config = {
                    nodes : sysConfig.scannerTossers.ftn_bso.nodes,
                    defaultPassword : sysConfig.scannerTossers.ftn_bso.tic.password,
                    localAreaTags : self.getLocalAreaTagsForTic(),
                };

                return ticFileInfo.validate(config, (err, localInfo) => {
                    if(err) {
                        Log.trace( { reason : err.message }, 'Validation failure');
                        return callback(err);
                    }

                    // We may need to map |localAreaTag| back to real areaTag if it's a mapping/alias
                    const mappedLocalAreaTag = _.get(Config().scannerTossers.ftn_bso, [ 'ticAreas', localInfo.areaTag ]);

                    if(mappedLocalAreaTag) {
                        //  mapping may be an object (with overrides) or a bare string
                        if(_.isString(mappedLocalAreaTag.areaTag)) {
                            localInfo.areaTag = mappedLocalAreaTag.areaTag;
                            localInfo.hashTags = mappedLocalAreaTag.hashTags; // override default for node
                            localInfo.storageTag = mappedLocalAreaTag.storageTag; // override default
                        } else if(_.isString(mappedLocalAreaTag)) {
                            localInfo.areaTag = mappedLocalAreaTag;
                        }
                    }

                    return callback(null, localInfo);
                });
            },
            function findExistingItem(localInfo, callback) {
                //
                // We will need to look for an existing item to replace/update if:
                // a) The TIC file has a "Replaces" field
                // b) The general or node specific |allowReplace| is true
                //
                // Replace specifies a DOS 8.3 *pattern* which is allowed to have
                // ? and * characters. For example, RETRONET.*
                //
                // Lastly, we will only replace if the item is in the same/specified area
                // and that come from the same origin as a previous entry.
                //
                const allowReplace = _.get(Config().scannerTossers.ftn_bso.nodes, [ localInfo.node, 'tic', 'allowReplace' ], Config().scannerTossers.ftn_bso.tic.allowReplace);
                const replaces = ticFileInfo.getAsString('Replaces');

                if(!allowReplace || !replaces) {
                    return callback(null, localInfo);
                }

                //  match on stored (uppercased) 8.3 name pattern AND same TIC origin
                const metaPairs = [
                    {
                        name : 'short_file_name',
                        value : replaces.toUpperCase(), // we store upper as well
                        wildcards : true, // value may contain wildcards
                    },
                    {
                        name : 'tic_origin',
                        value : ticFileInfo.getAsString('Origin'),
                    }
                ];

                FileEntry.findFiles( { metaPairs : metaPairs, areaTag : localInfo.areaTag }, (err, fileIds) => {
                    if(err) {
                        return callback(err);
                    }

                    // 0:1 allowed
                    if(1 === fileIds.length) {
                        localInfo.existingFileId = fileIds[0];

                        // fetch old filename - we may need to remove it if replacing with a new name
                        FileEntry.loadBasicEntry(localInfo.existingFileId, {}, (err, info) => {
                            if(info) {
                                Log.trace(
                                    { fileId : localInfo.existingFileId, oldFileName : info.fileName, oldStorageTag : info.storageTag },
                                    'Existing TIC file target to be replaced'
                                );

                                localInfo.oldFileName = info.fileName;
                                localInfo.oldStorageTag = info.storageTag;
                            }
                            return callback(null, localInfo); // continue even if we couldn't find an old match
                        });
                    } else if(fileIds.length > 1) {
                        //  ambiguous Replaces target: refuse to guess
                        return callback(Errors.General(`More than one existing entry for TIC in ${localInfo.areaTag} ([${fileIds.join(', ')}])`));
                    } else {
                        return callback(null, localInfo);
                    }
                });
            },
            //  scan the physical attachment, building a (not yet persisted) FileEntry
            function scan(localInfo, callback) {
                const scanOpts = {
                    sha256 : localInfo.sha256, // *may* have already been calculated
                    meta : {
                        // some TIC-related metadata we always want
                        short_file_name : ticFileInfo.getAsString('File').toUpperCase(), // upper to ensure no case issues later; this should be a DOS 8.3 name
                        tic_origin : ticFileInfo.getAsString('Origin'),
                        tic_desc : ticFileInfo.getAsString('Desc'),
                        upload_by_username : _.get(Config().scannerTossers.ftn_bso.nodes, [ localInfo.node, 'tic', 'uploadBy' ], Config().scannerTossers.ftn_bso.tic.uploadBy),
                    }
                };

                //  long description, if present, joined with newlines
                const ldesc = ticFileInfo.getAsString('Ldesc', '\n');
                if(ldesc) {
                    scanOpts.meta.tic_ldesc = ldesc;
                }

                //
                // We may have TIC auto-tagging for this node and/or specific (remote) area
                //
                const hashTags =
                    localInfo.hashTags ||
                    _.get(Config().scannerTossers.ftn_bso.nodes, [ localInfo.node, 'tic', 'hashTags' ] ); // catch-all*/

                if(hashTags) {
                    //  split on whitespace and/or commas
                    scanOpts.hashTags = new Set(hashTags.split(/[\s,]+/));
                }

                if(localInfo.crc32) {
                    scanOpts.meta.file_crc32 = localInfo.crc32.toString(16); // again, *may* have already been calculated
                }

                scanFile(
                    ticFileInfo.filePath,
                    scanOpts,
                    (err, fileEntry) => {
                        if(err) {
                            Log.trace( { reason : err.message }, 'Scanning failed');
                        }

                        localInfo.fileEntry = fileEntry;
                        return callback(err, localInfo);
                    }
                );
            },
            function store(localInfo, callback) {
                //
                // Move file to final area storage and persist to DB
                //
                const areaInfo = getFileAreaByTag(localInfo.areaTag);
                if(!areaInfo) {
                    return callback(Errors.UnexpectedState(`Could not get area for tag ${localInfo.areaTag}`));
                }

                //  explicit storage tag from TIC area mapping, else the area default
                const storageTag = localInfo.storageTag || areaInfo.storageTags[0];
                if(!isValidStorageTag(storageTag)) {
                    return callback(Errors.Invalid(`Invalid storage tag: ${storageTag}`));
                }

                localInfo.fileEntry.storageTag = storageTag;
                localInfo.fileEntry.areaTag = localInfo.areaTag;
                localInfo.fileEntry.fileName = ticFileInfo.longFileName;

                //
                // We may now have two descriptions: from .DIZ/etc. or the TIC itself.
                // Determine which one to use using |descPriority| and availability.
                //
                // We will still fallback as needed from <priority1> -> <priority2> -> <fromFileName>
                //
                const descPriority = _.get(
                    Config().scannerTossers.ftn_bso.nodes, [ localInfo.node, 'tic', 'descPriority' ],
                    Config().scannerTossers.ftn_bso.tic.descPriority
                );

                if('tic' === descPriority) {
                    const origDesc = localInfo.fileEntry.desc;
                    localInfo.fileEntry.desc = ticFileInfo.getAsString('Ldesc') || origDesc || getDescFromFileName(ticFileInfo.filePath);
                } else {
                    // see if we got desc from .DIZ/etc.
                    const fromDescFile = 'descFile' === localInfo.fileEntry.descSrc;
                    localInfo.fileEntry.desc = fromDescFile ? localInfo.fileEntry.desc : ticFileInfo.getAsString('Ldesc');
                    localInfo.fileEntry.desc = localInfo.fileEntry.desc || getDescFromFileName(ticFileInfo.filePath);
                }

                const areaStorageDir = getAreaStorageDirectoryByTag(storageTag);
                if(!areaStorageDir) {
                    return callback(Errors.UnexpectedState(`Could not get storage directory for tag ${localInfo.areaTag}`));
                }

                const isUpdate = localInfo.existingFileId ? true : false;

                if(isUpdate) {
                    // we need to *update* an existing record/file
                    localInfo.fileEntry.fileId = localInfo.existingFileId;
                }

                const dst = paths.join(areaStorageDir, localInfo.fileEntry.fileName);

                self.copyTicAttachment(ticFileInfo.filePath, dst, isUpdate, (err, finalPath) => {
                    if(err) {
                        Log.info( { reason : err.message }, 'Failed to copy TIC attachment');
                        return callback(err);
                    }

                    //  collision handling may have chosen an alternate name
                    if(dst !== finalPath) {
                        localInfo.fileEntry.fileName = paths.basename(finalPath);
                    }

                    localInfo.fileEntry.persist(isUpdate, err => {
                        return callback(err, localInfo);
                    });
                });
            },
            // :TODO: from here, we need to re-toss files if needed, before they are removed
            //  when replacing, remove the previous physical file (best effort)
            function cleanupOldFile(localInfo, callback) {
                if(!localInfo.existingFileId) {
                    return callback(null, localInfo);
                }

                const oldStorageDir = getAreaStorageDirectoryByTag(localInfo.oldStorageTag);
                const oldPath = paths.join(oldStorageDir, localInfo.oldFileName);

                fs.unlink(oldPath, err => {
                    if(err) {
                        Log.warn( { error : err.message, oldPath : oldPath }, 'Failed removing old physical file during TIC replacement');
                    } else {
                        Log.trace( { oldPath : oldPath }, 'Removed old physical file during TIC replacement');
                    }
                    return callback(null, localInfo); // continue even if err
                });
            },
        ],
        (err, localInfo) => {
            if(err) {
                Log.error( { error : err.message, reason : err.reason, tic : ticFileInfo.filePath }, 'Failed import/update TIC record' );
            } else {
                Log.info(
                    { tic : ticFileInfo.path, file : ticFileInfo.filePath, area : localInfo.areaTag },
                    'TIC imported successfully'
                );
            }
            return cb(err);
        }
    );
};
|
|
|
|
|
|
|
|
//  Removes the physical files associated with a processed TIC: the .TIC file
//  itself (|path|) plus the announced attachment (|filePath|). Unlink errors
//  other than ENOENT are logged, but never abort processing.
this.removeAssocTicFiles = function(ticFileInfo, cb) {
    const targets = [ ticFileInfo.path, ticFileInfo.filePath ];

    async.each(targets, (target, nextPath) => {
        fs.unlink(target, err => {
            if(err && 'ENOENT' !== err.code) { // don't log when the file doesn't exist
                Log.warn( { error : err.message, path : target }, 'Failed unlinking TIC file');
            }
            return nextPath(null);
        });
    },
    err => {
        return cb(err);
    });
};
|
|
|
|
|
|
|
|
|
|
|
|
//  Exports new EchoMail messages, per configured FTN area, to that area's
//  uplinks, then advances the per-area last-scan checkpoint.
this.performEchoMailExport = function(cb) {
    //
    // Select all messages with a |message_id| > |lastScanId|.
    // Additionally exclude messages with the System state_flags0 which will be present for
    // imported or already exported messages
    //
    // NOTE: If StateFlags0 starts to use additional bits, we'll likely need to check them here!
    //
    const getNewUuidsSql =
        `SELECT message_id, message_uuid
        FROM message m
        WHERE area_tag = ? AND message_id > ? AND
        (SELECT COUNT(message_id)
        FROM message_meta
        WHERE message_id = m.message_id AND meta_category = 'System' AND meta_name = 'state_flags0') = 0
        ORDER BY message_id;`
        ;

    // we shouldn't, but be sure we don't try to pick up private mail here
    const config = Config();
    const areaTags = Object.keys(config.messageNetworks.ftn.areas)
        .filter(areaTag => Message.WellKnownAreaTags.Private !== areaTag);

    async.each(areaTags, (areaTag, nextArea) => {
        const areaConfig = config.messageNetworks.ftn.areas[areaTag];
        if(!this.isAreaConfigValid(areaConfig)) {
            return nextArea();
        }

        //
        // For each message that is newer than that of the last scan
        // we need to export to each configured associated uplink(s)
        //
        async.waterfall(
            [
                function getLastScanId(callback) {
                    self.getAreaLastScanId(areaTag, callback);
                },
                function getNewUuids(lastScanId, callback) {
                    msgDb.all(getNewUuidsSql, [ areaTag, lastScanId ], (err, rows) => {
                        if(err) {
                            return callback(err);
                        }

                        if(0 === rows.length) {
                            //  flagged with |noRows| so the final handler can tell
                            //  "nothing to export" apart from real failures
                            const nothingToDoErr = new Error('Nothing to do!');
                            nothingToDoErr.noRows = true;
                            return callback(nothingToDoErr);
                        }

                        return callback(null, rows);
                    });
                },
                function exportToConfiguredUplinks(msgRows, callback) {
                    const uuidsOnly = msgRows.map(r => r.message_uuid); // convert to array of UUIDs only
                    self.exportEchoMailMessagesToUplinks(uuidsOnly, areaConfig, err => {
                        const newLastScanId = msgRows[msgRows.length - 1].message_id;

                        Log.info(
                            { areaTag : areaTag, messagesExported : msgRows.length, newLastScanId : newLastScanId },
                            'Export complete');

                        callback(err, newLastScanId);
                    });
                },
                function updateLastScanId(newLastScanId, callback) {
                    self.setAreaLastScanId(areaTag, newLastScanId, callback);
                }
            ],
            err => {
                //  fix: real errors (DB, export, checkpoint update) were
                //  previously discarded without any trace; log them, but
                //  still continue on to the next area
                if(err && true !== err.noRows) {
                    Log.warn( { areaTag : areaTag, error : err.message }, 'EchoMail export failed for area');
                }
                return nextArea();
            }
        );
    },
    err => {
        return cb(err);
    });
};
|
|
|
|
|
|
|
|
//  Exports new NetMail (private, FTN-flavored) messages to uplinks.
//  |cb| receives any fatal error; no-new-mail is a silent success.
this.performNetMailExport = function(cb) {
    //
    // Select all messages with a |message_id| > |lastScanId| in the private area
    // that are schedule for export to FTN-style networks.
    //
    // Just like EchoMail, we additionally exclude messages with the System state_flags0
    // which will be present for imported or already exported messages
    //
    //
    // :TODO: fill out the rest of the consts here
    // :TODO: this statement is crazy ugly -- use JOIN / NOT EXISTS for state_flags & 0x02
    const getNewUuidsSql =
        `SELECT message_id, message_uuid
        FROM message m
        WHERE area_tag = '${Message.WellKnownAreaTags.Private}' AND message_id > ? AND
        (SELECT COUNT(message_id)
        FROM message_meta
        WHERE message_id = m.message_id
        AND meta_category = 'System'
        AND (meta_name = 'state_flags0' OR meta_name = 'local_to_user_id')
        ) = 0
        AND
        (SELECT COUNT(message_id)
        FROM message_meta
        WHERE message_id = m.message_id
        AND meta_category = 'System'
        AND meta_name = '${Message.SystemMetaNames.ExternalFlavor}'
        AND meta_value = '${Message.AddressFlavor.FTN}'
        ) = 1
        ORDER BY message_id;
        `;

    async.waterfall(
        [
            function getLastScanId(callback) {
                //  NetMail checkpoint is tracked against the Private area
                return self.getAreaLastScanId(Message.WellKnownAreaTags.Private, callback);
            },
            function getNewUuids(lastScanId, callback) {
                msgDb.all(getNewUuidsSql, [ lastScanId ], (err, rows) => {
                    if(err) {
                        return callback(err);
                    }

                    if(0 === rows.length) {
                        //  deliberate shortcut: calling |cb| directly abandons the
                        //  waterfall (its |callback| is never invoked again)
                        return cb(null); // note |cb| -- early bail out!
                    }

                    return callback(null, rows);
                });
            },
            function exportMessages(rows, callback) {
                const messageUuids = rows.map(r => r.message_uuid);
                return self.exportNetMailMessagesToUplinks(messageUuids, callback);
            }
        ],
        err => {
            return cb(err);
        }
    );
};
|
|
|
|
|
|
|
|
//  Returns true when |message| should be treated as FTN NetMail:
//  private, not addressed to a local user, and flavored for FTN export.
this.isNetMailMessage = function(message) {
    if(!message.isPrivate()) {
        return false;
    }

    if(null !== _.get(message, 'meta.System.LocalToUserID', null)) {
        return false;
    }

    return Message.AddressFlavor.FTN === _.get(message, 'meta.System.external_flavor', null);
};
|
2016-02-10 05:30:59 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
require('util').inherits(FTNMessageScanTossModule, MessageScanTossModule);
|
|
|
|
|
2018-06-23 03:26:46 +00:00
|
|
|
// :TODO: *scheduled* portion of this stuff should probably use event_scheduler - @immediate would still use record().
|
2016-06-20 03:09:45 +00:00
|
|
|
|
2017-03-09 05:37:02 +00:00
|
|
|
//  Processes every .TIC file in |importDir|: each is parsed, then imported
//  via processSingleTicFile(). Rejected TICs (and their attachments) are
//  archived first when configured; in all cases the TIC + attachment are
//  removed from the inbound directory afterwards.
//
// :TODO: pass in 'inbound' vs 'secInbound' -- pass along to processSingleTicFile() where password will be checked
FTNMessageScanTossModule.prototype.processTicFilesInDirectory = function(importDir, cb) {
    const self = this;

    async.waterfall(
        [
            function findTicFiles(callback) {
                fs.readdir(importDir, (err, files) => {
                    if(err) {
                        return callback(err);
                    }

                    //  only *.tic, case-insensitive
                    return callback(null, files.filter(f => '.tic' === paths.extname(f).toLowerCase()));
                });
            },
            function gatherInfo(ticFiles, callback) {
                const ticFilesInfo = [];

                async.each(ticFiles, (fileName, nextFile) => {
                    const fullPath = paths.join(importDir, fileName);

                    TicFileInfo.createFromFile(fullPath, (err, ticInfo) => {
                        if(err) {
                            Log.warn( { error : err.message, path : fullPath }, 'Failed reading TIC file');
                        } else {
                            ticFilesInfo.push(ticInfo);
                        }

                        return nextFile(null);
                    });
                },
                err => {
                    return callback(err, ticFilesInfo);
                });
            },
            function process(ticFilesInfo, callback) {
                async.eachSeries(ticFilesInfo, (ticFileInfo, nextTicInfo) => {
                    //  shared tail for both outcomes: always remove the TIC and
                    //  its attachment, then continue with the next TIC.
                    //  (fix: this continuation was previously duplicated verbatim
                    //  in the success and reject branches)
                    const removeAndNext = () => {
                        self.removeAssocTicFiles(ticFileInfo, () => {
                            return nextTicInfo(null);
                        });
                    };

                    self.processSingleTicFile(ticFileInfo, err => {
                        if(!err) {
                            return removeAndNext();
                        }

                        // archive rejected TIC stuff (.TIC + attach)
                        async.each( [ ticFileInfo.path, ticFileInfo.filePath ], (path, nextPath) => {
                            if(!path) { // possibly rejected due to "File" not existing/etc.
                                return nextPath(null);
                            }

                            self.maybeArchiveImportFile(
                                path,
                                'tic',
                                'reject',
                                () => {
                                    return nextPath(null);
                                }
                            );
                        },
                        () => {
                            return removeAndNext();
                        });
                    });
                }, err => {
                    return callback(err);
                });
            }
        ],
        err => {
            return cb(err);
        }
    );
};
|
|
|
|
|
2016-02-10 05:30:59 +00:00
|
|
|
//  Module startup: create temp directories, then wire up the configured
//  export/import schedules (later.js), an optional @watch import trigger
//  via sane(), and @immediate export support.
FTNMessageScanTossModule.prototype.startup = function(cb) {
    Log.info(`${exports.moduleInfo.name} Scanner/Tosser starting up`);

    let importing = false; // simple re-entrancy guard for imports

    const self = this;

    //  kick off an import unless one is already in flight
    function tryImportNow(reasonDesc, extraInfo) {
        if(!importing) {
            importing = true;

            Log.info( Object.assign({ module : exports.moduleInfo.name }, extraInfo), reasonDesc);

            self.performImport( () => {
                importing = false;
            });
        }
    }

    this.createTempDirectories(err => {
        if(err) {
            //  fix: was |err.toStrong()| -- a typo that threw a TypeError on
            //  the very path meant to report the failure
            Log.warn( { error : err.toString() }, 'Failed creating temporary directories!');
            return cb(err);
        }

        if(_.isObject(this.moduleConfig.schedule)) {
            const exportSchedule = this.parseScheduleString(this.moduleConfig.schedule.export);
            if(exportSchedule) {
                Log.debug(
                    {
                        schedule : this.moduleConfig.schedule.export,
                        schedOK : -1 === exportSchedule.sched.error,
                        next : moment(later.schedule(exportSchedule.sched).next(1)).format('ddd, MMM Do, YYYY @ h:m:ss a'),
                        immediate : exportSchedule.immediate ? true : false,
                    },
                    'Export schedule loaded'
                );

                if(exportSchedule.sched) {
                    this.exportTimer = later.setInterval( () => {
                        //  only one export at a time
                        if(this.exportingStart()) {
                            Log.info( { module : exports.moduleInfo.name }, 'Performing scheduled message scan/export...');

                            this.performExport( () => {
                                this.exportingEnd();
                            });
                        }
                    }, exportSchedule.sched);
                }

                if(_.isBoolean(exportSchedule.immediate)) {
                    this.exportImmediate = exportSchedule.immediate;
                }
            }

            const importSchedule = this.parseScheduleString(this.moduleConfig.schedule.import);
            if(importSchedule) {
                Log.debug(
                    {
                        schedule : this.moduleConfig.schedule.import,
                        schedOK : -1 === importSchedule.sched.error,
                        next : moment(later.schedule(importSchedule.sched).next(1)).format('ddd, MMM Do, YYYY @ h:m:ss a'),
                        watchFile : _.isString(importSchedule.watchFile) ? importSchedule.watchFile : 'None',
                    },
                    'Import schedule loaded'
                );

                if(importSchedule.sched) {
                    this.importTimer = later.setInterval( () => {
                        tryImportNow('Performing scheduled message import/toss...');
                    }, importSchedule.sched);
                }

                if(_.isString(importSchedule.watchFile)) {
                    const watcher = sane(
                        paths.dirname(importSchedule.watchFile),
                        {
                            glob : `**/${paths.basename(importSchedule.watchFile)}`
                        }
                    );

                    [ 'change', 'add', 'delete' ].forEach(event => {
                        watcher.on(event, (fileName, fileRoot) => {
                            const eventPath = paths.join(fileRoot, fileName);
                            if(eventPath === importSchedule.watchFile) {
                                tryImportNow('Performing import/toss due to @watch', { eventPath, event } );
                            }
                        });
                    });

                    //
                    // If the watch file already exists, kick off now
                    // https://github.com/NuSkooler/enigma-bbs/issues/122
                    //
                    //  fix: was fse.exists() -- fs-extra is not required by this
                    //  module; fs.access() provides the equivalent existence check
                    fs.access(importSchedule.watchFile, err => {
                        if(!err) {
                            tryImportNow('Performing import/toss due to @watch', { eventPath : importSchedule.watchFile, event : 'initial exists' } );
                        }
                    });
                }
            }
        }

        FTNMessageScanTossModule.super_.prototype.startup.call(this, cb);
    });
};
|
|
|
|
|
|
|
|
//  Shuts the scanner/tosser down: clears any scheduled import/export timers
//  and removes the temporary directories created at startup. The parent
//  module's shutdown (and thus |cb|) is invoked exactly once, after the
//  temp cleanup completes.
FTNMessageScanTossModule.prototype.shutdown = function(cb) {
    Log.info('FidoNet Scanner/Tosser shutting down');

    if(this.exportTimer) {
        this.exportTimer.clear();
    }

    if(this.importTimer) {
        this.importTimer.clear();
    }

    //
    // Clean up temp dir/files we created
    //
    //  (parameter renamed from |paths| to avoid shadowing the path module alias)
    temptmp.cleanup( cleanedPaths => {
        const fullStats = {
            exportDir : this.exportTempDir,
            importTemp : this.importTempDir,
            paths : cleanedPaths,
            sessionId : temptmp.sessionId,
        };

        Log.trace(fullStats, 'Temporary directories cleaned up');

        //  fix: super_.shutdown() was previously ALSO called synchronously
        //  after temptmp.cleanup(), causing |cb| to fire twice. It must only
        //  run here, once cleanup has finished.
        FTNMessageScanTossModule.super_.prototype.shutdown.call(this, cb);
    });
};
|
|
|
|
|
2016-03-01 05:32:51 +00:00
|
|
|
//  Imports from both the standard and secure inbound directories. A failure
//  in one directory is logged but does not prevent processing the other.
FTNMessageScanTossModule.prototype.performImport = function(cb) {
    if(!this.hasValidConfiguration()) {
        return cb(new Error('Missing or invalid configuration'));
    }

    const self = this;

    async.each( [ 'inbound', 'secInbound' ], (inboundType, nextDir) => {
        self.importFromDirectory(inboundType, self.moduleConfig.paths[inboundType], err => {
            //  fix: errors here were previously discarded without any trace
            if(err) {
                Log.warn( { error : err.message, inboundType : inboundType }, 'Error importing from inbound directory');
            }
            return nextDir(null); // always try the other inbound
        });
    }, cb);
};
|
|
|
|
|
2016-02-29 05:04:03 +00:00
|
|
|
//
// We're only concerned with areas related to FTN. For each area, loop though
// and let's find out what messages need exported.
//
//  Runs the EchoMail exporter then the NetMail exporter in sequence; a
//  failure in one type is logged and never prevents trying the next.
FTNMessageScanTossModule.prototype.performExport = function(cb) {
    if(!this.hasValidConfiguration()) {
        return cb(new Error('Missing or invalid configuration'));
    }

    const self = this;
    const exportTypes = [ 'EchoMail', 'NetMail' ];

    async.eachSeries(exportTypes, (type, nextType) => {
        self[`perform${type}Export`]( err => {
            if(err) {
                Log.warn( { error : err.message, type : type }, 'Error(s) during export' );
            }
            return nextType(null); // try next, always
        });
    },
    () => {
        return cb(null);
    });
};
|
|
|
|
|
2016-02-21 00:57:38 +00:00
|
|
|
//
// This module works off schedules, but we do support @immediate for export
//
//  Called when a new message is recorded locally; when @immediate export is
//  enabled, pushes NetMail/EchoMail out to uplinks right away (guarded by
//  the single export slot).
FTNMessageScanTossModule.prototype.record = function(message) {
    if(true !== this.exportImmediate || !this.hasValidConfiguration()) {
        return;
    }

    const info = { uuid : message.uuid, subject : message.subject };

    const exportLog = err => {
        if(err) {
            Log.warn(info, 'Failed exporting message');
        } else {
            Log.info(info, 'Message exported');
        }
    };

    if(this.isNetMailMessage(message)) {
        info.type = 'NetMail';

        if(this.exportingStart()) {
            this.exportNetMailMessagesToUplinks( [ message.uuid ], err => {
                this.exportingEnd( () => exportLog(err) );
            });
        }
        return;
    }

    if(!message.areaTag) {
        return;
    }

    info.type = 'EchoMail';

    const areaConfig = Config().messageNetworks.ftn.areas[message.areaTag];
    if(!this.isAreaConfigValid(areaConfig)) {
        return;
    }

    if(this.exportingStart()) {
        this.exportEchoMailMessagesToUplinks( [ message.uuid ], areaConfig, err => {
            this.exportingEnd( () => exportLog(err) );
        });
    }
};
|