2016-02-10 05:30:59 +00:00
|
|
|
/* jslint node: true */
|
|
|
|
'use strict';
|
|
|
|
|
|
|
|
// ENiGMA½
|
2017-03-09 05:37:02 +00:00
|
|
|
const MessageScanTossModule = require('../msg_scan_toss_module.js').MessageScanTossModule;
|
|
|
|
const Config = require('../config.js').config;
|
|
|
|
const ftnMailPacket = require('../ftn_mail_packet.js');
|
|
|
|
const ftnUtil = require('../ftn_util.js');
|
|
|
|
const Address = require('../ftn_address.js');
|
|
|
|
const Log = require('../logger.js').log;
|
|
|
|
const ArchiveUtil = require('../archive_util.js');
|
|
|
|
const msgDb = require('../database.js').dbs.message;
|
|
|
|
const Message = require('../message.js');
|
|
|
|
const TicFileInfo = require('../tic_file_info.js');
|
|
|
|
const Errors = require('../enig_error.js').Errors;
|
|
|
|
const FileEntry = require('../file_entry.js');
|
|
|
|
const scanFile = require('../file_base_area.js').scanFile;
|
|
|
|
const getFileAreaByTag = require('../file_base_area.js').getFileAreaByTag;
|
|
|
|
const getDescFromFileName = require('../file_base_area.js').getDescFromFileName;
|
|
|
|
const copyFileWithCollisionHandling = require('../file_util.js').copyFileWithCollisionHandling;
|
|
|
|
const getAreaStorageDirectoryByTag = require('../file_base_area.js').getAreaStorageDirectoryByTag;
|
|
|
|
const isValidStorageTag = require('../file_base_area.js').isValidStorageTag;
|
2016-02-16 00:56:05 +00:00
|
|
|
|
2017-03-09 05:37:02 +00:00
|
|
|
// deps
|
2016-04-15 04:11:01 +00:00
|
|
|
const moment = require('moment');
|
|
|
|
const _ = require('lodash');
|
|
|
|
const paths = require('path');
|
|
|
|
const async = require('async');
|
2017-05-20 03:20:19 +00:00
|
|
|
const fs = require('graceful-fs');
|
2016-04-15 04:11:01 +00:00
|
|
|
const later = require('later');
|
2017-01-30 02:56:46 +00:00
|
|
|
const temptmp = require('temptmp').createTrackedSession('ftn_bso');
|
2016-04-15 04:11:01 +00:00
|
|
|
const assert = require('assert');
|
2017-10-03 03:06:53 +00:00
|
|
|
const sane = require('sane');
|
2016-04-15 04:11:01 +00:00
|
|
|
const fse = require('fs-extra');
|
|
|
|
const iconv = require('iconv-lite');
|
2017-01-31 07:17:19 +00:00
|
|
|
const uuidV4 = require('uuid/v4');
|
2016-02-10 05:30:59 +00:00
|
|
|
|
|
|
|
exports.moduleInfo = {
|
2016-02-29 05:04:03 +00:00
|
|
|
name : 'FTN BSO',
|
|
|
|
desc : 'BSO style message scanner/tosser for FTN networks',
|
2016-02-10 05:30:59 +00:00
|
|
|
author : 'NuSkooler',
|
|
|
|
};
|
|
|
|
|
2016-02-21 00:57:38 +00:00
|
|
|
/*
|
2016-03-15 04:29:41 +00:00
|
|
|
:TODO:
|
|
|
|
* Support (approx) max bundle size
|
|
|
|
* Support NetMail
|
|
|
|
* NetMail needs explicit isNetMail() check
|
|
|
|
* NetMail filename / location / etc. is still unknown - need to post on groups & get real answers
|
2016-10-03 03:40:37 +00:00
|
|
|
* Validate packet passwords!!!!
|
|
|
|
=> secure vs insecure landing areas
|
2016-03-15 04:29:41 +00:00
|
|
|
|
2016-02-21 00:57:38 +00:00
|
|
|
*/
|
|
|
|
|
2016-02-10 05:30:59 +00:00
|
|
|
//  Module entry point; the loader instantiates this constructor directly.
exports.getModule = FTNMessageScanTossModule;
|
|
|
|
|
2016-02-29 05:04:03 +00:00
|
|
|
//  Matches a trailing "@watch:<path>" or "@immediate" directive (optionally
//  preceded by "or ") at the end of a schedule string. Capture group 1 is the
//  directive itself, group 2 the remainder (e.g. the watched file path).
//  See parseScheduleString() for usage.
const SCHEDULE_REGEXP = /(?:^|or )?(@watch\:|@immediate)([^\0]+)?$/;
|
|
|
|
|
2016-02-10 05:30:59 +00:00
|
|
|
function FTNMessageScanTossModule() {
|
|
|
|
MessageScanTossModule.call(this);

//  Capture |this| for use inside the many nested callbacks below.
let self = this;

//  Shared archiver utility (used for ArcMail bundle compression).
this.archUtil = ArchiveUtil.getInstance();

//  Pull in module-specific configuration, when present. Note: when absent,
//  |this.moduleConfig| stays undefined — methods below assume it exists
//  once the module is actually enabled/run.
if(_.has(Config, 'scannerTossers.ftn_bso')) {
    this.moduleConfig = Config.scannerTossers.ftn_bso;
}
|
|
|
|
|
2016-03-01 05:32:51 +00:00
|
|
|
this.getDefaultNetworkName = function() {
    //
    //  Resolve the default FTN network name: an explicitly configured
    //  |defaultNetwork| wins; otherwise, when exactly one network is
    //  defined, that one is the implicit default. Returns undefined when
    //  the default is ambiguous (multiple networks, none marked default).
    //
    const explicitDefault = this.moduleConfig.defaultNetwork;
    if(explicitDefault) {
        return explicitDefault.toLowerCase();
    }

    const definedNetworks = Object.keys(Config.messageNetworks.ftn.networks);
    if(definedNetworks.length === 1) {
        return definedNetworks[0].toLowerCase();
    }
};
|
|
|
|
|
2016-04-02 06:28:25 +00:00
|
|
|
|
|
|
|
this.getDefaultZone = function(networkName) {
    //
    //  Default zone for |networkName|: an explicit numeric |defaultZone|
    //  from config wins; otherwise fall back to the zone of the network's
    //  local address. Returns undefined when neither is available.
    //
    const networkConf = Config.messageNetworks.ftn.networks[networkName];

    if(_.isNumber(networkConf.defaultZone)) {
        return networkConf.defaultZone;
    }

    //  non-explicit: default to local address zone
    const networkLocalAddress = networkConf.localAddress;
    if(networkLocalAddress) {
        return Address.fromString(networkLocalAddress).zone;
    }
};
|
|
|
|
|
|
|
|
/*
|
2016-02-21 00:57:38 +00:00
|
|
|
this.isDefaultDomainZone = function(networkName, address) {
|
2016-04-02 06:28:25 +00:00
|
|
|
const defaultNetworkName = this.getDefaultNetworkName();
|
2016-03-01 05:32:51 +00:00
|
|
|
return(networkName === defaultNetworkName && address.zone === this.moduleConfig.defaultZone);
|
2016-03-04 05:54:32 +00:00
|
|
|
};
|
2016-04-02 06:28:25 +00:00
|
|
|
*/
|
2016-03-04 05:54:32 +00:00
|
|
|
|
2016-03-09 05:30:04 +00:00
|
|
|
this.getNetworkNameByAddress = function(remoteAddress) {
    //
    //  Find the configured network whose local address exactly equals
    //  |remoteAddress|; returns the network's config key or undefined.
    //
    return _.findKey(Config.messageNetworks.ftn.networks, network => {
        const parsedLocal = Address.fromString(network.localAddress);
        if(_.isUndefined(parsedLocal)) {
            return false;
        }
        return parsedLocal.isEqual(remoteAddress);
    });
};
|
2016-02-21 00:57:38 +00:00
|
|
|
|
2016-03-09 05:30:04 +00:00
|
|
|
this.getNetworkNameByAddressPattern = function(remoteAddressPattern) {
    //
    //  Like getNetworkNameByAddress(), but matches the network's local
    //  address against a wildcard pattern instead of requiring equality.
    //
    return _.findKey(Config.messageNetworks.ftn.networks, network => {
        const parsedLocal = Address.fromString(network.localAddress);
        if(_.isUndefined(parsedLocal)) {
            return false;
        }
        return parsedLocal.isPatternMatch(remoteAddressPattern);
    });
};
|
|
|
|
|
|
|
|
this.getLocalAreaTagByFtnAreaTag = function(ftnAreaTag) {
    //
    //  Map an incoming FTN echo area tag to our local area tag. FTN area
    //  tags are case-insensitive, so comparison is done in upper case.
    //
    const wantedTag = ftnAreaTag.toUpperCase(); // always compare upper
    return _.findKey(
        Config.messageNetworks.ftn.areas,
        areaConf => wantedTag === areaConf.tag.toUpperCase()
    );
};
|
|
|
|
|
2016-03-12 07:22:06 +00:00
|
|
|
this.getExportType = function(nodeConfig) {
    //
    //  Export type for a node ('crash', 'hold', ...), lower-cased.
    //  Defaults to 'crash' when not configured as a string.
    //
    const configured = nodeConfig.exportType;
    if(_.isString(configured)) {
        return configured.toLowerCase();
    }
    return 'crash';
};
|
|
|
|
|
2016-03-09 05:30:04 +00:00
|
|
|
/*
|
|
|
|
this.getSeenByAddresses = function(messageSeenBy) {
|
|
|
|
if(!_.isArray(messageSeenBy)) {
|
|
|
|
messageSeenBy = [ messageSeenBy ];
|
|
|
|
}
|
|
|
|
|
|
|
|
let seenByAddrs = [];
|
|
|
|
messageSeenBy.forEach(sb => {
|
|
|
|
seenByAddrs = seenByAddrs.concat(ftnUtil.parseAbbreviatedNetNodeList(sb));
|
|
|
|
});
|
|
|
|
return seenByAddrs;
|
|
|
|
};
|
|
|
|
*/
|
|
|
|
|
|
|
|
this.messageHasValidMSGID = function(msg) {
    //  True when |msg| carries a non-empty FTN MSGID kludge.
    const msgId = msg.meta.FtnKludge.MSGID;
    return _.isString(msgId) && msgId.length > 0;
};
|
|
|
|
|
2016-04-02 06:28:25 +00:00
|
|
|
/*
|
2016-02-24 04:56:22 +00:00
|
|
|
this.getOutgoingPacketDir = function(networkName, destAddress) {
|
2016-02-21 00:57:38 +00:00
|
|
|
let dir = this.moduleConfig.paths.outbound;
|
2016-02-24 04:56:22 +00:00
|
|
|
if(!this.isDefaultDomainZone(networkName, destAddress)) {
|
|
|
|
const hexZone = `000${destAddress.zone.toString(16)}`.substr(-3);
|
2016-02-21 00:57:38 +00:00
|
|
|
dir = paths.join(dir, `${networkName.toLowerCase()}.${hexZone}`);
|
|
|
|
}
|
|
|
|
return dir;
|
|
|
|
};
|
2016-04-02 06:28:25 +00:00
|
|
|
*/
|
|
|
|
|
|
|
|
this.getOutgoingPacketDir = function(networkName, destAddress) {
    //
    //  Compute the BSO outbound directory for |networkName|/|destAddress|.
    //
    //  * The default network uses "outbound"; other networks use their
    //    (lower-cased) network name as the directory base.
    //  * Addresses outside the network's default zone get a 3-digit hex
    //    zone suffix, e.g. "outbound.001" or "othernet.0aa".
    //
    networkName = networkName.toLowerCase();

    const defaultNetworkName    = this.getDefaultNetworkName();
    const defaultZone           = this.getDefaultZone(networkName);

    const zoneSuffix = (defaultZone === destAddress.zone) ?
        '' :
        '.' + `000${destAddress.zone.toString(16)}`.substr(-3);

    const dirBase = (defaultNetworkName === networkName) ? 'outbound' : networkName;

    return paths.join(this.moduleConfig.paths.outbound, `${dirBase}${zoneSuffix}`);
};
|
2016-02-21 00:57:38 +00:00
|
|
|
|
2016-12-07 03:51:48 +00:00
|
|
|
this.getOutgoingPacketFileName = function(basePath, messageId, isTemp, fileCase) {
    //
    //  Build an outgoing packet path under |basePath|.
    //
    //  Constraints / background:
    //  * DOS 8.3 filenames are required — legacy systems receiving the
    //    packet may not understand LFNs.
    //  * Names must be unique enough to avoid collisions both in bundles
    //    and on the remote system.
    //  * Other packages use various schemes: HEX CRC16/32 of data, HEX UNIX
    //    timestamps, Mystic's Hex8(day + seconds past midnight + hundredths)
    //    (https://groups.google.com/forum/#!searchin/alt.bbs.mystic/netmail$20filename/alt.bbs.mystic/m1xLnY8i1pU/YnG2excdl6MJ),
    //    SBBSEcho's DDHHMMSS (https://github.com/ftnapps/pkg-sbbs/blob/master/docs/fidonet.txt).
    //    We reuse our existing 8-character serial number generation that is
    //    also used for FTS-0009.001 MSGIDs.
    //
    //  Temporary packets get a '.pk_' extension, final packets '.pkt'; the
    //  whole name is upper-cased when |fileCase| is 'upper'.
    //
    const serial    = ftnUtil.getMessageSerialNumber(messageId);
    const ext       = (true === isTemp) ? 'pk_' : 'pkt';

    let fileName = `${serial}.${ext}`;
    if('upper' === fileCase) {
        fileName = fileName.toUpperCase();
    }

    return paths.join(basePath, fileName);
};
|
2016-03-13 17:11:51 +00:00
|
|
|
|
2016-12-07 03:51:48 +00:00
|
|
|
this.getOutgoingFlowFileExtension = function(destAddress, flowType, exportType, fileCase) {
    //
    //  Map |flowType| to its BSO flow-file extension.
    //
    //  destAddress - currently unused; retained for interface stability
    //  flowType    - 'mail', 'ref', 'busy', 'request', or 'requests'
    //  exportType  - e.g. 'crash'/'hold'; its first letter forms the 'cut',
    //                'hlo', etc. style extensions for 'mail'/'ref'
    //  fileCase    - 'upper' to upper-case the result
    //
    //  Returns undefined for an unknown |flowType|.
    //
    let ext;

    switch(flowType) {
        case 'mail'     : ext = `${exportType.toLowerCase()[0]}ut`; break;
        case 'ref'      : ext = `${exportType.toLowerCase()[0]}lo`; break;
        case 'busy'     : ext = 'bsy'; break;
        case 'request'  : ext = 'req'; break;
        case 'requests' : ext = 'hrq'; break;
    }

    //  Bug fix: an unknown |flowType| previously left |ext| undefined and
    //  then threw (TypeError: undefined.toUpperCase) when fileCase='upper';
    //  guard so unknown types consistently return undefined instead.
    if(ext && 'upper' === fileCase) {
        ext = ext.toUpperCase();
    }

    return ext;
};
|
|
|
|
|
2016-12-07 03:51:48 +00:00
|
|
|
this.getOutgoingFlowFileName = function(basePath, destAddress, flowType, exportType, fileCase) {
    //
    //  Build the full path of a BSO flow file (e.g. ????????.cut/.flo/...)
    //  for |destAddress| under |basePath|.
    //
    let basename;

    const ext = self.getOutgoingFlowFileExtension(
        destAddress,
        flowType,
        exportType,
        fileCase
    );

    if(destAddress.point) {
        //  FIXME(review): point addresses are NOT handled — this branch is
        //  empty, |basename| stays undefined, and the result becomes
        //  "undefined.<ext>" (or throws on toUpperCase() below when
        //  fileCase='upper'). The proper point flow-file naming convention
        //  needs to be determined and implemented.
    } else {
        //
        //  Use |destAddress| nnnnNNNN.??? where nnnn is dest net and NNNN is dest
        //  node. This seems to match what Mystic does
        //
        basename =
            `0000${destAddress.net.toString(16)}`.substr(-4) +
            `0000${destAddress.node.toString(16)}`.substr(-4);
    }

    if('upper' === fileCase) {
        basename = basename.toUpperCase();
    }

    return paths.join(basePath, `${basename}.${ext}`);
};
|
|
|
|
|
|
|
|
this.flowFileAppendRefs = function(filePath, fileRefs, directive, cb) {
    //
    //  Append one "<directive><ref>" line per entry in |fileRefs| to the
    //  flow file at |filePath| (created if missing). Calls back with any
    //  fs error.
    //
    let appendLines = '';
    for(const ref of fileRefs) {
        appendLines += `${directive}${ref}\n`;
    }

    fs.appendFile(filePath, appendLines, err => {
        return cb(err);
    });
};
|
|
|
|
|
|
|
|
this.getOutgoingBundleFileName = function(basePath, sourceAddress, destAddress, cb) {
    //
    //  Pick a free ArcMail bundle filename under |basePath| and call back
    //  with cb(err, fullPath).
    //
    //  Base filename is constructed as such:
    //  * If this |destAddress| is *not* a point address, we use NNNNnnnn where
    //    NNNN is 0 padded hex of dest net - source net and nnnn is 0 padded
    //    hex of dest node - source node.
    //  * If |destAddress| is a point, NNNN becomes 0000 and nnnn becomes 'p' +
    //    3 digit 0 padded hex point
    //
    //  Extension is dd? where dd is Su...Mo and ? is 0...Z as collisions arise
    //
    let basename;
    if(destAddress.point) {
        //  NOTE(review): despite the "hex" wording above, the point number is
        //  interpolated in decimal here — confirm which is intended.
        const pointHex = `000${destAddress.point}`.substr(-3);
        basename = `0000p${pointHex}`;
    } else {
        basename =
            `0000${Math.abs(sourceAddress.net - destAddress.net).toString(16)}`.substr(-4) +
            `0000${Math.abs(sourceAddress.node - destAddress.node).toString(16)}`.substr(-4);
    }

    //
    //  We need to now find the first entry that does not exist starting
    //  with dd0 to ddz
    //
    const EXT_SUFFIXES = '0123456789abcdefghijklmnopqrstuvwxyz'.split('');
    //  'dd' is moment's two-letter weekday (Su, Mo, ...), lower-cased
    let fileName = `${basename}.${moment().format('dd').toLowerCase()}`;
    async.detectSeries(EXT_SUFFIXES, (suffix, callback) => {
        const checkFileName = fileName + suffix;
        //  a stat() ENOENT means the name is free — select this suffix
        fs.stat(paths.join(basePath, checkFileName), err => {
            callback(null, (err && 'ENOENT' === err.code) ? true : false);
        });
    }, (err, finalSuffix) => {
        if(finalSuffix) {
            return cb(null, paths.join(basePath, fileName + finalSuffix));
        }

        //  all 36 suffixes for today's weekday are taken
        return cb(new Error('Could not acquire a bundle filename!'));
    });
};
|
2016-02-29 05:04:03 +00:00
|
|
|
|
2016-02-21 00:57:38 +00:00
|
|
|
this.prepareMessage = function(message, options) {
    //
    //  Set various FTN kludges/properties on |message| ahead of packet
    //  export: origin/destination node+net, tear line, attribute flags,
    //  Via (NetMail) or SEEN-BY/PATH/AREA/Origin (EchoMail), MSGID, TZUTC,
    //  TID, and CHRS/encoding selection. Mutates |message.meta| in place
    //  and stores the chosen encoding on |options.encoding| for later use.
    //
    //  options is expected to provide: network (with localAddress),
    //  destAddress, and nodeConfig.
    //
    message.meta.FtnProperty = message.meta.FtnProperty || {};
    message.meta.FtnKludge = message.meta.FtnKludge || {};

    message.meta.FtnProperty.ftn_orig_node = options.network.localAddress.node;
    message.meta.FtnProperty.ftn_dest_node = options.destAddress.node;
    message.meta.FtnProperty.ftn_orig_network = options.network.localAddress.net;
    message.meta.FtnProperty.ftn_dest_network = options.destAddress.net;
    message.meta.FtnProperty.ftn_cost = 0;
    message.meta.FtnProperty.ftn_tear_line = ftnUtil.getTearLine();

    //  :TODO: Need an explicit isNetMail() check
    //  NOTE(review): private is treated as NetMail below — confirm this
    //  assumption holds once an explicit isNetMail() exists.
    let ftnAttribute =
        ftnMailPacket.Packet.Attribute.Local; // message from our system

    if(message.isPrivate()) {
        ftnAttribute |= ftnMailPacket.Packet.Attribute.Private;

        //
        //  NetMail messages need a FRL-1005.001 "Via" line
        //  http://ftsc.org/docs/frl-1005.001
        //
        //  Normalize any existing scalar Via kludge to an array first.
        if(_.isString(message.meta.FtnKludge.Via)) {
            message.meta.FtnKludge.Via = [ message.meta.FtnKludge.Via ];
        }
        message.meta.FtnKludge.Via = message.meta.FtnKludge.Via || [];
        message.meta.FtnKludge.Via.push(ftnUtil.getVia(options.network.localAddress));
    } else {
        //
        //  Set appropriate attribute flag for export type
        //
        switch(this.getExportType(options.nodeConfig)) {
            case 'crash' : ftnAttribute |= ftnMailPacket.Packet.Attribute.Crash; break;
            case 'hold' : ftnAttribute |= ftnMailPacket.Packet.Attribute.Hold; break;
            //  :TODO: Others?
        }

        //
        //  EchoMail requires some additional properties & kludges
        //
        message.meta.FtnProperty.ftn_origin = ftnUtil.getOrigin(options.network.localAddress);
        message.meta.FtnProperty.ftn_area = Config.messageNetworks.ftn.areas[message.areaTag].tag;

        //
        //  When exporting messages, we should create/update SEEN-BY
        //  with remote address(s) we are exporting to.
        //
        const seenByAdditions =
            [ `${options.network.localAddress.net}/${options.network.localAddress.node}` ].concat(Config.messageNetworks.ftn.areas[message.areaTag].uplinks);
        message.meta.FtnProperty.ftn_seen_by =
            ftnUtil.getUpdatedSeenByEntries(message.meta.FtnProperty.ftn_seen_by, seenByAdditions);

        //
        //  And create/update PATH for ourself
        //
        message.meta.FtnKludge.PATH =
            ftnUtil.getUpdatedPathEntries(message.meta.FtnKludge.PATH, options.network.localAddress);
    }

    message.meta.FtnProperty.ftn_attr_flags = ftnAttribute;

    //
    //  Additional kludges
    //
    //  Check for existence of MSGID as we may already have stored it from a previous
    //  export that failed to finish
    //
    if(!message.meta.FtnKludge.MSGID) {
        message.meta.FtnKludge.MSGID = ftnUtil.getMessageIdentifier(message, options.network.localAddress);
    }

    message.meta.FtnKludge.TZUTC = ftnUtil.getUTCTimeZoneOffset();

    //
    //  According to FSC-0046:
    //
    //  "When a Conference Mail processor adds a TID to a message, it may not
    //  add a PID. An existing TID should, however, be replaced. TIDs follow
    //  the same format used for PIDs, as explained above."
    //
    message.meta.FtnKludge.TID = ftnUtil.getProductIdentifier();

    //
    //  Determine CHRS and actual internal encoding name. If the message has an
    //  explicit encoding set, use it. Otherwise, try to preserve any CHRS/encoding already set.
    //
    let encoding = options.nodeConfig.encoding || Config.scannerTossers.ftn_bso.packetMsgEncoding || 'utf8';
    const explicitEncoding = _.get(message.meta, 'System.explicit_encoding');
    if(explicitEncoding) {
        encoding = explicitEncoding;
    } else if(message.meta.FtnKludge.CHRS) {
        const encFromChars = ftnUtil.getEncodingFromCharacterSetIdentifier(message.meta.FtnKludge.CHRS);
        if(encFromChars) {
            encoding = encFromChars;
        }
    }

    //
    //  Ensure we ended up with something useable. If not, back to utf8!
    //
    if(!iconv.encodingExists(encoding)) {
        Log.debug( { encoding : encoding }, 'Unknown encoding. Falling back to utf8');
        encoding = 'utf8';
    }

    options.encoding = encoding; // save for later
    message.meta.FtnKludge.CHRS = ftnUtil.getCharacterSetIdentifierByEncoding(encoding);
    //  :TODO: FLAGS kludge?
};
|
|
|
|
|
2016-03-15 04:29:41 +00:00
|
|
|
this.setReplyKludgeFromReplyToMsgId = function(message, cb) {
    //
    //  Look up the MSGID kludge of the message referenced by
    //  |message.replyToMsgId|, if any. When found, mirror it into this
    //  message's REPLY kludge. This method never fails: any lookup error
    //  simply leaves REPLY unset.
    //
    if(0 === message.replyToMsgId) {
        return cb(null); // not a reply; nothing to do
    }

    Message.getMetaValuesByMessageId(message.replyToMsgId, 'FtnKludge', 'MSGID', (err, msgIdVal) => {
        if(err) {
            return cb(null); // this method always passes
        }

        assert(_.isString(msgIdVal), 'Expected string but got ' + (typeof msgIdVal) + ' (' + msgIdVal + ')');

        //  got a MSGID - create a REPLY
        message.meta.FtnKludge.REPLY = msgIdVal;

        return cb(null); // this method always passes
    });
};
|
2016-02-21 00:57:38 +00:00
|
|
|
|
|
|
|
// check paths, Addresses, etc.
|
2016-02-29 05:04:03 +00:00
|
|
|
this.isAreaConfigValid = function(areaConfig) {
    //
    //  Validate an area configuration entry: it must carry string |tag|
    //  and |network| members plus an |uplinks| array. A space-separated
    //  |uplinks| string is normalized to an array in place.
    //
    if(!areaConfig) {
        return false;
    }

    const hasRequiredStrings = _.isString(areaConfig.tag) && _.isString(areaConfig.network);
    if(!hasRequiredStrings) {
        return false;
    }

    //  allow "uplink1 uplink2 ..." shorthand
    if(_.isString(areaConfig.uplinks)) {
        areaConfig.uplinks = areaConfig.uplinks.split(' ');
    }

    return _.isArray(areaConfig.uplinks);
};
|
2016-02-29 05:04:03 +00:00
|
|
|
|
|
|
|
|
|
|
|
this.hasValidConfiguration = function() {
    //
    //  Minimal sanity check that this module is configured well enough to
    //  run: we need outbound node entries and FTN area mappings.
    //
    const haveNodes = _.has(this, 'moduleConfig.nodes');
    const haveAreas = _.has(Config, 'messageNetworks.ftn.areas');

    //  :TODO: need to check more!
    return haveNodes && haveAreas;
};
|
|
|
|
|
|
|
|
this.parseScheduleString = function(schedStr) {
    //
    //  Parse a schedule string into { sched?, watchFile?, immediate? }.
    //
    //  The string may combine a later.js text schedule with trailing
    //  "@watch:<path>" / "@immediate" directives (see SCHEDULE_REGEXP),
    //  e.g. "every 2 hours or @watch:/path/to/file". Returns undefined
    //  when nothing useful could be parsed.
    //
    if(!schedStr) {
        return; // nothing to parse!
    }

    let schedule = {};

    const m = SCHEDULE_REGEXP.exec(schedStr);
    if(m) {
        //  strip the directive portion; the remainder (if any) is the
        //  later.js schedule text
        schedStr = schedStr.substr(0, m.index).trim();

        if('@watch:' === m[1]) {
            schedule.watchFile = m[2];
        } else if('@immediate' === m[1]) {
            schedule.immediate = true;
        }
    }

    if(schedStr.length > 0) {
        const sched = later.parse.text(schedStr);
        //  later.js signals success with error === -1
        if(-1 === sched.error) {
            schedule.sched = sched;
        }
    }

    //  return undefined if we couldn't parse out anything useful
    if(!_.isEmpty(schedule)) {
        return schedule;
    }
};
|
|
|
|
|
|
|
|
this.getAreaLastScanId = function(areaTag, cb) {
    //
    //  Fetch the last-scanned message ID for |areaTag| from the
    //  message_area_last_scan table (scoped to this scanner/tosser).
    //  Calls back with cb(err, messageId); 0 when no row exists yet.
    //
    const sql =
        `SELECT area_tag, message_id
        FROM message_area_last_scan
        WHERE scan_toss = "ftn_bso" AND area_tag = ?
        LIMIT 1;`;

    msgDb.get(sql, [ areaTag ], (err, row) => {
        cb(err, row ? row.message_id : 0);
    });
};
|
|
|
|
|
2016-02-29 05:35:43 +00:00
|
|
|
this.setAreaLastScanId = function(areaTag, lastScanId, cb) {
    //
    //  Persist |lastScanId| as the last-scanned message ID for |areaTag|.
    //  REPLACE INTO inserts or overwrites the existing row for this
    //  scanner/tosser + area pair. Calls back with cb(err).
    //
    const sql =
        `REPLACE INTO message_area_last_scan (scan_toss, area_tag, message_id)
        VALUES ("ftn_bso", ?, ?);`;

    msgDb.run(sql, [ areaTag, lastScanId ], err => {
        cb(err);
    });
};
|
|
|
|
|
2016-03-15 04:29:41 +00:00
|
|
|
this.getNodeConfigKeyByAddress = function(uplink) {
    //
    //  Return the first configured node key whose address pattern matches
    //  |uplink|, or undefined when nothing matches.
    //
    //  :TODO: sort by least # of '*' & take top?
    //
    const matchesUplink = addr => Address.fromString(addr).isPatternMatch(uplink);
    return Object.keys(this.moduleConfig.nodes).find(matchesUplink);
};
|
|
|
|
|
|
|
|
this.exportMessagesByUuid = function(messageUuids, exportOpts, cb) {
    //
    //  Export the messages in |messageUuids| to one or more packet files in
    //  the export temp dir, calling back cb(err, exportedFiles).
    //
    //  This method has a lot of madness going on:
    //  - Try to stuff messages into packets until we've hit the target size
    //  - We need to wait for write streams to finish before proceeding in many cases
    //    or data will be cut off when closing and creating a new stream
    //
    let exportedFiles = [];
    let currPacketSize = self.moduleConfig.packetTargetByteSize;
    let packet;
    let ws;
    let remainMessageBuf;   // message entry that overflowed the current packet
    let remainMessageId;
    //  no archiver configured -> write final .pkt directly (not a temp .pk_)
    const createTempPacket = !_.isString(exportOpts.nodeConfig.archiveType) || 0 === exportOpts.nodeConfig.archiveType.length;

    //  Terminate the current packet and wait for the stream to flush.
    function finalizePacket(cb) {
        packet.writeTerminator(ws);
        ws.end();
        ws.once('finish', () => {
            return cb(null);
        });
    }

    //  Create a fresh packet + header + write stream named from
    //  |messageIdSeed|; records the file and returns the header size
    //  written. (DRYs up the previously duplicated creation code.)
    function beginNewPacket(messageIdSeed) {
        packet = new ftnMailPacket.Packet();

        const packetHeader = new ftnMailPacket.PacketHeader(
            exportOpts.network.localAddress,
            exportOpts.destAddress,
            exportOpts.nodeConfig.packetType);

        packetHeader.password = exportOpts.nodeConfig.packetPassword || '';

        //  use |messageIdSeed| for filename seed
        const pktFileName = self.getOutgoingPacketFileName(
            self.exportTempDir,
            messageIdSeed,
            createTempPacket,
            exportOpts.fileCase //  fixed: was misspelled 'filleCase' in one call site
        );

        exportedFiles.push(pktFileName);

        ws = fs.createWriteStream(pktFileName);

        return packet.writeHeader(ws, packetHeader);
    }

    async.each(messageUuids, (msgUuid, nextUuid) => {
        let message = new Message();

        async.series(
            [
                function finalizePrevious(callback) {
                    //  close out a full packet before starting the next message
                    if(packet && currPacketSize >= self.moduleConfig.packetTargetByteSize) {
                        return finalizePacket(callback);
                    } else {
                        callback(null);
                    }
                },
                function loadMessage(callback) {
                    message.load( { uuid : msgUuid }, err => {
                        if(err) {
                            return callback(err);
                        }

                        //  General preparation
                        self.prepareMessage(message, exportOpts);

                        self.setReplyKludgeFromReplyToMsgId(message, err => {
                            callback(err);
                        });
                    });
                },
                function createNewPacket(callback) {
                    if(currPacketSize >= self.moduleConfig.packetTargetByteSize) {
                        currPacketSize = beginNewPacket(message.messageId);

                        //  carry over a message that overflowed the last packet
                        if(remainMessageBuf) {
                            currPacketSize += packet.writeMessageEntry(ws, remainMessageBuf);
                            remainMessageBuf = null;
                        }
                    }

                    callback(null);
                },
                function appendMessage(callback) {
                    packet.getMessageEntryBuffer(message, exportOpts, (err, msgBuf) => {
                        if(err) {
                            return callback(err);
                        }

                        currPacketSize += msgBuf.length;

                        if(currPacketSize >= self.moduleConfig.packetTargetByteSize) {
                            //  overflow: defer this entry to the next packet
                            remainMessageBuf = msgBuf;
                            remainMessageId = message.messageId;
                        } else {
                            ws.write(msgBuf);
                        }

                        return callback(null);
                    });
                },
                function storeStateFlags0Meta(callback) {
                    //  mark the message as exported
                    message.persistMetaValue('System', 'state_flags0', Message.StateFlags0.Exported.toString(), err => {
                        callback(err);
                    });
                },
                function storeMsgIdMeta(callback) {
                    //
                    //  We want to store some meta as if we had imported
                    //  this message for later reference
                    //
                    if(message.meta.FtnKludge.MSGID) {
                        message.persistMetaValue('FtnKludge', 'MSGID', message.meta.FtnKludge.MSGID, err => {
                            callback(err);
                        });
                    } else {
                        callback(null);
                    }
                }
            ],
            err => {
                nextUuid(err);
            }
        );
    }, err => {
        if(err) {
            cb(err);
        } else {
            async.series(
                [
                    function terminateLast(callback) {
                        if(packet) {
                            return finalizePacket(callback);
                        } else {
                            callback(null);
                        }
                    },
                    function writeRemainPacket(callback) {
                        //  a final overflowed entry gets a packet of its own
                        if(remainMessageBuf) {
                            beginNewPacket(remainMessageId);
                            ws.write(remainMessageBuf);
                            return finalizePacket(callback);
                        } else {
                            callback(null);
                        }
                    }
                ],
                err => {
                    cb(err, exportedFiles);
                }
            );
        }
    });
};
|
|
|
|
|
|
|
|
//
// Export the messages identified by |messageUuids| to every uplink
// configured for |areaConfig|. For each uplink: packets are built in a
// temp area, optionally compressed into an ArcMail bundle, then moved
// into the BSO outgoing directory (with a flow file reference appended
// for bundles). Per-uplink failures are logged but do not abort the
// remaining uplinks; |cb| receives the final async.each error, if any.
//
this.exportMessagesToUplinks = function(messageUuids, areaConfig, cb) {
    async.each(areaConfig.uplinks, (uplink, nextUplink) => {
        const nodeConfigKey = self.getNodeConfigKeyByAddress(uplink);
        if(!nodeConfigKey) {
            // no node configuration matches this uplink address; skip it silently
            return nextUplink();
        }

        // per-uplink export options assembled from node + network config
        const exportOpts = {
            nodeConfig : self.moduleConfig.nodes[nodeConfigKey],
            network : Config.messageNetworks.ftn.networks[areaConfig.network],
            destAddress : Address.fromString(uplink),
            networkName : areaConfig.network,
            fileCase : self.moduleConfig.nodes[nodeConfigKey].fileCase || 'lower',
        };

        // lazily upgrade the configured local address from string -> Address
        // (note: this mutates the shared Config network object on first use)
        if(_.isString(exportOpts.network.localAddress)) {
            exportOpts.network.localAddress = Address.fromString(exportOpts.network.localAddress);
        }

        const outgoingDir = self.getOutgoingPacketDir(exportOpts.networkName, exportOpts.destAddress);
        const exportType = self.getExportType(exportOpts.nodeConfig);

        async.waterfall(
            [
                function createOutgoingDir(callback) {
                    // ensure the full BSO outgoing path exists (mkdir -p semantics)
                    fse.mkdirs(outgoingDir, err => {
                        callback(err);
                    });
                },
                function exportToTempArea(callback) {
                    // produces packet file(s) in the temp export dir; passes
                    // their paths to the next step
                    self.exportMessagesByUuid(messageUuids, exportOpts, callback);
                },
                function createArcMailBundle(exportedFileNames, callback) {
                    if(self.archUtil.haveArchiver(exportOpts.nodeConfig.archiveType)) {
                        // :TODO: support bundleTargetByteSize:
                        //
                        // Compress to a temp location then we'll move it in the next step
                        //
                        // Note that we must use the *final* output dir for getOutgoingBundleFileName()
                        // as it checks for collisions in bundle names!
                        //
                        self.getOutgoingBundleFileName(outgoingDir, exportOpts.network.localAddress, exportOpts.destAddress, (err, bundlePath) => {
                            if(err) {
                                return callback(err);
                            }

                            // adjust back to temp path
                            const tempBundlePath = paths.join(self.exportTempDir, paths.basename(bundlePath));

                            self.archUtil.compressTo(
                                exportOpts.nodeConfig.archiveType,
                                tempBundlePath,
                                exportedFileNames, err => {
                                    // downstream step now deals only with the bundle
                                    callback(err, [ tempBundlePath ] );
                                }
                            );
                        });
                    } else {
                        // no archiver configured/available: ship raw packet files
                        callback(null, exportedFileNames);
                    }
                },
                function moveFilesToOutgoing(exportedFileNames, callback) {
                    async.each(exportedFileNames, (oldPath, nextFile) => {
                        const ext = paths.extname(oldPath).toLowerCase();
                        if('.pk_' === ext.toLowerCase()) {
                            //
                            // For a given temporary .pk_ file, we need to move it to the outoing
                            // directory with the appropriate BSO style filename.
                            //
                            const newExt = self.getOutgoingFlowFileExtension(
                                exportOpts.destAddress,
                                'mail',
                                exportType,
                                exportOpts.fileCase
                            );

                            const newPath = paths.join(
                                outgoingDir,
                                `${paths.basename(oldPath, ext)}${newExt}`);

                            // loose packets need no flow file entry; just move them
                            fse.move(oldPath, newPath, nextFile);
                        } else {
                            const newPath = paths.join(outgoingDir, paths.basename(oldPath));
                            fse.move(oldPath, newPath, err => {
                                if(err) {
                                    Log.warn(
                                        { oldPath : oldPath, newPath : newPath, error : err.toString() },
                                        'Failed moving temporary bundle file!');

                                    // move failure is non-fatal for the batch; continue
                                    return nextFile();
                                }

                                //
                                // For bundles, we need to append to the appropriate flow file
                                //
                                const flowFilePath = self.getOutgoingFlowFileName(
                                    outgoingDir,
                                    exportOpts.destAddress,
                                    'ref',
                                    exportType,
                                    exportOpts.fileCase
                                );

                                // directive of '^' = delete file after transfer
                                self.flowFileAppendRefs(flowFilePath, [ newPath ], '^', err => {
                                    if(err) {
                                        Log.warn( { path : flowFilePath }, 'Failed appending flow reference record!');
                                    }
                                    nextFile();
                                });
                            });
                        }
                    }, callback);
                }
            ],
            err => {
                // :TODO: do something with |err| ?
                if(err) {
                    Log.warn(err.message);
                }
                // intentionally do not propagate per-uplink errors
                nextUplink();
            }
        );
    }, cb); // complete
};
|
2016-03-01 05:32:51 +00:00
|
|
|
|
2016-03-09 05:30:04 +00:00
|
|
|
this.setReplyToMsgIdFtnReplyKludge = function(message, cb) {
    //
    // Attempt to resolve a FTN REPLY kludge to a local message ID by
    // looking up a previously stored MSGID kludge meta value that matches,
    // and assign it to |message.replyToMsgId| when found.
    //
    // See also: http://ftsc.org/docs/fts-0009.001
    //
    const replyKludge = message.meta.FtnKludge.REPLY;

    if(!_.isString(replyKludge)) {
        // no REPLY kludge present; nothing to resolve
        return cb();
    }

    Message.getMessageIdsByMetaValue('FtnKludge', 'MSGID', replyKludge, (err, msgIds) => {
        if(msgIds && msgIds.length > 0) {
            if(msgIds.length === 1) {
                message.replyToMsgId = msgIds[0];
            } else {
                // dupe checking is not perfect; more than one MSGID matched
                Log.warn( { msgIds : msgIds, replyKludge : replyKludge }, 'Found 2:n MSGIDs matching REPLY kludge!');
            }
        }
        return cb();
    });
};
|
|
|
|
|
2016-03-29 04:07:21 +00:00
|
|
|
//
// Import a single EchoMail |message| into the local area |localAreaTag|.
// Validates the packet destination, rejects duplicate MSGIDs (unless the
// area allows dupes), resolves REPLY kludges, and persists the message
// marked as imported. |cb| receives the first step error, if any; a
// DUPE_MSGID error carries err.code === 'DUPE_MSGID' for callers.
// Note: |header| (the packet header) is currently unused here.
//
this.importEchoMailToArea = function(localAreaTag, header, message, cb) {
    async.series(
        [
            function validateDestinationAddress(callback) {
                // dest net/node must map to one of *our* configured networks
                const localNetworkPattern = `${message.meta.FtnProperty.ftn_dest_network}/${message.meta.FtnProperty.ftn_dest_node}`;
                const localNetworkName = self.getNetworkNameByAddressPattern(localNetworkPattern);

                callback(_.isString(localNetworkName) ? null : new Error('Packet destination is not us'));
            },
            function checkForDupeMSGID(callback) {
                //
                // If we have a MSGID, don't allow a dupe
                //
                if(!_.has(message.meta, 'FtnKludge.MSGID')) {
                    return callback(null);
                }

                Message.getMessageIdsByMetaValue('FtnKludge', 'MSGID', message.meta.FtnKludge.MSGID, (err, msgIds) => {
                    if(msgIds && msgIds.length > 0) {
                        // tagged error code so importMessagesFromPacketFile can
                        // treat dupes as informational rather than failures
                        const err = new Error('Duplicate MSGID');
                        err.code = 'DUPE_MSGID';
                        return callback(err);
                    }

                    return callback(null);
                });
            },
            function basicSetup(callback) {
                message.areaTag = localAreaTag;

                //
                // If we *allow* dupes (disabled by default), then just generate
                // a random UUID. Otherwise, don't assign the UUID just yet. It will be
                // generated at persist() time and should be consistent across import/exports
                //
                if(Config.messageNetworks.ftn.areas[localAreaTag].allowDupes) {
                    // just generate a UUID & therefor always allow for dupes
                    message.uuid = uuidV4();
                }

                callback(null);
            },
            function setReplyToMessageId(callback) {
                // best-effort: errors from the REPLY lookup are intentionally ignored
                self.setReplyToMsgIdFtnReplyKludge(message, () => {
                    callback(null);
                });
            },
            function persistImport(callback) {
                // mark as imported
                message.meta.System.state_flags0 = Message.StateFlags0.Imported.toString();

                // save to disc
                message.persist(err => {
                    callback(err);
                });
            }
        ],
        err => {
            cb(err);
        }
    );
};
|
2016-07-06 04:18:43 +00:00
|
|
|
|
|
|
|
this.appendTearAndOrigin = function(message) {
    //
    // Re-attach the FTN tear line and origin line to the message body.
    // These are kept out of the body during packet parsing (see
    // keepTearAndOrigin:false) so UUID calculation excludes them.
    //
    const ftnProps = message.meta.FtnProperty;

    if(ftnProps.ftn_tear_line) {
        message.message += `\r\n${ftnProps.ftn_tear_line}\r\n`;
    }

    if(ftnProps.ftn_origin) {
        message.message += `${ftnProps.ftn_origin}\r\n`;
    }
};
|
2016-03-09 05:30:04 +00:00
|
|
|
|
|
|
|
//
|
|
|
|
// Ref. implementations on import:
|
|
|
|
// * https://github.com/larsks/crashmail/blob/26e5374710c7868dab3d834be14bf4041041aae5/crashmail/pkt.c
|
|
|
|
// https://github.com/larsks/crashmail/blob/26e5374710c7868dab3d834be14bf4041041aae5/crashmail/handle.c
|
|
|
|
//
|
2016-03-13 17:11:51 +00:00
|
|
|
//
// Ref. implementations on import:
// * https://github.com/larsks/crashmail/blob/26e5374710c7868dab3d834be14bf4041041aae5/crashmail/pkt.c
// https://github.com/larsks/crashmail/blob/26e5374710c7868dab3d834be14bf4041041aae5/crashmail/handle.c
//
// Import all messages from the FTN packet at |packetPath|. EchoMail
// messages are routed to their mapped local areas; NetMail is not yet
// implemented. Per-area success/failure counts are accumulated and
// logged at completion. |password| is currently unvalidated (see TODO
// below). |cb| receives the first fatal read/import error, if any.
//
this.importMessagesFromPacketFile = function(packetPath, password, cb) {
    let packetHeader;

    const packetOpts = { keepTearAndOrigin : false }; // needed so we can calc message UUID without these; we'll add later

    let importStats = {
        areaSuccess : {}, // areaTag->count
        areaFail : {}, // areaTag->count
        otherFail : 0,
    };

    // streaming read: the callback fires once per packet entry ('header'
    // first, then each 'message'); call next() to advance or abort
    new ftnMailPacket.Packet(packetOpts).read(packetPath, (entryType, entryData, next) => {
        if('header' === entryType) {
            packetHeader = entryData;

            // reject packets not addressed to one of our configured networks
            const localNetworkName = self.getNetworkNameByAddress(packetHeader.destAddress);
            if(!_.isString(localNetworkName)) {
                const addrString = new Address(packetHeader.destAddress).toString();
                return next(new Error(`No local configuration for packet addressed to ${addrString}`));
            } else {

                // :TODO: password needs validated - need to determine if it will use the same node config (which can have wildcards) or something else?!
                return next(null);
            }

        } else if('message' === entryType) {
            const message = entryData;
            const areaTag = message.meta.FtnProperty.ftn_area;

            if(areaTag) {
                //
                // EchoMail
                //
                const localAreaTag = self.getLocalAreaTagByFtnAreaTag(areaTag);
                if(localAreaTag) {
                    // UUID is computed *before* tear/origin are re-appended so
                    // it stays stable across import/export cycles
                    message.uuid = Message.createMessageUUID(
                        localAreaTag,
                        message.modTimestamp,
                        message.subject,
                        message.message);

                    self.appendTearAndOrigin(message);

                    self.importEchoMailToArea(localAreaTag, packetHeader, message, err => {
                        if(err) {
                            // bump area fail stats
                            importStats.areaFail[localAreaTag] = (importStats.areaFail[localAreaTag] || 0) + 1;

                            // dupes (either DB constraint or explicit MSGID check)
                            // are logged as info and do NOT abort the packet read
                            if('SQLITE_CONSTRAINT' === err.code || 'DUPE_MSGID' === err.code) {
                                const msgId = _.has(message.meta, 'FtnKludge.MSGID') ? message.meta.FtnKludge.MSGID : 'N/A';
                                Log.info(
                                    { area : localAreaTag, subject : message.subject, uuid : message.uuid, MSGID : msgId },
                                    'Not importing non-unique message');

                                return next(null);
                            }
                        } else {
                            // bump area success
                            importStats.areaSuccess[localAreaTag] = (importStats.areaSuccess[localAreaTag] || 0) + 1;
                        }

                        return next(err);
                    });
                } else {
                    //
                    // No local area configured for this import
                    //
                    // :TODO: Handle the "catch all" case, if configured
                    Log.warn( { areaTag : areaTag }, 'No local area configured for this packet file!');

                    // bump generic failure
                    importStats.otherFail += 1;

                    return next(null);
                }
            } else {
                //
                // NetMail
                //
                Log.warn('NetMail import not yet implemented!');
                return next(null);
            }
        }
    }, err => {
        //
        // try to produce something helpful in the log
        //
        const finalStats = Object.assign(importStats, { packetPath : packetPath } );
        if(err || Object.keys(finalStats.areaFail).length > 0) {
            if(err) {
                Object.assign(finalStats, { error : err.message } );
            }

            Log.warn(finalStats, 'Import completed with error(s)');
        } else {
            Log.info(finalStats, 'Import complete');
        }

        cb(err);
    });
};
|
2017-03-09 05:37:02 +00:00
|
|
|
|
|
|
|
this.maybeArchiveImportFile = function(origPath, type, status, cb) {
    //
    // Copy |origPath| aside to an archival location, when configured.
    //
    // type   : pkt|tic|bundle
    // status : good|reject
    //
    // "good" files are archived only when they are .pkt's *and* a
    // |retain| path is configured — generally used for debugging only.
    // Anything non-good is copied to the |reject| path. Archiving is
    // always best-effort: |cb| never receives an error.
    //
    const ts = moment().format('YYYY-MM-DDTHH.mm.ss.SSS');
    const fn = paths.basename(origPath);

    let archivePath;
    if('good' !== status) {
        archivePath = paths.join(self.moduleConfig.paths.reject, `${status}-${type}--${ts}-${fn}`);
    } else if('pkt' === type && _.isString(self.moduleConfig.paths.retain)) {
        archivePath = paths.join(self.moduleConfig.paths.retain, `good-pkt-${ts}--${fn}`);
    } else {
        return cb(null); // nothing to archive for this type/status combo
    }

    Log.debug( { origPath : origPath, archivePath : archivePath, type : type, status : status }, 'Archiving import file');

    fse.copy(origPath, archivePath, err => {
        if(err) {
            Log.warn( { error : err.message, origPath : origPath, archivePath : archivePath, type : type, status : status }, 'Failed to archive packet file');
        }

        return cb(null); // never fatal
    });
};
|
2016-04-14 03:06:27 +00:00
|
|
|
|
2016-03-13 17:11:51 +00:00
|
|
|
//
// Import every *.pkt file found directly in |importDir| (serially), then
// archive (good/reject) and unlink each processed packet file. Failed
// packets are tracked as rejects but do not stop the batch. |password|
// is currently unused here — '' is passed to the per-file importer.
//
this.importPacketFilesFromDirectory = function(importDir, password, cb) {
    async.waterfall(
        [
            function getPacketFiles(callback) {
                fs.readdir(importDir, (err, files) => {
                    if(err) {
                        return callback(err);
                    }
                    // case-insensitive match on the .pkt extension
                    callback(null, files.filter(f => '.pkt' === paths.extname(f).toLowerCase()));
                });
            },
            function importPacketFiles(packetFiles, callback) {
                let rejects = [];
                // serial processing: one packet at a time
                async.eachSeries(packetFiles, (packetFile, nextFile) => {
                    self.importMessagesFromPacketFile(paths.join(importDir, packetFile), '', err => {
                        if(err) {
                            Log.debug(
                                { path : paths.join(importDir, packetFile), error : err.toString() },
                                'Failed to import packet file');

                            rejects.push(packetFile);
                        }
                        nextFile();
                    });
                }, err => {
                    // :TODO: Handle err! we should try to keep going though...
                    callback(err, packetFiles, rejects);
                });
            },
            function handleProcessedFiles(packetFiles, rejects, callback) {
                async.each(packetFiles, (packetFile, nextFile) => {
                    // possibly archive, then remove original
                    const fullPath = paths.join(importDir, packetFile);
                    self.maybeArchiveImportFile(
                        fullPath,
                        'pkt',
                        rejects.includes(packetFile) ? 'reject' : 'good',
                        () => {
                            // unlink errors intentionally ignored; file may be gone
                            fs.unlink(fullPath, () => {
                                return nextFile(null);
                            });
                        }
                    );
                }, err => {
                    callback(err);
                });
            }
        ],
        err => {
            cb(err);
        }
    );
};
|
|
|
|
|
2017-03-09 05:37:02 +00:00
|
|
|
//
// Full inbound processing pass over |importDir|: loose .pkt files first,
// then ArcMail bundles (extracted to the temp import dir and their
// packets imported), then TIC files. Processed bundles are archived
// (good/reject) and unlinked. |inboundType| is currently unused in this
// function — presumably reserved for inbound vs secure-inbound handling;
// see processTicFilesInDirectory's TODO.
//
this.importFromDirectory = function(inboundType, importDir, cb) {
    async.waterfall(
        [
            // start with .pkt files
            function importPacketFiles(callback) {
                self.importPacketFilesFromDirectory(importDir, '', err => {
                    callback(err);
                });
            },
            function discoverBundles(callback) {
                fs.readdir(importDir, (err, files) => {
                    // :TODO: if we do much more of this, probably just use the glob module
                    // ArcMail style extensions: .su0-.suz ... .sa0-.saz
                    // (note: pattern is unanchored; extensions merely *containing*
                    // a match would also pass — TODO confirm this is intended)
                    const bundleRegExp = /\.(su|mo|tu|we|th|fr|sa)[0-9a-z]/i;
                    files = files.filter(f => {
                        const fext = paths.extname(f);
                        return bundleRegExp.test(fext);
                    });

                    // determine archive type for each candidate; detection errors
                    // leave archName undefined and are handled downstream
                    async.map(files, (file, transform) => {
                        const fullPath = paths.join(importDir, file);
                        self.archUtil.detectType(fullPath, (err, archName) => {
                            transform(null, { path : fullPath, archName : archName } );
                        });
                    }, (err, bundleFiles) => {
                        callback(err, bundleFiles);
                    });
                });
            },
            function importBundles(bundleFiles, callback) {
                let rejects = [];

                async.each(bundleFiles, (bundleFile, nextFile) => {
                    if(_.isUndefined(bundleFile.archName)) {
                        Log.warn(
                            { fileName : bundleFile.path },
                            'Unknown bundle archive type');

                        rejects.push(bundleFile.path);

                        return nextFile(); // unknown archive type
                    }

                    Log.debug( { bundleFile : bundleFile }, 'Processing bundle' );

                    self.archUtil.extractTo(
                        bundleFile.path,
                        self.importTempDir,
                        bundleFile.archName,
                        err => {
                            if(err) {
                                Log.warn(
                                    { path : bundleFile.path, error : err.message },
                                    'Failed to extract bundle');

                                rejects.push(bundleFile.path);
                            }

                            nextFile();
                        }
                    );
                }, err => {
                    if(err) {
                        return callback(err);
                    }

                    //
                    // All extracted - import .pkt's
                    //
                    self.importPacketFilesFromDirectory(self.importTempDir, '', err => {
                        // :TODO: handle |err|
                        callback(null, bundleFiles, rejects);
                    });
                });
            },
            function handleProcessedBundleFiles(bundleFiles, rejects, callback) {
                async.each(bundleFiles, (bundleFile, nextFile) => {
                    self.maybeArchiveImportFile(
                        bundleFile.path,
                        'bundle',
                        rejects.includes(bundleFile.path) ? 'reject' : 'good',
                        () => {
                            fs.unlink(bundleFile.path, err => {
                                if(err) {
                                    Log.error( { path : bundleFile.path, error : err.message }, 'Failed unlinking bundle');
                                }
                                return nextFile(null);
                            });
                        }
                    );
                }, err => {
                    callback(err);
                });
            },
            function importTicFiles(callback) {
                self.processTicFilesInDirectory(importDir, err => {
                    return callback(err);
                });
            }
        ],
        err => {
            cb(err);
        }
    );
};
|
2016-03-13 17:11:51 +00:00
|
|
|
|
|
|
|
this.createTempDirectories = function(cb) {
    //
    // Create the tracked temporary working directories used for packet
    // export and import; resulting paths are stored on |self| as
    // |exportTempDir| and |importTempDir|.
    //
    temptmp.mkdir( { prefix : 'enigftnexport-' }, (err, exportDir) => {
        if(err) {
            return cb(err);
        }

        self.exportTempDir = exportDir;

        temptmp.mkdir( { prefix : 'enigftnimport-' }, (err, importDir) => {
            // assigned even on error (may be undefined); |err| is propagated
            self.importTempDir = importDir;
            return cb(err);
        });
    });
};
|
2016-03-16 03:44:24 +00:00
|
|
|
|
|
|
|
// Attempt to enter the exporting state; returns true only when no
// export was already in progress — i.e., the caller may proceed.
this.exportingStart = function() {
    if(this.exportRunning) {
        return false; // an export is already underway
    }

    this.exportRunning = true;
    return true;
};
|
2017-02-22 04:26:56 +00:00
|
|
|
|
2016-03-16 03:44:24 +00:00
|
|
|
// Leave the exporting state previously entered via exportingStart()
this.exportingEnd = function() {
    this.exportRunning = false;
};
|
2017-03-09 05:37:02 +00:00
|
|
|
|
|
|
|
this.copyTicAttachment = function(src, dst, isUpdate, cb) {
    //
    // Copy a TIC-described attachment from |src| to |dst|.
    // - update/replace: overwrite in place; final path is always |dst|
    // - new entry: copy with collision handling so an unrelated existing
    //   file with the same name is never clobbered; |cb| receives the
    //   (possibly renamed) final path actually used
    //
    if(!isUpdate) {
        return copyFileWithCollisionHandling(src, dst, (err, finalPath) => {
            return cb(err, finalPath);
        });
    }

    fse.copy(src, dst, err => {
        return cb(err, dst);
    });
};
|
|
|
|
|
|
|
|
this.getLocalAreaTagsForTic = function() {
    // Area tags acceptable for TIC processing: any explicit ticAreas
    // mapping keys plus every configured file base area tag (de-duped).
    const ticAreaTags  = Object.keys(Config.scannerTossers.ftn_bso.ticAreas || {} );
    const fileAreaTags = Object.keys(Config.fileBase.areas);

    return _.union(ticAreaTags, fileAreaTags);
};
|
|
|
|
|
|
|
|
//
// Process one parsed .TIC file: validate against node/area config,
// locate an existing entry to replace (honoring the TIC "Replaces"
// field when |allowReplace| is enabled), scan + store the attached
// file into its area, and clean up any replaced physical file.
// |cb| receives the first fatal error; cleanup failures are non-fatal.
//
// Bug fix: the multi-match guard previously tested `fileIds.legnth`
// (typo) which is always undefined, so duplicate replacement candidates
// silently fell through to the success path instead of erroring.
//
this.processSingleTicFile = function(ticFileInfo, cb) {
    const self = this;

    Log.debug( { tic : ticFileInfo.path, file : ticFileInfo.getAsString('File') }, 'Processing TIC file');

    async.waterfall(
        [
            function generalValidation(callback) {
                const config = {
                    nodes : Config.scannerTossers.ftn_bso.nodes,
                    defaultPassword : Config.scannerTossers.ftn_bso.tic.password,
                    localAreaTags : self.getLocalAreaTagsForTic(),
                };

                return ticFileInfo.validate(config, (err, localInfo) => {
                    if(err) {
                        Log.trace( { reason : err.message }, 'Validation failure');
                        return callback(err);
                    }

                    // We may need to map |localAreaTag| back to real areaTag if it's a mapping/alias
                    const mappedLocalAreaTag = _.get(Config.scannerTossers.ftn_bso, [ 'ticAreas', localInfo.areaTag ]);

                    if(mappedLocalAreaTag) {
                        if(_.isString(mappedLocalAreaTag.areaTag)) {
                            localInfo.areaTag = mappedLocalAreaTag.areaTag;
                            localInfo.hashTags = mappedLocalAreaTag.hashTags; // override default for node
                            localInfo.storageTag = mappedLocalAreaTag.storageTag; // override default
                        } else if(_.isString(mappedLocalAreaTag)) {
                            localInfo.areaTag = mappedLocalAreaTag;
                        }
                    }

                    return callback(null, localInfo);
                });
            },
            function findExistingItem(localInfo, callback) {
                //
                // We will need to look for an existing item to replace/update if:
                // a) The TIC file has a "Replaces" field
                // b) The general or node specific |allowReplace| is true
                //
                // Replace specifies a DOS 8.3 *pattern* which is allowed to have
                // ? and * characters. For example, RETRONET.*
                //
                // Lastly, we will only replace if the item is in the same/specified area
                // and that come from the same origin as a previous entry.
                //
                const allowReplace = _.get(Config.scannerTossers.ftn_bso.nodes, [ localInfo.node, 'tic', 'allowReplace' ], Config.scannerTossers.ftn_bso.tic.allowReplace);
                const replaces = ticFileInfo.getAsString('Replaces');

                if(!allowReplace || !replaces) {
                    return callback(null, localInfo);
                }

                const metaPairs = [
                    {
                        name : 'short_file_name',
                        value : replaces.toUpperCase(), // we store upper as well
                        wcValue : true, // value may contain wildcards
                    },
                    {
                        name : 'tic_origin',
                        value : ticFileInfo.getAsString('Origin'),
                    }
                ];

                FileEntry.findFiles( { metaPairs : metaPairs, areaTag : localInfo.areaTag }, (err, fileIds) => {
                    if(err) {
                        return callback(err);
                    }

                    // 0:1 allowed
                    if(1 === fileIds.length) {
                        localInfo.existingFileId = fileIds[0];

                        // fetch old filename - we may need to remove it if replacing with a new name
                        FileEntry.loadBasicEntry(localInfo.existingFileId, {}, (err, info) => {
                            if(info) {
                                Log.trace(
                                    { fileId : localInfo.existingFileId, oldFileName : info.fileName, oldStorageTag : info.storageTag },
                                    'Existing TIC file target to be replaced'
                                );

                                localInfo.oldFileName = info.fileName;
                                localInfo.oldStorageTag = info.storageTag;
                            }
                            return callback(null, localInfo); // continue even if we couldn't find an old match
                        });
                    } else if(fileIds.length > 1) { // fixed: was `fileIds.legnth` (always undefined)
                        return callback(Errors.General(`More than one existing entry for TIC in ${localInfo.areaTag} ([${fileIds.join(', ')}])`));
                    } else {
                        return callback(null, localInfo);
                    }
                });
            },
            function scan(localInfo, callback) {
                const scanOpts = {
                    sha256 : localInfo.sha256, // *may* have already been calculated
                    meta : {
                        // some TIC-related metadata we always want
                        short_file_name : ticFileInfo.getAsString('File').toUpperCase(), // upper to ensure no case issues later; this should be a DOS 8.3 name
                        tic_origin : ticFileInfo.getAsString('Origin'),
                        tic_desc : ticFileInfo.getAsString('Desc'),
                        upload_by_username : _.get(Config.scannerTossers.ftn_bso.nodes, [ localInfo.node, 'tic', 'uploadBy' ], Config.scannerTossers.ftn_bso.tic.uploadBy),
                    }
                };

                const ldesc = ticFileInfo.getAsString('Ldesc', '\n');
                if(ldesc) {
                    scanOpts.meta.tic_ldesc = ldesc;
                }

                //
                // We may have TIC auto-tagging for this node and/or specific (remote) area
                //
                const hashTags =
                    localInfo.hashTags ||
                    _.get(Config.scannerTossers.ftn_bso.nodes, [ localInfo.node, 'tic', 'hashTags' ] ); // catch-all*/

                if(hashTags) {
                    scanOpts.hashTags = new Set(hashTags.split(/[\s,]+/));
                }

                if(localInfo.crc32) {
                    scanOpts.meta.file_crc32 = localInfo.crc32.toString(16); // again, *may* have already been calculated
                }

                scanFile(
                    ticFileInfo.filePath,
                    scanOpts,
                    (err, fileEntry) => {
                        if(err) {
                            Log.trace( { reason : err.message }, 'Scanning failed');
                        }

                        localInfo.fileEntry = fileEntry;
                        return callback(err, localInfo);
                    }
                );
            },
            function store(localInfo, callback) {
                //
                // Move file to final area storage and persist to DB
                //
                const areaInfo = getFileAreaByTag(localInfo.areaTag);
                if(!areaInfo) {
                    return callback(Errors.UnexpectedState(`Could not get area for tag ${localInfo.areaTag}`));
                }

                const storageTag = localInfo.storageTag || areaInfo.storageTags[0];
                if(!isValidStorageTag(storageTag)) {
                    return callback(Errors.Invalid(`Invalid storage tag: ${storageTag}`));
                }

                localInfo.fileEntry.storageTag = storageTag;
                localInfo.fileEntry.areaTag = localInfo.areaTag;
                localInfo.fileEntry.fileName = ticFileInfo.longFileName;

                //
                // We may now have two descriptions: from .DIZ/etc. or the TIC itself.
                // Determine which one to use using |descPriority| and availability.
                //
                // We will still fallback as needed from <priority1> -> <priority2> -> <fromFileName>
                //
                const descPriority = _.get(
                    Config.scannerTossers.ftn_bso.nodes, [ localInfo.node, 'tic', 'descPriority' ],
                    Config.scannerTossers.ftn_bso.tic.descPriority
                );

                if('tic' === descPriority) {
                    const origDesc = localInfo.fileEntry.desc;
                    localInfo.fileEntry.desc = ticFileInfo.getAsString('Ldesc') || origDesc || getDescFromFileName(ticFileInfo.filePath);
                } else {
                    // see if we got desc from .DIZ/etc.
                    const fromDescFile = 'descFile' === localInfo.fileEntry.descSrc;
                    localInfo.fileEntry.desc = fromDescFile ? localInfo.fileEntry.desc : ticFileInfo.getAsString('Ldesc');
                    localInfo.fileEntry.desc = localInfo.fileEntry.desc || getDescFromFileName(ticFileInfo.filePath);
                }

                const areaStorageDir = getAreaStorageDirectoryByTag(storageTag);
                if(!areaStorageDir) {
                    return callback(Errors.UnexpectedState(`Could not get storage directory for tag ${localInfo.areaTag}`));
                }

                const isUpdate = localInfo.existingFileId ? true : false;

                if(isUpdate) {
                    // we need to *update* an existing record/file
                    localInfo.fileEntry.fileId = localInfo.existingFileId;
                }

                const dst = paths.join(areaStorageDir, localInfo.fileEntry.fileName);

                self.copyTicAttachment(ticFileInfo.filePath, dst, isUpdate, (err, finalPath) => {
                    if(err) {
                        Log.info( { reason : err.message }, 'Failed to copy TIC attachment');
                        return callback(err);
                    }

                    // collision handling may have chosen a different final name
                    if(dst !== finalPath) {
                        localInfo.fileEntry.fileName = paths.basename(finalPath);
                    }

                    localInfo.fileEntry.persist(isUpdate, err => {
                        return callback(err, localInfo);
                    });
                });
            },
            // :TODO: from here, we need to re-toss files if needed, before they are removed
            function cleanupOldFile(localInfo, callback) {
                if(!localInfo.existingFileId) {
                    return callback(null, localInfo);
                }

                const oldStorageDir = getAreaStorageDirectoryByTag(localInfo.oldStorageTag);
                const oldPath = paths.join(oldStorageDir, localInfo.oldFileName);

                fs.unlink(oldPath, err => {
                    if(err) {
                        Log.warn( { error : err.message, oldPath : oldPath }, 'Failed removing old physical file during TIC replacement');
                    } else {
                        Log.debug( { oldPath : oldPath }, 'Removed old physical file during TIC replacement');
                    }
                    return callback(null, localInfo); // continue even if err
                });
            },
        ],
        (err, localInfo) => {
            if(err) {
                Log.error( { error : err.message, reason : err.reason, tic : ticFileInfo.filePath }, 'Failed import/update TIC record' );
            } else {
                Log.debug(
                    { tic : ticFileInfo.path, file : ticFileInfo.filePath, area : localInfo.areaTag },
                    'TIC imported successfully'
                );
            }
            return cb(err);
        }
    );
};
|
|
|
|
|
|
|
|
this.removeAssocTicFiles = function(ticFileInfo, cb) {
|
|
|
|
async.each( [ ticFileInfo.path, ticFileInfo.filePath ], (path, nextPath) => {
|
|
|
|
fs.unlink(path, err => {
|
|
|
|
if(err && 'ENOENT' !== err.code) { // don't log when the file doesn't exist
|
|
|
|
Log.warn( { error : err.message, path : path }, 'Failed unlinking TIC file');
|
|
|
|
}
|
|
|
|
return nextPath(null);
|
|
|
|
});
|
|
|
|
}, err => {
|
|
|
|
return cb(err);
|
|
|
|
});
|
|
|
|
};
|
2016-02-10 05:30:59 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
//	FTNMessageScanTossModule is-a MessageScanTossModule
require('util').inherits(FTNMessageScanTossModule, MessageScanTossModule);
|
|
|
|
|
2016-06-20 03:09:45 +00:00
|
|
|
// :TODO: *scheduled* portion of this stuff should probably use event_scheduler - @immediate would still use record().
|
|
|
|
|
2017-03-09 05:37:02 +00:00
|
|
|
//	Scan |importDir| for *.tic files and import each one in series.
//	On a per-file import failure, the rejected .TIC + attachment are
//	optionally archived before cleanup. In every case the original
//	files are removed from |importDir| once processed.
FTNMessageScanTossModule.prototype.processTicFilesInDirectory = function(importDir, cb) {
	// :TODO: pass in 'inbound' vs 'secInbound' -- pass along to processSingleTicFile() where password will be checked

	const self = this;
	async.waterfall(
		[
			//	find all *.tic files (case-insensitive extension match)
			function findTicFiles(callback) {
				fs.readdir(importDir, (err, files) => {
					if(err) {
						return callback(err);
					}

					return callback(null, files.filter(f => '.tic' === paths.extname(f).toLowerCase()));
				});
			},
			//	parse each TIC file; unreadable/invalid ones are logged & skipped
			function gatherInfo(ticFiles, callback) {
				const ticFilesInfo = [];

				async.each(ticFiles, (fileName, nextFile) => {
					const fullPath = paths.join(importDir, fileName);

					TicFileInfo.createFromFile(fullPath, (err, ticInfo) => {
						if(err) {
							Log.warn( { error : err.message, path : fullPath }, 'Failed reading TIC file');
						} else {
							ticFilesInfo.push(ticInfo);
						}

						return nextFile(null);
					});
				},
				err => {
					return callback(err, ticFilesInfo);
				});
			},
			function process(ticFilesInfo, callback) {
				async.eachSeries(ticFilesInfo, (ticFileInfo, nextTicInfo) => {
					self.processSingleTicFile(ticFileInfo, err => {
						//	shared cleanup: always remove the processed .TIC + attachment,
						//	then move on to the next entry (even if cleanup itself fails)
						const finished = () => {
							self.removeAssocTicFiles(ticFileInfo, () => {
								return nextTicInfo(null);
							});
						};

						if(!err) {
							return finished();
						}

						//	archive rejected TIC stuff (.TIC + attach)
						async.each( [ ticFileInfo.path, ticFileInfo.filePath ], (path, nextPath) => {
							if(!path) {	//	possibly rejected due to "File" not existing/etc.
								return nextPath(null);
							}

							self.maybeArchiveImportFile(
								path,
								'tic',
								'reject',
								() => {
									return nextPath(null);
								}
							);
						},
						() => {
							return finished();
						});
					});
				}, err => {
					return callback(err);
				});
			}
		],
		err => {
			return cb(err);
		}
	);
};
|
|
|
|
|
2016-02-10 05:30:59 +00:00
|
|
|
//	Module startup: create temp dirs, then install export/import
//	schedules and (optionally) a @watch file trigger for import/toss.
FTNMessageScanTossModule.prototype.startup = function(cb) {
	Log.info(`${exports.moduleInfo.name} Scanner/Tosser starting up`);

	let importing = false;	//	guards against overlapping import runs

	const self = this;

	//	Kick off an import/toss run unless one is already in flight
	function tryImportNow(reasonDesc) {
		if(!importing) {
			importing = true;

			Log.info( { module : exports.moduleInfo.name }, reasonDesc);

			self.performImport( () => {
				importing = false;
			});
		}
	}

	this.createTempDirectories(err => {
		if(err) {
			//	was err.toStrong() -- not a function; would throw in the error path
			Log.warn( { error : err.toString() }, 'Failed creating temporary directories!');
			return cb(err);
		}

		if(_.isObject(this.moduleConfig.schedule)) {
			const exportSchedule = this.parseScheduleString(this.moduleConfig.schedule.export);
			if(exportSchedule) {
				Log.debug(
					{
						schedule	: this.moduleConfig.schedule.export,
						schedOK		: -1 === exportSchedule.sched.error,
						next		: moment(later.schedule(exportSchedule.sched).next(1)).format('ddd, MMM Do, YYYY @ h:m:ss a'),
						immediate	: exportSchedule.immediate ? true : false,
					},
					'Export schedule loaded'
				);

				if(exportSchedule.sched) {
					this.exportTimer = later.setInterval( () => {
						if(this.exportingStart()) {
							Log.info( { module : exports.moduleInfo.name }, 'Performing scheduled message scan/export...');

							this.performExport( () => {
								this.exportingEnd();
							});
						}
					}, exportSchedule.sched);
				}

				if(_.isBoolean(exportSchedule.immediate)) {
					this.exportImmediate = exportSchedule.immediate;
				}
			}

			const importSchedule = this.parseScheduleString(this.moduleConfig.schedule.import);
			if(importSchedule) {
				Log.debug(
					{
						schedule	: this.moduleConfig.schedule.import,
						schedOK		: -1 === importSchedule.sched.error,
						next		: moment(later.schedule(importSchedule.sched).next(1)).format('ddd, MMM Do, YYYY @ h:m:ss a'),
						watchFile	: _.isString(importSchedule.watchFile) ? importSchedule.watchFile : 'None',
					},
					'Import schedule loaded'
				);

				if(importSchedule.sched) {
					this.importTimer = later.setInterval( () => {
						tryImportNow('Performing scheduled message import/toss...');
					}, importSchedule.sched);
				}

				if(_.isString(importSchedule.watchFile)) {
					const watcher = sane(
						paths.dirname(importSchedule.watchFile),
						{
							glob : `**/${paths.basename(importSchedule.watchFile)}`
						}
					);

					[ 'change', 'add', 'delete' ].forEach(event => {
						watcher.on(event, (fileName, fileRoot) => {
							const eventPath = paths.join(fileRoot, fileName);
							if(paths.join(fileRoot, fileName) === importSchedule.watchFile) {
								tryImportNow(`Performing import/toss due to @watch: ${eventPath} (${event})`);
							}
						});
					});

					//
					//	If the watch file already exists, kick off now
					//	https://github.com/NuSkooler/enigma-bbs/issues/122
					//
					//	was fse.exists() -- |fse| is not imported by this module; use fs.access()
					fs.access(importSchedule.watchFile, err => {
						if(!err) {	//	file exists & is accessible
							tryImportNow(`Performing import/toss due to @watch: ${importSchedule.watchFile} (initial exists)`);
						}
					});
				}
			}
		}

		FTNMessageScanTossModule.super_.prototype.startup.call(this, cb);
	});
};
|
|
|
|
|
|
|
|
//	Module shutdown: clear any schedule timers, clean up our temp
//	session files, then chain to the parent class's shutdown.
FTNMessageScanTossModule.prototype.shutdown = function(cb) {
	Log.info('FidoNet Scanner/Tosser shutting down');

	if(this.exportTimer) {
		this.exportTimer.clear();
	}

	if(this.importTimer) {
		this.importTimer.clear();
	}

	//
	//	Clean up temp dir/files we created
	//
	temptmp.cleanup( paths => {
		const fullStats = {
			exportDir	: this.exportTempDir,
			importTemp	: this.importTempDir,
			paths		: paths,
			sessionId	: temptmp.sessionId,
		};

		Log.trace(fullStats, 'Temporary directories cleaned up');

		//	Chain to super *only* here, after cleanup completes. The
		//	previous code also called it unconditionally after this
		//	block, invoking |cb| twice.
		FTNMessageScanTossModule.super_.prototype.shutdown.call(this, cb);
	});
};
|
|
|
|
|
2016-03-01 05:32:51 +00:00
|
|
|
//	Import/toss from both the standard and secure inbound directories.
//	Per-directory failures are ignored so one bad inbound does not
//	block processing of the other.
FTNMessageScanTossModule.prototype.performImport = function(cb) {
	if(!this.hasValidConfiguration()) {
		return cb(new Error('Missing or invalid configuration'));
	}

	async.each( [ 'inbound', 'secInbound' ], (inboundType, nextDir) => {
		this.importFromDirectory(inboundType, this.moduleConfig.paths[inboundType], () => {
			return nextDir(null);	//	continue regardless of per-dir result
		});
	}, cb);
};
|
|
|
|
|
2016-02-29 05:04:03 +00:00
|
|
|
//	Scan each configured FTN message area for messages newer than the
//	area's last-scan pointer and export them to configured uplink(s).
//	Per-area errors are intentionally swallowed so remaining areas are
//	still processed; |cb| receives only the async.each terminal error.
FTNMessageScanTossModule.prototype.performExport = function(cb) {
	//
	//	We're only concerned with areas related to FTN. For each area, loop though
	//	and let's find out what messages need exported.
	//
	if(!this.hasValidConfiguration()) {
		return cb(new Error('Missing or invalid configuration'));
	}

	//
	//	Select all messages with a |message_id| > |lastScanId|.
	//	Additionally exclude messages with the System state_flags0 which will be present for
	//	imported or already exported messages
	//
	//	NOTE: If StateFlags0 starts to use additional bits, we'll likely need to check them here!
	//
	const getNewUuidsSql =
		`SELECT message_id, message_uuid
		FROM message m
		WHERE area_tag = ? AND message_id > ? AND
			(SELECT COUNT(message_id)
			FROM message_meta
			WHERE message_id = m.message_id AND meta_category = 'System' AND meta_name = 'state_flags0') = 0
		ORDER BY message_id;`;

	let self = this;

	async.each(Object.keys(Config.messageNetworks.ftn.areas), (areaTag, nextArea) => {
		const areaConfig = Config.messageNetworks.ftn.areas[areaTag];
		if(!this.isAreaConfigValid(areaConfig)) {
			return nextArea();	//	skip mis-configured areas
		}

		//
		//	For each message that is newer than that of the last scan
		//	we need to export to each configured associated uplink(s)
		//
		async.waterfall(
			[
				function getLastScanId(callback) {
					self.getAreaLastScanId(areaTag, callback);
				},
				function getNewUuids(lastScanId, callback) {
					msgDb.all(getNewUuidsSql, [ areaTag, lastScanId ], (err, rows) => {
						if(err) {
							callback(err);
						} else {
							if(0 === rows.length) {
								//	flow-control "error": short-circuits the rest of
								//	the waterfall when there is nothing to export
								let nothingToDoErr = new Error('Nothing to do!');
								nothingToDoErr.noRows = true;
								callback(nothingToDoErr);
							} else {
								callback(null, rows);
							}
						}
					});
				},
				function exportToConfiguredUplinks(msgRows, callback) {
					const uuidsOnly = msgRows.map(r => r.message_uuid); // convert to array of UUIDs only
					self.exportMessagesToUplinks(uuidsOnly, areaConfig, err => {
						//	rows are ordered by message_id, so the last row holds
						//	the new high-water mark for this area
						const newLastScanId = msgRows[msgRows.length - 1].message_id;

						Log.info(
							{ areaTag : areaTag, messagesExported : msgRows.length, newLastScanId : newLastScanId },
							'Export complete');

						callback(err, newLastScanId);
					});
				},
				function updateLastScanId(newLastScanId, callback) {
					self.setAreaLastScanId(areaTag, newLastScanId, callback);
				}
			],
			() => {
				//	per-area errors (including "Nothing to do!") are dropped here;
				//	always continue with the next area
				return nextArea();
			}
		);
	}, err => {
		return cb(err);
	});
};
|
|
|
|
|
2016-02-21 00:57:38 +00:00
|
|
|
//	Record a newly posted message for possible immediate export.
//	This module works off schedules, but we do support @immediate for export.
FTNMessageScanTossModule.prototype.record = function(message) {
	if(true !== this.exportImmediate || !this.hasValidConfiguration()) {
		return;
	}

	if(message.isPrivate()) {
		// :TODO: support NetMail
		return;
	}

	if(!message.areaTag) {
		return;
	}

	const areaConfig = Config.messageNetworks.ftn.areas[message.areaTag];
	if(!this.isAreaConfigValid(areaConfig)) {
		return;
	}

	if(!this.exportingStart()) {
		return;	//	an export is already in progress
	}

	this.exportMessagesToUplinks( [ message.uuid ], areaConfig, err => {
		const info = { uuid : message.uuid, subject : message.subject };

		if(err) {
			Log.warn(info, 'Failed exporting message');
		} else {
			Log.info(info, 'Message exported');
		}

		this.exportingEnd();
	});
};
|