Merge branch 'master' of ssh://numinibsd/git/base/enigma-bbs

Bryan Ashby 2017-03-08 22:40:26 -07:00
commit 37c743e974
10 changed files with 843 additions and 110 deletions

View File

@@ -52,7 +52,7 @@ ENiGMA has been tested with many terminals. However, the following are suggested
## Boards
* WQH: :skull: Xibalba :skull: (**telnet://xibalba.l33t.codes:44510**)
-* Support board: ☠ BLACK ƒlag ☠ (**telnet://blackflag.acid.org:2425**)
+* Exotica: (**telnet://andrew.homeunix.org:2023**)
* [force9](http://bbs.force9.org/): (**telnet://bbs.force9.org**)

View File

@@ -500,6 +500,8 @@ function getDefaultConfig() {
outbound : paths.join(__dirname, './../mail/ftn_out/'),
inbound : paths.join(__dirname, './../mail/ftn_in/'),
secInbound : paths.join(__dirname, './../mail/ftn_secin/'),
+reject : paths.join(__dirname, './../mail/reject/'), // bad pkt, bundles, TIC attachments that fail any check, etc.
+// set 'retain' to a valid path to keep good pkt files
},
//
@@ -509,6 +511,12 @@ function getDefaultConfig() {
//
packetTargetByteSize : 512000, // 512k, before placing messages in a new pkt
bundleTargetByteSize : 2048000, // 2M, before creating another archive
+tic : {
+secureInOnly : true, // only bring in from secure inbound (|secInbound| path, password protected)
+uploadBy : 'ENiGMA TIC', // default upload by username (override @ network)
+allowReplace : false, // use "Replaces" TIC field
+}
}
},
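For sysops, these defaults surface under `scannerTossers::ftn_bso` in the system configuration. A hedged sketch of how the new knobs might be set, shown here as a JS object; the node pattern, paths, and values are illustrative examples, not defaults from this commit:

```js
const ftnBsoConfigSketch = {
    paths : {
        reject : '/enigma-bbs/mail/reject',     // failed pkt/bundle/TIC material gets archived here
        retain : '/enigma-bbs/mail/retain',     // optional: keep copies of good pkt files for debugging
    },
    tic : {
        secureInOnly : true,            // only accept TIC files arriving via the secure inbound
        uploadBy     : 'ENiGMA TIC',    // default "uploaded by" username
        allowReplace : false,           // honor the TIC "Replaces" field?
    },
    nodes : {
        '46:*' : {  // example node address pattern; per-node settings override the defaults above
            tic : { password : 'TICPASS', uploadBy : 'Agoranet TIC', allowReplace : true },
        },
    },
};
```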

View File

@@ -263,7 +263,7 @@ const DB_INIT_TABLE = {
dbs.file.run('PRAGMA foreign_keys = ON;');
dbs.file.run(
-// :TODO: should any of this be unique??
+// :TODO: should any of this be unique -- file_sha256 unless dupes are allowed on the system
`CREATE TABLE IF NOT EXISTS file (
file_id INTEGER PRIMARY KEY,
area_tag VARCHAR NOT NULL,
@@ -281,6 +281,11 @@ const DB_INIT_TABLE = {
ON file (area_tag);`
);
+dbs.file.run(
+`CREATE INDEX IF NOT EXISTS file_by_sha256_index
+ON file (file_sha256);`
+);
dbs.file.run(
`CREATE VIRTUAL TABLE IF NOT EXISTS file_fts USING fts4 (
content="file",
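The new `file_by_sha256_index` keeps content-hash lookups cheap (e.g. for the dupe checking hinted at in the updated :TODO:). An illustrative helper, not part of this commit:

```js
const fileDb = require('./database.js').dbs.file;

// hypothetical helper: find any existing file entries sharing the same content hash
function findBySha256(sha256, cb) {
    fileDb.all(
        `SELECT file_id
        FROM file
        WHERE file_sha256 = ?;`,    // served by file_by_sha256_index
        [ sha256 ],
        (err, rows) => {
            return cb(err, rows ? rows.map(r => r.file_id) : []);
        }
    );
}
```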

View File

@@ -23,6 +23,7 @@ const iconv = require('iconv-lite');
exports.isInternalArea = isInternalArea;
exports.getAvailableFileAreas = getAvailableFileAreas;
exports.getSortedAvailableFileAreas = getSortedAvailableFileAreas;
+exports.isValidStorageTag = isValidStorageTag;
exports.getAreaStorageDirectoryByTag = getAreaStorageDirectoryByTag;
exports.getAreaDefaultStorageDirectory = getAreaDefaultStorageDirectory;
exports.getAreaStorageLocations = getAreaStorageLocations;
@@ -129,6 +130,10 @@ function changeFileAreaWithOptions(client, areaTag, options, cb) {
);
}
+function isValidStorageTag(storageTag) {
+return storageTag in Config.fileBase.storageTags;
+}
function getAreaStorageDirectoryByTag(storageTag) {
const storageLocation = (storageTag && Config.fileBase.storageTags[storageTag]);
@@ -428,6 +433,7 @@ function scanFile(filePath, options, iterator, cb) {
hashTags : options.hashTags, // Set() or Array
fileName : paths.basename(filePath),
storageTag : options.storageTag,
+fileSha256 : options.sha256, // caller may know this already
});
const stepInfo = {
@@ -455,6 +461,19 @@ function scanFile(filePath, options, iterator, cb) {
let lastCalcHashPercent;
+// don't re-calc hashes for any we already have in |options|
+const hashesToCalc = HASH_NAMES.filter(hn => {
+if('sha256' === hn && fileEntry.fileSha256) {
+return false;
+}
+if(`file_${hn}` in fileEntry.meta) {
+return false;
+}
+return true;
+});
async.waterfall(
[
function startScan(callback) {
@@ -472,17 +491,19 @@ function scanFile(filePath, options, iterator, cb) {
function processPhysicalFileGeneric(callback) {
stepInfo.bytesProcessed = 0;
-const hashes = {
-sha1 : crypto.createHash('sha1'),
-sha256 : crypto.createHash('sha256'),
-md5 : crypto.createHash('md5'),
-crc32 : new CRC32(),
-};
+const hashes = {};
+hashesToCalc.forEach(hashName => {
+if('crc32' === hashName) {
+hashes.crc32 = new CRC32;
+} else {
+hashes[hashName] = crypto.createHash(hashName);
+}
+});
const stream = fs.createReadStream(filePath);
function updateHashes(data) {
-async.each( HASH_NAMES, (hashName, nextHash) => {
+async.each(hashesToCalc, (hashName, nextHash) => {
hashes[hashName].update(data);
return nextHash(null);
}, () => {
@@ -519,7 +540,7 @@ function scanFile(filePath, options, iterator, cb) {
stream.on('end', () => {
fileEntry.meta.byte_size = stepInfo.bytesProcessed;
-async.each(HASH_NAMES, (hashName, nextHash) => {
+async.each(hashesToCalc, (hashName, nextHash) => {
if('sha256' === hashName) {
stepInfo.sha256 = fileEntry.fileSha256 = hashes.sha256.digest('hex');
} else if('sha1' === hashName || 'md5' === hashName) {
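A sketch of how a caller can take advantage of the new behavior by handing scanFile() hashes it already trusts, so they are not recomputed while streaming; the path and hash values are example data (the TIC importer added later in this commit does exactly this):

```js
const scanFile = require('./file_base_area.js').scanFile;

const scanOpts = {
    sha256 : 'e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855', // known -> sha256 not re-hashed
    meta   : { file_crc32 : '1a2b3c4d' },                                        // known -> crc32 skipped
};

scanFile('/path/to/inbound/RETRONET.ZIP', scanOpts, (err, fileEntry) => {
    // only the hashes still missing (e.g. md5, sha1) are computed while streaming the file
});
```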

View File

@@ -28,6 +28,10 @@ const FILE_WELL_KNOWN_META = {
dl_count : (d) => parseInt(d) || 0,
byte_size : (b) => parseInt(b) || 0,
archive_type : null,
+short_file_name : null, // e.g. DOS 8.3 filename, avail in some scenarios such as TIC import
+tic_origin : null, // TIC "Origin"
+tic_desc : null, // TIC "Desc"
+tic_ldesc : null, // TIC "Ldesc" joined by '\n'
};
module.exports = class FileEntry {
@@ -44,13 +48,11 @@ module.exports = class FileEntry {
this.hashTags = options.hashTags || new Set();
this.fileName = options.fileName;
this.storageTag = options.storageTag;
+this.fileSha256 = options.fileSha256;
}
static loadBasicEntry(fileId, dest, cb) {
-if(!cb && _.isFunction(dest)) {
-cb = dest;
-dest = this;
-}
+dest = dest || {};
fileDb.get(
`SELECT ${FILE_TABLE_MEMBERS.join(', ')}
@@ -72,7 +74,7 @@ module.exports = class FileEntry {
dest[_.camelCase(prop)] = file[prop];
});
-return cb(null);
+return cb(null, dest);
}
);
}
@@ -101,26 +103,51 @@ module.exports = class FileEntry {
);
}
-persist(cb) {
+persist(isUpdate, cb) {
+if(!cb && _.isFunction(isUpdate)) {
+cb = isUpdate;
+isUpdate = false;
+}
const self = this;
+let inTransaction = false;
async.series(
[
+function check(callback) {
+if(isUpdate && !self.fileId) {
+return callback(Errors.Invalid('Cannot update file entry without an existing "fileId" member'));
+}
+return callback(null);
+},
function startTrans(callback) {
return fileDb.run('BEGIN;', callback);
},
function storeEntry(callback) {
-fileDb.run(
-`REPLACE INTO file (area_tag, file_sha256, file_name, storage_tag, desc, desc_long, upload_timestamp)
-VALUES(?, ?, ?, ?, ?, ?, ?);`,
-[ self.areaTag, self.fileSha256, self.fileName, self.storageTag, self.desc, self.descLong, getISOTimestampString() ],
-function inserted(err) { // use non-arrow func for 'this' scope / lastID
-if(!err) {
-self.fileId = this.lastID;
-}
-return callback(err);
-}
-);
+inTransaction = true;
+if(isUpdate) {
+fileDb.run(
+`REPLACE INTO file (file_id, area_tag, file_sha256, file_name, storage_tag, desc, desc_long, upload_timestamp)
+VALUES(?, ?, ?, ?, ?, ?, ?, ?);`,
+[ self.fileId, self.areaTag, self.fileSha256, self.fileName, self.storageTag, self.desc, self.descLong, getISOTimestampString() ],
+err => {
+return callback(err);
+}
+);
+} else {
+fileDb.run(
+`REPLACE INTO file (area_tag, file_sha256, file_name, storage_tag, desc, desc_long, upload_timestamp)
+VALUES(?, ?, ?, ?, ?, ?, ?);`,
+[ self.areaTag, self.fileSha256, self.fileName, self.storageTag, self.desc, self.descLong, getISOTimestampString() ],
+function inserted(err) { // use non-arrow func for 'this' scope / lastID
+if(!err) {
+self.fileId = this.lastID;
+}
+return callback(err);
+}
+);
+}
},
function storeMeta(callback) {
async.each(Object.keys(self.meta), (n, next) => {
@@ -143,9 +170,13 @@ module.exports = class FileEntry {
],
err => {
// :TODO: Log orig err
-fileDb.run(err ? 'ROLLBACK;' : 'COMMIT;', err => {
-return cb(err);
-});
+if(inTransaction) {
+fileDb.run(err ? 'ROLLBACK;' : 'COMMIT;', err => {
+return cb(err);
+});
+} else {
+return cb(err);
+}
}
);
}
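A brief usage sketch of the reworked persist(); the entry objects and logging here are illustrative:

```js
// update an entry that already has a fileId (e.g. loaded earlier, or a TIC "Replaces" hit):
entry.persist(true, err => {
    if(err) {
        console.error(err.message);     // Errors.Invalid(...) if fileId is missing
    }
});

// brand new entry: the single-argument form behaves as before (isUpdate defaults to false)
newEntry.persist(err => {
    if(!err) {
        console.log(`stored as file_id ${newEntry.fileId}`);
    }
});
```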
@@ -350,43 +381,67 @@ module.exports = class FileEntry {
if(filter.sort && filter.sort.length > 0) {
if(Object.keys(FILE_WELL_KNOWN_META).indexOf(filter.sort) > -1) { // sorting via a meta value?
sql =
-`SELECT f.file_id
+`SELECT DISTINCT f.file_id
FROM file f, file_meta m`;
-appendWhereClause(`f.file_id = m.file_id AND m.meta_name="${filter.sort}"`);
+appendWhereClause(`f.file_id = m.file_id AND m.meta_name = "${filter.sort}"`);
sqlOrderBy = `${getOrderByWithCast('m.meta_value')} ${sqlOrderDir}`;
} else {
// additional special treatment for user ratings: we need to average them
if('user_rating' === filter.sort) {
sql =
-`SELECT f.file_id,
+`SELECT DISTINCT f.file_id,
(SELECT IFNULL(AVG(rating), 0) rating
FROM file_user_rating
WHERE file_id = f.file_id)
AS avg_rating
-FROM file f`;
+FROM file f, file_meta m`;
sqlOrderBy = `ORDER BY avg_rating ${sqlOrderDir}`;
} else {
sql =
-`SELECT f.file_id, f.${filter.sort}
-FROM file f`;
+`SELECT DISTINCT f.file_id, f.${filter.sort}
+FROM file f, file_meta m`;
sqlOrderBy = getOrderByWithCast(`f.${filter.sort}`) + ' ' + sqlOrderDir;
}
}
} else {
sql =
-`SELECT f.file_id
-FROM file f`;
+`SELECT DISTINCT f.file_id
+FROM file f, file_meta m`;
sqlOrderBy = `${getOrderByWithCast('f.file_id')} ${sqlOrderDir}`;
}
if(filter.areaTag && filter.areaTag.length > 0) {
-appendWhereClause(`f.area_tag="${filter.areaTag}"`);
+appendWhereClause(`f.area_tag = "${filter.areaTag}"`);
}
+if(filter.metaPairs && filter.metaPairs.length > 0) {
+filter.metaPairs.forEach(mp => {
+if(mp.wcValue) {
+// convert any * -> % and ? -> _ for SQLite syntax - see https://www.sqlite.org/lang_expr.html
+mp.value = mp.value.replace(/\*/g, '%').replace(/\?/g, '_');
+appendWhereClause(
+`f.file_id IN (
+SELECT file_id
+FROM file_meta
+WHERE meta_name = "${mp.name}" AND meta_value LIKE "${mp.value}"
+)`
+);
+} else {
+appendWhereClause(
+`f.file_id IN (
+SELECT file_id
+FROM file_meta
+WHERE meta_name = "${mp.name}" AND meta_value = "${mp.value}"
+)`
+);
+}
+});
+}
if(filter.storageTag && filter.storageTag.length > 0) {
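A hedged sketch of driving the new metaPairs filter the way the TIC importer in this commit does; the area tag, wildcard pattern, and origin address are example values:

```js
const FileEntry = require('./file_entry.js');

FileEntry.findFiles(
    {
        areaTag   : 'retronet_file_net',    // example area
        metaPairs : [
            { name : 'short_file_name', value : 'RETRONET.*', wcValue : true }, // * and ? wildcards -> SQL LIKE
            { name : 'tic_origin',      value : '46:1/100' },                   // exact meta_value match
        ],
    },
    (err, fileIds) => {
        // the importer treats 0 or 1 hits as OK and more than one as an error
    }
);
```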

View File

@@ -2,30 +2,43 @@
'use strict';
// ENiGMA½
+const EnigAssert = require('./enigma_assert.js');
// deps
const fse = require('fs-extra');
const paths = require('path');
const async = require('async');
exports.moveFileWithCollisionHandling = moveFileWithCollisionHandling;
+exports.copyFileWithCollisionHandling = copyFileWithCollisionHandling;
exports.pathWithTerminatingSeparator = pathWithTerminatingSeparator;
-//
-// Move |src| -> |dst| renaming to file(1).ext, file(2).ext, etc.
-// in the case of collisions.
-//
-function moveFileWithCollisionHandling(src, dst, cb) {
+function moveOrCopyFileWithCollisionHandling(src, dst, operation, cb) {
+operation = operation || 'copy';
const dstPath = paths.dirname(dst);
const dstFileExt = paths.extname(dst);
const dstFileSuffix = paths.basename(dst, dstFileExt);
+EnigAssert('move' === operation || 'copy' === operation);
let renameIndex = 0;
-let movedOk = false;
+let opOk = false;
let tryDstPath;
+function tryOperation(src, dst, callback) {
+if('move' === operation) {
+fse.move(src, tryDstPath, err => {
+return callback(err);
+});
+} else if('copy' === operation) {
+fse.copy(src, tryDstPath, { overwrite : false, errorOnExist : true }, err => {
+return callback(err);
+});
+}
+}
async.until(
-() => movedOk, // until moved OK
+() => opOk, // until moved OK
(cb) => {
if(0 === renameIndex) {
// try originally supplied path first
@@ -34,9 +47,11 @@ function moveFileWithCollisionHandling(src, dst, cb) {
tryDstPath = paths.join(dstPath, `${dstFileSuffix}(${renameIndex})${dstFileExt}`);
}
-fse.move(src, tryDstPath, err => {
+tryOperation(src, tryDstPath, err => {
if(err) {
-if('EEXIST' === err.code) {
+// for some reason fs-extra copy doesn't pass err.code
+// :TODO: this is dangerous: submit a PR to fs-extra to set EEXIST
+if('EEXIST' === err.code || 'copy' === operation) {
renameIndex += 1;
return cb(null); // keep trying
}
@@ -44,7 +59,7 @@ function moveFileWithCollisionHandling(src, dst, cb) {
return cb(err);
}
-movedOk = true;
+opOk = true;
return cb(null, tryDstPath);
});
},
@@ -54,6 +69,18 @@ function moveFileWithCollisionHandling(src, dst, cb) {
);
}
+//
+// Move |src| -> |dst| renaming to file(1).ext, file(2).ext, etc.
+// in the case of collisions.
+//
+function moveFileWithCollisionHandling(src, dst, cb) {
+return moveOrCopyFileWithCollisionHandling(src, dst, 'move', cb);
+}
+function copyFileWithCollisionHandling(src, dst, cb) {
+return moveOrCopyFileWithCollisionHandling(src, dst, 'copy', cb);
+}
function pathWithTerminatingSeparator(path) {
if(path && paths.sep !== path.charAt(path.length - 1)) {
path = path + paths.sep;
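A quick sketch of what the collision handling gives callers; the paths are example values:

```js
const copyFileWithCollisionHandling = require('./file_util.js').copyFileWithCollisionHandling;

// if /file/base/RETRONET.ZIP already exists, the copy lands at RETRONET(1).ZIP, RETRONET(2).ZIP, ...
copyFileWithCollisionHandling('/tmp/inbound/RETRONET.ZIP', '/file/base/RETRONET.ZIP', (err, finalPath) => {
    // finalPath reports the name actually used so callers (the TIC importer, upload.js below)
    // can adjust FileEntry.fileName before persisting
});
```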

View File

@@ -1,7 +1,7 @@
/* jslint node: true */
'use strict';
-let _ = require('lodash');
+const _ = require('lodash');
const FTN_ADDRESS_REGEXP = /^([0-9]+:)?([0-9]+)(\/[0-9]+)?(\.[0-9]+)?(@[a-z0-9\-\.]+)?$/i;
const FTN_PATTERN_REGEXP = /^([0-9\*]+:)?([0-9\*]+)(\/[0-9\*]+)?(\.[0-9\*]+)?(@[a-z0-9\-\.\*]+)?$/i;

View File

@@ -2,16 +2,26 @@
'use strict';
// ENiGMA½
const MessageScanTossModule = require('../msg_scan_toss_module.js').MessageScanTossModule;
const Config = require('../config.js').config;
const ftnMailPacket = require('../ftn_mail_packet.js');
const ftnUtil = require('../ftn_util.js');
const Address = require('../ftn_address.js');
const Log = require('../logger.js').log;
const ArchiveUtil = require('../archive_util.js');
const msgDb = require('../database.js').dbs.message;
const Message = require('../message.js');
+const TicFileInfo = require('../tic_file_info.js');
+const Errors = require('../enig_error.js').Errors;
+const FileEntry = require('../file_entry.js');
+const scanFile = require('../file_base_area.js').scanFile;
+const getFileAreaByTag = require('../file_base_area.js').getFileAreaByTag;
+const getDescFromFileName = require('../file_base_area.js').getDescFromFileName;
+const copyFileWithCollisionHandling = require('../file_util.js').copyFileWithCollisionHandling;
+const getAreaStorageDirectoryByTag = require('../file_base_area.js').getAreaStorageDirectoryByTag;
+const isValidStorageTag = require('../file_base_area.js').isValidStorageTag;
+// deps
const moment = require('moment');
const _ = require('lodash');
const paths = require('path');
@@ -1013,22 +1023,36 @@ function FTNMessageScanTossModule() {
});
};
-this.archivePacketFile = function(type, origPath, label, cb) {
-if('import' === type && _.isString(self.moduleConfig.retainImportPacketPath)) {
-const archivePath = paths.join(
-self.moduleConfig.retainImportPacketPath,
-`${label}-${moment().format('YYYY-MM-DDTHH.mm.ss.SSS')}-${paths.basename(origPath)}`);
-fse.copy(origPath, archivePath, err => {
-if(err) {
-Log.warn( { origPath : origPath, archivePath : archivePath }, 'Failed to archive packet file');
-}
-cb(null); // non-fatal always
-});
-} else {
-cb(null); // NYI
-}
+this.maybeArchiveImportFile = function(origPath, type, status, cb) {
+//
+// type   : pkt|tic|bundle
+// status : good|reject
+//
+// Status of "good" is only applied to pkt files & placed
+// in |retain| if set. This is generally used for debugging only.
+//
+let archivePath;
+const ts = moment().format('YYYY-MM-DDTHH.mm.ss.SSS');
+const fn = paths.basename(origPath);
+if('good' === status && type === 'pkt') {
+if(!_.isString(self.moduleConfig.paths.retain)) {
+return cb(null);
+}
+archivePath = paths.join(self.moduleConfig.paths.retain, `good-pkt-${ts}--${fn}`);
+} else {
+archivePath = paths.join(self.moduleConfig.paths.reject, `${status}-${type}--${ts}-${fn}`);
+}
+fse.copy(origPath, archivePath, err => {
+if(err) {
+Log.warn( { error : err.message, origPath : origPath, archivePath : archivePath, type : type, status : status }, 'Failed to archive packet file');
+}
+return cb(null); // never fatal
+});
};
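For reference, a sketch of how the importer invokes this and the kind of archive path that results; the inbound path and timestamp are example values:

```js
// e.g. a bundle that failed every import attempt:
self.maybeArchiveImportFile(
    '/enigma-bbs/mail/ftn_in/00010002.su0',     // example inbound bundle
    'bundle',
    'reject',
    () => { /* archiving is best-effort; the callback never receives an error */ }
);
// -> copies to something like:
//    <paths.reject>/reject-bundle--2017-03-08T22.40.26.123-00010002.su0
```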
this.importPacketFilesFromDirectory = function(importDir, password, cb) {
async.waterfall(
@@ -1061,27 +1085,18 @@ function FTNMessageScanTossModule() {
},
function handleProcessedFiles(packetFiles, rejects, callback) {
async.each(packetFiles, (packetFile, nextFile) => {
+// possibly archive, then remove original
const fullPath = paths.join(importDir, packetFile);
-//
-// If scannerTossers::ftn_bso::reainImportPacketPath is set,
-// copy each packet file over in the following format:
-//
-// <good|bad>-<msSinceEpoc>-<origPacketFileName.pkt>
-//
-if(rejects.indexOf(packetFile) > -1) {
-self.archivePacketFile('import', fullPath, 'reject', () => {
-nextFile();
-});
-// :TODO: rename to .bad, perhaps move to a rejects dir + log
-//nextFile();
-} else {
-self.archivePacketFile('import', fullPath, 'imported', () => {
-fs.unlink(fullPath, () => {
-nextFile();
-});
-});
-}
+self.maybeArchiveImportFile(
+fullPath,
+'pkt',
+rejects.includes(packetFile) ? 'reject' : 'good',
+() => {
+fs.unlink(fullPath, () => {
+return nextFile(null);
+});
+}
+);
}, err => {
callback(err);
});
@@ -1093,7 +1108,7 @@ function FTNMessageScanTossModule() {
);
};
-this.importMessagesFromDirectory = function(inboundType, importDir, cb) {
+this.importFromDirectory = function(inboundType, importDir, cb) {
async.waterfall(
[
// start with .pkt files
@@ -1144,7 +1159,7 @@ function FTNMessageScanTossModule() {
err => {
if(err) {
Log.warn(
-{ fileName : bundleFile.path, error : err.toString() },
+{ path : bundleFile.path, error : err.message },
'Failed to extract bundle');
rejects.push(bundleFile.path);
@@ -1169,17 +1184,25 @@ function FTNMessageScanTossModule() {
},
function handleProcessedBundleFiles(bundleFiles, rejects, callback) {
async.each(bundleFiles, (bundleFile, nextFile) => {
-if(rejects.indexOf(bundleFile.path) > -1) {
-// :TODO: rename to .bad, perhaps move to a rejects dir + log
-nextFile();
-} else {
-fs.unlink(bundleFile.path, err => {
-nextFile();
-});
-}
+self.maybeArchiveImportFile(
+bundleFile.path,
+'bundle',
+rejects.includes(bundleFile.path) ? 'reject' : 'good',
+() => {
+fs.unlink(bundleFile.path, err => {
+if(err) {
+Log.error( { path : bundleFile.path, error : err.message }, 'Failed unlinking bundle');
+}
+return nextFile(null);
+});
+}
+);
}, err => {
callback(err);
});
},
+function importTicFiles(callback) {
+self.processTicFilesInDirectory(importDir, err => {
+return callback(err);
+});
+}
],
err => {
@@ -1218,12 +1241,331 @@ function FTNMessageScanTossModule() {
this.exportingEnd = function() {
this.exportRunning = false;
};
this.copyTicAttachment = function(src, dst, isUpdate, cb) {
if(isUpdate) {
fse.copy(src, dst, err => {
return cb(err, dst);
});
} else {
copyFileWithCollisionHandling(src, dst, (err, finalPath) => {
return cb(err, finalPath);
});
}
};
this.getLocalAreaTagsForTic = function() {
return _.union(Object.keys(Config.scannerTossers.ftn_bso.ticAreas || {} ), Object.keys(Config.fileBase.areas));
};
this.processSingleTicFile = function(ticFileInfo, cb) {
const self = this;
Log.debug( { tic : ticFileInfo.path, file : ticFileInfo.getAsString('File') }, 'Processing TIC file');
async.waterfall(
[
function generalValidation(callback) {
const config = {
nodes : Config.scannerTossers.ftn_bso.nodes,
defaultPassword : Config.scannerTossers.ftn_bso.tic.password,
localAreaTags : self.getLocalAreaTagsForTic(),
};
return ticFileInfo.validate(config, (err, localInfo) => {
if(err) {
return callback(err);
}
// We may need to map |localAreaTag| back to real areaTag if it's a mapping/alias
const mappedLocalAreaTag = _.get(Config.scannerTossers.ftn_bso, [ 'ticAreas', localInfo.areaTag ]);
if(mappedLocalAreaTag) {
if(_.isString(mappedLocalAreaTag.areaTag)) {
localInfo.areaTag = mappedLocalAreaTag.areaTag;
localInfo.hashTags = mappedLocalAreaTag.hashTags; // override default for node
localInfo.storageTag = mappedLocalAreaTag.storageTag; // override default
} else if(_.isString(mappedLocalAreaTag)) {
localInfo.areaTag = mappedLocalAreaTag;
}
}
return callback(null, localInfo);
});
},
function findExistingItem(localInfo, callback) {
//
// We will need to look for an existing item to replace/update if:
// a) The TIC file has a "Replaces" field
// b) The general or node specific |allowReplace| is true
//
// Replace specifies a DOS 8.3 *pattern* which is allowed to have
// ? and * characters. For example, RETRONET.*
//
// Lastly, we will only replace if the item is in the same/specified area
// and comes from the same origin as the previous entry.
//
const allowReplace = _.get(Config.scannerTossers.ftn_bso.nodes, [ localInfo.node, 'tic', 'allowReplace' ] ) || Config.scannerTossers.ftn_bso.tic.allowReplace;
const replaces = ticFileInfo.getAsString('Replaces');
if(!allowReplace || !replaces) {
return callback(null, localInfo);
}
const metaPairs = [
{
name : 'short_file_name',
value : replaces.toUpperCase(), // we store upper as well
wcValue : true, // value may contain wildcards
},
{
name : 'tic_origin',
value : ticFileInfo.getAsString('Origin'),
}
];
FileEntry.findFiles( { metaPairs : metaPairs, areaTag : localInfo.areaTag }, (err, fileIds) => {
if(err) {
return callback(err);
}
// 0:1 allowed
if(1 === fileIds.length) {
localInfo.existingFileId = fileIds[0];
// fetch old filename - we may need to remove it if replacing with a new name
FileEntry.loadBasicEntry(localInfo.existingFileId, {}, (err, info) => {
if(err) {
return callback(err);
}
localInfo.oldFileName = info.fileName;
localInfo.oldStorageTag = info.storageTag;
return callback(null, localInfo);
});
} else if(fileIds.length > 1) {
return callback(Errors.General(`More than one existing entry for TIC in ${localInfo.areaTag} ([${fileIds.join(', ')}])`));
} else {
return callback(null, localInfo);
}
});
},
function scan(localInfo, callback) {
let ldesc = ticFileInfo.getAsString('Ldesc', '\n');
if(ldesc) {
ldesc = ldesc.trim();
}
const scanOpts = {
sha256 : localInfo.sha256, // *may* have already been calculated
meta : {
// some TIC-related metadata we always want
short_file_name : ticFileInfo.getAsString('File').toUpperCase(), // upper to ensure no case issues later; this should be a DOS 8.3 name
tic_origin : ticFileInfo.getAsString('Origin'),
tic_desc : ticFileInfo.getAsString('Desc'),
tic_ldesc : ldesc,
upload_by_username : _.get(Config.scannerTossers.ftn_bso.nodes, [ localInfo.node, 'tic', 'uploadBy' ]) || Config.scannerTossers.ftn_bso.tic.uploadBy,
}
};
//
// We may have TIC auto-tagging for this node and/or specific (remote) area
//
const hashTags =
localInfo.hashTags ||
_.get(Config.scannerTossers.ftn_bso.nodes, [ localInfo.node, 'tic', 'hashTags' ] ); // catch-all
if(hashTags) {
scanOpts.hashTags = new Set(hashTags.split(/[\s,]+/));
}
if(localInfo.crc32) {
scanOpts.meta.file_crc32 = localInfo.crc32.toString(16); // again, *may* have already been calculated
}
scanFile(
ticFileInfo.filePath,
scanOpts,
(err, fileEntry) => {
localInfo.fileEntry = fileEntry;
return callback(err, localInfo);
}
);
},
function store(localInfo, callback) {
//
// Move file to final area storage and persist to DB
//
const areaInfo = getFileAreaByTag(localInfo.areaTag);
if(!areaInfo) {
return callback(Errors.UnexpectedState(`Could not get area for tag ${localInfo.areaTag}`));
}
const storageTag = localInfo.storageTag || areaInfo.storageTags[0];
if(!isValidStorageTag(storageTag)) {
return callback(Errors.Invalid(`Invalid storage tag: ${storageTag}`));
}
localInfo.fileEntry.storageTag = storageTag;
localInfo.fileEntry.areaTag = localInfo.areaTag;
localInfo.fileEntry.fileName = ticFileInfo.longFileName;
// we default to .DIZ/etc. desc, but use from TIC if needed
if(!localInfo.fileEntry.desc || 0 === localInfo.fileEntry.desc.length) {
localInfo.fileEntry.desc = ticFileInfo.getAsString('Ldesc') || ticFileInfo.getAsString('Desc') || getDescFromFileName(ticFileInfo.filePath);
}
const areaStorageDir = getAreaStorageDirectoryByTag(storageTag);
if(!areaStorageDir) {
return callback(Errors.UnexpectedState(`Could not get storage directory for storage tag ${storageTag}`));
}
const isUpdate = localInfo.existingFileId ? true : false;
if(isUpdate) {
// we need to *update* an existing record/file
localInfo.fileEntry.fileId = localInfo.existingFileId;
}
const dst = paths.join(areaStorageDir, localInfo.fileEntry.fileName);
self.copyTicAttachment(ticFileInfo.filePath, dst, isUpdate, (err, finalPath) => {
if(err) {
return callback(err);
}
if(dst !== finalPath) {
localInfo.fileEntry.fileName = paths.basename(finalPath);
}
localInfo.fileEntry.persist(isUpdate, err => {
return callback(err, localInfo);
});
});
},
// :TODO: from here, we need to re-toss files if needed, before they are removed
function cleanupOldFile(localInfo, callback) {
if(!localInfo.existingFileId) {
return callback(null, localInfo);
}
const oldStorageDir = getAreaStorageDirectoryByTag(localInfo.oldStorageTag);
const oldPath = paths.join(oldStorageDir, localInfo.oldFileName);
fs.unlink(oldPath, err => {
if(err) {
Log.warn( { error : err.message, oldPath : oldPath }, 'Failed removing old physical file during TIC replacement');
} else {
Log.debug( { oldPath : oldPath }, 'Removed old physical file during TIC replacement');
}
return callback(null, localInfo); // continue even if err
});
},
],
(err, localInfo) => {
if(err) {
Log.error( { error : err.message, reason : err.reason, tic : ticFileInfo.path }, 'Failed import/update TIC record' );
} else {
Log.debug(
{ tic : ticFileInfo.path, file : ticFileInfo.filePath, area : localInfo.areaTag },
'TIC imported successfully'
);
}
return cb(err);
}
);
};
this.removeAssocTicFiles = function(ticFileInfo, cb) {
async.each( [ ticFileInfo.path, ticFileInfo.filePath ], (path, nextPath) => {
fs.unlink(path, err => {
if(err && 'ENOENT' !== err.code) { // don't log when the file doesn't exist
Log.warn( { error : err.message, path : path }, 'Failed unlinking TIC file');
}
return nextPath(null);
});
}, err => {
return cb(err);
});
};
}
require('util').inherits(FTNMessageScanTossModule, MessageScanTossModule);
// :TODO: *scheduled* portion of this stuff should probably use event_scheduler - @immediate would still use record().
FTNMessageScanTossModule.prototype.processTicFilesInDirectory = function(importDir, cb) {
// :TODO: pass in 'inbound' vs 'secInbound' -- pass along to processSingleTicFile() where password will be checked
const self = this;
async.waterfall(
[
function findTicFiles(callback) {
fs.readdir(importDir, (err, files) => {
if(err) {
return callback(err);
}
return callback(null, files.filter(f => '.tic' === paths.extname(f).toLowerCase()));
});
},
function gatherInfo(ticFiles, callback) {
const ticFilesInfo = [];
async.each(ticFiles, (fileName, nextFile) => {
const fullPath = paths.join(importDir, fileName);
TicFileInfo.createFromFile(fullPath, (err, ticInfo) => {
if(err) {
Log.warn( { error : err.message, path : fullPath }, 'Failed reading TIC file');
} else {
ticFilesInfo.push(ticInfo);
}
return nextFile(null);
});
},
err => {
return callback(err, ticFilesInfo);
});
},
function process(ticFilesInfo, callback) {
async.each(ticFilesInfo, (ticFileInfo, nextTicInfo) => {
self.processSingleTicFile(ticFileInfo, err => {
if(err) {
// archive rejected TIC stuff (.TIC + attach)
async.each( [ ticFileInfo.path, ticFileInfo.filePath ], (path, nextPath) => {
if(!path) { // possibly rejected due to "File" not existing/etc.
return nextPath(null);
}
self.maybeArchiveImportFile(
path,
'tic',
'reject',
() => {
return nextPath(null);
}
);
},
() => {
self.removeAssocTicFiles(ticFileInfo, () => {
return nextTicInfo(null);
});
});
} else {
self.removeAssocTicFiles(ticFileInfo, () => {
return nextTicInfo(null);
});
}
});
}, err => {
return callback(err);
});
}
],
err => {
return cb(err);
}
);
};
FTNMessageScanTossModule.prototype.startup = function(cb) {
Log.info(`${exports.moduleInfo.name} Scanner/Tosser starting up`);
@@ -1348,12 +1690,11 @@ FTNMessageScanTossModule.prototype.performImport = function(cb) {
return cb(new Error('Missing or invalid configuration'));
}
-var self = this;
+const self = this;
async.each( [ 'inbound', 'secInbound' ], (inboundType, nextDir) => {
-self.importMessagesFromDirectory(inboundType, self.moduleConfig.paths[inboundType], err => {
-nextDir();
+self.importFromDirectory(inboundType, self.moduleConfig.paths[inboundType], () => {
+return nextDir(null);
});
}, cb);
};

core/tic_file_info.js (new file, 271 lines)

View File

@@ -0,0 +1,271 @@
/* jslint node: true */
'use strict';
// ENiGMA½
const Address = require('./ftn_address.js');
const Errors = require('./enig_error.js').Errors;
const EnigAssert = require('./enigma_assert.js');
// deps
const fs = require('fs');
const CRC32 = require('./crc.js').CRC32;
const _ = require('lodash');
const async = require('async');
const paths = require('path');
const crypto = require('crypto');
//
// Class to read and hold information from a TIC file
//
// * FSP-1039.001 @ http://ftsc.org/docs/old/fsp-1039.001
// * FSC-0087.001 @ http://ftsc.org/docs/fsc-0087.001
//
module.exports = class TicFileInfo {
constructor() {
this.entries = new Map();
}
static get requiredFields() {
return [
'Area', 'Origin', 'From', 'File', 'Crc',
// :TODO: validate this:
//'Path', 'Seenby' // these two are questionable; some systems don't send them?
];
}
get(key) {
return this.entries.get(key.toLowerCase());
}
getAsString(key, joinWith) {
const value = this.get(key);
if(value) {
//
// We call toString() on values to ensure numbers, addresses, etc. are converted
//
joinWith = joinWith || '';
if(Array.isArray(value)) {
return value.map(v => v.toString() ).join(joinWith);
}
return value.toString();
}
}
get filePath() {
return paths.join(paths.dirname(this.path), this.getAsString('File'));
}
get longFileName() {
return this.getAsString('Lfile') || this.getAsString('Fullname') || this.getAsString('File');
}
hasRequiredFields() {
const req = TicFileInfo.requiredFields;
return req.every( f => this.get(f) );
}
validate(config, cb) {
// config.nodes
// config.defaultPassword (optional)
// config.localAreaTags
EnigAssert(config.nodes && config.localAreaTags);
const self = this;
async.waterfall(
[
function initial(callback) {
if(!self.hasRequiredFields()) {
return callback(Errors.Invalid('One or more required fields missing from TIC'));
}
const area = self.getAsString('Area').toUpperCase();
const localInfo = {
areaTag : config.localAreaTags.find( areaTag => areaTag.toUpperCase() === area ),
};
if(!localInfo.areaTag) {
return callback(Errors.Invalid(`No local area for "Area" of ${area}`));
}
const from = self.getAsString('From');
localInfo.node = Object.keys(config.nodes).find( nodeAddr => Address.fromString(nodeAddr).isPatternMatch(from) );
if(!localInfo.node) {
return callback(Errors.Invalid('TIC is not from a known node'));
}
// if we require a password, "PW" must match
const passActual = _.get(config.nodes, [ localInfo.node, 'tic', 'password' ] ) || config.defaultPassword;
if(!passActual) {
return callback(null, localInfo); // no pw validation
}
const passTic = self.getAsString('Pw');
if(passTic !== passActual) {
return callback(Errors.Invalid('Bad TIC password'));
}
return callback(null, localInfo);
},
function checksumAndSize(localInfo, callback) {
const crcTic = self.get('Crc');
const stream = fs.createReadStream(self.filePath);
const crc = new CRC32();
let sizeActual = 0;
let sha256Tic = self.getAsString('Sha256');
let sha256;
if(sha256Tic) {
sha256Tic = sha256Tic.toLowerCase();
sha256 = crypto.createHash('sha256');
}
stream.on('data', data => {
sizeActual += data.length;
// sha256 if possible, else crc32
if(sha256) {
sha256.update(data);
} else {
crc.update(data);
}
});
stream.on('end', () => {
// again, use sha256 if possible
if(sha256) {
const sha256Actual = sha256.digest('hex');
if(sha256Tic !== sha256Actual) {
return callback(Errors.Invalid(`TIC "Sha256" of ${sha256Tic} does not match actual SHA-256 of ${sha256Actual}`));
}
localInfo.sha256 = sha256Actual;
} else {
const crcActual = crc.finalize();
if(crcActual !== crcTic) {
return callback(Errors.Invalid(`TIC "Crc" of ${crcTic} does not match actual CRC-32 of ${crcActual}`));
}
localInfo.crc32 = crcActual;
}
const sizeTic = self.get('Size');
if(_.isUndefined(sizeTic)) {
return callback(null, localInfo);
}
if(sizeTic !== sizeActual) {
return callback(Errors.Invalid(`TIC "Size" of ${sizeTic} does not match actual size of ${sizeActual}`));
}
return callback(null, localInfo);
});
stream.on('error', err => {
return callback(err);
});
}
],
(err, localInfo) => {
return cb(err, localInfo);
}
);
}
isToAddress(address, allowNonExplicit) {
//
// FSP-1039.001:
// "This keyword specifies the FTN address of the system where to
// send the file to be distributed and the accompanying TIC file.
// Some File processors (Allfix) only insert a line with this
// keyword when the file and the associated TIC file are to be
// file routed through a third sysem instead of being processed
// by a file processor on that system. Others always insert it.
// Note that the To keyword may cause problems when the TIC file
// is proecessed by software that does not recognise it and
// passes the line "as is" to other systems.
//
// Example: To 292/854
//
// This is an optional keyword."
//
const to = this.get('To');
if(!to) {
return allowNonExplicit;
}
return address.isEqual(to);
}
static createFromFile(path, cb) {
fs.readFile(path, 'utf8', (err, ticData) => {
if(err) {
return cb(err);
}
const ticFileInfo = new TicFileInfo();
ticFileInfo.path = path;
//
// Lines in a TIC file should be separated by CRLF (DOS)
// may be separated by LF (UNIX)
//
const lines = ticData.split(/\r\n|\n/g);
let keyEnd;
let key;
let value;
let entry;
lines.forEach(line => {
keyEnd = line.indexOf(' ');
if(keyEnd < 0) {
keyEnd = line.length;
}
key = line.substr(0, keyEnd).toLowerCase();
if(0 === key.length) {
return;
}
value = line.substr(keyEnd + 1).trim();
// convert well known keys to a more reasonable format
switch(key) {
case 'origin' :
case 'from' :
case 'seenby' :
case 'to' :
value = Address.fromString(value);
break;
case 'crc' :
value = parseInt(value, 16); // CRC-32 is given in hex
break;
case 'size' :
value = parseInt(value, 10); // Size is decimal bytes
break;
default :
break;
}
entry = ticFileInfo.entries.get(key);
if(entry) {
if(!Array.isArray(entry)) {
entry = [ entry ];
ticFileInfo.entries.set(key, entry);
}
entry.push(value);
} else {
ticFileInfo.entries.set(key, value);
}
});
return cb(null, ticFileInfo);
});
}
};
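To make the parser concrete, here is an illustrative TIC file and how the tosser would load it; all field values are examples (see FSP-1039.001 / FSC-0087.001 for the full keyword set):

```js
// Example contents of RETRONET.TIC sitting next to RETRONET.ZIP in the inbound dir
// (illustrative values; keys are case-insensitive and repeated keys such as Ldesc become arrays):
//
//   Area    RETRONET
//   Origin  46:1/100
//   From    46:1/100
//   File    RETRONET.ZIP
//   Lfile   retronet_info_pack.zip
//   Desc    RetroNet info pack
//   Ldesc   Long description line one
//   Ldesc   Long description line two
//   Size    102400
//   Crc     1A2B3C4D
//   Pw      TICPASS
//   Seenby  46:1/100
//
const TicFileInfo = require('./tic_file_info.js');

TicFileInfo.createFromFile('/enigma-bbs/mail/ftn_secin/RETRONET.TIC', (err, tic) => {
    if(!err) {
        console.log(tic.getAsString('Ldesc', '\n'));    // joined long description
        console.log(tic.filePath);                      // .../ftn_secin/RETRONET.ZIP
    }
});
```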

View File

@@ -385,6 +385,11 @@ exports.getModule = class UploadModule extends MenuModule {
'Failed moving physical upload file', { error : err.message, fileName : newEntry.fileName, source : src, dest : dst }
);
+if(dst !== finalPath) {
+// name changed; adjust before persist
+newEntry.fileName = paths.basename(finalPath);
+}
return nextEntry(null); // still try next file
}