diff --git a/core/config.js b/core/config.js
index 6a5405b9..87432fa7 100644
--- a/core/config.js
+++ b/core/config.js
@@ -500,6 +500,8 @@ function getDefaultConfig() {
                 outbound        : paths.join(__dirname, './../mail/ftn_out/'),
                 inbound         : paths.join(__dirname, './../mail/ftn_in/'),
                 secInbound      : paths.join(__dirname, './../mail/ftn_secin/'),
+                reject          : paths.join(__dirname, './../mail/reject/'),  //  bad pkt, bundles, TIC attachments that fail any check, etc.
+                //  set 'retain' to a valid path to keep good pkt files
             },
 
             //
@@ -509,6 +511,12 @@ function getDefaultConfig() {
             //  packetTargetByteSize : 512000,      //  512k, before placing messages in a new pkt
             bundleTargetByteSize    : 2048000,      //  2M, before creating another archive
+
+            tic : {
+                secureInOnly    : true,             //  only bring in from secure inbound (|secInbound| path, password protected)
+                uploadBy        : 'ENiGMA TIC',     //  default upload-by username (override per network/node)
+                allowReplace    : false,            //  honor the "Replaces" TIC field
+            }
         }
     },
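For reference, a sketch of how a system might override these new defaults in its own configuration. Only the property paths are taken from code in this changeset (scannerTossers.ftn_bso.paths.retain/reject, the tic block, per-node tic overrides, and the ticAreas alias map read by ftn_bso.js below); the addresses, tags, and values are illustrative assumptions, and the surrounding config layout is abbreviated.

//  sketch only -- values are examples, not required settings
scannerTossers : {
    ftn_bso : {
        paths : {
            retain  : '/enigma-bbs/mail/retain/',   //  optional: keep copies of good .pkt files here
        },
        tic : {
            password        : 'TICPASS',            //  default TIC "Pw" used during validation
            uploadBy        : 'FileFix',            //  override the 'ENiGMA TIC' default
            allowReplace    : true,                 //  honor the TIC "Replaces" field globally
        },
        nodes : {
            '46:*' : {
                tic : {
                    uploadBy        : 'AgoraNet TIC',   //  per-node override
                    hashTags        : 'agoranet,new',   //  auto hash tags for TIC imports from this node
                    allowReplace    : true,
                }
            }
        },
        ticAreas : {
            agn_node : {                            //  remote TIC "Area" alias -> local area/storage mapping
                areaTag     : 'agn_node_info',
                storageTag  : 'msg_network',
                hashTags    : 'agoranet',
            }
        }
    }
}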
diff --git a/core/database.js b/core/database.js
index 2c327f4c..e3f8fc8a 100644
--- a/core/database.js
+++ b/core/database.js
@@ -263,7 +263,7 @@ const DB_INIT_TABLE = {
         dbs.file.run('PRAGMA foreign_keys = ON;');
 
         dbs.file.run(
-            // :TODO: should any of this be unique??
+            // :TODO: should any of this be unique -- file_sha256, unless dupes are allowed on the system
            `CREATE TABLE IF NOT EXISTS file (
                 file_id             INTEGER PRIMARY KEY,
                 area_tag            VARCHAR NOT NULL,
@@ -281,6 +281,11 @@ const DB_INIT_TABLE = {
             ON file (area_tag);`
         );
 
+        dbs.file.run(
+            `CREATE INDEX IF NOT EXISTS file_by_sha256_index
+            ON file (file_sha256);`
+        );
+
         dbs.file.run(
             `CREATE VIRTUAL TABLE IF NOT EXISTS file_fts USING fts4 (
                 content="file",
diff --git a/core/file_base_area.js b/core/file_base_area.js
index 48b98c24..5030f725 100644
--- a/core/file_base_area.js
+++ b/core/file_base_area.js
@@ -23,6 +23,7 @@ const iconv = require('iconv-lite');
 exports.isInternalArea                  = isInternalArea;
 exports.getAvailableFileAreas           = getAvailableFileAreas;
 exports.getSortedAvailableFileAreas     = getSortedAvailableFileAreas;
+exports.isValidStorageTag               = isValidStorageTag;
 exports.getAreaStorageDirectoryByTag    = getAreaStorageDirectoryByTag;
 exports.getAreaDefaultStorageDirectory  = getAreaDefaultStorageDirectory;
 exports.getAreaStorageLocations         = getAreaStorageLocations;
@@ -129,6 +130,10 @@ function changeFileAreaWithOptions(client, areaTag, options, cb) {
     );
 }
 
+function isValidStorageTag(storageTag) {
+    return storageTag in Config.fileBase.storageTags;
+}
+
 function getAreaStorageDirectoryByTag(storageTag) {
     const storageLocation = (storageTag && Config.fileBase.storageTags[storageTag]);
 
@@ -428,6 +433,7 @@ function scanFile(filePath, options, iterator, cb) {
         hashTags    : options.hashTags, //  Set() or Array
         fileName    : paths.basename(filePath),
         storageTag  : options.storageTag,
+        fileSha256  : options.sha256,   //  caller may know this already
     });
 
     const stepInfo = {
@@ -455,6 +461,19 @@ function scanFile(filePath, options, iterator, cb) {
 
     let lastCalcHashPercent;
 
+    //  don't re-calc hashes for any we already have in |options|
+    const hashesToCalc = HASH_NAMES.filter(hn => {
+        if('sha256' === hn && fileEntry.fileSha256) {
+            return false;
+        }
+
+        if(`file_${hn}` in fileEntry.meta) {
+            return false;
+        }
+
+        return true;
+    });
+
     async.waterfall(
         [
             function startScan(callback) {
@@ -472,17 +491,19 @@ function scanFile(filePath, options, iterator, cb) {
             function processPhysicalFileGeneric(callback) {
                 stepInfo.bytesProcessed = 0;
 
-                const hashes = {
-                    sha1    : crypto.createHash('sha1'),
-                    sha256  : crypto.createHash('sha256'),
-                    md5     : crypto.createHash('md5'),
-                    crc32   : new CRC32(),
-                };
+                const hashes = {};
+                hashesToCalc.forEach(hashName => {
+                    if('crc32' === hashName) {
+                        hashes.crc32 = new CRC32();
+                    } else {
+                        hashes[hashName] = crypto.createHash(hashName);
+                    }
+                });
 
                 const stream = fs.createReadStream(filePath);
 
                 function updateHashes(data) {
-                    async.each( HASH_NAMES, (hashName, nextHash) => {
+                    async.each(hashesToCalc, (hashName, nextHash) => {
                         hashes[hashName].update(data);
                         return nextHash(null);
                     }, () => {
@@ -519,7 +540,7 @@ function scanFile(filePath, options, iterator, cb) {
                 stream.on('end', () => {
                     fileEntry.meta.byte_size = stepInfo.bytesProcessed;
 
-                    async.each(HASH_NAMES, (hashName, nextHash) => {
+                    async.each(hashesToCalc, (hashName, nextHash) => {
                         if('sha256' === hashName) {
                             stepInfo.sha256 = fileEntry.fileSha256 = hashes.sha256.digest('hex');
                         } else if('sha1' === hashName || 'md5' === hashName) {
diff --git a/core/file_entry.js b/core/file_entry.js
index 2baee1e2..35241d7d 100644
--- a/core/file_entry.js
+++ b/core/file_entry.js
@@ -28,6 +28,10 @@ const FILE_WELL_KNOWN_META = {
     dl_count        : (d) => parseInt(d) || 0,
     byte_size       : (b) => parseInt(b) || 0,
     archive_type    : null,
+    short_file_name : null, //  e.g. DOS 8.3 filename, available in some scenarios such as TIC import
+    tic_origin      : null, //  TIC "Origin"
+    tic_desc        : null, //  TIC "Desc"
+    tic_ldesc       : null, //  TIC "Ldesc" joined by '\n'
 };
 
 module.exports = class FileEntry {
@@ -44,13 +48,11 @@ module.exports = class FileEntry {
         this.hashTags   = options.hashTags || new Set();
         this.fileName   = options.fileName;
         this.storageTag = options.storageTag;
+        this.fileSha256 = options.fileSha256;
     }
 
     static loadBasicEntry(fileId, dest, cb) {
-        if(!cb && _.isFunction(dest)) {
-            cb      = dest;
-            dest    = this;
-        }
+        dest = dest || {};
 
         fileDb.get(
             `SELECT ${FILE_TABLE_MEMBERS.join(', ')}
@@ -72,7 +74,7 @@ module.exports = class FileEntry {
                     dest[_.camelCase(prop)] = file[prop];
                 });
 
-                return cb(null);
+                return cb(null, dest);
             }
         );
     }
@@ -101,26 +103,51 @@ module.exports = class FileEntry {
         );
     }
 
-    persist(cb) {
+    persist(isUpdate, cb) {
+        if(!cb && _.isFunction(isUpdate)) {
+            cb          = isUpdate;
+            isUpdate    = false;
+        }
+
         const self = this;
 
+        let inTransaction = false;
+
         async.series(
             [
+                function check(callback) {
+                    if(isUpdate && !self.fileId) {
+                        return callback(Errors.Invalid('Cannot update file entry without an existing "fileId" member'));
+                    }
+                    return callback(null);
+                },
                 function startTrans(callback) {
                     return fileDb.run('BEGIN;', callback);
                 },
                 function storeEntry(callback) {
-                    fileDb.run(
-                        `REPLACE INTO file (area_tag, file_sha256, file_name, storage_tag, desc, desc_long, upload_timestamp)
-                        VALUES(?, ?, ?, ?, ?, ?, ?);`,
-                        [ self.areaTag, self.fileSha256, self.fileName, self.storageTag, self.desc, self.descLong, getISOTimestampString() ],
-                        function inserted(err) {    //  use non-arrow func for 'this' scope / lastID
-                            if(!err) {
-                                self.fileId = this.lastID;
-                            }
-                            return callback(err);
-                        }
-                    );
+                    inTransaction = true;
+
+                    if(isUpdate) {
+                        fileDb.run(
+                            `REPLACE INTO file (file_id, area_tag, file_sha256, file_name, storage_tag, desc, desc_long, upload_timestamp)
+                            VALUES(?, ?, ?, ?, ?, ?, ?, ?);`,
+                            [ self.fileId, self.areaTag, self.fileSha256, self.fileName, self.storageTag, self.desc, self.descLong, getISOTimestampString() ],
+                            err => {
+                                return callback(err);
+                            }
+                        );
+                    } else {
+                        fileDb.run(
+                            `REPLACE INTO file (area_tag, file_sha256, file_name, storage_tag, desc, desc_long, upload_timestamp)
+                            VALUES(?, ?, ?, ?, ?, ?, ?);`,
+                            [ self.areaTag, self.fileSha256, self.fileName, self.storageTag, self.desc, self.descLong, getISOTimestampString() ],
+                            function inserted(err) {    //  use non-arrow func for 'this' scope / lastID
+                                if(!err) {
+                                    self.fileId = this.lastID;
+                                }
+                                return callback(err);
+                            }
+                        );
+                    }
                 },
                 function storeMeta(callback) {
                     async.each(Object.keys(self.meta), (n, next) => {
@@ -143,9 +170,13 @@
             ],
             err => {
                 //  :TODO: Log orig err
-                fileDb.run(err ? 'ROLLBACK;' : 'COMMIT;', err => {
+                if(inTransaction) {
+                    fileDb.run(err ? 'ROLLBACK;' : 'COMMIT;', err => {
+                        return cb(err);
+                    });
+                } else {
                     return cb(err);
-                });
+                }
             }
         );
     }
@@ -350,43 +381,67 @@
         if(filter.sort && filter.sort.length > 0) {
             if(Object.keys(FILE_WELL_KNOWN_META).indexOf(filter.sort) > -1) {  //  sorting via a meta value?
                 sql =
-                    `SELECT f.file_id
+                    `SELECT DISTINCT f.file_id
                     FROM file f, file_meta m`;
 
-                appendWhereClause(`f.file_id = m.file_id AND m.meta_name="${filter.sort}"`);
+                appendWhereClause(`f.file_id = m.file_id AND m.meta_name = "${filter.sort}"`);
 
                 sqlOrderBy = `${getOrderByWithCast('m.meta_value')} ${sqlOrderDir}`;
             } else {
                 //  additional special treatment for user ratings: we need to average them
                 if('user_rating' === filter.sort) {
                     sql =
-                        `SELECT f.file_id,
+                        `SELECT DISTINCT f.file_id,
                             (SELECT IFNULL(AVG(rating), 0) rating
                             FROM file_user_rating
                             WHERE file_id = f.file_id) AS avg_rating
-                        FROM file f`;
+                        FROM file f, file_meta m`;
 
                     sqlOrderBy = `ORDER BY avg_rating ${sqlOrderDir}`;
                 } else {
                     sql =
-                        `SELECT f.file_id, f.${filter.sort}
-                        FROM file f`;
+                        `SELECT DISTINCT f.file_id, f.${filter.sort}
+                        FROM file f, file_meta m`;
 
                     sqlOrderBy = getOrderByWithCast(`f.${filter.sort}`) + ' ' + sqlOrderDir;
                 }
             }
         } else {
             sql =
-                `SELECT f.file_id
-                FROM file f`;
+                `SELECT DISTINCT f.file_id
+                FROM file f, file_meta m`;
 
             sqlOrderBy = `${getOrderByWithCast('f.file_id')} ${sqlOrderDir}`;
         }
 
         if(filter.areaTag && filter.areaTag.length > 0) {
-            appendWhereClause(`f.area_tag="${filter.areaTag}"`);
+            appendWhereClause(`f.area_tag = "${filter.areaTag}"`);
+        }
+
+        if(filter.metaPairs && filter.metaPairs.length > 0) {
+
+            filter.metaPairs.forEach(mp => {
+                if(mp.wcValue) {
+                    //  convert any * -> % and ? -> _ for SQLite LIKE syntax - see https://www.sqlite.org/lang_expr.html
+                    mp.value = mp.value.replace(/\*/g, '%').replace(/\?/g, '_');
+                    appendWhereClause(
+                        `f.file_id IN (
+                            SELECT file_id
+                            FROM file_meta
+                            WHERE meta_name = "${mp.name}" AND meta_value LIKE "${mp.value}"
+                        )`
+                    );
+                } else {
+                    appendWhereClause(
+                        `f.file_id IN (
+                            SELECT file_id
+                            FROM file_meta
+                            WHERE meta_name = "${mp.name}" AND meta_value = "${mp.value}"
+                        )`
+                    );
+                }
+            });
         }
 
         if(filter.storageTag && filter.storageTag.length > 0) {
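A sketch of how a caller can use the new metaPairs filter; wcValue patterns use DOS-style * and ?, which findFiles() translates to SQL LIKE wildcards as shown above. The area tag and values here are hypothetical; the TIC "Replaces" handling later in this changeset builds exactly this kind of query.

const FileEntry = require('./core/file_entry.js');

FileEntry.findFiles(
    {
        areaTag     : 'retronet_file',          //  hypothetical area tag
        metaPairs   : [
            {
                name    : 'short_file_name',
                value   : 'RETRONET.*',         //  DOS 8.3 pattern; * / ? become SQL % / _
                wcValue : true,
            },
            {
                name    : 'tic_origin',
                value   : 'Some BBS (46:1/100)',    //  exact match when wcValue is not set
            },
        ],
    },
    (err, fileIds) => {
        //  fileIds: matching file_id values; the "Replaces" logic expects 0 or 1 matches
    }
);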
diff --git a/core/file_util.js b/core/file_util.js
index e2ea6e90..9452b23f 100644
--- a/core/file_util.js
+++ b/core/file_util.js
@@ -2,30 +2,43 @@
 'use strict';
 
 //  ENiGMA½
+const EnigAssert    = require('./enigma_assert.js');
 
 //  deps
-const fse   = require('fs-extra');
-const paths = require('path');
-const async = require('async');
+const fse           = require('fs-extra');
+const paths         = require('path');
+const async         = require('async');
 
 exports.moveFileWithCollisionHandling   = moveFileWithCollisionHandling;
+exports.copyFileWithCollisionHandling   = copyFileWithCollisionHandling;
 exports.pathWithTerminatingSeparator    = pathWithTerminatingSeparator;
 
-//
-//  Move |src| -> |dst| renaming to file(1).ext, file(2).ext, etc.
-//  in the case of collisions.
-//
-function moveFileWithCollisionHandling(src, dst, cb) {
+function moveOrCopyFileWithCollisionHandling(src, dst, operation, cb) {
+    operation = operation || 'copy';
+
     const dstPath       = paths.dirname(dst);
     const dstFileExt    = paths.extname(dst);
     const dstFileSuffix = paths.basename(dst, dstFileExt);
 
+    EnigAssert('move' === operation || 'copy' === operation);
+
     let renameIndex = 0;
-    let movedOk     = false;
+    let opOk        = false;
    let tryDstPath;
 
+    function tryOperation(src, dst, callback) {
+        if('move' === operation) {
+            fse.move(src, dst, err => {
+                return callback(err);
+            });
+        } else if('copy' === operation) {
+            fse.copy(src, dst, { overwrite : false, errorOnExist : true }, err => {
+                return callback(err);
+            });
+        }
+    }
+
     async.until(
-        () => movedOk,  //  until moved OK
+        () => opOk, //  until the move or copy has completed OK
         (cb) => {
             if(0 === renameIndex) {
                 //  try originally supplied path first
@@ -34,9 +47,11 @@ function moveFileWithCollisionHandling(src, dst, cb) {
                 tryDstPath = paths.join(dstPath, `${dstFileSuffix}(${renameIndex})${dstFileExt}`);
             }
 
-            fse.move(src, tryDstPath, err => {
+            tryOperation(src, tryDstPath, err => {
                 if(err) {
-                    if('EEXIST' === err.code) {
+                    //  for some reason fs-extra copy doesn't pass err.code
+                    //  :TODO: this is dangerous: submit a PR to fs-extra to set EEXIST
+                    if('EEXIST' === err.code || 'copy' === operation) {
                         renameIndex += 1;
                         return cb(null);    //  keep trying
                     }
@@ -44,7 +59,7 @@ function moveFileWithCollisionHandling(src, dst, cb) {
                     return cb(err);
                 }
 
-                movedOk = true;
+                opOk = true;
                 return cb(null, tryDstPath);
             });
         },
@@ -54,6 +69,18 @@ function moveFileWithCollisionHandling(src, dst, cb) {
     );
 }
 
+//
+//  Move |src| -> |dst| renaming to file(1).ext, file(2).ext, etc.
+//  in the case of collisions.
+//
+function moveFileWithCollisionHandling(src, dst, cb) {
+    return moveOrCopyFileWithCollisionHandling(src, dst, 'move', cb);
+}
+
+function copyFileWithCollisionHandling(src, dst, cb) {
+    return moveOrCopyFileWithCollisionHandling(src, dst, 'copy', cb);
+}
+
 function pathWithTerminatingSeparator(path) {
     if(path && paths.sep !== path.charAt(path.length - 1)) {
         path = path + paths.sep;
diff --git a/core/ftn_address.js b/core/ftn_address.js
index 616b4965..9edb3819 100644
--- a/core/ftn_address.js
+++ b/core/ftn_address.js
@@ -1,7 +1,7 @@
 /* jslint node: true */
 'use strict';
 
-let _ = require('lodash');
+const _ = require('lodash');
 
 const FTN_ADDRESS_REGEXP   = /^([0-9]+:)?([0-9]+)(\/[0-9]+)?(\.[0-9]+)?(@[a-z0-9\-\.]+)?$/i;
 const FTN_PATTERN_REGEXP   = /^([0-9\*]+:)?([0-9\*]+)(\/[0-9\*]+)?(\.[0-9\*]+)?(@[a-z0-9\-\.\*]+)?$/i;
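A brief usage sketch of the new file_util.js export above. The paths are hypothetical; the behavior is the rename-on-collision scheme described in the comment.

const copyFileWithCollisionHandling = require('./core/file_util.js').copyFileWithCollisionHandling;

//  hypothetical paths for illustration
copyFileWithCollisionHandling('/tmp/inbound/RETRONET.ZIP', '/file_base/uploads/RETRONET.ZIP', (err, finalPath) => {
    //  if RETRONET.ZIP already exists at the destination, finalPath becomes
    //  /file_base/uploads/RETRONET(1).ZIP, RETRONET(2).ZIP, ... instead
});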
diff --git a/core/scanner_tossers/ftn_bso.js b/core/scanner_tossers/ftn_bso.js
index d7734826..145b230d 100644
--- a/core/scanner_tossers/ftn_bso.js
+++ b/core/scanner_tossers/ftn_bso.js
@@ -2,16 +2,26 @@
 'use strict';
 
 //  ENiGMA½
-const MessageScanTossModule = require('../msg_scan_toss_module.js').MessageScanTossModule;
-const Config                = require('../config.js').config;
-const ftnMailPacket         = require('../ftn_mail_packet.js');
-const ftnUtil               = require('../ftn_util.js');
-const Address               = require('../ftn_address.js');
-const Log                   = require('../logger.js').log;
-const ArchiveUtil           = require('../archive_util.js');
-const msgDb                 = require('../database.js').dbs.message;
-const Message               = require('../message.js');
+const MessageScanTossModule         = require('../msg_scan_toss_module.js').MessageScanTossModule;
+const Config                        = require('../config.js').config;
+const ftnMailPacket                 = require('../ftn_mail_packet.js');
+const ftnUtil                       = require('../ftn_util.js');
+const Address                       = require('../ftn_address.js');
+const Log                           = require('../logger.js').log;
+const ArchiveUtil                   = require('../archive_util.js');
+const msgDb                         = require('../database.js').dbs.message;
+const Message                       = require('../message.js');
+const TicFileInfo                   = require('../tic_file_info.js');
+const Errors                        = require('../enig_error.js').Errors;
+const FileEntry                     = require('../file_entry.js');
+const scanFile                      = require('../file_base_area.js').scanFile;
+const getFileAreaByTag              = require('../file_base_area.js').getFileAreaByTag;
+const getDescFromFileName           = require('../file_base_area.js').getDescFromFileName;
+const copyFileWithCollisionHandling = require('../file_util.js').copyFileWithCollisionHandling;
+const getAreaStorageDirectoryByTag  = require('../file_base_area.js').getAreaStorageDirectoryByTag;
+const isValidStorageTag             = require('../file_base_area.js').isValidStorageTag;
 
 //  deps
 const moment    = require('moment');
 const _         = require('lodash');
 const paths     = require('path');
@@ -784,7 +794,7 @@ function FTNMessageScanTossModule() {
             exportType,
             exportOpts.fileCase
         );
-        
+
         //  directive of '^' = delete file after transfer
         self.flowFileAppendRefs(flowFilePath, [ newPath ], '^', err => {
             if(err) {
@@ -1012,23 +1022,37 @@ function FTNMessageScanTossModule() {
             cb(err);
         });
     };
-    
-    this.archivePacketFile = function(type, origPath, label, cb) {
-        if('import' === type && _.isString(self.moduleConfig.retainImportPacketPath)) {
-            const archivePath = paths.join(
-                self.moduleConfig.retainImportPacketPath,
-                `${label}-${moment().format('YYYY-MM-DDTHH.mm.ss.SSS')}-${paths.basename(origPath)}`);
-
-            fse.copy(origPath, archivePath, err => {
-                if(err) {
-                    Log.warn( { origPath : origPath, archivePath : archivePath }, 'Failed to archive packet file');
-                }
-                cb(null);   //  non-fatal always
-            });
+
+    this.maybeArchiveImportFile = function(origPath, type, status, cb) {
+        //
+        //  type    : pkt|tic|bundle
+        //  status  : good|reject
+        //
+        //  A status of "good" is only applied to pkt files & such files are placed
+        //  in |retain| if that path is set. This is generally used for debugging only.
+        //
+        let archivePath;
+        const ts = moment().format('YYYY-MM-DDTHH.mm.ss.SSS');
+        const fn = paths.basename(origPath);
+
+        if('good' === status && type === 'pkt') {
+            if(!_.isString(self.moduleConfig.paths.retain)) {
+                return cb(null);
+            }
+
+            archivePath = paths.join(self.moduleConfig.paths.retain, `good-pkt-${ts}--${fn}`);
         } else {
-            cb(null);   //  NYI
+            archivePath = paths.join(self.moduleConfig.paths.reject, `${status}-${type}--${ts}-${fn}`);
         }
-    }
+
+        fse.copy(origPath, archivePath, err => {
+            if(err) {
+                Log.warn( { error : err.message, origPath : origPath, archivePath : archivePath, type : type, status : status }, 'Failed to archive import file');
+            }
+
+            return cb(null);    //  never fatal
+        });
+    };
 
     this.importPacketFilesFromDirectory = function(importDir, password, cb) {
         async.waterfall(
             [
@@ -1060,28 +1084,19 @@
                     });
                 },
                 function handleProcessedFiles(packetFiles, rejects, callback) {
-                    async.each(packetFiles, (packetFile, nextFile) => {
+                    async.each(packetFiles, (packetFile, nextFile) => {
+                        //  possibly archive, then remove the original
                         const fullPath = paths.join(importDir, packetFile);
-
-                        //
-                        //  If scannerTossers::ftn_bso::reainImportPacketPath is set,
-                        //  copy each packet file over in the following format:
-                        //
-                        //  --
-                        //
-                        if(rejects.indexOf(packetFile) > -1) {
-                            self.archivePacketFile('import', fullPath, 'reject', () => {
-                                nextFile();
-                            });
-                            //  :TODO: rename to .bad, perhaps move to a rejects dir + log
-                            //nextFile();
-                        } else {
-                            self.archivePacketFile('import', fullPath, 'imported', () => {
+                        self.maybeArchiveImportFile(
+                            fullPath,
+                            'pkt',
+                            rejects.includes(packetFile) ? 'reject' : 'good',
+                            () => {
                                 fs.unlink(fullPath, () => {
-                                    nextFile();
+                                    return nextFile(null);
                                 });
-                            });
-                        }
+                            }
+                        );
                     }, err => {
                         callback(err);
                     });
@@ -1093,7 +1108,7 @@
         );
     };
 
-    this.importMessagesFromDirectory = function(inboundType, importDir, cb) {
+    this.importFromDirectory = function(inboundType, importDir, cb) {
         async.waterfall(
             [
                 //  start with .pkt files
@@ -1144,7 +1159,7 @@
                     err => {
                         if(err) {
                             Log.warn(
-                                { fileName : bundleFile.path, error : err.toString() },
+                                { path : bundleFile.path, error : err.message },
                                 'Failed to extract bundle');
 
                             rejects.push(bundleFile.path);
@@ -1169,17 +1184,25 @@
             },
             function handleProcessedBundleFiles(bundleFiles, rejects, callback) {
                 async.each(bundleFiles, (bundleFile, nextFile) => {
-                    if(rejects.indexOf(bundleFile.path) > -1) {
-                        //  :TODO: rename to .bad, perhaps move to a rejects dir + log
-                        nextFile();
-                    } else {
-                        fs.unlink(bundleFile.path, err => {
-                            nextFile();
-                        });
-                    }
+                    self.maybeArchiveImportFile(
+                        bundleFile.path,
+                        'bundle',
+                        rejects.includes(bundleFile.path) ? 'reject' : 'good',
+                        () => {
+                            fs.unlink(bundleFile.path, err => {
+                                if(err) {
+                                    Log.error( { path : bundleFile.path, error : err.message }, 'Failed unlinking bundle');
+                                }
+                                return nextFile(null);
+                            });
+                        }
+                    );
                 }, err => {
                     callback(err);
                 });
+            },
+            function importTicFiles(callback) {
+                self.processTicFilesInDirectory(importDir, err => {
+                    return callback(err);
+                });
             }
         ],
         err => {
@@ -1218,12 +1241,331 @@
     this.exportingEnd = function() {
         this.exportRunning = false;
     };
+
+    this.copyTicAttachment = function(src, dst, isUpdate, cb) {
+        if(isUpdate) {
+            fse.copy(src, dst, err => {
+                return cb(err, dst);
+            });
+        } else {
+            copyFileWithCollisionHandling(src, dst, (err, finalPath) => {
+                return cb(err, finalPath);
+            });
+        }
+    };
+
+    this.getLocalAreaTagsForTic = function() {
+        return _.union(Object.keys(Config.scannerTossers.ftn_bso.ticAreas || {} ), Object.keys(Config.fileBase.areas));
+    };
+
+    this.processSingleTicFile = function(ticFileInfo, cb) {
+        const self = this;
+
+        Log.debug( { tic : ticFileInfo.path, file : ticFileInfo.getAsString('File') }, 'Processing TIC file');
+
+        async.waterfall(
+            [
+                function generalValidation(callback) {
+                    const config = {
+                        nodes           : Config.scannerTossers.ftn_bso.nodes,
+                        defaultPassword : Config.scannerTossers.ftn_bso.tic.password,
+                        localAreaTags   : self.getLocalAreaTagsForTic(),
+                    };
+
+                    return ticFileInfo.validate(config, (err, localInfo) => {
+                        if(err) {
+                            return callback(err);
+                        }
+
+                        //  We may need to map |localAreaTag| back to the real areaTag if it's a mapping/alias
+                        const mappedLocalAreaTag = _.get(Config.scannerTossers.ftn_bso, [ 'ticAreas', localInfo.areaTag ]);
+
+                        if(mappedLocalAreaTag) {
+                            if(_.isString(mappedLocalAreaTag.areaTag)) {
+                                localInfo.areaTag       = mappedLocalAreaTag.areaTag;
+                                localInfo.hashTags      = mappedLocalAreaTag.hashTags;     //  override default for node
+                                localInfo.storageTag    = mappedLocalAreaTag.storageTag;   //  override default
+                            } else if(_.isString(mappedLocalAreaTag)) {
+                                localInfo.areaTag = mappedLocalAreaTag;
+                            }
+                        }
+
+                        return callback(null, localInfo);
+                    });
+                },
+                function findExistingItem(localInfo, callback) {
+                    //
+                    //  We will need to look for an existing item to replace/update if:
+                    //  a) The TIC file has a "Replaces" field
+                    //  b) The general or node specific |allowReplace| is true
+                    //
+                    //  "Replaces" specifies a DOS 8.3 *pattern* which is allowed to have
+                    //  ? and * characters. For example, RETRONET.*
+                    //
+                    //  Lastly, we will only replace if the item is in the same/specified area
+                    //  and comes from the same origin as the previous entry.
+                    //
+                    const allowReplace  = _.get(Config.scannerTossers.ftn_bso.nodes, [ localInfo.node, 'tic', 'allowReplace' ] ) || Config.scannerTossers.ftn_bso.tic.allowReplace;
+                    const replaces      = ticFileInfo.getAsString('Replaces');
+
+                    if(!allowReplace || !replaces) {
+                        return callback(null, localInfo);
+                    }
+
+                    const metaPairs = [
+                        {
+                            name    : 'short_file_name',
+                            value   : replaces.toUpperCase(),   //  we store upper case as well
+                            wcValue : true,                     //  value may contain wildcards
+                        },
+                        {
+                            name    : 'tic_origin',
+                            value   : ticFileInfo.getAsString('Origin'),
+                        }
+                    ];
+
+                    FileEntry.findFiles( { metaPairs : metaPairs, areaTag : localInfo.areaTag }, (err, fileIds) => {
+                        if(err) {
+                            return callback(err);
+                        }
+
+                        //  0:1 matches allowed
+                        if(1 === fileIds.length) {
+                            localInfo.existingFileId = fileIds[0];
+
+                            //  fetch the old filename - we may need to remove it if replacing with a new name
+                            FileEntry.loadBasicEntry(localInfo.existingFileId, {}, (err, info) => {
+                                if(!err && info) {
+                                    localInfo.oldFileName   = info.fileName;
+                                    localInfo.oldStorageTag = info.storageTag;
+                                }
+                                return callback(null, localInfo);
+                            });
+                        } else if(fileIds.length > 1) {
+                            return callback(Errors.General(`More than one existing entry for TIC in ${localInfo.areaTag} ([${fileIds.join(', ')}])`));
+                        } else {
+                            return callback(null, localInfo);
+                        }
+                    });
+                },
+                function scan(localInfo, callback) {
+                    let ldesc = ticFileInfo.getAsString('Ldesc', '\n');
+                    if(ldesc) {
+                        ldesc = ldesc.trim();
+                    }
+
+                    const scanOpts = {
+                        sha256  : localInfo.sha256, //  *may* have already been calculated
+                        meta    : {
+                            //  some TIC-related metadata we always want
+                            short_file_name     : ticFileInfo.getAsString('File').toUpperCase(),    //  upper case to avoid case issues later; this should be a DOS 8.3 name
+                            tic_origin          : ticFileInfo.getAsString('Origin'),
+                            tic_desc            : ticFileInfo.getAsString('Desc'),
+                            tic_ldesc           : ldesc,
+                            upload_by_username  : _.get(Config.scannerTossers.ftn_bso.nodes, [ localInfo.node, 'tic', 'uploadBy' ]) || Config.scannerTossers.ftn_bso.tic.uploadBy,
+                        }
+                    };
+
+                    //
+                    //  We may have TIC auto-tagging for this node and/or the specific (remote) area
+                    //
+                    const hashTags =
+                        localInfo.hashTags ||
+                        _.get(Config.scannerTossers.ftn_bso.nodes, [ localInfo.node, 'tic', 'hashTags' ] );    //  catch-all
+
+                    if(hashTags) {
+                        scanOpts.hashTags = new Set(hashTags.split(/[\s,]+/));
+                    }
+
+                    if(localInfo.crc32) {
+                        scanOpts.meta.file_crc32 = localInfo.crc32.toString(16);    //  again, *may* have already been calculated
+                    }
+
+                    scanFile(
+                        ticFileInfo.filePath,
+                        scanOpts,
+                        (err, fileEntry) => {
+                            localInfo.fileEntry = fileEntry;
+                            return callback(err, localInfo);
+                        }
+                    );
+                },
+                function store(localInfo, callback) {
+                    //
+                    //  Move the file to its final area storage and persist to the DB
+                    //
+                    const areaInfo = getFileAreaByTag(localInfo.areaTag);
+                    if(!areaInfo) {
+                        return callback(Errors.UnexpectedState(`Could not get area for tag ${localInfo.areaTag}`));
+                    }
+
+                    const storageTag = localInfo.storageTag || areaInfo.storageTags[0];
+                    if(!isValidStorageTag(storageTag)) {
+                        return callback(Errors.Invalid(`Invalid storage tag: ${storageTag}`));
+                    }
+
+                    localInfo.fileEntry.storageTag  = storageTag;
+                    localInfo.fileEntry.areaTag     = localInfo.areaTag;
+                    localInfo.fileEntry.fileName    = ticFileInfo.longFileName;
+
+                    //  we default to the .DIZ/etc. description, but fall back to the TIC's if needed
+                    if(!localInfo.fileEntry.desc || 0 === localInfo.fileEntry.desc.length) {
+                        localInfo.fileEntry.desc = ticFileInfo.getAsString('Ldesc') || ticFileInfo.getAsString('Desc') || getDescFromFileName(ticFileInfo.filePath);
+                    }
+
+                    const areaStorageDir = getAreaStorageDirectoryByTag(storageTag);
+                    if(!areaStorageDir) {
+                        return callback(Errors.UnexpectedState(`Could not get storage directory for tag ${localInfo.areaTag}`));
+                    }
+
+                    const isUpdate = localInfo.existingFileId ? true : false;
+
+                    if(isUpdate) {
+                        //  we need to *update* an existing record/file
+                        localInfo.fileEntry.fileId = localInfo.existingFileId;
+                    }
+
+                    const dst = paths.join(areaStorageDir, localInfo.fileEntry.fileName);
+
+                    self.copyTicAttachment(ticFileInfo.filePath, dst, isUpdate, (err, finalPath) => {
+                        if(err) {
+                            return callback(err);
+                        }
+
+                        if(dst !== finalPath) {
+                            localInfo.fileEntry.fileName = paths.basename(finalPath);
+                        }
+
+                        localInfo.fileEntry.persist(isUpdate, err => {
+                            return callback(err, localInfo);
+                        });
+                    });
+                },
+                //  :TODO: from here, we need to re-toss files if needed, before they are removed
+                function cleanupOldFile(localInfo, callback) {
+                    if(!localInfo.existingFileId) {
+                        return callback(null, localInfo);
+                    }
+
+                    const oldStorageDir = getAreaStorageDirectoryByTag(localInfo.oldStorageTag);
+                    const oldPath       = paths.join(oldStorageDir, localInfo.oldFileName);
+
+                    fs.unlink(oldPath, err => {
+                        if(err) {
+                            Log.warn( { error : err.message, oldPath : oldPath }, 'Failed removing old physical file during TIC replacement');
+                        } else {
+                            Log.debug( { oldPath : oldPath }, 'Removed old physical file during TIC replacement');
+                        }
+                        return callback(null, localInfo);   //  continue even on error
+                    });
+                },
+            ],
+            (err, localInfo) => {
+                if(err) {
+                    Log.error( { error : err.message, reason : err.reason, tic : ticFileInfo.path }, 'Failed to import/update TIC record' );
+                } else {
+                    Log.debug(
+                        { tic : ticFileInfo.path, file : ticFileInfo.filePath, area : localInfo.areaTag },
+                        'TIC imported successfully'
+                    );
+                }
+                return cb(err);
+            }
+        );
+    };
+
+    this.removeAssocTicFiles = function(ticFileInfo, cb) {
+        async.each( [ ticFileInfo.path, ticFileInfo.filePath ], (path, nextPath) => {
+            fs.unlink(path, err => {
+                if(err && 'ENOENT' !== err.code) {  //  don't log when the file doesn't exist
+                    Log.warn( { error : err.message, path : path }, 'Failed unlinking TIC file');
+                }
+                return nextPath(null);
+            });
+        }, err => {
+            return cb(err);
+        });
+    };
 }
 
 require('util').inherits(FTNMessageScanTossModule, MessageScanTossModule);
 
 //  :TODO: *scheduled* portion of this stuff should probably use event_scheduler - @immediate would still use record().
+FTNMessageScanTossModule.prototype.processTicFilesInDirectory = function(importDir, cb) {
+    //  :TODO: pass in 'inbound' vs 'secInbound' -- pass along to processSingleTicFile() where the password will be checked
+
+    const self = this;
+
+    async.waterfall(
+        [
+            function findTicFiles(callback) {
+                fs.readdir(importDir, (err, files) => {
+                    if(err) {
+                        return callback(err);
+                    }
+
+                    return callback(null, files.filter(f => '.tic' === paths.extname(f).toLowerCase()));
+                });
+            },
+            function gatherInfo(ticFiles, callback) {
+                const ticFilesInfo = [];
+
+                async.each(ticFiles, (fileName, nextFile) => {
+                    const fullPath = paths.join(importDir, fileName);
+
+                    TicFileInfo.createFromFile(fullPath, (err, ticInfo) => {
+                        if(err) {
+                            Log.warn( { error : err.message, path : fullPath }, 'Failed reading TIC file');
+                        } else {
+                            ticFilesInfo.push(ticInfo);
+                        }
+
+                        return nextFile(null);
+                    });
+                },
+                err => {
+                    return callback(err, ticFilesInfo);
+                });
+            },
+            function process(ticFilesInfo, callback) {
+                async.each(ticFilesInfo, (ticFileInfo, nextTicInfo) => {
+                    self.processSingleTicFile(ticFileInfo, err => {
+                        if(err) {
+                            //  archive rejected TIC stuff (.TIC + attachment)
+                            async.each( [ ticFileInfo.path, ticFileInfo.filePath ], (path, nextPath) => {
+                                if(!path) {     //  possibly rejected due to "File" not existing/etc.
+                                    return nextPath(null);
+                                }
+
+                                self.maybeArchiveImportFile(
+                                    path,
+                                    'tic',
+                                    'reject',
+                                    () => {
+                                        return nextPath(null);
+                                    }
+                                );
+                            },
+                            () => {
+                                self.removeAssocTicFiles(ticFileInfo, () => {
+                                    return nextTicInfo(null);
+                                });
+                            });
+                        } else {
+                            self.removeAssocTicFiles(ticFileInfo, () => {
+                                return nextTicInfo(null);
+                            });
+                        }
+                    });
+                }, err => {
+                    return callback(err);
+                });
+            }
+        ],
+        err => {
+            return cb(err);
+        }
+    );
+};
+
 FTNMessageScanTossModule.prototype.startup = function(cb) {
     Log.info(`${exports.moduleInfo.name} Scanner/Tosser starting up`);
 
@@ -1348,12 +1690,11 @@ FTNMessageScanTossModule.prototype.performImport = function(cb) {
         return cb(new Error('Missing or invalid configuration'));
     }
 
-    var self = this;
+    const self = this;
 
     async.each( [ 'inbound', 'secInbound' ], (inboundType, nextDir) => {
-        self.importMessagesFromDirectory(inboundType, self.moduleConfig.paths[inboundType], err => {
-            
-            nextDir();
+        self.importFromDirectory(inboundType, self.moduleConfig.paths[inboundType], () => {
+            return nextDir(null);
         });
     }, cb);
 };
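A sketch of the TIC handling the tosser drives above, shown standalone. The inbound path and local area tag are hypothetical; the config keys mirror what processSingleTicFile() passes to validate().

const TicFileInfo   = require('./core/tic_file_info.js');
const Config        = require('./core/config.js').config;

//  hypothetical secure inbound path
TicFileInfo.createFromFile('/enigma-bbs/mail/ftn_secin/00000001.TIC', (err, ticFileInfo) => {
    if(err) {
        return; //  could not read/parse the .TIC
    }

    const validateConfig = {
        nodes           : Config.scannerTossers.ftn_bso.nodes,
        defaultPassword : Config.scannerTossers.ftn_bso.tic.password,
        localAreaTags   : [ 'agn_node_info' ],  //  hypothetical local area tags
    };

    ticFileInfo.validate(validateConfig, (err, localInfo) => {
        //  on success, localInfo carries areaTag and node; sha256/crc32 may already be
        //  computed here, so the later scanFile() call can skip recalculating them
    });
});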
diff --git a/core/tic_file_info.js b/core/tic_file_info.js
index c8def765..d80af5b4 100644
--- a/core/tic_file_info.js
+++ b/core/tic_file_info.js
@@ -39,24 +39,25 @@ module.exports = class TicFileInfo {
 
     getAsString(key, joinWith) {
         const value = this.get(key);
-
-        //
-        //  We call toString() on values to ensure numbers, addresses, etc. are converted
-        //
-        joinWith = joinWith || '';
-        if(Array.isArray(value)) {
-            return value.map(v => v.toString() ).join(joinWith);
+        if(value) {
+            //
+            //  We call toString() on values to ensure numbers, addresses, etc. are converted
+            //
+            joinWith = joinWith || '';
+            if(Array.isArray(value)) {
+                return value.map(v => v.toString() ).join(joinWith);
+            }
+
+            return value.toString();
         }
-
-        return value.toString();
     }
-    
+
     get filePath() {
-        return paths.join(paths.dirname(this.path), this.get('File'));
+        return paths.join(paths.dirname(this.path), this.getAsString('File'));
     }
 
     get longFileName() {
-        return this.get('Lfile') || this.get('Fullname') || this.get('File');
+        return this.getAsString('Lfile') || this.getAsString('Fullname') || this.getAsString('File');
     }
 
     hasRequiredFields() {
@@ -79,7 +80,7 @@ module.exports = class TicFileInfo {
             return callback(Errors.Invalid('One or more required fields missing from TIC'));
         }
 
-        const area = self.get('Area').toUpperCase();
+        const area = self.getAsString('Area').toUpperCase();
 
         const localInfo = {
             areaTag : config.localAreaTags.find( areaTag => areaTag.toUpperCase() === area ),
@@ -89,7 +90,7 @@ module.exports = class TicFileInfo {
             return callback(Errors.Invalid(`No local area for "Area" of ${area}`));
         }
 
-        const from = self.get('From');
+        const from = self.getAsString('From');
 
         localInfo.node = Object.keys(config.nodes).find( nodeAddr => Address.fromString(nodeAddr).isPatternMatch(from) );
 
         if(!localInfo.node) {
@@ -102,7 +103,7 @@ module.exports = class TicFileInfo {
             return callback(null, localInfo);   //  no pw validation
         }
 
-        const passTic = self.get('Pw');
+        const passTic = self.getAsString('Pw');
         if(passTic !== passActual) {
             return callback(Errors.Invalid('Bad TIC password'));
         }
@@ -115,7 +116,7 @@ module.exports = class TicFileInfo {
         const crc = new CRC32();
         let sizeActual = 0;
 
-        let sha256Tic = self.get('Sha256');
+        let sha256Tic = self.getAsString('Sha256');
         let sha256;
         if(sha256Tic) {
             sha256Tic = sha256Tic.toLowerCase();
@@ -243,6 +244,7 @@ module.exports = class TicFileInfo {
                     break;
 
                 case 'crc' :
                     value = parseInt(value, 16);
                     break;
 
+                case 'size' :
+                    value = parseInt(value, 10);    //  TIC "Size" is a decimal byte count
+                    break;
+
diff --git a/mods/upload.js b/mods/upload.js
index b043ebd2..d8887193 100644
--- a/mods/upload.js
+++ b/mods/upload.js
@@ -384,6 +384,11 @@ exports.getModule = class UploadModule extends MenuModule {
                         self.client.log.error(
                             'Failed moving physical upload file',
                             { error : err.message, fileName : newEntry.fileName, source : src, dest : dst }
                         );
+
+                        if(dst !== finalPath) {
+                            //  name changed; adjust before persist
+                            newEntry.fileName = paths.basename(finalPath);
+                        }
                         return nextEntry(null); //  still try next file
                     }
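Finally, a sketch of the scanFile() change at the top of this changeset: a caller that already knows a file's SHA-256 and/or CRC32 (as the TIC import does after validate()) can supply them, and hashesToCalc skips recomputing them. The path and digest values below are placeholders only.

const scanFile = require('./core/file_base_area.js').scanFile;

//  placeholder values; in the TIC import these come from TicFileInfo.validate()
const scanOpts = {
    sha256  : '9f86d081884c7d659a2feaa0c55ad015a3bf4f1b2b0b822cd15d6c15b0f00a08',
    meta    : {
        file_crc32  : '414fa339',   //  'file_crc32' already in meta -> crc32 is not recomputed
    },
};

scanFile('/file_base/uploads/RETRONET.ZIP', scanOpts, (err, fileEntry) => {
    //  fileEntry.fileSha256 is taken from scanOpts.sha256; only the missing hashes (sha1, md5) are calculated
});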