/* jslint node: true */
'use strict';

// ENiGMA½
const Config = require('./config.js').config;
const Errors = require('./enig_error.js').Errors;
const sortAreasOrConfs = require('./conf_area_util.js').sortAreasOrConfs;
const FileEntry = require('./file_entry.js');
const FileDb = require('./database.js').dbs.file;
const ArchiveUtil = require('./archive_util.js');
const CRC32 = require('./crc.js').CRC32;
const Log = require('./logger.js').log;

// deps
const _ = require('lodash');
const async = require('async');
const fs = require('fs');
const crypto = require('crypto');
const paths = require('path');
const temptmp = require('temptmp').createTrackedSession('file_area');
const iconv = require('iconv-lite');

exports.isInternalArea = isInternalArea;
exports.getAvailableFileAreas = getAvailableFileAreas;
exports.getSortedAvailableFileAreas = getSortedAvailableFileAreas;
exports.getAreaStorageDirectoryByTag = getAreaStorageDirectoryByTag;
exports.getAreaDefaultStorageDirectory = getAreaDefaultStorageDirectory;
exports.getAreaStorageLocations = getAreaStorageLocations;
exports.getDefaultFileAreaTag = getDefaultFileAreaTag;
exports.getFileAreaByTag = getFileAreaByTag;
exports.getFileEntryPath = getFileEntryPath;
exports.changeFileAreaWithOptions = changeFileAreaWithOptions;
exports.scanFile = scanFile;
exports.scanFileAreaForChanges = scanFileAreaForChanges;

const WellKnownAreaTags = exports.WellKnownAreaTags = {
    Invalid : '',
    MessageAreaAttach : 'system_message_attachment',
};

function isInternalArea(areaTag) {
    return areaTag === WellKnownAreaTags.MessageAreaAttach;
}

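// Returns a map of all file areas the client may see: system internal
// areas are omitted unless options.includeSystemInternal is set, and
// read (plus write, when options.writeAcs is set) ACS checks are applied.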
function getAvailableFileAreas(client, options) {
    options = options || { };

    // perform ACS check per conf & omit internal if desired
    const allAreas = _.map(Config.fileBase.areas, (areaInfo, areaTag) => Object.assign(areaInfo, { areaTag : areaTag } ));

    return _.omitBy(allAreas, areaInfo => {
        if(!options.includeSystemInternal && isInternalArea(areaInfo.areaTag)) {
            return true;
        }

        if(options.writeAcs && !client.acs.hasFileAreaWrite(areaInfo)) {
            return true; // omit
        }

        return !client.acs.hasFileAreaRead(areaInfo);
    });
}

function getSortedAvailableFileAreas(client, options) {
    const areas = _.map(getAvailableFileAreas(client, options), v => v);
    sortAreasOrConfs(areas);
    return areas;
}

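// Returns the areaTag of the area marked |default| if the client can
// read it (or |disableAcsCheck| is set); otherwise falls back to any
// readable, non-internal area.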
function getDefaultFileAreaTag(client, disableAcsCheck) {
    let defaultArea = _.findKey(Config.fileBase.areas, o => o.default);
    if(defaultArea) {
        const area = Config.fileBase.areas[defaultArea];
        if(true === disableAcsCheck || client.acs.hasFileAreaRead(area)) {
            return defaultArea;
        }
    }

    // just use anything we can
    defaultArea = _.findKey(Config.fileBase.areas, (area, areaTag) => {
        return WellKnownAreaTags.MessageAreaAttach !== areaTag && (true === disableAcsCheck || client.acs.hasFileAreaRead(area));
    });

    return defaultArea;
}

|
2016-09-29 03:54:25 +00:00
|
|
|
|
|
|
|
function getFileAreaByTag(areaTag) {
|
2016-12-07 01:58:56 +00:00
|
|
|
const areaInfo = Config.fileBase.areas[areaTag];
|
2016-10-03 03:40:37 +00:00
|
|
|
if(areaInfo) {
|
2016-12-07 01:58:56 +00:00
|
|
|
areaInfo.areaTag = areaTag; // convienence!
|
|
|
|
areaInfo.storage = getAreaStorageLocations(areaInfo);
|
2016-10-03 03:40:37 +00:00
|
|
|
return areaInfo;
|
|
|
|
}
|
2016-10-01 19:25:32 +00:00
|
|
|
}
function changeFileAreaWithOptions(client, areaTag, options, cb) {
    async.waterfall(
        [
            function getArea(callback) {
                const area = getFileAreaByTag(areaTag);
                return callback(area ? null : Errors.Invalid('Invalid file areaTag'), area);
            },
            function validateAccess(area, callback) {
                if(!client.acs.hasFileAreaRead(area)) {
                    return callback(Errors.AccessDenied('No access to this area'));
                }
                return callback(null, area);
            },
            function changeArea(area, callback) {
                if(true === options.persist) {
                    client.user.persistProperty('file_area_tag', areaTag, err => {
                        return callback(err, area);
                    });
                } else {
                    client.user.properties['file_area_tag'] = areaTag;
                    return callback(null, area);
                }
            }
        ],
        (err, area) => {
            if(!err) {
                client.log.info( { areaTag : areaTag, area : area }, 'Current file area changed');
            } else {
                client.log.warn( { areaTag : areaTag, area : area, error : err.message }, 'Could not change file area');
            }

            return cb(err);
        }
    );
}
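
// Resolves a storage tag to an absolute directory: relative entries in
// config fileBase.storageTags are joined to fileBase.areaStoragePrefix,
// while absolute entries are used as-is (via paths.resolve()).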
function getAreaStorageDirectoryByTag(storageTag) {
    const storageLocation = (storageTag && Config.fileBase.storageTags[storageTag]);

    return paths.resolve(Config.fileBase.areaStoragePrefix, storageLocation || '');
}

function getAreaDefaultStorageDirectory(areaInfo) {
    return getAreaStorageDirectoryByTag(areaInfo.storageTags[0]);
}
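
// Normalizes an area's storageTags (string or Array) into an array of
// { storageTag, dir } entries, dropping tags not present in
// config fileBase.storageTags.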
function getAreaStorageLocations(areaInfo) {

    const storageTags = Array.isArray(areaInfo.storageTags) ?
        areaInfo.storageTags :
        [ areaInfo.storageTags || '' ];

    const avail = Config.fileBase.storageTags;

    return _.compact(storageTags.map(storageTag => {
        if(avail[storageTag]) {
            return {
                storageTag : storageTag,
                dir : getAreaStorageDirectoryByTag(storageTag),
            };
        }
    }));
}
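
// Returns the full physical path of |fileEntry| within its area's
// default storage location.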
function getFileEntryPath(fileEntry) {
    const areaInfo = getFileAreaByTag(fileEntry.areaTag);
    if(areaInfo) {
        return paths.join(getAreaDefaultStorageDirectory(areaInfo), fileEntry.fileName);
    }
}

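// Fetches any existing file entries whose SHA-256 matches |sha256| as
// an array of { fileId, areaTag }.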
function getExistingFileEntriesBySha256(sha256, cb) {
    const entries = [];

    FileDb.each(
        `SELECT file_id, area_tag
        FROM file
        WHERE file_sha256=?;`,
        [ sha256 ],
        (err, fileRow) => {
            if(fileRow) {
                entries.push({
                    fileId : fileRow.file_id,
                    areaTag : fileRow.area_tag,
                });
            }
        },
        err => {
            return cb(err, entries);
        }
    );
}

// :TODO: This is basically sliceAtEOF() from art.js ... DRY!
function sliceAtSauceMarker(data) {
    let eof = data.length;
    const stopPos = Math.max(data.length - 256, 0); // 256 = 2 * sizeof(SAUCE)

    for(let i = eof - 1; i > stopPos; i--) {
        if(0x1a === data[i]) {
            eof = i;
            break;
        }
    }
    return data.slice(0, eof);
}
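
// Attempts to derive an estimated release year from the short or long
// description using config fileBase.yearEstPatterns. Two digit years
// are pivoted at 70: >70 becomes 19xx, otherwise 20xx.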
function attemptSetEstimatedReleaseDate(fileEntry) {
    // :TODO: yearEstPatterns RegExp's should be cached - we can do this @ Config (re)load time
    const patterns = Config.fileBase.yearEstPatterns.map( p => new RegExp(p, 'gmi'));

    function getMatch(input) {
        if(input) {
            let m;
            for(let i = 0; i < patterns.length; ++i) {
                m = patterns[i].exec(input);
                if(m) {
                    return m;
                }
            }
        }
    }

    //
    // We attempt detection in short -> long order
    //
    const match = getMatch(fileEntry.desc) || getMatch(fileEntry.descLong);
    if(match && match[1]) {
        let year;
        if(2 === match[1].length) {
            year = parseInt(match[1]);
            if(year) {
                if(year > 70) {
                    year += 1900;
                } else {
                    year += 2000;
                }
            }
        } else {
            year = parseInt(match[1]);
        }

        if(year) {
            fileEntry.meta.est_release_year = year;
        }
    }
}

// a simple log proxy for when we call from oputil.js
function logDebug(obj, msg) {
    if(Log) {
        Log.debug(obj, msg);
    }
}

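// Extracts short (FILE_ID.DIZ style) and long description files from an
// archive - the archive type was detected earlier and stored in
// fileEntry.meta.archive_type - decodes them as CP437, and reports
// progress through |stepInfo| / |iterator| at each step.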
function populateFileEntryWithArchive(fileEntry, filePath, stepInfo, iterator, cb) {
    const archiveUtil = ArchiveUtil.getInstance();
    const archiveType = fileEntry.meta.archive_type; // we set this previous to populateFileEntryWithArchive()

    async.waterfall(
        [
            function getArchiveFileList(callback) {
                stepInfo.step = 'archive_list_start';

                iterator(err => {
                    if(err) {
                        return callback(err);
                    }

                    archiveUtil.listEntries(filePath, archiveType, (err, entries) => {
                        if(err) {
                            stepInfo.step = 'archive_list_failed';
                        } else {
                            stepInfo.step = 'archive_list_finish';
                            stepInfo.archiveEntries = entries || [];
                        }

                        iterator(iterErr => {
                            return callback( iterErr, entries || [] ); // ignore original |err| here
                        });
                    });
                });
            },
            function processDescFilesStart(entries, callback) {
                stepInfo.step = 'desc_files_start';
                iterator(err => {
                    return callback(err, entries);
                });
            },
            function extractDescFiles(entries, callback) {

                // :TODO: would be nice if these RegExp's were cached
                // :TODO: this is long winded...

                const extractList = [];

                const shortDescFile = entries.find( e => {
                    return Config.fileBase.fileNamePatterns.desc.find( pat => new RegExp(pat, 'i').test(e.fileName) );
                });

                if(shortDescFile) {
                    extractList.push(shortDescFile.fileName);
                }

                const longDescFile = entries.find( e => {
                    return Config.fileBase.fileNamePatterns.descLong.find( pat => new RegExp(pat, 'i').test(e.fileName) );
                });

                if(longDescFile) {
                    extractList.push(longDescFile.fileName);
                }

                if(0 === extractList.length) {
                    return callback(null, [] );
                }

                temptmp.mkdir( { prefix : 'enigextract-' }, (err, tempDir) => {
                    if(err) {
                        return callback(err);
                    }

                    archiveUtil.extractTo(filePath, tempDir, archiveType, extractList, err => {
                        if(err) {
                            return callback(err);
                        }

                        const descFiles = {
                            desc : shortDescFile ? paths.join(tempDir, shortDescFile.fileName) : null,
                            descLong : longDescFile ? paths.join(tempDir, longDescFile.fileName) : null,
                        };

                        return callback(null, descFiles);
                    });
                });
            },
            function readDescFiles(descFiles, callback) {
                async.each(Object.keys(descFiles), (descType, next) => {
                    const path = descFiles[descType];
                    if(!path) {
                        return next(null);
                    }

                    fs.stat(path, (err, stats) => {
                        if(err) {
                            return next(null);
                        }

                        // skip entries that are too large
                        const maxFileSizeKey = `max${_.upperFirst(descType)}FileByteSize`;

                        if(Config.fileBase[maxFileSizeKey] && stats.size > Config.fileBase[maxFileSizeKey]) {
                            logDebug( { byteSize : stats.size, maxByteSize : Config.fileBase[maxFileSizeKey] }, `Skipping "${descType}"; Too large` );
                            return next(null);
                        }

                        fs.readFile(path, (err, data) => {
                            if(err || !data) {
                                return next(null);
                            }

                            //
                            // Assume FILE_ID.DIZ, NFO files, etc. are CP437.
                            //
                            // :TODO: This isn't really always the case - how to handle this? We could do a quick detection...
                            fileEntry[descType] = iconv.decode(sliceAtSauceMarker(data), 'cp437');
                            return next(null);
                        });
                    });
                }, () => {
                    // cleanup but don't wait
                    temptmp.cleanup( paths => {
                        // note: don't use client logger here - may not be avail
                        logDebug( { paths : paths, sessionId : temptmp.sessionId }, 'Cleaned up temporary files' );
                    });

                    return callback(null);
                });
            },
            function attemptReleaseYearEstimation(callback) {
                attemptSetEstimatedReleaseDate(fileEntry);
                return callback(null);
            },
            function processDescFilesFinish(callback) {
                stepInfo.step = 'desc_files_finish';
                return iterator(callback);
            },
        ],
        err => {
            return cb(err);
        }
    );
}

function populateFileEntryNonArchive(fileEntry, filePath, stepInfo, iterator, cb) {
    // :TODO: implement me!
    return cb(null);
}

function addNewFileEntry(fileEntry, filePath, cb) {
    // :TODO: Use detectTypeWithBuf() once avail - we *just* read some file data

    async.series(
        [
            function addNewDbRecord(callback) {
                return fileEntry.persist(callback);
            }
        ],
        err => {
            return cb(err);
        }
    );
}

function updateFileEntry(fileEntry, filePath, cb) {
    // :TODO: implement me!
    return cb(null);
}

const HASH_NAMES = [ 'sha1', 'sha256', 'md5', 'crc32' ];

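//
// scanFile() hashes a physical file (SHA-1, SHA-256, MD5 & CRC32),
// pulls description files out of archives, and hands back a not-yet
// persisted FileEntry along with any existing entries sharing the same
// SHA-256. A minimal usage sketch (the path and tags are hypothetical):
//
// scanFile(
//     '/file/base/uploads/some_file.zip',
//     { areaTag : 'uploads', storageTag : 'uploads' },
//     (stepInfo, next) => {
//         console.log(stepInfo.step); // 'start', 'hash_update', ..., 'finished'
//         return next(null);
//     },
//     (err, fileEntry, dupeEntries) => {
//         // persist via addNewFileEntry() if |dupeEntries| is empty
//     }
// );
//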
function scanFile(filePath, options, iterator, cb) {

    if(3 === arguments.length && _.isFunction(iterator)) {
        cb = iterator;
        iterator = null;
    } else if(2 === arguments.length && _.isFunction(options)) {
        cb = options;
        iterator = null;
        options = {};
    }

    const fileEntry = new FileEntry({
        areaTag : options.areaTag,
        meta : options.meta,
        hashTags : options.hashTags, // Set() or Array
        fileName : paths.basename(filePath),
        storageTag : options.storageTag,
    });

    const stepInfo = {
        filePath : filePath,
        fileName : paths.basename(filePath),
    };

    function callIter(next) {
        if(iterator) {
            return iterator(stepInfo, next);
        } else {
            return next(null);
        }
    }

    function readErrorCallIter(origError, next) {
        stepInfo.step = 'read_error';
        stepInfo.error = origError.message;

        callIter( () => {
            return next(origError);
        });
    }

    let lastCalcHashPercent;

    async.waterfall(
        [
            function startScan(callback) {
                fs.stat(filePath, (err, stats) => {
                    if(err) {
                        return readErrorCallIter(err, callback);
                    }

                    stepInfo.step = 'start';
                    stepInfo.byteSize = fileEntry.meta.byte_size = stats.size;

                    return callIter(callback);
                });
            },
            function processPhysicalFileGeneric(callback) {
                stepInfo.bytesProcessed = 0;

                const hashes = {
                    sha1 : crypto.createHash('sha1'),
                    sha256 : crypto.createHash('sha256'),
                    md5 : crypto.createHash('md5'),
                    crc32 : new CRC32(),
                };

                const stream = fs.createReadStream(filePath);

                function updateHashes(data) {
                    async.each( HASH_NAMES, (hashName, nextHash) => {
                        hashes[hashName].update(data);
                        return nextHash(null);
                    }, () => {
                        return stream.resume();
                    });
                }

                stream.on('data', data => {
                    stream.pause(); // until iterator completes

                    stepInfo.bytesProcessed += data.length;
                    stepInfo.calcHashPercent = Math.round(((stepInfo.bytesProcessed / stepInfo.byteSize) * 100));

                    //
                    // Only send 'hash_update' step update if we have a noticeable percentage change in progress
                    //
                    if(stepInfo.calcHashPercent === lastCalcHashPercent) {
                        updateHashes(data);
                    } else {
                        lastCalcHashPercent = stepInfo.calcHashPercent;
                        stepInfo.step = 'hash_update';

                        callIter(err => {
                            if(err) {
                                stream.destroy(); // cancel read
                                return callback(err);
                            }

                            updateHashes(data);
                        });
                    }
                });

                stream.on('end', () => {
                    fileEntry.meta.byte_size = stepInfo.bytesProcessed;

                    async.each(HASH_NAMES, (hashName, nextHash) => {
                        if('sha256' === hashName) {
                            stepInfo.sha256 = fileEntry.fileSha256 = hashes.sha256.digest('hex');
                        } else if('sha1' === hashName || 'md5' === hashName) {
                            stepInfo[hashName] = fileEntry.meta[`file_${hashName}`] = hashes[hashName].digest('hex');
                        } else if('crc32' === hashName) {
                            stepInfo.crc32 = fileEntry.meta.file_crc32 = hashes.crc32.finalize().toString(16);
                        }

                        return nextHash(null);
                    }, () => {
                        stepInfo.step = 'hash_finish';
                        return callIter(callback);
                    });
                });

                stream.on('error', err => {
                    return readErrorCallIter(err, callback);
                });
            },
            function processPhysicalFileByType(callback) {
                const archiveUtil = ArchiveUtil.getInstance();

                archiveUtil.detectType(filePath, (err, archiveType) => {
                    if(archiveType) {
                        // save this off
                        fileEntry.meta.archive_type = archiveType;

                        populateFileEntryWithArchive(fileEntry, filePath, stepInfo, callIter, err => {
                            if(err) {
                                populateFileEntryNonArchive(fileEntry, filePath, stepInfo, callIter, err => {
                                    // :TODO: log err
                                    return callback(null); // ignore err
                                });
                            } else {
                                return callback(null);
                            }
                        });
                    } else {
                        populateFileEntryNonArchive(fileEntry, filePath, stepInfo, callIter, err => {
                            // :TODO: log err
                            return callback(null); // ignore err
                        });
                    }
                });
            },
            function fetchExistingEntry(callback) {
                getExistingFileEntriesBySha256(fileEntry.fileSha256, (err, dupeEntries) => {
                    return callback(err, dupeEntries);
                });
            },
            function finished(dupeEntries, callback) {
                stepInfo.step = 'finished';
                callIter( () => {
                    return callback(null, dupeEntries);
                });
            }
        ],
        (err, dupeEntries) => {
            if(err) {
                return cb(err);
            }

            return cb(null, fileEntry, dupeEntries);
        }
    );
}

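// Walks every storage location belonging to |areaInfo|, scanFile()'ing
// each physical file found and adding new, non-duplicate entries to the
// file base. |options.tags|, when supplied, is added to each new entry's
// hash tags.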
function scanFileAreaForChanges(areaInfo, options, iterator, cb) {
    if(3 === arguments.length && _.isFunction(iterator)) {
        cb = iterator;
        iterator = null;
    } else if(2 === arguments.length && _.isFunction(options)) {
        cb = options;
        iterator = null;
        options = {};
    }

    const storageLocations = getAreaStorageLocations(areaInfo);

    async.eachSeries(storageLocations, (storageLoc, nextLocation) => {
        async.series(
            [
                function scanPhysFiles(callback) {
                    const physDir = storageLoc.dir;

                    fs.readdir(physDir, (err, files) => {
                        if(err) {
                            return callback(err);
                        }

                        async.eachSeries(files, (fileName, nextFile) => {
                            const fullPath = paths.join(physDir, fileName);

                            fs.stat(fullPath, (err, stats) => {
                                if(err) {
                                    // :TODO: Log me!
                                    return nextFile(null); // always try next file
                                }

                                if(!stats.isFile()) {
                                    return nextFile(null);
                                }

                                scanFile(
                                    fullPath,
                                    {
                                        areaTag : areaInfo.areaTag,
                                        storageTag : storageLoc.storageTag
                                    },
                                    iterator,
                                    (err, fileEntry, dupeEntries) => {
                                        if(err) {
                                            // :TODO: Log me!!!
                                            return nextFile(null); // try next anyway
                                        }

                                        if(dupeEntries.length > 0) {
                                            // :TODO: Handle duplicates -- what to do here???
                                            return nextFile(null); // move on so the scan doesn't stall
                                        } else {
                                            if(Array.isArray(options.tags)) {
                                                options.tags.forEach(tag => {
                                                    fileEntry.hashTags.add(tag);
                                                });
                                            }

                                            addNewFileEntry(fileEntry, fullPath, err => {
                                                // pass along error; we failed to insert a record in our DB or something else bad
                                                return nextFile(err);
                                            });
                                        }
                                    }
                                );
                            });
                        }, err => {
                            return callback(err);
                        });
                    });
                },
                function scanDbEntries(callback) {
                    // :TODO: Look @ db entries for area that were *not* processed above
                    return callback(null);
                }
            ],
            err => {
                return nextLocation(err);
            }
        );
    }, err => {
        return cb(err);
    });
}