#195: Finish scanFile() & hash updates: Clean up code
parent 681e45cb6d
commit 4074d68526
@@ -811,194 +811,6 @@ function scanFile(filePath, options, iterator, cb) {
    );
}

function scanFile2(filePath, options, iterator, cb) {
    if(3 === arguments.length && _.isFunction(iterator)) {
        cb          = iterator;
        iterator    = null;
    } else if(2 === arguments.length && _.isFunction(options)) {
        cb          = options;
        iterator    = null;
        options     = {};
    }

    const fileEntry = new FileEntry({
        areaTag     : options.areaTag,
        meta        : options.meta,
        hashTags    : options.hashTags,     //  Set() or Array
        fileName    : paths.basename(filePath),
        storageTag  : options.storageTag,
        fileSha256  : options.sha256,       //  caller may know this already
    });

    const stepInfo = {
        filePath    : filePath,
        fileName    : paths.basename(filePath),
    };

    const callIter = (next) => {
        return iterator ? iterator(stepInfo, next) : next(null);
    };

    const readErrorCallIter = (origError, next) => {
        stepInfo.step   = 'read_error';
        stepInfo.error  = origError.message;

        callIter( () => {
            return next(origError);
        });
    };

    let lastCalcHashPercent;

    //  don't re-calc hashes for any we already have in |options|
    const hashesToCalc = HASH_NAMES.filter(hn => {
        if('sha256' === hn && fileEntry.fileSha256) {
            return false;
        }

        if(`file_${hn}` in fileEntry.meta) {
            return false;
        }

        return true;
    });

    async.waterfall(
        [
            function startScan(callback) {
                fs.stat(filePath, (err, stats) => {
                    if(err) {
                        return readErrorCallIter(err, callback);
                    }

                    stepInfo.step       = 'start';
                    stepInfo.byteSize   = fileEntry.meta.byte_size = stats.size;

                    return callIter(callback);
                });
            },
            function processPhysicalFileGeneric(callback) {
                stepInfo.bytesProcessed = 0;

                const hashes = {};
                hashesToCalc.forEach(hashName => {
                    if('crc32' === hashName) {
                        hashes.crc32 = new CRC32;
                    } else {
                        hashes[hashName] = crypto.createHash(hashName);
                    }
                });

                const stream = fs.createReadStream(filePath);

                const updateHashes = (data) => {
                    for(let i = 0; i < hashesToCalc.length; ++i) {
                        hashes[hashesToCalc[i]].update(data);
                    }
                    return stream.resume();
                };

                stream.on('data', data => {
                    stream.pause(); //  until iterator completes

                    stepInfo.bytesProcessed     += data.length;
                    stepInfo.calcHashPercent    = Math.round(((stepInfo.bytesProcessed / stepInfo.byteSize) * 100));

                    //
                    //  Only send 'hash_update' step update if we have a noticeable percentage change in progress
                    //
                    if(!iterator || stepInfo.calcHashPercent === lastCalcHashPercent) {
                        updateHashes(data);
                    } else {
                        lastCalcHashPercent = stepInfo.calcHashPercent;
                        stepInfo.step       = 'hash_update';

                        callIter(err => {
                            if(err) {
                                stream.destroy();   //  cancel read
                                return callback(err);
                            }

                            updateHashes(data);
                        });
                    }
                });

                stream.on('end', () => {
                    fileEntry.meta.byte_size = stepInfo.bytesProcessed;

                    for(let i = 0; i < hashesToCalc.length; ++i) {
                        const hashName = hashesToCalc[i];
                        if('sha256' === hashName) {
                            stepInfo.sha256 = fileEntry.fileSha256 = hashes.sha256.digest('hex');
                        } else if('sha1' === hashName || 'md5' === hashName) {
                            stepInfo[hashName] = fileEntry.meta[`file_${hashName}`] = hashes[hashName].digest('hex');
                        } else if('crc32' === hashName) {
                            stepInfo.crc32 = fileEntry.meta.file_crc32 = hashes.crc32.finalize().toString(16);
                        }
                    }

                    stepInfo.step = 'hash_finish';
                    return callIter(callback);
                });

                stream.on('error', err => {
                    return readErrorCallIter(err, callback);
                });
            },
            function processPhysicalFileByType(callback) {
                const archiveUtil = ArchiveUtil.getInstance();

                archiveUtil.detectType(filePath, (err, archiveType) => {
                    if(archiveType) {
                        //  save this off
                        fileEntry.meta.archive_type = archiveType;

                        populateFileEntryWithArchive(fileEntry, filePath, stepInfo, callIter, err => {
                            if(err) {
                                populateFileEntryNonArchive(fileEntry, filePath, stepInfo, callIter, err => {
                                    if(err) {
                                        logDebug( { error : err.message }, 'Non-archive file entry population failed');
                                    }
                                    return callback(null);  //  ignore err
                                });
                            } else {
                                return callback(null);
                            }
                        });
                    } else {
                        populateFileEntryNonArchive(fileEntry, filePath, stepInfo, callIter, err => {
                            if(err) {
                                logDebug( { error : err.message }, 'Non-archive file entry population failed');
                            }
                            return callback(null);  //  ignore err
                        });
                    }
                });
            },
            function fetchExistingEntry(callback) {
                getExistingFileEntriesBySha256(fileEntry.fileSha256, (err, dupeEntries) => {
                    return callback(err, dupeEntries);
                });
            },
            function finished(dupeEntries, callback) {
                stepInfo.step = 'finished';
                callIter( () => {
                    return callback(null, dupeEntries);
                });
            }
        ],
        (err, dupeEntries) => {
            if(err) {
                return cb(err);
            }

            return cb(null, fileEntry, dupeEntries);
        }
    );
}

function scanFileAreaForChanges(areaInfo, options, iterator, cb) {
    if(3 === arguments.length && _.isFunction(iterator)) {
        cb          = iterator;
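For reference, a minimal sketch of how a caller might drive the surviving scanFile() from the hunk context above. It assumes scanFile() keeps the (filePath, options, iterator, cb) shape shown in the hunk header and reports the same stepInfo steps ('start', 'hash_update', 'hash_finish', 'finished', 'read_error') as the removed scanFile2(); the file path and area/storage tags below are placeholders, not values from this commit.

//  hypothetical caller -- the path and tags are illustrative only
scanFile(
    '/path/to/inbound/somefile.zip',
    { areaTag : 'uploads', storageTag : 'uploads' },
    (stepInfo, next) => {
        //  report hashing progress as it changes
        if('hash_update' === stepInfo.step) {
            console.log(`hashing ${stepInfo.fileName}: ${stepInfo.calcHashPercent}%`);
        }
        return next(null);  //  in scanFile2(), returning an Error here cancelled the read
    },
    (err, fileEntry, dupeEntries) => {
        if(err) {
            return console.error(err.message);
        }
        console.log(`sha256=${fileEntry.fileSha256}; ${dupeEntries.length} existing dupe(s)`);
    }
);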