/* jslint node: true */
'use strict';

// ENiGMA½
const Config = require('./config.js').get;
const stringFormat = require('./string_format.js');
const Errors = require('./enig_error.js').Errors;
const resolveMimeType = require('./mime_util.js').resolveMimeType;
const Events = require('./events.js');

// base/modules
const fs = require('graceful-fs');
const _ = require('lodash');
const pty = require('node-pty');
const paths = require('path');

let archiveUtil;

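//
// An Archiver wraps a single entry from the `archives.archivers` config
// block. The shape below is an illustrative sketch only; the '7Zip' key,
// the '7za' binary, its flags, and the entryMatch regex are assumptions,
// not necessarily the shipped defaults. What this module relies on is:
// each action exposes a `cmd` string and an `args` array, and args may
// contain {archivePath}, {extractPath} and {fileList} placeholders that
// are expanded further below.
//
// archives : {
//     archivers : {
//         '7Zip' : {
//             compress   : { cmd : '7za', args : [ 'a', '-tzip', '{archivePath}', '{fileList}' ] },
//             decompress : { cmd : '7za', args : [ 'e', '-y', '-o{extractPath}', '{archivePath}' ] },
//             extract    : { cmd : '7za', args : [ 'e', '-y', '-o{extractPath}', '{archivePath}', '{fileList}' ] },
//             list       : {
//                 cmd        : '7za',
//                 args       : [ 'l', '{archivePath}' ],
//                 // regex with capture groups; group order given by entryGroupOrder,
//                 // which defaults to { byteSize : 1, fileName : 2 }
//                 entryMatch : '^[0-9]{4}-.+?\\s+([0-9]+)\\s+[0-9]+\\s+(.+)$',
//             },
//         }
//     }
// }
//
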
class Archiver {
    constructor(config) {
        this.compress = config.compress;
        this.decompress = config.decompress;
        this.list = config.list;
        this.extract = config.extract;
    }

    ok() {
        return this.canCompress() && this.canDecompress();
    }

    can(what) {
        if(!_.has(this, [ what, 'cmd' ]) || !_.has(this, [ what, 'args' ])) {
            return false;
        }

        return _.isString(this[what].cmd) && Array.isArray(this[what].args) && this[what].args.length > 0;
    }

    canCompress() { return this.can('compress'); }
    canDecompress() { return this.can('decompress'); }
    canList() { return this.can('list'); } // :TODO: validate entryMatch
    canExtract() { return this.can('extract'); }
}

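//
// ArchiveUtil is accessed as a singleton via getInstance(). A minimal usage
// sketch, assuming this module lives at ./archive_util.js and the system is
// already configured; the paths below are placeholders, not values from this
// codebase:
//
// const ArchiveUtil = require('./archive_util.js');
// const archUtil = ArchiveUtil.getInstance();
//
// archUtil.detectType('/path/to/some.archive', (err, archType) => {
//     if(err) {
//         return; // unknown/unsupported type
//     }
//     archUtil.extractTo('/path/to/some.archive', '/tmp/out', archType, err => {
//         // fully decompressed to /tmp/out (no fileList given)
//     });
// });
//
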
module.exports = class ArchiveUtil {

    constructor() {
        this.archivers = {};
        this.longestSignature = 0;
    }

    // singleton access
    static getInstance(hotReload = true) {
        if(!archiveUtil) {
            archiveUtil = new ArchiveUtil();
            archiveUtil.init(hotReload);
        }

        return archiveUtil;
    }

    init(hotReload = true) {
        this.reloadConfig();

        if(hotReload) {
            Events.on(Events.getSystemEvents().ConfigChanged, () => {
                this.reloadConfig();
            });
        }
    }

    reloadConfig() {
        const config = Config();

        if(_.has(config, 'archives.archivers')) {
            Object.keys(config.archives.archivers).forEach(archKey => {
                const archConfig = config.archives.archivers[archKey];
                const archiver = new Archiver(archConfig);

                if(!archiver.ok()) {
                    // :TODO: Log warning - bad archiver/config
                }

                this.archivers[archKey] = archiver;
            });
        }

        if(_.isObject(config.fileTypes)) {
            const updateSig = (ft) => {
                ft.sig = Buffer.from(ft.sig, 'hex');
                ft.offset = ft.offset || 0;

                // :TODO: this is broken: sig is NOT this long, it's sig.length long; offset needs to allow for -negative values as well
                const sigLen = ft.offset + ft.sig.length;
                if(sigLen > this.longestSignature) {
                    this.longestSignature = sigLen;
                }
            };

            Object.keys(config.fileTypes).forEach(mimeType => {
                const fileType = config.fileTypes[mimeType];
                if(Array.isArray(fileType)) {
                    fileType.forEach(ft => {
                        if(ft.sig) {
                            updateSig(ft);
                        }
                    });
                } else if(fileType.sig) {
                    updateSig(fileType);
                }
            });
        }
    }

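    //
    // reloadConfig() above and getArchiver() below both consume `fileTypes`
    // entries keyed by MIME type. An illustrative sketch of one entry (the
    // '7Zip' handler name is an assumption; the sig is the real ZIP local
    // file header magic, "PK\x03\x04"):
    //
    // fileTypes : {
    //     'application/zip' : {
    //         sig            : '504b0304',  // hex string; converted to a Buffer above
    //         offset         : 0,           // byte offset of the signature within the file
    //         ext            : '.zip',      // used to disambiguate arrays of entries
    //         archiveHandler : '7Zip',      // key into archives.archivers
    //     },
    // }
    //
    // With this entry, longestSignature becomes at least offset + sig.length = 4,
    // which sizes the read buffer used by detectType() below.
    //
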
    getArchiver(mimeTypeOrExtension, justExtention) {
        const mimeType = resolveMimeType(mimeTypeOrExtension);

        if(!mimeType) { // lookup returns false on failure
            return;
        }

        const config = Config();
        let fileType = _.get(config, [ 'fileTypes', mimeType ] );

        if(Array.isArray(fileType)) {
            if(!justExtention) {
                // need an extension for the lookup; ambiguous as-is :(
                return;
            }

            // further refine by extension
            fileType = fileType.find(ft => justExtention === ft.ext);
        }

        if(!_.isObject(fileType)) {
            return;
        }

        if(fileType.archiveHandler) {
            return _.get( config, [ 'archives', 'archivers', fileType.archiveHandler ] );
        }
    }

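    //
    // Resolution sketch for the method above, assuming the illustrative
    // fileTypes/archivers entries shown earlier: getArchiver('zip') (or
    // 'application/zip') resolves the MIME type via resolveMimeType(), finds
    // the matching fileTypes entry, and returns
    // config.archives.archivers[entry.archiveHandler]. When a MIME type maps
    // to an array of entries, the caller must also pass an extension, e.g.
    // getArchiver(archType, paths.extname(archivePath)), as compressTo() and
    // friends do below.
    //
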
    haveArchiver(archType) {
        return this.getArchiver(archType) ? true : false;
    }

    // :TODO: implement me:
    /*
    detectTypeWithBuf(buf, cb) {
    }
    */

    detectType(path, cb) {
        const closeFile = (fd) => {
            fs.close(fd, () => { /* sadface */ });
        };

        fs.open(path, 'r', (err, fd) => {
            if(err) {
                return cb(err);
            }

            const buf = Buffer.alloc(this.longestSignature);
            fs.read(fd, buf, 0, buf.length, 0, (err, bytesRead) => {
                if(err) {
                    closeFile(fd);
                    return cb(err);
                }

                const archFormat = _.findKey(Config().fileTypes, fileTypeInfo => {
                    const fileTypeInfos = Array.isArray(fileTypeInfo) ? fileTypeInfo : [ fileTypeInfo ];
                    return fileTypeInfos.find(fti => {
                        if(!fti.sig || !fti.archiveHandler) {
                            return false;
                        }

                        const lenNeeded = fti.offset + fti.sig.length;

                        if(bytesRead < lenNeeded) {
                            return false;
                        }

                        const comp = buf.slice(fti.offset, fti.offset + fti.sig.length);
                        return (fti.sig.equals(comp));
                    });
                });

                closeFile(fd);
                return cb(archFormat ? null : Errors.General('Unknown type'), archFormat);
            });
        });
    }

    spawnHandler(proc, action, cb) {
        // pty.js doesn't currently give us an error when things fail,
        // so we have this horrible, horrible hack:
        let err;
        proc.once('data', d => {
            if(_.isString(d) && d.startsWith('execvp(3) failed.')) {
                err = Errors.ExternalProcess(`${action} failed: ${d.trim()}`);
            }
        });

        proc.once('exit', exitCode => {
            return cb(exitCode ? Errors.ExternalProcess(`${action} failed with exit code: ${exitCode}`) : err);
        });
    }

    compressTo(archType, archivePath, files, workDir, cb) {
        // normalize the optional workDir/cb pair before any early return can call cb
        if (!cb && _.isFunction(workDir)) {
            cb = workDir;
            workDir = null;
        }

        const archiver = this.getArchiver(archType, paths.extname(archivePath));

        if(!archiver) {
            return cb(Errors.Invalid(`Unknown archive type: ${archType}`));
        }

        const fmtObj = {
            archivePath : archivePath,
            fileList : files.join(' '), // :TODO: probably need same hack as extractTo here!
        };

        // :TODO: DRY with extractTo()
        const args = archiver.compress.args.map( arg => {
            return '{fileList}' === arg ? arg : stringFormat(arg, fmtObj);
        });

        const fileListPos = args.indexOf('{fileList}');
        if(fileListPos > -1) {
            // replace {fileList} with 0:n separate file list arguments
            args.splice.apply(args, [fileListPos, 1].concat(files));
        }

        let proc;
        try {
            proc = pty.spawn(archiver.compress.cmd, args, this.getPtyOpts(workDir));
        } catch(e) {
            return cb(Errors.ExternalProcess(
                `Error spawning archiver process "${archiver.compress.cmd}" with args "${args.join(' ')}": ${e.message}`)
            );
        }

        return this.spawnHandler(proc, 'Compression', cb);
    }

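    //
    // compressTo() above and extractTo() below expand their args templates the
    // same way. A worked example under the illustrative archiver config
    // sketched near the top of this file (the template and paths are
    // assumptions):
    //
    //     args template : [ 'a', '-tzip', '{archivePath}', '{fileList}' ]
    //     archivePath   : '/tmp/out.zip'
    //     files         : [ 'a.txt', 'b.txt' ]
    //
    // stringFormat() fills {archivePath}, then the standalone {fileList}
    // placeholder is spliced out and replaced by the individual file names,
    // producing:
    //
    //     [ 'a', '-tzip', '/tmp/out.zip', 'a.txt', 'b.txt' ]
    //
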
    extractTo(archivePath, extractPath, archType, fileList, cb) {
        let haveFileList;

        if(!cb && _.isFunction(fileList)) {
            cb = fileList;
            fileList = [];
            haveFileList = false;
        } else {
            haveFileList = true;
        }

        const archiver = this.getArchiver(archType, paths.extname(archivePath));

        if(!archiver) {
            return cb(Errors.Invalid(`Unknown archive type: ${archType}`));
        }

        const fmtObj = {
            archivePath : archivePath,
            extractPath : extractPath,
        };

        let action = haveFileList ? 'extract' : 'decompress';
        if('extract' === action && !_.isObject(archiver[action])) {
            // we're forced to do a full decompress
            action = 'decompress';
            haveFileList = false;
        }

        // we need to treat {fileList} specially in that it should be broken up into 0:n args
        const args = archiver[action].args.map( arg => {
            return '{fileList}' === arg ? arg : stringFormat(arg, fmtObj);
        });

        const fileListPos = args.indexOf('{fileList}');
        if(fileListPos > -1) {
            // replace {fileList} with 0:n separate file list arguments
            args.splice.apply(args, [fileListPos, 1].concat(fileList));
        }

        let proc;
        try {
            proc = pty.spawn(archiver[action].cmd, args, this.getPtyOpts(extractPath));
        } catch(e) {
            return cb(Errors.ExternalProcess(
                `Error spawning archiver process "${archiver[action].cmd}" with args "${args.join(' ')}": ${e.message}`)
            );
        }

        return this.spawnHandler(proc, (haveFileList ? 'Extraction' : 'Decompression'), cb);
    }

    listEntries(archivePath, archType, cb) {
        const archiver = this.getArchiver(archType, paths.extname(archivePath));

        if(!archiver) {
            return cb(Errors.Invalid(`Unknown archive type: ${archType}`));
        }

        const fmtObj = {
            archivePath : archivePath,
        };

        const args = archiver.list.args.map( arg => stringFormat(arg, fmtObj) );

        let proc;
        try {
            proc = pty.spawn(archiver.list.cmd, args, this.getPtyOpts());
        } catch(e) {
            return cb(Errors.ExternalProcess(
                `Error spawning archiver process "${archiver.list.cmd}" with args "${args.join(' ')}": ${e.message}`)
            );
        }

        let output = '';
        proc.on('data', data => {
            // :TODO: hack for: execvp(3) failed.: No such file or directory
            output += data;
        });

        proc.once('exit', exitCode => {
            if(exitCode) {
                return cb(Errors.ExternalProcess(`List failed with exit code: ${exitCode}`));
            }

            const entryGroupOrder = archiver.list.entryGroupOrder || { byteSize : 1, fileName : 2 };

            const entries = [];
            const entryMatchRe = new RegExp(archiver.list.entryMatch, 'gm');
            let m;
            while((m = entryMatchRe.exec(output))) {
                entries.push({
                    byteSize : parseInt(m[entryGroupOrder.byteSize], 10),
                    fileName : m[entryGroupOrder.fileName].trim(),
                });
            }

            return cb(null, entries);
        });
    }

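    //
    // Sketch of how the parsing above behaves, using the illustrative
    // entryMatch regex from the archiver config example near the top of this
    // file (both the regex and the sample output line are assumptions, not
    // real 7za output):
    //
    //     output line : '2020-01-01 12:00:00 ....A         1024          512  readme.txt'
    //     capture 1   : '1024'       -> byteSize (per entryGroupOrder.byteSize)
    //     capture 2   : 'readme.txt' -> fileName (per entryGroupOrder.fileName)
    //
    // yielding an entry of { byteSize : 1024, fileName : 'readme.txt' }.
    //
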
    getPtyOpts(cwd) {
        const opts = {
            name : 'enigma-archiver',
            cols : 80,
            rows : 24,
            env : process.env,
        };
        if(cwd) {
            opts.cwd = cwd;
        }
        // :TODO: set cwd to supplied temp path if not a specific extract
        return opts;
    }
};