Allow default hash tags to be supplied in file areas

* Supply an array or comma-separated list of strings via 'hashTags' property
* oputil will use these unless --tags are supplied
* Uploads will default to these tags (but user can override)
This commit is contained in:
Bryan Ashby 2020-12-12 12:35:01 -07:00
parent 47a690c2fe
commit ceeda8b13f
No known key found for this signature in database
GPG Key ID: B49EB437951D2542
5 changed files with 95 additions and 80 deletions

View File

@ -15,6 +15,7 @@ This document attempts to track **major** changes and additions in ENiGMA½. For
* `./oputil user group -group` to now accepts `~group` removing the need for special handling of the "-" character. #331 * `./oputil user group -group` to now accepts `~group` removing the need for special handling of the "-" character. #331
* A fix has been made to clean up old `file.db` entries when a file is removed. Previously stale records could be left or even recycled into new entries. Please see [UPGRADE.md](UPGRADE.md) for details on applying this fix (look for `tables_update_2020-11-29.sql`). * A fix has been made to clean up old `file.db` entries when a file is removed. Previously stale records could be left or even recycled into new entries. Please see [UPGRADE.md](UPGRADE.md) for details on applying this fix (look for `tables_update_2020-11-29.sql`).
* The [onelinerz](./docs/modding/onelinerz.md) module can have `dbSuffix` set in its `config` block to specify a separate DB file. For example to use as a requests list. * The [onelinerz](./docs/modding/onelinerz.md) module can have `dbSuffix` set in its `config` block to specify a separate DB file. For example to use as a requests list.
* Default hash tags can now be set in file areas. Simply supply an array or comma-separated list of strings in a file area block via `hashTags`.
## 0.0.11-beta ## 0.0.11-beta
* Upgraded from `alpha` to `beta` -- The software is far along and mature enough at this point! * Upgraded from `alpha` to `beta` -- The software is far along and mature enough at this point!

View File

@ -45,7 +45,7 @@ exports.getFileAreasByTagWildcardRule = getFileAreasByTagWildcardRule;
exports.getFileEntryPath = getFileEntryPath; exports.getFileEntryPath = getFileEntryPath;
exports.changeFileAreaWithOptions = changeFileAreaWithOptions; exports.changeFileAreaWithOptions = changeFileAreaWithOptions;
exports.scanFile = scanFile; exports.scanFile = scanFile;
exports.scanFileAreaForChanges = scanFileAreaForChanges; //exports.scanFileAreaForChanges = scanFileAreaForChanges;
exports.getDescFromFileName = getDescFromFileName; exports.getDescFromFileName = getDescFromFileName;
exports.getAreaStats = getAreaStats; exports.getAreaStats = getAreaStats;
exports.cleanUpTempSessionItems = cleanUpTempSessionItems; exports.cleanUpTempSessionItems = cleanUpTempSessionItems;
@ -139,7 +139,14 @@ function getDefaultFileAreaTag(client, disableAcsCheck) {
function getFileAreaByTag(areaTag) { function getFileAreaByTag(areaTag) {
const areaInfo = Config().fileBase.areas[areaTag]; const areaInfo = Config().fileBase.areas[areaTag];
if(areaInfo) { if(areaInfo) {
areaInfo.areaTag = areaTag; // convienence! // normalize |hashTags|
if (_.isString(areaInfo.hashTags)) {
areaInfo.hashTags = areaInfo.hashTags.trim().split(',');
}
if (Array.isArray(areaInfo.hashTags)) {
areaInfo.hashTags = new Set(areaInfo.hashTags.map(t => t.trim()));
}
areaInfo.areaTag = areaTag; // convenience!
areaInfo.storage = getAreaStorageLocations(areaInfo); areaInfo.storage = getAreaStorageLocations(areaInfo);
return areaInfo; return areaInfo;
} }
@ -794,7 +801,7 @@ function scanFile(filePath, options, iterator, cb) {
stepInfo.calcHashPercent = Math.round(((stepInfo.bytesProcessed / stepInfo.byteSize) * 100)); stepInfo.calcHashPercent = Math.round(((stepInfo.bytesProcessed / stepInfo.byteSize) * 100));
// //
// Only send 'hash_update' step update if we have a noticable percentage change in progress // Only send 'hash_update' step update if we have a noticeable percentage change in progress
// //
const data = bytesRead < chunkSize ? buffer.slice(0, bytesRead) : buffer; const data = bytesRead < chunkSize ? buffer.slice(0, bytesRead) : buffer;
if(!iterator || stepInfo.calcHashPercent === lastCalcHashPercent) { if(!iterator || stepInfo.calcHashPercent === lastCalcHashPercent) {
@ -871,90 +878,91 @@ function scanFile(filePath, options, iterator, cb) {
); );
} }
function scanFileAreaForChanges(areaInfo, options, iterator, cb) { // :TODO: this stuff needs cleaned up
if(3 === arguments.length && _.isFunction(iterator)) { // function scanFileAreaForChanges(areaInfo, options, iterator, cb) {
cb = iterator; // if(3 === arguments.length && _.isFunction(iterator)) {
iterator = null; // cb = iterator;
} else if(2 === arguments.length && _.isFunction(options)) { // iterator = null;
cb = options; // } else if(2 === arguments.length && _.isFunction(options)) {
iterator = null; // cb = options;
options = {}; // iterator = null;
} // options = {};
// }
const storageLocations = getAreaStorageLocations(areaInfo); // const storageLocations = getAreaStorageLocations(areaInfo);
async.eachSeries(storageLocations, (storageLoc, nextLocation) => { // async.eachSeries(storageLocations, (storageLoc, nextLocation) => {
async.series( // async.series(
[ // [
function scanPhysFiles(callback) { // function scanPhysFiles(callback) {
const physDir = storageLoc.dir; // const physDir = storageLoc.dir;
fs.readdir(physDir, (err, files) => { // fs.readdir(physDir, (err, files) => {
if(err) { // if(err) {
return callback(err); // return callback(err);
} // }
async.eachSeries(files, (fileName, nextFile) => { // async.eachSeries(files, (fileName, nextFile) => {
const fullPath = paths.join(physDir, fileName); // const fullPath = paths.join(physDir, fileName);
fs.stat(fullPath, (err, stats) => { // fs.stat(fullPath, (err, stats) => {
if(err) { // if(err) {
// :TODO: Log me! // // :TODO: Log me!
return nextFile(null); // always try next file // return nextFile(null); // always try next file
} // }
if(!stats.isFile()) { // if(!stats.isFile()) {
return nextFile(null); // return nextFile(null);
} // }
scanFile( // scanFile(
fullPath, // fullPath,
{ // {
areaTag : areaInfo.areaTag, // areaTag : areaInfo.areaTag,
storageTag : storageLoc.storageTag // storageTag : storageLoc.storageTag
}, // },
iterator, // iterator,
(err, fileEntry, dupeEntries) => { // (err, fileEntry, dupeEntries) => {
if(err) { // if(err) {
// :TODO: Log me!!! // // :TODO: Log me!!!
return nextFile(null); // try next anyway // return nextFile(null); // try next anyway
} // }
if(dupeEntries.length > 0) { // if(dupeEntries.length > 0) {
// :TODO: Handle duplidates -- what to do here??? // // :TODO: Handle duplicates -- what to do here???
} else { // } else {
if(Array.isArray(options.tags)) { // if(Array.isArray(options.tags)) {
options.tags.forEach(tag => { // options.tags.forEach(tag => {
fileEntry.hashTags.add(tag); // fileEntry.hashTags.add(tag);
}); // });
} // }
addNewFileEntry(fileEntry, fullPath, err => { // addNewFileEntry(fileEntry, fullPath, err => {
// pass along error; we failed to insert a record in our DB or something else bad // // pass along error; we failed to insert a record in our DB or something else bad
return nextFile(err); // return nextFile(err);
}); // });
} // }
} // }
); // );
}); // });
}, err => { // }, err => {
return callback(err); // return callback(err);
}); // });
}); // });
}, // },
function scanDbEntries(callback) { // function scanDbEntries(callback) {
// :TODO: Look @ db entries for area that were *not* processed above // // :TODO: Look @ db entries for area that were *not* processed above
return callback(null); // return callback(null);
} // }
], // ],
err => { // err => {
return nextLocation(err); // return nextLocation(err);
} // }
); // );
}, // },
err => { // err => {
return cb(err); // return cb(err);
}); // });
} // }
function getDescFromFileName(fileName) { function getDescFromFileName(fileName) {
// //

View File

@ -153,6 +153,8 @@ function scanFileAreaForChanges(areaInfo, options, cb) {
function updateTags(fe) { function updateTags(fe) {
if(Array.isArray(options.tags)) { if(Array.isArray(options.tags)) {
fe.hashTags = new Set(options.tags); fe.hashTags = new Set(options.tags);
} else if (areaInfo.hashTags) { // no explicit tags; merge in defaults, if any
fe.hashTags = areaInfo.hashTags;
} }
} }
@ -227,7 +229,8 @@ function scanFileAreaForChanges(areaInfo, options, cb) {
fullPath, fullPath,
{ {
areaTag : areaInfo.areaTag, areaTag : areaInfo.areaTag,
storageTag : storageLoc.storageTag storageTag : storageLoc.storageTag,
hashTags : areaInfo.hashTags,
}, },
(stepInfo, next) => { (stepInfo, next) => {
if(argv.verbose) { if(argv.verbose) {

View File

@ -332,6 +332,7 @@ exports.getModule = class UploadModule extends MenuModule {
const scanOpts = { const scanOpts = {
areaTag : self.areaInfo.areaTag, areaTag : self.areaInfo.areaTag,
storageTag : self.areaInfo.storageTags[0], storageTag : self.areaInfo.storageTags[0],
hashTags : self.areaInfo.hashTags,
}; };
function handleScanStep(stepInfo, nextScanStep) { function handleScanStep(stepInfo, nextScanStep) {

View File

@ -36,6 +36,7 @@ File base *Areas* are configured using the `fileBase.areas` configuration block
| `desc` | :-1: | Friendly area description. | | `desc` | :-1: | Friendly area description. |
| `storageTags` | :+1: | An array of storage tags for physical storage backing of the files in this area. If uploads are enabled for this area, **first** storage tag location is utilized! | | `storageTags` | :+1: | An array of storage tags for physical storage backing of the files in this area. If uploads are enabled for this area, **first** storage tag location is utilized! |
| `sort` | :-1: | If present, provides the sort key for ordering. `name` is used otherwise. | | `sort` | :-1: | If present, provides the sort key for ordering. `name` is used otherwise. |
| `hashTags` | :-1: | Set to an array of strings or a comma-separated list to provide _default_ hash tags for this area. |
Example areas section: Example areas section:
@ -45,6 +46,7 @@ areas: {
name: Retro PC name: Retro PC
desc: Oldschool PC/DOS desc: Oldschool PC/DOS
storageTags: [ "retro_pc_dos", "retro_pc_bbs" ] storageTags: [ "retro_pc_dos", "retro_pc_bbs" ]
hashTags: ["retro", "pc", "dos" ]
} }
} }
``` ```