Allow default hash tags to be supplied in file areas

* Supply an array or comma-separated list of strings via the `hashTags` property (see the sketch below)
* oputil will use these unless `--tags` is supplied
* Uploads will default to these tags (but the user can override)
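A minimal config sketch of the new property, assuming an illustrative `retro_pc` area block (the area key and tag values are examples, not taken from this commit):

```hjson
fileBase: {
    areas: {
        retro_pc: {
            name: Retro PC
            storageTags: [ "retro_pc_dos" ]

            // default hash tags may be an array of strings...
            hashTags: [ "retro", "pc", "dos" ]

            // ...or, equivalently, a comma separated list:
            // hashTags: "retro,pc,dos"
        }
    }
}
```

With this in place, oputil file scans and user uploads into the area pick up these tags unless tags are supplied explicitly (e.g. via `--tags`).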
Bryan Ashby 2020-12-12 12:35:01 -07:00
parent 47a690c2fe
commit ceeda8b13f
5 changed files with 95 additions and 80 deletions

View File

@@ -15,6 +15,7 @@ This document attempts to track **major** changes and additions in ENiGMA½. For
* `./oputil user group -group` now accepts `~group`, removing the need for special handling of the "-" character. #331
* A fix has been made to clean up old `file.db` entries when a file is removed. Previously stale records could be left or even recycled into new entries. Please see [UPGRADE.md](UPGRADE.md) for details on applying this fix (look for `tables_update_2020-11-29.sql`).
* The [onelinerz](./docs/modding/onelinerz.md) module can have `dbSuffix` set in its `config` block to specify a separate DB file, for example to use it as a requests list.
* Default hash tags can now be set in file areas. Simply supply an array or comma-separated list of values in a file area block via `hashTags`.
## 0.0.11-beta
* Upgraded from `alpha` to `beta` -- The software is far along and mature enough at this point!

View File

@@ -45,7 +45,7 @@ exports.getFileAreasByTagWildcardRule = getFileAreasByTagWildcardRule;
exports.getFileEntryPath = getFileEntryPath;
exports.changeFileAreaWithOptions = changeFileAreaWithOptions;
exports.scanFile = scanFile;
exports.scanFileAreaForChanges = scanFileAreaForChanges;
//exports.scanFileAreaForChanges = scanFileAreaForChanges;
exports.getDescFromFileName = getDescFromFileName;
exports.getAreaStats = getAreaStats;
exports.cleanUpTempSessionItems = cleanUpTempSessionItems;
@@ -139,7 +139,14 @@ function getDefaultFileAreaTag(client, disableAcsCheck) {
function getFileAreaByTag(areaTag) {
const areaInfo = Config().fileBase.areas[areaTag];
if(areaInfo) {
areaInfo.areaTag = areaTag; // convienence!
// normalize |hashTags|
if (_.isString(areaInfo.hashTags)) {
areaInfo.hashTags = areaInfo.hashTags.trim().split(',');
}
if (Array.isArray(areaInfo.hashTags)) {
areaInfo.hashTags = new Set(areaInfo.hashTags.map(t => t.trim()));
}
areaInfo.areaTag = areaTag; // convenience!
areaInfo.storage = getAreaStorageLocations(areaInfo);
return areaInfo;
}
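The normalization above accepts either form and always ends with a `Set` of trimmed tags. A standalone sketch of the same logic for reference (the `normalizeHashTags` helper name is illustrative, not part of the module; it only needs `lodash`, which the module already depends on):

```javascript
const _ = require('lodash');

// Mirrors the normalization in getFileAreaByTag(): a comma separated string
// or an array of strings both end up as a Set of trimmed hash tags.
function normalizeHashTags(hashTags) {
    if (_.isString(hashTags)) {
        hashTags = hashTags.trim().split(',');
    }
    if (Array.isArray(hashTags)) {
        hashTags = new Set(hashTags.map(t => t.trim()));
    }
    return hashTags;
}

console.log(normalizeHashTags('retro, pc,dos'));     // => Set of 'retro', 'pc', 'dos'
console.log(normalizeHashTags([ 'retro', ' pc ' ])); // => Set of 'retro', 'pc'
```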
@@ -794,7 +801,7 @@ function scanFile(filePath, options, iterator, cb) {
stepInfo.calcHashPercent = Math.round(((stepInfo.bytesProcessed / stepInfo.byteSize) * 100));
//
// Only send 'hash_update' step update if we have a noticable percentage change in progress
// Only send 'hash_update' step update if we have a noticeable percentage change in progress
//
const data = bytesRead < chunkSize ? buffer.slice(0, bytesRead) : buffer;
if(!iterator || stepInfo.calcHashPercent === lastCalcHashPercent) {
@@ -871,90 +878,91 @@ function scanFile(filePath, options, iterator, cb) {
);
}
function scanFileAreaForChanges(areaInfo, options, iterator, cb) {
if(3 === arguments.length && _.isFunction(iterator)) {
cb = iterator;
iterator = null;
} else if(2 === arguments.length && _.isFunction(options)) {
cb = options;
iterator = null;
options = {};
}
// :TODO: this stuff needs cleaned up
// function scanFileAreaForChanges(areaInfo, options, iterator, cb) {
// if(3 === arguments.length && _.isFunction(iterator)) {
// cb = iterator;
// iterator = null;
// } else if(2 === arguments.length && _.isFunction(options)) {
// cb = options;
// iterator = null;
// options = {};
// }
const storageLocations = getAreaStorageLocations(areaInfo);
// const storageLocations = getAreaStorageLocations(areaInfo);
async.eachSeries(storageLocations, (storageLoc, nextLocation) => {
async.series(
[
function scanPhysFiles(callback) {
const physDir = storageLoc.dir;
// async.eachSeries(storageLocations, (storageLoc, nextLocation) => {
// async.series(
// [
// function scanPhysFiles(callback) {
// const physDir = storageLoc.dir;
fs.readdir(physDir, (err, files) => {
if(err) {
return callback(err);
}
// fs.readdir(physDir, (err, files) => {
// if(err) {
// return callback(err);
// }
async.eachSeries(files, (fileName, nextFile) => {
const fullPath = paths.join(physDir, fileName);
// async.eachSeries(files, (fileName, nextFile) => {
// const fullPath = paths.join(physDir, fileName);
fs.stat(fullPath, (err, stats) => {
if(err) {
// :TODO: Log me!
return nextFile(null); // always try next file
}
// fs.stat(fullPath, (err, stats) => {
// if(err) {
// // :TODO: Log me!
// return nextFile(null); // always try next file
// }
if(!stats.isFile()) {
return nextFile(null);
}
// if(!stats.isFile()) {
// return nextFile(null);
// }
scanFile(
fullPath,
{
areaTag : areaInfo.areaTag,
storageTag : storageLoc.storageTag
},
iterator,
(err, fileEntry, dupeEntries) => {
if(err) {
// :TODO: Log me!!!
return nextFile(null); // try next anyway
}
// scanFile(
// fullPath,
// {
// areaTag : areaInfo.areaTag,
// storageTag : storageLoc.storageTag
// },
// iterator,
// (err, fileEntry, dupeEntries) => {
// if(err) {
// // :TODO: Log me!!!
// return nextFile(null); // try next anyway
// }
if(dupeEntries.length > 0) {
// :TODO: Handle duplidates -- what to do here???
} else {
if(Array.isArray(options.tags)) {
options.tags.forEach(tag => {
fileEntry.hashTags.add(tag);
});
}
addNewFileEntry(fileEntry, fullPath, err => {
// pass along error; we failed to insert a record in our DB or something else bad
return nextFile(err);
});
}
}
);
});
}, err => {
return callback(err);
});
});
},
function scanDbEntries(callback) {
// :TODO: Look @ db entries for area that were *not* processed above
return callback(null);
}
],
err => {
return nextLocation(err);
}
);
},
err => {
return cb(err);
});
}
// if(dupeEntries.length > 0) {
// // :TODO: Handle duplicates -- what to do here???
// } else {
// if(Array.isArray(options.tags)) {
// options.tags.forEach(tag => {
// fileEntry.hashTags.add(tag);
// });
// }
// addNewFileEntry(fileEntry, fullPath, err => {
// // pass along error; we failed to insert a record in our DB or something else bad
// return nextFile(err);
// });
// }
// }
// );
// });
// }, err => {
// return callback(err);
// });
// });
// },
// function scanDbEntries(callback) {
// // :TODO: Look @ db entries for area that were *not* processed above
// return callback(null);
// }
// ],
// err => {
// return nextLocation(err);
// }
// );
// },
// err => {
// return cb(err);
// });
// }
function getDescFromFileName(fileName) {
//

View File

@@ -153,6 +153,8 @@ function scanFileAreaForChanges(areaInfo, options, cb) {
function updateTags(fe) {
if(Array.isArray(options.tags)) {
fe.hashTags = new Set(options.tags);
} else if (areaInfo.hashTags) { // no explicit tags; merge in defaults, if any
fe.hashTags = areaInfo.hashTags;
}
}
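So explicit tags passed to oputil (via `--tags`) replace the entry's tag set outright, and the area's `hashTags` defaults are only used when none were given. A minimal sketch of that precedence, assuming `fe` is a file entry whose `hashTags` was already populated by the scan:

```javascript
// Illustrative only -- mirrors the precedence in updateTags() above.
function applyTagDefaults(fe, options, areaInfo) {
    if (Array.isArray(options.tags)) {
        fe.hashTags = new Set(options.tags);    // explicit --tags win outright
    } else if (areaInfo.hashTags) {
        fe.hashTags = areaInfo.hashTags;        // otherwise, the area's defaults
    }
    // else: keep whatever hash tags the scan already produced
}
```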
@@ -227,7 +229,8 @@ function scanFileAreaForChanges(areaInfo, options, cb) {
fullPath,
{
areaTag : areaInfo.areaTag,
storageTag : storageLoc.storageTag
storageTag : storageLoc.storageTag,
hashTags : areaInfo.hashTags,
},
(stepInfo, next) => {
if(argv.verbose) {

View File

@@ -332,6 +332,7 @@ exports.getModule = class UploadModule extends MenuModule {
const scanOpts = {
areaTag : self.areaInfo.areaTag,
storageTag : self.areaInfo.storageTags[0],
hashTags : self.areaInfo.hashTags,
};
function handleScanStep(stepInfo, nextScanStep) {

View File

@@ -36,6 +36,7 @@ File base *Areas* are configured using the `fileBase.areas` configuration block
| `desc` | :-1: | Friendly area description. |
| `storageTags` | :+1: | An array of storage tags for physical storage backing of the files in this area. If uploads are enabled for this area, **first** storage tag location is utilized! |
| `sort` | :-1: | If present, provides the sort key for ordering. `name` is used otherwise. |
| `hashTags` | :-1: | Set to an array of strings or a comma-separated list to provide _default_ hash tags for this area (see the note after the example below). |
Example areas section:
@@ -45,6 +46,7 @@ areas: {
name: Retro PC
desc: Oldschool PC/DOS
storageTags: [ "retro_pc_dos", "retro_pc_bbs" ]
hashTags: ["retro", "pc", "dos" ]
}
}
```
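As the table above notes, the defaults may also be written as a comma separated string, which the area loader splits and trims into the same set of tags. A sketch of the equivalent string form (the surrounding area block is assumed to be the one shown above):

```hjson
// inside the same area block as above (area key assumed):
hashTags: "retro, pc, dos"
```

Either way, uploads into the area and oputil scans without explicit `--tags` will apply these defaults.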