Finish conversion from 'binary' -> 'binary-parser'

* FTN packets
* SAUCE
parent c1f971d2d9
commit 8bfad971a1
@@ -8,10 +8,11 @@ const Address = require('./ftn_address.js');
 const strUtil = require('./string_util.js');
 const Log = require('./logger.js').log;
 const ansiPrep = require('./ansi_prep.js');
+const Errors = require('./enig_error.js').Errors;
 const _ = require('lodash');
 const assert = require('assert');
-const binary = require('binary');
+const { Parser } = require('binary-parser');
 const fs = require('graceful-fs');
 const async = require('async');
 const iconv = require('iconv-lite');
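
The pattern throughout this commit is the same: the old 'binary' module streamed fields into a .tap() callback, while binary-parser declares the record layout up front and returns a plain object synchronously. A minimal sketch of the new API (not part of the commit; the field names here are illustrative):

const { Parser } = require('binary-parser');

const header = new Parser()
    .uint16le('origNode')
    .uint16le('destNode');

// parse() is synchronous and throws on truncated/invalid input,
// hence the try/catch blocks added throughout this commit
const result = header.parse(Buffer.from([ 0x01, 0x00, 0x02, 0x00 ]));
// -> { origNode : 1, destNode : 2 }
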
@@ -23,7 +24,6 @@ const FTN_PACKET_HEADER_SIZE = 58; // fixed header size
 const FTN_PACKET_HEADER_TYPE = 2;
 const FTN_PACKET_MESSAGE_TYPE = 2;
 const FTN_PACKET_BAUD_TYPE_2_2 = 2;
-const NULL_TERM_BUFFER = new Buffer( [ 0x00 ] );
 
 // SAUCE magic header + version ("00")
 const FTN_MESSAGE_SAUCE_HEADER = new Buffer('SAUCE00');
@@ -173,108 +173,103 @@ function Packet(options) {
     this.parsePacketHeader = function(packetBuffer, cb) {
         assert(Buffer.isBuffer(packetBuffer));
 
-        if(packetBuffer.length < FTN_PACKET_HEADER_SIZE) {
-            cb(new Error('Buffer too small'));
-            return;
-        }
-
-        //
-        // Start out reading as if this is a FSC-0048 2+ packet
-        //
-        binary.parse(packetBuffer)
-            .word16lu('origNode')
-            .word16lu('destNode')
-            .word16lu('year')
-            .word16lu('month')
-            .word16lu('day')
-            .word16lu('hour')
-            .word16lu('minute')
-            .word16lu('second')
-            .word16lu('baud')
-            .word16lu('packetType')
-            .word16lu('origNet')
-            .word16lu('destNet')
-            .word8('prodCodeLo')
-            .word8('prodRevLo')     // aka serialNo
-            .buffer('password', 8)  // null padded C style string
-            .word16lu('origZone')
-            .word16lu('destZone')
-            //
-            // The following is "filler" in FTS-0001, specifics in
-            // FSC-0045 and FSC-0048
-            //
-            .word16lu('auxNet')
-            .word16lu('capWordValidate')
-            .word8('prodCodeHi')
-            .word8('prodRevHi')
-            .word16lu('capWord')
-            .word16lu('origZone2')
-            .word16lu('destZone2')
-            .word16lu('origPoint')
-            .word16lu('destPoint')
-            .word32lu('prodData')
-            .tap(packetHeader => {
-                // Convert password from NULL padded array to string
-                //packetHeader.password = ftn.stringFromFTN(packetHeader.password);
-                packetHeader.password = strUtil.stringFromNullTermBuffer(packetHeader.password, 'CP437');
-
-                if(FTN_PACKET_HEADER_TYPE !== packetHeader.packetType) {
-                    cb(new Error('Unsupported header type: ' + packetHeader.packetType));
-                    return;
-                }
-
-                //
-                // What kind of packet do we really have here?
-                //
-                // :TODO: adjust values based on version discovered
-                if(FTN_PACKET_BAUD_TYPE_2_2 === packetHeader.baud) {
-                    packetHeader.version = '2.2';
-
-                    // See FSC-0045
-                    packetHeader.origPoint = packetHeader.year;
-                    packetHeader.destPoint = packetHeader.month;
-
-                    packetHeader.destDomain = packetHeader.origZone2;
-                    packetHeader.origDomain = packetHeader.auxNet;
-                } else {
-                    //
-                    // See heuristics described in FSC-0048, "Receiving Type-2+ bundles"
-                    //
-                    const capWordValidateSwapped =
-                        ((packetHeader.capWordValidate & 0xff) << 8) |
-                        ((packetHeader.capWordValidate >> 8) & 0xff);
-
-                    if(capWordValidateSwapped === packetHeader.capWord &&
-                        0 != packetHeader.capWord &&
-                        packetHeader.capWord & 0x0001)
-                    {
-                        packetHeader.version = '2+';
-
-                        // See FSC-0048
-                        if(-1 === packetHeader.origNet) {
-                            packetHeader.origNet = packetHeader.auxNet;
-                        }
-                    } else {
-                        packetHeader.version = '2';
-
-                        // :TODO: should fill bytes be 0?
-                    }
-                }
-
-                packetHeader.created = moment({
-                    year    : packetHeader.year,
-                    month   : packetHeader.month - 1, // moment uses 0 indexed months
-                    date    : packetHeader.day,
-                    hour    : packetHeader.hour,
-                    minute  : packetHeader.minute,
-                    second  : packetHeader.second
-                });
-
-                let ph = new PacketHeader();
-                _.assign(ph, packetHeader);
-
-                cb(null, ph);
-            });
+        let packetHeader;
+        try {
+            packetHeader = new Parser()
+                .uint16le('origNode')
+                .uint16le('destNode')
+                .uint16le('year')
+                .uint16le('month')
+                .uint16le('day')
+                .uint16le('hour')
+                .uint16le('minute')
+                .uint16le('second')
+                .uint16le('baud')
+                .uint16le('packetType')
+                .uint16le('origNet')
+                .uint16le('destNet')
+                .int8('prodCodeLo')
+                .int8('prodRevLo')  // aka serialNo
+                .buffer('password', { length : 8 }) // can't use string; need CP437 - see https://github.com/keichi/binary-parser/issues/33
+                .uint16le('origZone')
+                .uint16le('destZone')
+                //
+                // The following is "filler" in FTS-0001, specifics in
+                // FSC-0045 and FSC-0048
+                //
+                .uint16le('auxNet')
+                .uint16le('capWordValidate')
+                .int8('prodCodeHi')
+                .int8('prodRevHi')
+                .uint16le('capWord')
+                .uint16le('origZone2')
+                .uint16le('destZone2')
+                .uint16le('origPoint')
+                .uint16le('destPoint')
+                .uint32le('prodData')
+                .parse(packetBuffer);
+        } catch(e) {
+            return Errors.Invalid(`Unable to parse FTN packet header: ${e.message}`);
+        }
+
+        // Convert password from NULL padded array to string
+        packetHeader.password = strUtil.stringFromNullTermBuffer(packetHeader.password, 'CP437');
+
+        if(FTN_PACKET_HEADER_TYPE !== packetHeader.packetType) {
+            return cb(Errors.Invalid(`Unsupported FTN packet header type: ${packetHeader.packetType}`));
+        }
+
+        //
+        // What kind of packet do we really have here?
+        //
+        // :TODO: adjust values based on version discovered
+        if(FTN_PACKET_BAUD_TYPE_2_2 === packetHeader.baud) {
+            packetHeader.version = '2.2';
+
+            // See FSC-0045
+            packetHeader.origPoint = packetHeader.year;
+            packetHeader.destPoint = packetHeader.month;
+
+            packetHeader.destDomain = packetHeader.origZone2;
+            packetHeader.origDomain = packetHeader.auxNet;
+        } else {
+            //
+            // See heuristics described in FSC-0048, "Receiving Type-2+ bundles"
+            //
+            const capWordValidateSwapped =
+                ((packetHeader.capWordValidate & 0xff) << 8) |
+                ((packetHeader.capWordValidate >> 8) & 0xff);
+
+            if(capWordValidateSwapped === packetHeader.capWord &&
+                0 != packetHeader.capWord &&
+                packetHeader.capWord & 0x0001)
+            {
+                packetHeader.version = '2+';
+
+                // See FSC-0048
+                if(-1 === packetHeader.origNet) {
+                    packetHeader.origNet = packetHeader.auxNet;
+                }
+            } else {
+                packetHeader.version = '2';
+
+                // :TODO: should fill bytes be 0?
+            }
+        }
+
+        packetHeader.created = moment({
+            year    : packetHeader.year,
+            month   : packetHeader.month - 1, // moment uses 0 indexed months
+            date    : packetHeader.day,
+            hour    : packetHeader.hour,
+            minute  : packetHeader.minute,
+            second  : packetHeader.second
+        });
+
+        const ph = new PacketHeader();
+        _.assign(ph, packetHeader);
+
+        return cb(null, ph);
     };
 
     this.getPacketHeaderBuffer = function(packetHeader) {
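
The FSC-0048 heuristic above deserves a worked example: a Type-2+ writer stores capWord and, in the old filler location, its byte-swapped copy (capWordValidate), with bit 0 set. A small sketch using hypothetical values, not from the commit:

// a 2+ packet might carry capWord = 0x0001 and its swapped copy 0x0100
const capWord = 0x0001;
const capWordValidate = 0x0100;

const swapped = ((capWordValidate & 0xff) << 8) | ((capWordValidate >> 8) & 0xff);
// swapped === 0x0001, matches capWord, is non-zero, and bit 0 is set
console.log(swapped === capWord && 0 != capWord && (capWord & 0x0001) ? '2+' : '2'); // '2+'
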
@@ -454,21 +449,30 @@ function Packet(options) {
             // :TODO: See encodingFromHeader() for CHRS/CHARSET support @ https://github.com/Mithgol/node-fidonet-jam
             const FTN_CHRS_PREFIX = new Buffer( [ 0x01, 0x43, 0x48, 0x52, 0x53, 0x3a, 0x20 ] ); // "\x01CHRS:"
             const FTN_CHRS_SUFFIX = new Buffer( [ 0x0d ] );
-            binary.parse(messageBodyBuffer)
-                .scan('prefix', FTN_CHRS_PREFIX)
-                .scan('content', FTN_CHRS_SUFFIX)
-                .tap(chrsData => {
-                    if(chrsData.prefix && chrsData.content && chrsData.content.length > 0) {
-                        const chrs = iconv.decode(chrsData.content, 'CP437');
-                        const chrsEncoding = ftn.getEncodingFromCharacterSetIdentifier(chrs);
-                        if(chrsEncoding) {
-                            encoding = chrsEncoding;
-                        }
-                        callback(null);
-                    } else {
-                        callback(null);
-                    }
-                });
+
+            let chrsPrefixIndex = messageBodyBuffer.indexOf(FTN_CHRS_PREFIX);
+            if(chrsPrefixIndex < 0) {
+                return callback(null);
+            }
+
+            chrsPrefixIndex += FTN_CHRS_PREFIX.length;
+
+            const chrsEndIndex = messageBodyBuffer.indexOf(FTN_CHRS_SUFFIX, chrsPrefixIndex);
+            if(chrsEndIndex < 0) {
+                return callback(null);
+            }
+
+            let chrsContent = messageBodyBuffer.slice(chrsPrefixIndex, chrsEndIndex);
+            if(0 === chrsContent.length) {
+                return callback(null);
+            }
+
+            chrsContent = iconv.decode(chrsContent, 'CP437');
+            const chrsEncoding = ftn.getEncodingFromCharacterSetIdentifier(chrsContent);
+            if(chrsEncoding) {
+                encoding = chrsEncoding;
+            }
+            return callback(null);
         },
         function extractMessageData(callback) {
             //
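
The old module's .scan() calls are replaced here with plain Buffer.indexOf(). A standalone sketch of the same technique, using a made-up message body:

const body = Buffer.concat([
    Buffer.from('Hello\r'),
    Buffer.from([ 0x01 ]),
    Buffer.from('CHRS: UTF-8 4\r'),  // a ^ACHRS kludge line
]);

const prefix = Buffer.from([ 0x01, 0x43, 0x48, 0x52, 0x53, 0x3a, 0x20 ]); // "\x01CHRS: "
const start = body.indexOf(prefix);
if(start > -1) {
    // search for the terminating CR only past the prefix we just matched
    const end = body.indexOf(0x0d, start + prefix.length);
    console.log(body.slice(start + prefix.length, end).toString()); // "UTF-8 4"
}
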
@@ -525,125 +529,160 @@ function Packet(options) {
     };
 
     this.parsePacketMessages = function(header, packetBuffer, iterator, cb) {
-        binary.parse(packetBuffer)
-            .word16lu('messageType')
-            .word16lu('ftn_msg_orig_node')
-            .word16lu('ftn_msg_dest_node')
-            .word16lu('ftn_msg_orig_net')
-            .word16lu('ftn_msg_dest_net')
-            .word16lu('ftn_attr_flags')
-            .word16lu('ftn_cost')
-            .scan('modDateTime', NULL_TERM_BUFFER)      // :TODO: 20 bytes max
-            .scan('toUserName', NULL_TERM_BUFFER)       // :TODO: 36 bytes max
-            .scan('fromUserName', NULL_TERM_BUFFER)     // :TODO: 36 bytes max
-            .scan('subject', NULL_TERM_BUFFER)          // :TODO: 72 bytes max6
-            .scan('message', NULL_TERM_BUFFER)
-            .tap(function tapped(msgData) { // no arrow function; want classic this
-                if(!msgData.messageType) {
-                    // end marker -- no more messages
-                    return cb(null);
-                }
-
-                if(FTN_PACKET_MESSAGE_TYPE != msgData.messageType) {
-                    return cb(new Error('Unsupported message type: ' + msgData.messageType));
-                }
-
-                const read =
-                    14 + // fixed header size
-                    msgData.modDateTime.length + 1 +
-                    msgData.toUserName.length + 1 +
-                    msgData.fromUserName.length + 1 +
-                    msgData.subject.length + 1 +
-                    msgData.message.length + 1;
-
-                //
-                // Convert null terminated arrays to strings
-                //
-                let convMsgData = {};
-                [ 'modDateTime', 'toUserName', 'fromUserName', 'subject' ].forEach(k => {
-                    convMsgData[k] = iconv.decode(msgData[k], 'CP437');
-                });
-
-                //
-                // The message body itself is a special beast as it may
-                // contain an origin line, kludges, SAUCE in the case
-                // of ANSI files, etc.
-                //
-                const msg = new Message( {
-                    toUserName      : convMsgData.toUserName,
-                    fromUserName    : convMsgData.fromUserName,
-                    subject         : convMsgData.subject,
-                    modTimestamp    : ftn.getDateFromFtnDateTime(convMsgData.modDateTime),
-                });
-
-                // :TODO: When non-private (e.g. EchoMail), attempt to extract SRC from MSGID vs headers, when avail (or Orgin line? research further)
-                msg.meta.FtnProperty = {
-                    ftn_orig_node       : header.origNode,
-                    ftn_dest_node       : header.destNode,
-                    ftn_orig_network    : header.origNet,
-                    ftn_dest_network    : header.destNet,
-
-                    ftn_attr_flags      : msgData.ftn_attr_flags,
-                    ftn_cost            : msgData.ftn_cost,
-
-                    ftn_msg_orig_node   : msgData.ftn_msg_orig_node,
-                    ftn_msg_dest_node   : msgData.ftn_msg_dest_node,
-                    ftn_msg_orig_net    : msgData.ftn_msg_orig_net,
-                    ftn_msg_dest_net    : msgData.ftn_msg_dest_net,
-                };
-
-                self.processMessageBody(msgData.message, messageBodyData => {
-                    msg.message = messageBodyData.message;
-                    msg.meta.FtnKludge = messageBodyData.kludgeLines;
-
-                    if(messageBodyData.tearLine) {
-                        msg.meta.FtnProperty.ftn_tear_line = messageBodyData.tearLine;
-
-                        if(self.options.keepTearAndOrigin) {
-                            msg.message += `\r\n${messageBodyData.tearLine}\r\n`;
-                        }
-                    }
-
-                    if(messageBodyData.seenBy.length > 0) {
-                        msg.meta.FtnProperty.ftn_seen_by = messageBodyData.seenBy;
-                    }
-
-                    if(messageBodyData.area) {
-                        msg.meta.FtnProperty.ftn_area = messageBodyData.area;
-                    }
-
-                    if(messageBodyData.originLine) {
-                        msg.meta.FtnProperty.ftn_origin = messageBodyData.originLine;
-
-                        if(self.options.keepTearAndOrigin) {
-                            msg.message += `${messageBodyData.originLine}\r\n`;
-                        }
-                    }
-
-                    //
-                    // If we have a UTC offset kludge (e.g. TZUTC) then update
-                    // modDateTime with it
-                    //
-                    if(_.isString(msg.meta.FtnKludge.TZUTC) && msg.meta.FtnKludge.TZUTC.length > 0) {
-                        msg.modDateTime = msg.modTimestamp.utcOffset(msg.meta.FtnKludge.TZUTC);
-                    }
-
-                    const nextBuf = packetBuffer.slice(read);
-                    if(nextBuf.length > 0) {
-                        const next = function(e) {
-                            if(e) {
-                                cb(e);
-                            } else {
-                                self.parsePacketMessages(header, nextBuf, iterator, cb);
-                            }
-                        };
-
-                        iterator('message', msg, next);
-                    } else {
-                        cb(null);
-                    }
-                });
-            });
+        //
+        // Check for end-of-messages marker up front before parse so we can easily
+        // tell the difference between end and bad header
+        //
+        if(packetBuffer.length < 3) {
+            const peek = packetBuffer.slice(0, 2);
+            if(peek.equals(Buffer.from([ 0x00 ])) || peek.equals(Buffer.from( [ 0x00, 0x00 ]))) {
+                // end marker - no more messages
+                return cb(null);
+            }
+            // else fall through & hit exception below to log error
+        }
+
+        let msgData;
+        try {
+            msgData = new Parser()
+                .uint16le('messageType')
+                .uint16le('ftn_msg_orig_node')
+                .uint16le('ftn_msg_dest_node')
+                .uint16le('ftn_msg_orig_net')
+                .uint16le('ftn_msg_dest_net')
+                .uint16le('ftn_attr_flags')
+                .uint16le('ftn_cost')
+                // :TODO: use string() for these if https://github.com/keichi/binary-parser/issues/33 is resolved
+                .array('modDateTime', {
+                    type        : 'uint8',
+                    readUntil   : b => 0x00 === b,
+                })
+                .array('toUserName', {
+                    type        : 'uint8',
+                    readUntil   : b => 0x00 === b,
+                })
+                .array('fromUserName', {
+                    type        : 'uint8',
+                    readUntil   : b => 0x00 === b,
+                })
+                .array('subject', {
+                    type        : 'uint8',
+                    readUntil   : b => 0x00 === b,
+                })
+                .array('message', {
+                    type        : 'uint8',
+                    readUntil   : b => 0x00 === b,
+                })
+                .parse(packetBuffer);
+        } catch(e) {
+            return cb(Errors.Invalid(`Failed to parse FTN message header: ${e.message}`));
+        }
+
+        if(FTN_PACKET_MESSAGE_TYPE != msgData.messageType) {
+            return cb(Errors.Invalid(`Unsupported FTN message type: ${msgData.messageType}`));
+        }
+
+        //
+        // Convert null terminated arrays to strings
+        //
+        [ 'modDateTime', 'toUserName', 'fromUserName', 'subject' ].forEach(k => {
+            msgData[k] = strUtil.stringFromNullTermBuffer(msgData[k], 'CP437');
+        });
+
+        // Technically the following fields have length limits as per fts-0001.016:
+        // * modDateTime   : 20 bytes
+        // * toUserName    : 36 bytes
+        // * fromUserName  : 36 bytes
+        // * subject       : 72 bytes
+
+        //
+        // The message body itself is a special beast as it may
+        // contain an origin line, kludges, SAUCE in the case
+        // of ANSI files, etc.
+        //
+        const msg = new Message( {
+            toUserName      : msgData.toUserName,
+            fromUserName    : msgData.fromUserName,
+            subject         : msgData.subject,
+            modTimestamp    : ftn.getDateFromFtnDateTime(msgData.modDateTime),
+        });
+
+        // :TODO: When non-private (e.g. EchoMail), attempt to extract SRC from MSGID vs headers, when avail (or Orgin line? research further)
+        msg.meta.FtnProperty = {
+            ftn_orig_node       : header.origNode,
+            ftn_dest_node       : header.destNode,
+            ftn_orig_network    : header.origNet,
+            ftn_dest_network    : header.destNet,
+
+            ftn_attr_flags      : msgData.ftn_attr_flags,
+            ftn_cost            : msgData.ftn_cost,
+
+            ftn_msg_orig_node   : msgData.ftn_msg_orig_node,
+            ftn_msg_dest_node   : msgData.ftn_msg_dest_node,
+            ftn_msg_orig_net    : msgData.ftn_msg_orig_net,
+            ftn_msg_dest_net    : msgData.ftn_msg_dest_net,
+        };
+
+        self.processMessageBody(msgData.message, messageBodyData => {
+            msg.message = messageBodyData.message;
+            msg.meta.FtnKludge = messageBodyData.kludgeLines;
+
+            if(messageBodyData.tearLine) {
+                msg.meta.FtnProperty.ftn_tear_line = messageBodyData.tearLine;
+
+                if(self.options.keepTearAndOrigin) {
+                    msg.message += `\r\n${messageBodyData.tearLine}\r\n`;
+                }
+            }
+
+            if(messageBodyData.seenBy.length > 0) {
+                msg.meta.FtnProperty.ftn_seen_by = messageBodyData.seenBy;
+            }
+
+            if(messageBodyData.area) {
+                msg.meta.FtnProperty.ftn_area = messageBodyData.area;
+            }
+
+            if(messageBodyData.originLine) {
+                msg.meta.FtnProperty.ftn_origin = messageBodyData.originLine;
+
+                if(self.options.keepTearAndOrigin) {
+                    msg.message += `${messageBodyData.originLine}\r\n`;
+                }
+            }
+
+            //
+            // If we have a UTC offset kludge (e.g. TZUTC) then update
+            // modDateTime with it
+            //
+            if(_.isString(msg.meta.FtnKludge.TZUTC) && msg.meta.FtnKludge.TZUTC.length > 0) {
+                msg.modDateTime = msg.modTimestamp.utcOffset(msg.meta.FtnKludge.TZUTC);
+            }
+
+            // :TODO: Parser should give is this info:
+            const bytesRead =
+                14 + // fixed header size
+                msgData.modDateTime.length + 1 +    // +1 = NULL
+                msgData.toUserName.length + 1 +     // +1 = NULL
+                msgData.fromUserName.length + 1 +   // +1 = NULL
+                msgData.subject.length + 1 +        // +1 = NULL
+                msgData.message.length;             // includes NULL
+
+            const nextBuf = packetBuffer.slice(bytesRead);
+            if(nextBuf.length > 0) {
+                const next = function(e) {
+                    if(e) {
+                        cb(e);
+                    } else {
+                        self.parsePacketMessages(header, nextBuf, iterator, cb);
+                    }
+                };
+
+                iterator('message', msg, next);
+            } else {
+                cb(null);
+            }
+        });
     };
 
     this.sanatizeFtnProperties = function(message) {
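
A note on the null-terminated fields above: with binary-parser's array + readUntil (assuming the ~1.3 semantics this commit targets), the byte that satisfies the predicate, the NULL, is kept in the resulting array. That is why the bytesRead math adds one NULL back per field that was converted to a string (whose length excludes it), while 'message' stays an array and already includes its NULL. A minimal sketch, not from the commit:

const { Parser } = require('binary-parser');

const cstr = new Parser()
    .array('name', {
        type        : 'uint8',
        readUntil   : b => 0x00 === b,  // stop at NULL; NULL itself is included
    });

const parsed = cstr.parse(Buffer.from('Sysop\0rest', 'ascii'));
console.log(parsed.name.length); // 6 -- 'S','y','s','o','p', 0x00
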

core/sauce.js (180 lines changed)

@@ -1,8 +1,11 @@
 /* jslint node: true */
 'use strict';
 
-var binary = require('binary');
-var iconv = require('iconv-lite');
+const Errors = require('./enig_error.js').Errors;
+
+// deps
+const iconv = require('iconv-lite');
+const { Parser } = require('binary-parser');
 
 exports.readSAUCE = readSAUCE;
@@ -25,103 +28,107 @@ const SAUCE_VALID_DATA_TYPES = [0, 1, 2, 3, 4, 5, 6, 7, 8 ];
 
 function readSAUCE(data, cb) {
     if(data.length < SAUCE_SIZE) {
-        cb(new Error('No SAUCE record present'));
-        return;
+        return cb(Errors.DoesNotExist('No SAUCE record present'));
     }
 
-    var offset = data.length - SAUCE_SIZE;
-    var sauceRec = data.slice(offset);
-
-    binary.parse(sauceRec)
-        .buffer('id', 5)
-        .buffer('version', 2)
-        .buffer('title', 35)
-        .buffer('author', 20)
-        .buffer('group', 20)
-        .buffer('date', 8)
-        .word32lu('fileSize')
-        .word8('dataType')
-        .word8('fileType')
-        .word16lu('tinfo1')
-        .word16lu('tinfo2')
-        .word16lu('tinfo3')
-        .word16lu('tinfo4')
-        .word8('numComments')
-        .word8('flags')
-        .buffer('tinfos', 22)   // SAUCE 00.5
-        .tap(function onVars(vars) {
-
-            if(!SAUCE_ID.equals(vars.id)) {
-                return cb(new Error('No SAUCE record present'));
-            }
-
-            var ver = iconv.decode(vars.version, 'cp437');
-
-            if('00' !== ver) {
-                return cb(new Error('Unsupported SAUCE version: ' + ver));
-            }
-
-            if(-1 === SAUCE_VALID_DATA_TYPES.indexOf(vars.dataType)) {
-                return cb(new Error('Unsupported SAUCE DataType: ' + vars.dataType));
-            }
-
-            var sauce = {
-                id          : iconv.decode(vars.id, 'cp437'),
-                version     : iconv.decode(vars.version, 'cp437').trim(),
-                title       : iconv.decode(vars.title, 'cp437').trim(),
-                author      : iconv.decode(vars.author, 'cp437').trim(),
-                group       : iconv.decode(vars.group, 'cp437').trim(),
-                date        : iconv.decode(vars.date, 'cp437').trim(),
-                fileSize    : vars.fileSize,
-                dataType    : vars.dataType,
-                fileType    : vars.fileType,
-                tinfo1      : vars.tinfo1,
-                tinfo2      : vars.tinfo2,
-                tinfo3      : vars.tinfo3,
-                tinfo4      : vars.tinfo4,
-                numComments : vars.numComments,
-                flags       : vars.flags,
-                tinfos      : vars.tinfos,
-            };
-
-            var dt = SAUCE_DATA_TYPES[sauce.dataType];
-            if(dt && dt.parser) {
-                sauce[dt.name] = dt.parser(sauce);
-            }
-
-            cb(null, sauce);
-        });
+    let sauceRec;
+    try {
+        sauceRec = new Parser()
+            .buffer('id', { length : 5 } )
+            .buffer('version', { length : 2 } )
+            .buffer('title', { length: 35 } )
+            .buffer('author', { length : 20 } )
+            .buffer('group', { length: 20 } )
+            .buffer('date', { length: 8 } )
+            .uint32le('fileSize')
+            .int8('dataType')
+            .int8('fileType')
+            .uint16le('tinfo1')
+            .uint16le('tinfo2')
+            .uint16le('tinfo3')
+            .uint16le('tinfo4')
+            .int8('numComments')
+            .int8('flags')
+            // :TODO: does this need to be optional?
+            .buffer('tinfos', { length: 22 } )  // SAUCE 00.5
+            .parse(data.slice(data.length - SAUCE_SIZE));
+    } catch(e) {
+        return cb(Errors.Invalid('Invalid SAUCE record'));
+    }
+
+    if(!SAUCE_ID.equals(sauceRec.id)) {
+        return cb(Errors.DoesNotExist('No SAUCE record present'));
+    }
+
+    const ver = iconv.decode(sauceRec.version, 'cp437');
+
+    if('00' !== ver) {
+        return cb(Errors.Invalid(`Unsupported SAUCE version: ${ver}`));
+    }
+
+    if(-1 === SAUCE_VALID_DATA_TYPES.indexOf(sauceRec.dataType)) {
+        return cb(Errors.Invalid(`Unsupported SAUCE DataType: ${sauceRec.dataType}`));
+    }
+
+    const sauce = {
+        id          : iconv.decode(sauceRec.id, 'cp437'),
+        version     : iconv.decode(sauceRec.version, 'cp437').trim(),
+        title       : iconv.decode(sauceRec.title, 'cp437').trim(),
+        author      : iconv.decode(sauceRec.author, 'cp437').trim(),
+        group       : iconv.decode(sauceRec.group, 'cp437').trim(),
+        date        : iconv.decode(sauceRec.date, 'cp437').trim(),
+        fileSize    : sauceRec.fileSize,
+        dataType    : sauceRec.dataType,
+        fileType    : sauceRec.fileType,
+        tinfo1      : sauceRec.tinfo1,
+        tinfo2      : sauceRec.tinfo2,
+        tinfo3      : sauceRec.tinfo3,
+        tinfo4      : sauceRec.tinfo4,
+        numComments : sauceRec.numComments,
+        flags       : sauceRec.flags,
+        tinfos      : sauceRec.tinfos,
+    };
+
+    const dt = SAUCE_DATA_TYPES[sauce.dataType];
+    if(dt && dt.parser) {
+        sauce[dt.name] = dt.parser(sauce);
+    }
+
+    return cb(null, sauce);
 }
 
 // :TODO: These need completed:
-var SAUCE_DATA_TYPES = {};
-SAUCE_DATA_TYPES[0] = { name : 'None' };
-SAUCE_DATA_TYPES[1] = { name : 'Character', parser : parseCharacterSAUCE };
-SAUCE_DATA_TYPES[2] = 'Bitmap';
-SAUCE_DATA_TYPES[3] = 'Vector';
-SAUCE_DATA_TYPES[4] = 'Audio';
-SAUCE_DATA_TYPES[5] = 'BinaryText';
-SAUCE_DATA_TYPES[6] = 'XBin';
-SAUCE_DATA_TYPES[7] = 'Archive';
-SAUCE_DATA_TYPES[8] = 'Executable';
+const SAUCE_DATA_TYPES = {
+    0 : { name : 'None' },
+    1 : { name : 'Character', parser : parseCharacterSAUCE },
+    2 : 'Bitmap',
+    3 : 'Vector',
+    4 : 'Audio',
+    5 : 'BinaryText',
+    6 : 'XBin',
+    7 : 'Archive',
+    8 : 'Executable',
+};
 
-var SAUCE_CHARACTER_FILE_TYPES = {};
-SAUCE_CHARACTER_FILE_TYPES[0] = 'ASCII';
-SAUCE_CHARACTER_FILE_TYPES[1] = 'ANSi';
-SAUCE_CHARACTER_FILE_TYPES[2] = 'ANSiMation';
-SAUCE_CHARACTER_FILE_TYPES[3] = 'RIP script';
-SAUCE_CHARACTER_FILE_TYPES[4] = 'PCBoard';
-SAUCE_CHARACTER_FILE_TYPES[5] = 'Avatar';
-SAUCE_CHARACTER_FILE_TYPES[6] = 'HTML';
-SAUCE_CHARACTER_FILE_TYPES[7] = 'Source';
-SAUCE_CHARACTER_FILE_TYPES[8] = 'TundraDraw';
+const SAUCE_CHARACTER_FILE_TYPES = {
+    0 : 'ASCII',
+    1 : 'ANSi',
+    2 : 'ANSiMation',
+    3 : 'RIP script',
+    4 : 'PCBoard',
+    5 : 'Avatar',
+    6 : 'HTML',
+    7 : 'Source',
+    8 : 'TundraDraw',
+};
 
 //
 // Map of SAUCE font -> encoding hint
 //
 // Note that this is the same mapping that x84 uses. Be compatible!
 //
-var SAUCE_FONT_TO_ENCODING_HINT = {
+const SAUCE_FONT_TO_ENCODING_HINT = {
     'Amiga MicroKnight'     : 'amiga',
     'Amiga MicroKnight+'    : 'amiga',
     'Amiga mOsOul'          : 'amiga',
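
For reference, readSAUCE() keeps its callback signature across the conversion. A hypothetical usage sketch ('some_artwork.ans' is a made-up path; the caller is assumed to live next to core/sauce.js):

const fs = require('graceful-fs');
const { readSAUCE } = require('./sauce.js');

fs.readFile('some_artwork.ans', (err, data) => {
    if(err) {
        return console.error(err);
    }
    readSAUCE(data, (err, sauce) => {
        if(err) {
            // e.g. "No SAUCE record present" for files without a trailer
            return console.error(err.message);
        }
        console.log(`${sauce.title} by ${sauce.author} (${sauce.group})`);
    });
});
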
@@ -138,9 +145,11 @@ var SAUCE_FONT_TO_ENCODING_HINT = {
     'IBM VGA'               : 'cp437',
 };
 
-['437', '720', '737', '775', '819', '850', '852', '855', '857', '858',
-    '860', '861', '862', '863', '864', '865', '866', '869', '872'].forEach(function onPage(page) {
-    var codec = 'cp' + page;
+[
+    '437', '720', '737', '775', '819', '850', '852', '855', '857', '858',
+    '860', '861', '862', '863', '864', '865', '866', '869', '872'
+].forEach( page => {
+    const codec = 'cp' + page;
     SAUCE_FONT_TO_ENCODING_HINT['IBM EGA43 ' + page]    = codec;
     SAUCE_FONT_TO_ENCODING_HINT['IBM EGA ' + page]      = codec;
     SAUCE_FONT_TO_ENCODING_HINT['IBM VGA25g ' + page]   = codec;
@@ -149,7 +158,7 @@ var SAUCE_FONT_TO_ENCODING_HINT = {
 });
 
 function parseCharacterSAUCE(sauce) {
-    var result = {};
+    const result = {};
 
     result.fileType = SAUCE_CHARACTER_FILE_TYPES[sauce.fileType] || 'Unknown';
 
@@ -157,11 +166,12 @@ function parseCharacterSAUCE(sauce) {
     // convience: create ansiFlags
     sauce.ansiFlags = sauce.flags;
 
-    var i = 0;
+    let i = 0;
     while(i < sauce.tinfos.length && sauce.tinfos[i] !== 0x00) {
        ++i;
     }
-    var fontName = iconv.decode(sauce.tinfos.slice(0, i), 'cp437');
+
+    const fontName = iconv.decode(sauce.tinfos.slice(0, i), 'cp437');
     if(fontName.length > 0) {
         result.fontName = fontName;
     }
@@ -2,16 +2,17 @@
 'use strict';
 
 // ENiGMA½
 const baseClient = require('../../client.js');
 const Log = require('../../logger.js').log;
 const LoginServerModule = require('../../login_server_module.js');
 const Config = require('../../config.js').config;
 const EnigAssert = require('../../enigma_assert.js');
+const { stringFromNullTermBuffer } = require('../../string_util.js');
 
 // deps
 const net = require('net');
 const buffers = require('buffers');
-const binary = require('binary');
+const { Parser } = require('binary-parser');
 const util = require('util');
 
 //var debug = require('debug')('telnet');
@@ -218,46 +219,42 @@ OPTION_IMPLS[OPTIONS.TERMINAL_TYPE] = function(bufs, i, event) {
         return MORE_DATA_REQUIRED;
     }
 
-    let end = bufs.indexOf(IAC_SE_BUF, 5);      // look past header bytes
+    const end = bufs.indexOf(IAC_SE_BUF, 5);    // look past header bytes
     if(-1 === end) {
         return MORE_DATA_REQUIRED;
     }
 
-    // eat up and process the header
-    let buf = bufs.splice(0, 4).toBuffer();
-    binary.parse(buf)
-        .word8('iac1')
-        .word8('sb')
-        .word8('ttype')
-        .word8('is')
-        .tap(function(vars) {
-            EnigAssert(vars.iac1 === COMMANDS.IAC);
-            EnigAssert(vars.sb === COMMANDS.SB);
-            EnigAssert(vars.ttype === OPTIONS.TERMINAL_TYPE);
-            EnigAssert(vars.is === SB_COMMANDS.IS);
-        });
-
-    // eat up the rest
-    end -= 4;
-    buf = bufs.splice(0, end).toBuffer();
-
-    //
-    // From this point -> |end| is our ttype
-    //
-    // Look for trailing NULL(s). Clients such as NetRunner do this.
-    // If none is found, we take the entire buffer
-    //
-    let trimAt = 0;
-    for(; trimAt < buf.length; ++trimAt) {
-        if(0x00 === buf[trimAt]) {
-            break;
-        }
-    }
-
-    event.ttype = buf.toString('ascii', 0, trimAt);
-
-    // pop off the terminating IAC SE
-    bufs.splice(0, 2);
+    let ttypeCmd;
+    try {
+        ttypeCmd = new Parser()
+            .uint8('iac1')
+            .uint8('sb')
+            .uint8('opt')
+            .uint8('is')
+            .array('ttype', {
+                type        : 'uint8',
+                readUntil   : b => 255 === b,   // 255=COMMANDS.IAC
+            })
+            // note we read iac2 above
+            .uint8('se')
+            .parse(bufs.toBuffer());
+    } catch(e) {
+        Log.debug( { error : e }, 'Failed parsing TTYP telnet command');
+        return event;
+    }
+
+    EnigAssert(COMMANDS.IAC === ttypeCmd.iac1);
+    EnigAssert(COMMANDS.SB === ttypeCmd.sb);
+    EnigAssert(OPTIONS.TERMINAL_TYPE === ttypeCmd.opt);
+    EnigAssert(SB_COMMANDS.IS === ttypeCmd.is);
+    EnigAssert(ttypeCmd.ttype.length > 0);
+    // note we found IAC_SE above
+
+    // some terminals such as NetRunner provide a NULL-terminated buffer
+    // slice to remove IAC
+    event.ttype = stringFromNullTermBuffer(ttypeCmd.ttype.slice(0, -1), 'ascii');
+
+    bufs.splice(0, end);
 }
 
 return event;
@@ -272,25 +269,30 @@ OPTION_IMPLS[OPTIONS.WINDOW_SIZE] = function(bufs, i, event) {
         return MORE_DATA_REQUIRED;
     }
 
-    event.buf = bufs.splice(0, 9).toBuffer();
-    binary.parse(event.buf)
-        .word8('iac1')
-        .word8('sb')
-        .word8('naws')
-        .word16bu('width')
-        .word16bu('height')
-        .word8('iac2')
-        .word8('se')
-        .tap(function(vars) {
-            EnigAssert(vars.iac1 == COMMANDS.IAC);
-            EnigAssert(vars.sb == COMMANDS.SB);
-            EnigAssert(vars.naws == OPTIONS.WINDOW_SIZE);
-            EnigAssert(vars.iac2 == COMMANDS.IAC);
-            EnigAssert(vars.se == COMMANDS.SE);
-
-            event.cols = event.columns = event.width = vars.width;
-            event.rows = event.height = vars.height;
-        });
+    let nawsCmd;
+    try {
+        nawsCmd = new Parser()
+            .uint8('iac1')
+            .uint8('sb')
+            .uint8('opt')
+            .uint16be('width')
+            .uint16be('height')
+            .uint8('iac2')
+            .uint8('se')
+            .parse(bufs.splice(0, 9).toBuffer());
+    } catch(e) {
+        Log.debug( { error : e }, 'Failed parsing NAWS telnet command');
+        return event;
+    }
+
+    EnigAssert(COMMANDS.IAC === nawsCmd.iac1);
+    EnigAssert(COMMANDS.SB === nawsCmd.sb);
+    EnigAssert(OPTIONS.WINDOW_SIZE === nawsCmd.opt);
+    EnigAssert(COMMANDS.IAC === nawsCmd.iac2);
+    EnigAssert(COMMANDS.SE === nawsCmd.se);
+
+    event.cols = event.columns = event.width = nawsCmd.width;
+    event.rows = event.height = nawsCmd.height;
 }
 return event;
 };
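
A sketch of the 9-byte NAWS subnegotiation the parser above expects (IAC SB NAWS width16 height16 IAC SE, widths big-endian per RFC 1073). The byte values are the standard telnet codes (IAC=255, SB=250, SE=240, NAWS=31), shown here literally:

const { Parser } = require('binary-parser');

const naws = Buffer.from([ 255, 250, 31, 0, 80, 0, 25, 255, 240 ]); // 80x25
const cmd = new Parser()
    .uint8('iac1')
    .uint8('sb')
    .uint8('opt')
    .uint16be('width')
    .uint16be('height')
    .uint8('iac2')
    .uint8('se')
    .parse(naws);

console.log(cmd.width, cmd.height); // 80 25
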
@@ -321,78 +323,109 @@ OPTION_IMPLS[OPTIONS.NEW_ENVIRONMENT] = function(bufs, i, event) {
         return MORE_DATA_REQUIRED;
     }
 
-    // eat up and process the header
-    let buf = bufs.splice(0, 4).toBuffer();
-    binary.parse(buf)
-        .word8('iac1')
-        .word8('sb')
-        .word8('newEnv')
-        .word8('isOrInfo')  // initial=IS, updates=INFO
-        .tap(function(vars) {
-            EnigAssert(vars.iac1 === COMMANDS.IAC);
-            EnigAssert(vars.sb === COMMANDS.SB);
-            EnigAssert(vars.newEnv === OPTIONS.NEW_ENVIRONMENT || vars.newEnv === OPTIONS.NEW_ENVIRONMENT_DEP);
-            EnigAssert(vars.isOrInfo === SB_COMMANDS.IS || vars.isOrInfo === SB_COMMANDS.INFO);
-
-            event.type = vars.isOrInfo;
-
-            if(vars.newEnv === OPTIONS.NEW_ENVIRONMENT_DEP) {
-                // :TODO: bring all this into Telnet class
-                Log.log.warn('Handling deprecated RFC 1408 NEW-ENVIRON');
-            }
-        });
-
-    // eat up the rest
-    end -= 4;
-    buf = bufs.splice(0, end).toBuffer();
-
-    //
-    // This part can become messy. The basic spec is:
-    // IAC SB NEW-ENVIRON IS type ... [ VALUE ... ] [ type ... [ VALUE ... ] [ ... ] ] IAC SE
-    //
-    // See RFC 1572 @ http://www.faqs.org/rfcs/rfc1572.html
-    //
-    // Start by splitting up the remaining buffer. Keep the delimiters
-    // as prefixes we can use for processing.
-    //
-    // :TODO: Currently not supporting ESCaped values (ESC + <type>). Probably not really in the wild, but we should be compliant
-    // :TODO: Could probably just convert this to use a regex & handle delims + escaped values... in any case, this is sloppy...
-    const params = [];
-    let p = 0;
-    let j;
-    let l;
-    for(j = 0, l = buf.length; j < l; ++j) {
-        if(NEW_ENVIRONMENT_DELIMITERS.indexOf(buf[j]) === -1) {
-            continue;
-        }
-
-        params.push(buf.slice(p, j));
-        p = j;
-    }
-
-    // remainder
-    if(p < l) {
-        params.push(buf.slice(p, l));
-    }
-
-    let varName;
-    event.envVars = {};
-    // :TODO: handle cases where a variable was present in a previous exchange, but missing here...e.g removed
-    for(j = 0; j < params.length; ++j) {
-        if(params[j].length < 2) {
-            continue;
-        }
-
-        let cmd = params[j].readUInt8();
-        if(cmd === NEW_ENVIRONMENT_COMMANDS.VAR || cmd === NEW_ENVIRONMENT_COMMANDS.USERVAR) {
-            varName = params[j].slice(1).toString('utf8');  // :TODO: what encoding should this really be?
-        } else {
-            event.envVars[varName] = params[j].slice(1).toString('utf8');   // :TODO: again, what encoding?
-        }
-    }
-
-    // pop off remaining IAC SE
-    bufs.splice(0, 2);
+    // :TODO: It's likely that we could do all the env name/value parsing directly in Parser.
+
+    let envCmd;
+    try {
+        envCmd = new Parser()
+            .uint8('iac1')
+            .uint8('sb')
+            .uint8('opt')
+            .uint8('isOrInfo')  // IS=initial, INFO=updates
+            .array('envBlock', {
+                type        : 'uint8',
+                readUntil   : b => 255 === b,   // 255=COMMANDS.IAC
+            })
+            // note we consume IAC above
+            .uint8('se')
+            .parse(bufs.splice(0, bufs.length).toBuffer());
+    } catch(e) {
+        Log.debug( { error : e }, 'Failed parsing NEW-ENVIRON telnet command');
+        return event;
+    }
+
+    EnigAssert(COMMANDS.IAC === envCmd.iac1);
+    EnigAssert(COMMANDS.SB === envCmd.sb);
+    EnigAssert(OPTIONS.NEW_ENVIRONMENT === envCmd.opt || OPTIONS.NEW_ENVIRONMENT_DEP === envCmd.opt);
+    EnigAssert(SB_COMMANDS.IS === envCmd.isOrInfo || SB_COMMANDS.INFO === envCmd.isOrInfo);
+
+    if(OPTIONS.NEW_ENVIRONMENT_DEP === envCmd.opt) {
+        // :TODO: we should probably support this for legacy clients?
+        Log.warn('Handling deprecated RFC 1408 NEW-ENVIRON');
+    }
+
+    const envBuf = envCmd.envBlock.slice(0, -1);    // remove IAC
+
+    if(envBuf.length < 4) {    // TYPE + single char name + sep + single char value
+        // empty env block
+        return event;
+    }
+
+    const States = {
+        Name    : 1,
+        Value   : 2,
+    };
+
+    let state = States.Name;
+    const setVars = {};
+    const delVars = [];
+    let varName;
+    // :TODO: handle ESC type!!!
+    while(envBuf.length) {
+        switch(state) {
+            case States.Name :
+                {
+                    const type = parseInt(envBuf.splice(0, 1));
+                    if(![ NEW_ENVIRONMENT_COMMANDS.VAR, NEW_ENVIRONMENT_COMMANDS.USERVAR, NEW_ENVIRONMENT_COMMANDS.ESC ].includes(type)) {
+                        return event;   // fail :(
+                    }
+
+                    let nameEnd = envBuf.indexOf(NEW_ENVIRONMENT_COMMANDS.VALUE);
+                    if(-1 === nameEnd) {
+                        nameEnd = envBuf.length;
+                    }
+
+                    varName = envBuf.splice(0, nameEnd);
+                    if(!varName) {
+                        return event;   // something is wrong.
+                    }
+
+                    varName = Buffer.from(varName).toString('ascii');
+
+                    const next = parseInt(envBuf.splice(0, 1));
+                    if(NEW_ENVIRONMENT_COMMANDS.VALUE === next) {
+                        state = States.Value;
+                    } else {
+                        state = States.Name;
+                        delVars.push(varName);  // no value; del this var
+                    }
+                }
+                break;
+
+            case States.Value :
+                {
+                    let valueEnd = envBuf.indexOf(NEW_ENVIRONMENT_COMMANDS.VAR);
+                    if(-1 === valueEnd) {
+                        valueEnd = envBuf.indexOf(NEW_ENVIRONMENT_COMMANDS.USERVAR);
+                    }
+                    if(-1 === valueEnd) {
+                        valueEnd = envBuf.length;
+                    }
+
+                    let value = envBuf.splice(0, valueEnd);
+                    if(value) {
+                        value = Buffer.from(value).toString('ascii');
+                        setVars[varName] = value;
+                    }
+                    state = States.Name;
+                }
+                break;
+        }
+    }
+
+    // :TODO: Handle deleting previously set vars via delVars
+    event.type = envCmd.isOrInfo;
+    event.envVars = setVars;
 }
 
 return event;
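
A worked example of the byte stream the new state machine walks, assuming the RFC 1572 command codes (VAR=0, VALUE=1, ESC=2, USERVAR=3). After the IAC SB NEW-ENVIRON IS header and before the trailing IAC SE, an env block such as:

// VAR 'U' 'S' 'E' 'R' VALUE 'j' 'o' 'e' USERVAR 'T' 'E' 'R' 'M'
//
// is consumed as:
//   Name state  : type VAR, name runs to VALUE          -> varName = 'USER'
//   Value state : value runs to the next VAR/USERVAR    -> setVars = { USER : 'joe' }
//   Name state  : type USERVAR, no VALUE follows 'TERM' -> delVars = [ 'TERM' ]

TERM arrives with no VALUE, so it is queued for deletion rather than set.
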
@@ -204,7 +204,7 @@ function debugEscapedString(s) {
 }
 
 function stringFromNullTermBuffer(buf, encoding) {
-    let nullPos = buf.indexOf(new Buffer( [ 0x00 ] ));
+    let nullPos = buf.indexOf( 0x00 );
     if(-1 === nullPos) {
         nullPos = buf.length;
     }
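
Node's Buffer.indexOf() accepts a byte value directly, so the two forms below find the same NULL; the number form simply avoids allocating a one-byte Buffer on every call:

Buffer.from('foo\0bar').indexOf(Buffer.from([ 0x00 ])); // 3
Buffer.from('foo\0bar').indexOf(0x00);                  // 3
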
@@ -23,7 +23,7 @@
   ],
   "dependencies": {
     "async": "^2.5.0",
-    "binary": "0.3.x",
+    "binary-parser": "^1.3.2",
     "buffers": "NuSkooler/node-buffers",
     "bunyan": "^1.8.12",
     "exiftool": "^0.0.3",