Compare commits: d0ee1a7044...f_influx-c
36 Commits
| SHA1 |
|---|
| 873f00b21b |
| 271554719e |
| e18de63d7c |
| 2356040572 |
| b0bbf0c71a |
| 37b78e7373 |
| c5e1bb4c64 |
| 3c29ed2000 |
| ddf39b9433 |
| 3a927688d0 |
| c51cfc1b14 |
| e1b2a7e016 |
| fc5900b0ba |
| 354ca32a61 |
| d10e9bb2c6 |
| 44cd3288cf |
| 3af4bb7cc6 |
| 2a662e0bd1 |
| d7a9530b68 |
| 7de2250983 |
| bb3d843895 |
| 9472ed9198 |
| dcd0ce8111 |
| 1a9ced0bb8 |
| d77e3f8844 |
| e715cc1cac |
| cf1b300f6a |
| 450f162cda |
| 7ebcf573b9 |
| 38985ea9e2 |
| 1976838d8a |
| a1ce7a848b |
| 21e4ff1a66 |
| 25297ed4d9 |
| 68541b1191 |
| 7bc5a3530c |
package-lock.json (31 changes, generated)

```diff
@@ -10,8 +10,10 @@
       "license": "AGPL-3.0",
       "dependencies": {
         "@influxdata/influxdb-client": "^1.20.0",
+        "@influxdata/influxdb-client-apis": "^1.20.0",
         "log4js": "^6.3.0",
-        "luxon": "^2.1.1"
+        "luxon": "^2.1.1",
+        "string-argv": "^0.3.1"
       }
     },
     "node_modules/@influxdata/influxdb-client": {
@@ -19,6 +21,14 @@
       "resolved": "https://registry.npmjs.org/@influxdata/influxdb-client/-/influxdb-client-1.20.0.tgz",
       "integrity": "sha512-jaKSI63hmQ5VSkJrFJkYIXaKlhoF+mGd4HmOf7v/X7pmEi69ReHp922Wyx6/OeCrpndRMbsadk+XmGNdd43cFw=="
     },
+    "node_modules/@influxdata/influxdb-client-apis": {
+      "version": "1.20.0",
+      "resolved": "https://registry.npmjs.org/@influxdata/influxdb-client-apis/-/influxdb-client-apis-1.20.0.tgz",
+      "integrity": "sha512-KMTmXH4rbpS+NWGpqDjxcKTyan2rbiT2IM5AdRElKhH2sHbH96xwLgziaxeC+OCJLeNAdehJgae3I8WiZjbwdg==",
+      "peerDependencies": {
+        "@influxdata/influxdb-client": "*"
+      }
+    },
     "node_modules/date-format": {
       "version": "3.0.0",
       "resolved": "https://registry.npmjs.org/date-format/-/date-format-3.0.0.tgz",
@@ -128,6 +138,14 @@
         "node": ">=4.0"
       }
     },
+    "node_modules/string-argv": {
+      "version": "0.3.1",
+      "resolved": "https://registry.npmjs.org/string-argv/-/string-argv-0.3.1.tgz",
+      "integrity": "sha512-a1uQGz7IyVy9YwhqjZIZu1c8JO8dNIe20xBmSS6qu9kv++k3JGzCVmprbNN5Kn+BgzD5E7YYwg1CcjuJMRNsvg==",
+      "engines": {
+        "node": ">=0.6.19"
+      }
+    },
     "node_modules/universalify": {
       "version": "0.1.2",
       "resolved": "https://registry.npmjs.org/universalify/-/universalify-0.1.2.tgz",
@@ -143,6 +161,12 @@
       "resolved": "https://registry.npmjs.org/@influxdata/influxdb-client/-/influxdb-client-1.20.0.tgz",
       "integrity": "sha512-jaKSI63hmQ5VSkJrFJkYIXaKlhoF+mGd4HmOf7v/X7pmEi69ReHp922Wyx6/OeCrpndRMbsadk+XmGNdd43cFw=="
     },
+    "@influxdata/influxdb-client-apis": {
+      "version": "1.20.0",
+      "resolved": "https://registry.npmjs.org/@influxdata/influxdb-client-apis/-/influxdb-client-apis-1.20.0.tgz",
+      "integrity": "sha512-KMTmXH4rbpS+NWGpqDjxcKTyan2rbiT2IM5AdRElKhH2sHbH96xwLgziaxeC+OCJLeNAdehJgae3I8WiZjbwdg==",
+      "requires": {}
+    },
     "date-format": {
       "version": "3.0.0",
       "resolved": "https://registry.npmjs.org/date-format/-/date-format-3.0.0.tgz",
@@ -228,6 +252,11 @@
         }
       }
     },
+    "string-argv": {
+      "version": "0.3.1",
+      "resolved": "https://registry.npmjs.org/string-argv/-/string-argv-0.3.1.tgz",
+      "integrity": "sha512-a1uQGz7IyVy9YwhqjZIZu1c8JO8dNIe20xBmSS6qu9kv++k3JGzCVmprbNN5Kn+BgzD5E7YYwg1CcjuJMRNsvg=="
+    },
     "universalify": {
       "version": "0.1.2",
       "resolved": "https://registry.npmjs.org/universalify/-/universalify-0.1.2.tgz",
```
package.json

```diff
@@ -15,7 +15,9 @@
     "license": "AGPL-3.0",
     "dependencies": {
         "@influxdata/influxdb-client": "^1.20.0",
+        "@influxdata/influxdb-client-apis": "^1.20.0",
         "log4js": "^6.3.0",
-        "luxon": "^2.1.1"
+        "luxon": "^2.1.1",
+        "string-argv": "^0.3.1"
     }
 }
```
src/dto/Packet.js

```diff
@@ -3,7 +3,6 @@ const PacketType = {
     ProbeRequest: 'ProbeRequest',
     ProbeResponse: 'ProbeResponse',
     Data: 'Data',
-    MoreData: 'MoreData',
     RequestToSend: 'RequestToSend',
     ClearToSend: 'ClearToSend',
     Acknowledgment: 'Acknowledgment',
@@ -17,9 +16,19 @@ const PacketType = {
     Unknown: 'Unknown'
 }
 
+const FlagType = {
+    MoreFragments: "MoreFragments",
+    Retry: "Retry",
+    PwrMgt: "PwrMgt",
+    MoreData: "MoreData",
+    Protected: "Protected",
+    Order: "Order"
+}
+
 class Packet{
     timestampMicros;
-    isRetry;
+
+    flags = {};
 
     srcMac;
     dstMac;
@@ -80,6 +89,7 @@ class HandshakePacket extends Packet{
 // Specify exports
 module.exports = {
     PacketType,
+    FlagType,
     Packet,
     PacketWithSSID,
     BeaconPacket,
```
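Not part of the changeset itself: a small sketch of how the new `flags` map is meant to replace the old boolean `isRetry` field. The snippet and its require path are hypothetical (it assumes a script sitting in `src/`); it only illustrates that a packet can now carry several header flags at once.

```js
// Hypothetical usage (not in this changeset); path assumes the script sits in src/.
const { Packet, FlagType } = require('./dto/Packet.js');

const packet = new Packet();
packet.flags[FlagType.Retry] = true;        // previously a single boolean: packet.isRetry = true
packet.flags[FlagType.Protected] = true;    // further header flags no longer need extra fields

console.log(packet.flags);                  // { Retry: true, Protected: true }
```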
src/helper/exec.js

```diff
@@ -1,20 +1,15 @@
 const logger = require("./logger.js")("exec");
 
 const { spawn } = require("child_process");
+const { parseArgsStringToArgv } = require('string-argv');
 
 
-function exec(cmd, stdout, stderr, exit_handler){
-    const [bin, ...args] = cmd.split(' ')
+function exec(cmd, options){
+    const [bin, ...args] = parseArgsStringToArgv(cmd);
 
     logger.addContext("binary", "bin");
     logger.debug(`Spawn process '${cmd}'`);
-    let proc = spawn(bin, args);
-
-    return {
-        "process": proc,
-        "stdout": proc.stdout,
-        "stderr": proc.stderr
-    }
+    return spawn(bin, args, options);
 }
 
 // Specify exports
```
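Not part of the changeset: a minimal illustration of why the command string is now split with string-argv instead of `String.prototype.split(' ')`. The command below is made up; the point is that quoted arguments survive `parseArgsStringToArgv` but are torn apart by a plain split.

```js
// Hypothetical illustration (not in this changeset).
const { parseArgsStringToArgv } = require('string-argv');

const cmd = 'tcpdump -i wlan0 -w "/tmp/my capture.pcap"';   // made-up command

console.log(cmd.split(' '));
// [ 'tcpdump', '-i', 'wlan0', '-w', '"/tmp/my', 'capture.pcap"' ]  <- path split in two

console.log(parseArgsStringToArgv(cmd));
// [ 'tcpdump', '-i', 'wlan0', '-w', '/tmp/my capture.pcap' ]       <- one argument, quotes handled
```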
src/helper/influx-checks.js (new file, 61 lines)

```js
const logger = require.main.require("./helper/logger.js")("influx-checks");

const Os = require("os");
const { InfluxDB, Point } = require('@influxdata/influxdb-client')
const Influx = require('@influxdata/influxdb-client-apis');


function checkHealth(influxDb){
    return new Promise((resolve, reject) => {
        new Influx.HealthAPI(influxDb)      // Check influx health
            .getHealth()
            .catch((err) => {
                logger.error("Could not communicate with Influx:");
                logger.error(`Error [${err.code}]:`, err.message);
                reject();
            })
            .then((res) => {
                logger.debug("Server healthy.", "Version: ", res.version);
                resolve(res);
            });
    });
}

function checkBucket(influxDb, options){
    return new Promise((resolve, reject) => {
        new Influx.BucketsAPI(influxDb).getBuckets(options)
            .catch((err) => {   // Weirdly the influx-Api returns 404 for searches of non-existing buckets
                logger.error("Could not get bucket:");
                logger.error(`Error [${err.code}]:`, err.message);
                reject();
            }).then((res) => {  // But an empty list when the bucket exists, but token does not have permission to get details
                logger.debug("Bucket found");
                resolve(res);
                // Now we know the bucket exists and we have some kind of permission.. but we still dont know if we are able to write to it..
            });
    });
}

function checkWriteApi(influxDb, options){
    return new Promise((resolve, reject) => {
        const writeApi = influxDb.getWriteApi(options.org, options.bucket);     // Get WriteAPI
        writeApi.writePoint(new Point("worker_connectionTest").tag("hostname", Os.hostname()))      // Write point
        writeApi.close()
            .catch((err) => {
                logger.error("Could not get writeApi:");
                logger.error(`Error [${err.code}]:`, err.message);
                reject();
            }).then((res) => {
                logger.debug("Writing ok");
                resolve();
            });
    });
}


// Specify exports
module.exports = {
    checkHealth,
    checkBucket,
    checkWriteApi,
};
```
src/main.js (29 changes)

```diff
@@ -2,6 +2,8 @@ const logger = require("./helper/logger.js")("main");
 
 const { requireEnvVars } = require("./helper/env.js");
 const { exit } = require("process");
+const { InfluxDB } = require('@influxdata/influxdb-client');
+const InfluxChecks = require('./helper/influx-checks.js');
 
 /// Setup ENVs
 const env = process.env;
@@ -9,8 +11,6 @@ const env = process.env;
 {
     env.LOGLEVEL ??= "INFO";
     env.WIFI_INTERFACE ??= "wlan0";
-    env.WIFI_CHANNEL ??= [1,6,11];
-    env.WIFI_CHANNEL_TIME ??= 1;
 }
 // Required vars
 let errorMsg = requireEnvVars([
@@ -22,3 +22,28 @@ if(errorMsg){
     exit(1);
 }
 
+(async function() {
+    logger.info("Setup Influx..");
+    const influxDb = new InfluxDB({url: env.INFLUX_URL, token: env.INFLUX_TOKEN});
+
+    await InfluxChecks.checkHealth(influxDb)
+    .then((res) => {return InfluxChecks.checkBucket(influxDb, {
+        org: env.INFLUX_ORG,
+        name: env.INFLUX_BUCKET
+    })})
+    .then((res) => {return InfluxChecks.checkWriteApi(influxDb, {
+        org: env.INFLUX_ORG,
+        bucket: env.INFLUX_BUCKET
+    })})
+    .catch((err) => {
+        if(err) {
+            logger.error("Error whilst checking influx:");
+            logger.error(err);
+        }
+        logger.fatal("Setup influx failed!");
+        exit(1);
+    });
+
+    logger.info("Influx ok");
+
+})();
```
src/streamHandler/InfluxPointWriter.js (new file, 38 lines)

```js
const logger = require.main.require("./helper/logger.js")("InfluxPointWriter");
const { Writable } = require('stream');
const {InfluxDB, Point, HttpError} = require('@influxdata/influxdb-client')

/**
 * Get points and write them into influx
 */
class InfluxPointWriter extends Writable{
    /**
     *
     * @param {InfluxDB} influxDb InfluxDb
     * @param {string} org Organization to use
     * @param {string} bucket Bucket to use
     * @param {Partial<WriteOptions>} options Options for WriteApi
     */
    constructor(influxDb, org, bucket, options){
        super({
            objectMode: true
        });
        this._api = influxDb.getWriteApi(org, bucket, 'us', options);
    }

    _write(point, encoding, next){
        this._api.writePoint(point);
        next();
    }

    _flush(next){
        this._api.flush(true)
            .catch((err) => { next(new Error(`WriteApi rejected promise for flush: ${err}`)); })
            .then(next);
    }
}

// Specify exports
module.exports = {
    InfluxPointWriter
};
```
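Not part of the changeset: a minimal sketch of feeding the writer directly, without a pipeline. The org and bucket names and the sample point are placeholders; it only assumes the `InfluxDB` and `Point` exports of `@influxdata/influxdb-client` as used above.

```js
// Hypothetical usage (not in this changeset); org and bucket names are placeholders.
const { InfluxDB, Point } = require('@influxdata/influxdb-client');
const { InfluxPointWriter } = require('./streamHandler/InfluxPointWriter.js');

const influxDb = new InfluxDB({ url: process.env.INFLUX_URL, token: process.env.INFLUX_TOKEN });
const writer = new InfluxPointWriter(influxDb, 'my-org', 'my-bucket');

// Each write() lands in the underlying WriteApi, which batches and sends points.
writer.write(new Point('Signal').tag('srcMac', 'ff:ff:ff:ff:ff:ff').intField('value', -42));
writer.end();
```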
src/streamHandler/PacketInfluxPointFactory.js (new file, 72 lines)

```js
const logger = require.main.require("./helper/logger.js")("PacketStreamFactory");
const { Transform } = require('stream');
const {Point} = require('@influxdata/influxdb-client')

/** Keys to always use as tags */
const TAG_LIST = [
    "srcMac",
    "dstMac",
    "bssid",
    "frequency",
    "flags",
];

/** Measurement-name and corresponding field-key */
const MEASUREMENT_MAP = new Map([
    ["Signal", "signal"],
    ["PayloadSize", "payloadSize"],
    ["DataRate", "dataRate"],
    ["SSID", "ssid"],
    ["AuthenticationType", "authenticationType"],
    ["AssociationSuccess", "associationIsSuccessful"],
    ["DisassociationReason", "disassociationReason"],
]);


/**
 * Get packets and convert them into influx-points
 */
class PacketInfluxPointFactory extends Transform{
    constructor(){
        super({
            readableObjectMode: true,
            writableObjectMode: true
        });
    }

    _transform(packet, encoding, next){
        // Create measurements
        MEASUREMENT_MAP.forEach((objKey, measurement) => {
            if(!Object.keys(packet).includes(objKey)) return;

            let point = new Point(measurement);     // Create point

            // Set tags
            TAG_LIST.filter(tag => Object.keys(packet).includes(tag))
                .forEach(tag => point.tag(tag, packet[tag]));

            point.setField('value', packet[objKey]);    // Set field

            this.push(point);   // Push point into stream
        });

        next();     // Get next packet
    }
}


/** Mapping for type -> field-method */
const POINT_FIELD_TYPE = new Map([
    ['boolean', function(key, value){ return this.booleanField(key, value); }],
    ['number', function(key, value){ return this.intField(key, value); }],
    ['string', function(key, value){ return this.stringField(key, value); }],
]);
Point.prototype.setField = function(key, value){
    let setField = POINT_FIELD_TYPE.get(typeof value);
    return setField.apply(this, [key, value]);
}

// Specify exports
module.exports = {
    PacketInfluxPointFactory
};
```
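Not introduced by this changeset: a sketch of how the stream stages could be wired together end to end, using the existing RegexBlockStream and PacketStreamFactory classes whose diffs follow. It assumes it runs as the entry script from `src/` (so `require.main.require` resolves), reads capture text from stdin, and uses a placeholder block regex plus the same INFLUX_* environment variables as src/main.js.

```js
// Hypothetical wiring (not in this changeset): raw capture text on stdin is cut
// into blocks, parsed into Packet objects, mapped to Points and written to InfluxDB.
const { InfluxDB } = require('@influxdata/influxdb-client');
const { RegexBlockStream } = require('./streamHandler/RegexBlockStream.js');
const { PacketStreamFactory } = require('./streamHandler/PacketStreamFactory.js');
const { PacketInfluxPointFactory } = require('./streamHandler/PacketInfluxPointFactory.js');
const { InfluxPointWriter } = require('./streamHandler/InfluxPointWriter.js');

const influxDb = new InfluxDB({ url: process.env.INFLUX_URL, token: process.env.INFLUX_TOKEN });

process.stdin.setEncoding('utf8');
process.stdin
    .pipe(new RegexBlockStream(/^.+(\n\t.+)*\n/gm))     // placeholder block matcher
    .pipe(new PacketStreamFactory())                    // text block -> Packet
    .pipe(new PacketInfluxPointFactory())               // Packet -> Point(s)
    .pipe(new InfluxPointWriter(influxDb, process.env.INFLUX_ORG, process.env.INFLUX_BUCKET));
```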
src/streamHandler/PacketStreamFactory.js

```diff
@@ -1,7 +1,7 @@
 const logger = require.main.require("./helper/logger.js")("PacketStreamFactory");
 const { Transform } = require('stream');
 const { DateTime } = require("luxon");
-const { PacketType, Packet, PacketWithSSID, BeaconPacket, ProbeRequestPacket, ProbeResponsePacket, AuthenticationPacket, AuthenticationType, AssociationResponsePacket, DisassociationPacket, HandshakePacket, HandshakeStage } = require.main.require('./dto/Packet.js');
+const { PacketType, FlagType, Packet, PacketWithSSID, BeaconPacket, ProbeRequestPacket, ProbeResponsePacket, AuthenticationPacket, AuthenticationType, AssociationResponsePacket, DisassociationPacket, HandshakePacket, HandshakeStage } = require.main.require('./dto/Packet.js');
 const hexConv = require.main.require("./helper/hexConverter.js");
 
 const PACKET_TYPE_MAP = {
@@ -9,7 +9,6 @@ const PACKET_TYPE_MAP = {
     "Probe Request": PacketType.ProbeRequest,
     "Probe Response": PacketType.ProbeResponse,
     "Data": PacketType.Data,
-    "More Data": PacketType.MoreData,
     "Request-To-Send": PacketType.RequestToSend,
     "Clear-To-Send": PacketType.ClearToSend,
     "Acknowledgment": PacketType.Acknowledgment,
@@ -27,14 +26,18 @@ const AUTHENTICATION_TYPE_MAP = {
     "(Open System)-2": AuthenticationType.OpenSystem_2,
 }
 
+const FLAG_TYPE_MAP = {
+    "Retry": FlagType.Retry,
+    "Pwr Mgmt": FlagType.PwrMgt,
+    "More Data": FlagType.MoreData,
+    "Protected": FlagType.Protected,
+}
+const FLAG_TYPE_MAPS_REGEX = Object.keys(FLAG_TYPE_MAP).join('|');
+
 /**
  * Read data from text-blocks and convert them to Packet
  */
 class PacketStreamFactory extends Transform{
-    matcher;
-    withholdLastBlock;
-    matchAllOnFlush;
-
     constructor(){
         super({
             readableObjectMode: true,
@@ -50,8 +53,6 @@ class PacketStreamFactory extends Transform{
         packet = this._handleHeader(packet, header);
         packet = this._handlePayload(packet, lines);
 
-        logger.debug(packet);
-
         next(null, packet); // Get next chunk
     }
 
@@ -59,7 +60,10 @@ class PacketStreamFactory extends Transform{
         // Convert time to epoch-micros Unfortunately luxon doesnt use micros, but millis as smallest time-unit requiring some "hacks"
         packet.timestampMicros = DateTime.fromISO(data.slice(0, 12)).toSeconds() + data.slice(12, 15)/1000000;
 
-        packet.isRetry = data.match(/(?<=^|\s)Retry(?=$|\s)/i)? true: false;
+        // Find flags
+        data.match(data.match(new RegExp('(?<=^|\\s)('+ FLAG_TYPE_MAPS_REGEX +')(?=$|\\s)', 'ig'))
+            ?.forEach(match => packet.flags[FLAG_TYPE_MAP[match]] = true) // Set them to true in flags
+        );
 
         packet.dataRate = Number(data.match(/(?<=^|\s)[0-9]+(\.[0-9]+)?(?=\sMb\/?s($|\s))/i)?.[0]) || null;
         packet.frequency = Number(data.match(/(?<=^|\s)[0-9]{4}(?=\sMHz($|\s))/i)?.[0]) || null;
@@ -69,8 +73,10 @@ class PacketStreamFactory extends Transform{
         packet.signal = Number(data.match(/(?<=^|\s)-[0-9]{2,3}(?=dBm\sSignal($|\s))/i)?.[0]) || null;
 
         let packetTypeStr = data.match(new RegExp('(?<=^|\\s)('+ PACKET_TYPES_REGEX +')(?=$|\\s)', 'i'))?.[0];
-        packet.packetType = packetTypeStr? PACKET_TYPE_MAP[packetTypeStr]: PacketType.Unknown;
-
+        packet.packetType = packetTypeStr? PACKET_TYPE_MAP[packetTypeStr]:
+            data.match(/(SA|TA|DA|RA|BSSID):.{17}\s*$/i)? PacketType.NoData:
+            PacketType.Unknown;
+
         packet.srcMac = data.match(/(?<=(^|\s)(SA|TA):).{17}(?=$|\s)/i)?.[0] ?? null;
 
         packet.dstMac = data.match(/(?<=(^|\s)(DA|RA):).{17}(?=$|\s)/i)?.[0] ?? null;
@@ -112,7 +118,8 @@ class PacketStreamFactory extends Transform{
         data = data.join('');
 
         // Get payload-Hex-Data. If there is no data: empty
-        packet.payloadData = hexConv.hexToBytes(data.match(/(?<=\s)([A-F0-9]{1,4}(?!(\.|x)))/igm)?.join('') ?? '');
+        packet.payloadData = hexConv.hexToBytes(data.match(/(?<=\s)([A-F0-9]{1,4}(?=\s))/igm)?.join('') ?? '');
+        packet.payloadData.splice(packet.payloadData.length-4, 4); // Remove FrameCheck sequence
 
         // Cover special cases with more data
         let newPacket;
@@ -121,8 +128,8 @@ class PacketStreamFactory extends Transform{
                 newPacket = new HandshakePacket();
 
                 // Read key-information
-                let keyInfoRaw = (packet.payloadData[0x5]<<0x8) + packet.payloadData[0x6];
-                let keyInfo = {
+                const keyInfoRaw = (packet.payloadData[0x5]<<0x8) + packet.payloadData[0x6];
+                const keyInfo = {
                     "KeyDescriptorVersion": keyInfoRaw>>0 & 0b111,
                     "KeyType": keyInfoRaw>>3 & 0b1,
                     "KeyIndex": keyInfoRaw>>4 & 0b11,
```
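Not part of the changeset: a small sketch of what the inner flag match yields for a made-up tcpdump-style header line, using the same map keys as the FLAG_TYPE_MAP above. Each hit is then flipped to true in `packet.flags`.

```js
// Hypothetical input (not in this changeset); the header text is invented.
const FLAG_TYPE_MAPS_REGEX = ["Retry", "Pwr Mgmt", "More Data", "Protected"].join('|');

const header = "2412 MHz 11b -77dBm Signal Retry Protected Beacon (MyWifi)";
const hits = header.match(new RegExp('(?<=^|\\s)('+ FLAG_TYPE_MAPS_REGEX +')(?=$|\\s)', 'ig'));

console.log(hits);  // [ 'Retry', 'Protected' ]
```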
src/streamHandler/RegexBlockStream.js

```diff
@@ -10,9 +10,10 @@ class RegexBlockStream extends Transform{
     matchAllOnFlush;
 
     /**
-     * @param {RegExp} matcher Block-match - WARNING: It should match a clean-block (including e.g. newline)! Otherwise buffer will get dirty and use more and more ressources.
+     * @param {RegExp} matcher Block-match
      * @param {boolean} withholdLastBlock When true, the last matches block will not be submitted to prevent submitting incomplete blocks.
      * @param {boolean} matchAllOnFlush (Only in combination with withholdLastBlock) When enabled, the buffer will be matched on last time on _flush (stream deconstruction) and write any, also incomplete, blocks
+     * @remarks WARNING: It should match a clean-block (including e.g. newline)! Otherwise buffer will get dirty and use more and more ressources.
      */
     constructor(matcher, withholdLastBlock = true, matchAllOnFlush = false){
         super({
@@ -26,7 +27,7 @@ class RegexBlockStream extends Transform{
     }
 
     _transform(chunk, encoding, next){
-        chunk = this.readableBuffer.length? this.readableBuffer.join() + chunk: chunk; // Add previous buffer to current chunk
+        chunk = this.readableBuffer.length? this.readableBuffer.join('') + chunk: chunk; // Add previous buffer to current chunk
         this.readableBuffer.length && this.readableBuffer.clear(); // Clear buffer once we read it
 
         let matches = chunk.match(this.matcher); // Match
@@ -51,7 +52,7 @@
 
     _flush(next){
         if(matchAllOnFlush){ // When requested, we'll match one last time over the remaining buffer
-            let chunk = this.readableBuffer.toString();
+            let chunk = this.readableBuffer.join('');
             let matches = chunk.match(this.matcher); // Match remaining buffer
             _writeMatches(matches); // Write matches including last element
         }
```
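Not part of the changeset: a one-liner showing why the buffer is now joined with an explicit empty separator. `Array.prototype.join()` defaults to a comma, which would inject stray characters into the re-assembled chunk; the sample strings below are made up.

```js
// Hypothetical illustration (not in this changeset).
const buffered = ['15:27:08.417069 5180 MHz ', '-42dBm Signal'];

console.log(buffered.join());    // "15:27:08.417069 5180 MHz ,-42dBm Signal"  <- comma injected
console.log(buffered.join(''));  // "15:27:08.417069 5180 MHz -42dBm Signal"   <- buffer restored verbatim
```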