Compare commits
2 Commits: 9095e21e6f ... cc9e4c7258

| Author | SHA1 | Date |
|---|---|---|
|  | cc9e4c7258 |  |
|  | 56ac283544 |  |
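Most of what these two commits change is mechanical style normalization: single-quoted strings become double-quoted and missing semicolons are added, alongside a handful of small real fixes noted further down. If that style should stay enforced automatically, a rule pair along these lines would cover it; this is only a sketch, and it assumes ESLint, which the repository is not shown to use.

```js
// eslint.config.js (hypothetical) -- would enforce the quoting/semicolon style applied by hand below
module.exports = [
    {
        rules: {
            quotes: ["error", "double"], // prefer double-quoted strings
            semi: ["error", "always"],   // require terminating semicolons
        },
    },
];
```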
@@ -1,9 +1,9 @@
 function requireEnvVars(requiredEnv){
     // Ensure required ENV vars are set
-    let unsetEnv = requiredEnv.filter((env) => !(typeof process.env[env] !== 'undefined'));
+    let unsetEnv = requiredEnv.filter((env) => (typeof process.env[env] === "undefined"));

     if (unsetEnv.length > 0) {
-        return "Required ENV variables are not set: [" + unsetEnv.join(', ') + "]";
+        return "Required ENV variables are not set: [" + unsetEnv.join(", ") + "]";
     }
 }

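The filter rewrite above is behavior-preserving: `!(typeof x !== 'undefined')` and `typeof x === "undefined"` select exactly the same entries, the new form just drops the double negation. A minimal sketch, with sample variable names taken from the required vars used later in main.js:

```js
// Sketch: both predicates keep exactly the env names that are NOT set.
const requiredEnv = ["INFLUX_URL", "INFLUX_TOKEN"]; // illustrative input

const oldForm = requiredEnv.filter((env) => !(typeof process.env[env] !== "undefined"));
const newForm = requiredEnv.filter((env) => typeof process.env[env] === "undefined");

console.log(oldForm.join(", ") === newForm.join(", ")); // always true
```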
@@ -1,7 +1,7 @@
 const logger = require("./logger.js")("exec");

 const { spawn } = require("child_process");
-const { parseArgsStringToArgv } = require('string-argv');
+const { parseArgsStringToArgv } = require("string-argv");


 function exec(cmd, options){

@@ -21,4 +21,4 @@ function bytesToHex(bytes) {
 module.exports = {
     hexToBytes,
     bytesToHex
-}
+};

@@ -1,8 +1,8 @@
 const logger = require.main.require("./helper/logger.js")("influx-checks");

 const Os = require("os");
-const { InfluxDB, Point } = require('@influxdata/influxdb-client')
-const Influx = require('@influxdata/influxdb-client-apis');
+const { InfluxDB, Point } = require("@influxdata/influxdb-client")
+const Influx = require("@influxdata/influxdb-client-apis");


 function checkHealth(influxDb){

@@ -39,7 +39,7 @@ function checkBucket(influxDb, options){
 function checkWriteApi(influxDb, options){
     return new Promise((resolve, reject) => {
         const writeApi = influxDb.getWriteApi(options.org, options.bucket); // Get WriteAPI
-        writeApi.writePoint(new Point("worker_connectionTest").tag("hostname", Os.hostname())) // Write point
+        writeApi.writePoint(new Point("worker_connectionTest").tag("hostname", Os.hostname())); // Write point
         writeApi.close()
         .catch((err) => {
             logger.error("Could not get writeApi:");

@@ -5,15 +5,15 @@ function detectStreamData(stream, timeout = 5000){
         let timeoutHandler;
         if(timeout){
             timeoutHandler = setTimeout(() => {
-                reject('timeout');
+                reject("timeout");
                 remListeners();
             },
             timeout);
         }

         function remListeners(){
-            stream.removeListener('error', errorHandler);
-            stream.removeListener('data', dataHandler);
+            stream.removeListener("error", errorHandler);
+            stream.removeListener("data", dataHandler);
             if(timeoutHandler) clearTimeout(timeoutHandler);
         }

@@ -25,8 +25,8 @@ function detectStreamData(stream, timeout = 5000){
             remListeners();
         }

-        stream.on('error', errorHandler);
-        stream.on('data', dataHandler);
+        stream.on("error", errorHandler);
+        stream.on("data", dataHandler);
     });
 }

@@ -34,7 +34,7 @@ function detectStreamsData(streams, timeout = 5000){
     let promises = [];
     streams.forEach((stream) => {
         promises.push(detectStreamData(stream, timeout));
-    })
+    });
     return promises;
 }

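For context on the three hunks above: `detectStreamData` appears to wrap a one-shot "data"/"error" listener pair in a promise and to reject with "timeout" when nothing arrives in time, which is how main.js uses it later in this diff. A hedged usage sketch, assuming the helper is exported from ./helper/userHelper.js as the main.js call sites suggest:

```js
// Sketch only -- mirrors the call sites visible in main.js further down this diff.
const { PassThrough } = require("stream");
const userHelper = require("./helper/userHelper.js"); // path assumed from main.js

const stream = new PassThrough(); // stand-in for proc.stdout
userHelper.detectStreamData(stream, 10000) // resolves on first "data", rejects with "timeout"
    .then(() => console.log("Got first data"))
    .catch((err) => {
        if (err == "timeout") console.warn("No data after 10s! Wrong configuration?");
    });

stream.write("first chunk"); // settles the promise
```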
@@ -1,4 +1,4 @@
-const { HandshakeStage } = require.main.require('./dto/Packet.js');
+const { HandshakeStage } = require.main.require("./dto/Packet.js");

 function keyInfoFromRaw(keyInfoRaw) {
     return {

@@ -27,7 +27,7 @@ function handshakeStageFromKeyInfo(keyInfo){

     // Extract compare-keys
     let keyData = "";
-    for (const key of HANDSHAKE_STAGE_KEYINFO['keys']) {
+    for (const key of HANDSHAKE_STAGE_KEYINFO["keys"]) {
         keyData += keyInfo[key].toString();
     }

src/main.js (186 changed lines)
@@ -6,8 +6,8 @@ const { requireEnvVars } = require("./helper/env.js");
 const { exit } = require("process");
 const { exec } = require("./helper/exec.js");

-const { InfluxDB } = require('@influxdata/influxdb-client');
-const InfluxChecks = require('./helper/influx-checks.js');
+const { InfluxDB } = require("@influxdata/influxdb-client");
+const InfluxChecks = require("./helper/influx-checks.js");

 const { RegexBlockStream } = require("./streamHandler/RegexBlockStream.js");
 const { PacketStreamFactory } = require("./streamHandler/PacketStreamFactory.js");

@@ -21,111 +21,111 @@ const userHelper = require("./helper/userHelper.js");
 const env = process.env;
 // Defaults
 {
     env.LOGLEVEL ??= "INFO";
     env.WIFI_INTERFACE ??= "wlan0";
 }
 // Required vars
 let errorMsg = requireEnvVars([
     "INFLUX_URL", "INFLUX_TOKEN",
     "INFLUX_ORG", "INFLUX_BUCKET"
 ]);
 if(errorMsg){
     logger.fatal(errorMsg);
     exit(1);
 }

 (async function() {
     logger.info("Setup Influx..");
     const influxDb = new InfluxDB({url: env.INFLUX_URL, token: env.INFLUX_TOKEN});

     await InfluxChecks.checkHealth(influxDb)
     .then((res) => {return InfluxChecks.checkBucket(influxDb, {
         org: env.INFLUX_ORG,
         name: env.INFLUX_BUCKET
-    })})
+    });})
     .then((res) => {return InfluxChecks.checkWriteApi(influxDb, {
         org: env.INFLUX_ORG,
         bucket: env.INFLUX_BUCKET
-    })})
+    });})
     .catch((err) => {
         if(err) {
             logger.error("Error whilst checking influx:");
             logger.error(err);
         }
         logger.fatal("Setup influx failed!");
         exit(1);
+    });

+    logger.info("Influx ok");

+    logger.info("Starting tcpdump..");
+    const TCPDUMP_BASECMD = "tcpdump -vvv -e -n -X -s0 -i";
+    let cmd = `${TCPDUMP_BASECMD} ${env.WIFI_INTERFACE}`;

+    let proc = exec(cmd);
+    logger.debug("Creating & Attaching streams..");
+    let regexBlockStream = new RegexBlockStream(/^\d{2}:\d{2}:\d{2}.\d{6}.*(\n( {4,8}|\t\t?).*)+\n/gm);
+    let packetStreamFactory = new PacketStreamFactory();
+    let packetInfluxPointFactory = new PacketInfluxPointFactory();
+    let influxPointWriter = new InfluxPointWriter(influxDb, env.INFLUX_ORG, env.INFLUX_BUCKET);
+    proc.stdout
+        .setEncoding("utf8")
+        .pipe(regexBlockStream)
+        .pipe(packetStreamFactory)
+        .pipe(packetInfluxPointFactory)
+        .pipe(influxPointWriter);

+    logger.debug("Attaching error-logger..");
+    const loggerTcpdump = logFactory("tcpdump");
+    proc.stderr.setEncoding("utf8").on("data", (data) => {
+        if(!data.match(/^(tcpdump: )?listening on /i) || !data.match(/^\d+ packets captured/i)) { // Catch start-error
+            loggerTcpdump.debug(data);
+        }
+        else loggerTcpdump.error(data);
     });

-    logger.info("Influx ok");
+    // FIXME: This is a hacky workaround to not let errors from subprocess bubble up and terminate our process
+    regexBlockStream.on("error", (err) => {});

-    logger.info("Starting tcpdump..");
-    const TCPDUMP_BASECMD = "tcpdump -vvv -e -n -X -s0 -i"
-    let cmd = `${TCPDUMP_BASECMD} ${env.WIFI_INTERFACE}`;
+    proc.on("error", (err) => {
+        loggerTcpdump.error(err);

-    let proc = exec(cmd);
-    logger.debug("Creating & Attaching streams..");
-    let regexBlockStream = new RegexBlockStream(/^\d{2}:\d{2}:\d{2}.\d{6}.*(\n( {4,8}|\t\t?).*)+\n/gm);
-    let packetStreamFactory = new PacketStreamFactory();
-    let packetInfluxPointFactory = new PacketInfluxPointFactory();
-    let influxPointWriter = new InfluxPointWriter(influxDb, env.INFLUX_ORG, env.INFLUX_BUCKET);
-    proc.stdout
-        .setEncoding("utf8")
-        .pipe(regexBlockStream)
-        .pipe(packetStreamFactory)
-        .pipe(packetInfluxPointFactory)
-        .pipe(influxPointWriter);

-    logger.debug("Attaching error-logger..");
-    const loggerTcpdump = logFactory("tcpdump");
-    proc.stderr.setEncoding("utf8").on("data", (data) => {
-        if(!data.match(/^(tcpdump: )?listening on /i) || !data.match(/^\d+ packets captured/i)) { // Catch start-error
-            loggerTcpdump.debug(data);
-        }
-        else loggerTcpdump.error(data);
-    });

-    // FIXME: This is a hacky workaround to not let errors from subprocess bubble up and terminate our process
-    regexBlockStream.on('error', (err) => {});

-    proc.on("error", (err) => {
-        loggerTcpdump.error(err);
-    });

-    const loggerPacketStream = logFactory("PacketStreamFactory");
-    userHelper.detectStreamData(proc.stdout, 10000) // Expect tcpdump-logs to have data after max. 10s
-    .then(() => {
-        loggerTcpdump.debug("Got first data");
-        userHelper.detectStreamData(packetStreamFactory, 10000) // Expect then to have packets after further 10s
-        .then(() => {
-            loggerPacketStream.debug("Got first packet");
-        })
-        .catch((err) => {
-            if(err == 'timeout') loggerPacketStream.warn("No packets");
-        });
-    })
-    .catch((err) => {
-        if(err == 'timeout') loggerTcpdump.warn("No data after 10s! Wrong configuration?");
     });

-    logger.debug("Attaching exit-handler..");
-    proc.on("exit", (code) => {
-        loggerTcpdump.debug(`tcpdump exited code: ${code}`);
-        if (code) {
-            loggerTcpdump.fatal(`tcpdump exited with non-zero code: ${code}`);
-            exit(1);
+    const loggerPacketStream = logFactory("PacketStreamFactory");
+    userHelper.detectStreamData(proc.stdout, 10000) // Expect tcpdump-logs to have data after max. 10s
+    .then(() => {
+        loggerTcpdump.debug("Got first data");
+        userHelper.detectStreamData(packetStreamFactory, 10000) // Expect then to have packets after further 10s
+        .then(() => {
+            loggerPacketStream.debug("Got first packet");
+        })
+        .catch((err) => {
+            if(err == "timeout") loggerPacketStream.warn("No packets");
+        });
+    })
+    .catch((err) => {
+        if(err == "timeout") loggerTcpdump.warn("No data after 10s! Wrong configuration?");
+    });

+    logger.debug("Attaching exit-handler..");
+    proc.on("exit", (code) => {
+        loggerTcpdump.debug(`tcpdump exited code: ${code}`);
+        if (code) {
+            loggerTcpdump.fatal(`tcpdump exited with non-zero code: ${code}`);
+            exit(1);
+        }
+        logger.info("Shutdown");
+        exit(0);
+    });

+    // Handle stop-signals for graceful shutdown
+    function shutdownReq() {
+        logger.info("Shutdown request received..");
+        logger.debug("Stopping subprocess tcpdump, then exiting myself..");
+        proc.kill(); // Kill process (send SIGTERM), then upper event-handler will stop self
     }
-        logger.info("Shutdown");
-        exit(0);
-    });
+    process.on("SIGTERM", shutdownReq);
+    process.on("SIGINT", shutdownReq);

-    // Handle stop-signals for graceful shutdown
-    function shutdownReq() {
-        logger.info("Shutdown request received..");
-        logger.debug("Stopping subprocess tcpdump, then exiting myself..");
-        proc.kill(); // Kill process (send SIGTERM), then upper event-handler will stop self
-    }
-    process.on('SIGTERM', shutdownReq);
-    process.on('SIGINT', shutdownReq);
+    logger.info("Startup complete");

-    logger.info("Startup complete");
 })();

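A note on the two `});})` rows fixed in the hunk above: because those `.then` callbacks use braced arrow bodies, the inner check has to be returned explicitly for the awaited chain to actually wait on it, and the added semicolons are purely cosmetic. For comparison, a concise-body version returns implicitly. This is only an illustrative rewrite against the identifiers already imported in main.js (it assumes the same surrounding async IIFE), not part of the change:

```js
// Hypothetical equivalent of the check chain above; concise arrow bodies return implicitly.
await InfluxChecks.checkHealth(influxDb)
    .then(() => InfluxChecks.checkBucket(influxDb, { org: env.INFLUX_ORG, name: env.INFLUX_BUCKET }))
    .then(() => InfluxChecks.checkWriteApi(influxDb, { org: env.INFLUX_ORG, bucket: env.INFLUX_BUCKET }))
    .catch((err) => {
        if (err) {
            logger.error("Error whilst checking influx:");
            logger.error(err);
        }
        logger.fatal("Setup influx failed!");
        exit(1);
    });
```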
@@ -1,6 +1,6 @@
 const logger = require.main.require("./helper/logger.js")("InfluxPointWriter");
-const { Writable } = require('stream');
-const {InfluxDB, Point, HttpError} = require('@influxdata/influxdb-client')
+const { Writable } = require("stream");
+const {InfluxDB, Point, HttpError} = require("@influxdata/influxdb-client");

 /**
  * Get points and write them into influx

@@ -17,7 +17,7 @@ class InfluxPointWriter extends Writable{
         super({
             objectMode: true
         });
-        this._api = influxDb.getWriteApi(org, bucket, 'us', options);
+        this._api = influxDb.getWriteApi(org, bucket, "us", options);
     }

     _write(point, encoding, next){

@@ -1,6 +1,6 @@
 const logger = require.main.require("./helper/logger.js")("PacketStreamFactory");
-const { Transform } = require('stream');
-const {Point} = require('@influxdata/influxdb-client')
+const { Transform } = require("stream");
+const {Point} = require("@influxdata/influxdb-client");

 /** Keys to always use as tags */
 const TAG_LIST = [

@@ -45,12 +45,12 @@ class PacketInfluxPointFactory extends Transform{

             // Set tags
             TAG_LIST.filter(tag => Object.keys(packet).includes(tag)) // Filter tags available on object
                 .filter(tag => packet[tag] != null) // Filter tags not falsy on object
                 .forEach(tag => {
                     tagObjectRecursively(point, tag, packet[tag]);
                 });

-            point.setField('value', packet[objKey]); // Set field
+            point.setField("value", packet[objKey]); // Set field

             this.push(point); // Push point into stream
         });

@@ -71,14 +71,14 @@ function tagObjectRecursively(point, tag, field, suffix = ""){

 /** Mapping for type -> field-method */
 const POINT_FIELD_TYPE = new Map([
-    ['boolean', function(key, value){ return this.booleanField(key, value); }],
-    ['number', function(key, value){ return this.intField(key, value); }],
-    ['string', function(key, value){ return this.stringField(key, value); }],
+    ["boolean", function(key, value){ return this.booleanField(key, value); }],
+    ["number", function(key, value){ return this.intField(key, value); }],
+    ["string", function(key, value){ return this.stringField(key, value); }],
 ]);
 Point.prototype.setField = function(key, value){
     let setField = POINT_FIELD_TYPE.get(typeof value);
     return setField.apply(this, [key, value]);
-}
+};

 // Specify exports
 module.exports = {

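For context on the map entries changed above: `POINT_FIELD_TYPE` keys off `typeof value`, so the patched `Point.prototype.setField` dispatches to `booleanField`, `intField` or `stringField` automatically. A small self-contained sketch of that dispatch pattern (not the project's file, and the placeholder functions below only echo which method would be called):

```js
// Standalone sketch of the typeof-based dispatch used above.
const POINT_FIELD_TYPE = new Map([
    ["boolean", (key, value) => ["booleanField", key, value]],
    ["number",  (key, value) => ["intField", key, value]],
    ["string",  (key, value) => ["stringField", key, value]],
]);

function setField(key, value) {
    const fieldSetter = POINT_FIELD_TYPE.get(typeof value); // pick the setter by runtime type
    return fieldSetter(key, value);
}

console.log(setField("value", 42));      // [ 'intField', 'value', 42 ]
console.log(setField("value", true));    // [ 'booleanField', 'value', true ]
console.log(setField("value", "probe")); // [ 'stringField', 'value', 'probe' ]
```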
@@ -1,7 +1,7 @@
 const logger = require.main.require("./helper/logger.js")("PacketStreamFactory");
-const { Transform } = require('stream');
+const { Transform } = require("stream");
 const { DateTime } = require("luxon");
-const { PacketType, FlagType, Packet, PacketWithSSID, BeaconPacket, ProbeRequestPacket, ProbeResponsePacket, AuthenticationPacket, AuthenticationType, AssociationResponsePacket, DisassociationPacket, HandshakePacket, HandshakeStage } = require.main.require('./dto/Packet.js');
+const { PacketType, FlagType, Packet, PacketWithSSID, BeaconPacket, ProbeRequestPacket, ProbeResponsePacket, AuthenticationPacket, AuthenticationType, AssociationResponsePacket, DisassociationPacket, HandshakePacket, HandshakeStage } = require.main.require("./dto/Packet.js");
 const hexConv = require.main.require("./helper/hexConverter.js");
 const wifiStateAnalyser = require.main.require("./helper/wifiStateAnalyzer.js");

@@ -20,20 +20,20 @@ const PACKET_TYPE_MAP = {
     "Disassociation:": PacketType.Disassociation,
     "EAPOL": PacketType.Handshake,
 };
-const PACKET_TYPES_REGEX = Object.keys(PACKET_TYPE_MAP).join('|');
+const PACKET_TYPES_REGEX = Object.keys(PACKET_TYPE_MAP).join("|");

 const AUTHENTICATION_TYPE_MAP = {
     "(Open System)-1": AuthenticationType.OpenSystem_1,
     "(Open System)-2": AuthenticationType.OpenSystem_2,
-}
+};

 const FLAG_TYPE_MAP = {
     "Retry": FlagType.Retry,
     "Pwr Mgmt": FlagType.PwrMgt,
     "More Data": FlagType.MoreData,
     "Protected": FlagType.Protected,
-}
-const FLAG_TYPE_MAPS_REGEX = Object.keys(FLAG_TYPE_MAP).join('|');
+};
+const FLAG_TYPE_MAPS_REGEX = Object.keys(FLAG_TYPE_MAP).join("|");

 /**
  * Read data from text-blocks and convert them to Packet

@@ -49,8 +49,8 @@ class PacketStreamFactory extends Transform{
     _transform(chunk, encoding, next){
         let packet = new Packet();

-        const lines = chunk.split('\n');
-        const header = lines.splice(0, 1)[0]; // Grab first line, 'lines' is now the payload
+        const lines = chunk.split("\n");
+        const header = lines.splice(0, 1)[0]; // Grab first line, "lines" is now the payload
         packet = this._handleHeader(packet, header);
         packet = this._handlePayload(packet, lines);

@@ -62,7 +62,7 @@ class PacketStreamFactory extends Transform{
         packet.timestampMicros = DateTime.fromISO(data.slice(0, 12)).toSeconds() + data.slice(12, 15)/1000000;

         // Find flags
-        data.match(data.match(new RegExp('(?<=^|\\s)('+ FLAG_TYPE_MAPS_REGEX +')(?=$|\\s)', 'ig'))
+        data.match(data.match(new RegExp("(?<=^|\\s)("+ FLAG_TYPE_MAPS_REGEX +")(?=$|\\s)", "ig"))
             ?.forEach(match => packet.flags[FLAG_TYPE_MAP[match]] = true) // Set them to true in flags
         );

@@ -73,11 +73,11 @@ class PacketStreamFactory extends Transform{

         packet.signal = Number(data.match(/(?<=^|\s)-\d{2,3}(?=dBm\sSignal($|\s))/i)?.[0]) || null;

-        let packetTypeStr = data.match(new RegExp('(?<=^|\\s)('+ PACKET_TYPES_REGEX +')(?=$|\\s)', 'i'))?.[0];
+        let packetTypeStr = data.match(new RegExp("(?<=^|\\s)("+ PACKET_TYPES_REGEX +")(?=$|\\s)", "i"))?.[0];
         if(packetTypeStr)
             packet.packetType = PACKET_TYPE_MAP[packetTypeStr];
         else if(data.match(/(SA|TA|DA|RA|BSSID):.{17}\s*$/i)){
-            packet.packetType = PacketType.NoData
+            packet.packetType = PacketType.NoData;
         }
         else {
             packet.packetType = PacketType.Unknown;

@@ -97,12 +97,12 @@ class PacketStreamFactory extends Transform{
             case PacketType.ProbeResponse:
             case PacketType.AssociationRequest:
                 newPacket = new PacketWithSSID();
-                newPacket.ssid = data.match(new RegExp('(?<=(^|\\s)'+ packetTypeStr +'\\s\\().{0,32}(?=\\)($|\\s))', 'i'))?.[0] ?? null;
+                newPacket.ssid = data.match(new RegExp("(?<=(^|\\s)"+ packetTypeStr +"\\s\\().{0,32}(?=\\)($|\\s))", "i"))?.[0] ?? null;
                 break;

             case PacketType.Authentication:
                 newPacket = new AuthenticationPacket();
-                newPacket.authenticationType = AUTHENTICATION_TYPE_MAP[data.match(/(?<=(^|\s)Authentication\s).{3,}(?=\:(\s|$))/i)[0]] ?? AuthenticationType.Unknown;
+                newPacket.authenticationType = AUTHENTICATION_TYPE_MAP[data.match(/(?<=(^|\s)Authentication\s).{3,}(?=:(\s|$))/i)[0]] ?? AuthenticationType.Unknown;
                 break;

             case PacketType.AssociationResponse:

@@ -121,16 +121,16 @@ class PacketStreamFactory extends Transform{
     }

     _handlePayload(packet, data){
-        data = data.join('');
+        data = data.join("");

         // Get payload-Hex-Data. If there is no data: empty
-        packet.payloadData = hexConv.hexToBytes(data.match(/(?<=\s)([A-F0-9]{1,4}(?=\s))/igm)?.join('') ?? '');
+        packet.payloadData = hexConv.hexToBytes(data.match(/(?<=\s)([A-F0-9]{1,4}(?=\s))/igm)?.join("") ?? "");
         packet.payloadData.splice(packet.payloadData.length-4, 4); // Remove FrameCheck sequence

         // Cover special cases with more data
         let newPacket;
         switch(packet.packetType){
-            case PacketType.Handshake:
+            case PacketType.Handshake: {
                 newPacket = new HandshakePacket();

                 // Read key-information

@@ -139,6 +139,7 @@ class PacketStreamFactory extends Transform{

                 newPacket.handshakeStage = wifiStateAnalyser.handshakeStageFromKeyInfo(keyInfo); // Get stage
                 break;
+            }
         }
         if(newPacket) packet = Object.assign(newPacket, packet);

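The braces added around the Handshake case above (`case PacketType.Handshake: {` ... `}`) give that case its own block scope, so `let`/`const` declarations made while parsing the handshake cannot collide with declarations in other cases. A minimal standalone illustration of the pattern, not the project's code:

```js
// Sketch: why wrapping a switch case in braces matters for let/const.
function demo(kind) {
    switch (kind) {
        case "handshake": {
            const info = { stage: 1 }; // scoped to this case only
            return info.stage;
        }
        case "beacon": {
            const info = { stage: 0 }; // no "already been declared" clash with the case above
            return info.stage;
        }
        default:
            return -1;
    }
}

console.log(demo("handshake"), demo("beacon")); // 1 0
```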
@@ -1,5 +1,5 @@
 const logger = require.main.require("./helper/logger.js")("RegexBlockStream");
-const { Transform } = require('stream')
+const { Transform } = require("stream");

 /**
  * Matches whole blocks as regex and passes them on

@@ -27,7 +27,7 @@ class RegexBlockStream extends Transform{
     }

     _transform(chunk, encoding, next){
-        chunk = this.readableBuffer.length? this.readableBuffer.join('') + chunk: chunk; // Add previous buffer to current chunk
+        chunk = this.readableBuffer.length? this.readableBuffer.join("") + chunk: chunk; // Add previous buffer to current chunk
         this.readableBuffer.length && this.readableBuffer.clear(); // Clear buffer once we read it

         let matches = chunk.match(this.matcher); // Match

@@ -44,17 +44,17 @@ class RegexBlockStream extends Transform{
         if(matches){
             matches.forEach((match) => {
                 this.push(match); // Write match to stream
-                if(chunk) chunk = chunk.replace(match, ''); // Remove match from chunks
+                if(chunk) chunk = chunk.replace(match, ""); // Remove match from chunks
             });
         }
         if(chunk) return chunk;
     }

     _flush(next){
-        if(matchAllOnFlush){ // When requested, we'll match one last time over the remaining buffer
-            let chunk = this.readableBuffer.join('');
+        if(this.matchAllOnFlush){ // When requested, we'll match one last time over the remaining buffer
+            let chunk = this.readableBuffer.join("");
             let matches = chunk.match(this.matcher); // Match remaining buffer
-            _writeMatches(matches); // Write matches including last element
+            this._writeMatches(matches); // Write matches including last element
         }

         next(); // Tell system we are done

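The `_flush` change above is a real fix rather than style: the old code referenced `matchAllOnFlush` and `_writeMatches` as bare identifiers, so the first flush with that option enabled would have thrown a ReferenceError; both are instance members and need the `this.` qualifier. A stripped-down illustration of the failure mode, using a hypothetical class rather than the project's stream:

```js
// Sketch: bare identifier vs. instance member inside a stream's _flush.
const { Transform } = require("stream");

class BrokenFlush extends Transform {
    constructor() {
        super();
        this.matchAllOnFlush = true;
    }
    _flush(next) {
        try {
            if (matchAllOnFlush) { /* never reached */ } // bare identifier, not this.matchAllOnFlush
        } catch (err) {
            console.error(err.message); // "matchAllOnFlush is not defined"
        }
        next();
    }
}

new BrokenFlush().end(); // ending the stream triggers _flush
```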