51 Commits

Author SHA1 Message Date
c97137f4a7 Merge branch 'release-2' 2021-12-06 12:47:14 +01:00
39350932a4 Changed metric- and tag-names to match best-practice naming-convention 2021-12-06 12:46:04 +01:00
1e37f35e38 Moved creation of writeApi to main and set default-tag hostname 2021-12-06 12:44:26 +01:00
d0be44c1af Fix coding-style 2021-12-06 12:42:34 +01:00
a13d81e9c0 Merge branch 'release-1.1' 2021-12-03 10:53:35 +01:00
059c02e243 Merge branch 'dev' into release-1.1 2021-12-03 10:53:13 +01:00
57cf6fb0a7 Add npm prune (won't do much here) 2021-12-03 10:50:50 +01:00
298a96bf16 apk-install on 1 line and delete cache when done 2021-12-03 10:48:28 +01:00
b98dff947d Refactored packet to use new code-style (forgot last time :/) 2021-12-02 14:54:32 +01:00
a610f209d5 Merge branch 'release-1.1' 2021-12-02 14:08:54 +01:00
0f6c5b6b0e Change baseimage from node to node-alpine
Size drastically decreased
2021-12-02 14:06:21 +01:00
cc9e4c7258 Fix flush using local vars instead of this-vars 2021-12-02 13:41:23 +01:00
56ac283544 Refactored code to match code-style 2021-12-02 13:40:56 +01:00
9095e21e6f Add handshakeStage as metric 2021-12-01 14:24:03 +01:00
6e05a0b45c Merge branch 'release-1.0' 2021-11-29 15:55:27 +01:00
d14e469ef4 Removed error-logging from RegexBlockStream FIXME 2021-11-29 15:35:53 +01:00
99a3e13d77 Added further exception to error-event from tcpdump 2021-11-29 15:35:36 +01:00
b5c895674e Merge branch 'f_gracefulShutdown' into dev 2021-11-29 15:34:56 +01:00
86d2b8c1cf Added event for SIGTERM and SIGINT 2021-11-29 15:34:19 +01:00
7ff6556d51 Merge branch 'f_userHelper' into dev 2021-11-29 15:12:39 +01:00
16388c73e5 Added additional check for packetStream 2021-11-29 15:11:14 +01:00
8211f55b89 Added check for tcpdump-process stdout 2021-11-29 15:10:42 +01:00
c28bbaaada Moved Stream-creations outside 2021-11-29 15:09:59 +01:00
4ddbe3f06f Implemented userHelper methods to detect if streams have data before timeout 2021-11-29 15:09:21 +01:00
0d84079ce1 Added packetType to tag-fields 2021-11-29 14:20:29 +01:00
1bf761970f Added error-handling for regexBlockStream 2021-11-29 13:45:36 +01:00
5a0118aedd Added error-handling for process 2021-11-29 13:45:15 +01:00
0709db0ddf Changed exit-log to debug 2021-11-29 13:44:37 +01:00
c27761322c Catch start-error from tcpdump
tcpdump writes to stderr at the beginning
2021-11-29 13:18:29 +01:00
45a11753de Add own logger for tcpdump 2021-11-29 13:16:23 +01:00
d482001cdc Merge branch 'f_docker' into dev 2021-11-29 13:12:00 +01:00
a681bbd2d2 Removed sudo as container runs as root anyways 2021-11-29 13:11:11 +01:00
d9ee804c3b Removed src-references as that's the workdir 2021-11-29 13:10:44 +01:00
e320d8670b Add installation of package tcpdump 2021-11-29 13:10:05 +01:00
2d824543d1 fixed cmd 2021-11-29 13:09:49 +01:00
6e080907d1 Merge branch 'code-smell_handling' into dev 2021-11-29 09:58:04 +01:00
91c3aca9e2 Add function to get handshakeStage from keyInfo 2021-11-29 09:57:33 +01:00
31ab10c3e1 Fix wrong var-name 2021-11-29 09:57:05 +01:00
8ff7211f0f Revert "Extract packetType handling in own function"
This reverts commit 227ba127f8.
2021-11-29 09:50:14 +01:00
2ae85ababb Extract keyInfo reading to helper-file 2021-11-29 09:37:28 +01:00
227ba127f8 Extract packetType handling in own function 2021-11-29 09:36:21 +01:00
8417de5756 Extract checks with if-statements 2021-11-29 09:35:17 +01:00
54eadf3bc3 Fixed Regex codesmells 2021-11-29 09:24:18 +01:00
e39ebaac23 Fixed Regex codesmell 2021-11-29 09:23:08 +01:00
54d627d469 Fix typo 2021-11-29 09:20:38 +01:00
ca3c37be0f Activating strict-mode 2021-11-29 09:16:49 +01:00
96b52e63a0 Add exit-handler with error-detection 2021-11-29 09:16:39 +01:00
3c5e941cba Creating & Attaching streams / Error-logger 2021-11-29 09:15:49 +01:00
a468d7a57b Start tcpdump process 2021-11-29 09:14:50 +01:00
4ad5eba7e0 Change tag-setting to recursive by field
This will properly set e.g. arrays
2021-11-26 21:03:11 +01:00
2646c9787e Changed key-checking to value-checking
This includes e.g. getters, originally not included in keys
2021-11-26 21:00:15 +01:00
14 changed files with 323 additions and 139 deletions

View File: Dockerfile

@@ -1,4 +1,4 @@
FROM node:16
FROM node:16-alpine
# Create app directory
WORKDIR /usr/src/app
@@ -7,7 +7,13 @@ WORKDIR /usr/src/app
COPY package*.json ./
RUN npm install
# remove development dependencies
RUN npm prune --production
# Install required apk-packages & delete cache
RUN apk update && apk add tcpdump && rm -rf /var/cache/apk/*
# Bundle app source
COPY ./src/ .
CMD ["npm", "run"]
CMD ["npm", "run", "start"]

View File: package.json

@@ -2,10 +2,10 @@
"name": "rfmon-to-influx",
"version": "1.0.0",
"description": "Writing (mostly meta-) data received in Wireless-Monitor-Mode into an InfluxDB",
"main": "src/main.js",
"main": "main.js",
"scripts": {
"test": "echo \"Error: no test specified\" && exit 1",
"start": "node src/main.js"
"start": "node main.js"
},
"repository": {
"type": "git",

View File: src/dto/Packet.js

@@ -1,20 +1,20 @@
const PacketType = {
Beacon: 'Beacon',
ProbeRequest: 'ProbeRequest',
ProbeResponse: 'ProbeResponse',
Data: 'Data',
RequestToSend: 'RequestToSend',
ClearToSend: 'ClearToSend',
Acknowledgment: 'Acknowledgment',
BlockAcknowledgment: 'BlockAcknowledgment',
NoData: 'NoData',
Authentication: 'Authentication',
AssociationRequest: 'AssociationRequest',
AssociationResponse: 'AssociationResponse',
Disassociation: 'Disassociation',
Handshake: 'Handshake',
Unknown: 'Unknown'
}
Beacon: "Beacon",
ProbeRequest: "ProbeRequest",
ProbeResponse: "ProbeResponse",
Data: "Data",
RequestToSend: "RequestToSend",
ClearToSend: "ClearToSend",
Acknowledgment: "Acknowledgment",
BlockAcknowledgment: "BlockAcknowledgment",
NoData: "NoData",
Authentication: "Authentication",
AssociationRequest: "AssociationRequest",
AssociationResponse: "AssociationResponse",
Disassociation: "Disassociation",
Handshake: "Handshake",
Unknown: "Unknown"
};
const FlagType = {
MoreFragments: "MoreFragments",
@@ -23,7 +23,7 @@ const FlagType = {
MoreData: "MoreData",
Protected: "Protected",
Order: "Order"
}
};
class Packet{
timestampMicros;
@@ -58,10 +58,10 @@ class ProbeRequestPacket extends PacketWithSSID{}
class ProbeResponsePacket extends PacketWithSSID{}
const AuthenticationType = {
OpenSystem_1: 'OpenSystem_1',
OpenSystem_2: 'OpenSystem_2',
Unknown: 'Unknown',
}
OpenSystem_1: "OpenSystem_1",
OpenSystem_2: "OpenSystem_2",
Unknown: "Unknown",
};
class AuthenticationPacket extends Packet{
authenticationType;
}
@@ -77,11 +77,11 @@ class DisassociationPacket extends Packet{
const HandshakeStage = {
1: '1',
2: '2',
3: '3',
4: '4'
}
1: "1",
2: "2",
3: "3",
4: "4"
};
class HandshakePacket extends Packet{
handshakeStage;
}

View File: src/helper/env.js

@@ -1,9 +1,9 @@
function requireEnvVars(requiredEnv){
// Ensure required ENV vars are set
let unsetEnv = requiredEnv.filter((env) => !(typeof process.env[env] !== 'undefined'));
let unsetEnv = requiredEnv.filter((env) => (typeof process.env[env] === "undefined"));
if (unsetEnv.length > 0) {
return "Required ENV variables are not set: [" + unsetEnv.join(', ') + "]";
return "Required ENV variables are not set: [" + unsetEnv.join(", ") + "]";
}
}
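For illustration, a minimal usage sketch of requireEnvVars (the variable names are examples; the function returns undefined when everything is set, otherwise a message listing the missing names):

const { requireEnvVars } = require("./helper/env.js");

const errorMsg = requireEnvVars(["INFLUX_URL", "INFLUX_TOKEN"]);
if (errorMsg) {
    console.error(errorMsg); // e.g. "Required ENV variables are not set: [INFLUX_TOKEN]"
    process.exit(1);
}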

View File: src/helper/exec.js

@@ -1,7 +1,7 @@
const logger = require("./logger.js")("exec");
const { spawn } = require("child_process");
const { parseArgsStringToArgv } = require('string-argv');
const { parseArgsStringToArgv } = require("string-argv");
function exec(cmd, options){

View File: src/helper/hexConverter.js

@@ -21,4 +21,4 @@ function bytesToHex(bytes) {
module.exports = {
hexToBytes,
bytesToHex
}
};

View File: src/helper/influx-checks.js

@@ -1,8 +1,8 @@
const logger = require.main.require("./helper/logger.js")("influx-checks");
const Os = require("os");
const { InfluxDB, Point } = require('@influxdata/influxdb-client')
const Influx = require('@influxdata/influxdb-client-apis');
const { InfluxDB, Point } = require("@influxdata/influxdb-client")
const Influx = require("@influxdata/influxdb-client-apis");
function checkHealth(influxDb){
@@ -39,7 +39,7 @@ function checkBucket(influxDb, options){
function checkWriteApi(influxDb, options){
return new Promise((resolve, reject) => {
const writeApi = influxDb.getWriteApi(options.org, options.bucket); // Get WriteAPI
writeApi.writePoint(new Point("worker_connectionTest").tag("hostname", Os.hostname())) // Write point
writeApi.writePoint(new Point("worker_connectionTest").tag("hostname", Os.hostname())); // Write point
writeApi.close()
.catch((err) => {
logger.error("Could not get writeApi:");

View File: src/helper/userHelper.js (new file, 46 lines)

@@ -0,0 +1,46 @@
// This file specifies functions to help a user with e.g. configuration-errors
function detectStreamData(stream, timeout = 5000){
return new Promise((resolve, reject) => {
let timeoutHandler;
if(timeout){
timeoutHandler = setTimeout(() => {
reject("timeout");
remListeners();
},
timeout);
}
function remListeners(){
stream.removeListener("error", errorHandler);
stream.removeListener("data", dataHandler);
if(timeoutHandler) clearTimeout(timeoutHandler);
}
function errorHandler(err) {
remListeners();
}
function dataHandler(data) {
resolve(data);
remListeners();
}
stream.on("error", errorHandler);
stream.on("data", dataHandler);
});
}
function detectStreamsData(streams, timeout = 5000){
let promises = [];
streams.forEach((stream) => {
promises.push(detectStreamData(stream, timeout));
});
return promises;
}
// Specify exports
module.exports = {
detectStreamData,
detectStreamsData,
};
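A short usage sketch for these helpers (the stream variables are hypothetical, not part of this file): detectStreamsData returns one promise per stream, so a caller would typically combine them with Promise.all.

const { detectStreamsData } = require("./helper/userHelper.js");

// Resolves once every stream has emitted at least one "data" event,
// rejects with "timeout" if one of them stays silent too long.
Promise.all(detectStreamsData([someProcess.stdout, someTransformStream], 5000))
    .then(() => console.log("All streams delivered data"))
    .catch((err) => { if (err === "timeout") console.warn("A stream produced no data in time"); });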

View File: src/helper/wifiStateAnalyzer.js (new file, 43 lines)

@@ -0,0 +1,43 @@
const { HandshakeStage } = require.main.require("./dto/Packet.js");
function keyInfoFromRaw(keyInfoRaw) {
return {
"KeyDescriptorVersion": keyInfoRaw>>0 & 0b111,
"KeyType": keyInfoRaw>>3 & 0b1,
"KeyIndex": keyInfoRaw>>4 & 0b11,
"Install": keyInfoRaw>>6 & 0b1,
"KeyACK": keyInfoRaw>>7 & 0b1,
"KeyMIC": keyInfoRaw>>8 & 0b1,
"Secure": keyInfoRaw>>9 & 0b1,
"Error": keyInfoRaw>>10 & 0b1,
"Request": keyInfoRaw>>11 & 0b1,
"EncryptedKeyData": keyInfoRaw>>12 & 0b1,
"SMKMessage": keyInfoRaw>>13 & 0b1,
};
}
const HANDSHAKE_STAGE_KEYINFO = {
"keys": ["Install", "KeyACK", "KeyMIC", "Secure"],
"0100": HandshakeStage[1],
"0010": HandshakeStage[2],
"1111": HandshakeStage[3],
"0011": HandshakeStage[4],
};
function handshakeStageFromKeyInfo(keyInfo){
// Extract compare-keys
let keyData = "";
for (const key of HANDSHAKE_STAGE_KEYINFO["keys"]) {
keyData += keyInfo[key].toString();
}
// Get and return stage
return HANDSHAKE_STAGE_KEYINFO[keyData];
}
// Specify exports
module.exports = {
keyInfoFromRaw,
handshakeStageFromKeyInfo,
};
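A worked example of the two helpers above (0x008a is a typical key-information value for message 1 of the 4-way handshake; it is used here purely as an illustration):

const { keyInfoFromRaw, handshakeStageFromKeyInfo } = require("./helper/wifiStateAnalyzer.js");

const keyInfo = keyInfoFromRaw(0x008a);
// -> KeyACK = 1, Install = KeyMIC = Secure = 0 (plus KeyType = 1, KeyDescriptorVersion = 2)
// The lookup key "Install KeyACK KeyMIC Secure" concatenates to "0100"
console.log(handshakeStageFromKeyInfo(keyInfo)); // HandshakeStage[1], i.e. "1"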

View File: src/main.js

@@ -1,49 +1,136 @@
const logger = require("./helper/logger.js")("main");
"use strict";
const logFactory = require("./helper/logger.js");
const logger = logFactory("main");
const { requireEnvVars } = require("./helper/env.js");
const { exit } = require("process");
const { InfluxDB } = require('@influxdata/influxdb-client');
const InfluxChecks = require('./helper/influx-checks.js');
const { exec } = require("./helper/exec.js");
const Os = require("os");
const { InfluxDB } = require("@influxdata/influxdb-client");
const InfluxChecks = require("./helper/influx-checks.js");
const { RegexBlockStream } = require("./streamHandler/RegexBlockStream.js");
const { PacketStreamFactory } = require("./streamHandler/PacketStreamFactory.js");
const { PacketInfluxPointFactory } = require("./streamHandler/PacketInfluxPointFactory.js");
const { InfluxPointWriter } = require("./streamHandler/InfluxPointWriter.js");
const userHelper = require("./helper/userHelper.js");
/// Setup ENVs
const env = process.env;
// Defaults
{
env.LOGLEVEL ??= "INFO";
env.WIFI_INTERFACE ??= "wlan0";
env.LOGLEVEL ??= "INFO";
env.WIFI_INTERFACE ??= "wlan0";
env.HOSTNAME ??= Os.hostname();
}
// Required vars
let errorMsg = requireEnvVars([
"INFLUX_URL", "INFLUX_TOKEN",
"INFLUX_ORG", "INFLUX_BUCKET"
"INFLUX_URL", "INFLUX_TOKEN",
"INFLUX_ORG", "INFLUX_BUCKET"
]);
if(errorMsg){
logger.fatal(errorMsg);
exit(1);
logger.fatal(errorMsg);
exit(1);
}
(async function() {
logger.info("Setup Influx..");
const influxDb = new InfluxDB({url: env.INFLUX_URL, token: env.INFLUX_TOKEN});
logger.info("Setup Influx..");
const influxDb = new InfluxDB({url: env.INFLUX_URL, token: env.INFLUX_TOKEN});
await InfluxChecks.checkHealth(influxDb)
.then((res) => {return InfluxChecks.checkBucket(influxDb, {
org: env.INFLUX_ORG,
name: env.INFLUX_BUCKET
})})
.then((res) => {return InfluxChecks.checkWriteApi(influxDb, {
org: env.INFLUX_ORG,
bucket: env.INFLUX_BUCKET
})})
.catch((err) => {
if(err) {
logger.error("Error whilst checking influx:");
logger.error(err);
}
logger.fatal("Setup influx failed!");
exit(1);
await InfluxChecks.checkHealth(influxDb)
.then((res) => {return InfluxChecks.checkBucket(influxDb, {
org: env.INFLUX_ORG,
name: env.INFLUX_BUCKET
});})
.then((res) => {return InfluxChecks.checkWriteApi(influxDb, {
org: env.INFLUX_ORG,
bucket: env.INFLUX_BUCKET
});})
.catch((err) => {
if(err) {
logger.error("Error whilst checking influx:");
logger.error(err);
}
logger.fatal("Setup influx failed!");
exit(1);
});
logger.debug("Get WriteApi & set default-hostname to", `'${env.HOSTNAME}'`);
const influxWriteApi = influxDb.getWriteApi(env.INFLUX_ORG, env.INFLUX_BUCKET, "us");
influxWriteApi.useDefaultTags("hostname", env.HOSTNAME);
logger.info("Influx ok");
logger.info("Starting tcpdump..");
const TCPDUMP_BASECMD = "tcpdump -vvv -e -n -X -s0 -i";
let cmd = `${TCPDUMP_BASECMD} ${env.WIFI_INTERFACE}`;
let proc = exec(cmd);
logger.debug("Creating & Attaching streams..");
let regexBlockStream = new RegexBlockStream(/^\d{2}:\d{2}:\d{2}.\d{6}.*(\n( {4,8}|\t\t?).*)+\n/gm);
let packetStreamFactory = new PacketStreamFactory();
let packetInfluxPointFactory = new PacketInfluxPointFactory();
let influxPointWriter = new InfluxPointWriter(influxWriteApi);
proc.stdout
.setEncoding("utf8")
.pipe(regexBlockStream)
.pipe(packetStreamFactory)
.pipe(packetInfluxPointFactory)
.pipe(influxPointWriter);
logger.debug("Attaching error-logger..");
const loggerTcpdump = logFactory("tcpdump");
proc.stderr.setEncoding("utf8").on("data", (data) => {
if(!data.match(/^(tcpdump: )?listening on /i) || !data.match(/^\d+ packets captured/i)) { // Catch start-error
loggerTcpdump.debug(data);
}
else loggerTcpdump.error(data);
});
logger.info("Influx ok");
// FIXME: This is a hacky workaround to not let errors from subprocess bubble up and terminate our process
regexBlockStream.on("error", (err) => {});
proc.on("error", (err) => {
loggerTcpdump.error(err);
});
const loggerPacketStream = logFactory("PacketStreamFactory");
userHelper.detectStreamData(proc.stdout, 10000) // Expect tcpdump-logs to have data after max. 10s
.then(() => {
loggerTcpdump.debug("Got first data");
userHelper.detectStreamData(packetStreamFactory, 10000) // Expect then to have packets after further 10s
.then(() => {
loggerPacketStream.debug("Got first packet");
})
.catch((err) => {
if(err == "timeout") loggerPacketStream.warn("No packets");
});
})
.catch((err) => {
if(err == "timeout") loggerTcpdump.warn("No data after 10s! Wrong configuration?");
});
logger.debug("Attaching exit-handler..");
proc.on("exit", (code) => {
loggerTcpdump.debug(`tcpdump exited code: ${code}`);
if (code) {
loggerTcpdump.fatal(`tcpdump exited with non-zero code: ${code}`);
exit(1);
}
logger.info("Shutdown");
exit(0);
});
// Handle stop-signals for graceful shutdown
function shutdownReq() {
logger.info("Shutdown request received..");
logger.debug("Stopping subprocess tcpdump, then exiting myself..");
proc.kill(); // Kill process (send SIGTERM), then upper event-handler will stop self
}
process.on("SIGTERM", shutdownReq);
process.on("SIGINT", shutdownReq);
logger.info("Startup complete");
})();
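The empty "error" handler above is needed because .pipe() does not forward errors between stages. A possible alternative, not part of this commit, would be stream.pipeline, which reports the first error from any stage to a single callback and tears the chain down (sketched with the same variables as in the listing above):

const { pipeline } = require("stream");

pipeline(
    proc.stdout.setEncoding("utf8"), // setEncoding() returns the stream itself
    regexBlockStream,
    packetStreamFactory,
    packetInfluxPointFactory,
    influxPointWriter,
    (err) => { if (err) logger.error(err); }
);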

View File: src/streamHandler/InfluxPointWriter.js

@@ -1,6 +1,6 @@
const logger = require.main.require("./helper/logger.js")("InfluxPointWriter");
const { Writable } = require('stream');
const {InfluxDB, Point, HttpError} = require('@influxdata/influxdb-client')
const { Writable } = require("stream");
const { WriteApi } = require("@influxdata/influxdb-client");
/**
* Get points and write them into influx
@@ -8,16 +8,13 @@ const {InfluxDB, Point, HttpError} = require('@influxdata/influxdb-client')
class InfluxPointWriter extends Writable{
/**
*
* @param {InfluxDB} influxDb InfluxDb
* @param {string} org Organization to use
* @param {string} bucket Bucket to use
* @param {Partial<WriteOptions>} options Options for WriteApi
* @param {WriteApi} writeApi WriteAPI from InfluxDB instance
*/
constructor(influxDb, org, bucket, options){
constructor(writeApi){
super({
objectMode: true
});
this._api = influxDb.getWriteApi(org, bucket, 'us', options);
this._api = writeApi;
}
_write(point, encoding, next){

View File: src/streamHandler/PacketInfluxPointFactory.js

@@ -1,25 +1,27 @@
const logger = require.main.require("./helper/logger.js")("PacketStreamFactory");
const { Transform } = require('stream');
const {Point} = require('@influxdata/influxdb-client')
const { Transform } = require("stream");
const {Point} = require("@influxdata/influxdb-client");
/** Keys to always use as tags */
const TAG_LIST = [
"srcMac",
"dstMac",
"srcmac",
"dstmac",
"bssid",
"frequency",
"flags",
"packettype",
];
/** Measurement-name and corresponding field-key */
const MEASUREMENT_MAP = new Map([
["Signal", "signal"],
["PayloadSize", "payloadSize"],
["DataRate", "dataRate"],
["SSID", "ssid"],
["AuthenticationType", "authenticationType"],
["AssociationSuccess", "associationIsSuccessful"],
["DisassociationReason", "disassociationReason"],
["rfmon_signal_dbm", "signal"],
["rfmon_payloadsize_bytes", "payloadSize"],
["rfmon_datarate_bytes", "dataRate"],
["rfmon_ssid_names", "ssid"],
["rfmon_authenticationtype_info", "authenticationType"],
["rfmon_associationsuccess_bools", "associationIsSuccessful"],
["rfmon_disassociationreason_info", "disassociationReason"],
["rfmon_handshakestage_info", "handshakeStage"],
]);
@@ -37,15 +39,18 @@ class PacketInfluxPointFactory extends Transform{
_transform(packet, encoding, next){
// Create measurements
MEASUREMENT_MAP.forEach((objKey, measurement) => {
if(!Object.keys(packet).includes(objKey)) return;
if(packet[objKey] == null) return;
let point = new Point(measurement); // Create point
// Set tags
TAG_LIST.filter(tag => Object.keys(packet).includes(tag))
.forEach(tag => point.tag(tag, packet[tag]));
TAG_LIST.filter(tag => Object.keys(packet).includes(tag)) // Filter tags available on object
.filter(tag => packet[tag] != null) // Filter tags not falsy on object
.forEach(tag => {
tagObjectRecursively(point, tag, packet[tag]);
});
point.setField('value', packet[objKey]); // Set field
point.setField("value", packet[objKey]); // Set field
this.push(point); // Push point into stream
});
@@ -54,17 +59,26 @@ class PacketInfluxPointFactory extends Transform{
}
}
function tagObjectRecursively(point, tag, field, suffix = ""){
if(typeof(field) == "object"){
// TODO: Convert boolean-arrays like "packet.flags" to key: value
Object.entries(field).map(([key, value]) => {
tagObjectRecursively(point, tag, value, `_${key}${suffix}`);
});
}
else point.tag(tag+suffix, field);
}
/** Mapping for type -> field-method */
const POINT_FIELD_TYPE = new Map([
['boolean', function(key, value){ return this.booleanField(key, value); }],
['number', function(key, value){ return this.intField(key, value); }],
['string', function(key, value){ return this.stringField(key, value); }],
["boolean", function(key, value){ return this.booleanField(key, value); }],
["number", function(key, value){ return this.intField(key, value); }],
["string", function(key, value){ return this.stringField(key, value); }],
]);
Point.prototype.setField = function(key, value){
let setField = POINT_FIELD_TYPE.get(typeof value);
return setField.apply(this, [key, value]);
}
};
// Specify exports
module.exports = {

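To illustrate the two helpers from this file (a sketch with made-up values, assuming the module above has been loaded so that Point.prototype.setField exists):

const { Point } = require("@influxdata/influxdb-client");

// tagObjectRecursively flattens a nested field into suffixed tags:
//   tagObjectRecursively(point, "flags", { Retry: true, Protected: true })
//   results in point.tag("flags_Retry", true) and point.tag("flags_Protected", true)

// setField picks the field writer by typeof value:
new Point("rfmon_signal_dbm").setField("value", -63);                // -> intField
new Point("rfmon_ssid_names").setField("value", "MyWifi");           // -> stringField
new Point("rfmon_associationsuccess_bools").setField("value", true); // -> booleanField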
View File: src/streamHandler/PacketStreamFactory.js

@@ -1,8 +1,9 @@
const logger = require.main.require("./helper/logger.js")("PacketStreamFactory");
const { Transform } = require('stream');
const { Transform } = require("stream");
const { DateTime } = require("luxon");
const { PacketType, FlagType, Packet, PacketWithSSID, BeaconPacket, ProbeRequestPacket, ProbeResponsePacket, AuthenticationPacket, AuthenticationType, AssociationResponsePacket, DisassociationPacket, HandshakePacket, HandshakeStage } = require.main.require('./dto/Packet.js');
const { PacketType, FlagType, Packet, PacketWithSSID, BeaconPacket, ProbeRequestPacket, ProbeResponsePacket, AuthenticationPacket, AuthenticationType, AssociationResponsePacket, DisassociationPacket, HandshakePacket, HandshakeStage } = require.main.require("./dto/Packet.js");
const hexConv = require.main.require("./helper/hexConverter.js");
const wifiStateAnalyser = require.main.require("./helper/wifiStateAnalyzer.js");
const PACKET_TYPE_MAP = {
"Beacon": PacketType.Beacon,
@@ -19,20 +20,20 @@ const PACKET_TYPE_MAP = {
"Disassociation:": PacketType.Disassociation,
"EAPOL": PacketType.Handshake,
};
const PACKET_TYPES_REGEX = Object.keys(PACKET_TYPE_MAP).join('|');
const PACKET_TYPES_REGEX = Object.keys(PACKET_TYPE_MAP).join("|");
const AUTHENTICATION_TYPE_MAP = {
"(Open System)-1": AuthenticationType.OpenSystem_1,
"(Open System)-2": AuthenticationType.OpenSystem_2,
}
};
const FLAG_TYPE_MAP = {
"Retry": FlagType.Retry,
"Pwr Mgmt": FlagType.PwrMgt,
"More Data": FlagType.MoreData,
"Protected": FlagType.Protected,
}
const FLAG_TYPE_MAPS_REGEX = Object.keys(FLAG_TYPE_MAP).join('|');
};
const FLAG_TYPE_MAPS_REGEX = Object.keys(FLAG_TYPE_MAP).join("|");
/**
* Read data from text-blocks and convert them to Packet
@@ -48,8 +49,8 @@ class PacketStreamFactory extends Transform{
_transform(chunk, encoding, next){
let packet = new Packet();
const lines = chunk.split('\n');
const header = lines.splice(0, 1)[0]; // Grab first line, 'lines' is now the payload
const lines = chunk.split("\n");
const header = lines.splice(0, 1)[0]; // Grab first line, "lines" is now the payload
packet = this._handleHeader(packet, header);
packet = this._handlePayload(packet, lines);
@@ -61,21 +62,26 @@ class PacketStreamFactory extends Transform{
packet.timestampMicros = DateTime.fromISO(data.slice(0, 12)).toSeconds() + data.slice(12, 15)/1000000;
// Find flags
data.match(data.match(new RegExp('(?<=^|\\s)('+ FLAG_TYPE_MAPS_REGEX +')(?=$|\\s)', 'ig'))
data.match(data.match(new RegExp("(?<=^|\\s)("+ FLAG_TYPE_MAPS_REGEX +")(?=$|\\s)", "ig"))
?.forEach(match => packet.flags[FLAG_TYPE_MAP[match]] = true) // Set them to true in flags
);
packet.dataRate = Number(data.match(/(?<=^|\s)[0-9]+(\.[0-9]+)?(?=\sMb\/?s($|\s))/i)?.[0]) || null;
packet.frequency = Number(data.match(/(?<=^|\s)[0-9]{4}(?=\sMHz($|\s))/i)?.[0]) || null;
packet.dataRate = Number(data.match(/(?<=^|\s)\d+(\.\d+)?(?=\sMb\/?s($|\s))/i)?.[0]) || null;
packet.frequency = Number(data.match(/(?<=^|\s)\d{4}(?=\sMHz($|\s))/i)?.[0]) || null;
packet.durationMicros = Number(data.match(/(?<=^|\s)[0-9]{1,4}(?=us($|\s))/i)?.[0]) || null;
packet.durationMicros = Number(data.match(/(?<=^|\s)\d{1,4}(?=us($|\s))/i)?.[0]) || null;
packet.signal = Number(data.match(/(?<=^|\s)-[0-9]{2,3}(?=dBm\sSignal($|\s))/i)?.[0]) || null;
packet.signal = Number(data.match(/(?<=^|\s)-\d{2,3}(?=dBm\sSignal($|\s))/i)?.[0]) || null;
let packetTypeStr = data.match(new RegExp('(?<=^|\\s)('+ PACKET_TYPES_REGEX +')(?=$|\\s)', 'i'))?.[0];
packet.packetType = packetTypeStr? PACKET_TYPE_MAP[packetTypeStr]:
data.match(/(SA|TA|DA|RA|BSSID):.{17}\s*$/i)? PacketType.NoData:
PacketType.Unknown;
let packetTypeStr = data.match(new RegExp("(?<=^|\\s)("+ PACKET_TYPES_REGEX +")(?=$|\\s)", "i"))?.[0];
if(packetTypeStr)
packet.packetType = PACKET_TYPE_MAP[packetTypeStr];
else if(data.match(/(SA|TA|DA|RA|BSSID):.{17}\s*$/i)){
packet.packetType = PacketType.NoData;
}
else {
packet.packetType = PacketType.Unknown;
}
packet.srcMac = data.match(/(?<=(^|\s)(SA|TA):).{17}(?=$|\s)/i)?.[0] ?? null;
@@ -91,12 +97,12 @@ class PacketStreamFactory extends Transform{
case PacketType.ProbeResponse:
case PacketType.AssociationRequest:
newPacket = new PacketWithSSID();
newPacket.ssid = data.match(new RegExp('(?<=(^|\\s)'+ packetTypeStr +'\\s\\().{0,32}(?=\\)($|\\s))', 'i'))?.[0] ?? null;
newPacket.ssid = data.match(new RegExp("(?<=(^|\\s)"+ packetTypeStr +"\\s\\().{0,32}(?=\\)($|\\s))", "i"))?.[0] ?? null;
break;
case PacketType.Authentication:
newPacket = new AuthenticationPacket();
newPacket.authenticationType = AUTHENTICATION_TYPE_MAP[data.match(/(?<=(^|\s)Authentication\s).{3,}(?=\:(\s|$))/i)[0]] ?? AuthenticationType.Unknown;
newPacket.authenticationType = AUTHENTICATION_TYPE_MAP[data.match(/(?<=(^|\s)Authentication\s).{3,}(?=:(\s|$))/i)[0]] ?? AuthenticationType.Unknown;
break;
case PacketType.AssociationResponse:
@@ -115,40 +121,25 @@ class PacketStreamFactory extends Transform{
}
_handlePayload(packet, data){
data = data.join('');
data = data.join("");
// Get payload-Hex-Data. If there is no data: empty
packet.payloadData = hexConv.hexToBytes(data.match(/(?<=\s)([A-F0-9]{1,4}(?=\s))/igm)?.join('') ?? '');
packet.payloadData = hexConv.hexToBytes(data.match(/(?<=\s)([A-F0-9]{1,4}(?=\s))/igm)?.join("") ?? "");
packet.payloadData.splice(packet.payloadData.length-4, 4); // Remove FrameCheck sequence
// Cover special cases with more data
let newPacket;
switch(packet.packetType){
case PacketType.Handshake:
case PacketType.Handshake: {
newPacket = new HandshakePacket();
// Read key-information
const keyInfoRaw = (packet.payloadData[0x5]<<0x8) + packet.payloadData[0x6];
const keyInfo = {
"KeyDescriptorVersion": keyInfoRaw>>0 & 0b111,
"KeyType": keyInfoRaw>>3 & 0b1,
"KeyIndex": keyInfoRaw>>4 & 0b11,
"Install": keyInfoRaw>>6 & 0b1,
"KeyACK": keyInfoRaw>>7 & 0b1,
"KeyMIC": keyInfoRaw>>8 & 0b1,
"Secure": keyInfoRaw>>9 & 0b1,
"Error": keyInfoRaw>>10 & 0b1,
"Request": keyInfoRaw>>11 & 0b1,
"EncryptedKeyData": keyInfoRaw>>12 & 0b1,
"SMKMessage": keyInfoRaw>>13 & 0b1,
};
const keyInfo = wifiStateAnalyser.keyInfoFromRaw(keyInfoRaw); // Convert
newPacket.handshakeStage = (!keyInfo.Install && keyInfo.KeyACK && !keyInfo.KeyMIC && !keyInfo.Secure)? HandshakeStage[1] :
(!keyInfo.Install && !keyInfo.KeyACK && keyInfo.KeyMIC && !keyInfo.Secure)? HandshakeStage[2] :
( keyInfo.Install && keyInfo.KeyACK && keyInfo.KeyMIC && keyInfo.Secure)? HandshakeStage[3] :
(!keyInfo.Install && !keyInfo.KeyACK && keyInfo.KeyMIC && keyInfo.Secure)? HandshakeStage[4] :
null;
newPacket.handshakeStage = wifiStateAnalyser.handshakeStageFromKeyInfo(keyInfo); // Get stage
break;
}
}
if(newPacket) packet = Object.assign(newPacket, packet);
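For orientation, an illustrative header line (not verbatim tcpdump output) and what the regexes above would extract from it:

// "11:22:33.123456 1.0 Mb/s 2412 MHz -63dBm Signal Beacon (MyWifi)"
//   dataRate   -> 1      (number in front of " Mb/s")
//   frequency  -> 2412   (four digits in front of " MHz")
//   signal     -> -63    (negative number in front of "dBm Signal")
//   packetType -> PacketType.Beacon, ssid -> "MyWifi" (text inside "Beacon (...)")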

View File: src/streamHandler/RegexBlockStream.js

@@ -1,5 +1,5 @@
const logger = require.main.require("./helper/logger.js")("RegexBlockStream");
const { Transform } = require('stream')
const { Transform } = require("stream");
/**
* Matches whole blocks as regex and passes them on
@@ -13,7 +13,7 @@ class RegexBlockStream extends Transform{
* @param {RegExp} matcher Block-match
* @param {boolean} withholdLastBlock When true, the last matches block will not be submitted to prevent submitting incomplete blocks.
* @param {boolean} matchAllOnFlush (Only in combination with withholdLastBlock) When enabled, the buffer will be matched on last time on _flush (stream deconstruction) and write any, also incomplete, blocks
* @remarks WARNING: It should match a clean-block (including e.g. newline)! Otherwise buffer will get dirty and use more and more ressources.
* @remarks WARNING: It should match a clean-block (including e.g. newline)! Otherwise buffer will get dirty and use more and more resources.
*/
constructor(matcher, withholdLastBlock = true, matchAllOnFlush = false){
super({
@@ -27,7 +27,7 @@ class RegexBlockStream extends Transform{
}
_transform(chunk, encoding, next){
chunk = this.readableBuffer.length? this.readableBuffer.join('') + chunk: chunk; // Add previous buffer to current chunk
chunk = this.readableBuffer.length? this.readableBuffer.join("") + chunk: chunk; // Add previous buffer to current chunk
this.readableBuffer.length && this.readableBuffer.clear(); // Clear buffer once we read it
let matches = chunk.match(this.matcher); // Match
@@ -44,17 +44,17 @@ class RegexBlockStream extends Transform{
if(matches){
matches.forEach((match) => {
this.push(match); // Write match to stream
if(chunk) chunk = chunk.replace(match, ''); // Remove match from chunks
if(chunk) chunk = chunk.replace(match, ""); // Remove match from chunks
});
}
if(chunk) return chunk;
}
_flush(next){
if(matchAllOnFlush){ // When requested, we'll match one last time over the remaining buffer
let chunk = this.readableBuffer.join('');
if(this.matchAllOnFlush){ // When requested, we'll match one last time over the remaining buffer
let chunk = this.readableBuffer.join("");
let matches = chunk.match(this.matcher); // Match remaining buffer
_writeMatches(matches); // Write matches including last element
this._writeMatches(matches); // Write matches including last element
}
next(); // Tell system we are done
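As used in main.js, the matcher /^\d{2}:\d{2}:\d{2}.\d{6}.*(\n( {4,8}|\t\t?).*)+\n/gm groups the tcpdump output into blocks of one timestamped header line plus its indented continuation lines, roughly like this (illustrative shape, not real capture data):

// 11:22:33.123456 <header line: flags, data rate, frequency, signal, packet type, MACs>
//     0x0000:  0123 4567 89ab cdef ...   <indented hex-dump lines produced by -X>
//     0x0010:  ...
// The next timestamped line starts a new block; with withholdLastBlock = true (the default)
// the last, possibly incomplete block is kept in the buffer until more data arrives.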