Compare commits

..

62 Commits

Author SHA1 Message Date
Ruakij 09e86de93d Merge branch 'release-2.1' 2 years ago
Ruakij 4bdaae0c70 Merge branch 'dev' into release-2.1 2 years ago
Ruakij 41302b9f9f Set bash args as extra-args 2 years ago
Ruakij ad8d546b78 Utilize Multi-stage building to reduce Image-size 2 years ago
Ruakij d5a727c3df Make executable 2 years ago
Ruakij d5c7f0a580 Make apk package install more elegant 2 years ago
Ruakij 9ff168317e Add build-scripts for own- and multi-arch 2 years ago
Ruakij 7469565f52 Merge branch 'dev' 3 years ago
Ruakij a6a3a8b180 Add Architecture-Overview 3 years ago
Ruakij e1238b1ba0 Fix TOC-points 3 years ago
Ruakij b545b99135 Fix wrong Description 3 years ago
Ruakij 6c848c0e42 Merge branch 'release-2.1' 3 years ago
Ruakij 918306647d Fixed chunk-error not being stopped after handling 3 years ago
Ruakij 1bc52b0a37 Merge branch 'release-2.1' 3 years ago
Ruakij 3b10aca352 Added chunk-check against undefined 3 years ago
Ruakij b1942b89cb Merge branch 'release-2.1' 3 years ago
Ruakij 10bd72907e Merge branch 'f_influxdb-line-protocol' into dev 3 years ago
Ruakij 69e910428e Added documentation for LineProtocol export 3 years ago
Ruakij 01e28682df Merge branch 'dev' into f_influxdb-line-protocol 3 years ago
Ruakij 3903e492d2 Merge branch 'docs' into dev 3 years ago
Ruakij ae4f2f13f0 Remove screenshot-section, it will be readded later 3 years ago
Ruakij 2f84bb4408 Implement connection-checking 3 years ago
Ruakij c09c6c29fb Move InfluxDB to if-block 3 years ago
Ruakij 1012001312 Implement env-var-checks 3 years ago
Ruakij f596a99ee6 Implement converter-stream point -> lineProtocol 3 years ago
Ruakij d1cf1d8f7d Add connection-state tracking as getter 3 years ago
Ruakij 4e2ffec656 Fix typo 3 years ago
Ruakij 7f5e168fda Added missing autoReconnectBackoffTime 3 years ago
Ruakij 024305db43 Fixed host and port wrong way around 3 years ago
Ruakij 6b93a02943 Implement basic LineProtocolWriter 3 years ago
Ruakij 7753c245d2 Created first big version of documentation 3 years ago
Ruakij a1faee8303 Change linkTypeDetail to be written to the upper-scoped-var 3 years ago
Ruakij 230c75ffd4 Fix match being wrong way around 3 years ago
Ruakij 7c5c37e9b5 Implemented check if interface is in wrong mode 3 years ago
Ruakij ae389c8310 Fix signal not being used 3 years ago
Ruakij a3d7239895 Change shutdown to accept exitCode and kill-signal 3 years ago
Ruakij 38fed9604c Merge branch 'release-2.0' into dev 3 years ago
Ruakij 5303b31bd7 Merge branch 'release-2.0' 3 years ago
Ruakij 94846a48e1 Fix data-fields not getting added as tags 3 years ago
Ruakij f00db16269 Merge branch 'dev' into release-2.0 3 years ago
Ruakij 1bffd22735 Commented hostname-tag as it could be misleading 3 years ago
Ruakij 41b2caecb3 Merge branch 'release-2.0' into dev 3 years ago
Ruakij c3cd6393d4 Merge branch 'release-2.0' 3 years ago
Ruakij 8eef17fd4c Fix wrong default-tag setting 3 years ago
Ruakij c97137f4a7 Merge branch 'release-2' 3 years ago
Ruakij 39350932a4 Changed metric- and tag-names to match best-practice naming-convention 3 years ago
Ruakij 1e37f35e38 Moved creation of writeApi to main and set default-tag hostname 3 years ago
Ruakij d0be44c1af Fix coding-style 3 years ago
Ruakij a13d81e9c0 Merge branch 'release-1.1' 3 years ago
Ruakij 059c02e243 Merge branch 'dev' into release-1.1 3 years ago
Ruakij 57cf6fb0a7 Add npm prune (wont do much here) 3 years ago
Ruakij 298a96bf16 apk-install on 1 line and delete cache when done 3 years ago
Ruakij b98dff947d Refactored packet to use new code-style (forgot last time :/) 3 years ago
Ruakij a610f209d5 Merge branch 'release-1.1' 3 years ago
Ruakij 0f6c5b6b0e Change baseimage from node to node-alpine
Size drastically decreased
3 years ago
Ruakij cc9e4c7258 Fix flush using local vars instead of this-vars 3 years ago
Ruakij 56ac283544 Refactored code to match code-style 3 years ago
Ruakij 9095e21e6f Add handshakeStage as metric 3 years ago
Ruakij 6e05a0b45c Merge branch 'release-1.0' 3 years ago
Ruakij d14e469ef4 Removed error-logging from RegexBlockStream FIXME 3 years ago
Ruakij 99a3e13d77 Added further exception to error-event from tcpdump 3 years ago
Ruakij b5c895674e Merge branch 'f_gracefulShutdown' into dev 3 years ago

@ -1,16 +1,28 @@
FROM node:16 # ---- Base ----
FROM alpine:3 AS base
# Create app directory # Create app directory
WORKDIR /usr/src/app WORKDIR /usr/src/app
# Copy project file
COPY package.json .
# Install required apk-packages
RUN apk add --no-cache nodejs npm tcpdump
# ---- Dependencies ----
FROM base AS dependencies
# Install app dependencies # Install app dependencies
COPY package*.json ./ RUN npm install --only=production
RUN npm install
RUN apt-get update # ---- Release ----
RUN apt-get -y install \ FROM base AS release
tcpdump
# copy from build image
COPY --from=dependencies /usr/src/app/ ./
# Bundle app source # Bundle app source
COPY ./src/ . COPY ./src/ .

@ -1,3 +1,419 @@
# rfmon-to-influx rfmon-to-influx
=================
![](docs/img/header0.png)
*Successful Associations, grouped by AP within 24h*
<br>
Writing (mostly meta-) data received in Wireless-Monitor-Mode into an InfluxDB. Writing (mostly meta-) data received in Wireless-Monitor-Mode into an InfluxDB.
<br>
Table of contents
=================
<!-- TOC -->
- [1. Description](#1-description)
- [1.1. What kind of data](#11-what-kind-of-data)
- [1.2. Data-Usage](#12-data-usage)
- [1.3. Tools used](#13-tools-used)
- [1.4. Architecture](#14-architecture)
- [2. Usage/Installation](#2-usageinstallation)
- [2.1. Prerequisites](#21-prerequisites)
- [2.2. Choosing an Export-Method](#22-choosing-an-export-method)
- [2.3. Running with Docker](#23-running-with-docker)
- [2.4. Environment-Variables](#24-environment-variables)
- [3. Data collected](#3-data-collected)
- [3.1. Data-Types](#31-data-types)
- [3.2. Metric-Overview](#32-metric-overview)
- [3.3. Metric-Details](#33-metric-details)
- [3.4. Tag-Overview](#34-tag-overview)
- [3.5. Tag-Details](#35-tag-details)
- [4. Potential Issues](#4-potential-issues)
- [4.1. Channel/Frequency](#41-channelfrequency)
- [4.2. Technology](#42-technology)
- [4.3. Data protection](#43-data-protection)
- [4.4. Ethical](#44-ethical)
<!-- /TOC -->
<br>
# 1. Description
This program listens on a Wifi-Interface in Monitor-Mode (rfmon) and logs most of the observed activity into an InfluxDB or InfluxDB-like time-series database.
<br>
## 1.1. What kind of data
**Any** packet sent by a router or station nearby is received and its metadata is collected and categorised.
The host does **not** have to be part of that network.
<br>
## 1.2. Data-Usage
The data can be used to identify problems with the wifi-communication nearby,
e.g.
- Wifi-Congestion at certain times of the day
- occurring signal-issues
  - e.g. due to broken Microwave-Ovens disrupting communications
  - or big Objects being moved (e.g. Machines) causing signal-reduction.
<br>
As well as gaining knowledge about installed routers and user-interaction with them,
e.g.
- in a company environment
  - Logging presence and activity of interconnected machines
  - Finding other Access-Points which are not allowed due to potential disruption of Production-Lines
<br>
Other usages might be threat-detection at Wifi-Level
e.g.
- Deauthentication-Attacks
- Bruteforce-Attempts
<br>
## 1.3. Tools used
The program runs `tcpdump` in a subprocess for listening and extracts the metadata when packets arrive.
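A minimal sketch of how that subprocess interaction looks (the interface name is illustrative; the arguments mirror `TCPDUMP_BASECMD` in `src/main.js`):
```js
const { spawn } = require("child_process");

// Same base-arguments as TCPDUMP_BASECMD in src/main.js; "wlan0" is just an example interface
const proc = spawn("tcpdump", ["-vvv", "-e", "-n", "-X", "-s0", "-i", "wlan0"]);

proc.stdout.setEncoding("utf8").on("data", (text) => {
    console.log(text);      // one or more text-blocks describing captured frames
});
proc.stderr.setEncoding("utf8").on("data", (msg) => {
    console.error(msg);     // tcpdump reports status ("listening on ...") and errors here
});
```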
<br>
## 1.4. Architecture
![](docs/img/1.4.architecture.png)
The system heavily uses NodeJS-Streams to read, transform and pass data around.
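A condensed sketch of the pipeline wired up in `src/main.js` (the require-paths assume you are inside the project's `src/` directory; the final console consumer stands in for the Influx-/LineProtocol-writers used by the real code):
```js
const { spawn } = require("child_process");
const { RegexBlockStream } = require("./streamHandler/RegexBlockStream.js");
const { PacketStreamFactory } = require("./streamHandler/PacketStreamFactory.js");
const { PacketInfluxPointFactory } = require("./streamHandler/PacketInfluxPointFactory.js");

// raw tcpdump text -> per-packet blocks -> Packet objects -> influx Points
spawn("tcpdump", ["-vvv", "-e", "-n", "-X", "-s0", "-i", "wlan0"]).stdout
    .setEncoding("utf8")
    .pipe(new RegexBlockStream(/^\d{2}:\d{2}:\d{2}.\d{6}.*(\n( {4,8}|\t\t?).*)+\n/gm)) // group text into per-packet blocks
    .pipe(new PacketStreamFactory())        // text block -> Packet object
    .pipe(new PacketInfluxPointFactory())   // Packet -> influx Point with tags/fields
    .on("data", (point) => console.log(point.toLineProtocol())); // main.js pipes into a point-writer instead
```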
<br>
# 2. Usage/Installation
## 2.1. Prerequisites
The Wifi-Interface cannot be in use elsewhere at the same time, e.g. by Network-Manager.
(Packet-capture, e.g. with tcpdump or Wireshark, is ok.)
As of this version, the program does **not** set the interface into Monitor-Mode or change channels itself.
<br>
### 2.1.1. Interface into Monitor-Mode (rfmon)
You can change into Monitor-mode beforehand with the packages `net-tools` and `wireless-tools`:
```sh
ifconfig <interface> down
iwconfig <interface> mode Monitor
ifconfig <interface> up
```
<br>
### 2.1.2. Set/Change channels
You can set the channel of the interface (if the interface allows this) with the `iw` package:
```sh
iw dev <interface> set channel <channelNumber>
```
<br>
## 2.2. Choosing an Export-Method
The system allows exporting directly into [InfluxDB](https://docs.influxdata.com/influxdb) version >= 2.0 or into any system using the [InfluxDb-Line-Protocol](https://docs.influxdata.com/influxdb/v2.1/reference/syntax/line-protocol/) e.g. [QuestDB](https://questdb.io/) over TCP.
As of writing (using InfluxDB v2.1 and the *flux*-language), the amount of data written by this system was a bit too much for InfluxDB; it struggled very quickly even on a fairly beefy machine.
That's why the additional LineProtocol-Export-Method was added, leaving the choice of Time-Database up to you.
<br>
If you want to use the InfluxDB-Line-Protocol, simply set the environment variable `USE_INFLUXDB_LINEPROTOCOL` to `true` along with the other necessary Host- and Port-variables.
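For reference, the selection logic boils down to this check (a condensed sketch of what `src/main.js` and `helper/env.js` do; the string-comparison for the boolean here is a simplification):
```js
// Line-protocol export needs host & port, direct InfluxDB access needs full credentials
const useLineProtocol = process.env.USE_INFLUXDB_LINEPROTOCOL === "true";
const required = useLineProtocol
    ? ["INFLUXDB_LINEPROTOCOL_HOST", "INFLUXDB_LINEPROTOCOL_PORT"]
    : ["INFLUX_URL", "INFLUX_TOKEN", "INFLUX_ORG", "INFLUX_BUCKET"];

const unset = required.filter((name) => typeof process.env[name] === "undefined");
if (unset.length > 0) {
    console.error(`Required ENV variables are not set: [${unset.join(", ")}]`);
    process.exit(1);
}
```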
<br>
## 2.3. Running with Docker
### 2.3.1. Permissions
The container must run as **root** to have permission to listen on the Wifi-Interface.
<br>
### 2.3.2. docker run
Either run with docker directly.
<details><summary>for InfluxDB</summary>
```sh
docker run \
    -d \
    --restart unless-stopped \
    --network host \
    -e WIFI_INTERFACE="<yourInterfaceName or leave empty for wlan0>" \
    -e INFLUX_URL="http://influxdb:8086/" \
    -e INFLUX_TOKEN="<yourToken>" \
    -e INFLUX_ORG="<yourOrganisation>" \
    -e INFLUX_BUCKET="<yourBucket>" \
    ruakij/rfmon-to-influx:2
```
</details>
<details><summary>for InfluxDB-Line-Protocol</summary>
```sh
docker run \
    -d \
    --restart unless-stopped \
    --network host \
    -e WIFI_INTERFACE="<yourInterfaceName or leave empty for wlan0>" \
    -e USE_INFLUXDB_LINEPROTOCOL="true" \
    -e INFLUXDB_LINEPROTOCOL_HOST="<host>" \
    -e INFLUXDB_LINEPROTOCOL_PORT="<port>" \
    ruakij/rfmon-to-influx:2
```
</details>
<br>
### 2.3.3. docker-compose
Or use the preferred way with docker-compose.
`docker-compose.yml`
<details><summary>for InfluxDB</summary>
```yaml
version: '3'
services:
  rfmon:
    container_name: rfmon
    image: ruakij/rfmon-to-influx:2
    restart: unless-stopped
    network_mode: "host"
    environment:
      - WIFI_INTERFACE="<yourInterfaceName or leave empty for wlan0>"
      - INFLUX_URL="http://influxdb:8086/"
      - INFLUX_TOKEN="<yourToken>"
      - INFLUX_ORG="<yourOrganisation>"
      - INFLUX_BUCKET="<yourBucket>"
```
</details>
<details><summary>for InfluxDB-Line-Protocol</summary>
```yaml
version: '3'
services:
  rfmon:
    container_name: rfmon
    image: ruakij/rfmon-to-influx:2
    restart: unless-stopped
    network_mode: "host"
    environment:
      - WIFI_INTERFACE="<yourInterfaceName or leave empty for wlan0>"
      - USE_INFLUXDB_LINEPROTOCOL="true"
      - INFLUXDB_LINEPROTOCOL_HOST="<host>"
      - INFLUXDB_LINEPROTOCOL_PORT="<port>"
```
</details>
<br>
And then pull & start the container:
```sh
docker-compose up -d
```
<br>
## 2.4. Environment-Variables
### 2.4.1. Necessary
<details><summary>for InfluxDB</summary>
Variable|Description
---|---
`INFLUX_URL` | URL of the influx-server
`INFLUX_TOKEN` | Token with write-access
`INFLUX_ORG` | Organisation to write into
`INFLUX_BUCKET` | Bucket to write into
</details>
<details><summary>for InfluxDB-Line-Protocol</summary>
Variable|Description
---|---
`USE_INFLUXDB_LINEPROTOCOL` | Enable the LineProtocol-export
`INFLUXDB_LINEPROTOCOL_HOST` | Host of your line-protocol server
`INFLUXDB_LINEPROTOCOL_PORT` | Port of your line-protocol server
</details>
<br>
### 2.4.2. Optional
Variable|Default|Description
---|---|---
`LOGLEVEL` | INFO | Loglevel
`WIFI_INTERFACE` | wlan0 | Wifi-Interface name in Monitor-Mode
~~`HOSTNAME`~~ | ~~Device's Hostname~~ | ~~Hostname to use as global hostname-tag~~ *(Unused)*
<br>
# 3. Data collected
8 Metrics are constructed with 6-10 tags identifying them.
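For illustration, a single captured frame roughly turns into a point like the following (values are made up; the `Point`-API is the one from `@influxdata/influxdb-client` used by the project, and the metric/tag names are defined in the tables below):
```js
const { Point } = require("@influxdata/influxdb-client");

// Hypothetical Beacon-frame reduced to one metric plus identifying tags
const point = new Point("rfmon_signal_dbm")
    .tag("srcmac", "12:34:56:78:9A:BC")
    .tag("bssid", "12:34:56:78:9A:BC")
    .tag("frequency", "2412")
    .tag("packettype", "Beacon")
    .intField("value", -67);

console.log(point.toLineProtocol());
// roughly: rfmon_signal_dbm,bssid=12:34:56:78:9A:BC,frequency=2412,packettype=Beacon,srcmac=12:34:56:78:9A:BC value=-67i
```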
<br>
## 3.1. Data-Types
Type|Example|Description
---|---|---
`String` | Wlan | -
`Number` | 0 | Any normal number, positive and negative
`Boolean` | true | true or false values
`MAC` | 12:34:56:78:9A:BC | Address for L2-networks
<br>
## 3.2. Metric-Overview
---
<br>
Name|Type|Description
---|---|---
rfmon_signal_dbm | `Number` (-95 <> -20) | Signal-Level of every Packet in dBm
rfmon_datarate_bytes | `Number` (1 <> 144) | Data-Rate of every Packet in MBit/s
rfmon_ssid_names | `String` (Length: 0-32) | SSIDs of any Packet containing it
rfmon_authenticationtype_info | `String` | Authentication-Type used by Sender
rfmon_associationsuccess_bools | `Boolean` | Result of an Association
rfmon_disassociationreason_info | `String` | Disconnect-Reason from a ST (not always sent)
rfmon_handshakestage_info | `Number` (1 <> 4) | Stage of a handshake (1 and 3 from ST, 2 and 4 from AP)
<br>
## 3.3. Metric-Details
### 3.3.1. rfmon_ssid_names
`String` (Length: 0-32)
SSIDs from ProbeRequests might be empty (probe for any); in Beacon-Frames they could be hidden.
### 3.3.2. rfmon_authenticationtype_info
`String` {OpenSystem_1, OpenSystem_2, Unknown}
<br>
## 3.4. Tag-Overview
---
<br>
Name |Type |Description
---|---|---
srcmac | `MAC` | Sender's MAC-Address (not present in ClearToSend-Packet)
dstmac | `MAC` | Destination's MAC-Address (not present in RequestToSend-Packet)
bssid | `MAC` | AP's MAC-Address
frequency | `Number` | Frequency the packet was captured on in MHz
packetType | `String` | Type of packet
flags_MoreFragments | `Boolean` | Packet is incomplete
flags_Retry | " | Packet is being retried
flags_PwrMgt | " | Sender will not sleep
flags_MoreData | " | More data in send-buffer to be expected
flags_Protected | " | Packet is protected
flags_Order | " | Packet is strictly ordered
<br>
## 3.5. Tag-Details
### 3.5.1. frequency
`Number` (2412 <> 2484)
The frequency corresponds to the following wifi-channels:
Channel|Frequency
---|---
1 | 2412
2 | 2417
3 | 2422
4 | 2427
5 | 2432
6 | 2437
7 | 2442
8 | 2447
9 | 2452
10 | 2457
11 | 2462
12 | 2467
13 | 2472
14 | 2484
See [Wikipedia - List of WLAN channels - 2.4GHz](https://en.wikipedia.org/wiki/List_of_WLAN_channels#2.4_GHz_(802.11b/g/n/ax)) for more Information.
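A small helper, not part of the project, that maps these capture-frequencies back to channel numbers:
```js
// Illustrative helper: map a 2.4GHz capture-frequency in MHz to its wifi-channel
function frequencyToChannel(freqMhz) {
    if (freqMhz === 2484) return 14;                                      // channel 14 sits outside the 5MHz grid
    if (freqMhz >= 2412 && freqMhz <= 2472) return (freqMhz - 2407) / 5;  // channels 1-13: 2412, 2417, ... 2472
    return null;                                                          // outside the range covered by this table
}

console.log(frequencyToChannel(2437)); // 6
```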
### 3.5.2. packettype
`String`
Type|Sender|Description
---|---|---
Beacon | AP | Signal its presence and provide synchronisation for Stations
ProbeRequest | ST | Ask if certain RA/SSID is available
ProbeResponse | AP | Directly respond to Request and Signal own presence
Data | Both | Data-packets
RequestToSend | ST | Ask for transmission-time
ClearToSend | RA | Ack transmission-time
Acknowledgment | Both | Ack Data-Packets
BlockAcknowledgment | Both | Ack a lot of Data-Packets at once
NoData | Both | Packet without content, typically used to transmit QoS-States
Authentication | Both | Authentication-process to establish identity and set states
AssociationRequest | ST | Register to AP
AssociationResponse | AP | Respond to registering
Disassociation | ST | Actively unregister e.g. to associate with different AP
Handshake | Both | 4-Way-EAPOL-Handshake to generate encryption-keys between participants
Unknown | - | Unknown packets not identified into above types
<br>
# 4. Potential Issues
## 4.1. Channel/Frequency
The system can only monitor one channel at a time, which might not be enough coverage; to combat this, more Interfaces and Systems can be deployed.
This is not entirely unproblematic, as the system currently cannot prevent packets from being inserted more than once.
<br>
## 4.2. Technology
Mismatches between sender- and receiver-technologies (e.g. MIMO or HT) can cause packets to not be logged at all,
though this should only be a problem for Data-packets.
<br>
## 4.3. Data protection
Because the system collects all data it sees, this can be problematic, especially in countries with strong data-protection laws.
A wifi MAC-address is likely to be considered information of an identifiable natural person, e.g. under GDPR Art. 4 (1), and may therefore only be processed with prior consent or in anonymised form.
<br>
## 4.4. Ethical
The large-scale collection of data for behavioural or movement analysis, especially without consent of the data subject, is highly controversial.
Metadata that can be used to track precise activities, such as wifi data, is very powerful and should only be collected and used when necessary.
If this data falls into the hands of a malicious actor, more targeted attacks could be carried out, such as break-ins, behaviour-based discrimination or more successful phishing.

@ -0,0 +1,9 @@
TAG="ruakij/rfmon-to-influx"
PLATFORM="linux/amd64,linux/arm64/v8,linux/arm/v7"
EXTRA_ARGS="$@"
docker buildx build \
--platform $PLATFORM \
--tag $TAG \
$EXTRA_ARGS \
.

@ -0,0 +1,7 @@
TAG="ruakij/rfmon-to-influx"
EXTRA_ARGS="$@"
docker build \
--tag $TAG \
$EXTRA_ARGS \
.

Binary file not shown.

After

Width:  |  Height:  |  Size: 147 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 14 KiB

@ -1,20 +1,20 @@
const PacketType = { const PacketType = {
Beacon: 'Beacon', Beacon: "Beacon",
ProbeRequest: 'ProbeRequest', ProbeRequest: "ProbeRequest",
ProbeResponse: 'ProbeResponse', ProbeResponse: "ProbeResponse",
Data: 'Data', Data: "Data",
RequestToSend: 'RequestToSend', RequestToSend: "RequestToSend",
ClearToSend: 'ClearToSend', ClearToSend: "ClearToSend",
Acknowledgment: 'Acknowledgment', Acknowledgment: "Acknowledgment",
BlockAcknowledgment: 'BlockAcknowledgment', BlockAcknowledgment: "BlockAcknowledgment",
NoData: 'NoData', NoData: "NoData",
Authentication: 'Authentication', Authentication: "Authentication",
AssociationRequest: 'AssociationRequest', AssociationRequest: "AssociationRequest",
AssociationResponse: 'AssociationResponse', AssociationResponse: "AssociationResponse",
Disassociation: 'Disassociation', Disassociation: "Disassociation",
Handshake: 'Handshake', Handshake: "Handshake",
Unknown: 'Unknown' Unknown: "Unknown"
} };
const FlagType = { const FlagType = {
MoreFragments: "MoreFragments", MoreFragments: "MoreFragments",
@ -23,7 +23,7 @@ const FlagType = {
MoreData: "MoreData", MoreData: "MoreData",
Protected: "Protected", Protected: "Protected",
Order: "Order" Order: "Order"
} };
class Packet{ class Packet{
timestampMicros; timestampMicros;
@ -58,10 +58,10 @@ class ProbeRequestPacket extends PacketWithSSID{}
class ProbeResponsePacket extends PacketWithSSID{} class ProbeResponsePacket extends PacketWithSSID{}
const AuthenticationType = { const AuthenticationType = {
OpenSystem_1: 'OpenSystem_1', OpenSystem_1: "OpenSystem_1",
OpenSystem_2: 'OpenSystem_2', OpenSystem_2: "OpenSystem_2",
Unknown: 'Unknown', Unknown: "Unknown",
} };
class AuthenticationPacket extends Packet{ class AuthenticationPacket extends Packet{
authenticationType; authenticationType;
} }
@ -77,11 +77,11 @@ class DisassociationPacket extends Packet{
const HandshakeStage = { const HandshakeStage = {
1: '1', 1: "1",
2: '2', 2: "2",
3: '3', 3: "3",
4: '4' 4: "4"
} };
class HandshakePacket extends Packet{ class HandshakePacket extends Packet{
handshakeStage; handshakeStage;
} }

@ -1,9 +1,9 @@
function requireEnvVars(requiredEnv){ function requireEnvVars(requiredEnv){
// Ensure required ENV vars are set // Ensure required ENV vars are set
let unsetEnv = requiredEnv.filter((env) => !(typeof process.env[env] !== 'undefined')); let unsetEnv = requiredEnv.filter((env) => (typeof process.env[env] === "undefined"));
if (unsetEnv.length > 0) { if (unsetEnv.length > 0) {
return "Required ENV variables are not set: [" + unsetEnv.join(', ') + "]"; return "Required ENV variables are not set: [" + unsetEnv.join(", ") + "]";
} }
} }

@ -1,7 +1,7 @@
const logger = require("./logger.js")("exec"); const logger = require("./logger.js")("exec");
const { spawn } = require("child_process"); const { spawn } = require("child_process");
const { parseArgsStringToArgv } = require('string-argv'); const { parseArgsStringToArgv } = require("string-argv");
function exec(cmd, options){ function exec(cmd, options){

@ -21,4 +21,4 @@ function bytesToHex(bytes) {
module.exports = { module.exports = {
hexToBytes, hexToBytes,
bytesToHex bytesToHex
} };

@ -1,8 +1,8 @@
const logger = require.main.require("./helper/logger.js")("influx-checks"); const logger = require.main.require("./helper/logger.js")("influx-checks");
const Os = require("os"); const Os = require("os");
const { InfluxDB, Point } = require('@influxdata/influxdb-client') const { InfluxDB, Point } = require("@influxdata/influxdb-client")
const Influx = require('@influxdata/influxdb-client-apis'); const Influx = require("@influxdata/influxdb-client-apis");
function checkHealth(influxDb){ function checkHealth(influxDb){
@ -39,7 +39,7 @@ function checkBucket(influxDb, options){
function checkWriteApi(influxDb, options){ function checkWriteApi(influxDb, options){
return new Promise((resolve, reject) => { return new Promise((resolve, reject) => {
const writeApi = influxDb.getWriteApi(options.org, options.bucket); // Get WriteAPI const writeApi = influxDb.getWriteApi(options.org, options.bucket); // Get WriteAPI
writeApi.writePoint(new Point("worker_connectionTest").tag("hostname", Os.hostname())) // Write point writeApi.writePoint(new Point("worker_connectionTest").tag("hostname", Os.hostname())); // Write point
writeApi.close() writeApi.close()
.catch((err) => { .catch((err) => {
logger.error("Could not get writeApi:"); logger.error("Could not get writeApi:");

@ -5,15 +5,15 @@ function detectStreamData(stream, timeout = 5000){
let timeoutHandler; let timeoutHandler;
if(timeout){ if(timeout){
timeoutHandler = setTimeout(() => { timeoutHandler = setTimeout(() => {
reject('timeout'); reject("timeout");
remListeners(); remListeners();
}, },
timeout); timeout);
} }
function remListeners(){ function remListeners(){
stream.removeListener('error', errorHandler); stream.removeListener("error", errorHandler);
stream.removeListener('data', dataHandler); stream.removeListener("data", dataHandler);
if(timeoutHandler) clearTimeout(timeoutHandler); if(timeoutHandler) clearTimeout(timeoutHandler);
} }
@ -25,8 +25,8 @@ function detectStreamData(stream, timeout = 5000){
remListeners(); remListeners();
} }
stream.on('error', errorHandler); stream.on("error", errorHandler);
stream.on('data', dataHandler); stream.on("data", dataHandler);
}); });
} }
@ -34,7 +34,7 @@ function detectStreamsData(streams, timeout = 5000){
let promises = []; let promises = [];
streams.forEach((stream) => { streams.forEach((stream) => {
promises.push(detectStreamData(stream, timeout)); promises.push(detectStreamData(stream, timeout));
}) });
return promises; return promises;
} }

@ -1,4 +1,4 @@
const { HandshakeStage } = require.main.require('./dto/Packet.js'); const { HandshakeStage } = require.main.require("./dto/Packet.js");
function keyInfoFromRaw(keyInfoRaw) { function keyInfoFromRaw(keyInfoRaw) {
return { return {
@ -27,7 +27,7 @@ function handshakeStageFromKeyInfo(keyInfo){
// Extract compare-keys // Extract compare-keys
let keyData = ""; let keyData = "";
for (const key of HANDSHAKE_STAGE_KEYINFO['keys']) { for (const key of HANDSHAKE_STAGE_KEYINFO["keys"]) {
keyData += keyInfo[key].toString(); keyData += keyInfo[key].toString();
} }

@ -5,14 +5,17 @@ const logger = logFactory("main");
const { requireEnvVars } = require("./helper/env.js"); const { requireEnvVars } = require("./helper/env.js");
const { exit } = require("process"); const { exit } = require("process");
const { exec } = require("./helper/exec.js"); const { exec } = require("./helper/exec.js");
const Os = require("os");
const { InfluxDB } = require('@influxdata/influxdb-client'); const { InfluxDB } = require("@influxdata/influxdb-client");
const InfluxChecks = require('./helper/influx-checks.js'); const InfluxChecks = require("./helper/influx-checks.js");
const { RegexBlockStream } = require("./streamHandler/RegexBlockStream.js"); const { RegexBlockStream } = require("./streamHandler/RegexBlockStream.js");
const { PacketStreamFactory } = require("./streamHandler/PacketStreamFactory.js"); const { PacketStreamFactory } = require("./streamHandler/PacketStreamFactory.js");
const { PacketInfluxPointFactory } = require("./streamHandler/PacketInfluxPointFactory.js"); const { PacketInfluxPointFactory } = require("./streamHandler/PacketInfluxPointFactory.js");
const { InfluxPointWriter } = require("./streamHandler/InfluxPointWriter.js"); const { InfluxPointWriter } = require("./streamHandler/InfluxPointWriter.js");
const { InfluxDbLineProtocolWriter } = require("./streamHandler/InfluxDbLineProtocolWriter.js");
const { InfluxPointToLineProtoStream } = require("./streamHandler/InfluxPointToLineProtoStream.js");
const userHelper = require("./helper/userHelper.js"); const userHelper = require("./helper/userHelper.js");
@ -23,18 +26,26 @@ const env = process.env;
{ {
env.LOGLEVEL ??= "INFO"; env.LOGLEVEL ??= "INFO";
env.WIFI_INTERFACE ??= "wlan0"; env.WIFI_INTERFACE ??= "wlan0";
env.HOSTNAME ??= Os.hostname();
env.USE_INFLUXDB_LINEPROTOCOL ??= false;
} }
// Required vars // Required vars
let errorMsg = requireEnvVars([ let errorMsg = requireEnvVars(
env.USE_INFLUXDB_LINEPROTOCOL? [ // When lineprotocol is enabled, we need host and port
"INFLUXDB_LINEPROTOCOL_HOST", "INFLUXDB_LINEPROTOCOL_PORT",
] : [ // When its disabled, influxdb-data
"INFLUX_URL", "INFLUX_TOKEN", "INFLUX_URL", "INFLUX_TOKEN",
"INFLUX_ORG", "INFLUX_BUCKET" "INFLUX_ORG", "INFLUX_BUCKET"
]); ]);
if(errorMsg){ if(errorMsg){
logger.fatal(errorMsg); logger.fatal(errorMsg);
exit(1); exit(1);
} }
(async function() { (async function() {
let pointWriter;
if(!env.USE_INFLUXDB_LINEPROTOCOL){
logger.info("Setup Influx.."); logger.info("Setup Influx..");
const influxDb = new InfluxDB({url: env.INFLUX_URL, token: env.INFLUX_TOKEN}); const influxDb = new InfluxDB({url: env.INFLUX_URL, token: env.INFLUX_TOKEN});
@ -42,11 +53,11 @@ if(errorMsg){
.then((res) => {return InfluxChecks.checkBucket(influxDb, { .then((res) => {return InfluxChecks.checkBucket(influxDb, {
org: env.INFLUX_ORG, org: env.INFLUX_ORG,
name: env.INFLUX_BUCKET name: env.INFLUX_BUCKET
})}) });})
.then((res) => {return InfluxChecks.checkWriteApi(influxDb, { .then((res) => {return InfluxChecks.checkWriteApi(influxDb, {
org: env.INFLUX_ORG, org: env.INFLUX_ORG,
bucket: env.INFLUX_BUCKET bucket: env.INFLUX_BUCKET
})}) });})
.catch((err) => { .catch((err) => {
if(err) { if(err) {
logger.error("Error whilst checking influx:"); logger.error("Error whilst checking influx:");
@ -56,10 +67,52 @@ if(errorMsg){
exit(1); exit(1);
}); });
logger.debug("Get WriteApi & set default-hostname to", `'${env.HOSTNAME}'`);
const influxWriteApi = influxDb.getWriteApi(env.INFLUX_ORG, env.INFLUX_BUCKET, "us");
//influxWriteApi.useDefaultTags({"hostname": env.HOSTNAME});
pointWriter = new InfluxPointWriter(influxWriteApi);
logger.info("Influx ok"); logger.info("Influx ok");
}
else {
logger.info("Setup Influxdb-LineProtocol..");
let lineProtocolWriter = new InfluxDbLineProtocolWriter(env.INFLUXDB_LINEPROTOCOL_HOST, env.INFLUXDB_LINEPROTOCOL_PORT);
logger.debug("Create PointToLineProto and pipe to LineProtocolWriter");
pointWriter = new InfluxPointToLineProtoStream();
pointWriter
.setEncoding("utf8")
.pipe(lineProtocolWriter);
logger.debug("Waiting for connection..");
await new Promise((resolve, reject) => {
lineProtocolWriter.once("connect", () => {
resolve();
});
lineProtocolWriter.once("error", (err) => {
reject(err);
});
setTimeout(() => { // After timeout, reject promise
reject("Timeout whilst waiting to connect");
}, 6500);
})
.then(() => {
logger.info("Influxdb-LineProtocol ok");
})
.catch((err) => {
if(err) {
logger.error("Error whilst checking Influxdb-LineProtocol:");
logger.error(err);
}
logger.fatal("Setup Influxdb-LineProtocol failed!");
exit(1);
});
}
logger.info("Starting tcpdump.."); logger.info("Starting tcpdump..");
const TCPDUMP_BASECMD = "tcpdump -vvv -e -n -X -s0 -i" const TCPDUMP_BASECMD = "tcpdump -vvv -e -n -X -s0 -i";
let cmd = `${TCPDUMP_BASECMD} ${env.WIFI_INTERFACE}`; let cmd = `${TCPDUMP_BASECMD} ${env.WIFI_INTERFACE}`;
let proc = exec(cmd); let proc = exec(cmd);
@ -67,24 +120,37 @@ if(errorMsg){
let regexBlockStream = new RegexBlockStream(/^\d{2}:\d{2}:\d{2}.\d{6}.*(\n( {4,8}|\t\t?).*)+\n/gm); let regexBlockStream = new RegexBlockStream(/^\d{2}:\d{2}:\d{2}.\d{6}.*(\n( {4,8}|\t\t?).*)+\n/gm);
let packetStreamFactory = new PacketStreamFactory(); let packetStreamFactory = new PacketStreamFactory();
let packetInfluxPointFactory = new PacketInfluxPointFactory(); let packetInfluxPointFactory = new PacketInfluxPointFactory();
let influxPointWriter = new InfluxPointWriter(influxDb, env.INFLUX_ORG, env.INFLUX_BUCKET);
proc.stdout proc.stdout
.setEncoding("utf8") .setEncoding("utf8")
.pipe(regexBlockStream) .pipe(regexBlockStream)
.pipe(packetStreamFactory) .pipe(packetStreamFactory)
.pipe(packetInfluxPointFactory) .pipe(packetInfluxPointFactory)
.pipe(influxPointWriter); .pipe(pointWriter);
logger.debug("Attaching error-logger.."); logger.debug("Attaching error-logger..");
const loggerTcpdump = logFactory("tcpdump"); const loggerTcpdump = logFactory("tcpdump");
let linkTypeId;
proc.stderr.setEncoding("utf8").on("data", (data) => { proc.stderr.setEncoding("utf8").on("data", (data) => {
if(!data.match(/^(tcpdump: )?listening on /i)) // Catch start-error if(data.match(/^(tcpdump: )?listening on /i) || data.match(/^\d+ packets captured/i)) { // Catch start-error
loggerTcpdump.error(data); loggerTcpdump.debug(data);
if(!linkTypeId && data.match(/^(tcpdump: )?listening on/i)){ // Grab first data containing listen-info if proper header was found
const linkType = data.match(/((?<=link-type ))([a-z].*?) \(.*?\)(?=,)/i)[0];
const linkTypeData = linkType.match(/(\S*) (.*)/i);
linkTypeId = linkTypeData[1];
const linkTypeDetail = linkTypeData[2];
if(linkTypeId !== "IEEE802_11_RADIO"){
logger.error(`Interface not in Monitor-mode! (Expected 'IEEE802_11_RADIO', but got '${linkTypeId}')`);
shutdown(1, "SIGKILL");
}
}
}
else loggerTcpdump.error(data);
}); });
regexBlockStream.on('error', (err) => { // FIXME: This is a hacky workaround to not let errors from subprocess bubble up and terminate our process
if(err) loggerTcpdump.error(err); regexBlockStream.on("error", (err) => {});
})
proc.on("error", (err) => { proc.on("error", (err) => {
loggerTcpdump.error(err); loggerTcpdump.error(err);
@ -99,11 +165,11 @@ if(errorMsg){
loggerPacketStream.debug("Got first packet"); loggerPacketStream.debug("Got first packet");
}) })
.catch((err) => { .catch((err) => {
if(err == 'timeout') loggerPacketStream.warn("No packets"); if(err == "timeout") loggerPacketStream.warn("No packets");
}); });
}) })
.catch((err) => { .catch((err) => {
if(err == 'timeout') loggerTcpdump.warn("No data after 10s! Wrong configuration?"); if(err == "timeout") loggerTcpdump.warn("No data after 10s! Wrong configuration?");
}); });
logger.debug("Attaching exit-handler.."); logger.debug("Attaching exit-handler..");
@ -111,20 +177,25 @@ if(errorMsg){
loggerTcpdump.debug(`tcpdump exited code: ${code}`); loggerTcpdump.debug(`tcpdump exited code: ${code}`);
if (code) { if (code) {
loggerTcpdump.fatal(`tcpdump exited with non-zero code: ${code}`); loggerTcpdump.fatal(`tcpdump exited with non-zero code: ${code}`);
exit(1); if(!exitCode) exitCode = 1; // When exitCode is 0, set to 1
} }
logger.info("Shutdown"); logger.info("Shutdown");
exit(0); exit(exitCode);
}); });
// Handle stop-signals for graceful shutdown // Handle stop-signals for graceful shutdown
var exitCode = 0;
function shutdownReq() { function shutdownReq() {
logger.info("Shutdown request received.."); logger.info("Shutdown request received..");
shutdown();
}
function shutdown(code, signal = "SIGTERM"){
if(code) exitCode = code;
logger.debug("Stopping subprocess tcpdump, then exiting myself.."); logger.debug("Stopping subprocess tcpdump, then exiting myself..");
proc.kill(); // Kill process (send SIGTERM), then upper event-handler will stop self proc.kill(signal); // Kill process, then upper event-handler will stop self
} }
process.on('SIGTERM', shutdownReq); process.on("SIGTERM", shutdownReq);
process.on('SIGINT', shutdownReq); process.on("SIGINT", shutdownReq);
logger.info("Startup complete"); logger.info("Startup complete");
})(); })();

@ -0,0 +1,72 @@
const logger = require.main.require("./helper/logger.js")("InfluxDbLineProtocolWriter");
const net = require("net");
/**
* Get points and write them into influx
*/
class InfluxDbLineProtocolWriter extends net.Socket{
/**
*
* @param {string} host Host of line-server
* @param {string} port Port of line-server
* @param {object} options Options for further configuration
*/
constructor(host, port, options = {}) {
super();
this._host = host;
this._port = port;
// options defaults
options.autoConnect ??= true;
options.timeout ??= 5000;
options.autoReconnect ??= true;
options.autoReconnectBackoffTime ??= 3000;
this._options = options;
this._isConnected = false;
super.setKeepAlive(true, 5000);
// Register auto-Reconnect if enabled
if(this._options.autoReconnect){
this.on("connect", () => {
logger.debug("Connection established!");
this._isConnected = true;
if(this._autoReconnectTimeout)
clearInterval(this._autoReconnectTimeout);
this._autoReconnectTimeout = 0;
});
this.on("error", (err) => {
logger.error(err.code, "TCP ERROR");
this._isConnected = false;
if(!this._autoReconnectTimeout)
this._autoReconnectTimeout = setInterval(() => {
this.connect();
},
this._options.autoReconnectBackoffTime);
});
}
// Autoconnect if requested
if(this._options.autoConnect) this.connect();
}
get host(){ return this._host; }
get port(){ return this._port; }
get isConnected(){ return this._isConnected; }
connect(){
logger.debug("Connecting..");
super.connect(this._port, this._host);
}
}
// Specify exports
module.exports = {
InfluxDbLineProtocolWriter
};

@ -0,0 +1,22 @@
const logger = require.main.require("./helper/logger.js")("InfluxPointToLineProtoStream");
const { Transform } = require("stream");
/**
* Get points and converts them to Line-protocol
*/
class InfluxPointToLineProtoStream extends Transform{
constructor(){
super({
writableObjectMode: true
});
}
_transform(point, encoding, next){
next(null, point.toLineProtocol() +"\n");
}
}
// Specify exports
module.exports = {
InfluxPointToLineProtoStream
};

@ -1,6 +1,6 @@
const logger = require.main.require("./helper/logger.js")("InfluxPointWriter"); const logger = require.main.require("./helper/logger.js")("InfluxPointWriter");
const { Writable } = require('stream'); const { Writable } = require("stream");
const {InfluxDB, Point, HttpError} = require('@influxdata/influxdb-client') const { WriteApi } = require("@influxdata/influxdb-client");
/** /**
* Get points and write them into influx * Get points and write them into influx
@ -8,16 +8,13 @@ const {InfluxDB, Point, HttpError} = require('@influxdata/influxdb-client')
class InfluxPointWriter extends Writable{ class InfluxPointWriter extends Writable{
/** /**
* *
* @param {InfluxDB} influxDb InfluxDb * @param {WriteApi} writeApi WriteAPI from InfluxDB instance
* @param {string} org Organization to use
* @param {string} bucket Bucket to use
* @param {Partial<WriteOptions>} options Options for WriteApi
*/ */
constructor(influxDb, org, bucket, options){ constructor(writeApi){
super({ super({
objectMode: true objectMode: true
}); });
this._api = influxDb.getWriteApi(org, bucket, 'us', options); this._api = writeApi;
} }
_write(point, encoding, next){ _write(point, encoding, next){

@ -1,6 +1,6 @@
const logger = require.main.require("./helper/logger.js")("PacketStreamFactory"); const logger = require.main.require("./helper/logger.js")("PacketStreamFactory");
const { Transform } = require('stream'); const { Transform } = require("stream");
const {Point} = require('@influxdata/influxdb-client') const {Point} = require("@influxdata/influxdb-client");
/** Keys to always use as tags */ /** Keys to always use as tags */
const TAG_LIST = [ const TAG_LIST = [
@ -14,13 +14,14 @@ const TAG_LIST = [
/** Measurement-name and corresponding field-key */ /** Measurement-name and corresponding field-key */
const MEASUREMENT_MAP = new Map([ const MEASUREMENT_MAP = new Map([
["Signal", "signal"], ["rfmon_signal_dbm", "signal"],
["PayloadSize", "payloadSize"], ["rfmon_payloadsize_bytes", "payloadSize"],
["DataRate", "dataRate"], ["rfmon_datarate_bytes", "dataRate"],
["SSID", "ssid"], ["rfmon_ssid_names", "ssid"],
["AuthenticationType", "authenticationType"], ["rfmon_authenticationtype_info", "authenticationType"],
["AssociationSuccess", "associationIsSuccessful"], ["rfmon_associationsuccess_bools", "associationIsSuccessful"],
["DisassociationReason", "disassociationReason"], ["rfmon_disassociationreason_info", "disassociationReason"],
["rfmon_handshakestage_info", "handshakeStage"],
]); ]);
@ -49,7 +50,7 @@ class PacketInfluxPointFactory extends Transform{
tagObjectRecursively(point, tag, packet[tag]); tagObjectRecursively(point, tag, packet[tag]);
}); });
point.setField('value', packet[objKey]); // Set field point.setField("value", packet[objKey]); // Set field
this.push(point); // Push point into stream this.push(point); // Push point into stream
}); });
@ -65,19 +66,22 @@ function tagObjectRecursively(point, tag, field, suffix = ""){
tagObjectRecursively(point, tag, value, `_${key}${suffix}`); tagObjectRecursively(point, tag, value, `_${key}${suffix}`);
}); });
} }
else point.tag(tag+suffix, field); else {
const name = (tag+suffix).toLowerCase();
point.tag(name, field);
}
} }
/** Mapping for type -> field-method */ /** Mapping for type -> field-method */
const POINT_FIELD_TYPE = new Map([ const POINT_FIELD_TYPE = new Map([
['boolean', function(key, value){ return this.booleanField(key, value); }], ["boolean", function(key, value){ return this.booleanField(key, value); }],
['number', function(key, value){ return this.intField(key, value); }], ["number", function(key, value){ return this.intField(key, value); }],
['string', function(key, value){ return this.stringField(key, value); }], ["string", function(key, value){ return this.stringField(key, value); }],
]); ]);
Point.prototype.setField = function(key, value){ Point.prototype.setField = function(key, value){
let setField = POINT_FIELD_TYPE.get(typeof value); let setField = POINT_FIELD_TYPE.get(typeof value);
return setField.apply(this, [key, value]); return setField.apply(this, [key, value]);
} };
// Specify exports // Specify exports
module.exports = { module.exports = {

@ -1,7 +1,7 @@
const logger = require.main.require("./helper/logger.js")("PacketStreamFactory"); const logger = require.main.require("./helper/logger.js")("PacketStreamFactory");
const { Transform } = require('stream'); const { Transform } = require("stream");
const { DateTime } = require("luxon"); const { DateTime } = require("luxon");
const { PacketType, FlagType, Packet, PacketWithSSID, BeaconPacket, ProbeRequestPacket, ProbeResponsePacket, AuthenticationPacket, AuthenticationType, AssociationResponsePacket, DisassociationPacket, HandshakePacket, HandshakeStage } = require.main.require('./dto/Packet.js'); const { PacketType, FlagType, Packet, PacketWithSSID, BeaconPacket, ProbeRequestPacket, ProbeResponsePacket, AuthenticationPacket, AuthenticationType, AssociationResponsePacket, DisassociationPacket, HandshakePacket, HandshakeStage } = require.main.require("./dto/Packet.js");
const hexConv = require.main.require("./helper/hexConverter.js"); const hexConv = require.main.require("./helper/hexConverter.js");
const wifiStateAnalyser = require.main.require("./helper/wifiStateAnalyzer.js"); const wifiStateAnalyser = require.main.require("./helper/wifiStateAnalyzer.js");
@ -20,20 +20,20 @@ const PACKET_TYPE_MAP = {
"Disassociation:": PacketType.Disassociation, "Disassociation:": PacketType.Disassociation,
"EAPOL": PacketType.Handshake, "EAPOL": PacketType.Handshake,
}; };
const PACKET_TYPES_REGEX = Object.keys(PACKET_TYPE_MAP).join('|'); const PACKET_TYPES_REGEX = Object.keys(PACKET_TYPE_MAP).join("|");
const AUTHENTICATION_TYPE_MAP = { const AUTHENTICATION_TYPE_MAP = {
"(Open System)-1": AuthenticationType.OpenSystem_1, "(Open System)-1": AuthenticationType.OpenSystem_1,
"(Open System)-2": AuthenticationType.OpenSystem_2, "(Open System)-2": AuthenticationType.OpenSystem_2,
} };
const FLAG_TYPE_MAP = { const FLAG_TYPE_MAP = {
"Retry": FlagType.Retry, "Retry": FlagType.Retry,
"Pwr Mgmt": FlagType.PwrMgt, "Pwr Mgmt": FlagType.PwrMgt,
"More Data": FlagType.MoreData, "More Data": FlagType.MoreData,
"Protected": FlagType.Protected, "Protected": FlagType.Protected,
} };
const FLAG_TYPE_MAPS_REGEX = Object.keys(FLAG_TYPE_MAP).join('|'); const FLAG_TYPE_MAPS_REGEX = Object.keys(FLAG_TYPE_MAP).join("|");
/** /**
* Read data from text-blocks and convert them to Packet * Read data from text-blocks and convert them to Packet
@ -47,10 +47,17 @@ class PacketStreamFactory extends Transform{
} }
_transform(chunk, encoding, next){ _transform(chunk, encoding, next){
if(!chunk){
const err = "Chunk was invalid!";
logger.error(err);
next(err);
return;
}
let packet = new Packet(); let packet = new Packet();
const lines = chunk.split('\n'); const lines = chunk.split("\n");
const header = lines.splice(0, 1)[0]; // Grab first line, 'lines' is now the payload const header = lines.splice(0, 1)[0]; // Grab first line, "lines" is now the payload
packet = this._handleHeader(packet, header); packet = this._handleHeader(packet, header);
packet = this._handlePayload(packet, lines); packet = this._handlePayload(packet, lines);
@ -62,7 +69,7 @@ class PacketStreamFactory extends Transform{
packet.timestampMicros = DateTime.fromISO(data.slice(0, 12)).toSeconds() + data.slice(12, 15)/1000000; packet.timestampMicros = DateTime.fromISO(data.slice(0, 12)).toSeconds() + data.slice(12, 15)/1000000;
// Find flags // Find flags
data.match(data.match(new RegExp('(?<=^|\\s)('+ FLAG_TYPE_MAPS_REGEX +')(?=$|\\s)', 'ig')) data.match(data.match(new RegExp("(?<=^|\\s)("+ FLAG_TYPE_MAPS_REGEX +")(?=$|\\s)", "ig"))
?.forEach(match => packet.flags[FLAG_TYPE_MAP[match]] = true) // Set them to true in flags ?.forEach(match => packet.flags[FLAG_TYPE_MAP[match]] = true) // Set them to true in flags
); );
@ -73,11 +80,11 @@ class PacketStreamFactory extends Transform{
packet.signal = Number(data.match(/(?<=^|\s)-\d{2,3}(?=dBm\sSignal($|\s))/i)?.[0]) || null; packet.signal = Number(data.match(/(?<=^|\s)-\d{2,3}(?=dBm\sSignal($|\s))/i)?.[0]) || null;
let packetTypeStr = data.match(new RegExp('(?<=^|\\s)('+ PACKET_TYPES_REGEX +')(?=$|\\s)', 'i'))?.[0]; let packetTypeStr = data.match(new RegExp("(?<=^|\\s)("+ PACKET_TYPES_REGEX +")(?=$|\\s)", "i"))?.[0];
if(packetTypeStr) if(packetTypeStr)
packet.packetType = PACKET_TYPE_MAP[packetTypeStr]; packet.packetType = PACKET_TYPE_MAP[packetTypeStr];
else if(data.match(/(SA|TA|DA|RA|BSSID):.{17}\s*$/i)){ else if(data.match(/(SA|TA|DA|RA|BSSID):.{17}\s*$/i)){
packet.packetType = PacketType.NoData packet.packetType = PacketType.NoData;
} }
else { else {
packet.packetType = PacketType.Unknown; packet.packetType = PacketType.Unknown;
@ -97,12 +104,12 @@ class PacketStreamFactory extends Transform{
case PacketType.ProbeResponse: case PacketType.ProbeResponse:
case PacketType.AssociationRequest: case PacketType.AssociationRequest:
newPacket = new PacketWithSSID(); newPacket = new PacketWithSSID();
newPacket.ssid = data.match(new RegExp('(?<=(^|\\s)'+ packetTypeStr +'\\s\\().{0,32}(?=\\)($|\\s))', 'i'))?.[0] ?? null; newPacket.ssid = data.match(new RegExp("(?<=(^|\\s)"+ packetTypeStr +"\\s\\().{0,32}(?=\\)($|\\s))", "i"))?.[0] ?? null;
break; break;
case PacketType.Authentication: case PacketType.Authentication:
newPacket = new AuthenticationPacket(); newPacket = new AuthenticationPacket();
newPacket.authenticationType = AUTHENTICATION_TYPE_MAP[data.match(/(?<=(^|\s)Authentication\s).{3,}(?=\:(\s|$))/i)[0]] ?? AuthenticationType.Unknown; newPacket.authenticationType = AUTHENTICATION_TYPE_MAP[data.match(/(?<=(^|\s)Authentication\s).{3,}(?=:(\s|$))/i)[0]] ?? AuthenticationType.Unknown;
break; break;
case PacketType.AssociationResponse: case PacketType.AssociationResponse:
@ -121,16 +128,16 @@ class PacketStreamFactory extends Transform{
} }
_handlePayload(packet, data){ _handlePayload(packet, data){
data = data.join(''); data = data.join("");
// Get payload-Hex-Data. If there is no data: empty // Get payload-Hex-Data. If there is no data: empty
packet.payloadData = hexConv.hexToBytes(data.match(/(?<=\s)([A-F0-9]{1,4}(?=\s))/igm)?.join('') ?? ''); packet.payloadData = hexConv.hexToBytes(data.match(/(?<=\s)([A-F0-9]{1,4}(?=\s))/igm)?.join("") ?? "");
packet.payloadData.splice(packet.payloadData.length-4, 4); // Remove FrameCheck sequence packet.payloadData.splice(packet.payloadData.length-4, 4); // Remove FrameCheck sequence
// Cover special cases with more data // Cover special cases with more data
let newPacket; let newPacket;
switch(packet.packetType){ switch(packet.packetType){
case PacketType.Handshake: case PacketType.Handshake: {
newPacket = new HandshakePacket(); newPacket = new HandshakePacket();
// Read key-information // Read key-information
@ -140,6 +147,7 @@ class PacketStreamFactory extends Transform{
newPacket.handshakeStage = wifiStateAnalyser.handshakeStageFromKeyInfo(keyInfo); // Get stage newPacket.handshakeStage = wifiStateAnalyser.handshakeStageFromKeyInfo(keyInfo); // Get stage
break; break;
} }
}
if(newPacket) packet = Object.assign(newPacket, packet); if(newPacket) packet = Object.assign(newPacket, packet);
return packet; return packet;

@ -1,5 +1,5 @@
const logger = require.main.require("./helper/logger.js")("RegexBlockStream"); const logger = require.main.require("./helper/logger.js")("RegexBlockStream");
const { Transform } = require('stream') const { Transform } = require("stream");
/** /**
* Matches whole blocks as regex and passes them on * Matches whole blocks as regex and passes them on
@ -27,7 +27,7 @@ class RegexBlockStream extends Transform{
} }
_transform(chunk, encoding, next){ _transform(chunk, encoding, next){
chunk = this.readableBuffer.length? this.readableBuffer.join('') + chunk: chunk; // Add previous buffer to current chunk chunk = this.readableBuffer.length? this.readableBuffer.join("") + chunk: chunk; // Add previous buffer to current chunk
this.readableBuffer.length && this.readableBuffer.clear(); // Clear buffer once we read it this.readableBuffer.length && this.readableBuffer.clear(); // Clear buffer once we read it
let matches = chunk.match(this.matcher); // Match let matches = chunk.match(this.matcher); // Match
@ -44,17 +44,17 @@ class RegexBlockStream extends Transform{
if(matches){ if(matches){
matches.forEach((match) => { matches.forEach((match) => {
this.push(match); // Write match to stream this.push(match); // Write match to stream
if(chunk) chunk = chunk.replace(match, ''); // Remove match from chunks if(chunk) chunk = chunk.replace(match, ""); // Remove match from chunks
}); });
} }
if(chunk) return chunk; if(chunk) return chunk;
} }
_flush(next){ _flush(next){
if(matchAllOnFlush){ // When requested, we'll match one last time over the remaining buffer if(this.matchAllOnFlush){ // When requested, we'll match one last time over the remaining buffer
let chunk = this.readableBuffer.join(''); let chunk = this.readableBuffer.join("");
let matches = chunk.match(this.matcher); // Match remaining buffer let matches = chunk.match(this.matcher); // Match remaining buffer
_writeMatches(matches); // Write matches including last element this._writeMatches(matches); // Write matches including last element
} }
next(); // Tell system we are done next(); // Tell system we are done
