Compare commits
173 Commits
@ -0,0 +1,3 @@
|
||||
node_modules
|
||||
npm-debug.log
|
||||
.*
|
@ -0,0 +1,29 @@
|
||||
# ---- Base ----
|
||||
FROM alpine:3 AS base
|
||||
|
||||
# Create app directory
|
||||
WORKDIR /usr/src/app
|
||||
|
||||
# Copy project file
|
||||
COPY package.json .
|
||||
|
||||
# Install required apk-packages
|
||||
RUN apk add --no-cache nodejs npm tcpdump
|
||||
|
||||
|
||||
# ---- Dependencies ----
|
||||
FROM base AS dependencies
|
||||
|
||||
# Install app dependencies
|
||||
RUN npm install --only=production
|
||||
|
||||
|
||||
# ---- Release ----
|
||||
FROM base AS release
|
||||
|
||||
# Copy installed dependencies from the dependencies-stage
|
||||
COPY --from=dependencies /usr/src/app/ ./
|
||||
# Bundle app source
|
||||
COPY ./src/ .
|
||||
|
||||
CMD ["npm", "run", "start"]
|
@ -1,3 +1,419 @@
|
||||
# rfmon-to-influx
|
||||
|
||||
|
||||
![](docs/img/header0.png)
|
||||
*Successful Associations, grouped by AP within 24h*
|
||||
|
||||
<br>
|
||||
|
||||
Writing (mostly meta-) data received in Wireless-Monitor-Mode into an InfluxDB.
|
||||
|
||||
<br>
|
||||
|
||||
Table of contents
|
||||
=================
|
||||
<!-- TOC -->
|
||||
- [1. Description](#1-description)
|
||||
- [1.1. What kind of data](#11-what-kind-of-data)
|
||||
- [1.2. Data-Usage](#12-data-usage)
|
||||
- [1.3. Tools used](#13-tools-used)
|
||||
- [1.4. Architecture](#14-architecture)
|
||||
- [2. Usage/Installation](#2-usageinstallation)
|
||||
- [2.1. Prerequisites](#21-prerequisites)
|
||||
- [2.2. Choosing an Export-Method](#22-choosing-an-export-method)
|
||||
- [2.3. Running with Docker](#23-running-with-docker)
|
||||
- [2.4. Environment-Variables](#24-environment-variables)
|
||||
- [3. Data collected](#3-data-collected)
|
||||
- [3.1. Data-Types](#31-data-types)
|
||||
- [3.2. Metric-Overview](#32-metric-overview)
|
||||
- [3.3. Metric-Details](#33-metric-details)
|
||||
- [3.4. Tag-Overview](#34-tag-overview)
|
||||
- [3.5. Tag-Details](#35-tag-details)
|
||||
- [4. Potential Issues](#4-potential-issues)
|
||||
- [4.1. Channel/Frequency](#41-channelfrequency)
|
||||
- [4.2. Technology](#42-technology)
|
||||
- [4.3. Data protection](#43-data-protection)
|
||||
- [4.4. Ethical](#44-ethical)
|
||||
<!-- /TOC -->
|
||||
<br>
|
||||
|
||||
# 1. Description
|
||||
|
||||
This program listens on a Wifi-Interface in Monitor-Mode (rfmon) and logs most of the observed activity into an InfluxDB or InfluxDB-like time-series database.
|
||||
|
||||
<br>
|
||||
|
||||
## 1.1. What kind of data
|
||||
|
||||
**Any** packet sent by a router or station nearby is received and its metadata is collected and categorised.
|
||||
|
||||
The host does **not** have to be part of that network.
|
||||
|
||||
<br>
|
||||
|
||||
## 1.2. Data-Usage
|
||||
|
||||
The data can be used to identify problems with nearby wifi-communication,
|
||||
e.g.
|
||||
- Wifi-Congestion at certain times of the day
|
||||
- occurring signal-issues
|
||||
- e.g. due to broken Microwave-Ovens disrupting communications
|
||||
- or moving big Objects (e.g. Machines) causing signal-reduction.
|
||||
|
||||
<br>
|
||||
|
||||
As well as gaining knowledge about installed routers and user-interaction with them,
|
||||
e.g.
|
||||
- in a company environment
|
||||
    - Logging presence and activity of interconnected machines
|
||||
    - Finding unauthorised Access-Points which could potentially disrupt Production-Lines
|
||||
|
||||
<br>
|
||||
|
||||
Other uses might be threat-detection at the Wifi-level,
|
||||
e.g.
|
||||
- Deauthentication-Attacks
|
||||
- Bruteforce-Attempts
|
||||
|
||||
<br>
|
||||
|
||||
## 1.3. Tools used
|
||||
|
||||
The program runs `tcpdump` in a subprocess for listening and then extracts the metadata when packets arrive.
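
A condensed sketch of how this looks internally (simplified from `helper/exec.js` and `main.js`; the tcpdump-flags are the ones assembled in `main.js`, the interface-name is an example):

```js
const { spawn } = require("child_process");

// Simplified sketch: main.js assembles this command and helper/exec.js spawns it
const TCPDUMP_BASECMD = "tcpdump -vvv -e -n -X -s0 -i";
const [bin, ...args] = `${TCPDUMP_BASECMD} wlan0`.split(" ");

const proc = spawn(bin, args);
proc.stdout.setEncoding("utf8").on("data", (data) => {
    // Each chunk is (part of) tcpdump's verbose text-output,
    // which is later split into per-packet blocks and parsed for metadata
    console.log(data);
});
```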
|
||||
|
||||
|
||||
<br>
|
||||
|
||||
## 1.4. Architecture
|
||||
|
||||
![](docs/img/1.4.architecture.png)
|
||||
|
||||
The system heavily uses NodeJS-Streams to read, transform and pass data around.
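
Condensed from `main.js`, the core pipeline looks roughly like this (setup, checks and error-handling omitted; `proc` is the spawned tcpdump-process and `influxWriteApi` the WriteApi created during setup):

```js
const { RegexBlockStream } = require("./streamHandler/RegexBlockStream.js");
const { PacketStreamFactory } = require("./streamHandler/PacketStreamFactory.js");
const { PacketInfluxPointFactory } = require("./streamHandler/PacketInfluxPointFactory.js");
const { InfluxPointWriter } = require("./streamHandler/InfluxPointWriter.js");

proc.stdout                                   // raw tcpdump text-output
    .setEncoding("utf8")
    .pipe(new RegexBlockStream(/^\d{2}:\d{2}:\d{2}.\d{6}.*(\n( {4,8}|\t\t?).*)+\n/gm)) // split into per-packet text-blocks
    .pipe(new PacketStreamFactory())          // text-block -> Packet-object
    .pipe(new PacketInfluxPointFactory())     // Packet-object -> influx-points
    .pipe(new InfluxPointWriter(influxWriteApi)); // write points via the Influx WriteApi
```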
|
||||
|
||||
<br>
|
||||
|
||||
# 2. Usage/Installation
|
||||
|
||||
## 2.1. Prerequisites
|
||||
|
||||
The Wifi-Interface cannot be used by anything else at the same time, e.g. Network-Manager.
|
||||
(Packet-capture e.g. tcpdump or Wireshark is ok)
|
||||
|
||||
As of this version, the program does **not** set the interface into monitor-mode or change channels.
|
||||
|
||||
<br>
|
||||
|
||||
### 2.1.1. Interface into Monitor-Mode (rfmon)
|
||||
|
||||
You can change into Monitor-mode beforehand with the packages `net-tools` and `wireless-tools`:
|
||||
```sh
|
||||
ifconfig <interface> down
|
||||
iwconfig <interface> mode Monitor
|
||||
ifconfig <interface> up
|
||||
```
|
||||
|
||||
<br>
|
||||
|
||||
### 2.1.2. Set/Change channels
|
||||
|
||||
You can set the channel of the interface (if the interface allows this) with the `iw` package:
|
||||
```sh
|
||||
iw dev <interface> set channel <channelNumber>
|
||||
```
|
||||
|
||||
<br>
|
||||
|
||||
## 2.2. Choosing an Export-Method
|
||||
|
||||
The system allows exporting directly into [InfluxDB](https://docs.influxdata.com/influxdb) version >= 2.0 or into any system using the [InfluxDB-Line-Protocol](https://docs.influxdata.com/influxdb/v2.1/reference/syntax/line-protocol/), e.g. [QuestDB](https://questdb.io/), over TCP.
|
||||
|
||||
As of writing (InfluxDB v2.1, queried with the *flux*-language), the amount of data written by this system was a bit too much for InfluxDB, and it struggled very quickly even on a fairly beefy machine.
|
||||
|
||||
That's why the additional LineProtocol-Export-Method was added, leaving the choice of time-database up to you.
|
||||
|
||||
<br>
|
||||
|
||||
If you want to use the InfluxDB-Line-Protocol, simply set the environment variable `USE_INFLUXDB_LINEPROTOCOL` to `true` along with the other necessary Host- and Port-variables.
|
||||
|
||||
<br>
|
||||
|
||||
## 2.3. Running with Docker
|
||||
|
||||
### 2.3.1. Permissions
|
||||
|
||||
The container must run as **root** to have permission to listen on the wifi-interface.
|
||||
|
||||
<br>
|
||||
|
||||
### 2.3.2. docker run
|
||||
|
||||
Either run directly with docker:
|
||||
|
||||
<details><summary>for InfluxDB</summary>
|
||||
|
||||
```sh
|
||||
docker run \
    -d \
    --restart unless-stopped \
    --network host \
    -e WIFI_INTERFACE="<yourInterfaceName or leave empty for wlan0>" \
    -e INFLUX_URL="http://influxdb:8086/" \
    -e INFLUX_TOKEN="<yourToken>" \
    -e INFLUX_ORG="<yourOrganisation>" \
    -e INFLUX_BUCKET="<yourBucket>" \
    ruakij/rfmon-to-influx:2
|
||||
```
|
||||
</details>
|
||||
|
||||
<details><summary>for InfluxDB-Line-Protocol</summary>
|
||||
|
||||
```sh
|
||||
docker run \
    -d \
    --restart unless-stopped \
    --network host \
    -e WIFI_INTERFACE="<yourInterfaceName or leave empty for wlan0>" \
    -e USE_INFLUXDB_LINEPROTOCOL="true" \
    -e INFLUXDB_LINEPROTOCOL_HOST="<host>" \
    -e INFLUXDB_LINEPROTOCOL_PORT="<port>" \
    ruakij/rfmon-to-influx:2
|
||||
```
|
||||
</details>
|
||||
|
||||
<br>
|
||||
|
||||
### 2.3.3. docker-compose
|
||||
|
||||
Or use the preferred way with docker-compose:
|
||||
|
||||
`docker-compose.yml`
|
||||
|
||||
<details><summary>for InfluxDB</summary>
|
||||
|
||||
```yaml
|
||||
version: '3'
|
||||
|
||||
services:
|
||||
rfmon:
|
||||
container_name: rfmon
|
||||
image: ruakij/rfmon-to-influx:2
|
||||
restart: unless-stopped
|
||||
network_mode: "host"
|
||||
environment:
|
||||
- WIFI_INTERFACE="<yourInterfaceName or leave empty for wlan0>"
|
||||
- INFLUX_URL="http://influxdb:8086/"
|
||||
- INFLUX_TOKEN="<yourToken>"
|
||||
- INFLUX_ORG="<yourOrganisation>"
|
||||
- INFLUX_BUCKET="<yourBucket>"
|
||||
```
|
||||
</details>
|
||||
|
||||
<details><summary>for InfluxDB-Line-Protocol</summary>
|
||||
|
||||
```yaml
|
||||
version: '3'
|
||||
|
||||
services:
|
||||
rfmon:
|
||||
container_name: rfmon
|
||||
image: ruakij/rfmon-to-influx:2
|
||||
restart: unless-stopped
|
||||
network_mode: "host"
|
||||
environment:
|
||||
- WIFI_INTERFACE="<yourInterfaceName or leave empty for wlan0>"
|
||||
- USE_INFLUXDB_LINEPROTOCOL="true"
|
||||
- INFLUXDB_LINEPROTOCOL_HOST="<host>"
|
||||
- INFLUXDB_LINEPROTOCOL_PORT="<port>"
|
||||
```
|
||||
</details>
|
||||
|
||||
<br>
|
||||
|
||||
And then pull & start the container:
|
||||
```sh
|
||||
docker-compose up -d
|
||||
```
|
||||
|
||||
<br>
|
||||
|
||||
## 2.4. Environment-Variables
|
||||
|
||||
### 2.4.1. Necessary
|
||||
|
||||
<details><summary>for InfluxDB</summary>
|
||||
|
||||
Variable|Description
|
||||
---|---
|
||||
`INFLUX_URL` | Url of influx-server
|
||||
`INFLUX_TOKEN` | Token with write-access
|
||||
`INFLUX_ORG` | Organisation to write into
|
||||
`INFLUX_BUCKET` | Bucket to write into
|
||||
</details>
|
||||
|
||||
<details><summary>for InfluxDB-Line-Protocol</summary>
|
||||
|
||||
Variable|Description
|
||||
---|---
|
||||
`USE_INFLUXDB_LINEPROTOCOL` | Enable LineProtocol
|
||||
`INFLUXDB_LINEPROTOCOL_HOST` | Host of your line-protocol server
|
||||
`INFLUXDB_LINEPROTOCOL_PORT` | Port of your line-protocol server
|
||||
</details>
|
||||
|
||||
<br>
|
||||
|
||||
### 2.4.2. Optional
|
||||
|
||||
Variable|Default|Description
|
||||
---|---|---
|
||||
`LOGLEVEL` | INFO | Loglevel
|
||||
`WIFI_INTERFACE` | wlan0 | Wifi-Interface name in Monitor-Mode
|
||||
~~`HOSTNAME`~~ | ~~Device's Hostname~~ | ~~Hostname to use as global hostname-tag~~ *(Unused)*
|
||||
|
||||
<br>
|
||||
|
||||
# 3. Data collected
|
||||
|
||||
8 Metrics are constructed with 6-10 tags identifying them.
|
||||
|
||||
<br>
|
||||
|
||||
## 3.1. Data-Types
|
||||
|
||||
Type|Example|Description
|
||||
---|---|---
|
||||
`String` | Wlan | -
|
||||
`Number` | 0 | Any normal number, positive and negative
|
||||
`Boolean` | true | true or false values
|
||||
`MAC` | 12:34:56:78:9A:BC | Address for L2-networks
|
||||
|
||||
<br>
|
||||
|
||||
## 3.2. Metric-Overview
|
||||
---
|
||||
<br>
|
||||
|
||||
Name|Type|Description
|
||||
---|---|---
|
||||
rfmon_signal_dbm | `Number` (-95 <> -20) | Signal-Level of every Packet in dBm
rfmon_payloadsize_bytes | `Number` | Payload-Size of every Packet in bytes
|
||||
rfmon_datarate_bytes | `Number` (1 <> 144) | Data-Rate of every Packet in MBit/s
|
||||
rfmon_ssid_names | `String` (Length: 0-32) | SSIDs of any Packet containing it
|
||||
rfmon_authenticationtype_info | `String` | Authentication-Type used by Sender
|
||||
rfmon_associationsuccess_bools | `Boolean` | Result of an Association
|
||||
rfmon_disassociationreason_info | `String` | Disconnect-Reason from a ST (not always sent)
|
||||
rfmon_handshakestage_info | `Number` (1 <> 4) | Stage of a handshake (1 and 3 from AP, 2 and 4 from ST)
|
||||
|
||||
<br>
|
||||
|
||||
## 3.3. Metric-Details
|
||||
|
||||
### 3.3.1. rfmon_ssid_names
|
||||
`String` (Length: 0-32)
|
||||
|
||||
SSIDs from ProbeRequests might be empty (probe for any network) or, in the case of Beacon-Frames, hidden.
|
||||
|
||||
### 3.3.2. rfmon_authenticationtype_info
|
||||
`String` {OpenSystem_1, OpenSystem_2, Unknown}
|
||||
|
||||
<br>
|
||||
|
||||
## 3.4. Tag-Overview
|
||||
---
|
||||
<br>
|
||||
|
||||
Name |Type |Description
|
||||
---|---|---
|
||||
srcmac | `MAC` | Sender's MAC-Address (not present in ClearToSend-Packet)
|
||||
dstmac | `MAC` | Destination's MAC-Address (not present in RequestToSend-Packet)
|
||||
bssid | `MAC` | AP's MAC-Address
|
||||
frequency | `Number` | Frequency the packet was captured on in MHz
|
||||
packetType | `String` | Type of packet
|
||||
flags_MoreFragments | `Boolean` | Packet is incomplete
|
||||
flags_Retry | " | Packet is being retried
|
||||
flags_PwrMgt | " | Sender will not sleep
|
||||
flags_MoreData | " | More data in send-buffer to be expected
|
||||
flags_Protected | " | Packet is protected
|
||||
flags_Order | " | Packet is strictly ordered
|
||||
|
||||
<br>
|
||||
|
||||
## 3.5. Tag-Details
|
||||
|
||||
### 3.5.1. frequency
|
||||
`Number` (2412 <> 2484)
|
||||
|
||||
The frequency corresponds to the following wifi-channels:
|
||||
|
||||
Channel|Frequency
|
||||
---|---
|
||||
1 | 2412
|
||||
2 | 2417
|
||||
3 | 2422
|
||||
4 | 2427
|
||||
5 | 2432
|
||||
6 | 2437
|
||||
7 | 2442
|
||||
8 | 2447
|
||||
9 | 2452
|
||||
10 | 2457
|
||||
11 | 2462
|
||||
12 | 2467
|
||||
13 | 2472
|
||||
14 | 2484
|
||||
|
||||
See [Wikipedia - List of WLAN channels - 2.4GHz](https://en.wikipedia.org/wiki/List_of_WLAN_channels#2.4_GHz_(802.11b/g/n/ax)) for more Information.
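
If the channel-number is needed in queries or dashboards, it can be derived from the `frequency`-tag; a minimal helper (hypothetical, not part of this repository):

```js
// Hypothetical helper: convert a 2.4GHz frequency (MHz) to its wifi-channel number
function frequencyToChannel(frequencyMhz) {
    if (frequencyMhz === 2484) return 14;             // channel 14 is a special case
    if (frequencyMhz >= 2412 && frequencyMhz <= 2472)
        return (frequencyMhz - 2407) / 5;             // channels 1-13 are spaced 5 MHz apart
    return null;                                      // outside the 2.4GHz band
}

console.log(frequencyToChannel(2437)); // -> 6
```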
|
||||
|
||||
### 3.5.2. packetType
|
||||
`String`
|
||||
|
||||
Type|Sender|Description
|
||||
---|---|---
|
||||
Beacon | AP | Signal its presence and provide synchronisation for Stations
|
||||
ProbeRequest | ST | Ask if certain RA/SSID is available
|
||||
ProbeResponse | AP | Directly respond to Request and Signal own presence
|
||||
Data | Both | Data-packets
|
||||
RequestToSend | ST | Ask for transmission-time
|
||||
ClearToSend | RA | Ack transmission-time
|
||||
Acknowledgment | Both | Ack Data-Packets
|
||||
BlockAcknowledgment | Both | Ack many Data-Packets at once
|
||||
NoData | Both | Packet without content, typically used to transmit QoS-States
|
||||
Authentication | Both | Authentication-process to establish identity and set states
|
||||
AssociationRequest | ST | Register to AP
|
||||
AssociationResponse | AP | Respond to registering
|
||||
Disassociation | ST | Actively unregister, e.g. to associate with a different AP
|
||||
Handshake | Both | 4-Way-EAPOL-Handshake to generate encryption-keys between participants
|
||||
Unknown | - | Unknown packets not identified into above types
|
||||
|
||||
<br>
|
||||
|
||||
# 4. Potential Issues
|
||||
|
||||
## 4.1. Channel/Frequency
|
||||
|
||||
The system can only monitor one channel at a time, which might not provide enough coverage.
To combat this, more interfaces and systems can be deployed.
|
||||
|
||||
This is not entirely unproblematic, as the system cannot currently prevent packets from being inserted more than once.
|
||||
|
||||
<br>
|
||||
|
||||
## 4.2. Technology
|
||||
|
||||
Mismatches between sender- and receiver-technologies (e.g. MIMO or HT) can cause packets to not be logged at all,
though this should only be a problem for data-packets.
|
||||
|
||||
<br>
|
||||
|
||||
## 4.3. Data protection
|
||||
|
||||
Because the system collects any data it can receive, this can be problematic, especially in countries with strong data-protection laws.
|
||||
|
||||
A wifi MAC-address is likely to be considered information relating to an identifiable natural person, e.g. under GDPR Art. 4 (1), and may therefore only be processed with prior consent or has to be anonymised.
|
||||
|
||||
<br>
|
||||
|
||||
## 4.4. Ethical
|
||||
|
||||
The large-scale collection of data for behavioural or movement analysis, especially without consent of the data subject, is highly controversial.
|
||||
|
||||
Metadata that can be used to track precise activities, such as wifi data, is very powerful and should only be collected and used when necessary.
|
||||
|
||||
If this data falls into the hands of a malicious actor, more precise attacks on the targets could be carried out, such as break-ins, behaviour-based discrimination or more successful phishing.
|
||||
|
@ -0,0 +1,9 @@
|
||||
TAG="ruakij/rfmon-to-influx"
|
||||
PLATFORM="linux/amd64,linux/arm64/v8,linux/arm/v7"
|
||||
EXTRA_ARGS="$@"
|
||||
|
||||
docker buildx build \
|
||||
--platform $PLATFORM \
|
||||
--tag $TAG \
|
||||
$EXTRA_ARGS \
|
||||
.
|
@ -0,0 +1,7 @@
|
||||
TAG="ruakij/rfmon-to-influx"
|
||||
EXTRA_ARGS="$@"
|
||||
|
||||
docker build \
|
||||
--tag $TAG \
|
||||
$EXTRA_ARGS \
|
||||
.
|
Binary file not shown.
After Width: | Height: | Size: 147 KiB |
Binary file not shown.
After Width: | Height: | Size: 14 KiB |
@ -0,0 +1,266 @@
|
||||
{
|
||||
"name": "rfmon-to-influx",
|
||||
"version": "1.0.0",
|
||||
"lockfileVersion": 2,
|
||||
"requires": true,
|
||||
"packages": {
|
||||
"": {
|
||||
"name": "rfmon-to-influx",
|
||||
"version": "1.0.0",
|
||||
"license": "AGPL-3.0",
|
||||
"dependencies": {
|
||||
"@influxdata/influxdb-client": "^1.20.0",
|
||||
"@influxdata/influxdb-client-apis": "^1.20.0",
|
||||
"log4js": "^6.3.0",
|
||||
"luxon": "^2.1.1",
|
||||
"string-argv": "^0.3.1"
|
||||
}
|
||||
},
|
||||
"node_modules/@influxdata/influxdb-client": {
|
||||
"version": "1.20.0",
|
||||
"resolved": "https://registry.npmjs.org/@influxdata/influxdb-client/-/influxdb-client-1.20.0.tgz",
|
||||
"integrity": "sha512-jaKSI63hmQ5VSkJrFJkYIXaKlhoF+mGd4HmOf7v/X7pmEi69ReHp922Wyx6/OeCrpndRMbsadk+XmGNdd43cFw=="
|
||||
},
|
||||
"node_modules/@influxdata/influxdb-client-apis": {
|
||||
"version": "1.20.0",
|
||||
"resolved": "https://registry.npmjs.org/@influxdata/influxdb-client-apis/-/influxdb-client-apis-1.20.0.tgz",
|
||||
"integrity": "sha512-KMTmXH4rbpS+NWGpqDjxcKTyan2rbiT2IM5AdRElKhH2sHbH96xwLgziaxeC+OCJLeNAdehJgae3I8WiZjbwdg==",
|
||||
"peerDependencies": {
|
||||
"@influxdata/influxdb-client": "*"
|
||||
}
|
||||
},
|
||||
"node_modules/date-format": {
|
||||
"version": "3.0.0",
|
||||
"resolved": "https://registry.npmjs.org/date-format/-/date-format-3.0.0.tgz",
|
||||
"integrity": "sha512-eyTcpKOcamdhWJXj56DpQMo1ylSQpcGtGKXcU0Tb97+K56/CF5amAqqqNj0+KvA0iw2ynxtHWFsPDSClCxe48w==",
|
||||
"engines": {
|
||||
"node": ">=4.0"
|
||||
}
|
||||
},
|
||||
"node_modules/debug": {
|
||||
"version": "4.3.2",
|
||||
"resolved": "https://registry.npmjs.org/debug/-/debug-4.3.2.tgz",
|
||||
"integrity": "sha512-mOp8wKcvj7XxC78zLgw/ZA+6TSgkoE2C/ienthhRD298T7UNwAg9diBpLRxC0mOezLl4B0xV7M0cCO6P/O0Xhw==",
|
||||
"dependencies": {
|
||||
"ms": "2.1.2"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=6.0"
|
||||
},
|
||||
"peerDependenciesMeta": {
|
||||
"supports-color": {
|
||||
"optional": true
|
||||
}
|
||||
}
|
||||
},
|
||||
"node_modules/flatted": {
|
||||
"version": "2.0.2",
|
||||
"resolved": "https://registry.npmjs.org/flatted/-/flatted-2.0.2.tgz",
|
||||
"integrity": "sha512-r5wGx7YeOwNWNlCA0wQ86zKyDLMQr+/RB8xy74M4hTphfmjlijTSSXGuH8rnvKZnfT9i+75zmd8jcKdMR4O6jA=="
|
||||
},
|
||||
"node_modules/fs-extra": {
|
||||
"version": "8.1.0",
|
||||
"resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-8.1.0.tgz",
|
||||
"integrity": "sha512-yhlQgA6mnOJUKOsRUFsgJdQCvkKhcz8tlZG5HBQfReYZy46OwLcY+Zia0mtdHsOo9y/hP+CxMN0TU9QxoOtG4g==",
|
||||
"dependencies": {
|
||||
"graceful-fs": "^4.2.0",
|
||||
"jsonfile": "^4.0.0",
|
||||
"universalify": "^0.1.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=6 <7 || >=8"
|
||||
}
|
||||
},
|
||||
"node_modules/graceful-fs": {
|
||||
"version": "4.2.8",
|
||||
"resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.8.tgz",
|
||||
"integrity": "sha512-qkIilPUYcNhJpd33n0GBXTB1MMPp14TxEsEs0pTrsSVucApsYzW5V+Q8Qxhik6KU3evy+qkAAowTByymK0avdg=="
|
||||
},
|
||||
"node_modules/jsonfile": {
|
||||
"version": "4.0.0",
|
||||
"resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-4.0.0.tgz",
|
||||
"integrity": "sha1-h3Gq4HmbZAdrdmQPygWPnBDjPss=",
|
||||
"optionalDependencies": {
|
||||
"graceful-fs": "^4.1.6"
|
||||
}
|
||||
},
|
||||
"node_modules/log4js": {
|
||||
"version": "6.3.0",
|
||||
"resolved": "https://registry.npmjs.org/log4js/-/log4js-6.3.0.tgz",
|
||||
"integrity": "sha512-Mc8jNuSFImQUIateBFwdOQcmC6Q5maU0VVvdC2R6XMb66/VnT+7WS4D/0EeNMZu1YODmJe5NIn2XftCzEocUgw==",
|
||||
"dependencies": {
|
||||
"date-format": "^3.0.0",
|
||||
"debug": "^4.1.1",
|
||||
"flatted": "^2.0.1",
|
||||
"rfdc": "^1.1.4",
|
||||
"streamroller": "^2.2.4"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=8.0"
|
||||
}
|
||||
},
|
||||
"node_modules/luxon": {
|
||||
"version": "2.1.1",
|
||||
"resolved": "https://registry.npmjs.org/luxon/-/luxon-2.1.1.tgz",
|
||||
"integrity": "sha512-6VQVNw7+kQu3hL1ZH5GyOhnk8uZm21xS7XJ/6vDZaFNcb62dpFDKcH8TI5NkoZOdMRxr7af7aYGrJlE/Wv0i1w==",
|
||||
"engines": {
|
||||
"node": ">=12"
|
||||
}
|
||||
},
|
||||
"node_modules/ms": {
|
||||
"version": "2.1.2",
|
||||
"resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz",
|
||||
"integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w=="
|
||||
},
|
||||
"node_modules/rfdc": {
|
||||
"version": "1.3.0",
|
||||
"resolved": "https://registry.npmjs.org/rfdc/-/rfdc-1.3.0.tgz",
|
||||
"integrity": "sha512-V2hovdzFbOi77/WajaSMXk2OLm+xNIeQdMMuB7icj7bk6zi2F8GGAxigcnDFpJHbNyNcgyJDiP+8nOrY5cZGrA=="
|
||||
},
|
||||
"node_modules/streamroller": {
|
||||
"version": "2.2.4",
|
||||
"resolved": "https://registry.npmjs.org/streamroller/-/streamroller-2.2.4.tgz",
|
||||
"integrity": "sha512-OG79qm3AujAM9ImoqgWEY1xG4HX+Lw+yY6qZj9R1K2mhF5bEmQ849wvrb+4vt4jLMLzwXttJlQbOdPOQVRv7DQ==",
|
||||
"dependencies": {
|
||||
"date-format": "^2.1.0",
|
||||
"debug": "^4.1.1",
|
||||
"fs-extra": "^8.1.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=8.0"
|
||||
}
|
||||
},
|
||||
"node_modules/streamroller/node_modules/date-format": {
|
||||
"version": "2.1.0",
|
||||
"resolved": "https://registry.npmjs.org/date-format/-/date-format-2.1.0.tgz",
|
||||
"integrity": "sha512-bYQuGLeFxhkxNOF3rcMtiZxvCBAquGzZm6oWA1oZ0g2THUzivaRhv8uOhdr19LmoobSOLoIAxeUK2RdbM8IFTA==",
|
||||
"engines": {
|
||||
"node": ">=4.0"
|
||||
}
|
||||
},
|
||||
"node_modules/string-argv": {
|
||||
"version": "0.3.1",
|
||||
"resolved": "https://registry.npmjs.org/string-argv/-/string-argv-0.3.1.tgz",
|
||||
"integrity": "sha512-a1uQGz7IyVy9YwhqjZIZu1c8JO8dNIe20xBmSS6qu9kv++k3JGzCVmprbNN5Kn+BgzD5E7YYwg1CcjuJMRNsvg==",
|
||||
"engines": {
|
||||
"node": ">=0.6.19"
|
||||
}
|
||||
},
|
||||
"node_modules/universalify": {
|
||||
"version": "0.1.2",
|
||||
"resolved": "https://registry.npmjs.org/universalify/-/universalify-0.1.2.tgz",
|
||||
"integrity": "sha512-rBJeI5CXAlmy1pV+617WB9J63U6XcazHHF2f2dbJix4XzpUF0RS3Zbj0FGIOCAva5P/d/GBOYaACQ1w+0azUkg==",
|
||||
"engines": {
|
||||
"node": ">= 4.0.0"
|
||||
}
|
||||
}
|
||||
},
|
||||
"dependencies": {
|
||||
"@influxdata/influxdb-client": {
|
||||
"version": "1.20.0",
|
||||
"resolved": "https://registry.npmjs.org/@influxdata/influxdb-client/-/influxdb-client-1.20.0.tgz",
|
||||
"integrity": "sha512-jaKSI63hmQ5VSkJrFJkYIXaKlhoF+mGd4HmOf7v/X7pmEi69ReHp922Wyx6/OeCrpndRMbsadk+XmGNdd43cFw=="
|
||||
},
|
||||
"@influxdata/influxdb-client-apis": {
|
||||
"version": "1.20.0",
|
||||
"resolved": "https://registry.npmjs.org/@influxdata/influxdb-client-apis/-/influxdb-client-apis-1.20.0.tgz",
|
||||
"integrity": "sha512-KMTmXH4rbpS+NWGpqDjxcKTyan2rbiT2IM5AdRElKhH2sHbH96xwLgziaxeC+OCJLeNAdehJgae3I8WiZjbwdg==",
|
||||
"requires": {}
|
||||
},
|
||||
"date-format": {
|
||||
"version": "3.0.0",
|
||||
"resolved": "https://registry.npmjs.org/date-format/-/date-format-3.0.0.tgz",
|
||||
"integrity": "sha512-eyTcpKOcamdhWJXj56DpQMo1ylSQpcGtGKXcU0Tb97+K56/CF5amAqqqNj0+KvA0iw2ynxtHWFsPDSClCxe48w=="
|
||||
},
|
||||
"debug": {
|
||||
"version": "4.3.2",
|
||||
"resolved": "https://registry.npmjs.org/debug/-/debug-4.3.2.tgz",
|
||||
"integrity": "sha512-mOp8wKcvj7XxC78zLgw/ZA+6TSgkoE2C/ienthhRD298T7UNwAg9diBpLRxC0mOezLl4B0xV7M0cCO6P/O0Xhw==",
|
||||
"requires": {
|
||||
"ms": "2.1.2"
|
||||
}
|
||||
},
|
||||
"flatted": {
|
||||
"version": "2.0.2",
|
||||
"resolved": "https://registry.npmjs.org/flatted/-/flatted-2.0.2.tgz",
|
||||
"integrity": "sha512-r5wGx7YeOwNWNlCA0wQ86zKyDLMQr+/RB8xy74M4hTphfmjlijTSSXGuH8rnvKZnfT9i+75zmd8jcKdMR4O6jA=="
|
||||
},
|
||||
"fs-extra": {
|
||||
"version": "8.1.0",
|
||||
"resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-8.1.0.tgz",
|
||||
"integrity": "sha512-yhlQgA6mnOJUKOsRUFsgJdQCvkKhcz8tlZG5HBQfReYZy46OwLcY+Zia0mtdHsOo9y/hP+CxMN0TU9QxoOtG4g==",
|
||||
"requires": {
|
||||
"graceful-fs": "^4.2.0",
|
||||
"jsonfile": "^4.0.0",
|
||||
"universalify": "^0.1.0"
|
||||
}
|
||||
},
|
||||
"graceful-fs": {
|
||||
"version": "4.2.8",
|
||||
"resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.8.tgz",
|
||||
"integrity": "sha512-qkIilPUYcNhJpd33n0GBXTB1MMPp14TxEsEs0pTrsSVucApsYzW5V+Q8Qxhik6KU3evy+qkAAowTByymK0avdg=="
|
||||
},
|
||||
"jsonfile": {
|
||||
"version": "4.0.0",
|
||||
"resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-4.0.0.tgz",
|
||||
"integrity": "sha1-h3Gq4HmbZAdrdmQPygWPnBDjPss=",
|
||||
"requires": {
|
||||
"graceful-fs": "^4.1.6"
|
||||
}
|
||||
},
|
||||
"log4js": {
|
||||
"version": "6.3.0",
|
||||
"resolved": "https://registry.npmjs.org/log4js/-/log4js-6.3.0.tgz",
|
||||
"integrity": "sha512-Mc8jNuSFImQUIateBFwdOQcmC6Q5maU0VVvdC2R6XMb66/VnT+7WS4D/0EeNMZu1YODmJe5NIn2XftCzEocUgw==",
|
||||
"requires": {
|
||||
"date-format": "^3.0.0",
|
||||
"debug": "^4.1.1",
|
||||
"flatted": "^2.0.1",
|
||||
"rfdc": "^1.1.4",
|
||||
"streamroller": "^2.2.4"
|
||||
}
|
||||
},
|
||||
"luxon": {
|
||||
"version": "2.1.1",
|
||||
"resolved": "https://registry.npmjs.org/luxon/-/luxon-2.1.1.tgz",
|
||||
"integrity": "sha512-6VQVNw7+kQu3hL1ZH5GyOhnk8uZm21xS7XJ/6vDZaFNcb62dpFDKcH8TI5NkoZOdMRxr7af7aYGrJlE/Wv0i1w=="
|
||||
},
|
||||
"ms": {
|
||||
"version": "2.1.2",
|
||||
"resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz",
|
||||
"integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w=="
|
||||
},
|
||||
"rfdc": {
|
||||
"version": "1.3.0",
|
||||
"resolved": "https://registry.npmjs.org/rfdc/-/rfdc-1.3.0.tgz",
|
||||
"integrity": "sha512-V2hovdzFbOi77/WajaSMXk2OLm+xNIeQdMMuB7icj7bk6zi2F8GGAxigcnDFpJHbNyNcgyJDiP+8nOrY5cZGrA=="
|
||||
},
|
||||
"streamroller": {
|
||||
"version": "2.2.4",
|
||||
"resolved": "https://registry.npmjs.org/streamroller/-/streamroller-2.2.4.tgz",
|
||||
"integrity": "sha512-OG79qm3AujAM9ImoqgWEY1xG4HX+Lw+yY6qZj9R1K2mhF5bEmQ849wvrb+4vt4jLMLzwXttJlQbOdPOQVRv7DQ==",
|
||||
"requires": {
|
||||
"date-format": "^2.1.0",
|
||||
"debug": "^4.1.1",
|
||||
"fs-extra": "^8.1.0"
|
||||
},
|
||||
"dependencies": {
|
||||
"date-format": {
|
||||
"version": "2.1.0",
|
||||
"resolved": "https://registry.npmjs.org/date-format/-/date-format-2.1.0.tgz",
|
||||
"integrity": "sha512-bYQuGLeFxhkxNOF3rcMtiZxvCBAquGzZm6oWA1oZ0g2THUzivaRhv8uOhdr19LmoobSOLoIAxeUK2RdbM8IFTA=="
|
||||
}
|
||||
}
|
||||
},
|
||||
"string-argv": {
|
||||
"version": "0.3.1",
|
||||
"resolved": "https://registry.npmjs.org/string-argv/-/string-argv-0.3.1.tgz",
|
||||
"integrity": "sha512-a1uQGz7IyVy9YwhqjZIZu1c8JO8dNIe20xBmSS6qu9kv++k3JGzCVmprbNN5Kn+BgzD5E7YYwg1CcjuJMRNsvg=="
|
||||
},
|
||||
"universalify": {
|
||||
"version": "0.1.2",
|
||||
"resolved": "https://registry.npmjs.org/universalify/-/universalify-0.1.2.tgz",
|
||||
"integrity": "sha512-rBJeI5CXAlmy1pV+617WB9J63U6XcazHHF2f2dbJix4XzpUF0RS3Zbj0FGIOCAva5P/d/GBOYaACQ1w+0azUkg=="
|
||||
}
|
||||
}
|
||||
}
|
@ -0,0 +1,23 @@
|
||||
{
|
||||
"name": "rfmon-to-influx",
|
||||
"version": "1.0.0",
|
||||
"description": "Writing (mostly meta-) data received in Wireless-Monitor-Mode into an InfluxDB",
|
||||
"main": "main.js",
|
||||
"scripts": {
|
||||
"test": "echo \"Error: no test specified\" && exit 1",
|
||||
"start": "node main.js"
|
||||
},
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://gitea.ruekov.eu/Ruakij/rfmon-to-influx"
|
||||
},
|
||||
"author": "Ruakij",
|
||||
"license": "AGPL-3.0",
|
||||
"dependencies": {
|
||||
"@influxdata/influxdb-client": "^1.20.0",
|
||||
"@influxdata/influxdb-client-apis": "^1.20.0",
|
||||
"log4js": "^6.3.0",
|
||||
"luxon": "^2.1.1",
|
||||
"string-argv": "^0.3.1"
|
||||
}
|
||||
}
|
@ -0,0 +1,105 @@
|
||||
const PacketType = {
|
||||
Beacon: "Beacon",
|
||||
ProbeRequest: "ProbeRequest",
|
||||
ProbeResponse: "ProbeResponse",
|
||||
Data: "Data",
|
||||
RequestToSend: "RequestToSend",
|
||||
ClearToSend: "ClearToSend",
|
||||
Acknowledgment: "Acknowledgment",
|
||||
BlockAcknowledgment: "BlockAcknowledgment",
|
||||
NoData: "NoData",
|
||||
Authentication: "Authentication",
|
||||
AssociationRequest: "AssociationRequest",
|
||||
AssociationResponse: "AssociationResponse",
|
||||
Disassociation: "Disassociation",
|
||||
Handshake: "Handshake",
|
||||
Unknown: "Unknown"
|
||||
};
|
||||
|
||||
const FlagType = {
|
||||
MoreFragments: "MoreFragments",
|
||||
Retry: "Retry",
|
||||
PwrMgt: "PwrMgt",
|
||||
MoreData: "MoreData",
|
||||
Protected: "Protected",
|
||||
Order: "Order"
|
||||
};
|
||||
|
||||
class Packet{
|
||||
timestampMicros;
|
||||
|
||||
flags = {};
|
||||
|
||||
srcMac;
|
||||
dstMac;
|
||||
bssid;
|
||||
|
||||
signal;
|
||||
frequency;
|
||||
dataRate;
|
||||
|
||||
durationMicros;
|
||||
|
||||
payloadData;
|
||||
get payloadSize(){
|
||||
return this.payloadData.length;
|
||||
}
|
||||
|
||||
packetType;
|
||||
}
|
||||
|
||||
// Extensions of Packet
|
||||
class PacketWithSSID extends Packet{
|
||||
ssid;
|
||||
}
|
||||
|
||||
class BeaconPacket extends PacketWithSSID{}
|
||||
class ProbeRequestPacket extends PacketWithSSID{}
|
||||
class ProbeResponsePacket extends PacketWithSSID{}
|
||||
|
||||
const AuthenticationType = {
|
||||
OpenSystem_1: "OpenSystem_1",
|
||||
OpenSystem_2: "OpenSystem_2",
|
||||
Unknown: "Unknown",
|
||||
};
|
||||
class AuthenticationPacket extends Packet{
|
||||
authenticationType;
|
||||
}
|
||||
|
||||
class AssociationRequestPacket extends PacketWithSSID{}
|
||||
class AssociationResponsePacket extends Packet{
|
||||
associationIsSuccessful;
|
||||
}
|
||||
|
||||
class DisassociationPacket extends Packet{
|
||||
disassociationReason;
|
||||
}
|
||||
|
||||
|
||||
const HandshakeStage = {
|
||||
1: "1",
|
||||
2: "2",
|
||||
3: "3",
|
||||
4: "4"
|
||||
};
|
||||
class HandshakePacket extends Packet{
|
||||
handshakeStage;
|
||||
}
|
||||
|
||||
// Specify exports
|
||||
module.exports = {
|
||||
PacketType,
|
||||
FlagType,
|
||||
Packet,
|
||||
PacketWithSSID,
|
||||
BeaconPacket,
|
||||
ProbeRequestPacket,
|
||||
ProbeResponsePacket,
|
||||
AuthenticationType,
|
||||
AuthenticationPacket,
|
||||
AssociationRequestPacket,
|
||||
AssociationResponsePacket,
|
||||
DisassociationPacket,
|
||||
HandshakeStage,
|
||||
HandshakePacket,
|
||||
};
|
@ -0,0 +1,13 @@
|
||||
function requireEnvVars(requiredEnv){
|
||||
// Ensure required ENV vars are set
|
||||
let unsetEnv = requiredEnv.filter((env) => (typeof process.env[env] === "undefined"));
|
||||
|
||||
if (unsetEnv.length > 0) {
|
||||
return "Required ENV variables are not set: [" + unsetEnv.join(", ") + "]";
|
||||
}
|
||||
}
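
// Example (hypothetical) usage:
//   const errorMsg = requireEnvVars(["INFLUX_URL", "INFLUX_TOKEN"]);
//   if(errorMsg){ /* log the message and exit */ }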
|
||||
|
||||
// Specify exports
|
||||
module.exports = {
|
||||
requireEnvVars
|
||||
};
|
@ -0,0 +1,18 @@
|
||||
const logger = require("./logger.js")("exec");
|
||||
|
||||
const { spawn } = require("child_process");
|
||||
const { parseArgsStringToArgv } = require("string-argv");
|
||||
|
||||
|
||||
function exec(cmd, options){
|
||||
const [bin, ...args] = parseArgsStringToArgv(cmd);
|
||||
|
||||
logger.addContext("binary", bin);
|
||||
logger.debug(`Spawn process '${cmd}'`);
|
||||
return spawn(bin, args, options);
|
||||
}
|
||||
|
||||
// Specify exports
|
||||
module.exports = {
|
||||
exec
|
||||
};
|
@ -0,0 +1,24 @@
|
||||
// From https://stackoverflow.com/a/34356351
|
||||
|
||||
// Convert a hex string to a byte array
|
||||
function hexToBytes(hex) {
|
||||
for (var bytes = [], c = 0; c < hex.length; c += 2)
|
||||
bytes.push(parseInt(hex.substr(c, 2), 16));
|
||||
return bytes;
|
||||
}
|
||||
|
||||
// Convert a byte array to a hex string
|
||||
function bytesToHex(bytes) {
|
||||
for (var hex = [], i = 0; i < bytes.length; i++) {
|
||||
var current = bytes[i] < 0 ? bytes[i] + 256 : bytes[i];
|
||||
hex.push((current >>> 4).toString(16));
|
||||
hex.push((current & 0xF).toString(16));
|
||||
}
|
||||
return hex.join("");
|
||||
}
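
// Example (hypothetical) usage, round-tripping a short hex-string:
//   hexToBytes("0a1b")   -> [10, 27]
//   bytesToHex([10, 27]) -> "0a1b"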
|
||||
|
||||
// Specify exports
|
||||
module.exports = {
|
||||
hexToBytes,
|
||||
bytesToHex
|
||||
};
|
@ -0,0 +1,61 @@
|
||||
const logger = require.main.require("./helper/logger.js")("influx-checks");
|
||||
|
||||
const Os = require("os");
|
||||
const { InfluxDB, Point } = require("@influxdata/influxdb-client")
|
||||
const Influx = require("@influxdata/influxdb-client-apis");
|
||||
|
||||
|
||||
function checkHealth(influxDb){
|
||||
return new Promise((resolve, reject) => {
|
||||
new Influx.HealthAPI(influxDb) // Check influx health
|
||||
.getHealth()
|
||||
.catch((err) => {
|
||||
logger.error("Could not communicate with Influx:");
|
||||
logger.error(`Error [${err.code}]:`, err.message);
|
||||
reject();
|
||||
})
|
||||
.then((res) => {
|
||||
logger.debug("Server healthy.", "Version: ", res.version);
|
||||
resolve(res);
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
function checkBucket(influxDb, options){
|
||||
return new Promise((resolve, reject) => {
|
||||
new Influx.BucketsAPI(influxDb).getBuckets(options)
|
||||
.catch((err) => { // Weirdly the influx-Api returns 404 for searches of non-existing buckets
|
||||
logger.error("Could not get bucket:");
|
||||
logger.error(`Error [${err.code}]:`, err.message);
|
||||
reject();
|
||||
}).then((res) => { // But an empty list when the bucket exists, but token does not have permission to get details
|
||||
logger.debug("Bucket found");
|
||||
resolve(res);
|
||||
// Now we know the bucket exists and we have some kind of permission.. but we still dont know if we are able to write to it..
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
function checkWriteApi(influxDb, options){
|
||||
return new Promise((resolve, reject) => {
|
||||
const writeApi = influxDb.getWriteApi(options.org, options.bucket); // Get WriteAPI
|
||||
writeApi.writePoint(new Point("worker_connectionTest").tag("hostname", Os.hostname())); // Write point
|
||||
writeApi.close()
|
||||
.catch((err) => {
|
||||
logger.error("Could not get writeApi:");
|
||||
logger.error(`Error [${err.code}]:`, err.message);
|
||||
reject();
|
||||
}).then((res) => {
|
||||
logger.debug("Writing ok");
|
||||
resolve();
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
// Specify exports
|
||||
module.exports = {
|
||||
checkHealth,
|
||||
checkBucket,
|
||||
checkWriteApi,
|
||||
};
|
@ -0,0 +1,11 @@
|
||||
const log4js = require("log4js");
|
||||
|
||||
|
||||
function setup(category = "unknown"){
|
||||
const logger = log4js.getLogger(category);
|
||||
logger.level = process.env.LOGLEVEL ?? "INFO";
|
||||
return logger;
|
||||
}
|
||||
|
||||
// Specify exports
|
||||
module.exports = setup;
|
@ -0,0 +1,46 @@
|
||||
// This file specifies functions to help a user with e.g. configuration-errors
|
||||
|
||||
function detectStreamData(stream, timeout = 5000){
|
||||
return new Promise((resolve, reject) => {
|
||||
let timeoutHandler;
|
||||
if(timeout){
|
||||
timeoutHandler = setTimeout(() => {
|
||||
reject("timeout");
|
||||
remListeners();
|
||||
},
|
||||
timeout);
|
||||
}
|
||||
|
||||
function remListeners(){
|
||||
stream.removeListener("error", errorHandler);
|
||||
stream.removeListener("data", dataHandler);
|
||||
if(timeoutHandler) clearTimeout(timeoutHandler);
|
||||
}
|
||||
|
||||
function errorHandler(err) {
    reject(err); // Also settle the promise on stream-errors, instead of relying on the timeout alone
    remListeners();
}
|
||||
function dataHandler(data) {
|
||||
resolve(data);
|
||||
remListeners();
|
||||
}
|
||||
|
||||
stream.on("error", errorHandler);
|
||||
stream.on("data", dataHandler);
|
||||
});
|
||||
}
|
||||
|
||||
function detectStreamsData(streams, timeout = 5000){
|
||||
let promises = [];
|
||||
streams.forEach((stream) => {
|
||||
promises.push(detectStreamData(stream, timeout));
|
||||
});
|
||||
return promises;
|
||||
}
|
||||
|
||||
|
||||
// Specify exports
|
||||
module.exports = {
|
||||
detectStreamData,
|
||||
detectStreamsData,
|
||||
};
|
@ -0,0 +1,43 @@
|
||||
const { HandshakeStage } = require.main.require("./dto/Packet.js");
|
||||
|
||||
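// Split the raw 16-bit EAPOL Key-Information field into its named flags/values
// (bit-layout as defined by the IEEE 802.1X/802.11 EAPOL-Key descriptor)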
function keyInfoFromRaw(keyInfoRaw) {
|
||||
return {
|
||||
"KeyDescriptorVersion": keyInfoRaw>>0 & 0b111,
|
||||
"KeyType": keyInfoRaw>>3 & 0b1,
|
||||
"KeyIndex": keyInfoRaw>>4 & 0b11,
|
||||
"Install": keyInfoRaw>>6 & 0b1,
|
||||
"KeyACK": keyInfoRaw>>7 & 0b1,
|
||||
"KeyMIC": keyInfoRaw>>8 & 0b1,
|
||||
"Secure": keyInfoRaw>>9 & 0b1,
|
||||
"Error": keyInfoRaw>>10 & 0b1,
|
||||
"Request": keyInfoRaw>>11 & 0b1,
|
||||
"EncryptedKeyData": keyInfoRaw>>12 & 0b1,
|
||||
"SMKMessage": keyInfoRaw>>13 & 0b1,
|
||||
};
|
||||
}
|
||||
|
||||
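// The combination of the (Install, KeyACK, KeyMIC, Secure) flags identifies
// which message (stage 1-4) of the 4-way-handshake an EAPOL key-frame belongs to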
const HANDSHAKE_STAGE_KEYINFO = {
|
||||
"keys": ["Install", "KeyACK", "KeyMIC", "Secure"],
|
||||
"0100": HandshakeStage[1],
|
||||
"0010": HandshakeStage[2],
|
||||
"1111": HandshakeStage[3],
|
||||
"0011": HandshakeStage[4],
|
||||
};
|
||||
function handshakeStageFromKeyInfo(keyInfo){
|
||||
|
||||
// Extract compare-keys
|
||||
let keyData = "";
|
||||
for (const key of HANDSHAKE_STAGE_KEYINFO["keys"]) {
|
||||
keyData += keyInfo[key].toString();
|
||||
}
|
||||
|
||||
// Get and return stage
|
||||
return HANDSHAKE_STAGE_KEYINFO[keyData];
|
||||
}
|
||||
|
||||
|
||||
// Specify exports
|
||||
module.exports = {
|
||||
keyInfoFromRaw,
|
||||
handshakeStageFromKeyInfo,
|
||||
};
|
@ -0,0 +1,201 @@
|
||||
"use strict";
|
||||
const logFactory = require("./helper/logger.js");
|
||||
const logger = logFactory("main");
|
||||
|
||||
const { requireEnvVars } = require("./helper/env.js");
|
||||
const { exit } = require("process");
|
||||
const { exec } = require("./helper/exec.js");
|
||||
const Os = require("os");
|
||||
|
||||
const { InfluxDB } = require("@influxdata/influxdb-client");
|
||||
const InfluxChecks = require("./helper/influx-checks.js");
|
||||
|
||||
const { RegexBlockStream } = require("./streamHandler/RegexBlockStream.js");
|
||||
const { PacketStreamFactory } = require("./streamHandler/PacketStreamFactory.js");
|
||||
const { PacketInfluxPointFactory } = require("./streamHandler/PacketInfluxPointFactory.js");
|
||||
const { InfluxPointWriter } = require("./streamHandler/InfluxPointWriter.js");
|
||||
const { InfluxDbLineProtocolWriter } = require("./streamHandler/InfluxDbLineProtocolWriter.js");
|
||||
const { InfluxPointToLineProtoStream } = require("./streamHandler/InfluxPointToLineProtoStream.js");
|
||||
|
||||
const userHelper = require("./helper/userHelper.js");
|
||||
|
||||
|
||||
/// Setup ENVs
|
||||
const env = process.env;
|
||||
// Defaults
|
||||
{
|
||||
env.LOGLEVEL ??= "INFO";
|
||||
env.WIFI_INTERFACE ??= "wlan0";
|
||||
env.HOSTNAME ??= Os.hostname();
|
||||
|
||||
env.USE_INFLUXDB_LINEPROTOCOL ??= false;
|
||||
}
|
||||
// Required vars
|
||||
let errorMsg = requireEnvVars(
|
||||
env.USE_INFLUXDB_LINEPROTOCOL? [ // When lineprotocol is enabled, we need host and port
|
||||
"INFLUXDB_LINEPROTOCOL_HOST", "INFLUXDB_LINEPROTOCOL_PORT",
|
||||
] : [ // When its disabled, influxdb-data
|
||||
"INFLUX_URL", "INFLUX_TOKEN",
|
||||
"INFLUX_ORG", "INFLUX_BUCKET"
|
||||
]);
|
||||
if(errorMsg){
|
||||
logger.fatal(errorMsg);
|
||||
exit(1);
|
||||
}
|
||||
|
||||
(async function() {
|
||||
let pointWriter;
|
||||
if(!env.USE_INFLUXDB_LINEPROTOCOL){
|
||||
logger.info("Setup Influx..");
|
||||
const influxDb = new InfluxDB({url: env.INFLUX_URL, token: env.INFLUX_TOKEN});
|
||||
|
||||
await InfluxChecks.checkHealth(influxDb)
|
||||
.then((res) => {return InfluxChecks.checkBucket(influxDb, {
|
||||
org: env.INFLUX_ORG,
|
||||
name: env.INFLUX_BUCKET
|
||||
});})
|
||||
.then((res) => {return InfluxChecks.checkWriteApi(influxDb, {
|
||||
org: env.INFLUX_ORG,
|
||||
bucket: env.INFLUX_BUCKET
|
||||
});})
|
||||
.catch((err) => {
|
||||
if(err) {
|
||||
logger.error("Error whilst checking influx:");
|
||||
logger.error(err);
|
||||
}
|
||||
logger.fatal("Setup influx failed!");
|
||||
exit(1);
|
||||
});
|
||||
|
||||
logger.debug("Get WriteApi & set default-hostname to", `'${env.HOSTNAME}'`);
|
||||
const influxWriteApi = influxDb.getWriteApi(env.INFLUX_ORG, env.INFLUX_BUCKET, "us");
|
||||
//influxWriteApi.useDefaultTags({"hostname": env.HOSTNAME});
|
||||
|
||||
pointWriter = new InfluxPointWriter(influxWriteApi);
|
||||
|
||||
logger.info("Influx ok");
|
||||
}
|
||||
else {
|
||||
logger.info("Setup Influxdb-LineProtocol..");
|
||||
|
||||
let lineProtocolWriter = new InfluxDbLineProtocolWriter(env.INFLUXDB_LINEPROTOCOL_HOST, env.INFLUXDB_LINEPROTOCOL_PORT);
|
||||
|
||||
logger.debug("Create PointToLineProto and pipe to LineProtocolWriter");
|
||||
pointWriter = new InfluxPointToLineProtoStream();
|
||||
pointWriter
|
||||
.setEncoding("utf8")
|
||||
.pipe(lineProtocolWriter);
|
||||
|
||||
logger.debug("Waiting for connection..");
|
||||
await new Promise((resolve, reject) => {
|
||||
lineProtocolWriter.once("connect", () => {
|
||||
resolve();
|
||||
});
|
||||
lineProtocolWriter.once("error", (err) => {
|
||||
reject(err);
|
||||
});
|
||||
setTimeout(() => { // After timeout, reject promise
|
||||
reject("Timeout whilst waiting to connect");
|
||||
}, 6500);
|
||||
})
|
||||
.then(() => {
|
||||
logger.info("Influxdb-LineProtocol ok");
|
||||
})
|
||||
.catch((err) => {
|
||||
if(err) {
|
||||
logger.error("Error whilst checking Influxdb-LineProtocol:");
|
||||
logger.error(err);
|
||||
}
|
||||
logger.fatal("Setup Influxdb-LineProtocol failed!");
|
||||
exit(1);
|
||||
});
|
||||
}
|
||||
|
||||
logger.info("Starting tcpdump..");
|
||||
const TCPDUMP_BASECMD = "tcpdump -vvv -e -n -X -s0 -i";
|
||||
let cmd = `${TCPDUMP_BASECMD} ${env.WIFI_INTERFACE}`;
|
||||
|
||||
let proc = exec(cmd);
|
||||
logger.debug("Creating & Attaching streams..");
|
||||
let regexBlockStream = new RegexBlockStream(/^\d{2}:\d{2}:\d{2}.\d{6}.*(\n( {4,8}|\t\t?).*)+\n/gm);
|
||||
let packetStreamFactory = new PacketStreamFactory();
|
||||
let packetInfluxPointFactory = new PacketInfluxPointFactory();
|
||||
proc.stdout
|
||||
.setEncoding("utf8")
|
||||
.pipe(regexBlockStream)
|
||||
.pipe(packetStreamFactory)
|
||||
.pipe(packetInfluxPointFactory)
|
||||
.pipe(pointWriter);
|
||||
|
||||
logger.debug("Attaching error-logger..");
|
||||
const loggerTcpdump = logFactory("tcpdump");
|
||||
let linkTypeId;
|
||||
proc.stderr.setEncoding("utf8").on("data", (data) => {
|
||||
if(data.match(/^(tcpdump: )?listening on /i) || data.match(/^\d+ packets captured/i)) { // Catch start-error
|
||||
loggerTcpdump.debug(data);
|
||||
|
||||
if(!linkTypeId && data.match(/^(tcpdump: )?listening on/i)){ // Grab first data containing listen-info if proper header was found
|
||||
const linkType = data.match(/((?<=link-type ))([a-z].*?) \(.*?\)(?=,)/i)[0];
|
||||
const linkTypeData = linkType.match(/(\S*) (.*)/i);
|
||||
linkTypeId = linkTypeData[1];
|
||||
const linkTypeDetail = linkTypeData[2];
|
||||
|
||||
if(linkTypeId !== "IEEE802_11_RADIO"){
|
||||
logger.error(`Interface not in Monitor-mode! (Expected 'IEEE802_11_RADIO', but got '${linkTypeId}')`);
|
||||
shutdown(1, "SIGKILL");
|
||||
}
|
||||
}
|
||||
}
|
||||
else loggerTcpdump.error(data);
|
||||
});
|
||||
|
||||
// FIXME: This is a hacky workaround to not let errors from subprocess bubble up and terminate our process
|
||||
regexBlockStream.on("error", (err) => {});
|
||||
|
||||
proc.on("error", (err) => {
|
||||
loggerTcpdump.error(err);
|
||||
});
|
||||
|
||||
const loggerPacketStream = logFactory("PacketStreamFactory");
|
||||
userHelper.detectStreamData(proc.stdout, 10000) // Expect tcpdump-logs to have data after max. 10s
|
||||
.then(() => {
|
||||
loggerTcpdump.debug("Got first data");
|
||||
userHelper.detectStreamData(packetStreamFactory, 10000) // Expect then to have packets after further 10s
|
||||
.then(() => {
|
||||
loggerPacketStream.debug("Got first packet");
|
||||
})
|
||||
.catch((err) => {
|
||||
if(err == "timeout") loggerPacketStream.warn("No packets");
|
||||
});
|
||||
})
|
||||
.catch((err) => {
|
||||
if(err == "timeout") loggerTcpdump.warn("No data after 10s! Wrong configuration?");
|
||||
});
|
||||
|
||||
logger.debug("Attaching exit-handler..");
|
||||
proc.on("exit", (code) => {
|
||||
loggerTcpdump.debug(`tcpdump exited code: ${code}`);
|
||||
if (code) {
|
||||
loggerTcpdump.fatal(`tcpdump exited with non-zero code: ${code}`);
|
||||
if(!exitCode) exitCode = 1; // When exitCode is 0, set to 1
|
||||
}
|
||||
logger.info("Shutdown");
|
||||
exit(exitCode);
|
||||
});
|
||||
|
||||
// Handle stop-signals for graceful shutdown
|
||||
var exitCode = 0;
|
||||
function shutdownReq() {
|
||||
logger.info("Shutdown request received..");
|
||||
shutdown();
|
||||
}
|
||||
function shutdown(code, signal = "SIGTERM"){
|
||||
if(code) exitCode = code;
|
||||
logger.debug("Stopping subprocess tcpdump, then exiting myself..");
|
||||
proc.kill(signal); // Kill process, then upper event-handler will stop self
|
||||
}
|
||||
process.on("SIGTERM", shutdownReq);
|
||||
process.on("SIGINT", shutdownReq);
|
||||
|
||||
logger.info("Startup complete");
|
||||
})();
|
@ -0,0 +1,72 @@
|
||||
const logger = require.main.require("./helper/logger.js")("InfluxDbLineProtocolWriter");
|
||||
const net = require("net");
|
||||
|
||||
/**
|
||||
* Get points and write them into influx
|
||||
*/
|
||||
class InfluxDbLineProtocolWriter extends net.Socket{
|
||||
/**
|
||||
*
|
||||
* @param {string} host Host of line-server
|
||||
* @param {string} port Port of line-server
|
||||
* @param {object} options Options for further configuration
|
||||
*/
|
||||
constructor(host, port, options = {}) {
|
||||
super();
|
||||
|
||||
this._host = host;
|
||||
this._port = port;
|
||||
|
||||
// options defaults
|
||||
options.autoConnect ??= true;
|
||||
options.timeout ??= 5000;
|
||||
options.autoReconnect ??= true;
|
||||
options.autoReconnectBackoffTime ??= 3000;
|
||||
this._options = options;
|
||||
|
||||
this._isConnected = false;
|
||||
|
||||
super.setKeepAlive(true, 5000);
|
||||
|
||||
// Register auto-Reconnect if enabled
|
||||
if(this._options.autoReconnect){
|
||||
this.on("connect", () => {
|
||||
logger.debug("Connection established!");
|
||||
this._isConnected = true;
|
||||
|
||||
if(this._autoReconnectTimeout)
|
||||
clearInterval(this._autoReconnectTimeout);
|
||||
this._autoReconnectTimeout = 0;
|
||||
});
|
||||
|
||||
this.on("error", (err) => {
|
||||
logger.error(err.code, "TCP ERROR");
|
||||
this._isConnected = false;
|
||||
|
||||
if(!this._autoReconnectTimeout)
|
||||
this._autoReconnectTimeout = setInterval(() => {
|
||||
this.connect();
|
||||
},
|
||||
this._options.autoReconnectBackoffTime);
|
||||
});
|
||||
}
|
||||
|
||||
// Autoconnect if requested
|
||||
if(this._options.autoConnect) this.connect();
|
||||
}
|
||||
|
||||
get host(){ return this._host; }
|
||||
get port(){ return this._port; }
|
||||
|
||||
get isConnected(){ return this._isConnected; }
|
||||
|
||||
connect(){
|
||||
logger.debug("Connecting..");
|
||||
super.connect(this._port, this._host);
|
||||
}
|
||||
}
|
||||
|
||||
// Specify exports
|
||||
module.exports = {
|
||||
InfluxDbLineProtocolWriter
|
||||
};
|
@ -0,0 +1,22 @@
|
||||
const logger = require.main.require("./helper/logger.js")("InfluxPointToLineProtoStream");
|
||||
const { Transform } = require("stream");
|
||||
|
||||
/**
|
||||
* Get points and converts them to Line-protocol
|
||||
*/
|
||||
class InfluxPointToLineProtoStream extends Transform{
|
||||
constructor(){
|
||||
super({
|
||||
writableObjectMode: true
|
||||
});
|
||||
}
|
||||
|
||||
_transform(point, encoding, next){
|
||||
next(null, point.toLineProtocol() +"\n");
|
||||
}
|
||||
}
|
||||
|
||||
// Specify exports
|
||||
module.exports = {
|
||||
InfluxPointToLineProtoStream
|
||||
};
|
@ -0,0 +1,35 @@
|
||||
const logger = require.main.require("./helper/logger.js")("InfluxPointWriter");
|
||||
const { Writable } = require("stream");
|
||||
const { WriteApi } = require("@influxdata/influxdb-client");
|
||||
|
||||
/**
|
||||
* Get points and write them into influx
|
||||
*/
|
||||
class InfluxPointWriter extends Writable{
|
||||
/**
|
||||
*
|
||||
* @param {WriteApi} writeApi WriteAPI from InfluxDB instance
|
||||
*/
|
||||
constructor(writeApi){
|
||||
super({
|
||||
objectMode: true
|
||||
});
|
||||
this._api = writeApi;
|
||||
}
|
||||
|
||||
_write(point, encoding, next){
|
||||
this._api.writePoint(point);
|
||||
next();
|
||||
}
|
||||
|
||||
_final(next){ // Writable-streams call _final (not _flush) when the stream is ended
|
||||
this._api.flush(true)
|
||||
.catch((err) => { next(new Error(`WriteApi rejected promise for flush: ${err}`)); })
|
||||
.then(next);
|
||||
}
|
||||
}
|
||||
|
||||
// Specify exports
|
||||
module.exports = {
|
||||
InfluxPointWriter
|
||||
};
|
@ -0,0 +1,89 @@
|
||||
const logger = require.main.require("./helper/logger.js")("PacketInfluxPointFactory");
|
||||
const { Transform } = require("stream");
|
||||
const {Point} = require("@influxdata/influxdb-client");
|
||||
|
||||
/** Keys to always use as tags */
|
||||
const TAG_LIST = [
|
||||
"srcMac",
|
||||
"dstMac",
|
||||
"bssid",
|
||||
"frequency",
|
||||
"flags",
|
||||
"packetType",
|
||||
];
|
||||
|
||||
/** Measurement-name and corresponding field-key */
|
||||
const MEASUREMENT_MAP = new Map([
|
||||
["rfmon_signal_dbm", "signal"],
|
||||
["rfmon_payloadsize_bytes", "payloadSize"],
|
||||
["rfmon_datarate_bytes", "dataRate"],
|
||||
["rfmon_ssid_names", "ssid"],
|
||||
["rfmon_authenticationtype_info", "authenticationType"],
|
||||
["rfmon_associationsuccess_bools", "associationIsSuccessful"],
|
||||
["rfmon_disassociationreason_info", "disassociationReason"],
|
||||
["rfmon_handshakestage_info", "handshakeStage"],
|
||||
]);
|
||||
|
||||
|
||||
/**
|
||||
* Get packets and convert them into influx-points
|
||||
*/
|
||||
class PacketInfluxPointFactory extends Transform{
|
||||
constructor(){
|
||||
super({
|
||||
readableObjectMode: true,
|
||||
writableObjectMode: true
|
||||
});
|
||||
}
|
||||
|
||||
_transform(packet, encoding, next){
|
||||
// Create measurements
|
||||
MEASUREMENT_MAP.forEach((objKey, measurement) => {
|
||||
if(packet[objKey] == null) return;
|
||||
|
||||
let point = new Point(measurement); // Create point
|
||||
|
||||
// Set tags
|
||||
TAG_LIST.filter(tag => Object.keys(packet).includes(tag)) // Filter tags available on object
|
||||
.filter(tag => packet[tag] != null) // Filter tags not falsy on object
|
||||
.forEach(tag => {
|
||||
tagObjectRecursively(point, tag, packet[tag]);
|
||||
});
|
||||
|
||||
point.setField("value", packet[objKey]); // Set field
|
||||
|
||||
this.push(point); // Push point into stream
|
||||
});
|
||||
|
||||
next(); // Get next packet
|
||||
}
|
||||
}
|
||||
|
||||
function tagObjectRecursively(point, tag, field, suffix = ""){
|
||||
if(typeof(field) == "object"){
|
||||
// TODO: Convert boolean-arrays like "packet.flags" to key: value
|
||||
Object.entries(field).map(([key, value]) => {
|
||||
tagObjectRecursively(point, tag, value, `_${key}${suffix}`);
|
||||
});
|
||||
}
|
||||
else {
|
||||
const name = (tag+suffix).toLowerCase();
|
||||
point.tag(name, field);
|
||||
}
|
||||
}
|
||||
|
||||
/** Mapping for type -> field-method */
|
||||
const POINT_FIELD_TYPE = new Map([
|
||||
["boolean", function(key, value){ return this.booleanField(key, value); }],
|
||||
["number", function(key, value){ return this.intField(key, value); }],
|
||||
["string", function(key, value){ return this.stringField(key, value); }],
|
||||
]);
|
||||
Point.prototype.setField = function(key, value){
|
||||
let setField = POINT_FIELD_TYPE.get(typeof value);
|
||||
return setField.apply(this, [key, value]);
|
||||
};
|
||||
|
||||
// Specify exports
|
||||
module.exports = {
|
||||
PacketInfluxPointFactory
|
||||
};
|
@ -0,0 +1,160 @@
|
||||
const logger = require.main.require("./helper/logger.js")("PacketStreamFactory");
|
||||
const { Transform } = require("stream");
|
||||
const { DateTime } = require("luxon");
|
||||
const { PacketType, FlagType, Packet, PacketWithSSID, BeaconPacket, ProbeRequestPacket, ProbeResponsePacket, AuthenticationPacket, AuthenticationType, AssociationResponsePacket, DisassociationPacket, HandshakePacket, HandshakeStage } = require.main.require("./dto/Packet.js");
|
||||
const hexConv = require.main.require("./helper/hexConverter.js");
|
||||
const wifiStateAnalyser = require.main.require("./helper/wifiStateAnalyzer.js");
|
||||
|
||||
const PACKET_TYPE_MAP = {
|
||||
"Beacon": PacketType.Beacon,
|
||||
"Probe Request": PacketType.ProbeRequest,
|
||||
"Probe Response": PacketType.ProbeResponse,
|
||||
"Data": PacketType.Data,
|
||||
"Request-To-Send": PacketType.RequestToSend,
|
||||
"Clear-To-Send": PacketType.ClearToSend,
|
||||
"Acknowledgment": PacketType.Acknowledgment,
|
||||
"BA": PacketType.BlockAcknowledgment,
|
||||
"Authentication": PacketType.Authentication,
|
||||
"Assoc Request": PacketType.AssociationRequest,
|
||||
"Assoc Response": PacketType.AssociationResponse,
|
||||
"Disassociation:": PacketType.Disassociation,
|
||||
"EAPOL": PacketType.Handshake,
|
||||
};
|
||||
const PACKET_TYPES_REGEX = Object.keys(PACKET_TYPE_MAP).join("|");
|
||||
|
||||
const AUTHENTICATION_TYPE_MAP = {
|
||||
"(Open System)-1": AuthenticationType.OpenSystem_1,
|
||||
"(Open System)-2": AuthenticationType.OpenSystem_2,
|
||||
};
|
||||
|
||||
const FLAG_TYPE_MAP = {
|
||||
"Retry": FlagType.Retry,
|
||||
"Pwr Mgmt": FlagType.PwrMgt,
|
||||
"More Data": FlagType.MoreData,
|
||||
"Protected": FlagType.Protected,
|
||||
};
|
||||
const FLAG_TYPE_MAPS_REGEX = Object.keys(FLAG_TYPE_MAP).join("|");
|
||||
|
||||
/**
|
||||
* Read data from text-blocks and convert them to Packet
|
||||
*/
|
||||
class PacketStreamFactory extends Transform{
|
||||
constructor(){
|
||||
super({
|
||||
readableObjectMode: true,
|
||||
writableObjectMode: true
|
||||
});
|
||||
}
|
||||
|
||||
_transform(chunk, encoding, next){
|
||||
if(!chunk){
|
||||
const err = "Chunk was invalid!";
|
||||
logger.error(err);
|
||||
next(err);
|
||||
return;
|
||||
}
|
||||
|
||||
let packet = new Packet();
|
||||
|
||||
const lines = chunk.split("\n");
|
||||
const header = lines.splice(0, 1)[0]; // Grab first line, "lines" is now the payload
|
||||
packet = this._handleHeader(packet, header);
|
||||
packet = this._handlePayload(packet, lines);
|
||||
|
||||
next(null, packet); // Get next chunk
|
||||
}
|
||||
|
||||
_handleHeader(packet, data){
|
||||
// Convert time to epoch-micros Unfortunately luxon doesnt use micros, but millis as smallest time-unit requiring some "hacks"
|
||||
packet.timestampMicros = DateTime.fromISO(data.slice(0, 12)).toSeconds() + data.slice(12, 15)/1000000;
|
||||
|
||||
// Find flags
|
||||
data.match(new RegExp("(?<=^|\\s)("+ FLAG_TYPE_MAPS_REGEX +")(?=$|\\s)", "ig"))
    ?.forEach(match => packet.flags[FLAG_TYPE_MAP[match]] = true); // Set them to true in flags
|
||||
|
||||
packet.dataRate = Number(data.match(/(?<=^|\s)\d+(\.\d+)?(?=\sMb\/?s($|\s))/i)?.[0]) || null;
|
||||
packet.frequency = Number(data.match(/(?<=^|\s)\d{4}(?=\sMHz($|\s))/i)?.[0]) || null;
|
||||
|
||||
packet.durationMicros = Number(data.match(/(?<=^|\s)\d{1,4}(?=us($|\s))/i)?.[0]) || null;
|
||||
|
||||
packet.signal = Number(data.match(/(?<=^|\s)-\d{2,3}(?=dBm\sSignal($|\s))/i)?.[0]) || null;
|
||||
|
||||
let packetTypeStr = data.match(new RegExp("(?<=^|\\s)("+ PACKET_TYPES_REGEX +")(?=$|\\s)", "i"))?.[0];
|
||||
if(packetTypeStr)
|
||||
packet.packetType = PACKET_TYPE_MAP[packetTypeStr];
|
||||
else if(data.match(/(SA|TA|DA|RA|BSSID):.{17}\s*$/i)){
|
||||
packet.packetType = PacketType.NoData;
|
||||
}
|
||||
else {
|
||||
packet.packetType = PacketType.Unknown;
|
||||
}
|
||||
|
||||
packet.srcMac = data.match(/(?<=(^|\s)(SA|TA):).{17}(?=$|\s)/i)?.[0] ?? null;
|
||||
|
||||
packet.dstMac = data.match(/(?<=(^|\s)(DA|RA):).{17}(?=$|\s)/i)?.[0] ?? null;
|
||||
|
||||
packet.bssid = data.match(/(?<=(^|\s)BSSID:).{17}(?=$|\s)/i)?.[0] ?? null;
|
||||
|
||||
// Cover special cases with more data
|
||||
let newPacket;
|
||||
switch(packet.packetType){
|
||||
case PacketType.Beacon:
|
||||
case PacketType.ProbeRequest:
|
||||
case PacketType.ProbeResponse:
|
||||
case PacketType.AssociationRequest:
|
||||
newPacket = new PacketWithSSID();
|
||||
newPacket.ssid = data.match(new RegExp("(?<=(^|\\s)"+ packetTypeStr +"\\s\\().{0,32}(?=\\)($|\\s))", "i"))?.[0] ?? null;
|
||||
break;
|
||||
|
||||
case PacketType.Authentication:
|
||||
newPacket = new AuthenticationPacket();
|
||||
newPacket.authenticationType = AUTHENTICATION_TYPE_MAP[data.match(/(?<=(^|\s)Authentication\s).{3,}(?=:(\s|$))/i)[0]] ?? AuthenticationType.Unknown;
|
||||
break;
|
||||
|
||||
case PacketType.AssociationResponse:
|
||||
newPacket = new AssociationResponsePacket();
|
||||
newPacket.associationIsSuccessful = data.match(/(?<=(^|\s)Assoc\sResponse\s.{0,30})Successful(?=\s|$)/i) ? true : false;
|
||||
break;
|
||||
|
||||
case PacketType.Disassociation:
|
||||
newPacket = new DisassociationPacket();
|
||||
newPacket.disassociationReason = data.match(/(?<=(^|\s)Disassociation:\s).*$/i)?.[0] ?? null;
|
||||
break;
|
||||
}
|
||||
if(newPacket) packet = Object.assign(newPacket, packet); // Use new, more specific, packet and copy old data over
|
||||
|
||||
return packet;
|
||||
}
|
||||
|
||||
_handlePayload(packet, data){
|
||||
data = data.join("");
|
||||
|
||||
// Get payload-Hex-Data. If there is no data: empty
|
||||
packet.payloadData = hexConv.hexToBytes(data.match(/(?<=\s)([A-F0-9]{1,4}(?=\s))/igm)?.join("") ?? "");
|
||||
packet.payloadData.splice(packet.payloadData.length-4, 4); // Remove FrameCheck sequence
|
||||
|
||||
// Cover special cases with more data
|
||||
let newPacket;
|
||||
switch(packet.packetType){
|
||||
case PacketType.Handshake: {
|
||||
newPacket = new HandshakePacket();
|
||||
|
||||
// Read key-information
|
||||
const keyInfoRaw = (packet.payloadData[0x5]<<0x8) + packet.payloadData[0x6];
|
||||
const keyInfo = wifiStateAnalyser.keyInfoFromRaw(keyInfoRaw); // Convert
|
||||
|
||||
newPacket.handshakeStage = wifiStateAnalyser.handshakeStageFromKeyInfo(keyInfo); // Get stage
|
||||
break;
|
||||
}
|
||||
}
|
||||
if(newPacket) packet = Object.assign(newPacket, packet);
|
||||
|
||||
return packet;
|
||||
}
|
||||
}
|
||||
|
||||
// Specify exports
|
||||
module.exports = {
|
||||
PacketStreamFactory
|
||||
};
|
@ -0,0 +1,67 @@
|
||||
const logger = require.main.require("./helper/logger.js")("RegexBlockStream");
|
||||
const { Transform } = require("stream");
|
||||
|
||||
/**
|
||||
* Matches whole blocks as regex and passes them on
|
||||
*/
|
||||
class RegexBlockStream extends Transform{
|
||||
matcher;
|
||||
withholdLastBlock;
|
||||
matchAllOnFlush;
|
||||
|
||||
/**
|
||||
* @param {RegExp} matcher Block-match
|
||||
* @param {boolean} withholdLastBlock When true, the last matched block will not be submitted, to prevent submitting incomplete blocks.
|
||||
* @param {boolean} matchAllOnFlush (Only in combination with withholdLastBlock) When enabled, the buffer will be matched one last time on _flush (stream deconstruction) and any remaining, possibly incomplete, blocks will be written
|
||||
* @remarks WARNING: It should match a clean-block (including e.g. newline)! Otherwise buffer will get dirty and use more and more resources.
|
||||
*/
|
||||
constructor(matcher, withholdLastBlock = true, matchAllOnFlush = false){
|
||||
super({
|
||||
readableObjectMode: true,
|
||||
writableObjectMode: true
|
||||
});
|
||||
|
||||
this.matcher = matcher;
|
||||
this.withholdLastBlock = withholdLastBlock;
|
||||
this.matchAllOnFlush = matchAllOnFlush;
|
||||
}
|
||||
|
||||
_transform(chunk, encoding, next){
|
||||
chunk = this.readableBuffer.length? this.readableBuffer.join("") + chunk: chunk; // Add previous buffer to current chunk
|
||||
this.readableBuffer.length && this.readableBuffer.clear(); // Clear buffer once we read it
|
||||
|
||||
let matches = chunk.match(this.matcher); // Match
|
||||
if(matches){
|
||||
if(this.withholdLastBlock) matches.pop(); // Remove last if we want to withhold it
|
||||
chunk = this._writeMatches(matches, chunk);
|
||||
}
|
||||
|
||||
this.readableBuffer.push(chunk); // Store remaining data in buffer
|
||||
next(); // Get next chunk
|
||||
}
|
||||
|
||||
_writeMatches(matches, chunk = null){
|
||||
if(matches){
|
||||
matches.forEach((match) => {
|
||||
this.push(match); // Write match to stream
|
||||
if(chunk) chunk = chunk.replace(match, ""); // Remove match from chunks
|
||||
});
|
||||
}
|
||||
if(chunk) return chunk;
|
||||
}
|
||||
|
||||
_flush(next){
|
||||
if(this.matchAllOnFlush){ // When requested, we'll match one last time over the remaining buffer
|
||||
let chunk = this.readableBuffer.join("");
|
||||
let matches = chunk.match(this.matcher); // Match remaining buffer
|
||||
this._writeMatches(matches); // Write matches including last element
|
||||
}
|
||||
|
||||
next(); // Tell system we are done
|
||||
}
|
||||
}
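
// Example (hypothetical) usage: split verbose tcpdump-output into per-packet blocks
//   const blockStream = new RegexBlockStream(/^\d{2}:\d{2}:\d{2}.\d{6}.*(\n( {4,8}|\t\t?).*)+\n/gm);
//   process.stdin.setEncoding("utf8").pipe(blockStream).on("data", (block) => console.log(block));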
|
||||
|
||||
// Specify exports
|
||||
module.exports = {
|
||||
RegexBlockStream
|
||||
};
|