52 Commits

Author SHA1 Message Date
159f3481d8 Revert "Added InfluxDB-client library"
This reverts commit 739f27c0b8.
2021-11-23 13:45:04 +01:00
739f27c0b8 Added InfluxDB-client library
Version 3.9.0
https://github.com/tobiasschuerg/InfluxDB-Client-for-Arduino
2021-11-23 12:51:13 +01:00
1c5a0638ed Calling packetHandler 2021-11-22 21:30:22 +01:00
69317e51f6 Changed arg to const ref 2021-11-22 21:30:14 +01:00
4f1cb82f46 Remove unused include 2021-11-22 21:27:26 +01:00
b7b7f6edbb Fixed exception when signal-data is missing 2021-11-22 21:27:07 +01:00
8d67321d4b Implemented payload-analyzing (size) 2021-11-22 21:26:47 +01:00
61f3c432c9 Moved head-analyzing to own function 2021-11-22 21:26:17 +01:00
52119fe3cd Added handling of special packet-types with their respective data 2021-11-22 21:25:29 +01:00
f938f8dd53 Finished packet-type detection 2021-11-22 21:23:34 +01:00
5b8a330e69 Added package-type 2021-11-22 21:19:18 +01:00
35e71ef44e Fixed default value 2021-11-22 21:19:10 +01:00
8ef6318a56 Added method to convert hex-char to hex-value 2021-11-22 18:00:12 +01:00
9301c4e0fd Added copy-constructor for base and made inheritance public 2021-11-22 17:00:37 +01:00
4d8597b1d6 Made pass const 2021-11-22 16:15:52 +01:00
3c379a1a80 Rename split.hpp to string-helper.hpp 2021-11-22 16:15:20 +01:00
6710442067 Fixed missing imports 2021-11-22 16:14:37 +01:00
e191085cbd Renamed ssid to lowercase 2021-11-22 15:39:14 +01:00
f834ec7134 Fixed fields not being public 2021-11-22 15:39:02 +01:00
035491668a Fixed wrong key used 2021-11-22 15:38:30 +01:00
16028daf10 Auto-Remove newline-char 2021-11-22 15:23:53 +01:00
1ed9c84802 Added array to retrieve package-type names from enum 2021-11-22 15:23:13 +01:00
a0ae919614 Removed asyncHandler and creating new threads now once buffer-vector is complete 2021-11-22 14:56:18 +01:00
f1e8069afd Added specialisation-packets 2021-11-22 13:52:03 +01:00
bbd9623256 Added helper for vector-stats 2021-11-22 13:29:39 +01:00
5ea6d1f975 Implemented usage of packetType-enum 2021-11-22 13:29:26 +01:00
c87a95b167 Changed type to enum-packet-type 2021-11-22 13:29:02 +01:00
ca1b59e4c6 Choose and set addresses to packet 2021-11-22 12:43:16 +01:00
5ecc7ed383 Added bssid to packet 2021-11-22 12:39:51 +01:00
b10a0de9e6 Grabbing addresses if available from header 2021-11-22 12:37:44 +01:00
58a535401a Grabbing first data from header 2021-11-22 12:37:28 +01:00
c37a214808 Added type-string 2021-11-22 12:24:07 +01:00
9ce2da6f61 Renamed size to payloadSize 2021-11-22 12:23:56 +01:00
177181668d Renamed linkSpeed to dataRate 2021-11-22 12:23:43 +01:00
9755e86a98 Change packet-macs to string 2021-11-22 11:53:31 +01:00
c5bc468ab9 Added documentation to find 2021-11-22 11:31:13 +01:00
ee3505090d Added helper-file for finding items in std::vector<std::string> 2021-11-22 11:27:48 +01:00
cb33dd0f80 Moved timestampConvert to own helper-file 2021-11-22 11:26:38 +01:00
8a62d2899b Moved split to own helper-file 2021-11-22 11:24:10 +01:00
6de2063288 Implemented textTimestamp converter to timestampMicros 2021-11-22 10:35:08 +01:00
5d3e861fc9 Gathering first data from textPacket 2021-11-22 10:34:16 +01:00
9c18c50640 Added TextHandler to exec-call 2021-11-22 10:33:24 +01:00
8e5cb85c1a Fixed passing of handlerFunction-pointer 2021-11-22 10:32:57 +01:00
e7dcaf99a0 Created empty PacketHandler 2021-11-22 10:32:29 +01:00
36f252437d Changed from only strings to vector-strings 2021-11-22 10:32:04 +01:00
5ac832623d Created empty TextPacketHandler 2021-11-22 10:31:41 +01:00
57b8ae868c Changed packet-timestamp to uint64_t for epoch-timestamp in micros 2021-11-22 10:28:41 +01:00
e66f6c1140 Fixed strcopy being wrong way around 2021-11-22 10:28:23 +01:00
f3b9eb25a7 Added missing pthread-library 2021-11-22 09:10:59 +01:00
35edc6ca6e Created basic chained handlers 2021-11-22 09:08:25 +01:00
b8b2bb7202 Added basic packet-structure 2021-11-22 09:07:49 +01:00
3f7c92ba7e First implementation 2021-11-21 14:29:11 +01:00
31 changed files with 532 additions and 1112 deletions

View File

@@ -1,3 +0,0 @@
node_modules
npm-debug.log
.*

13
CMakeLists.txt Normal file
View File

@@ -0,0 +1,13 @@
cmake_minimum_required(VERSION 3.0.0)
project(rfmon-to-influx VERSION 0.1.0)

# Enable CTest-based testing (adds the BUILD_TESTING option).
include(CTest)
enable_testing()

# Single executable; all other project sources are header-only and included from main.cpp.
add_executable(rfmon-to-influx main.cpp)
# std::async/std::thread require linking pthread on Linux.
target_link_libraries(rfmon-to-influx pthread)

# CPack packaging metadata mirrors the project settings.
set(CPACK_PROJECT_NAME ${PROJECT_NAME})
set(CPACK_PROJECT_VERSION ${PROJECT_VERSION})
include(CPack)

18
DTO/beaconPacket.hpp Normal file
View File

@@ -0,0 +1,18 @@
#ifndef FDDB997A_BCD3_4056_BFEA_9FF6A548DACF
#define FDDB997A_BCD3_4056_BFEA_9FF6A548DACF

#include "./packet.hpp"
#include <string>

/// Specialization of Packet for beacon frames, additionally
/// carrying the SSID found in the frame.
class BeaconPacket : public Packet {
public:
    /// Creates an empty beacon packet.
    BeaconPacket() {}

    /// Creates a beacon packet by copying all common header
    /// fields from an already-parsed generic Packet.
    BeaconPacket(const Packet &packet) : Packet(packet) {}

    /// SSID carried by the beacon frame.
    std::string ssid;
};

#endif /* FDDB997A_BCD3_4056_BFEA_9FF6A548DACF */

47
DTO/packet.hpp Normal file
View File

@@ -0,0 +1,47 @@
#ifndef C42FA9F6_8CF3_453F_8FA0_918E543DCD59
#define C42FA9F6_8CF3_453F_8FA0_918E543DCD59

#include <array>    // std::array — was missing; PACKET_TYPE_NAMES did not compile standalone
#include <cstdint>  // uint64_t — was missing; only pulled in transitively before
#include <string>

/// All 802.11 frame types this tool distinguishes.
enum PacketType {
    Beacon,
    ProbeRequest,
    ProbeResponse,
    Data,
    RequestToSend,
    ClearToSend,
    Acknowledgment,
    BlockAcknowledgment,
    NoData,
    Unknown
};

/// Human-readable names, indexable by PacketType enum value.
const std::array<const char*, 10> PACKET_TYPE_NAMES({{
    "Beacon",
    "Probe Request",
    "Probe Response",
    "Data",
    "Request to send",
    "Clear to send",
    "Acknowledgment",
    "BlockAcknowledgment",
    "NoData",
    "Unknown"
}});

/// Parsed representation of one captured frame (common header fields).
struct Packet {
    uint64_t timestampMicros;   // Capture time as epoch timestamp in microseconds
    std::string srcMac;         // Source address (SA, falls back to TA); may be empty
    std::string dstMac;         // Destination address (DA, falls back to RA); may be empty
    std::string bssid;          // BSSID when present in the header; may be empty
    unsigned int payloadSize;   // Payload length in bytes (derived from the hex dump)
    char signal;                // Signal strength (parsed from "... signal"-field; -100 when missing)
    unsigned int frequency;     // Frequency in MHz
    unsigned char dataRate;     // Data rate in Mb/s
    PacketType type;            // Detected frame type
};

#endif /* C42FA9F6_8CF3_453F_8FA0_918E543DCD59 */

View File

@@ -0,0 +1,18 @@
#ifndef CD2BF199_8153_4F10_A85C_50883FAD66A8
#define CD2BF199_8153_4F10_A85C_50883FAD66A8

#include "./packet.hpp"
#include <string>

/// Specialization of Packet for probe-request frames, additionally
/// carrying the SSID that was asked for.
class ProbeRequestPacket : public Packet {
public:
    /// Creates an empty probe-request packet.
    ProbeRequestPacket() {}

    /// Creates a probe-request packet by copying all common header
    /// fields from an already-parsed generic Packet.
    ProbeRequestPacket(const Packet &packet) : Packet(packet) {}

    /// SSID the station probed for.
    std::string requestSsid;
};

#endif /* CD2BF199_8153_4F10_A85C_50883FAD66A8 */

View File

@@ -0,0 +1,18 @@
#ifndef B199B4B3_BE27_4F0C_8DBE_5E78580AB1A9
#define B199B4B3_BE27_4F0C_8DBE_5E78580AB1A9
#include "./packet.hpp"
#include <string>
class ProbeResponsePacket : public Packet{
public:
ProbeResponsePacket()
{}
ProbeResponsePacket(const Packet &packet)
: Packet(packet)
{}
std::string responseSsid;
};
#endif /* B199B4B3_BE27_4F0C_8DBE_5E78580AB1A9 */

View File

@@ -1,19 +0,0 @@
# Container image for the Node.js implementation of rfmon-to-influx.
FROM node:16-alpine

# Create app directory
WORKDIR /usr/src/app

# Install app dependencies
COPY package*.json ./
RUN npm install
# remove development dependencies
RUN npm prune --production

# Install required apk-packages & delete cache
# (tcpdump is required at runtime)
RUN apk update && apk add tcpdump && rm -rf /var/cache/apk/*

# Bundle app source
COPY ./src/ .

CMD ["npm", "run", "start"]

28
handler/bufHandler.hpp Normal file
View File

@@ -0,0 +1,28 @@
#ifndef C251BA62_6D80_4033_86B6_61F184E6F250
#define C251BA62_6D80_4033_86B6_61F184E6F250
#include <future>
#include <string>
#include "textPacketHandler.hpp"
using namespace std::string_literals;
std::vector<std::string> buffer;
void bufHandler(const char *buf){
std::string line = buf;
// Remove last char which is \n
line = line.substr(0, line.size()-1);
// When first char of buf has text (no tab), we got a new packet
if(buf[0] != '\t'){
// Submit the just-read text-packet in a new thread
if(buffer.size() != 0) {
(void)std::async(std::launch::async, textPacketHandler, buffer);
}
buffer = {line};
}
else
buffer.push_back(line); // Append part-packet
}
#endif /* C251BA62_6D80_4033_86B6_61F184E6F250 */

10
handler/packetHandler.hpp Normal file
View File

@@ -0,0 +1,10 @@
#ifndef EA8E466A_DAAA_4747_9CEE_65B77A4EF694
#define EA8E466A_DAAA_4747_9CEE_65B77A4EF694

#include "../DTO/packet.hpp"

/// Final stage of the handler-chain: receives every fully-parsed packet.
/// Currently a stub — presumably the InfluxDB write-out will land here
/// (see project name); nothing is done with the packet yet.
/// @param packet the parsed packet (read-only)
void packetHandler(const Packet &packet){
}
#endif /* EA8E466A_DAAA_4747_9CEE_65B77A4EF694 */

View File

@@ -0,0 +1,185 @@
#ifndef EE781A91_6D07_47AC_B3C4_F99E29F3731F
#define EE781A91_6D07_47AC_B3C4_F99E29F3731F
#include <string>
#include "../DTO/packet.hpp"
#include "../DTO/beaconPacket.hpp"
#include "../DTO/probeRequestPacket.hpp"
#include "../DTO/probeResponsePacket.hpp"
#include <vector>
#include <locale>
#include <iomanip>
#include "../helper/string-helper.hpp"
#include "../helper/timestampConvert.hpp"
#include "../helper/find.hpp"
#include "../helper/vector-stats.hpp"
#include <unordered_map>
#include "./packetHandler.hpp"
using namespace std::string_literals;
// Maps the type-token printed by tcpdump to our PacketType.
// Tokens not listed here fall through as PacketType::Unknown.
const std::unordered_map<std::string, PacketType> PACKET_TYPE_MAP({
    {"Beacon", PacketType::Beacon},
    {"Probe Request", PacketType::ProbeRequest},
    {"Probe Response", PacketType::ProbeResponse},
    {"Data", PacketType::Data},
    {"Request-To-Send", PacketType::RequestToSend},
    {"Clear-To-Send", PacketType::ClearToSend},
    {"Acknowledgment", PacketType::Acknowledgment},
    {"BA", PacketType::BlockAcknowledgment}
});

// Forward declarations of the two parsing stages implemented below.
void parseHeader(Packet &packet, const std::vector<std::string> &textPacket);
void parsePayload(Packet &packet, const std::vector<std::string> &textPacket);

/// Parses one whole text-packet (header line + hex-dump lines) into a
/// Packet and forwards it to packetHandler.
/// @param textPacket all lines belonging to one captured frame
void textPacketHandler(const std::vector<std::string> textPacket){
    /// Here we have to parse the packet

    // Create empty packet
    Packet packet;
    parseHeader(packet, textPacket);
    parsePayload(packet, textPacket);

    packetHandler(packet);
}
/// Parses the first line of a text-packet (tcpdump header) into the
/// common Packet fields: timestamp, rate, frequency, signal, addresses, type.
/// @param packet     output packet (fields are overwritten)
/// @param textPacket all lines of one captured frame; only [0] is read here
void parseHeader(Packet &packet, const std::vector<std::string> &textPacket){
    const std::string textHeader = textPacket[0];
    const std::vector<std::string> headerData = split(textHeader, ' ');

    std::string textTimestamp = headerData[0];
    // Bugfix: the timestamp was computed but never stored into the packet.
    packet.timestampMicros = convertStringToTimestampMicros(textTimestamp);

    // Find remaining data based on keys in/around fields.
    // Bugfix: indexing with -1 was UB when a field was missing; default to 0 instead.
    int linkSpeedIndex = findIs(headerData, "Mb/s", 1, 1);
    packet.dataRate = (linkSpeedIndex != -1) ? std::stoi(headerData[linkSpeedIndex]) : 0;

    int frequencyIndex = findIs(headerData, "MHz", 1, 1);
    packet.frequency = (frequencyIndex != -1) ? std::stoi(headerData[frequencyIndex]) : 0;

    int signalIndex = findIs(headerData, "signal", 1, 1);
    if(signalIndex != -1){
        // e.g. "-72dBm" -> "-72"
        std::string signalText = headerData[signalIndex].substr(0, 3);
        packet.signal = std::stoi(signalText);
    }
    else {
        fprintf(stderr, "Missing signal-data!\n");
        packet.signal = -100;
    }

    // Addresses seem complicated at first, but just have many fields which might be available.
    // SA and DA are src- and dst-Addresses
    // BSSID is the used bssid
    // TA and RA are transmitter- and receiver-address which are used exclusively for RTS and CTS in tcpdump
    // BEWARE: SA, DA, BSSID, TA and RA can be used together, but tcpdump doesnt display all of them!
    // DA might also not be a valid MAC-address, but Broadcast or an encoded IPv4/6 Multicast-address
    int saIndex = findContains(headerData, "SA:", 1);
    std::string sAddr = (saIndex != -1) ? headerData[saIndex].substr("SA:"s.length()) : "";
    int daIndex = findContains(headerData, "DA:", 1);
    std::string dAddr = (daIndex != -1) ? headerData[daIndex].substr("DA:"s.length()) : "";
    int bssidIndex = findContains(headerData, "BSSID:", 1);
    std::string bssidAddr = (bssidIndex != -1) ? headerData[bssidIndex].substr("BSSID:"s.length()) : "";
    int taIndex = findContains(headerData, "TA:", 1);
    std::string tAddr = (taIndex != -1) ? headerData[taIndex].substr("TA:"s.length()) : "";
    int raIndex = findContains(headerData, "RA:", 1);
    std::string rAddr = (raIndex != -1) ? headerData[raIndex].substr("RA:"s.length()) : "";

    // Depending of when which address-fields are actually set, choose which ones to use
    if(sAddr == "" && tAddr != "") sAddr = tAddr;
    if(dAddr == "" && rAddr != "") dAddr = rAddr;

    // Set addresses to packet
    packet.srcMac = sAddr;
    packet.dstMac = dAddr;
    packet.bssid = bssidAddr;

    // Identify type of packet
    // -> comes right after the addresses
    int typeIndex = max(std::vector({saIndex, daIndex, bssidIndex, taIndex, raIndex}))+1;
    PacketType type = PacketType::Unknown;
    if(typeIndex >= static_cast<int>(headerData.size())) type = PacketType::NoData;
    else {
        std::string textType = headerData[typeIndex];
        // Check for incomplete types ("Probe Request"/"Probe Response" span two tokens);
        // guard against "Probe" being the last token.
        if(textType == "Probe" && typeIndex+1 < static_cast<int>(headerData.size())){
            textType += " "+ headerData[typeIndex+1];
        }
        // If type is in map, use map-value, otherwise keep default
        if(PACKET_TYPE_MAP.find(textType) != PACKET_TYPE_MAP.end())
            type = PACKET_TYPE_MAP.at(textType);
        if(type == PacketType::Unknown){
            fprintf(stderr, "Unknown package-type: %s\n", textType.c_str());
        }
    }
    packet.type = type;

    // Read data for specializations
    // NOTE(review): the blocks below currently have NO effect — assigning the
    // derived object back into the Packet lvalue slices it, and the ssid is
    // written to the local copy *after* it was copied. Specialization data
    // therefore never reaches packetHandler. Fixing this needs a different
    // interface (e.g. polymorphic/owning packets); left as-is to preserve
    // the current observable behavior.
    if(type == PacketType::Beacon){
        // Create BeaconPacket from packet
        BeaconPacket beaconPacket = BeaconPacket(packet);
        packet = beaconPacket; // Overwrite packet
        // Find ssid
        int start = textHeader.find('(')+1;
        std::string ssid = textHeader.substr(start, textHeader.find(')')-start);
        // Write to packet
        beaconPacket.ssid = ssid;
    }
    else if (type == PacketType::ProbeRequest){
        // Create ProbeRequestPacket from packet
        ProbeRequestPacket probeRequestPacket = ProbeRequestPacket(packet);
        packet = probeRequestPacket; // Overwrite packet
        // Find probe-request
        int start = textHeader.find('(')+1;
        std::string requestSsid = textHeader.substr(start, textHeader.find(')')-start);
        // Write to packet
        probeRequestPacket.requestSsid = requestSsid;
    }
    else if (type == PacketType::ProbeResponse){
        // Create ProbeResponsePacket from packet
        ProbeResponsePacket probeResponsePacket = ProbeResponsePacket(packet);
        packet = probeResponsePacket; // Overwrite packet
        // Find probe-request
        int start = textHeader.find('(')+1;
        std::string responseSsid = textHeader.substr(start, textHeader.find(')')-start);
        // Write to packet
        probeResponsePacket.responseSsid = responseSsid;
    }
}
/// Estimates the payload size in bytes from the hex-dump lines of a
/// text-packet and stores it in packet.payloadSize.
/// @param packet     output packet (only payloadSize is written)
/// @param textPacket all lines of one captured frame; [1..] are hex-dump lines
void parsePayload(Packet &packet, const std::vector<std::string> &textPacket){
    // Bugfix: a header-only packet made 16*(size-2) underflow below. Guard it.
    if(textPacket.size() < 2){
        packet.payloadSize = 0;
        return;
    }

    // Expect max of 16 byte per line of payload
    unsigned int payloadSize = 16*(textPacket.size()-1);

    // Inspect the last line to count how many bytes it actually holds.
    // Hex-dump layout: 10-char offset prefix, then 8 groups of 4 hex chars
    // (2 bytes) separated by one space -> byte f sits at 10 + trunc(f/2 * 5) (+2).
    const std::string &lastLine = textPacket.back();
    for(int f=0; f<8*2; ++f){
        unsigned int charPos = 10 + static_cast<unsigned int>(f/2.0*5);
        // Bugfix: also stop when the line is shorter than the expected position
        // (indexing past the end was UB before).
        if(charPos >= lastLine.size() || lastLine[charPos] == ' ') { // Space means no more data
            // Set size
            payloadSize = 16*(textPacket.size()-2)+f;
            break;
        }
    }
    packet.payloadSize = payloadSize;
}
#endif /* EE781A91_6D07_47AC_B3C4_F99E29F3731F */

31
helper/exec.hpp Normal file
View File

@@ -0,0 +1,31 @@
#ifndef B89BC3C5_AD59_4765_AA06_8110111D316F
#define B89BC3C5_AD59_4765_AA06_8110111D316F

#include <cstdio>
#include <stdexcept>
#include <sys/wait.h>   // WEXITSTATUS — was missing; only worked via transitive includes

/// @brief Executes given command and optionally sends each output line to handler
/// @param cmd is the command (run through the shell via popen)
/// @param handler is the handler(char*)-function, called once per read chunk/line
/// @return Exit-code from the command
/// @throws std::runtime_error when the pipe cannot be opened
int exec(const char* cmd, void (*handler)(const char*) = nullptr){
    const int buf_size = 512;
    char buf[buf_size];

    // Open execution-pipe
    FILE *pipe = popen(cmd, "r");
    if (!pipe) {
        throw std::runtime_error("popen() failed!");
    }

    try {
        // fgets reads up to \n or buf_size-1 chars, whichever comes first
        while (fgets(buf, buf_size, pipe) != nullptr) {
            // When a handler is specified, call it
            if(handler != nullptr) (*handler)(buf);
        }
    } catch (...) {
        pclose(pipe);   // Bugfix: don't leak the pipe when the handler throws
        throw;
    }

    // Close pipe and read exit-code
    return WEXITSTATUS(pclose(pipe));
}
#endif /* B89BC3C5_AD59_4765_AA06_8110111D316F */

53
helper/find.hpp Normal file
View File

@@ -0,0 +1,53 @@
#ifndef B6A9DEE0_30C6_4492_AB96_87D9C5C10E8B
#define B6A9DEE0_30C6_4492_AB96_87D9C5C10E8B
#include <string>
#include <vector>
/// @brief Internal function
/// Normalizes a search window: a zero/unset `end` defaults to the container
/// size, and a non-zero `offset` shifts `start` (negative offset) or shrinks
/// `end` (positive offset) so that data[i+offset] stays within bounds.
/// NOTE(review): the find-functions below recompute their loop bound from
/// data.size()-offset and do not read the adjusted `end` — confirm intent.
void prepare(const int &size, int &start, const int &offset, int &end){
    // Set missing fields
    if(!end) end = size;

    // Edit start/end according to offset
    if(offset < 0)
        start += offset;
    else if(offset > 0)
        end -= offset;
}
/// @brief Find str-index based on contains-content
/// (actually a PREFIX match: uses string::find()==0, i.e. "starts with")
/// @param data is the vector-string-data to search
/// @param strContains string the element must start with
/// @param start where to start searching
/// @param offset search offset to position (results in index being shifted by -offset)
/// @param end where to end searching (0 = whole container)
/// @return index of found element (shifted by -offset), or -1 when not found
int findContains(const std::vector<std::string> &data, const std::string &strContains, int start = 0, int offset = 0, int end = 0){
    // Normalize the window (same rules as prepare(), inlined here).
    if(!end) end = static_cast<int>(data.size());
    if(offset < 0)
        start += offset;
    else if(offset > 0)
        end -= offset;

    // Bugfix: the old `i < data.size()-offset` condition underflowed (size_t
    // wrap-around) when data.size() < offset and then indexed out of range.
    const int bound = static_cast<int>(data.size()) - offset;
    for(int i=start; i<bound; ++i){
        if(!data[i+offset].find(strContains))   // find()==0 -> element starts with strContains
            return i;
    }
    return -1;
}
/// @brief Find str-index based on exact-content
/// @param data is the vector-string-data to search
/// @param strIs string to find (exact match)
/// @param start where to start searching
/// @param offset search offset to position (results in index being shifted by -offset)
/// @param end where to end searching (0 = whole container)
/// @return index of found element (shifted by -offset), or -1 when not found
int findIs(const std::vector<std::string> &data, const std::string &strIs, int start = 0, int offset = 0, int end = 0){
    // Normalize the window (same rules as prepare(), inlined here).
    if(!end) end = static_cast<int>(data.size());
    if(offset < 0)
        start += offset;
    else if(offset > 0)
        end -= offset;

    // Bugfix: the old `i < data.size()-offset` condition underflowed (size_t
    // wrap-around) when data.size() < offset and then indexed out of range.
    const int bound = static_cast<int>(data.size()) - offset;
    for(int i=start; i<bound; ++i){
        if(data[i+offset] == strIs)
            return i;
    }
    return -1;
}
#endif /* B6A9DEE0_30C6_4492_AB96_87D9C5C10E8B */

34
helper/string-helper.hpp Normal file
View File

@@ -0,0 +1,34 @@
#ifndef F7CFE6A7_34BF_4E04_94CF_DB8374980631
#define F7CFE6A7_34BF_4E04_94CF_DB8374980631
#include <vector>
#include <string>
#include <sstream>
/// Splits a string at every occurrence of delimiter.
/// Consecutive delimiters yield empty tokens; an empty input yields an empty vector.
std::vector<std::string> split(const std::string& s, char delimiter)
{
    std::vector<std::string> parts;
    std::istringstream stream(s);
    for (std::string part; std::getline(stream, part, delimiter); )
        parts.push_back(part);
    return parts;
}
/// Converts a single hex digit ('0'-'9', 'A'-'F', 'a'-'f') to its value 0-15.
/// Any other character yields 0 (indistinguishable from '0').
char hex_char_to_int(const char &c) {
    if ('0' <= c && c <= '9')
        return c - '0';
    if ('A' <= c && c <= 'F')
        return 10 + c - 'A';
    if ('a' <= c && c <= 'f')
        return 10 + c - 'a';
    return 0;
}
#endif /* F7CFE6A7_34BF_4E04_94CF_DB8374980631 */

View File

@@ -0,0 +1,36 @@
#ifndef CC724CA7_8BB8_43B9_8A9A_54BD880A76AA
#define CC724CA7_8BB8_43B9_8A9A_54BD880A76AA

// These includes were all missing; the header only compiled via transitive includes.
#include <cstdint>
#include <ctime>
#include <iomanip>
#include <sstream>
#include <stdexcept>
#include <string>

/// Converts a tcpdump time-of-day string "HH:MM:SS.ffffff" into an epoch
/// timestamp in microseconds, assuming the time refers to the current local day.
/// @param textTimestamp e.g. "13:37:42.123456" (fraction optional)
/// @return epoch timestamp in microseconds
/// @throws std::runtime_error when the time cannot be parsed
uint64_t convertStringToTimestampMicros(std::string textTimestamp){
    std::tm t = {};
    std::istringstream ssTimestamp = std::istringstream(textTimestamp);
    if (ssTimestamp >> std::get_time(&t, "%H:%M:%S"))
    {
        // Get current time to fill in the date portion
        std::time_t curT = std::time(0);
        std::tm* curTime = std::localtime(&curT);

        // Set missing fields
        t.tm_mday = curTime->tm_mday;
        t.tm_mon = curTime->tm_mon;
        t.tm_year = curTime->tm_year;
        t.tm_isdst = curTime->tm_isdst; // Bugfix: was left 0 -> one hour off during DST
        t.tm_zone = curTime->tm_zone;   // NOTE: tm_zone is a glibc extension, not portable

        // Convert tm to time
        std::time_t time = std::mktime(&t);

        // Chars after "HH:MM:SS." are the microseconds.
        // Bugfix: guard against a missing fraction instead of letting
        // substr() throw std::out_of_range.
        int micros = 0;
        if(textTimestamp.size() > 9)
            micros = std::stoi(textTimestamp.substr(9, 6));

        // Calculate timestamp epoch in micros
        return static_cast<uint64_t>(time)*1000000 + micros;
    }
    else
    {
        throw std::runtime_error("Could not parse time: '"+ textTimestamp +"'");
    }
}
#endif /* CC724CA7_8BB8_43B9_8A9A_54BD880A76AA */

15
helper/vector-stats.hpp Normal file
View File

@@ -0,0 +1,15 @@
#ifndef C437A277_1F23_496D_9B69_A21D771ECA91
#define C437A277_1F23_496D_9B69_A21D771ECA91
#include <vector>
#include <limits.h>
int max(std::vector<int> vec){
int max = INT_MIN;
for(int i=0; i<vec.size(); ++i){
if(vec[i] > max) max = vec[i];
}
return max;
}
#endif /* C437A277_1F23_496D_9B69_A21D771ECA91 */

26
main.cpp Normal file
View File

@@ -0,0 +1,26 @@
#include <stdio.h>
#include <string>
#include "./helper/exec.hpp"
#include "./handler/bufHandler.hpp"
// Base command; the capture interface name is appended as the last argument.
const std::string tcpdump_baseCmd = "tcpdump -vvv -e -n -X -s0 -i ";

/// Entry point: builds the tcpdump command for the interface given as the
/// single CLI argument and streams its output through bufHandler.
/// Exits with 1 on missing argument or non-zero tcpdump exit-code.
int main(int argc, char *args[]){
    // Exactly one argument (the capture interface) is required.
    if(argc != 2){
        fprintf(stderr, "Missing interface\n");
        exit(1);
    }
    const std::string tcpdump_cmd = tcpdump_baseCmd + args[1];

    const int exitCode = exec(tcpdump_cmd.c_str(), &bufHandler);
    if(exitCode){
        fprintf(stderr, "\ntcpdump exited with non-zero ExitCode: %d\n Something went wrong! Check tcpdump-output for more information.\n", exitCode);
        exit(1);
    }
    return 0;
}

266
package-lock.json generated
View File

@@ -1,266 +0,0 @@
{
"name": "rfmon-to-influx",
"version": "1.0.0",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "rfmon-to-influx",
"version": "1.0.0",
"license": "AGPL-3.0",
"dependencies": {
"@influxdata/influxdb-client": "^1.20.0",
"@influxdata/influxdb-client-apis": "^1.20.0",
"log4js": "^6.3.0",
"luxon": "^2.1.1",
"string-argv": "^0.3.1"
}
},
"node_modules/@influxdata/influxdb-client": {
"version": "1.20.0",
"resolved": "https://registry.npmjs.org/@influxdata/influxdb-client/-/influxdb-client-1.20.0.tgz",
"integrity": "sha512-jaKSI63hmQ5VSkJrFJkYIXaKlhoF+mGd4HmOf7v/X7pmEi69ReHp922Wyx6/OeCrpndRMbsadk+XmGNdd43cFw=="
},
"node_modules/@influxdata/influxdb-client-apis": {
"version": "1.20.0",
"resolved": "https://registry.npmjs.org/@influxdata/influxdb-client-apis/-/influxdb-client-apis-1.20.0.tgz",
"integrity": "sha512-KMTmXH4rbpS+NWGpqDjxcKTyan2rbiT2IM5AdRElKhH2sHbH96xwLgziaxeC+OCJLeNAdehJgae3I8WiZjbwdg==",
"peerDependencies": {
"@influxdata/influxdb-client": "*"
}
},
"node_modules/date-format": {
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/date-format/-/date-format-3.0.0.tgz",
"integrity": "sha512-eyTcpKOcamdhWJXj56DpQMo1ylSQpcGtGKXcU0Tb97+K56/CF5amAqqqNj0+KvA0iw2ynxtHWFsPDSClCxe48w==",
"engines": {
"node": ">=4.0"
}
},
"node_modules/debug": {
"version": "4.3.2",
"resolved": "https://registry.npmjs.org/debug/-/debug-4.3.2.tgz",
"integrity": "sha512-mOp8wKcvj7XxC78zLgw/ZA+6TSgkoE2C/ienthhRD298T7UNwAg9diBpLRxC0mOezLl4B0xV7M0cCO6P/O0Xhw==",
"dependencies": {
"ms": "2.1.2"
},
"engines": {
"node": ">=6.0"
},
"peerDependenciesMeta": {
"supports-color": {
"optional": true
}
}
},
"node_modules/flatted": {
"version": "2.0.2",
"resolved": "https://registry.npmjs.org/flatted/-/flatted-2.0.2.tgz",
"integrity": "sha512-r5wGx7YeOwNWNlCA0wQ86zKyDLMQr+/RB8xy74M4hTphfmjlijTSSXGuH8rnvKZnfT9i+75zmd8jcKdMR4O6jA=="
},
"node_modules/fs-extra": {
"version": "8.1.0",
"resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-8.1.0.tgz",
"integrity": "sha512-yhlQgA6mnOJUKOsRUFsgJdQCvkKhcz8tlZG5HBQfReYZy46OwLcY+Zia0mtdHsOo9y/hP+CxMN0TU9QxoOtG4g==",
"dependencies": {
"graceful-fs": "^4.2.0",
"jsonfile": "^4.0.0",
"universalify": "^0.1.0"
},
"engines": {
"node": ">=6 <7 || >=8"
}
},
"node_modules/graceful-fs": {
"version": "4.2.8",
"resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.8.tgz",
"integrity": "sha512-qkIilPUYcNhJpd33n0GBXTB1MMPp14TxEsEs0pTrsSVucApsYzW5V+Q8Qxhik6KU3evy+qkAAowTByymK0avdg=="
},
"node_modules/jsonfile": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-4.0.0.tgz",
"integrity": "sha1-h3Gq4HmbZAdrdmQPygWPnBDjPss=",
"optionalDependencies": {
"graceful-fs": "^4.1.6"
}
},
"node_modules/log4js": {
"version": "6.3.0",
"resolved": "https://registry.npmjs.org/log4js/-/log4js-6.3.0.tgz",
"integrity": "sha512-Mc8jNuSFImQUIateBFwdOQcmC6Q5maU0VVvdC2R6XMb66/VnT+7WS4D/0EeNMZu1YODmJe5NIn2XftCzEocUgw==",
"dependencies": {
"date-format": "^3.0.0",
"debug": "^4.1.1",
"flatted": "^2.0.1",
"rfdc": "^1.1.4",
"streamroller": "^2.2.4"
},
"engines": {
"node": ">=8.0"
}
},
"node_modules/luxon": {
"version": "2.1.1",
"resolved": "https://registry.npmjs.org/luxon/-/luxon-2.1.1.tgz",
"integrity": "sha512-6VQVNw7+kQu3hL1ZH5GyOhnk8uZm21xS7XJ/6vDZaFNcb62dpFDKcH8TI5NkoZOdMRxr7af7aYGrJlE/Wv0i1w==",
"engines": {
"node": ">=12"
}
},
"node_modules/ms": {
"version": "2.1.2",
"resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz",
"integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w=="
},
"node_modules/rfdc": {
"version": "1.3.0",
"resolved": "https://registry.npmjs.org/rfdc/-/rfdc-1.3.0.tgz",
"integrity": "sha512-V2hovdzFbOi77/WajaSMXk2OLm+xNIeQdMMuB7icj7bk6zi2F8GGAxigcnDFpJHbNyNcgyJDiP+8nOrY5cZGrA=="
},
"node_modules/streamroller": {
"version": "2.2.4",
"resolved": "https://registry.npmjs.org/streamroller/-/streamroller-2.2.4.tgz",
"integrity": "sha512-OG79qm3AujAM9ImoqgWEY1xG4HX+Lw+yY6qZj9R1K2mhF5bEmQ849wvrb+4vt4jLMLzwXttJlQbOdPOQVRv7DQ==",
"dependencies": {
"date-format": "^2.1.0",
"debug": "^4.1.1",
"fs-extra": "^8.1.0"
},
"engines": {
"node": ">=8.0"
}
},
"node_modules/streamroller/node_modules/date-format": {
"version": "2.1.0",
"resolved": "https://registry.npmjs.org/date-format/-/date-format-2.1.0.tgz",
"integrity": "sha512-bYQuGLeFxhkxNOF3rcMtiZxvCBAquGzZm6oWA1oZ0g2THUzivaRhv8uOhdr19LmoobSOLoIAxeUK2RdbM8IFTA==",
"engines": {
"node": ">=4.0"
}
},
"node_modules/string-argv": {
"version": "0.3.1",
"resolved": "https://registry.npmjs.org/string-argv/-/string-argv-0.3.1.tgz",
"integrity": "sha512-a1uQGz7IyVy9YwhqjZIZu1c8JO8dNIe20xBmSS6qu9kv++k3JGzCVmprbNN5Kn+BgzD5E7YYwg1CcjuJMRNsvg==",
"engines": {
"node": ">=0.6.19"
}
},
"node_modules/universalify": {
"version": "0.1.2",
"resolved": "https://registry.npmjs.org/universalify/-/universalify-0.1.2.tgz",
"integrity": "sha512-rBJeI5CXAlmy1pV+617WB9J63U6XcazHHF2f2dbJix4XzpUF0RS3Zbj0FGIOCAva5P/d/GBOYaACQ1w+0azUkg==",
"engines": {
"node": ">= 4.0.0"
}
}
},
"dependencies": {
"@influxdata/influxdb-client": {
"version": "1.20.0",
"resolved": "https://registry.npmjs.org/@influxdata/influxdb-client/-/influxdb-client-1.20.0.tgz",
"integrity": "sha512-jaKSI63hmQ5VSkJrFJkYIXaKlhoF+mGd4HmOf7v/X7pmEi69ReHp922Wyx6/OeCrpndRMbsadk+XmGNdd43cFw=="
},
"@influxdata/influxdb-client-apis": {
"version": "1.20.0",
"resolved": "https://registry.npmjs.org/@influxdata/influxdb-client-apis/-/influxdb-client-apis-1.20.0.tgz",
"integrity": "sha512-KMTmXH4rbpS+NWGpqDjxcKTyan2rbiT2IM5AdRElKhH2sHbH96xwLgziaxeC+OCJLeNAdehJgae3I8WiZjbwdg==",
"requires": {}
},
"date-format": {
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/date-format/-/date-format-3.0.0.tgz",
"integrity": "sha512-eyTcpKOcamdhWJXj56DpQMo1ylSQpcGtGKXcU0Tb97+K56/CF5amAqqqNj0+KvA0iw2ynxtHWFsPDSClCxe48w=="
},
"debug": {
"version": "4.3.2",
"resolved": "https://registry.npmjs.org/debug/-/debug-4.3.2.tgz",
"integrity": "sha512-mOp8wKcvj7XxC78zLgw/ZA+6TSgkoE2C/ienthhRD298T7UNwAg9diBpLRxC0mOezLl4B0xV7M0cCO6P/O0Xhw==",
"requires": {
"ms": "2.1.2"
}
},
"flatted": {
"version": "2.0.2",
"resolved": "https://registry.npmjs.org/flatted/-/flatted-2.0.2.tgz",
"integrity": "sha512-r5wGx7YeOwNWNlCA0wQ86zKyDLMQr+/RB8xy74M4hTphfmjlijTSSXGuH8rnvKZnfT9i+75zmd8jcKdMR4O6jA=="
},
"fs-extra": {
"version": "8.1.0",
"resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-8.1.0.tgz",
"integrity": "sha512-yhlQgA6mnOJUKOsRUFsgJdQCvkKhcz8tlZG5HBQfReYZy46OwLcY+Zia0mtdHsOo9y/hP+CxMN0TU9QxoOtG4g==",
"requires": {
"graceful-fs": "^4.2.0",
"jsonfile": "^4.0.0",
"universalify": "^0.1.0"
}
},
"graceful-fs": {
"version": "4.2.8",
"resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.8.tgz",
"integrity": "sha512-qkIilPUYcNhJpd33n0GBXTB1MMPp14TxEsEs0pTrsSVucApsYzW5V+Q8Qxhik6KU3evy+qkAAowTByymK0avdg=="
},
"jsonfile": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-4.0.0.tgz",
"integrity": "sha1-h3Gq4HmbZAdrdmQPygWPnBDjPss=",
"requires": {
"graceful-fs": "^4.1.6"
}
},
"log4js": {
"version": "6.3.0",
"resolved": "https://registry.npmjs.org/log4js/-/log4js-6.3.0.tgz",
"integrity": "sha512-Mc8jNuSFImQUIateBFwdOQcmC6Q5maU0VVvdC2R6XMb66/VnT+7WS4D/0EeNMZu1YODmJe5NIn2XftCzEocUgw==",
"requires": {
"date-format": "^3.0.0",
"debug": "^4.1.1",
"flatted": "^2.0.1",
"rfdc": "^1.1.4",
"streamroller": "^2.2.4"
}
},
"luxon": {
"version": "2.1.1",
"resolved": "https://registry.npmjs.org/luxon/-/luxon-2.1.1.tgz",
"integrity": "sha512-6VQVNw7+kQu3hL1ZH5GyOhnk8uZm21xS7XJ/6vDZaFNcb62dpFDKcH8TI5NkoZOdMRxr7af7aYGrJlE/Wv0i1w=="
},
"ms": {
"version": "2.1.2",
"resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz",
"integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w=="
},
"rfdc": {
"version": "1.3.0",
"resolved": "https://registry.npmjs.org/rfdc/-/rfdc-1.3.0.tgz",
"integrity": "sha512-V2hovdzFbOi77/WajaSMXk2OLm+xNIeQdMMuB7icj7bk6zi2F8GGAxigcnDFpJHbNyNcgyJDiP+8nOrY5cZGrA=="
},
"streamroller": {
"version": "2.2.4",
"resolved": "https://registry.npmjs.org/streamroller/-/streamroller-2.2.4.tgz",
"integrity": "sha512-OG79qm3AujAM9ImoqgWEY1xG4HX+Lw+yY6qZj9R1K2mhF5bEmQ849wvrb+4vt4jLMLzwXttJlQbOdPOQVRv7DQ==",
"requires": {
"date-format": "^2.1.0",
"debug": "^4.1.1",
"fs-extra": "^8.1.0"
},
"dependencies": {
"date-format": {
"version": "2.1.0",
"resolved": "https://registry.npmjs.org/date-format/-/date-format-2.1.0.tgz",
"integrity": "sha512-bYQuGLeFxhkxNOF3rcMtiZxvCBAquGzZm6oWA1oZ0g2THUzivaRhv8uOhdr19LmoobSOLoIAxeUK2RdbM8IFTA=="
}
}
},
"string-argv": {
"version": "0.3.1",
"resolved": "https://registry.npmjs.org/string-argv/-/string-argv-0.3.1.tgz",
"integrity": "sha512-a1uQGz7IyVy9YwhqjZIZu1c8JO8dNIe20xBmSS6qu9kv++k3JGzCVmprbNN5Kn+BgzD5E7YYwg1CcjuJMRNsvg=="
},
"universalify": {
"version": "0.1.2",
"resolved": "https://registry.npmjs.org/universalify/-/universalify-0.1.2.tgz",
"integrity": "sha512-rBJeI5CXAlmy1pV+617WB9J63U6XcazHHF2f2dbJix4XzpUF0RS3Zbj0FGIOCAva5P/d/GBOYaACQ1w+0azUkg=="
}
}
}

View File

@@ -1,23 +0,0 @@
{
"name": "rfmon-to-influx",
"version": "1.0.0",
"description": "Writing (mostly meta-) data received in Wireless-Monitor-Mode into an InfluxDB",
"main": "main.js",
"scripts": {
"test": "echo \"Error: no test specified\" && exit 1",
"start": "node main.js"
},
"repository": {
"type": "git",
"url": "https://gitea.ruekov.eu/Ruakij/rfmon-to-influx"
},
"author": "Ruakij",
"license": "AGPL-3.0",
"dependencies": {
"@influxdata/influxdb-client": "^1.20.0",
"@influxdata/influxdb-client-apis": "^1.20.0",
"log4js": "^6.3.0",
"luxon": "^2.1.1",
"string-argv": "^0.3.1"
}
}

View File

@@ -1,105 +0,0 @@
// String-valued enum of all packet-types the parser can emit.
const PacketType = {
    Beacon: "Beacon",
    ProbeRequest: "ProbeRequest",
    ProbeResponse: "ProbeResponse",
    Data: "Data",
    RequestToSend: "RequestToSend",
    ClearToSend: "ClearToSend",
    Acknowledgment: "Acknowledgment",
    BlockAcknowledgment: "BlockAcknowledgment",
    NoData: "NoData",
    Authentication: "Authentication",
    AssociationRequest: "AssociationRequest",
    AssociationResponse: "AssociationResponse",
    Disassociation: "Disassociation",
    Handshake: "Handshake",
    Unknown: "Unknown"
};
// String-valued enum of 802.11 frame-control flags.
const FlagType = {
    MoreFragments: "MoreFragments",
    Retry: "Retry",
    PwrMgt: "PwrMgt",
    MoreData: "MoreData",
    Protected: "Protected",
    Order: "Order"
};

// Base DTO: common metadata of one captured frame.
class Packet{
    timestampMicros;    // epoch timestamp in microseconds
    flags = {};         // map of FlagType -> value
    srcMac;
    dstMac;
    bssid;
    signal;             // NOTE(review): unit not shown here — presumably dBm; confirm against parser
    frequency;
    dataRate;
    durationMicros;
    payloadData;        // raw payload bytes (array-like; see payloadSize)
    // Derived: payload length in bytes.
    get payloadSize(){
        return this.payloadData.length;
    }
    packetType;         // one of PacketType
}

// Extensions of Packet
class PacketWithSSID extends Packet{
    ssid;
}
class BeaconPacket extends PacketWithSSID{}
class ProbeRequestPacket extends PacketWithSSID{}
class ProbeResponsePacket extends PacketWithSSID{}

const AuthenticationType = {
    OpenSystem_1: "OpenSystem_1",
    OpenSystem_2: "OpenSystem_2",
    Unknown: "Unknown",
};
class AuthenticationPacket extends Packet{
    authenticationType;     // one of AuthenticationType
}

class AssociationRequestPacket extends PacketWithSSID{}
class AssociationResponsePacket extends Packet{
    associationIsSuccessful;
}
class DisassociationPacket extends Packet{
    disassociationReason;
}

const HandshakeStage = {
    1: "1",
    2: "2",
    3: "3",
    4: "4"
};
class HandshakePacket extends Packet{
    handshakeStage;         // one of HandshakeStage ("1".."4")
}

// Specify exports
module.exports = {
    PacketType,
    FlagType,
    Packet,
    PacketWithSSID,
    BeaconPacket,
    ProbeRequestPacket,
    ProbeResponsePacket,
    AuthenticationType,
    AuthenticationPacket,
    AssociationRequestPacket,
    AssociationResponsePacket,
    DisassociationPacket,
    HandshakeStage,
    HandshakePacket,
};

View File

@@ -1,13 +0,0 @@
// Returns an error message string when any of the given ENV variable names
// is unset; returns undefined when all are present.
// @param requiredEnv array of environment-variable names to check
function requireEnvVars(requiredEnv){
    // Ensure required ENV vars are set
    let unsetEnv = requiredEnv.filter((env) => (typeof process.env[env] === "undefined"));

    if (unsetEnv.length > 0) {
        return "Required ENV variables are not set: [" + unsetEnv.join(", ") + "]";
    }
}

// Specify exports
module.exports = {
    requireEnvVars
};

View File

@@ -1,18 +0,0 @@
const logger = require("./logger.js")("exec");
const { spawn } = require("child_process");
const { parseArgsStringToArgv } = require("string-argv");
function exec(cmd, options){
const [bin, ...args] = parseArgsStringToArgv(cmd);
logger.addContext("binary", "bin");
logger.debug(`Spawn process '${cmd}'`);
return spawn(bin, args, options);
}
// Specify exports
module.exports = {
exec
};

View File

@@ -1,24 +0,0 @@
// From https://stackoverflow.com/a/34356351
// Convert a hex string to a byte array
function hexToBytes(hex) {
for (var bytes = [], c = 0; c < hex.length; c += 2)
bytes.push(parseInt(hex.substr(c, 2), 16));
return bytes;
}
// Convert a byte array to a hex string
function bytesToHex(bytes) {
for (var hex = [], i = 0; i < bytes.length; i++) {
var current = bytes[i] < 0 ? bytes[i] + 256 : bytes[i];
hex.push((current >>> 4).toString(16));
hex.push((current & 0xF).toString(16));
}
return hex.join("");
}
// Specify exports: hex conversion helpers
module.exports = {
hexToBytes,
bytesToHex
};

View File

@@ -1,61 +0,0 @@
const logger = require.main.require("./helper/logger.js")("influx-checks");
const Os = require("os");
const { InfluxDB, Point } = require("@influxdata/influxdb-client")
const Influx = require("@influxdata/influxdb-client-apis");
/**
 * Check that the influx-server is reachable and healthy.
 * @param {InfluxDB} influxDb Influx client instance
 * @returns {Promise} Resolves with the health-response; rejects (without value) on failure
 */
function checkHealth(influxDb){
    return new Promise((resolve, reject) => {
        new Influx.HealthAPI(influxDb) // Check influx health
            .getHealth()
            .then((res) => {
                logger.debug("Server healthy.", "Version: ", res.version);
                resolve(res);
            })
            .catch((err) => {
                // BUG FIX: the chain was .catch().then() — after a rejection the .then()
                // still ran with res === undefined and crashed on res.version
                logger.error("Could not communicate with Influx:");
                logger.error(`Error [${err.code}]:`, err.message);
                reject();
            });
    });
}
/**
 * Check that the configured bucket exists and is at least readable with our token.
 * @param {InfluxDB} influxDb Influx client instance
 * @param {object} options Bucket query options (org, name)
 * @returns {Promise} Resolves with the bucket-list response; rejects (without value) on failure
 */
function checkBucket(influxDb, options){
    return new Promise((resolve, reject) => {
        new Influx.BucketsAPI(influxDb).getBuckets(options)
            .then((res) => { // But an empty list when the bucket exists, but token does not have permission to get details
                logger.debug("Bucket found");
                resolve(res);
                // Now we know the bucket exists and we have some kind of permission.. but we still dont know if we are able to write to it..
            })
            .catch((err) => { // Weirdly the influx-Api returns 404 for searches of non-existing buckets
                // BUG FIX: .catch() before .then() let the success-branch run with undefined
                // after a rejection; resolve-path now comes first in the chain
                logger.error("Could not get bucket:");
                logger.error(`Error [${err.code}]:`, err.message);
                reject();
            });
    });
}
/**
 * Check that we can actually write points by writing a test-point and closing the WriteApi.
 * @param {InfluxDB} influxDb Influx client instance
 * @param {object} options Write options (org, bucket)
 * @returns {Promise} Resolves on successful write; rejects (without value) on failure
 */
function checkWriteApi(influxDb, options){
    return new Promise((resolve, reject) => {
        const writeApi = influxDb.getWriteApi(options.org, options.bucket); // Get WriteAPI
        writeApi.writePoint(new Point("worker_connectionTest").tag("hostname", Os.hostname())); // Write point
        writeApi.close()
            .then(() => {
                logger.debug("Writing ok");
                resolve();
            })
            .catch((err) => {
                // BUG FIX: the old .catch().then() chain resolved the promise even after a
                // rejection had already been handled; resolve-path now comes first
                logger.error("Could not get writeApi:");
                logger.error(`Error [${err.code}]:`, err.message);
                reject();
            });
    });
}
// Specify exports: influx pre-flight checks used by main.js startup
module.exports = {
checkHealth,
checkBucket,
checkWriteApi,
};

View File

@@ -1,11 +0,0 @@
const log4js = require("log4js");
/**
 * Create a log4js logger for the given category, honoring the LOGLEVEL env-var.
 * @param {string} category Logger category name (defaults to "unknown")
 * @returns Configured log4js logger instance
 */
function setup(category = "unknown"){
    const instance = log4js.getLogger(category);
    // Fall back to INFO when no LOGLEVEL is configured
    instance.level = process.env.LOGLEVEL ?? "INFO";
    return instance;
}
// Specify exports: the factory function itself is the module's single export
module.exports = setup;

View File

@@ -1,46 +0,0 @@
// This file specifies functions to help a user with e.g. configuration-errors
function detectStreamData(stream, timeout = 5000){
return new Promise((resolve, reject) => {
let timeoutHandler;
if(timeout){
timeoutHandler = setTimeout(() => {
reject("timeout");
remListeners();
},
timeout);
}
function remListeners(){
stream.removeListener("error", errorHandler);
stream.removeListener("data", dataHandler);
if(timeoutHandler) clearTimeout(timeoutHandler);
}
function errorHandler(err) {
remListeners();
}
function dataHandler(data) {
resolve(data);
remListeners();
}
stream.on("error", errorHandler);
stream.on("data", dataHandler);
});
}
/**
 * Start data-detection on several streams at once.
 * @param streams Array of streams to watch
 * @param {number} timeout Per-stream timeout in milliseconds
 * @returns {Promise[]} One promise per stream, in the same order
 */
function detectStreamsData(streams, timeout = 5000){
    return streams.map((stream) => detectStreamData(stream, timeout));
}
// Specify exports: user-assistance helpers for detecting configuration problems
module.exports = {
detectStreamData,
detectStreamsData,
};

View File

@@ -1,43 +0,0 @@
const { HandshakeStage } = require.main.require("./dto/Packet.js");
/**
 * Decode the raw 16-bit EAPOL Key-Information word into named fields.
 * Bit-positions follow the shifts below — presumably the IEEE 802.11 layout; confirm against spec.
 * @param {number} keyInfoRaw Raw key-information word
 * @returns {object} Field-name -> numeric value (single-bit fields are 0/1)
 */
function keyInfoFromRaw(keyInfoRaw) {
    // Helper: extract one bit at the given position
    const bit = (pos) => (keyInfoRaw >> pos) & 0b1;
    return {
        "KeyDescriptorVersion": keyInfoRaw & 0b111, // bits 0-2
        "KeyType": bit(3),
        "KeyIndex": (keyInfoRaw >> 4) & 0b11, // bits 4-5
        "Install": bit(6),
        "KeyACK": bit(7),
        "KeyMIC": bit(8),
        "Secure": bit(9),
        "Error": bit(10),
        "Request": bit(11),
        "EncryptedKeyData": bit(12),
        "SMKMessage": bit(13),
    };
}
// Lookup-table from the (Install, KeyACK, KeyMIC, Secure) bit-string to the handshake-stage.
const HANDSHAKE_STAGE_KEYINFO = {
    "keys": ["Install", "KeyACK", "KeyMIC", "Secure"],
    "0100": HandshakeStage[1],
    "0010": HandshakeStage[2],
    "1111": HandshakeStage[3],
    "0011": HandshakeStage[4],
};
/**
 * Derive the 4-way-handshake stage from a decoded key-information object.
 * @param {object} keyInfo Result of keyInfoFromRaw
 * @returns Matching HandshakeStage value (undefined when the bit-pattern is unknown)
 */
function handshakeStageFromKeyInfo(keyInfo){
    // Build the compare-key from the relevant flag-bits, then look the stage up
    const keyData = HANDSHAKE_STAGE_KEYINFO["keys"]
        .map((key) => keyInfo[key].toString())
        .join("");
    return HANDSHAKE_STAGE_KEYINFO[keyData];
}
// Specify exports: wifi-state analysis helpers
module.exports = {
keyInfoFromRaw,
handshakeStageFromKeyInfo,
};

View File

@@ -1,136 +0,0 @@
"use strict";
const logFactory = require("./helper/logger.js");
const logger = logFactory("main");
const { requireEnvVars } = require("./helper/env.js");
const { exit } = require("process");
const { exec } = require("./helper/exec.js");
const Os = require("os");
const { InfluxDB } = require("@influxdata/influxdb-client");
const InfluxChecks = require("./helper/influx-checks.js");
const { RegexBlockStream } = require("./streamHandler/RegexBlockStream.js");
const { PacketStreamFactory } = require("./streamHandler/PacketStreamFactory.js");
const { PacketInfluxPointFactory } = require("./streamHandler/PacketInfluxPointFactory.js");
const { InfluxPointWriter } = require("./streamHandler/InfluxPointWriter.js");
const userHelper = require("./helper/userHelper.js");
/// Setup ENVs
// Shorthand for the process environment; the defaults below mutate process.env directly
const env = process.env;
// Defaults (only applied when the variable is not already provided)
{
env.LOGLEVEL ??= "INFO";
env.WIFI_INTERFACE ??= "wlan0";
env.HOSTNAME ??= Os.hostname();
}
// Required vars — abort startup with a fatal log when any of these is missing
let errorMsg = requireEnvVars([
"INFLUX_URL", "INFLUX_TOKEN",
"INFLUX_ORG", "INFLUX_BUCKET"
]);
if(errorMsg){
logger.fatal(errorMsg);
exit(1);
}
// Main entry point: verify influx, spawn tcpdump, and pipe its output through the
// parsing/point-building stream chain into influx.
(async function() {
    logger.info("Setup Influx..");
    const influxDb = new InfluxDB({url: env.INFLUX_URL, token: env.INFLUX_TOKEN});
    // Run the influx pre-flight checks sequentially; any failure aborts startup
    await InfluxChecks.checkHealth(influxDb)
        .then((res) => {return InfluxChecks.checkBucket(influxDb, {
            org: env.INFLUX_ORG,
            name: env.INFLUX_BUCKET
        });})
        .then((res) => {return InfluxChecks.checkWriteApi(influxDb, {
            org: env.INFLUX_ORG,
            bucket: env.INFLUX_BUCKET
        });})
        .catch((err) => {
            if(err) {
                logger.error("Error whilst checking influx:");
                logger.error(err);
            }
            logger.fatal("Setup influx failed!");
            exit(1);
        });
    logger.debug("Get WriteApi & set default-hostname to", `'${env.HOSTNAME}'`);
    const influxWriteApi = influxDb.getWriteApi(env.INFLUX_ORG, env.INFLUX_BUCKET, "us");
    //influxWriteApi.useDefaultTags({"hostname": env.HOSTNAME});
    logger.info("Influx ok");
    logger.info("Starting tcpdump..");
    const TCPDUMP_BASECMD = "tcpdump -vvv -e -n -X -s0 -i";
    let cmd = `${TCPDUMP_BASECMD} ${env.WIFI_INTERFACE}`;
    let proc = exec(cmd);
    logger.debug("Creating & Attaching streams..");
    // Pipeline: tcpdump-text -> text-blocks -> Packet-objects -> influx-points -> influx
    let regexBlockStream = new RegexBlockStream(/^\d{2}:\d{2}:\d{2}.\d{6}.*(\n( {4,8}|\t\t?).*)+\n/gm);
    let packetStreamFactory = new PacketStreamFactory();
    let packetInfluxPointFactory = new PacketInfluxPointFactory();
    let influxPointWriter = new InfluxPointWriter(influxWriteApi);
    proc.stdout
        .setEncoding("utf8")
        .pipe(regexBlockStream)
        .pipe(packetStreamFactory)
        .pipe(packetInfluxPointFactory)
        .pipe(influxPointWriter);
    logger.debug("Attaching error-logger..");
    const loggerTcpdump = logFactory("tcpdump");
    proc.stderr.setEncoding("utf8").on("data", (data) => {
        // tcpdump prints its status-lines ("listening on ...", "N packets captured")
        // on stderr; everything else is treated as a real error.
        // BUG FIX: the old condition `!match(A) || !match(B)` was true for every line
        // (no line can match both patterns), so real errors were only ever logged as
        // debug and the error-branch was unreachable.
        if(data.match(/^(tcpdump: )?listening on /i) || data.match(/^\d+ packets captured/i)) {
            loggerTcpdump.debug(data);
        }
        else loggerTcpdump.error(data);
    });
    // FIXME: This is a hacky workaround to not let errors from subprocess bubble up and terminate our process
    regexBlockStream.on("error", (err) => {});
    proc.on("error", (err) => {
        loggerTcpdump.error(err);
    });
    const loggerPacketStream = logFactory("PacketStreamFactory");
    userHelper.detectStreamData(proc.stdout, 10000) // Expect tcpdump-logs to have data after max. 10s
        .then(() => {
            loggerTcpdump.debug("Got first data");
            userHelper.detectStreamData(packetStreamFactory, 10000) // Expect then to have packets after further 10s
                .then(() => {
                    loggerPacketStream.debug("Got first packet");
                })
                .catch((err) => {
                    if(err == "timeout") loggerPacketStream.warn("No packets");
                });
        })
        .catch((err) => {
            if(err == "timeout") loggerTcpdump.warn("No data after 10s! Wrong configuration?");
        });
    logger.debug("Attaching exit-handler..");
    proc.on("exit", (code) => {
        loggerTcpdump.debug(`tcpdump exited code: ${code}`);
        if (code) {
            loggerTcpdump.fatal(`tcpdump exited with non-zero code: ${code}`);
            exit(1);
        }
        logger.info("Shutdown");
        exit(0);
    });
    // Handle stop-signals for graceful shutdown
    function shutdownReq() {
        logger.info("Shutdown request received..");
        logger.debug("Stopping subprocess tcpdump, then exiting myself..");
        proc.kill(); // Kill process (send SIGTERM), then upper event-handler will stop self
    }
    process.on("SIGTERM", shutdownReq);
    process.on("SIGINT", shutdownReq);
    logger.info("Startup complete");
})();

View File

@@ -1,35 +0,0 @@
const logger = require.main.require("./helper/logger.js")("InfluxPointWriter");
const { Writable } = require("stream");
const { WriteApi } = require("@influxdata/influxdb-client");
/**
* Get points and write them into influx
*/
class InfluxPointWriter extends Writable{
/**
*
* @param {WriteApi} writeApi WriteAPI from InfluxDB instance
*/
constructor(writeApi){
super({
objectMode: true
});
this._api = writeApi;
}
_write(point, encoding, next){
this._api.writePoint(point);
next();
}
_flush(next){
this._api.flush(true)
.catch((err) => { next(new Error(`WriteApi rejected promise for flush: ${err}`)); })
.then(next);
}
}
// Specify exports: the point-writing sink stream
module.exports = {
InfluxPointWriter
};

View File

@@ -1,89 +0,0 @@
const logger = require.main.require("./helper/logger.js")("PacketStreamFactory");
const { Transform } = require("stream");
const {Point} = require("@influxdata/influxdb-client");
/** Keys to always use as tags (when present and non-null on the packet) */
const TAG_LIST = [
"srcMac",
"dstMac",
"bssid",
"frequency",
"flags",
"packetType",
];
/** Measurement-name and corresponding field-key; a point is emitted per entry the packet has a value for */
const MEASUREMENT_MAP = new Map([
["rfmon_signal_dbm", "signal"],
["rfmon_payloadsize_bytes", "payloadSize"],
["rfmon_datarate_bytes", "dataRate"],
["rfmon_ssid_names", "ssid"],
["rfmon_authenticationtype_info", "authenticationType"],
["rfmon_associationsuccess_bools", "associationIsSuccessful"],
["rfmon_disassociationreason_info", "disassociationReason"],
["rfmon_handshakestage_info", "handshakeStage"],
]);
/**
 * Get packets and convert them into influx-points
 * (object-mode Transform: one incoming packet may emit several points)
 */
class PacketInfluxPointFactory extends Transform{
constructor(){
super({
readableObjectMode: true,
writableObjectMode: true
});
}
// For every measurement in MEASUREMENT_MAP that the packet carries, emit one
// point tagged with all available TAG_LIST entries.
_transform(packet, encoding, next){
// Create measurements
MEASUREMENT_MAP.forEach((objKey, measurement) => {
if(packet[objKey] == null) return; // Skip measurements the packet has no value for
let point = new Point(measurement); // Create point
// Set tags
TAG_LIST.filter(tag => Object.keys(packet).includes(tag)) // Filter tags available on object
.filter(tag => packet[tag] != null) // Filter tags not falsy on object
.forEach(tag => {
tagObjectRecursively(point, tag, packet[tag]);
});
point.setField("value", packet[objKey]); // Set field (type-dispatched, see setField below)
this.push(point); // Push point into stream
});
next(); // Get next packet
}
}
/**
 * Tag a point with a value; objects are flattened recursively into
 * `<tag>_<key>`-style tag-names (lower-cased).
 * @param point Influx Point (anything exposing .tag(name, value))
 * @param {string} tag Base tag-name
 * @param field Scalar value or (nested) object to tag with
 * @param {string} suffix Accumulated key-suffix during recursion
 */
function tagObjectRecursively(point, tag, field, suffix = ""){
    if(typeof(field) == "object"){
        // TODO: Convert boolean-arrays like "packet.flags" to key: value
        for (const [key, value] of Object.entries(field)) {
            tagObjectRecursively(point, tag, value, `_${key}${suffix}`);
        }
    }
    else {
        point.tag((tag + suffix).toLowerCase(), field);
    }
}
/** Mapping for type -> field-method */
const POINT_FIELD_TYPE = new Map([
    ["boolean", function(key, value){ return this.booleanField(key, value); }],
    ["number", function(key, value){ return this.intField(key, value); }],
    ["string", function(key, value){ return this.stringField(key, value); }],
]);
/**
 * Set a field on the point, dispatching to the field-method matching the value's type.
 * BUG FIX: unsupported types (e.g. object/undefined) previously left `setField`
 * undefined and threw a TypeError; fall back to a stringified string-field instead.
 */
Point.prototype.setField = function(key, value){
    let setField = POINT_FIELD_TYPE.get(typeof value);
    if(!setField) return this.stringField(key, String(value)); // Robust fallback for unmapped types
    return setField.apply(this, [key, value]);
};
// Specify exports: the packet-to-point transform stream
module.exports = {
PacketInfluxPointFactory
};

View File

@@ -1,153 +0,0 @@
const logger = require.main.require("./helper/logger.js")("PacketStreamFactory");
const { Transform } = require("stream");
const { DateTime } = require("luxon");
const { PacketType, FlagType, Packet, PacketWithSSID, BeaconPacket, ProbeRequestPacket, ProbeResponsePacket, AuthenticationPacket, AuthenticationType, AssociationResponsePacket, DisassociationPacket, HandshakePacket, HandshakeStage } = require.main.require("./dto/Packet.js");
const hexConv = require.main.require("./helper/hexConverter.js");
const wifiStateAnalyser = require.main.require("./helper/wifiStateAnalyzer.js");
// Map from the keyword tcpdump prints in the header-line to our PacketType enum
const PACKET_TYPE_MAP = {
"Beacon": PacketType.Beacon,
"Probe Request": PacketType.ProbeRequest,
"Probe Response": PacketType.ProbeResponse,
"Data": PacketType.Data,
"Request-To-Send": PacketType.RequestToSend,
"Clear-To-Send": PacketType.ClearToSend,
"Acknowledgment": PacketType.Acknowledgment,
"BA": PacketType.BlockAcknowledgment,
"Authentication": PacketType.Authentication,
"Assoc Request": PacketType.AssociationRequest,
"Assoc Response": PacketType.AssociationResponse,
"Disassociation:": PacketType.Disassociation, // NOTE(review): trailing ':' unlike the other keys — presumably matches tcpdump's literal output; verify against a capture
"EAPOL": PacketType.Handshake,
};
// Alternation of all type-keywords, used to build the detection-regex
const PACKET_TYPES_REGEX = Object.keys(PACKET_TYPE_MAP).join("|");
// Map from tcpdump's authentication-description to our AuthenticationType enum
const AUTHENTICATION_TYPE_MAP = {
"(Open System)-1": AuthenticationType.OpenSystem_1,
"(Open System)-2": AuthenticationType.OpenSystem_2,
};
// Map from tcpdump's flag-keywords to our FlagType enum
const FLAG_TYPE_MAP = {
"Retry": FlagType.Retry,
"Pwr Mgmt": FlagType.PwrMgt,
"More Data": FlagType.MoreData,
"Protected": FlagType.Protected,
};
// Alternation of all flag-keywords, used to build the flag-detection-regex
const FLAG_TYPE_MAPS_REGEX = Object.keys(FLAG_TYPE_MAP).join("|");
/**
 * Read data from text-blocks and convert them to Packet
 */
class PacketStreamFactory extends Transform{
    constructor(){
        super({
            readableObjectMode: true,
            writableObjectMode: true
        });
    }
    /** Split a tcpdump text-block into header-line and payload-lines, then parse both. */
    _transform(chunk, encoding, next){
        let packet = new Packet();
        const lines = chunk.split("\n");
        const header = lines.splice(0, 1)[0]; // Grab first line, "lines" is now the payload
        packet = this._handleHeader(packet, header);
        packet = this._handlePayload(packet, lines);
        next(null, packet); // Get next chunk
    }
    /**
     * Parse the tcpdump header-line into packet-metadata.
     * @param {Packet} packet Packet to fill
     * @param {string} data Header-line text
     * @returns The (possibly specialized) packet
     */
    _handleHeader(packet, data){
        // Convert time to epoch-micros Unfortunately luxon doesnt use micros, but millis as smallest time-unit requiring some "hacks"
        // NOTE(review): despite the name, this is epoch-SECONDS with microsecond precision — confirm downstream expectations
        packet.timestampMicros = DateTime.fromISO(data.slice(0, 12)).toSeconds() + data.slice(12, 15)/1000000;
        // Find flags and set them to true in packet.flags
        // BUG FIX: the match was wrapped in a second, pointless data.match(...) call whose
        // argument was forEach's undefined result — the wrapper did nothing and is removed
        data.match(new RegExp("(?<=^|\\s)("+ FLAG_TYPE_MAPS_REGEX +")(?=$|\\s)", "ig"))
            ?.forEach(match => packet.flags[FLAG_TYPE_MAP[match]] = true);
        packet.dataRate = Number(data.match(/(?<=^|\s)\d+(\.\d+)?(?=\sMb\/?s($|\s))/i)?.[0]) || null;
        packet.frequency = Number(data.match(/(?<=^|\s)\d{4}(?=\sMHz($|\s))/i)?.[0]) || null;
        packet.durationMicros = Number(data.match(/(?<=^|\s)\d{1,4}(?=us($|\s))/i)?.[0]) || null;
        packet.signal = Number(data.match(/(?<=^|\s)-\d{2,3}(?=dBm\sSignal($|\s))/i)?.[0]) || null;
        // Detect the packet-type from the known keywords
        let packetTypeStr = data.match(new RegExp("(?<=^|\\s)("+ PACKET_TYPES_REGEX +")(?=$|\\s)", "i"))?.[0];
        if(packetTypeStr)
            packet.packetType = PACKET_TYPE_MAP[packetTypeStr];
        else if(data.match(/(SA|TA|DA|RA|BSSID):.{17}\s*$/i)){
            packet.packetType = PacketType.NoData;
        }
        else {
            packet.packetType = PacketType.Unknown;
        }
        packet.srcMac = data.match(/(?<=(^|\s)(SA|TA):).{17}(?=$|\s)/i)?.[0] ?? null;
        packet.dstMac = data.match(/(?<=(^|\s)(DA|RA):).{17}(?=$|\s)/i)?.[0] ?? null;
        packet.bssid = data.match(/(?<=(^|\s)BSSID:).{17}(?=$|\s)/i)?.[0] ?? null;
        // Cover special cases with more data
        let newPacket;
        switch(packet.packetType){
            case PacketType.Beacon:
            case PacketType.ProbeRequest:
            case PacketType.ProbeResponse:
            case PacketType.AssociationRequest:
                newPacket = new PacketWithSSID();
                newPacket.ssid = data.match(new RegExp("(?<=(^|\\s)"+ packetTypeStr +"\\s\\().{0,32}(?=\\)($|\\s))", "i"))?.[0] ?? null;
                break;
            case PacketType.Authentication:
                newPacket = new AuthenticationPacket();
                // BUG FIX: use optional chaining — a missing match previously threw a TypeError
                newPacket.authenticationType = AUTHENTICATION_TYPE_MAP[data.match(/(?<=(^|\s)Authentication\s).{3,}(?=:(\s|$))/i)?.[0]] ?? AuthenticationType.Unknown;
                break;
            case PacketType.AssociationResponse:
                newPacket = new AssociationResponsePacket();
                newPacket.associationIsSuccessful = data.match(/(?<=(^|\s)Assoc\sResponse\s.{0,30})Successful(?=\s|$)/i) ? true : false;
                break;
            case PacketType.Disassociation:
                newPacket = new DisassociationPacket();
                newPacket.disassociationReason = data.match(/(?<=(^|\s)Disassociation:\s).*$/i)?.[0] ?? null;
                break;
        }
        if(newPacket) packet = Object.assign(newPacket, packet); // Use new, more specific, packet and copy old data over
        return packet;
    }
    /**
     * Parse the hex-dump payload-lines and extract payload-dependent data.
     * @param {Packet} packet Packet to fill
     * @param {string[]} data Payload lines (already split from the header)
     * @returns The (possibly specialized) packet
     */
    _handlePayload(packet, data){
        data = data.join("");
        // Get payload-Hex-Data. If there is no data: empty
        packet.payloadData = hexConv.hexToBytes(data.match(/(?<=\s)([A-F0-9]{1,4}(?=\s))/igm)?.join("") ?? "");
        packet.payloadData.splice(packet.payloadData.length-4, 4); // Remove FrameCheck sequence
        // Cover special cases with more data
        let newPacket;
        switch(packet.packetType){
            case PacketType.Handshake: {
                newPacket = new HandshakePacket();
                // Read key-information (16-bit word at payload offset 0x5/0x6)
                const keyInfoRaw = (packet.payloadData[0x5]<<0x8) + packet.payloadData[0x6];
                const keyInfo = wifiStateAnalyser.keyInfoFromRaw(keyInfoRaw); // Convert
                newPacket.handshakeStage = wifiStateAnalyser.handshakeStageFromKeyInfo(keyInfo); // Get stage
                break;
            }
        }
        if(newPacket) packet = Object.assign(newPacket, packet);
        return packet;
    }
}
// Specify exports: the text-block-to-Packet transform stream
module.exports = {
PacketStreamFactory
};

View File

@@ -1,67 +0,0 @@
const logger = require.main.require("./helper/logger.js")("RegexBlockStream");
const { Transform } = require("stream");
/**
 * Matches whole blocks as regex and passes them on
 */
class RegexBlockStream extends Transform{
// Block-matching RegExp; must have the global flag and match a complete, clean block
matcher;
// When true, the last match is withheld to avoid emitting a possibly incomplete block
withholdLastBlock;
// When true, the buffer is matched one final time during _flush
matchAllOnFlush;
/**
* @param {RegExp} matcher Block-match
* @param {boolean} withholdLastBlock When true, the last matches block will not be submitted to prevent submitting incomplete blocks.
* @param {boolean} matchAllOnFlush (Only in combination with withholdLastBlock) When enabled, the buffer will be matched on last time on _flush (stream deconstruction) and write any, also incomplete, blocks
* @remarks WARNING: It should match a clean-block (including e.g. newline)! Otherwise buffer will get dirty and use more and more resources.
*/
constructor(matcher, withholdLastBlock = true, matchAllOnFlush = false){
super({
readableObjectMode: true,
writableObjectMode: true
});
this.matcher = matcher;
this.withholdLastBlock = withholdLastBlock;
this.matchAllOnFlush = matchAllOnFlush;
}
// NOTE(review): this.readableBuffer is Node's INTERNAL BufferList of the readable side,
// not a field declared by this class — relying on it (and its .clear()) is fragile across
// Node versions; consider an own string-buffer field instead. Confirm before changing.
_transform(chunk, encoding, next){
chunk = this.readableBuffer.length? this.readableBuffer.join("") + chunk: chunk; // Add previous buffer to current chunk
this.readableBuffer.length && this.readableBuffer.clear(); // Clear buffer once we read it
let matches = chunk.match(this.matcher); // Match
if(matches){
if(this.withholdLastBlock) matches.pop(); // Remove last if we want to withhold it
chunk = this._writeMatches(matches, chunk);
}
this.readableBuffer.push(chunk); // Store remaining data in buffer
next(); // Get next chunk
}
// Push each match downstream and strip it from the chunk; returns the remaining chunk text
_writeMatches(matches, chunk = null){
if(matches){
matches.forEach((match) => {
this.push(match); // Write match to stream
if(chunk) chunk = chunk.replace(match, ""); // Remove match from chunks
});
}
if(chunk) return chunk;
}
// Called on stream end; optionally drains the remaining buffer through the matcher
_flush(next){
if(this.matchAllOnFlush){ // When requested, we'll match one last time over the remaining buffer
let chunk = this.readableBuffer.join("");
let matches = chunk.match(this.matcher); // Match remaining buffer
this._writeMatches(matches); // Write matches including last element
}
next(); // Tell system we are done
}
}
// Specify exports: the block-matching transform stream
module.exports = {
RegexBlockStream
};