131 Commits

Author SHA1 Message Date
c3cd6393d4 Merge branch 'release-2.0' 2021-12-06 13:01:31 +01:00
8eef17fd4c Fix wrong default-tag setting 2021-12-06 13:01:10 +01:00
c97137f4a7 Merge branch 'release-2' 2021-12-06 12:47:14 +01:00
39350932a4 Changed metric- and tag-names to match best-practice naming-convention 2021-12-06 12:46:04 +01:00
1e37f35e38 Moved creation of writeApi to main and set default-tag hostname 2021-12-06 12:44:26 +01:00
d0be44c1af Fix coding-style 2021-12-06 12:42:34 +01:00
a13d81e9c0 Merge branch 'release-1.1' 2021-12-03 10:53:35 +01:00
059c02e243 Merge branch 'dev' into release-1.1 2021-12-03 10:53:13 +01:00
57cf6fb0a7 Add npm prune (won't do much here) 2021-12-03 10:50:50 +01:00
298a96bf16 apk-install on 1 line and delete cache when done 2021-12-03 10:48:28 +01:00
b98dff947d Refactored packet to use new code-style (forgot last time :/) 2021-12-02 14:54:32 +01:00
a610f209d5 Merge branch 'release-1.1' 2021-12-02 14:08:54 +01:00
0f6c5b6b0e Change baseimage from node to node-alpine
Size drastically decreased
2021-12-02 14:06:21 +01:00
cc9e4c7258 Fix flush using local vars instead of this-vars 2021-12-02 13:41:23 +01:00
56ac283544 Refactored code to match code-style 2021-12-02 13:40:56 +01:00
9095e21e6f Add handshakeStage as metric 2021-12-01 14:24:03 +01:00
6e05a0b45c Merge branch 'release-1.0' 2021-11-29 15:55:27 +01:00
d14e469ef4 Removed error-logging from RegexBlockStream FIXME 2021-11-29 15:35:53 +01:00
99a3e13d77 Added further exception to error-event from tcpdump 2021-11-29 15:35:36 +01:00
b5c895674e Merge branch 'f_gracefulShutdown' into dev 2021-11-29 15:34:56 +01:00
86d2b8c1cf Added event for SIGTERM and SIGINT 2021-11-29 15:34:19 +01:00
7ff6556d51 Merge branch 'f_userHelper' into dev 2021-11-29 15:12:39 +01:00
16388c73e5 Added additional check for packetStream 2021-11-29 15:11:14 +01:00
8211f55b89 Added check for tcpdump-process stdout 2021-11-29 15:10:42 +01:00
c28bbaaada Moved Stream-creations outside 2021-11-29 15:09:59 +01:00
4ddbe3f06f Implemented userHelper methods to detect if streams have data before timeout 2021-11-29 15:09:21 +01:00
0d84079ce1 Added packetType to tag-fields 2021-11-29 14:20:29 +01:00
1bf761970f Added error-handling for regexBlockStream 2021-11-29 13:45:36 +01:00
5a0118aedd Added error-handling for process 2021-11-29 13:45:15 +01:00
0709db0ddf Changed exit-log to debug 2021-11-29 13:44:37 +01:00
c27761322c Catch start-error from tcpdump
tcpdump writes to stderr at the beginning
2021-11-29 13:18:29 +01:00
45a11753de Add own logger for tcpdump 2021-11-29 13:16:23 +01:00
d482001cdc Merge branch 'f_docker' into dev 2021-11-29 13:12:00 +01:00
a681bbd2d2 Removed sudo as container runs as root anyways 2021-11-29 13:11:11 +01:00
d9ee804c3b Removed src-references as that's the workdir 2021-11-29 13:10:44 +01:00
e320d8670b Add installation of package tcpdump 2021-11-29 13:10:05 +01:00
2d824543d1 fixed cmd 2021-11-29 13:09:49 +01:00
6e080907d1 Merge branch 'code-smell_handling' into dev 2021-11-29 09:58:04 +01:00
91c3aca9e2 Add function to get handshakeStage from keyInfo 2021-11-29 09:57:33 +01:00
31ab10c3e1 Fix wrong var-name 2021-11-29 09:57:05 +01:00
8ff7211f0f Revert "Extract packetType handling in own function"
This reverts commit 227ba127f8.
2021-11-29 09:50:14 +01:00
2ae85ababb Extract keyInfo reading to helper-file 2021-11-29 09:37:28 +01:00
227ba127f8 Extract packetType handling in own function 2021-11-29 09:36:21 +01:00
8417de5756 Extract checks with if-statements 2021-11-29 09:35:17 +01:00
54eadf3bc3 Fixed Regex codesmells 2021-11-29 09:24:18 +01:00
e39ebaac23 Fixed Regex codesmell 2021-11-29 09:23:08 +01:00
54d627d469 Fix typo 2021-11-29 09:20:38 +01:00
ca3c37be0f Activating strict-mode 2021-11-29 09:16:49 +01:00
96b52e63a0 Add exit-handler with error-detection 2021-11-29 09:16:39 +01:00
3c5e941cba Creating & Attaching streams / Error-logger 2021-11-29 09:15:49 +01:00
a468d7a57b Start tcpdump process 2021-11-29 09:14:50 +01:00
4ad5eba7e0 Change tag-setting to recursive by field
This will properly set e.g. arrays
2021-11-26 21:03:11 +01:00
2646c9787e Changed key-checking to value-checking
This includes e.g. getters, originally not included in keys
2021-11-26 21:00:15 +01:00
873f00b21b Remove deleting point and rename point to something useable 2021-11-26 19:17:43 +01:00
271554719e Remove wrong hint 2021-11-26 19:17:24 +01:00
e18de63d7c Revert "Move catch into then-onRejected to fix resolving then anyways."
This reverts commit 2356040572.
2021-11-26 19:14:01 +01:00
2356040572 Move catch into then-onRejected to fix resolving then anyways. 2021-11-26 19:07:27 +01:00
b0bbf0c71a Fixed wrong option-subname 2021-11-26 19:06:55 +01:00
37b78e7373 Fixed wrong logging-mode used 2021-11-26 18:54:09 +01:00
c5e1bb4c64 Added info-log "ok" 2021-11-26 18:37:55 +01:00
3c29ed2000 Implemented CheckWriteApi by writing and then deleting a TestPoint 2021-11-26 18:37:34 +01:00
ddf39b9433 Fixed require 2021-11-26 18:36:50 +01:00
3a927688d0 Changed fatal to error and added error handling to promise catch in main 2021-11-26 18:36:35 +01:00
c51cfc1b14 Fixed missing require 2021-11-26 18:35:51 +01:00
e1b2a7e016 Added influx checkBucket 2021-11-26 18:03:14 +01:00
fc5900b0ba Added influx checkHealth 2021-11-26 17:55:51 +01:00
354ca32a61 Merge branch 'dev' into f_influx-checks 2021-11-26 17:47:59 +01:00
d10e9bb2c6 Create influx-client 2021-11-26 17:43:42 +01:00
44cd3288cf Fixed stream not being in object-mode 2021-11-26 17:33:19 +01:00
3af4bb7cc6 Fixed wrong push 2021-11-26 17:33:07 +01:00
2a662e0bd1 Changed constructor to take influxDb 2021-11-26 17:32:31 +01:00
d7a9530b68 Cleaned up Promise 2021-11-26 17:31:31 +01:00
7de2250983 Added client-apis 2021-11-26 17:30:46 +01:00
bb3d843895 Fixed wrong joining 2021-11-25 18:42:45 +01:00
9472ed9198 Fix wrong usage of join()
Default separator is ',' but we want nothing
2021-11-25 18:42:26 +01:00
dcd0ce8111 Added lib string-argv to extract bin and args from cmd 2021-11-25 18:32:43 +01:00
1a9ced0bb8 Changed exec to only wrap spawn 2021-11-25 18:32:07 +01:00
d77e3f8844 Fixed naming 2021-11-25 18:30:50 +01:00
e715cc1cac Removed unused env-vars for now 2021-11-25 13:59:45 +01:00
cf1b300f6a Changed documentation to remarks section 2021-11-25 03:14:24 +01:00
450f162cda Implemented InfluxPointWriter for writing into influx with stream 2021-11-25 03:13:05 +01:00
7ebcf573b9 Implemented stream to transform packets to datapoints 2021-11-25 02:54:43 +01:00
38985ea9e2 Remove debug-logging for now 2021-11-25 01:48:41 +01:00
1976838d8a Remove unnecessary properties 2021-11-25 01:48:07 +01:00
a1ce7a848b Replaced MoreData and isRetry with flags 2021-11-25 01:37:35 +01:00
21e4ff1a66 Removing frameCheckSequence from packets 2021-11-25 01:10:49 +01:00
25297ed4d9 Fixed data-regex 2021-11-25 01:10:34 +01:00
68541b1191 Added check for NoData-Packet 2021-11-25 01:09:25 +01:00
7bc5a3530c Made keyInfo vars const 2021-11-25 00:21:25 +01:00
d0ee1a7044 Fixed getter payloadSize 2021-11-25 00:19:53 +01:00
57394882cc Implemented reading keyExchangeData and determining handshake-number 2021-11-25 00:19:31 +01:00
b41d728ba9 Fixed missing export 2021-11-25 00:18:37 +01:00
d6135a02e9 Fixed regex 2021-11-25 00:18:21 +01:00
ffe14e3f53 Added hexConverter helper-module 2021-11-24 23:22:09 +01:00
09bea81058 Returning packet so as not to lose information 2021-11-24 23:13:35 +01:00
66c0fdfe12 Fixed Object.assign 2021-11-24 23:13:04 +01:00
b32b5cbed7 Changed regex to use lookbehind/lookahead instead of groups 2021-11-24 23:12:47 +01:00
e86b5fca9a Added durationMicros 2021-11-24 22:35:59 +01:00
d2178f3b73 Implemented PacketType Disassociation 2021-11-24 22:35:07 +01:00
fe5dec7860 Implemented PacketType AssociationRequest & Response 2021-11-24 22:29:41 +01:00
0d472e8cb3 Implemented PacketType Authentication 2021-11-24 22:27:22 +01:00
bba5da2599 Moved packet-creation outside for all cases 2021-11-24 22:25:10 +01:00
c2210dfb59 Added isRetry field 2021-11-24 21:11:19 +01:00
3c3ab18410 Revert "Added PacketType ContentionFreePeriod"
This reverts commit 333a514363.
2021-11-24 21:01:17 +01:00
aa9d5431f8 Changed payloadSize to payloadData with payloadSize getter 2021-11-24 20:44:04 +01:00
62b55b94f1 Added missing payloadSize to Packet 2021-11-24 20:42:33 +01:00
025edf8465 Implemented getting payloadData and converting it to bytes 2021-11-24 20:42:10 +01:00
59e7188543 Changed most checks using Optional chaining and Nullish coalescing operator 2021-11-24 20:39:20 +01:00
333a514363 Added PacketType ContentionFreePeriod 2021-11-24 20:35:44 +01:00
d714b342ed Removed debug read statement 2021-11-24 20:35:10 +01:00
1aae9d5e71 Added library luxon 2021-11-24 20:01:33 +01:00
6bc248b667 First implementation of PacketStreamFactory 2021-11-24 20:01:03 +01:00
0cbbecbd2c Added typeMoreData 2021-11-24 20:00:28 +01:00
368b6585ba Now using internal buffer-system 2021-11-24 20:00:05 +01:00
4f1463eb4f Fixed args being wrong way around 2021-11-24 19:40:46 +01:00
d11aac5599 Implemented RegexBlockStream 2021-11-24 15:35:29 +01:00
fa8d630283 Change exec to just return process and streams 2021-11-24 15:35:05 +01:00
9c28ed53e3 Added exec-module 2021-11-24 13:11:01 +01:00
cc8b106157 Added logger-setup-module 2021-11-24 13:10:41 +01:00
8ee189f52b Fixed wrong capitalisation 2021-11-24 01:16:37 +01:00
79efcc3951 Fixed missing import 2021-11-24 01:12:13 +01:00
7dd131d6b5 Implemented dto-obj Packet 2021-11-24 01:11:42 +01:00
1b6721d242 Implemented env defaults and checking 2021-11-24 01:07:29 +01:00
d13df7b03d Add comment for logger 2021-11-24 01:06:44 +01:00
34c73c82c8 Gave logger name 2021-11-24 01:06:29 +01:00
93f78aafe0 Changed || to ?? to detect undefined env 2021-11-24 01:06:09 +01:00
184432f642 Added helper for env-vars 2021-11-24 01:03:40 +01:00
7562ff2268 Added module log4js 2021-11-23 17:05:43 +01:00
20ed3d0928 Docker setup 2021-11-23 15:15:10 +01:00
6ccd02af6c Added start-script 2021-11-23 15:14:55 +01:00
f862c362fe npm init 2021-11-23 15:09:53 +01:00
31 changed files with 1109 additions and 532 deletions

.dockerignore Normal file

@@ -0,0 +1,3 @@
node_modules
npm-debug.log
.*

CMakeLists.txt

@@ -1,13 +0,0 @@
cmake_minimum_required(VERSION 3.0.0)
project(rfmon-to-influx VERSION 0.1.0)
include(CTest)
enable_testing()
add_executable(rfmon-to-influx main.cpp)
target_link_libraries(rfmon-to-influx pthread)
set(CPACK_PROJECT_NAME ${PROJECT_NAME})
set(CPACK_PROJECT_VERSION ${PROJECT_VERSION})
include(CPack)

DTO/beaconPacket.hpp

@@ -1,18 +0,0 @@
#ifndef FDDB997A_BCD3_4056_BFEA_9FF6A548DACF
#define FDDB997A_BCD3_4056_BFEA_9FF6A548DACF
#include "./packet.hpp"
#include <string>
class BeaconPacket : public Packet{
public:
BeaconPacket()
{}
BeaconPacket(const Packet &packet)
: Packet(packet)
{}
std::string ssid;
};
#endif /* FDDB997A_BCD3_4056_BFEA_9FF6A548DACF */

DTO/packet.hpp

@@ -1,47 +0,0 @@
#ifndef C42FA9F6_8CF3_453F_8FA0_918E543DCD59
#define C42FA9F6_8CF3_453F_8FA0_918E543DCD59
#include <string>
enum PacketType {
Beacon,
ProbeRequest,
ProbeResponse,
Data,
RequestToSend,
ClearToSend,
Acknowledgment,
BlockAcknowledgment,
NoData,
Unknown
};
const std::array<const char*, 10> PACKET_TYPE_NAMES({{
"Beacon",
"Probe Request",
"Probe Response",
"Data",
"Request to send",
"Clear to send",
"Acknowledgment",
"BlockAcknowledgment",
"NoData",
"Unknown"
}});
struct Packet {
uint64_t timestampMicros;
std::string srcMac;
std::string dstMac;
std::string bssid;
unsigned int payloadSize;
char signal;
unsigned int frequency;
unsigned char dataRate;
PacketType type;
};
#endif /* C42FA9F6_8CF3_453F_8FA0_918E543DCD59 */

DTO/probeRequestPacket.hpp

@@ -1,18 +0,0 @@
#ifndef CD2BF199_8153_4F10_A85C_50883FAD66A8
#define CD2BF199_8153_4F10_A85C_50883FAD66A8
#include "./packet.hpp"
#include <string>
class ProbeRequestPacket : public Packet{
public:
ProbeRequestPacket()
{}
ProbeRequestPacket(const Packet &packet)
: Packet(packet)
{}
std::string requestSsid;
};
#endif /* CD2BF199_8153_4F10_A85C_50883FAD66A8 */

DTO/probeResponsePacket.hpp

@@ -1,18 +0,0 @@
#ifndef B199B4B3_BE27_4F0C_8DBE_5E78580AB1A9
#define B199B4B3_BE27_4F0C_8DBE_5E78580AB1A9
#include "./packet.hpp"
#include <string>
class ProbeResponsePacket : public Packet{
public:
ProbeResponsePacket()
{}
ProbeResponsePacket(const Packet &packet)
: Packet(packet)
{}
std::string responseSsid;
};
#endif /* B199B4B3_BE27_4F0C_8DBE_5E78580AB1A9 */

Dockerfile Normal file

@@ -0,0 +1,19 @@
FROM node:16-alpine
# Create app directory
WORKDIR /usr/src/app
# Install app dependencies
COPY package*.json ./
RUN npm install
# remove development dependencies
RUN npm prune --production
# Install required apk-packages & delete cache
RUN apk update && apk add tcpdump && rm -rf /var/cache/apk/*
# Bundle app source
COPY ./src/ .
CMD ["npm", "run", "start"]

handler/bufHandler.hpp

@@ -1,28 +0,0 @@
#ifndef C251BA62_6D80_4033_86B6_61F184E6F250
#define C251BA62_6D80_4033_86B6_61F184E6F250
#include <future>
#include <string>
#include "textPacketHandler.hpp"
using namespace std::string_literals;
std::vector<std::string> buffer;
void bufHandler(const char *buf){
std::string line = buf;
// Remove last char which is \n
line = line.substr(0, line.size()-1);
// When first char of buf has text (no tab), we got a new packet
if(buf[0] != '\t'){
// Submit the just-read text-packet in a new thread
if(buffer.size() != 0) {
(void)std::async(std::launch::async, textPacketHandler, buffer);
}
buffer = {line};
}
else
buffer.push_back(line); // Append part-packet
}
#endif /* C251BA62_6D80_4033_86B6_61F184E6F250 */

handler/packetHandler.hpp

@@ -1,10 +0,0 @@
#ifndef EA8E466A_DAAA_4747_9CEE_65B77A4EF694
#define EA8E466A_DAAA_4747_9CEE_65B77A4EF694
#include "../DTO/packet.hpp"
void packetHandler(const Packet &packet){
}
#endif /* EA8E466A_DAAA_4747_9CEE_65B77A4EF694 */

handler/textPacketHandler.hpp

@@ -1,185 +0,0 @@
#ifndef EE781A91_6D07_47AC_B3C4_F99E29F3731F
#define EE781A91_6D07_47AC_B3C4_F99E29F3731F
#include <string>
#include "../DTO/packet.hpp"
#include "../DTO/beaconPacket.hpp"
#include "../DTO/probeRequestPacket.hpp"
#include "../DTO/probeResponsePacket.hpp"
#include <vector>
#include <locale>
#include <iomanip>
#include "../helper/string-helper.hpp"
#include "../helper/timestampConvert.hpp"
#include "../helper/find.hpp"
#include "../helper/vector-stats.hpp"
#include <unordered_map>
#include "./packetHandler.hpp"
using namespace std::string_literals;
const std::unordered_map<std::string, PacketType> PACKET_TYPE_MAP({
{"Beacon", PacketType::Beacon},
{"Probe Request", PacketType::ProbeRequest},
{"Probe Response", PacketType::ProbeResponse},
{"Data", PacketType::Data},
{"Request-To-Send", PacketType::RequestToSend},
{"Clear-To-Send", PacketType::ClearToSend},
{"Acknowledgment", PacketType::Acknowledgment},
{"BA", PacketType::BlockAcknowledgment}
});
void parseHeader(Packet &packet, const std::vector<std::string> &textPacket);
void parsePayload(Packet &packet, const std::vector<std::string> &textPacket);
void textPacketHandler(const std::vector<std::string> textPacket){
/// Here we have to parse the packet
// Create empty packet
Packet packet;
parseHeader(packet, textPacket);
parsePayload(packet, textPacket);
packetHandler(packet);
}
void parseHeader(Packet &packet, const std::vector<std::string> &textPacket){
const std::string textHeader = textPacket[0];
const std::vector<std::string> headerData = split(textHeader, ' ');
std::string textTimestamp = headerData[0];
uint64_t timestamp = convertStringToTimestampMicros(textTimestamp);
// Find remaining data based on keys in/around fields
int linkSpeedIndex = findIs(headerData, "Mb/s", 1, 1);
packet.dataRate = std::stoi(headerData[linkSpeedIndex]);
int frequencyIndex = findIs(headerData, "MHz", 1, 1);
packet.frequency = std::stoi(headerData[frequencyIndex]);
int signalIndex = findIs(headerData, "signal", 1, 1);
if(signalIndex != -1){
std::string signalText = headerData[signalIndex].substr(0, 3);
packet.signal = std::stoi(signalText);
}
else {
fprintf(stderr, "Missing signal-data!\n");
packet.signal = -100;
}
// Addresses seem complicated at first, but just have many fields which might be available.
// SA and DA are src- and dst-Addresses
// BSSID is the used bssid
// TA and RA are transmitter- and receiver-address which are used exclusively for RTS and CTS in tcpdump
// BEWARE: SA, DA, BSSID, TA and RA can be used together, but tcpdump doesn't display all of them!
// DA might also not be a valid MAC-address, but Broadcast or an encoded IPv4/6 Multicast-address
int saIndex = findContains(headerData, "SA:", 1);
std::string sAddr = (saIndex != -1) ? headerData[saIndex].substr("SA:"s.length()) : "";
int daIndex = findContains(headerData, "DA:", 1);
std::string dAddr = (daIndex != -1) ? headerData[daIndex].substr("DA:"s.length()) : "";
int bssidIndex = findContains(headerData, "BSSID:", 1);
std::string bssidAddr = (bssidIndex != -1) ? headerData[bssidIndex].substr("BSSID:"s.length()) : "";
int taIndex = findContains(headerData, "TA:", 1);
std::string tAddr = (taIndex != -1) ? headerData[taIndex].substr("TA:"s.length()) : "";
int raIndex = findContains(headerData, "RA:", 1);
std::string rAddr = (raIndex != -1) ? headerData[raIndex].substr("RA:"s.length()) : "";
// Depending on which address-fields are actually set, choose which ones to use
if(sAddr == "" && tAddr != "") sAddr = tAddr;
if(dAddr == "" && rAddr != "") dAddr = rAddr;
// Set addresses to packet
packet.srcMac = sAddr;
packet.dstMac = dAddr;
packet.bssid = bssidAddr;
// Identify type of packet
// -> comes right after the addresses
int typeIndex = max(std::vector({saIndex, daIndex, bssidIndex, taIndex, raIndex}))+1;
PacketType type = PacketType::Unknown;
if(typeIndex == headerData.size()) type = PacketType::NoData;
else {
std::string textType = headerData[typeIndex];
// Check for incomplete types
if(textType == "Probe"){
textType += " "+ headerData[typeIndex+1];
}
// If type is in map, use map-value, otherwise keep default
if(PACKET_TYPE_MAP.find(textType) != PACKET_TYPE_MAP.end())
type = PACKET_TYPE_MAP.at(textType);
if(type == PacketType::Unknown){
fprintf(stderr, "Unknown package-type: %s\n", textType.c_str());
}
}
packet.type = type;
// Read data for specializations
if(type == PacketType::Beacon){
// Create BeaconPacket from packet
BeaconPacket beaconPacket = BeaconPacket(packet);
packet = beaconPacket; // Overwrite packet
// Find ssid
int start = textHeader.find('(')+1;
std::string ssid = textHeader.substr(start, textHeader.find(')')-start);
// Write to packet
beaconPacket.ssid = ssid;
}
else if (type == PacketType::ProbeRequest){
// Create ProbeRequestPacket from packet
ProbeRequestPacket probeRequestPacket = ProbeRequestPacket(packet);
packet = probeRequestPacket; // Overwrite packet
// Find probe-request
int start = textHeader.find('(')+1;
std::string requestSsid = textHeader.substr(start, textHeader.find(')')-start);
// Write to packet
probeRequestPacket.requestSsid = requestSsid;
}
else if (type == PacketType::ProbeResponse){
// Create ProbeResponsePacket from packet
ProbeResponsePacket probeResponsePacket = ProbeResponsePacket(packet);
packet = probeResponsePacket; // Overwrite packet
// Find probe-response
int start = textHeader.find('(')+1;
std::string responseSsid = textHeader.substr(start, textHeader.find(')')-start);
// Write to packet
probeResponsePacket.responseSsid = responseSsid;
}
}
void parsePayload(Packet &packet, const std::vector<std::string> &textPacket){
// Expect a max of 16 bytes per line of payload
unsigned int payloadSize = 16*(textPacket.size()-1);
// Go through last line
int line = textPacket.size()-1, charPos;
for(int f=0; f<8*2; ++f){
charPos = 10 + (f/2.0*5);
if(textPacket[line][charPos] == ' ') { // When our char is space, no more data is present
// Set size
payloadSize = 16*(textPacket.size()-2)+f;
break;
}
}
packet.payloadSize = payloadSize;
}
#endif /* EE781A91_6D07_47AC_B3C4_F99E29F3731F */

helper/exec.hpp

@@ -1,31 +0,0 @@
#ifndef B89BC3C5_AD59_4765_AA06_8110111D316F
#define B89BC3C5_AD59_4765_AA06_8110111D316F
#include <cstdio>
#include <stdexcept>
/// @brief Executes given command and optionally sends buffer to handler
/// @param cmd is the command
/// @param handler is the handler(char*)-function
/// @return Return-code from command
int exec(const char* cmd, void (*handler)(const char*) = nullptr){
const int buf_size = 512;
char buf[buf_size];
// Open execution-pipe
FILE *pipe = popen(cmd, "r");
if (!pipe) {
throw std::runtime_error("popen() failed!");
}
while (fgets(buf, buf_size, pipe) != nullptr) {
// When a handler is specified, call it
if(handler != nullptr) (*handler)(buf);
}
// Close pipe and read exit-code
return WEXITSTATUS(pclose(pipe));
}
#endif /* B89BC3C5_AD59_4765_AA06_8110111D316F */

helper/find.hpp

@@ -1,53 +0,0 @@
#ifndef B6A9DEE0_30C6_4492_AB96_87D9C5C10E8B
#define B6A9DEE0_30C6_4492_AB96_87D9C5C10E8B
#include <string>
#include <vector>
/// @brief Internal function
void prepare(const int &size, int &start, const int &offset, int &end){
// Set missing fields
if(!end) end = size;
// Edit start/end according to offset
if(offset < 0)
start += offset;
else if(offset > 0)
end -= offset;
}
/// @brief Find str-index based on contains-content
/// @param data is the vector-string-data to search
/// @param strContains string to find
/// @param start where to start searching
/// @param offset search offset to position (results in index being shifted by -offset)
/// @param end where to end searching
/// @return index of the found element (with offset if any)
int findContains(const std::vector<std::string> &data, const std::string &strContains, int start = 0, int offset = 0, int end = 0){
prepare(data.size(), start, offset, end);
for(int i=start; i<data.size()-offset; ++i){
if(!data[i+offset].find(strContains))
return i;
}
return -1;
}
/// @brief Find str-index based on exact-content
/// @param data is the vector-string-data to search
/// @param strIs string to find (exact)
/// @param start where to start searching
/// @param offset search offset to position (results in index being shifted by -offset)
/// @param end where to end searching
/// @return index of the found element (with offset if any)
int findIs(const std::vector<std::string> &data, const std::string &strIs, int start = 0, int offset = 0, int end = 0){
prepare(data.size(), start, offset, end);
for(int i=start; i<data.size()-offset; ++i){
if(data[i+offset] == strIs)
return i;
}
return -1;
}
#endif /* B6A9DEE0_30C6_4492_AB96_87D9C5C10E8B */

helper/string-helper.hpp

@@ -1,34 +0,0 @@
#ifndef F7CFE6A7_34BF_4E04_94CF_DB8374980631
#define F7CFE6A7_34BF_4E04_94CF_DB8374980631
#include <vector>
#include <string>
#include <sstream>
std::vector<std::string> split(const std::string& s, char delimiter)
{
std::vector<std::string> tokens;
std::string token;
std::istringstream tokenStream(s);
while (std::getline(tokenStream, token, delimiter))
{
tokens.push_back(token);
}
return tokens;
}
char hex_char_to_int(const char &c) {
unsigned char result = 0;
if( ('0' <= c) && (c <= '9') ) {
result = c - '0';
}
else if( ('A' <= c) && (c <= 'F') ) {
result = 10 + c - 'A';
}
else if( ('a' <= c) && (c <= 'f') ) {
result = 10 + c - 'a';
}
return result;
}
#endif /* F7CFE6A7_34BF_4E04_94CF_DB8374980631 */

helper/timestampConvert.hpp

@@ -1,36 +0,0 @@
#ifndef CC724CA7_8BB8_43B9_8A9A_54BD880A76AA
#define CC724CA7_8BB8_43B9_8A9A_54BD880A76AA
uint64_t convertStringToTimestampMicros(std::string textTimestamp){
uint64_t timestamp;
std::tm t = {};
std::istringstream ssTimestamp = std::istringstream(textTimestamp);
if (ssTimestamp >> std::get_time(&t, "%H:%M:%S"))
{
// Get current time
std::time_t curT = std::time(0);
std::tm* curTime = std::localtime(&curT);
// Set missing fields
t.tm_mday = curTime->tm_mday;
t.tm_mon = curTime->tm_mon;
t.tm_year = curTime->tm_year;
t.tm_zone = curTime->tm_zone;
// Convert tm to time
std::time_t time = std::mktime(&t);
// Get micros
int micros = std::stoi(textTimestamp.substr(9, 6));
// Calculate timestamp epoch in micros
timestamp = time*1000000 + micros;
return timestamp;
}
else
{
throw std::runtime_error("Could not parse time: '"+ textTimestamp +"'");
}
}
#endif /* CC724CA7_8BB8_43B9_8A9A_54BD880A76AA */

helper/vector-stats.hpp

@@ -1,15 +0,0 @@
#ifndef C437A277_1F23_496D_9B69_A21D771ECA91
#define C437A277_1F23_496D_9B69_A21D771ECA91
#include <vector>
#include <limits.h>
int max(std::vector<int> vec){
int max = INT_MIN;
for(int i=0; i<vec.size(); ++i){
if(vec[i] > max) max = vec[i];
}
return max;
}
#endif /* C437A277_1F23_496D_9B69_A21D771ECA91 */

main.cpp

@@ -1,26 +0,0 @@
#include <stdio.h>
#include <string>
#include "./helper/exec.hpp"
#include "./handler/bufHandler.hpp"
const std::string tcpdump_baseCmd = "tcpdump -vvv -e -n -X -s0 -i ";
int main(int argc, char *args[]){
std::string tcpdump_cmd;
if(argc == 2){
tcpdump_cmd = tcpdump_baseCmd + args[1];
} else {
fprintf(stderr, "Missing interface\n");
exit(1);
}
int exitCode = exec(tcpdump_cmd.c_str(), &bufHandler);
if(exitCode){
fprintf(stderr, "\ntcpdump exited with non-zero ExitCode: %d\n Something went wrong! Check tcpdump-output for more information.\n", exitCode);
exit(1);
}
return 0;
}

package-lock.json generated Normal file

@@ -0,0 +1,266 @@
{
"name": "rfmon-to-influx",
"version": "1.0.0",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "rfmon-to-influx",
"version": "1.0.0",
"license": "AGPL-3.0",
"dependencies": {
"@influxdata/influxdb-client": "^1.20.0",
"@influxdata/influxdb-client-apis": "^1.20.0",
"log4js": "^6.3.0",
"luxon": "^2.1.1",
"string-argv": "^0.3.1"
}
},
"node_modules/@influxdata/influxdb-client": {
"version": "1.20.0",
"resolved": "https://registry.npmjs.org/@influxdata/influxdb-client/-/influxdb-client-1.20.0.tgz",
"integrity": "sha512-jaKSI63hmQ5VSkJrFJkYIXaKlhoF+mGd4HmOf7v/X7pmEi69ReHp922Wyx6/OeCrpndRMbsadk+XmGNdd43cFw=="
},
"node_modules/@influxdata/influxdb-client-apis": {
"version": "1.20.0",
"resolved": "https://registry.npmjs.org/@influxdata/influxdb-client-apis/-/influxdb-client-apis-1.20.0.tgz",
"integrity": "sha512-KMTmXH4rbpS+NWGpqDjxcKTyan2rbiT2IM5AdRElKhH2sHbH96xwLgziaxeC+OCJLeNAdehJgae3I8WiZjbwdg==",
"peerDependencies": {
"@influxdata/influxdb-client": "*"
}
},
"node_modules/date-format": {
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/date-format/-/date-format-3.0.0.tgz",
"integrity": "sha512-eyTcpKOcamdhWJXj56DpQMo1ylSQpcGtGKXcU0Tb97+K56/CF5amAqqqNj0+KvA0iw2ynxtHWFsPDSClCxe48w==",
"engines": {
"node": ">=4.0"
}
},
"node_modules/debug": {
"version": "4.3.2",
"resolved": "https://registry.npmjs.org/debug/-/debug-4.3.2.tgz",
"integrity": "sha512-mOp8wKcvj7XxC78zLgw/ZA+6TSgkoE2C/ienthhRD298T7UNwAg9diBpLRxC0mOezLl4B0xV7M0cCO6P/O0Xhw==",
"dependencies": {
"ms": "2.1.2"
},
"engines": {
"node": ">=6.0"
},
"peerDependenciesMeta": {
"supports-color": {
"optional": true
}
}
},
"node_modules/flatted": {
"version": "2.0.2",
"resolved": "https://registry.npmjs.org/flatted/-/flatted-2.0.2.tgz",
"integrity": "sha512-r5wGx7YeOwNWNlCA0wQ86zKyDLMQr+/RB8xy74M4hTphfmjlijTSSXGuH8rnvKZnfT9i+75zmd8jcKdMR4O6jA=="
},
"node_modules/fs-extra": {
"version": "8.1.0",
"resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-8.1.0.tgz",
"integrity": "sha512-yhlQgA6mnOJUKOsRUFsgJdQCvkKhcz8tlZG5HBQfReYZy46OwLcY+Zia0mtdHsOo9y/hP+CxMN0TU9QxoOtG4g==",
"dependencies": {
"graceful-fs": "^4.2.0",
"jsonfile": "^4.0.0",
"universalify": "^0.1.0"
},
"engines": {
"node": ">=6 <7 || >=8"
}
},
"node_modules/graceful-fs": {
"version": "4.2.8",
"resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.8.tgz",
"integrity": "sha512-qkIilPUYcNhJpd33n0GBXTB1MMPp14TxEsEs0pTrsSVucApsYzW5V+Q8Qxhik6KU3evy+qkAAowTByymK0avdg=="
},
"node_modules/jsonfile": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-4.0.0.tgz",
"integrity": "sha1-h3Gq4HmbZAdrdmQPygWPnBDjPss=",
"optionalDependencies": {
"graceful-fs": "^4.1.6"
}
},
"node_modules/log4js": {
"version": "6.3.0",
"resolved": "https://registry.npmjs.org/log4js/-/log4js-6.3.0.tgz",
"integrity": "sha512-Mc8jNuSFImQUIateBFwdOQcmC6Q5maU0VVvdC2R6XMb66/VnT+7WS4D/0EeNMZu1YODmJe5NIn2XftCzEocUgw==",
"dependencies": {
"date-format": "^3.0.0",
"debug": "^4.1.1",
"flatted": "^2.0.1",
"rfdc": "^1.1.4",
"streamroller": "^2.2.4"
},
"engines": {
"node": ">=8.0"
}
},
"node_modules/luxon": {
"version": "2.1.1",
"resolved": "https://registry.npmjs.org/luxon/-/luxon-2.1.1.tgz",
"integrity": "sha512-6VQVNw7+kQu3hL1ZH5GyOhnk8uZm21xS7XJ/6vDZaFNcb62dpFDKcH8TI5NkoZOdMRxr7af7aYGrJlE/Wv0i1w==",
"engines": {
"node": ">=12"
}
},
"node_modules/ms": {
"version": "2.1.2",
"resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz",
"integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w=="
},
"node_modules/rfdc": {
"version": "1.3.0",
"resolved": "https://registry.npmjs.org/rfdc/-/rfdc-1.3.0.tgz",
"integrity": "sha512-V2hovdzFbOi77/WajaSMXk2OLm+xNIeQdMMuB7icj7bk6zi2F8GGAxigcnDFpJHbNyNcgyJDiP+8nOrY5cZGrA=="
},
"node_modules/streamroller": {
"version": "2.2.4",
"resolved": "https://registry.npmjs.org/streamroller/-/streamroller-2.2.4.tgz",
"integrity": "sha512-OG79qm3AujAM9ImoqgWEY1xG4HX+Lw+yY6qZj9R1K2mhF5bEmQ849wvrb+4vt4jLMLzwXttJlQbOdPOQVRv7DQ==",
"dependencies": {
"date-format": "^2.1.0",
"debug": "^4.1.1",
"fs-extra": "^8.1.0"
},
"engines": {
"node": ">=8.0"
}
},
"node_modules/streamroller/node_modules/date-format": {
"version": "2.1.0",
"resolved": "https://registry.npmjs.org/date-format/-/date-format-2.1.0.tgz",
"integrity": "sha512-bYQuGLeFxhkxNOF3rcMtiZxvCBAquGzZm6oWA1oZ0g2THUzivaRhv8uOhdr19LmoobSOLoIAxeUK2RdbM8IFTA==",
"engines": {
"node": ">=4.0"
}
},
"node_modules/string-argv": {
"version": "0.3.1",
"resolved": "https://registry.npmjs.org/string-argv/-/string-argv-0.3.1.tgz",
"integrity": "sha512-a1uQGz7IyVy9YwhqjZIZu1c8JO8dNIe20xBmSS6qu9kv++k3JGzCVmprbNN5Kn+BgzD5E7YYwg1CcjuJMRNsvg==",
"engines": {
"node": ">=0.6.19"
}
},
"node_modules/universalify": {
"version": "0.1.2",
"resolved": "https://registry.npmjs.org/universalify/-/universalify-0.1.2.tgz",
"integrity": "sha512-rBJeI5CXAlmy1pV+617WB9J63U6XcazHHF2f2dbJix4XzpUF0RS3Zbj0FGIOCAva5P/d/GBOYaACQ1w+0azUkg==",
"engines": {
"node": ">= 4.0.0"
}
}
},
"dependencies": {
"@influxdata/influxdb-client": {
"version": "1.20.0",
"resolved": "https://registry.npmjs.org/@influxdata/influxdb-client/-/influxdb-client-1.20.0.tgz",
"integrity": "sha512-jaKSI63hmQ5VSkJrFJkYIXaKlhoF+mGd4HmOf7v/X7pmEi69ReHp922Wyx6/OeCrpndRMbsadk+XmGNdd43cFw=="
},
"@influxdata/influxdb-client-apis": {
"version": "1.20.0",
"resolved": "https://registry.npmjs.org/@influxdata/influxdb-client-apis/-/influxdb-client-apis-1.20.0.tgz",
"integrity": "sha512-KMTmXH4rbpS+NWGpqDjxcKTyan2rbiT2IM5AdRElKhH2sHbH96xwLgziaxeC+OCJLeNAdehJgae3I8WiZjbwdg==",
"requires": {}
},
"date-format": {
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/date-format/-/date-format-3.0.0.tgz",
"integrity": "sha512-eyTcpKOcamdhWJXj56DpQMo1ylSQpcGtGKXcU0Tb97+K56/CF5amAqqqNj0+KvA0iw2ynxtHWFsPDSClCxe48w=="
},
"debug": {
"version": "4.3.2",
"resolved": "https://registry.npmjs.org/debug/-/debug-4.3.2.tgz",
"integrity": "sha512-mOp8wKcvj7XxC78zLgw/ZA+6TSgkoE2C/ienthhRD298T7UNwAg9diBpLRxC0mOezLl4B0xV7M0cCO6P/O0Xhw==",
"requires": {
"ms": "2.1.2"
}
},
"flatted": {
"version": "2.0.2",
"resolved": "https://registry.npmjs.org/flatted/-/flatted-2.0.2.tgz",
"integrity": "sha512-r5wGx7YeOwNWNlCA0wQ86zKyDLMQr+/RB8xy74M4hTphfmjlijTSSXGuH8rnvKZnfT9i+75zmd8jcKdMR4O6jA=="
},
"fs-extra": {
"version": "8.1.0",
"resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-8.1.0.tgz",
"integrity": "sha512-yhlQgA6mnOJUKOsRUFsgJdQCvkKhcz8tlZG5HBQfReYZy46OwLcY+Zia0mtdHsOo9y/hP+CxMN0TU9QxoOtG4g==",
"requires": {
"graceful-fs": "^4.2.0",
"jsonfile": "^4.0.0",
"universalify": "^0.1.0"
}
},
"graceful-fs": {
"version": "4.2.8",
"resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.8.tgz",
"integrity": "sha512-qkIilPUYcNhJpd33n0GBXTB1MMPp14TxEsEs0pTrsSVucApsYzW5V+Q8Qxhik6KU3evy+qkAAowTByymK0avdg=="
},
"jsonfile": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-4.0.0.tgz",
"integrity": "sha1-h3Gq4HmbZAdrdmQPygWPnBDjPss=",
"requires": {
"graceful-fs": "^4.1.6"
}
},
"log4js": {
"version": "6.3.0",
"resolved": "https://registry.npmjs.org/log4js/-/log4js-6.3.0.tgz",
"integrity": "sha512-Mc8jNuSFImQUIateBFwdOQcmC6Q5maU0VVvdC2R6XMb66/VnT+7WS4D/0EeNMZu1YODmJe5NIn2XftCzEocUgw==",
"requires": {
"date-format": "^3.0.0",
"debug": "^4.1.1",
"flatted": "^2.0.1",
"rfdc": "^1.1.4",
"streamroller": "^2.2.4"
}
},
"luxon": {
"version": "2.1.1",
"resolved": "https://registry.npmjs.org/luxon/-/luxon-2.1.1.tgz",
"integrity": "sha512-6VQVNw7+kQu3hL1ZH5GyOhnk8uZm21xS7XJ/6vDZaFNcb62dpFDKcH8TI5NkoZOdMRxr7af7aYGrJlE/Wv0i1w=="
},
"ms": {
"version": "2.1.2",
"resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz",
"integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w=="
},
"rfdc": {
"version": "1.3.0",
"resolved": "https://registry.npmjs.org/rfdc/-/rfdc-1.3.0.tgz",
"integrity": "sha512-V2hovdzFbOi77/WajaSMXk2OLm+xNIeQdMMuB7icj7bk6zi2F8GGAxigcnDFpJHbNyNcgyJDiP+8nOrY5cZGrA=="
},
"streamroller": {
"version": "2.2.4",
"resolved": "https://registry.npmjs.org/streamroller/-/streamroller-2.2.4.tgz",
"integrity": "sha512-OG79qm3AujAM9ImoqgWEY1xG4HX+Lw+yY6qZj9R1K2mhF5bEmQ849wvrb+4vt4jLMLzwXttJlQbOdPOQVRv7DQ==",
"requires": {
"date-format": "^2.1.0",
"debug": "^4.1.1",
"fs-extra": "^8.1.0"
},
"dependencies": {
"date-format": {
"version": "2.1.0",
"resolved": "https://registry.npmjs.org/date-format/-/date-format-2.1.0.tgz",
"integrity": "sha512-bYQuGLeFxhkxNOF3rcMtiZxvCBAquGzZm6oWA1oZ0g2THUzivaRhv8uOhdr19LmoobSOLoIAxeUK2RdbM8IFTA=="
}
}
},
"string-argv": {
"version": "0.3.1",
"resolved": "https://registry.npmjs.org/string-argv/-/string-argv-0.3.1.tgz",
"integrity": "sha512-a1uQGz7IyVy9YwhqjZIZu1c8JO8dNIe20xBmSS6qu9kv++k3JGzCVmprbNN5Kn+BgzD5E7YYwg1CcjuJMRNsvg=="
},
"universalify": {
"version": "0.1.2",
"resolved": "https://registry.npmjs.org/universalify/-/universalify-0.1.2.tgz",
"integrity": "sha512-rBJeI5CXAlmy1pV+617WB9J63U6XcazHHF2f2dbJix4XzpUF0RS3Zbj0FGIOCAva5P/d/GBOYaACQ1w+0azUkg=="
}
}
}

package.json Normal file

@@ -0,0 +1,23 @@
{
"name": "rfmon-to-influx",
"version": "1.0.0",
"description": "Writing (mostly meta-) data received in Wireless-Monitor-Mode into an InfluxDB",
"main": "main.js",
"scripts": {
"test": "echo \"Error: no test specified\" && exit 1",
"start": "node main.js"
},
"repository": {
"type": "git",
"url": "https://gitea.ruekov.eu/Ruakij/rfmon-to-influx"
},
"author": "Ruakij",
"license": "AGPL-3.0",
"dependencies": {
"@influxdata/influxdb-client": "^1.20.0",
"@influxdata/influxdb-client-apis": "^1.20.0",
"log4js": "^6.3.0",
"luxon": "^2.1.1",
"string-argv": "^0.3.1"
}
}

src/dto/Packet.js Normal file

@@ -0,0 +1,105 @@
const PacketType = {
Beacon: "Beacon",
ProbeRequest: "ProbeRequest",
ProbeResponse: "ProbeResponse",
Data: "Data",
RequestToSend: "RequestToSend",
ClearToSend: "ClearToSend",
Acknowledgment: "Acknowledgment",
BlockAcknowledgment: "BlockAcknowledgment",
NoData: "NoData",
Authentication: "Authentication",
AssociationRequest: "AssociationRequest",
AssociationResponse: "AssociationResponse",
Disassociation: "Disassociation",
Handshake: "Handshake",
Unknown: "Unknown"
};
const FlagType = {
MoreFragments: "MoreFragments",
Retry: "Retry",
PwrMgt: "PwrMgt",
MoreData: "MoreData",
Protected: "Protected",
Order: "Order"
};
class Packet{
timestampMicros;
flags = {};
srcMac;
dstMac;
bssid;
signal;
frequency;
dataRate;
durationMicros;
payloadData;
get payloadSize(){
return this.payloadData.length;
}
packetType;
}
// Extensions of Packet
class PacketWithSSID extends Packet{
ssid;
}
class BeaconPacket extends PacketWithSSID{}
class ProbeRequestPacket extends PacketWithSSID{}
class ProbeResponsePacket extends PacketWithSSID{}
const AuthenticationType = {
OpenSystem_1: "OpenSystem_1",
OpenSystem_2: "OpenSystem_2",
Unknown: "Unknown",
};
class AuthenticationPacket extends Packet{
authenticationType;
}
class AssociationRequestPacket extends PacketWithSSID{}
class AssociationResponsePacket extends Packet{
associationIsSuccessful;
}
class DisassociationPacket extends Packet{
disassociationReason;
}
const HandshakeStage = {
1: "1",
2: "2",
3: "3",
4: "4"
};
class HandshakePacket extends Packet{
handshakeStage;
}
// Specify exports
module.exports = {
PacketType,
FlagType,
Packet,
PacketWithSSID,
BeaconPacket,
ProbeRequestPacket,
ProbeResponsePacket,
AuthenticationType,
AuthenticationPacket,
AssociationRequestPacket,
AssociationResponsePacket,
DisassociationPacket,
HandshakeStage,
HandshakePacket,
};
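
A quick orientation sketch (not part of the diff; values are made up) of how the dto-classes above are meant to be used:

const { BeaconPacket, PacketType, FlagType } = require("./dto/Packet.js");
const p = new BeaconPacket();
p.packetType = PacketType.Beacon;
p.flags[FlagType.Retry] = true;
p.ssid = "MyWifi";
p.payloadData = [0x01, 0x02, 0x03];
p.payloadSize; // -> 3, derived from payloadData by the getter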

src/helper/env.js Normal file

@@ -0,0 +1,13 @@
function requireEnvVars(requiredEnv){
// Ensure required ENV vars are set
let unsetEnv = requiredEnv.filter((env) => (typeof process.env[env] === "undefined"));
if (unsetEnv.length > 0) {
return "Required ENV variables are not set: [" + unsetEnv.join(", ") + "]";
}
}
// Specify exports
module.exports = {
requireEnvVars
};
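
A minimal usage sketch (mirroring how src/main.js calls it; the env-var names are just examples):

const { requireEnvVars } = require("./helper/env.js");
const errorMsg = requireEnvVars(["INFLUX_URL", "INFLUX_TOKEN"]);
if(errorMsg){ // e.g. "Required ENV variables are not set: [INFLUX_TOKEN]"
    console.error(errorMsg);
    process.exit(1);
}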

src/helper/exec.js Normal file

@@ -0,0 +1,18 @@
const logger = require("./logger.js")("exec");
const { spawn } = require("child_process");
const { parseArgsStringToArgv } = require("string-argv");
function exec(cmd, options){
const [bin, ...args] = parseArgsStringToArgv(cmd);
logger.addContext("binary", bin);
logger.debug(`Spawn process '${cmd}'`);
return spawn(bin, args, options);
}
// Specify exports
module.exports = {
exec
};
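
Usage sketch (command chosen for illustration; exec only splits the string into binary and args and hands them to spawn):

const { exec } = require("./helper/exec.js");
const proc = exec("tcpdump -i wlan0"); // spawns "tcpdump" with args ["-i", "wlan0"]
proc.stdout.setEncoding("utf8").on("data", (data) => console.log(data));
proc.on("exit", (code) => console.log(`exited with code ${code}`));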

src/helper/hexConverter.js Normal file

@@ -0,0 +1,24 @@
// From https://stackoverflow.com/a/34356351
// Convert a hex string to a byte array
function hexToBytes(hex) {
for (var bytes = [], c = 0; c < hex.length; c += 2)
bytes.push(parseInt(hex.substr(c, 2), 16));
return bytes;
}
// Convert a byte array to a hex string
function bytesToHex(bytes) {
for (var hex = [], i = 0; i < bytes.length; i++) {
var current = bytes[i] < 0 ? bytes[i] + 256 : bytes[i];
hex.push((current >>> 4).toString(16));
hex.push((current & 0xF).toString(16));
}
return hex.join("");
}
// Specify exports
module.exports = {
hexToBytes,
bytesToHex
};
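
A short round-trip example (values chosen for illustration):

const { hexToBytes, bytesToHex } = require("./helper/hexConverter.js");
hexToBytes("8a02");   // -> [138, 2]
bytesToHex([138, 2]); // -> "8a02"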

src/helper/influx-checks.js Normal file

@@ -0,0 +1,61 @@
const logger = require.main.require("./helper/logger.js")("influx-checks");
const Os = require("os");
const { InfluxDB, Point } = require("@influxdata/influxdb-client")
const Influx = require("@influxdata/influxdb-client-apis");
function checkHealth(influxDb){
return new Promise((resolve, reject) => {
new Influx.HealthAPI(influxDb) // Check influx health
.getHealth()
.catch((err) => {
logger.error("Could not communicate with Influx:");
logger.error(`Error [${err.code}]:`, err.message);
reject();
})
.then((res) => {
logger.debug("Server healthy.", "Version: ", res.version);
resolve(res);
});
});
}
function checkBucket(influxDb, options){
return new Promise((resolve, reject) => {
new Influx.BucketsAPI(influxDb).getBuckets(options)
.catch((err) => { // Weirdly the influx-Api returns 404 for searches of non-existing buckets
logger.error("Could not get bucket:");
logger.error(`Error [${err.code}]:`, err.message);
reject();
}).then((res) => { // But an empty list when the bucket exists, but token does not have permission to get details
logger.debug("Bucket found");
resolve(res);
// Now we know the bucket exists and we have some kind of permission.. but we still don't know if we are able to write to it..
});
});
}
function checkWriteApi(influxDb, options){
return new Promise((resolve, reject) => {
const writeApi = influxDb.getWriteApi(options.org, options.bucket); // Get WriteAPI
writeApi.writePoint(new Point("worker_connectionTest").tag("hostname", Os.hostname())); // Write point
writeApi.close()
.catch((err) => {
logger.error("Could not get writeApi:");
logger.error(`Error [${err.code}]:`, err.message);
reject();
}).then((res) => {
logger.debug("Writing ok");
resolve();
});
});
}
// Specify exports
module.exports = {
checkHealth,
checkBucket,
checkWriteApi,
};
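
Sketch of the intended call-order (the same chain src/main.js runs on startup; env-var names assumed):

const { InfluxDB } = require("@influxdata/influxdb-client");
const InfluxChecks = require("./helper/influx-checks.js");
const influxDb = new InfluxDB({url: process.env.INFLUX_URL, token: process.env.INFLUX_TOKEN});
InfluxChecks.checkHealth(influxDb)
    .then(() => InfluxChecks.checkBucket(influxDb, {org: process.env.INFLUX_ORG, name: process.env.INFLUX_BUCKET}))
    .then(() => InfluxChecks.checkWriteApi(influxDb, {org: process.env.INFLUX_ORG, bucket: process.env.INFLUX_BUCKET}))
    .catch(() => process.exit(1));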

src/helper/logger.js Normal file

@@ -0,0 +1,11 @@
const log4js = require("log4js");
function setup(category = "unknown"){
const logger = log4js.getLogger(category);
logger.level = process.env.LOGLEVEL ?? "INFO";
return logger;
}
// Specify exports
module.exports = setup;
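
Usage sketch; the category appears in every log-line and the level comes from LOGLEVEL:

const logger = require("./helper/logger.js")("main");
logger.info("Startup complete"); // e.g. [2021-12-06T13:01:31] [INFO] main - Startup complete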

src/helper/userHelper.js Normal file

@@ -0,0 +1,46 @@
// This file specifies functions to help a user with e.g. configuration-errors
function detectStreamData(stream, timeout = 5000){
return new Promise((resolve, reject) => {
let timeoutHandler;
if(timeout){
timeoutHandler = setTimeout(() => {
reject("timeout");
remListeners();
},
timeout);
}
function remListeners(){
stream.removeListener("error", errorHandler);
stream.removeListener("data", dataHandler);
if(timeoutHandler) clearTimeout(timeoutHandler);
}
function errorHandler(err) {
reject(err); // Reject, so the promise does not stay pending forever on stream-errors
remListeners();
}
function dataHandler(data) {
resolve(data);
remListeners();
}
stream.on("error", errorHandler);
stream.on("data", dataHandler);
});
}
function detectStreamsData(streams, timeout = 5000){
let promises = [];
streams.forEach((stream) => {
promises.push(detectStreamData(stream, timeout));
});
return promises;
}
// Specify exports
module.exports = {
detectStreamData,
detectStreamsData,
};
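
Usage sketch (mirrors src/main.js; "stream" stands for any readable stream, e.g. proc.stdout):

const { detectStreamData } = require("./helper/userHelper.js");
detectStreamData(stream, 10000)
    .then(() => console.log("Got first data"))
    .catch((err) => { if(err == "timeout") console.warn("No data after 10s! Wrong configuration?"); });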

src/helper/wifiStateAnalyzer.js Normal file

@@ -0,0 +1,43 @@
const { HandshakeStage } = require.main.require("./dto/Packet.js");
function keyInfoFromRaw(keyInfoRaw) {
return {
"KeyDescriptorVersion": keyInfoRaw>>0 & 0b111,
"KeyType": keyInfoRaw>>3 & 0b1,
"KeyIndex": keyInfoRaw>>4 & 0b11,
"Install": keyInfoRaw>>6 & 0b1,
"KeyACK": keyInfoRaw>>7 & 0b1,
"KeyMIC": keyInfoRaw>>8 & 0b1,
"Secure": keyInfoRaw>>9 & 0b1,
"Error": keyInfoRaw>>10 & 0b1,
"Request": keyInfoRaw>>11 & 0b1,
"EncryptedKeyData": keyInfoRaw>>12 & 0b1,
"SMKMessage": keyInfoRaw>>13 & 0b1,
};
}
const HANDSHAKE_STAGE_KEYINFO = {
"keys": ["Install", "KeyACK", "KeyMIC", "Secure"],
"0100": HandshakeStage[1],
"0010": HandshakeStage[2],
"1111": HandshakeStage[3],
"0011": HandshakeStage[4],
};
function handshakeStageFromKeyInfo(keyInfo){
// Extract compare-keys
let keyData = "";
for (const key of HANDSHAKE_STAGE_KEYINFO["keys"]) {
keyData += keyInfo[key].toString();
}
// Get and return stage
return HANDSHAKE_STAGE_KEYINFO[keyData];
}
// Specify exports
module.exports = {
keyInfoFromRaw,
handshakeStageFromKeyInfo,
};
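
Worked example: 0x008a is a typical key-information value for message 1 of a WPA2 4-way handshake (KeyDescriptorVersion=2, KeyType=1, KeyACK=1, rest 0):

const { keyInfoFromRaw, handshakeStageFromKeyInfo } = require("./helper/wifiStateAnalyzer.js");
const keyInfo = keyInfoFromRaw(0x008a); // Install=0, KeyACK=1, KeyMIC=0, Secure=0
handshakeStageFromKeyInfo(keyInfo);     // compare-key "0100" -> "1" (stage 1)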

src/main.js Normal file

@@ -0,0 +1,136 @@
"use strict";
const logFactory = require("./helper/logger.js");
const logger = logFactory("main");
const { requireEnvVars } = require("./helper/env.js");
const { exit } = require("process");
const { exec } = require("./helper/exec.js");
const Os = require("os");
const { InfluxDB } = require("@influxdata/influxdb-client");
const InfluxChecks = require("./helper/influx-checks.js");
const { RegexBlockStream } = require("./streamHandler/RegexBlockStream.js");
const { PacketStreamFactory } = require("./streamHandler/PacketStreamFactory.js");
const { PacketInfluxPointFactory } = require("./streamHandler/PacketInfluxPointFactory.js");
const { InfluxPointWriter } = require("./streamHandler/InfluxPointWriter.js");
const userHelper = require("./helper/userHelper.js");
/// Setup ENVs
const env = process.env;
// Defaults
{
env.LOGLEVEL ??= "INFO";
env.WIFI_INTERFACE ??= "wlan0";
env.HOSTNAME ??= Os.hostname();
}
// Required vars
let errorMsg = requireEnvVars([
"INFLUX_URL", "INFLUX_TOKEN",
"INFLUX_ORG", "INFLUX_BUCKET"
]);
if(errorMsg){
logger.fatal(errorMsg);
exit(1);
}
(async function() {
logger.info("Setup Influx..");
const influxDb = new InfluxDB({url: env.INFLUX_URL, token: env.INFLUX_TOKEN});
await InfluxChecks.checkHealth(influxDb)
.then((res) => {return InfluxChecks.checkBucket(influxDb, {
org: env.INFLUX_ORG,
name: env.INFLUX_BUCKET
});})
.then((res) => {return InfluxChecks.checkWriteApi(influxDb, {
org: env.INFLUX_ORG,
bucket: env.INFLUX_BUCKET
});})
.catch((err) => {
if(err) {
logger.error("Error whilst checking influx:");
logger.error(err);
}
logger.fatal("Setup influx failed!");
exit(1);
});
logger.debug("Get WriteApi & set default-hostname to", `'${env.HOSTNAME}'`);
const influxWriteApi = influxDb.getWriteApi(env.INFLUX_ORG, env.INFLUX_BUCKET, "us");
influxWriteApi.useDefaultTags({"hostname": env.HOSTNAME});
logger.info("Influx ok");
logger.info("Starting tcpdump..");
const TCPDUMP_BASECMD = "tcpdump -vvv -e -n -X -s0 -i";
let cmd = `${TCPDUMP_BASECMD} ${env.WIFI_INTERFACE}`;
let proc = exec(cmd);
logger.debug("Creating & Attaching streams..");
let regexBlockStream = new RegexBlockStream(/^\d{2}:\d{2}:\d{2}.\d{6}.*(\n( {4,8}|\t\t?).*)+\n/gm);
let packetStreamFactory = new PacketStreamFactory();
let packetInfluxPointFactory = new PacketInfluxPointFactory();
let influxPointWriter = new InfluxPointWriter(influxWriteApi);
proc.stdout
.setEncoding("utf8")
.pipe(regexBlockStream)
.pipe(packetStreamFactory)
.pipe(packetInfluxPointFactory)
.pipe(influxPointWriter);
logger.debug("Attaching error-logger..");
const loggerTcpdump = logFactory("tcpdump");
proc.stderr.setEncoding("utf8").on("data", (data) => {
if(data.match(/^(tcpdump: )?listening on /i) || data.match(/^\d+ packets captured/i)) { // Catch start-messages & capture-summary, which tcpdump writes to stderr
loggerTcpdump.debug(data);
}
else loggerTcpdump.error(data);
});
// FIXME: This is a hacky workaround to not let errors from subprocess bubble up and terminate our process
regexBlockStream.on("error", (err) => {});
proc.on("error", (err) => {
loggerTcpdump.error(err);
});
const loggerPacketStream = logFactory("PacketStreamFactory");
userHelper.detectStreamData(proc.stdout, 10000) // Expect tcpdump-logs to have data after max. 10s
.then(() => {
loggerTcpdump.debug("Got first data");
userHelper.detectStreamData(packetStreamFactory, 10000) // Expect then to have packets after further 10s
.then(() => {
loggerPacketStream.debug("Got first packet");
})
.catch((err) => {
if(err == "timeout") loggerPacketStream.warn("No packets");
});
})
.catch((err) => {
if(err == "timeout") loggerTcpdump.warn("No data after 10s! Wrong configuration?");
});
logger.debug("Attaching exit-handler..");
proc.on("exit", (code) => {
loggerTcpdump.debug(`tcpdump exited code: ${code}`);
if (code) {
loggerTcpdump.fatal(`tcpdump exited with non-zero code: ${code}`);
exit(1);
}
logger.info("Shutdown");
exit(0);
});
// Handle stop-signals for graceful shutdown
function shutdownReq() {
logger.info("Shutdown request received..");
logger.debug("Stopping subprocess tcpdump, then exiting myself..");
proc.kill(); // Kill process (send SIGTERM), then upper event-handler will stop self
}
process.on("SIGTERM", shutdownReq);
process.on("SIGINT", shutdownReq);
logger.info("Startup complete");
})();

src/streamHandler/InfluxPointWriter.js Normal file

@@ -0,0 +1,35 @@
const logger = require.main.require("./helper/logger.js")("InfluxPointWriter");
const { Writable } = require("stream");
const { WriteApi } = require("@influxdata/influxdb-client");
/**
* Get points and write them into influx
*/
class InfluxPointWriter extends Writable{
/**
*
* @param {WriteApi} writeApi WriteAPI from InfluxDB instance
*/
constructor(writeApi){
super({
objectMode: true
});
this._api = writeApi;
}
_write(point, encoding, next){
this._api.writePoint(point);
next();
}
_flush(next){
this._api.flush(true)
.catch((err) => { next(new Error(`WriteApi rejected promise for flush: ${err}`)); })
.then(next);
}
}
// Specify exports
module.exports = {
InfluxPointWriter
};
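
Wiring sketch (as done in src/main.js; influxDb, env and pointStream are placeholders for the surrounding setup):

const { InfluxPointWriter } = require("./streamHandler/InfluxPointWriter.js");
const writeApi = influxDb.getWriteApi(env.INFLUX_ORG, env.INFLUX_BUCKET, "us");
pointStream.pipe(new InfluxPointWriter(writeApi)); // every incoming point gets written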

src/streamHandler/PacketInfluxPointFactory.js Normal file

@@ -0,0 +1,86 @@
const logger = require.main.require("./helper/logger.js")("PacketInfluxPointFactory");
const { Transform } = require("stream");
const {Point} = require("@influxdata/influxdb-client");
/** Keys to always use as tags */
const TAG_LIST = [
"srcmac",
"dstmac",
"bssid",
"frequency",
"flags",
"packettype",
];
/** Measurement-name and corresponding field-key */
const MEASUREMENT_MAP = new Map([
["rfmon_signal_dbm", "signal"],
["rfmon_payloadsize_bytes", "payloadSize"],
["rfmon_datarate_bytes", "dataRate"],
["rfmon_ssid_names", "ssid"],
["rfmon_authenticationtype_info", "authenticationType"],
["rfmon_associationsuccess_bools", "associationIsSuccessful"],
["rfmon_disassociationreason_info", "disassociationReason"],
["rfmon_handshakestage_info", "handshakeStage"],
]);
/**
* Get packets and convert them into influx-points
*/
class PacketInfluxPointFactory extends Transform{
constructor(){
super({
readableObjectMode: true,
writableObjectMode: true
});
}
_transform(packet, encoding, next){
// Create measurements
MEASUREMENT_MAP.forEach((objKey, measurement) => {
if(packet[objKey] == null) return;
let point = new Point(measurement); // Create point
// Set tags
TAG_LIST.filter(tag => Object.keys(packet).includes(tag)) // Filter tags available on object
.filter(tag => packet[tag] != null) // Filter tags not falsy on object
.forEach(tag => {
tagObjectRecursively(point, tag, packet[tag]);
});
point.setField("value", packet[objKey]); // Set field
this.push(point); // Push point into stream
});
next(); // Get next packet
}
}
function tagObjectRecursively(point, tag, field, suffix = ""){
if(typeof(field) == "object"){
// TODO: Convert boolean-arrays like "packet.flags" to key: value
Object.entries(field).map(([key, value]) => {
tagObjectRecursively(point, tag, value, `_${key}${suffix}`);
});
}
else point.tag(tag+suffix, field);
}
/** Mapping for type -> field-method */
const POINT_FIELD_TYPE = new Map([
["boolean", function(key, value){ return this.booleanField(key, value); }],
["number", function(key, value){ return this.intField(key, value); }],
["string", function(key, value){ return this.stringField(key, value); }],
]);
Point.prototype.setField = function(key, value){
let setField = POINT_FIELD_TYPE.get(typeof value);
return setField.apply(this, [key, value]);
};
// Specify exports
module.exports = {
PacketInfluxPointFactory
};
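
Illustration (hypothetical packet) of what the factory emits, shown as influx line-protocol:

// input packet: { packetType: "Beacon", bssid: "aa:bb:cc:dd:ee:ff", frequency: 2437, signal: -67, ssid: "MyWifi" }
// emitted points:
//   rfmon_signal_dbm,bssid=aa:bb:cc:dd:ee:ff,frequency=2437 value=-67i
//   rfmon_ssid_names,bssid=aa:bb:cc:dd:ee:ff,frequency=2437 value="MyWifi"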

src/streamHandler/PacketStreamFactory.js Normal file

@@ -0,0 +1,153 @@
const logger = require.main.require("./helper/logger.js")("PacketStreamFactory");
const { Transform } = require("stream");
const { DateTime } = require("luxon");
const { PacketType, FlagType, Packet, PacketWithSSID, BeaconPacket, ProbeRequestPacket, ProbeResponsePacket, AuthenticationPacket, AuthenticationType, AssociationResponsePacket, DisassociationPacket, HandshakePacket, HandshakeStage } = require.main.require("./dto/Packet.js");
const hexConv = require.main.require("./helper/hexConverter.js");
const wifiStateAnalyser = require.main.require("./helper/wifiStateAnalyzer.js");
const PACKET_TYPE_MAP = {
"Beacon": PacketType.Beacon,
"Probe Request": PacketType.ProbeRequest,
"Probe Response": PacketType.ProbeResponse,
"Data": PacketType.Data,
"Request-To-Send": PacketType.RequestToSend,
"Clear-To-Send": PacketType.ClearToSend,
"Acknowledgment": PacketType.Acknowledgment,
"BA": PacketType.BlockAcknowledgment,
"Authentication": PacketType.Authentication,
"Assoc Request": PacketType.AssociationRequest,
"Assoc Response": PacketType.AssociationResponse,
"Disassociation:": PacketType.Disassociation,
"EAPOL": PacketType.Handshake,
};
const PACKET_TYPES_REGEX = Object.keys(PACKET_TYPE_MAP).join("|");
const AUTHENTICATION_TYPE_MAP = {
"(Open System)-1": AuthenticationType.OpenSystem_1,
"(Open System)-2": AuthenticationType.OpenSystem_2,
};
const FLAG_TYPE_MAP = {
"Retry": FlagType.Retry,
"Pwr Mgmt": FlagType.PwrMgt,
"More Data": FlagType.MoreData,
"Protected": FlagType.Protected,
};
const FLAG_TYPE_MAPS_REGEX = Object.keys(FLAG_TYPE_MAP).join("|");
/**
* Read data from text-blocks and convert them to Packet
*/
class PacketStreamFactory extends Transform{
constructor(){
super({
readableObjectMode: true,
writableObjectMode: true
});
}
_transform(chunk, encoding, next){
let packet = new Packet();
const lines = chunk.split("\n");
const header = lines.splice(0, 1)[0]; // Grab first line, "lines" is now the payload
packet = this._handleHeader(packet, header);
packet = this._handlePayload(packet, lines);
next(null, packet); // Get next chunk
}
_handleHeader(packet, data){
// Convert time to epoch-micros. Unfortunately luxon doesn't use micros, but millis as its smallest time-unit, requiring some "hacks"
packet.timestampMicros = DateTime.fromISO(data.slice(0, 12)).toSeconds() + data.slice(12, 15)/1000000;
// Find flags
data.match(new RegExp("(?<=^|\\s)("+ FLAG_TYPE_MAPS_REGEX +")(?=$|\\s)", "ig"))
?.forEach(match => packet.flags[FLAG_TYPE_MAP[match]] = true); // Set matched flags to true
packet.dataRate = Number(data.match(/(?<=^|\s)\d+(\.\d+)?(?=\sMb\/?s($|\s))/i)?.[0]) || null;
packet.frequency = Number(data.match(/(?<=^|\s)\d{4}(?=\sMHz($|\s))/i)?.[0]) || null;
packet.durationMicros = Number(data.match(/(?<=^|\s)\d{1,4}(?=us($|\s))/i)?.[0]) || null;
packet.signal = Number(data.match(/(?<=^|\s)-\d{2,3}(?=dBm\sSignal($|\s))/i)?.[0]) || null;
let packetTypeStr = data.match(new RegExp("(?<=^|\\s)("+ PACKET_TYPES_REGEX +")(?=$|\\s)", "i"))?.[0];
if(packetTypeStr)
packet.packetType = PACKET_TYPE_MAP[packetTypeStr];
else if(data.match(/(SA|TA|DA|RA|BSSID):.{17}\s*$/i)){
packet.packetType = PacketType.NoData;
}
else {
packet.packetType = PacketType.Unknown;
}
packet.srcMac = data.match(/(?<=(^|\s)(SA|TA):).{17}(?=$|\s)/i)?.[0] ?? null;
packet.dstMac = data.match(/(?<=(^|\s)(DA|RA):).{17}(?=$|\s)/i)?.[0] ?? null;
packet.bssid = data.match(/(?<=(^|\s)BSSID:).{17}(?=$|\s)/i)?.[0] ?? null;
// Cover special cases with more data
let newPacket;
switch(packet.packetType){
case PacketType.Beacon:
case PacketType.ProbeRequest:
case PacketType.ProbeResponse:
case PacketType.AssociationRequest:
newPacket = new PacketWithSSID();
newPacket.ssid = data.match(new RegExp("(?<=(^|\\s)"+ packetTypeStr +"\\s\\().{0,32}(?=\\)($|\\s))", "i"))?.[0] ?? null;
break;
case PacketType.Authentication:
newPacket = new AuthenticationPacket();
newPacket.authenticationType = AUTHENTICATION_TYPE_MAP[data.match(/(?<=(^|\s)Authentication\s).{3,}(?=:(\s|$))/i)[0]] ?? AuthenticationType.Unknown;
break;
case PacketType.AssociationResponse:
newPacket = new AssociationResponsePacket();
newPacket.associationIsSuccessful = data.match(/(?<=(^|\s)Assoc\sResponse\s.{0,30})Successful(?=\s|$)/i) ? true : false;
break;
case PacketType.Disassociation:
newPacket = new DisassociationPacket();
newPacket.disassociationReason = data.match(/(?<=(^|\s)Disassociation:\s).*$/i)?.[0] ?? null;
break;
}
if(newPacket) packet = Object.assign(newPacket, packet); // Use new, more specific, packet and copy old data over
return packet;
}
_handlePayload(packet, data){
data = data.join("");
// Get payload-Hex-Data. If there is no data: empty
packet.payloadData = hexConv.hexToBytes(data.match(/(?<=\s)([A-F0-9]{1,4}(?=\s))/igm)?.join("") ?? "");
packet.payloadData.splice(packet.payloadData.length-4, 4); // Remove FrameCheck sequence
// Cover special cases with more data
let newPacket;
switch(packet.packetType){
case PacketType.Handshake: {
newPacket = new HandshakePacket();
// Read key-information
const keyInfoRaw = (packet.payloadData[0x5]<<0x8) + packet.payloadData[0x6];
const keyInfo = wifiStateAnalyser.keyInfoFromRaw(keyInfoRaw); // Convert
newPacket.handshakeStage = wifiStateAnalyser.handshakeStageFromKeyInfo(keyInfo); // Get stage
break;
}
}
if(newPacket) packet = Object.assign(newPacket, packet);
return packet;
}
}
// Specify exports
module.exports = {
PacketStreamFactory
};
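
For orientation, an abridged tcpdump text-block this factory consumes, and the fields derived from it (values illustrative):

// 17:31:10.464963 1.0 Mb/s 2437 MHz ... -67dBm Signal ... BSSID:aa:bb:cc:dd:ee:ff SA:11:22:33:44:55:66 Beacon (MyWifi)
// -> { dataRate: 1, frequency: 2437, signal: -67, packetType: "Beacon", bssid: "aa:bb:cc:dd:ee:ff", srcMac: "11:22:33:44:55:66", ssid: "MyWifi", ... }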

src/streamHandler/RegexBlockStream.js Normal file

@@ -0,0 +1,67 @@
const logger = require.main.require("./helper/logger.js")("RegexBlockStream");
const { Transform } = require("stream");
/**
* Matches whole blocks as regex and passes them on
*/
class RegexBlockStream extends Transform{
matcher;
withholdLastBlock;
matchAllOnFlush;
/**
* @param {RegExp} matcher Block-match
* @param {boolean} withholdLastBlock When true, the last matched block will not be submitted, to prevent submitting incomplete blocks.
* @param {boolean} matchAllOnFlush (Only in combination with withholdLastBlock) When enabled, the buffer will be matched one last time on _flush (stream teardown) and any remaining, also incomplete, blocks will be written
* @remarks WARNING: It should match a clean-block (including e.g. newline)! Otherwise buffer will get dirty and use more and more resources.
*/
constructor(matcher, withholdLastBlock = true, matchAllOnFlush = false){
super({
readableObjectMode: true,
writableObjectMode: true
});
this.matcher = matcher;
this.withholdLastBlock = withholdLastBlock;
this.matchAllOnFlush = matchAllOnFlush;
}
_transform(chunk, encoding, next){
chunk = this.readableBuffer.length? this.readableBuffer.join("") + chunk: chunk; // Add previous buffer to current chunk
this.readableBuffer.length && this.readableBuffer.clear(); // Clear buffer once we read it
let matches = chunk.match(this.matcher); // Match
if(matches){
if(this.withholdLastBlock) matches.pop(); // Remove last if we want to withhold it
chunk = this._writeMatches(matches, chunk);
}
this.readableBuffer.push(chunk); // Store remaining data in buffer
next(); // Get next chunk
}
_writeMatches(matches, chunk = null){
if(matches){
matches.forEach((match) => {
this.push(match); // Write match to stream
if(chunk) chunk = chunk.replace(match, ""); // Remove match from chunks
});
}
if(chunk) return chunk;
}
_flush(next){
if(this.matchAllOnFlush){ // When requested, we'll match one last time over the remaining buffer
let chunk = this.readableBuffer.join("");
let matches = chunk.match(this.matcher); // Match remaining buffer
this._writeMatches(matches); // Write matches including last element
}
next(); // Tell system we are done
}
}
// Specify exports
module.exports = {
RegexBlockStream
};
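
Usage sketch (the same block-regex src/main.js uses; stdin only for demonstration):

const { RegexBlockStream } = require("./streamHandler/RegexBlockStream.js");
const blockStream = new RegexBlockStream(/^\d{2}:\d{2}:\d{2}.\d{6}.*(\n( {4,8}|\t\t?).*)+\n/gm);
process.stdin.setEncoding("utf8");
process.stdin.pipe(blockStream).on("data", (block) => console.log("block:", block));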