MERGE: resolve conflicts

Signed-off-by: jokob-sk <jokob.sk@gmail.com>
jokob-sk
2025-11-10 10:11:34 +11:00
77 changed files with 1670 additions and 811 deletions

View File

@@ -46,7 +46,7 @@ body:
attributes:
label: app.conf
description: |
Paste your `app.conf` (remove personal info)
Paste relevant `app.conf` settings (remove sensitive info)
render: python
validations:
required: false
@@ -70,6 +70,13 @@ body:
- Bare-metal (community only support - Check Discord)
validations:
required: true
- type: checkboxes
attributes:
label: Debug or Trace enabled
description: I confirm I set `LOG_LEVEL` to `debug` or `trace`
options:
- label: I have read and followed the steps in the wiki link above and provided the required debug logs, and the log section covers the time when the issue occurs.
required: true
- type: textarea
attributes:
label: app.log
@@ -78,13 +85,14 @@ body:
***Generally speaking, all bug reports should have logs provided.***
Tip: You can attach images or log files by clicking this area to highlight it and then dragging files in.
Any additional info? Screenshots? References? Anything that will give us more context about the issue you are encountering!
You can use `tail -100 /app/log/app.log` in the container if you have trouble getting to the log files.
You can use `tail -100 /app/log/app.log` in the container if you have trouble getting to the log files or send them to netalertx@gmail.com with the issue number.
validations:
required: false
- type: checkboxes
- type: textarea
attributes:
label: Debug enabled
description: I confirm I enabled `debug`
options:
- label: I have read and followed the steps in the wiki link above and provided the required debug logs and the log section covers the time when the issue occurs.
required: true
label: Docker Logs
description: |
You can retrieve the logs from Portainer -> Containers -> your NetAlertX container -> Logs or by running `sudo docker logs netalertx`.
validations:
required: true

View File

@@ -43,7 +43,7 @@ Backend loop phases (see `server/__main__.py` and `server/plugin.py`): `once`, `
## Conventions & helpers to reuse
- Settings: add/modify via `ccd()` in `server/initialise.py` or per-plugin manifest. Never hardcode ports or secrets; use `get_setting_value()`.
- Logging: use `logger.mylog(level, [message])`; levels: none/minimal/verbose/debug/trace.
- Time/MAC/strings: `helper.py` (`timeNowTZ`, `normalize_mac`, sanitizers). Validate MACs before DB writes.
- Time/MAC/strings: `helper.py` (`timeNowDB`, `normalize_mac`, sanitizers). Validate MACs before DB writes.
- DB helpers: prefer `server/db/db_helper.py` functions (e.g., `get_table_json`, device condition helpers) over raw SQL in new paths.
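A minimal sketch tying these conventions together (import paths mirror the plugin code elsewhere in this repo; the exact `normalize_mac` signature is an assumption):

```python
# Hedged example: settings, logging, time, and MAC helpers together.
from logger import mylog
from helper import get_setting_value, normalize_mac
from utils.datetime_utils import timeNowDB

port = get_setting_value('GRAPHQL_PORT')      # never hardcode ports or secrets
mac = normalize_mac('AA-BB-CC-DD-EE-FF')      # validate/normalize before DB writes
mylog('verbose', [f'[MYPLUGIN] {timeNowDB()} resolved {mac}, port {port}'])
```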
## Dev workflow (devcontainer)

View File

@@ -10,7 +10,7 @@ on:
branches:
- next_release
jobs:
jobs:
docker_dev:
runs-on: ubuntu-latest
timeout-minutes: 30
@@ -19,7 +19,8 @@ jobs:
packages: write
if: >
contains(github.event.head_commit.message, 'PUSHPROD') != 'True' &&
github.repository == 'jokob-sk/NetAlertX'
github.repository == 'jokob-sk/NetAlertX'
steps:
- name: Checkout
uses: actions/checkout@v4
@@ -30,26 +31,36 @@ jobs:
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
# --- Generate timestamped dev version
- name: Generate timestamp version
id: timestamp
run: |
ts=$(date -u +'%Y%m%d-%H%M%S')
echo "version=dev-${ts}" >> $GITHUB_OUTPUT
echo "Generated version: dev-${ts}"
- name: Set up dynamic build ARGs
id: getargs
id: getargs
run: echo "version=$(cat ./stable/VERSION)" >> $GITHUB_OUTPUT
- name: Get release version
id: get_version
run: echo "version=Dev" >> $GITHUB_OUTPUT
# --- Write the timestamped version to .VERSION file
- name: Create .VERSION file
run: echo "${{ steps.get_version.outputs.version }}" >> .VERSION
run: echo "${{ steps.timestamp.outputs.version }}" > .VERSION
- name: Docker meta
id: meta
uses: docker/metadata-action@v4
uses: docker/metadata-action@v5
with:
images: |
ghcr.io/jokob-sk/netalertx-dev
jokobsk/netalertx-dev
tags: |
type=raw,value=latest
type=raw,value=${{ steps.timestamp.outputs.version }}
type=ref,event=branch
type=ref,event=pr
type=semver,pattern={{version}}

View File

@@ -6,7 +6,6 @@
# GitHub recommends pinning actions to a commit SHA.
# To get a newer version, you will need to update the SHA.
# You can also reference a tag or branch, but the action may change without warning.
name: Publish Docker image
on:
@@ -14,6 +13,7 @@ on:
types: [published]
tags:
- '*.[1-9]+[0-9]?.[1-9]+*'
jobs:
docker:
runs-on: ubuntu-latest
@@ -21,6 +21,7 @@ jobs:
permissions:
contents: read
packages: write
steps:
- name: Checkout
uses: actions/checkout@v3
@@ -31,42 +32,39 @@ jobs:
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Set up dynamic build ARGs
id: getargs
run: echo "version=$(cat ./stable/VERSION)" >> $GITHUB_OUTPUT
# --- Get release version from tag
- name: Get release version
id: get_version
run: echo "::set-output name=version::${GITHUB_REF#refs/tags/}"
run: echo "version=${GITHUB_REF#refs/tags/}" >> $GITHUB_OUTPUT
# --- Write version to .VERSION file
- name: Create .VERSION file
run: echo "${{ steps.get_version.outputs.version }}" >> .VERSION
run: echo "${{ steps.get_version.outputs.version }}" > .VERSION
# --- Generate Docker metadata and tags
- name: Docker meta
id: meta
uses: docker/metadata-action@v4
uses: docker/metadata-action@v5
with:
# list of Docker images to use as base name for tags
images: |
ghcr.io/jokob-sk/netalertx
jokobsk/netalertx
# generate Docker tags based on the following events/attributes
jokobsk/netalertx
tags: |
type=semver,pattern={{version}},value=${{ inputs.version }}
type=semver,pattern={{major}}.{{minor}},value=${{ inputs.version }}
type=semver,pattern={{major}},value=${{ inputs.version }}
type=semver,pattern={{version}},value=${{ steps.get_version.outputs.version }}
type=semver,pattern={{major}}.{{minor}},value=${{ steps.get_version.outputs.version }}
type=semver,pattern={{major}},value=${{ steps.get_version.outputs.version }}
type=ref,event=branch,suffix=-{{ sha }}
type=ref,event=pr
type=raw,value=latest,enable=${{ github.ref == 'refs/heads/main' || startsWith(github.ref, 'refs/tags/') }}
- name: Log in to Github Container registry
- name: Log in to Github Container Registry (GHCR)
uses: docker/login-action@v3
with:
registry: ghcr.io
username: jokob-sk
password: ${{ secrets.GITHUB_TOKEN }}
- name: Login to DockerHub
- name: Log in to DockerHub
if: github.event_name != 'pull_request'
uses: docker/login-action@v3
with:
@@ -81,6 +79,5 @@ jobs:
push: ${{ github.event_name != 'pull_request' }}
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}
# # ⚠ disable cache if build is failing to download debian packages
# cache-from: type=registry,ref=ghcr.io/jokob-sk/netalertx:buildcache
# cache-to: type=registry,ref=ghcr.io/jokob-sk/netalertx:buildcache,mode=max

View File

@@ -43,7 +43,7 @@ jobs:
- name: Docker meta
id: meta
uses: docker/metadata-action@v4
uses: docker/metadata-action@v5
with:
images: |
ghcr.io/jokob-sk/netalertx-dev-rewrite

View File

@@ -135,6 +135,9 @@ RUN install -d -o ${NETALERTX_USER} -g ${NETALERTX_GROUP} -m 700 ${READ_WRITE_FO
sh -c "find ${NETALERTX_APP} -type f \( -name '*.sh' -o -name 'speedtest-cli' \) \
-exec chmod 750 {} \;"
# Copy version information into the image
COPY --chown=${NETALERTX_USER}:${NETALERTX_GROUP} .VERSION ${NETALERTX_APP}/.VERSION
# Copy the virtualenv from the builder stage
COPY --from=builder --chown=20212:20212 ${VIRTUAL_ENV} ${VIRTUAL_ENV}

View File

@@ -6,7 +6,7 @@
# NetAlertX - Network, presence scanner and alert framework
Get visibility of what's going on on your WIFI/LAN network and enable presence detection of important devices. Schedule scans for devices, port changes and get alerts if unknown devices or changes are found. Write your own [Plugin](https://github.com/jokob-sk/NetAlertX/tree/main/docs/PLUGINS.md#readme) with auto-generated UI and in-build notification system. Build out and easily maintain your network source of truth (NSoT).
Get visibility of what's going on on your WIFI/LAN network and enable presence detection of important devices. Schedule scans for devices, port changes and get alerts if unknown devices or changes are found. Write your own [Plugin](https://github.com/jokob-sk/NetAlertX/tree/main/docs/PLUGINS.md#readme) with auto-generated UI and in-built notification system. Build out and easily maintain your network source of truth (NSoT) and device inventory.
## 📋 Table of Contents
@@ -61,7 +61,7 @@ For Home Assistant users: [Click here to add NetAlertX](https://my.home-assistan
For other install methods, check the [installation docs](#-documentation)
| [📑 Docker guide](https://github.com/jokob-sk/NetAlertX/blob/main/dockerfiles/README.md) | [🚀 Releases](https://github.com/jokob-sk/NetAlertX/releases) | [📚 Docs](https://jokob-sk.github.io/NetAlertX/) | [🔌 Plugins](https://github.com/jokob-sk/NetAlertX/blob/main/docs/PLUGINS.md) | [🤖 Ask AI](https://gurubase.io/g/netalertx)
| [📑 Docker guide](https://github.com/jokob-sk/NetAlertX/blob/main/docs/DOCKER_INSTALLATION.md) | [🚀 Releases](https://github.com/jokob-sk/NetAlertX/releases) | [📚 Docs](https://jokob-sk.github.io/NetAlertX/) | [🔌 Plugins](https://github.com/jokob-sk/NetAlertX/blob/main/docs/PLUGINS.md) | [🤖 Ask AI](https://gurubase.io/g/netalertx)
|----------------------| ----------------------| ----------------------| ----------------------| ----------------------|
![showcase][showcase]
@@ -103,7 +103,7 @@ The [workflows module](https://github.com/jokob-sk/NetAlertX/blob/main/docs/WORK
Supported browsers: Chrome, Firefox
- [[Installation] Docker](https://github.com/jokob-sk/NetAlertX/blob/main/dockerfiles/README.md)
- [[Installation] Docker](https://github.com/jokob-sk/NetAlertX/blob/main/docs/DOCKER_INSTALLATION.md)
- [[Installation] Home Assistant](https://github.com/alexbelgium/hassio-addons/tree/master/netalertx)
- [[Installation] Bare metal](https://github.com/jokob-sk/NetAlertX/blob/main/docs/HW_INSTALL.md)
- [[Installation] Unraid App](https://unraid.net/community/apps)

View File

@@ -64,8 +64,9 @@ http://<server>:<GRAPHQL_PORT>/
* [Metrics](API_METRICS.md) Prometheus metrics and per-device status
* [Network Tools](API_NETTOOLS.md) Utilities like Wake-on-LAN, traceroute, nslookup, nmap, and internet info
* [Online History](API_ONLINEHISTORY.md) Online/offline device records
* [GraphQL](API_GRAPHQL.md) Advanced queries and filtering
* [GraphQL](API_GRAPHQL.md) Advanced queries and filtering for Devices, Settings and Language Strings
* [Sync](API_SYNC.md) Synchronization between multiple NetAlertX instances
* [Logs](API_LOGS.md) Purging of logs and adding to the event execution queue for user-triggered events
* [DB query](API_DBQUERY.md) (⚠ Internal) - Low level database access - use other endpoints if possible
See [Testing](API_TESTS.md) for example requests and usage.

View File

@@ -1,9 +1,10 @@
# GraphQL API Endpoint
GraphQL queries are **read-optimized for speed**. Data may be slightly out of date until the file system cache refreshes. The GraphQL endpoints allows you to access the following objects:
GraphQL queries are **read-optimized for speed**. Data may be slightly out of date until the file system cache refreshes. The GraphQL endpoints allow you to access the following objects:
- Devices
- Settings
* Devices
* Settings
* Language Strings (LangStrings)
## Endpoints
@@ -190,11 +191,74 @@ curl 'http://host:GRAPHQL_PORT/graphql' \
}
```
---
## LangStrings Query
The **LangStrings query** provides access to localized strings. It supports filtering by `langCode` and `langStringKey`. If the requested string is missing or empty, you can optionally fall back to `en_us`.
### Sample Query
```graphql
query GetLangStrings {
langStrings(langCode: "de_de", langStringKey: "settings_other_scanners") {
langStrings {
langCode
langStringKey
langStringText
}
count
}
}
```
### Query Parameters
| Parameter | Type | Description |
| ---------------- | ------- | ---------------------------------------------------------------------------------------- |
| `langCode` | String | Optional language code (e.g., `en_us`, `de_de`). If omitted, all languages are returned. |
| `langStringKey` | String | Optional string key to retrieve a specific entry. |
| `fallback_to_en` | Boolean | Optional (default `true`). If `true`, empty or missing strings fall back to `en_us`. |
### `curl` Example
```sh
curl 'http://host:GRAPHQL_PORT/graphql' \
-X POST \
-H 'Authorization: Bearer API_TOKEN' \
-H 'Content-Type: application/json' \
--data '{
"query": "query GetLangStrings { langStrings(langCode: \"de_de\", langStringKey: \"settings_other_scanners\") { langStrings { langCode langStringKey langStringText } count } }"
}'
```
### Sample Response
```json
{
"data": {
"langStrings": {
"count": 1,
"langStrings": [
{
"langCode": "de_de",
"langStringKey": "settings_other_scanners",
"langStringText": "Other, non-device scanner plugins that are currently enabled." // falls back to en_us if empty
}
]
}
}
}
```
---
## Notes
* Device and settings queries can be combined in one request since GraphQL supports batching.
* Device, settings, and LangStrings queries can be combined in **one request** since GraphQL supports batching (see the example below).
* The `fallback_to_en` feature ensures the UI always has a value even if a translation is missing.
* Data is **cached in memory** per JSON file; changes to language or plugin files will only refresh after the cache detects a file modification.
* The `setOverriddenByEnv` flag helps identify setting values that are locked at container runtime.
* The schema is **read-only** — updates must be performed through other APIs or configuration management. See the other [API](API.md) endpoints for details.
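As a sketch of such batching, two `langStrings` lookups can share one request via standard GraphQL aliases (only the field documented above is used):

```graphql
query Batched {
  de: langStrings(langCode: "de_de", langStringKey: "settings_other_scanners") { count }
  en: langStrings(langCode: "en_us", langStringKey: "settings_other_scanners") { count }
}
```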

docs/API_LOGS.md (new file, 179 lines)
View File

@@ -0,0 +1,179 @@
# Logs API Endpoints
Purge application log files stored under `/app/log` and manage the execution queue. These endpoints are primarily used for maintenance tasks, such as clearing accumulated logs or queuing system actions without restarting the container.
Only specific, pre-approved log files can be purged for security and stability reasons.
---
## Delete (Purge) a Log File
* **DELETE** `/logs?file=<log_file>` → Purge the contents of an allowed log file.
**Query Parameter:**
* `file` → The name of the log file to purge (e.g., `app.log`, `stdout.log`)
**Allowed Files:**
```
app.log
app_front.log
IP_changes.log
stdout.log
stderr.log
app.php_errors.log
execution_queue.log
db_is_locked.log
```
**Authorization:**
Requires a valid API token in the `Authorization` header.
---
### `curl` Example (Success)
```sh
curl -X DELETE 'http://<server_ip>:<GRAPHQL_PORT>/logs?file=app.log' \
-H 'Authorization: Bearer <API_TOKEN>' \
-H 'Accept: application/json'
```
**Response:**
```json
{
"success": true,
"message": "[clean_log] File app.log purged successfully"
}
```
---
### `curl` Example (Not Allowed)
```sh
curl -X DELETE 'http://<server_ip>:<GRAPHQL_PORT>/logs?file=not_allowed.log' \
-H 'Authorization: Bearer <API_TOKEN>' \
-H 'Accept: application/json'
```
**Response:**
```json
{
"success": false,
"message": "[clean_log] File not_allowed.log is not allowed to be purged"
}
```
---
### `curl` Example (Unauthorized)
```sh
curl -X DELETE 'http://<server_ip>:<GRAPHQL_PORT>/logs?file=app.log' \
-H 'Accept: application/json'
```
**Response:**
```json
{
"error": "Forbidden"
}
```
---
## Add an Action to the Execution Queue
* **POST** `/logs/add-to-execution-queue` → Add a system action to the execution queue.
**Request Body (JSON):**
```json
{
"action": "update_api|devices"
}
```
**Authorization:**
Requires a valid API token in the `Authorization` header.
---
### `curl` Example (Success)
The example below updates the API cache for Devices:
```sh
curl -X POST 'http://<server_ip>:<GRAPHQL_PORT>/logs/add-to-execution-queue' \
-H 'Authorization: Bearer <API_TOKEN>' \
-H 'Content-Type: application/json' \
--data '{"action": "update_api|devices"}'
```
**Response:**
```json
{
"success": true,
"message": "[UserEventsQueueInstance] Action \"update_api|devices\" added to the execution queue."
}
```
---
### `curl` Example (Missing Parameter)
```sh
curl -X POST 'http://<server_ip>:<GRAPHQL_PORT>/logs/add-to-execution-queue' \
-H 'Authorization: Bearer <API_TOKEN>' \
-H 'Content-Type: application/json' \
--data '{}'
```
**Response:**
```json
{
"success": false,
"message": "Missing parameters",
"error": "Missing required 'action' field in JSON body"
}
```
---
### `curl` Example (Unauthorized)
```sh
curl -X POST 'http://<server_ip>:<GRAPHQL_PORT>/logs/add-to-execution-queue' \
-H 'Content-Type: application/json' \
--data '{"action": "update_api|devices"}'
```
**Response:**
```json
{
"error": "Forbidden"
}
```
---
## Notes
* Only predefined files in `/app/log` can be purged — arbitrary paths are **not permitted**.
* When a log file is purged:
* Its content is replaced with a short marker text: `"File manually purged"`.
* A backend log entry is created via `mylog()`.
* A frontend notification is generated via `write_notification()`.
* Execution queue actions are appended to `execution_queue.log` and can be processed asynchronously by background tasks or workflows.
* Unauthorized or invalid attempts are safely logged and rejected.
* For advanced log retrieval, analysis, or structured querying, use the frontend log viewer.
* Always ensure that sensitive or production logs are handled carefully — purging cannot be undone.

View File

@@ -83,7 +83,7 @@ You can also download the `app.conf` and `devices.csv` files from the **Maintena
### 📥 How to Restore
Map these files into your container as described in the [Setup documentation](https://github.com/jokob-sk/NetAlertX/blob/main/dockerfiles/README.md#docker-paths).
Map these files into your container as described in the [Setup documentation](./DOCKER_INSTALLATION.md).
---

View File

@@ -4,7 +4,7 @@
NetAlertX can be installed in several ways. The best-supported option is Docker, followed by a supervised Home Assistant instance, an Unraid app, and lastly, bare metal.
- [[Installation] Docker (recommended)](https://github.com/jokob-sk/NetAlertX/blob/main/dockerfiles/README.md)
- [[Installation] Docker (recommended)](https://github.com/jokob-sk/NetAlertX/blob/main/docs/DOCKER_INSTALLATION.md)
- [[Installation] Home Assistant](https://github.com/alexbelgium/hassio-addons/tree/master/netalertx)
- [[Installation] Unraid App](https://unraid.net/community/apps)
- [[Installation] Bare metal (experimental - looking for maintainers)](https://github.com/jokob-sk/NetAlertX/blob/main/docs/HW_INSTALL.md)

View File

@@ -44,14 +44,19 @@ In Notification Processing settings, you can specify blanket rules. These allow
1. Notify on (`NTFPRCS_INCLUDED_SECTIONS`) allows you to specify which events trigger notifications. Usual setups will have `new_devices`, `down_devices`, and possibly `down_reconnected` set. Including `plugin` (dependent on the Plugin `<plugin>_WATCH` and `<plugin>_REPORT_ON` settings) and `events` (dependent on the on-device **Alert Events** setting) might be too noisy for most setups. See the [NTFPRCS plugin](https://github.com/jokob-sk/NetAlertX/blob/main/front/plugins/notification_processing/README.md) for more info on what events these selections include.
2. Alert down after (`NTFPRCS_alert_down_time`) is useful if you want to wait for some time before the system sends out a down notification for a device. This is related to the on-device **Alert down** setting and only devices with this checked will trigger a down notification.
3. A filter to allow you to set device-specific exceptions to New devices being added to the app.
4. A filter to allow you to set device-specific exceptions to generated Events.
## Ignoring devices 🔕
You can filter out unwanted notifications globally. This could be because of a misbehaving device that flips between IP addresses (GoogleNest/GoogleHub; see also the [ARPSAN docs and the `--exclude-broadcast` flag](https://github.com/jokob-sk/NetAlertX/tree/main/front/plugins/arp_scan#ip-flipping-on-google-nest-devices)), or because you want to ignore new device notifications of a certain pattern.
1. Events Filter (`NTFPRCS_event_condition`) - Filter out Events from notifications.
2. New Devices Filter (`NTFPRCS_new_dev_condition`) - Filter out New Devices from notifications, but log and keep a new device in the system (see the example condition below).
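Both filters accept a SQL `WHERE` fragment appended to the relevant query. A minimal example condition (taken from the `NTFPRCS_event_condition` setting description):

```
AND devLastIP NOT LIKE '192.168.3.%'
```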
## Ignoring devices 💻
![Ignoring new devices](./img/NOTIFICATIONS/NEWDEV_ignores.png)
You can completely ignore detected devices globally. This could be because your instance detects docker containers, because you want to ignore devices from a specific manufacturer via MAC rules, or because you want to ignore devices on a specific IP range.
1. Ignored MACs (`NEWDEV_ignored_MACs`) - List of MACs to ignore.
2. Ignored IPs (`NEWDEV_ignored_IPs`) - List of IPs to ignore.
2. Ignored IPs (`NEWDEV_ignored_IPs`) - List of IPs to ignore.

View File

@@ -1,146 +1,192 @@
## config.json Lifecycle in NetAlertX
# Plugins Implementation Details
This document describes on a high level how `config.json` is read, processed, and used by the NetAlertX core and plugins. It also outlines the plugin output contract and the main plugin types.
Plugins provide data to the NetAlertX core, which processes it to detect changes, discover new devices, raise alerts, and apply heuristics.
> [!NOTE]
> For a deep-dive on the specific configuration options and sections of the `config.json` plugin manifest, consult the [Plugins Development Guide](PLUGINS_DEV.md).
---
## Overview: Plugin Data Flow
1. Each plugin runs on a defined schedule.
2. Aligning all plugin schedules is recommended so they execute in the same loop.
3. During execution, all plugins write their collected data into the **`CurrentScan`** table.
4. After all plugins complete, the `CurrentScan` table is evaluated to detect **new devices**, **changes**, and **triggers**.
Although plugins run independently, they contribute to the shared `CurrentScan` table.
To inspect its contents, set `LOG_LEVEL=trace` and check for the log section:
```
================ CurrentScan table content ================
```
---
## `config.json` Lifecycle
This section outlines how each plugin's `config.json` manifest is read, validated, and used by the core and plugins.
It also describes plugin output expectations and the main plugin categories.
> [!TIP]
> For detailed schema and examples, see the [Plugin Development Guide](PLUGINS_DEV.md).
---
### 1. Loading
* On startup, the app core loads `config.json` for each plugin.
* The `config.json` represents a plugin manifest, that contains metadata and runtime settings.
* On startup, the core loads `config.json` for each plugin.
* The file acts as a **plugin manifest**, defining metadata, runtime configuration, and database mappings.
---
### 2. Validation
* The core checks that each required settings key (such as `RUN`) for a plugin exists.
* Invalid or missing values may be replaced with defaults, or the plugin may be disabled.
* The core validates required keys (for example, `RUN`).
* Missing or invalid entries may be replaced with defaults or cause the plugin to be disabled.
---
### 3. Preparation
* The plugins settings (paths, commands, parameters) are prepared.
* Database mappings (`mapped_to_table`, `database_column_definitions`) for data ingestion into the core app are parsed.
* Plugin parameters (paths, commands, and options) are prepared for execution.
* Database mappings (`mapped_to_table`, `database_column_definitions`) are parsed to define how data integrates with the main app.
---
### 4. Execution
* Plugins can be run at different core app execution points, such as on schedule, once on start, after a notification, etc.
* At runtime, the scheduler triggers plugins according to their `interval`.
* The plugin executes its command or script.
* Plugins may run:
* On a fixed schedule.
* Once at startup.
* After a notification or other trigger.
* The scheduler executes plugins according to their `interval`.
---
### 5. Parsing
* Plugin output is expected in **pipe (`|`)-delimited format**.
* The core parses lines into fields, matching the **plugin interface contract**.
* Plugin output must be **pipe-delimited (`|`)**.
* The core parses each output line following the **Plugin Interface Contract**, splitting and mapping fields accordingly.
---
### 6. Mapping
* Each parsed field is moved into the `Plugins_` database tables and can be mapped into a configured database table.
* Controlled by `database_column_definitions` and `mapped_to_table`.
* Example: `Object_PrimaryID → Devices.MAC`.
* Parsed fields are inserted into the plugin's `Plugins_*` table.
* Data can be mapped into other tables (e.g., `Devices`, `CurrentScan`) as defined by:
* `database_column_definitions`
* `mapped_to_table`
**Example:** `Object_PrimaryID → devMAC`
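A hypothetical manifest fragment illustrating these two keys (key names follow existing plugin manifests; the target column names are illustrative assumptions):

```json
{
  "mapped_to_table": "CurrentScan",
  "database_column_definitions": [
    { "column": "Object_PrimaryID",   "mapped_to_column": "cur_MAC" },
    { "column": "Object_SecondaryID", "mapped_to_column": "cur_IP" }
  ]
}
```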
---
### 6a. Plugin Output Contract
Each plugin must output results in the **plugin interface contract format**, pipe (`|`)-delimited values, in the column order described under [Plugin Interface Contract](PLUGINS_DEV.md)
All plugins must follow the **Plugin Interface Contract** defined in `PLUGINS_DEV.md`.
Output values are pipe-delimited in a fixed order.
#### IDs
#### Identifiers
* `Object_PrimaryID` and `Object_SecondaryID` identify the record (e.g. `MAC|IP`).
* `Object_PrimaryID` and `Object_SecondaryID` uniquely identify records (for example, `MAC|IP`).
#### **Watched values (`Watched_Value14`)**
#### Watched Values (`Watched_Value14`)
* Used by the core to detect changes between runs.
* Changes here can trigger **notifications**.
* Used by the core to detect changes between runs.
* Changes in these fields can trigger notifications.
#### **Extra value (`Extra`)**
#### Extra Field (`Extra`)
* Optional, extra field.
* Stored in the database but **not used for alerts**.
* Optional additional value.
* Stored in the database but not used for alerts.
#### **Helper values (`Helper_Value13`)**
#### Helper Values (`Helper_Value13`)
* Added for cases where more than IDs + watched + extra are needed.
* Can be made visible in the UI.
* Stored in the database but **not used for alerts**.
* Optional auxiliary data (for display or plugin logic).
* Stored but not alert-triggering.
#### **Mapping matters**
#### Mapping
* While the plugin output is free-form, the `database_column_definitions` and `mapped_to_table` settings in `config.json` determine the **target columns and data types** in NetAlertX.
* While the output format is flexible, the plugin's manifest determines the destination and type of each field.
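A hedged sketch of emitting contract-conformant records with the `Plugin_Objects` helper, whose `add_object`/`write_result_file` calls are used verbatim by the bundled plugins (the result-file path below is an assumption):

```python
from plugin_helper import Plugin_Objects

RESULT_FILE = 'last_result.log'          # assumed path; real plugins derive theirs
plugin_objects = Plugin_Objects(RESULT_FILE)
plugin_objects.add_object(
    primaryId   = 'AA:BB:CC:DD:EE:FF',   # Object_PrimaryID, e.g. MAC
    secondaryId = '192.168.1.10',        # Object_SecondaryID, e.g. IP
    watched1    = 'hostname-1',          # watched values drive change detection
    watched2    = 'null',
    watched3    = 'null',
    watched4    = 'null',
    extra       = 'null',                # stored, never alert-triggering
    foreignKey  = 'AA:BB:CC:DD:EE:FF'
)
plugin_objects.write_result_file()       # writes the pipe-delimited result file
```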
---
### 7. Persistence
* Data is upserted into the database.
* Conflicts are resolved using `Object_PrimaryID` + `Object_SecondaryID`.
* Parsed data is **upserted** into the database.
* Conflicts are resolved using the combined key: `Object_PrimaryID + Object_SecondaryID`.
---
### 8. Plugin Types and Expected Outputs
## Plugin Categories
Beyond the `data_source` setting, plugins fall into functional categories. Each has its own input requirements and output expectations:
Plugins fall into several functional categories depending on their purpose and expected outputs.
#### **Device discovery plugins**
### 1. Device Discovery Plugins
* **Inputs:** `N/A`, subnet, or API for discovery service, or similar.
* **Outputs:** At minimum `MAC` and `IP` that results in a new or updated device records in the `Devices` table.
* **Mapping:** Must be mapped to the `CurrentScan` table via `database_column_definitions` and `data_filters`.
* **Examples:** ARP-scan, NMAP device discovery (e.g., `ARPSCAN`, `NMAPDEV`).
#### **Device-data enrichment plugins**
* **Inputs:** Device identifier (usually `MAC`, `IP`).
* **Outputs:** Additional data for that device (e.g. open ports).
* **Mapping:** Controlled via `database_column_definitions` and `data_filters`.
* **Examples:** Ports, MQTT messages (e.g., `NMAP`, `MQTT`)
#### **Name resolver plugins**
* **Inputs:** Device identifiers (MAC, IP, or hostname).
* **Outputs:** Updated `devName` and `devFQDN` fields.
* **Mapping:** Not expected.
* **Note:** Currently requires **core app modification** to add new plugins, not fully driven by the plugins `config.json`.
* **Examples:** Avahiscan (e.g., `NBTSCAN`, `NSLOOKUP`).
#### **Generic plugins**
* **Inputs:** Whatever the script or query provides.
* **Outputs:** Data shown only in **Integrations → Plugins**, not tied to devices.
* **Mapping:** Not expected.
* **Examples:** External monitoring data (e.g., `INTRSPD`)
#### **Configuration-only plugins**
* **Inputs/Outputs:** None at runtime.
* **Mapping:** Not expected.
* **Examples:** Used to provide additional settings or execute scripts (e.g., `MAINT`, `CSVBCKP`).
* **Inputs:** None, subnet, or discovery API.
* **Outputs:** `MAC` and `IP` for new or updated device records in `Devices`.
* **Mapping:** Required, usually into `CurrentScan`.
* **Examples:** `ARPSCAN`, `NMAPDEV`.
---
### 9. Post-Processing
### 2. Device Data Enrichment Plugins
* Notifications are generated if watched values change.
* UI is updated with new or updated records.
* All values that are configured to be shown in the UI appear in the Plugins section.
* **Inputs:** Device identifiers (`MAC`, `IP`).
* **Outputs:** Additional metadata (for example, open ports or sensors).
* **Mapping:** Controlled via manifest definitions.
* **Examples:** `NMAP`, `MQTT`.
---
### 10. Summary
### 3. Name Resolver Plugins
The lifecycle of `config.json` entries is:
* **Inputs:** Device identifiers (`MAC`, `IP`, hostname).
* **Outputs:** Updated `devName` and `devFQDN`.
* **Mapping:** Typically none.
* **Note:** Adding new resolvers currently requires a core change.
* **Examples:** `NBTSCAN`, `NSLOOKUP`.
---
### 4. Generic Plugins
* **Inputs:** Custom, based on the plugin logic or script.
* **Outputs:** Data displayed under **Integrations → Plugins** only.
* **Mapping:** Not required.
* **Examples:** `INTRSPD`, custom monitoring scripts.
---
### 5. Configuration-Only Plugins
* **Inputs/Outputs:** None at runtime.
* **Purpose:** Used for configuration or maintenance tasks.
* **Examples:** `MAINT`, `CSVBCKP`.
---
## Post-Processing
After persistence:
* The core generates notifications for any watched value changes.
* The UI updates with new or modified data.
* Plugins with UI-enabled data display under **Integrations → Plugins**.
---
## Summary
The lifecycle of a plugin configuration is:
**Load → Validate → Prepare → Execute → Parse → Map → Persist → Post-process**
Plugins must follow the **output contract**, and their category (discovery, specific, resolver, generic, config-only) defines what inputs they require and what outputs are expected.
Each plugin must:
* Follow the **output contract**.
* Declare its type and expected output structure.
* Define mappings and watched values clearly in `config.json`.

View File

@@ -13,7 +13,7 @@ There is also an in-app Help / FAQ section that should be answering frequently a
#### 🐳 Docker (Fully supported)
- The main installation method is as a [docker container - follow these instructions here](https://github.com/jokob-sk/NetAlertX/blob/main/dockerfiles/README.md).
- The main installation method is as a [docker container - follow these instructions here](./DOCKER_INSTALLATION.md).
#### 💻 Bare-metal / On-server (Experimental/community supported 🧪)

View File

@@ -15,7 +15,7 @@ The **Web UI** is served by an **nginx** server, while the **API backend** runs
APP_CONF_OVERRIDE={"GRAPHQL_PORT":"20212"}
```
For more information, check the [Docker installation guide](https://github.com/jokob-sk/NetAlertX/blob/main/dockerfiles/README.md).
For more information, check the [Docker installation guide](./DOCKER_INSTALLATION.md).
## Possible issues and troubleshooting

View File

@@ -21,7 +21,7 @@ The app can be installed different ways, with the best support of the docker-bas
NetAlertX is fully supported in Docker environments, allowing for easy setup and configuration. Follow the official guide to get started:
- [Docker Installation Guide](https://github.com/jokob-sk/NetAlertX/blob/main/dockerfiles/README.md)
- [Docker Installation Guide](./DOCKER_INSTALLATION.md)
This guide will take you through the process of setting up NetAlertX using Docker Compose or standalone Docker commands.

View File

@@ -33,6 +33,7 @@ function renderLogArea($params) {
$textAreaCssClass = isset($params['textAreaCssClass']) ? $params['textAreaCssClass'] : '';
$buttons = isset($params['buttons']) ? $params['buttons'] : [];
$content = "";
$fileSize = 0;
$filePath = resolveLogPath($filePath);
@@ -63,13 +64,7 @@ function renderLogArea($params) {
// Prepare buttons HTML
$buttonsHtml = '';
$totalButtons = count($buttons);
if ($totalButtons > 0) {
$colClass = 12 / $totalButtons;
// Use $colClass in your HTML generation or further logic
} else {
// Handle case where $buttons array is empty
$colClass = 12;
}
$colClass = $totalButtons > 0 ? (12 / $totalButtons) : 12;
foreach ($buttons as $button) {
$labelStringCode = isset($button['labelStringCode']) ? $button['labelStringCode'] : '';
@@ -81,8 +76,7 @@ function renderLogArea($params) {
</div>';
}
// Render the log area HTML
// Render HTML
$html = '
<div class="log-area box box-solid box-primary">
<div class="row logs-row col-sm-12 col-xs-12">

View File

@@ -177,7 +177,10 @@ function checkPermissions($files)
}
// ----------------------------------------------------------------------------------------
// 🔺----- API ENDPOINTS SUPERSEDED -----🔺
// check server/api_server/api_server_start.py for equivalents
// equivalent: /messaging/in-app/write
// 🔺----- API ENDPOINTS SUPERSEDED -----🔺
function displayMessage($message, $logAlert = FALSE, $logConsole = TRUE, $logFile = TRUE, $logEcho = FALSE)
{
global $logFolderPath, $log_file, $timestamp;
@@ -235,7 +238,10 @@ function displayMessage($message, $logAlert = FALSE, $logConsole = TRUE, $logFil
}
// 🔺----- API ENDPOINTS SUPERSEDED -----🔺
// check server/api_server/api_server_start.py for equivalents
// equivalent: /logs/add-to-execution-queue
// 🔺----- API ENDPOINTS SUPERSEDED -----🔺
// ----------------------------------------------------------------------------------------
// Adds an action to perform into the execution_queue.log file
function addToExecutionQueue($action)
@@ -258,6 +264,10 @@ function addToExecutionQueue($action)
// ----------------------------------------------------------------------------------------
// 🔺----- API ENDPOINTS SUPERSEDED -----🔺
// check server/api_server/api_server_start.py for equivalents
// equivalent: /logs DELETE
// 🔺----- API ENDPOINTS SUPERSEDED -----🔺
function cleanLog($logFile)
{
global $logFolderPath, $timestamp;
@@ -424,6 +434,10 @@ function saveSettings()
}
// -------------------------------------------------------------------------------------------
// 🔺----- API ENDPOINTS SUPERSEDED -----🔺
// check server/api_server/api_server_start.py for equivalents
// equivalent: /graphql LangStrings endpoint
// 🔺----- API ENDPOINTS SUPERSEDED -----🔺
function getString ($setKey, $default) {
$result = lang($setKey);
@@ -436,6 +450,10 @@ function getString ($setKey, $default) {
return $default;
}
// -------------------------------------------------------------------------------------------
// 🔺----- API ENDPOINTS SUPERSEDED -----🔺
// check server/api_server/api_server_start.py for equivalents
// equivalent: /settings/<key>
// 🔺----- API ENDPOINTS SUPERSEDED -----🔺
function getSettingValue($setKey) {
// Define the JSON endpoint URL
$apiRoot = rtrim(getenv('NETALERTX_API') ?: '/tmp/api', '/');

View File

@@ -8,9 +8,9 @@ from pytz import timezone
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from const import logPath
from plugin_helper import Plugin_Objects
from logger import mylog, Logger
from const import logPath
from helper import get_setting_value
import conf

View File

@@ -11,6 +11,7 @@ sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
import conf
from const import confFileName, logPath
from utils.datetime_utils import timeNowDB
from plugin_helper import Plugin_Objects
from logger import mylog, Logger
from helper import timeNowTZ, get_setting_value
@@ -63,14 +64,14 @@ def main():
# Log result
plugin_objects.add_object(
primaryId=pluginName,
secondaryId=timeNowTZ(),
watched1=notification["GUID"],
watched2=result,
watched3="null",
watched4="null",
extra="null",
foreignKey=notification["GUID"],
primaryId = pluginName,
secondaryId = timeNowDB(),
watched1 = notification["GUID"],
watched2 = result,
watched3 = 'null',
watched4 = 'null',
extra = 'null',
foreignKey = notification["GUID"]
)
plugin_objects.write_result_file()

View File

@@ -19,6 +19,7 @@ sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
import conf
from const import confFileName, logPath
from plugin_helper import Plugin_Objects
from utils.datetime_utils import timeNowDB
from logger import mylog, Logger
from helper import timeNowTZ, get_setting_value, hide_email
from models.notification_instance import NotificationInstance
@@ -81,7 +82,7 @@ def main():
# Log result
plugin_objects.add_object(
primaryId = pluginName,
secondaryId = timeNowTZ(),
secondaryId = timeNowDB(),
watched1 = notification["GUID"],
watched2 = result,
watched3 = 'null',

View File

@@ -20,11 +20,12 @@ sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
# NetAlertX modules
import conf
from const import confFileName, logPath
from plugin_utils import getPluginObject
from utils.plugin_utils import getPluginObject
from plugin_helper import Plugin_Objects
from logger import mylog, Logger
from helper import timeNowTZ, get_setting_value, bytes_to_string, \
from helper import get_setting_value, bytes_to_string, \
sanitize_string, normalize_string
from utils.datetime_utils import timeNowDB
from database import DB, get_device_stats
@@ -566,7 +567,7 @@ def prepTimeStamp(datetime_str):
except ValueError:
mylog('verbose', [f"[{pluginName}] Timestamp conversion failed of string '{datetime_str}'"])
# Use the current time if the input format is invalid
parsed_datetime = timeNowTZ() # Assuming this function returns the current time with timezone
parsed_datetime = datetime.now(conf.tz)
# Convert to the required format with 'T' between date and time and ensure the timezone is included
return parsed_datetime.isoformat() # This will include the timezone offset

View File

@@ -14,8 +14,9 @@ sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
import conf
from const import confFileName, logPath
from plugin_helper import Plugin_Objects, handleEmpty
from utils.datetime_utils import timeNowDB
from logger import mylog, Logger
from helper import timeNowTZ, get_setting_value
from helper import get_setting_value
from models.notification_instance import NotificationInstance
from database import DB
from pytz import timezone
@@ -64,7 +65,7 @@ def main():
# Log result
plugin_objects.add_object(
primaryId = pluginName,
secondaryId = timeNowTZ(),
secondaryId = timeNowDB(),
watched1 = notification["GUID"],
watched2 = handleEmpty(response_text),
watched3 = response_status_code,

View File

@@ -14,7 +14,8 @@ sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from plugin_helper import Plugin_Objects, handleEmpty # noqa: E402
from logger import mylog, Logger # noqa: E402
from helper import timeNowTZ, get_setting_value, hide_string # noqa: E402
from helper import get_setting_value, hide_string # noqa: E402
from utils.datetime_utils import timeNowDB
from models.notification_instance import NotificationInstance # noqa: E402
from database import DB # noqa: E402
@@ -63,7 +64,7 @@ def main():
# Log result
plugin_objects.add_object(
primaryId=pluginName,
secondaryId=timeNowTZ(),
secondaryId=timeNowDB(),
watched1=notification["GUID"],
watched2=handleEmpty(response_text),
watched3=response_status_code,

View File

@@ -14,7 +14,8 @@ import conf
from const import confFileName, logPath
from plugin_helper import Plugin_Objects, handleEmpty
from logger import mylog, Logger
from helper import timeNowTZ, get_setting_value, hide_string
from helper import get_setting_value, hide_string
from utils.datetime_utils import timeNowDB
from models.notification_instance import NotificationInstance
from database import DB
from pytz import timezone
@@ -63,7 +64,7 @@ def main():
# Log result
plugin_objects.add_object(
primaryId = pluginName,
secondaryId = timeNowTZ(),
secondaryId = timeNowDB(),
watched1 = notification["GUID"],
watched2 = handleEmpty(response_text),
watched3 = response_status_code,

View File

@@ -11,8 +11,9 @@ sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
import conf
from const import confFileName, logPath
from plugin_helper import Plugin_Objects
from utils.datetime_utils import timeNowDB
from logger import mylog, Logger
from helper import timeNowTZ, get_setting_value
from helper import get_setting_value
from models.notification_instance import NotificationInstance
from database import DB
from pytz import timezone
@@ -61,7 +62,7 @@ def main():
# Log result
plugin_objects.add_object(
primaryId=pluginName,
secondaryId=timeNowTZ(),
secondaryId=timeNowDB(),
watched1=notification["GUID"],
watched2=result,
watched3='null',

View File

@@ -16,8 +16,9 @@ sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
import conf
from const import logPath, confFileName
from plugin_helper import Plugin_Objects, handleEmpty
from utils.datetime_utils import timeNowDB
from logger import mylog, Logger
from helper import timeNowTZ, get_setting_value, write_file
from helper import get_setting_value, write_file
from models.notification_instance import NotificationInstance
from database import DB
from pytz import timezone
@@ -66,7 +67,7 @@ def main():
# Log result
plugin_objects.add_object(
primaryId = pluginName,
secondaryId = timeNowTZ(),
secondaryId = timeNowDB(),
watched1 = notification["GUID"],
watched2 = handleEmpty(response_stdout),
watched3 = handleEmpty(response_stderr),

View File

@@ -1,20 +1,25 @@
#!/usr/bin/env python
from database import DB
from plugin_helper import Plugin_Objects, handleEmpty
from logger import mylog, Logger
from helper import get_setting_value
from const import logPath
import conf
from pytz import timezone
import os
import time
import pathlib
import argparse
import sys
import re
import base64
import subprocess
# Register NetAlertX directories
INSTALL_PATH="/app"
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from database import DB
from plugin_helper import Plugin_Objects, handleEmpty
from logger import mylog, Logger, append_line_to_file
from helper import get_setting_value
from const import logPath, applicationPath
import conf
from pytz import timezone
# Make sure the TIMEZONE for logging is correct
conf.tz = timezone(get_setting_value("TIMEZONE"))

View File

@@ -44,8 +44,11 @@ def main():
nmapArgs = ['sudo', 'nmap', '--privileged', '--script', 'broadcast-dhcp-discover']
try:
# Number of DHCP discovery probes to send
dhcp_probes = 1
newLines = [datetime.now().strftime("%Y-%m-%d %H:%M:%S")]
# Initialize a list to store output lines from the scan
newLines = []
for _ in range(dhcp_probes):
output = subprocess.check_output(nmapArgs, universal_newlines=True, stderr=subprocess.STDOUT, timeout=timeoutSec)

View File

@@ -12,8 +12,9 @@ INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from plugin_helper import Plugin_Objects
from utils.datetime_utils import timeNowDB
from logger import mylog, Logger, append_line_to_file
from helper import timeNowTZ, check_IP_format, get_setting_value
from helper import check_IP_format, get_setting_value
from const import logPath
import conf
from pytz import timezone
@@ -73,7 +74,7 @@ def main():
mylog('verbose', [f'[{pluginName}] Curl Fallback (new_internet_IP|cmd_output): {new_internet_IP} | {cmd_output}'])
# logging
append_line_to_file (logPath + '/IP_changes.log', '['+str(timeNowTZ()) +']\t'+ new_internet_IP +'\n')
append_line_to_file (logPath + '/IP_changes.log', '['+str(timeNowDB()) +']\t'+ new_internet_IP +'\n')
plugin_objects = Plugin_Objects(RESULT_FILE)

View File

@@ -10,8 +10,9 @@ INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from plugin_helper import Plugin_Objects
from utils.datetime_utils import timeNowDB
from logger import mylog, Logger
from helper import timeNowTZ, get_setting_value
from helper import get_setting_value
import conf
from pytz import timezone
from const import logPath
@@ -38,7 +39,7 @@ def main():
speedtest_result = run_speedtest()
plugin_objects.add_object(
primaryId = 'Speedtest',
secondaryId = timeNowTZ(),
secondaryId = timeNowDB(),
watched1 = speedtest_result['download_speed'],
watched2 = speedtest_result['upload_speed'],
watched3 = 'null',

View File

@@ -39,10 +39,6 @@ def main():
mylog('verbose', [f'[{pluginName}] In script'])
# Create a database connection
db = DB() # instance of class DB
db.open()
timeout = get_setting_value('NMAPDEV_RUN_TIMEOUT')
fakeMac = get_setting_value('NMAPDEV_FAKE_MAC')
subnets = get_setting_value('SCAN_SUBNETS')

View File

@@ -11,7 +11,8 @@ sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from plugin_helper import Plugin_Objects, decodeBase64
from logger import mylog, Logger, append_line_to_file
from helper import timeNowTZ, get_setting_value
from utils.datetime_utils import timeNowDB
from helper import get_setting_value
from const import logPath
import conf
from pytz import timezone
@@ -155,7 +156,7 @@ def performNmapScan(deviceIPs, deviceMACs, timeoutSec, args):
elif 'PORT' in line and 'STATE' in line and 'SERVICE' in line:
startCollecting = False # end reached
elif startCollecting and len(line.split()) == 3:
newEntriesTmp.append(nmap_entry(ip, deviceMACs[devIndex], timeNowTZ(), line.split()[0], line.split()[1], line.split()[2]))
newEntriesTmp.append(nmap_entry(ip, deviceMACs[devIndex], timeNowDB(), line.split()[0], line.split()[1], line.split()[2]))
newPortsPerDevice += 1
elif 'Nmap done' in line:
duration = line.split('scanned in ')[1]

View File

@@ -149,7 +149,7 @@
"description": [
{
"language_code": "en_us",
"string": "You can specify a SQL where condition to filter out Events from notifications. For example <code>AND devLastIP NOT LIKE '192.168.3.%'</code> will always exclude New Device notifications for all devices with the IP starting with <code>192.168.3.%</code>."
"string": "You can specify a SQL where condition to filter out Events from notifications. For example <code>AND devLastIP NOT LIKE '192.168.3.%'</code> will always exclude any Event notifications for all devices with the IP starting with <code>192.168.3.%</code>."
}
]
}

View File

@@ -12,7 +12,8 @@ INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.append(f"{INSTALL_PATH}/front/plugins")
sys.path.append(f'{INSTALL_PATH}/server')
from logger import mylog
from logger import mylog, Logger
from utils.datetime_utils import timeNowDB
from const import default_tz, fullConfPath
#-------------------------------------------------------------------------------
@@ -205,7 +206,7 @@ class Plugin_Object:
self.pluginPref = ""
self.primaryId = primaryId
self.secondaryId = secondaryId
self.created = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
self.created = timeNowDB()
self.changed = ""
self.watched1 = watched1
self.watched2 = watched2

View File

@@ -13,11 +13,12 @@ INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from plugin_helper import Plugin_Objects
from plugin_utils import get_plugins_configs, decode_and_rename_files
from utils.plugin_utils import get_plugins_configs, decode_and_rename_files
from logger import mylog, Logger
from const import fullDbPath, logPath
from helper import timeNowTZ, get_setting_value
from crypto_utils import encrypt_data
from helper import get_setting_value
from utils.datetime_utils import timeNowDB
from utils.crypto_utils import encrypt_data
from messaging.in_app import write_notification
import conf
from pytz import timezone
@@ -147,7 +148,7 @@ def main():
message = f'[{pluginName}] Device data from node "{node_name}" written to {log_file_name}'
mylog('verbose', [message])
if lggr.isAbove('verbose'):
write_notification(message, 'info', timeNowTZ())
write_notification(message, 'info', timeNowDB())
# Process any received data for the Device DB table (ONLY JSON)
@@ -178,8 +179,10 @@ def main():
# make sure the file has the correct name (e.g. last_result.encoded.Node_1.1.log) to skip any other plugin files
if len(file_name.split('.')) > 2:
# Store e.g. Node_1 from last_result.encoded.Node_1.1.log
syncHubNodeName = file_name.split('.')[1]
# Extract node name from either last_result.decoded.Node_1.1.log or last_result.Node_1.log
parts = file_name.split('.')
# If decoded/encoded file, node name is at index 2; otherwise at index 1
syncHubNodeName = parts[2] if 'decoded' in file_name or 'encoded' in file_name else parts[1]
file_path = f"{LOG_PATH}/{file_name}"
@@ -253,7 +256,7 @@ def main():
message = f'[{pluginName}] Inserted "{len(new_devices)}" new devices'
mylog('verbose', [message])
write_notification(message, 'info', timeNowTZ())
write_notification(message, 'info', timeNowDB())
# Commit and close the connection
@@ -297,7 +300,7 @@ def send_data(api_token, file_content, encryption_key, file_path, node_name, pre
if response.status_code == 200:
message = f'[{pluginName}] Data for "{file_path}" sent successfully via {final_endpoint}'
mylog('verbose', [message])
write_notification(message, 'info', timeNowTZ())
write_notification(message, 'info', timeNowDB())
return True
except requests.RequestException as e:
@@ -306,7 +309,7 @@ def send_data(api_token, file_content, encryption_key, file_path, node_name, pre
# If all endpoints fail
message = f'[{pluginName}] Failed to send data for "{file_path}" via all endpoints'
mylog('verbose', [message])
write_notification(message, 'alert', timeNowTZ())
write_notification(message, 'alert', timeNowDB())
return False
@@ -330,7 +333,7 @@ def get_data(api_token, node_url):
except json.JSONDecodeError:
message = f'[{pluginName}] Failed to parse JSON from {final_endpoint}'
mylog('verbose', [message])
write_notification(message, 'alert', timeNowTZ())
write_notification(message, 'alert', timeNowDB())
return ""
except requests.RequestException as e:
mylog('verbose', [f'[{pluginName}] Error calling {final_endpoint}: {e}'])
@@ -338,7 +341,7 @@ def get_data(api_token, node_url):
# If all endpoints fail
message = f'[{pluginName}] Failed to get data from "{node_url}" via all endpoints'
mylog('verbose', [message])
write_notification(message, 'alert', timeNowTZ())
write_notification(message, 'alert', timeNowDB())
return ""

View File

@@ -23,8 +23,9 @@ from pathlib import Path
# Register NetAlertX modules
import conf
from const import *
from logger import mylog
from helper import filePermissions, timeNowTZ
from logger import mylog
from helper import filePermissions
from utils.datetime_utils import timeNowTZ
from app_state import updateState
from api import update_api
from scan.session_events import process_scan

View File

@@ -21,7 +21,8 @@ from const import (
sql_devices_filters,
)
from logger import mylog
from helper import write_file, get_setting_value, timeNowTZ
from helper import write_file, get_setting_value
from utils.datetime_utils import timeNowTZ
from app_state import updateState
from models.user_events_queue_instance import UserEventsQueueInstance

View File

@@ -14,61 +14,20 @@ from helper import get_setting_value
from db.db_helper import get_date_from_period
from app_state import updateState
from api_server.graphql_endpoint import devicesSchema
from api_server.device_endpoint import (
get_device_data,
set_device_data,
delete_device,
delete_device_events,
reset_device_props,
copy_device,
update_device_column,
)
from api_server.devices_endpoint import (
get_all_devices,
delete_unknown_devices,
delete_all_with_empty_macs,
delete_devices,
export_devices,
import_csv,
devices_totals,
devices_by_status,
)
from api_server.events_endpoint import (
delete_events,
delete_events_older_than,
get_events,
create_event,
get_events_totals,
)
from api_server.history_endpoint import delete_online_history
from api_server.prometheus_endpoint import get_metric_stats
from api_server.sessions_endpoint import (
get_sessions,
delete_session,
create_session,
get_sessions_calendar,
get_device_sessions,
get_session_events,
)
from api_server.nettools_endpoint import (
wakeonlan,
traceroute,
speedtest,
nslookup,
nmap_scan,
internet_info,
)
from api_server.dbquery_endpoint import read_query, write_query, update_query, delete_query
from api_server.sync_endpoint import handle_sync_post, handle_sync_get
from messaging.in_app import (
write_notification,
mark_all_notifications_read,
delete_notifications,
get_unread_notifications,
delete_notification,
mark_notification_as_read,
)
from .graphql_endpoint import devicesSchema
from .device_endpoint import get_device_data, set_device_data, delete_device, delete_device_events, reset_device_props, copy_device, update_device_column
from .devices_endpoint import get_all_devices, delete_unknown_devices, delete_all_with_empty_macs, delete_devices, export_devices, import_csv, devices_totals, devices_by_status
from .events_endpoint import delete_events, delete_events_older_than, get_events, create_event, get_events_totals
from .history_endpoint import delete_online_history
from .prometheus_endpoint import get_metric_stats
from .sessions_endpoint import get_sessions, delete_session, create_session, get_sessions_calendar, get_device_sessions, get_session_events
from .nettools_endpoint import wakeonlan, traceroute, speedtest, nslookup, nmap_scan, internet_info
from .dbquery_endpoint import read_query, write_query, update_query, delete_query
from .sync_endpoint import handle_sync_post, handle_sync_get
from .logs_endpoint import clean_log
from models.user_events_queue_instance import UserEventsQueueInstance
from messaging.in_app import write_notification, mark_all_notifications_read, delete_notifications, get_unread_notifications, delete_notification, mark_notification_as_read
# Flask application
app = Flask(__name__)
@@ -85,11 +44,24 @@ CORS(
r"/dbquery/*": {"origins": "*"},
r"/messaging/*": {"origins": "*"},
r"/events/*": {"origins": "*"},
r"/logs/*": {"origins": "*"}
},
supports_credentials=True,
allow_headers=["Authorization", "Content-Type"],
)
# -------------------------------------------------------------------
# Custom handler for 404 - Route not found
# -------------------------------------------------------------------
@app.errorhandler(404)
def not_found(error):
response = {
"success": False,
"error": "API route not found",
"message": f"The requested URL {error.description if hasattr(error, 'description') else ''} was not found on the server.",
}
return jsonify(response), 404
# --------------------------
# GraphQL Endpoints
# --------------------------
@@ -107,9 +79,9 @@ def graphql_debug():
def graphql_endpoint():
# Check for API token in headers
if not is_authorized():
msg = "[graphql_server] Unauthorized access attempt - make sure your GRAPHQL_PORT and API_TOKEN settings are correct."
mylog("verbose", [msg])
return jsonify({"error": msg}), 401
msg = '[graphql_server] Unauthorized access attempt - make sure your GRAPHQL_PORT and API_TOKEN settings are correct.'
mylog('verbose', [msg])
return jsonify({"success": False, "message": msg}), 401
# Retrieve and log request data
data = request.get_json()
@@ -137,7 +109,7 @@ def graphql_endpoint():
@app.route("/settings/<setKey>", methods=["GET"])
def api_get_setting(setKey):
if not is_authorized():
return jsonify({"error": "Forbidden"}), 403
return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403
value = get_setting_value(setKey)
return jsonify({"success": True, "value": value})
@@ -150,51 +122,49 @@ def api_get_setting(setKey):
@app.route("/device/<mac>", methods=["GET"])
def api_get_device(mac):
if not is_authorized():
return jsonify({"error": "Forbidden"}), 403
return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403
return get_device_data(mac)
@app.route("/device/<mac>", methods=["POST"])
def api_set_device(mac):
if not is_authorized():
return jsonify({"error": "Forbidden"}), 403
return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403
return set_device_data(mac, request.json)
@app.route("/device/<mac>/delete", methods=["DELETE"])
def api_delete_device(mac):
if not is_authorized():
return jsonify({"error": "Forbidden"}), 403
return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403
return delete_device(mac)
@app.route("/device/<mac>/events/delete", methods=["DELETE"])
def api_delete_device_events(mac):
if not is_authorized():
return jsonify({"error": "Forbidden"}), 403
return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403
return delete_device_events(mac)
@app.route("/device/<mac>/reset-props", methods=["POST"])
def api_reset_device_props(mac):
if not is_authorized():
return jsonify({"error": "Forbidden"}), 403
return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403
return reset_device_props(mac, request.json)
@app.route("/device/copy", methods=["POST"])
def api_copy_device():
if not is_authorized():
return jsonify({"error": "Forbidden"}), 403
return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403
data = request.get_json() or {}
mac_from = data.get("macFrom")
mac_to = data.get("macTo")
if not mac_from or not mac_to:
return jsonify(
{"success": False, "error": "macFrom and macTo are required"}
), 400
return jsonify({"success": False, "message": "ERROR: Missing parameters", "error": "macFrom and macTo are required"}), 400
return copy_device(mac_from, mac_to)
@@ -202,16 +172,14 @@ def api_copy_device():
@app.route("/device/<mac>/update-column", methods=["POST"])
def api_update_device_column(mac):
if not is_authorized():
return jsonify({"error": "Forbidden"}), 403
return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403
data = request.get_json() or {}
column_name = data.get("columnName")
column_value = data.get("columnValue")
if not column_name or not column_value:
return jsonify(
{"success": False, "error": "columnName and columnValue are required"}
), 400
return jsonify({"success": False, "message": "ERROR: Missing parameters", "error": "columnName and columnValue are required"}), 400
return update_device_column(mac, column_name, column_value)
@@ -224,15 +192,15 @@ def api_update_device_column(mac):
@app.route("/devices", methods=["GET"])
def api_get_devices():
if not is_authorized():
return jsonify({"error": "Forbidden"}), 403
return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403
return get_all_devices()
@app.route("/devices", methods=["DELETE"])
def api_delete_devices():
if not is_authorized():
return jsonify({"error": "Forbidden"}), 403
return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403
macs = request.json.get("macs") if request.is_json else None
return delete_devices(macs)
@@ -241,14 +209,14 @@ def api_delete_devices():
@app.route("/devices/empty-macs", methods=["DELETE"])
def api_delete_all_empty_macs():
if not is_authorized():
return jsonify({"error": "Forbidden"}), 403
return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403
return delete_all_with_empty_macs()
@app.route("/devices/unknown", methods=["DELETE"])
def api_delete_unknown_devices():
if not is_authorized():
return jsonify({"error": "Forbidden"}), 403
return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403
return delete_unknown_devices()
@@ -256,7 +224,7 @@ def api_delete_unknown_devices():
@app.route("/devices/export/<format>", methods=["GET"])
def api_export_devices(format=None):
if not is_authorized():
return jsonify({"error": "Forbidden"}), 403
return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403
export_format = (format or request.args.get("format", "csv")).lower()
return export_devices(export_format)
@@ -265,21 +233,21 @@ def api_export_devices(format=None):
@app.route("/devices/import", methods=["POST"])
def api_import_csv():
if not is_authorized():
return jsonify({"error": "Forbidden"}), 403
return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403
return import_csv(request.files.get("file"))
@app.route("/devices/totals", methods=["GET"])
def api_devices_totals():
if not is_authorized():
return jsonify({"error": "Forbidden"}), 403
return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403
return devices_totals()
@app.route("/devices/by-status", methods=["GET"])
def api_devices_by_status():
if not is_authorized():
return jsonify({"error": "Forbidden"}), 403
return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403
status = request.args.get("status", "") if request.args else None
@@ -292,7 +260,7 @@ def api_devices_by_status():
@app.route("/nettools/wakeonlan", methods=["POST"])
def api_wakeonlan():
if not is_authorized():
return jsonify({"error": "Forbidden"}), 403
return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403
mac = request.json.get("devMac")
return wakeonlan(mac)
@@ -301,7 +269,7 @@ def api_wakeonlan():
@app.route("/nettools/traceroute", methods=["POST"])
def api_traceroute():
if not is_authorized():
return jsonify({"error": "Forbidden"}), 403
return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403
ip = request.json.get("devLastIP")
return traceroute(ip)
@@ -309,7 +277,7 @@ def api_traceroute():
@app.route("/nettools/speedtest", methods=["GET"])
def api_speedtest():
if not is_authorized():
return jsonify({"error": "Forbidden"}), 403
return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403
return speedtest()
@@ -320,11 +288,11 @@ def api_nslookup():
Expects JSON with 'devLastIP'.
"""
if not is_authorized():
return jsonify({"success": False, "error": "Forbidden"}), 403
return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403
data = request.get_json(silent=True)
if not data or "devLastIP" not in data:
return jsonify({"success": False, "error": "Missing 'devLastIP'"}), 400
return jsonify({"success": False, "message": "ERROR: Missing parameters", "error": "Missing 'devLastIP'"}), 400
ip = data["devLastIP"]
return nslookup(ip)
@@ -337,11 +305,11 @@ def api_nmap():
Expects JSON with 'scan' (IP address) and 'mode' (scan mode).
"""
if not is_authorized():
return jsonify({"success": False, "error": "Forbidden"}), 403
return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403
data = request.get_json(silent=True)
if not data or "scan" not in data or "mode" not in data:
return jsonify({"success": False, "error": "Missing 'scan' or 'mode'"}), 400
return jsonify({"success": False, "message": "ERROR: Missing parameters", "error": "Missing 'scan' or 'mode'"}), 400
ip = data["scan"]
mode = data["mode"]
@@ -351,7 +319,7 @@ def api_nmap():
@app.route("/nettools/internetinfo", methods=["GET"])
def api_internet_info():
if not is_authorized():
return jsonify({"success": False, "error": "Forbidden"}), 403
return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403
return internet_info()
@@ -363,26 +331,26 @@ def api_internet_info():
@app.route("/dbquery/read", methods=["POST"])
def dbquery_read():
if not is_authorized():
return jsonify({"error": "Forbidden"}), 403
return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403
data = request.get_json() or {}
raw_sql_b64 = data.get("rawSql")
if not raw_sql_b64:
return jsonify({"error": "rawSql is required"}), 400
return jsonify({"success": False, "message": "ERROR: Missing parameters", "error": "rawSql is required"}), 400
return read_query(raw_sql_b64)
@app.route("/dbquery/write", methods=["POST"])
def dbquery_write():
if not is_authorized():
return jsonify({"error": "Forbidden"}), 403
return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403
data = request.get_json() or {}
raw_sql_b64 = data.get("rawSql")
if not raw_sql_b64:
return jsonify({"error": "rawSql is required"}), 400
return jsonify({"success": False, "message": "ERROR: Missing parameters", "error": "rawSql is required"}), 400
return write_query(raw_sql_b64)
@@ -390,12 +358,12 @@ def dbquery_write():
@app.route("/dbquery/update", methods=["POST"])
def dbquery_update():
if not is_authorized():
return jsonify({"error": "Forbidden"}), 403
return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403
data = request.get_json() or {}
required = ["columnName", "id", "dbtable", "columns", "values"]
if not all(data.get(k) for k in required):
return jsonify({"error": "Missing required parameters"}), 400
return jsonify({"success": False, "message": "ERROR: Missing parameters", "error": "Missing required 'columnName', 'id', 'dbtable', 'columns', or 'values' query parameter"}), 400
return update_query(
column_name=data["columnName"],
@@ -409,12 +377,12 @@ def dbquery_update():
@app.route("/dbquery/delete", methods=["POST"])
def dbquery_delete():
if not is_authorized():
return jsonify({"error": "Forbidden"}), 403
return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403
data = request.get_json() or {}
required = ["columnName", "id", "dbtable"]
if not all(data.get(k) for k in required):
return jsonify({"error": "Missing required parameters"}), 400
return jsonify({"success": False, "message": "ERROR: Missing parameters", "error": "Missing required 'columnName', 'id', or 'dbtable' query parameter"}), 400
return delete_query(
column_name=data["columnName"],
@@ -431,10 +399,51 @@ def dbquery_delete():
@app.route("/history", methods=["DELETE"])
def api_delete_online_history():
if not is_authorized():
return jsonify({"error": "Forbidden"}), 403
return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403
return delete_online_history()
# --------------------------
# Logs
# --------------------------
@app.route("/logs", methods=["DELETE"])
def api_clean_log():
if not is_authorized():
return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403
file = request.args.get("file")
if not file:
return jsonify({"success": False, "message": "ERROR: Missing parameters", "error": "Missing 'file' query parameter"}), 400
return clean_log(file)
@app.route("/logs/add-to-execution-queue", methods=["POST"])
def api_add_to_execution_queue():
if not is_authorized():
return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403
queue = UserEventsQueueInstance()
# Get JSON payload safely
data = request.get_json(silent=True) or {}
action = data.get("action")
if not action:
return jsonify({"success": False, "message": "ERROR: Missing parameters", "error": "Missing required 'action' field in JSON body"}), 400
success, message = queue.add_event(action)
status_code = 200 if success else 400
response = {"success": success, "message": message}
if not success:
response["error"] = "ERROR"
return jsonify(response), status_code
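A hedged usage sketch for the new queue endpoint (the port, auth header, and action string are illustrative assumptions; the payload shape matches the handler above):
# Hedged sketch - illustrative only.
import requests
resp = requests.post(
    "http://localhost:20212/logs/add-to-execution-queue",
    headers={"Authorization": "Bearer <API_TOKEN>"},
    json={"action": "run|ARPSCAN"},  # hypothetical action string
)
print(resp.status_code, resp.json())  # e.g. 200 {'success': True, 'message': '...'}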
# --------------------------
# Device Events
# --------------------------
@@ -443,7 +452,7 @@ def api_delete_online_history():
@app.route("/events/create/<mac>", methods=["POST"])
def api_create_event(mac):
if not is_authorized():
return jsonify({"error": "Forbidden"}), 403
return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403
data = request.json or {}
ip = data.get("ip", "0.0.0.0")
@@ -462,21 +471,21 @@ def api_create_event(mac):
@app.route("/events/<mac>", methods=["DELETE"])
def api_events_by_mac(mac):
if not is_authorized():
return jsonify({"error": "Forbidden"}), 403
return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403
return delete_device_events(mac)
@app.route("/events", methods=["DELETE"])
def api_delete_all_events():
if not is_authorized():
return jsonify({"error": "Forbidden"}), 403
return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403
return delete_events()
@app.route("/events", methods=["GET"])
def api_get_events():
if not is_authorized():
return jsonify({"error": "Forbidden"}), 403
return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403
mac = request.args.get("mac")
return get_events(mac)
@@ -489,15 +498,15 @@ def api_delete_old_events(days: int):
Example: DELETE /events/30
"""
if not is_authorized():
return jsonify({"error": "Forbidden"}), 403
return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403
return delete_events_older_than(days)
@app.route("/sessions/totals", methods=["GET"])
def api_get_events_totals():
if not is_authorized():
return jsonify({"error": "Forbidden"}), 403
return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403
period = get_date_from_period(request.args.get("period", "7 days"))
return get_events_totals(period)
@@ -511,7 +520,7 @@ def api_get_events_totals():
@app.route("/sessions/create", methods=["POST"])
def api_create_session():
if not is_authorized():
return jsonify({"error": "Forbidden"}), 403
return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403
data = request.json
mac = data.get("mac")
@@ -522,7 +531,7 @@ def api_create_session():
event_type_disc = data.get("event_type_disc", "Disconnected")
if not mac or not ip or not start_time:
return jsonify({"success": False, "error": "Missing required parameters"}), 400
return jsonify({"success": False, "message": "ERROR: Missing parameters", "error": "Missing required 'mac', 'ip', or 'start_time' query parameter"}), 400
return create_session(
mac, ip, start_time, end_time, event_type_conn, event_type_disc
@@ -532,11 +541,11 @@ def api_create_session():
@app.route("/sessions/delete", methods=["DELETE"])
def api_delete_session():
if not is_authorized():
return jsonify({"error": "Forbidden"}), 403
return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403
mac = request.json.get("mac") if request.is_json else None
if not mac:
return jsonify({"success": False, "error": "Missing MAC parameter"}), 400
return jsonify({"success": False, "message": "ERROR: Missing parameters", "error": "Missing 'mac' query parameter"}), 400
return delete_session(mac)
@@ -544,7 +553,7 @@ def api_delete_session():
@app.route("/sessions/list", methods=["GET"])
def api_get_sessions():
if not is_authorized():
return jsonify({"error": "Forbidden"}), 403
return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403
mac = request.args.get("mac")
start_date = request.args.get("start_date")
@@ -556,7 +565,7 @@ def api_get_sessions():
@app.route("/sessions/calendar", methods=["GET"])
def api_get_sessions_calendar():
if not is_authorized():
return jsonify({"error": "Forbidden"}), 403
return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403
# Query params: /sessions/calendar?start=2025-08-01&end=2025-08-21
start_date = request.args.get("start")
@@ -568,7 +577,7 @@ def api_get_sessions_calendar():
@app.route("/sessions/<mac>", methods=["GET"])
def api_device_sessions(mac):
if not is_authorized():
return jsonify({"error": "Forbidden"}), 403
return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403
period = request.args.get("period", "1 day")
return get_device_sessions(mac, period)
@@ -577,7 +586,7 @@ def api_device_sessions(mac):
@app.route("/sessions/session-events", methods=["GET"])
def api_get_session_events():
if not is_authorized():
return jsonify({"error": "Forbidden"}), 403
return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403
session_event_type = request.args.get("type", "all")
period = get_date_from_period(request.args.get("period", "7 days"))
@@ -590,7 +599,7 @@ def api_get_session_events():
@app.route("/metrics")
def metrics():
if not is_authorized():
return jsonify({"error": "Forbidden"}), 403
return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403
# Return Prometheus metrics as plain text
return Response(get_metric_stats(), mimetype="text/plain")
@@ -602,15 +611,15 @@ def metrics():
@app.route("/messaging/in-app/write", methods=["POST"])
def api_write_notification():
if not is_authorized():
return jsonify({"error": "Forbidden"}), 403
return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403
data = request.json or {}
content = data.get("content")
level = data.get("level", "alert")
if not content:
return jsonify({"success": False, "error": "Missing content"}), 400
return jsonify({"success": False, "message": "ERROR: Missing parameters", "error": "Missing content"}), 400
write_notification(content, level)
return jsonify({"success": True})
@@ -618,7 +627,7 @@ def api_write_notification():
@app.route("/messaging/in-app/unread", methods=["GET"])
def api_get_unread_notifications():
if not is_authorized():
return jsonify({"error": "Forbidden"}), 403
return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403
return get_unread_notifications()
@@ -626,7 +635,7 @@ def api_get_unread_notifications():
@app.route("/messaging/in-app/read/all", methods=["POST"])
def api_mark_all_notifications_read():
if not is_authorized():
return jsonify({"error": "Forbidden"}), 403
return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403
return jsonify(mark_all_notifications_read())
@@ -634,7 +643,7 @@ def api_mark_all_notifications_read():
@app.route("/messaging/in-app/delete", methods=["DELETE"])
def api_delete_all_notifications():
if not is_authorized():
return jsonify({"error": "Forbidden"}), 403
return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403
return delete_notifications()
@@ -643,35 +652,34 @@ def api_delete_all_notifications():
def api_delete_notification(guid):
"""Delete a single notification by GUID."""
if not is_authorized():
return jsonify({"error": "Forbidden"}), 403
return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403
result = delete_notification(guid)
if result.get("success"):
return jsonify({"success": True})
else:
return jsonify({"success": False, "error": result.get("error")}), 500
return jsonify({"success": False, "message": "ERROR", "error": result.get("error")}), 500
@app.route("/messaging/in-app/read/<guid>", methods=["POST"])
def api_mark_notification_read(guid):
"""Mark a single notification as read by GUID."""
if not is_authorized():
return jsonify({"error": "Forbidden"}), 403
return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403
result = mark_notification_as_read(guid)
if result.get("success"):
return jsonify({"success": True})
else:
return jsonify({"success": False, "error": result.get("error")}), 500
return jsonify({"success": False, "message": "ERROR", "error": result.get("error")}), 500
# --------------------------
# SYNC endpoint
# --------------------------
@app.route("/sync", methods=["GET", "POST"])
def sync_endpoint():
if not is_authorized():
return jsonify({"error": "Forbidden"}), 403
return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403
if request.method == "GET":
return handle_sync_get()
@@ -681,7 +689,7 @@ def sync_endpoint():
msg = "[sync endpoint] Method Not Allowed"
write_notification(msg, "alert")
mylog("verbose", [msg])
return jsonify({"error": "Method Not Allowed"}), 405
return jsonify({"success": False, "message": "ERROR: No allowed", "error": "Method Not Allowed"}), 405
# --------------------------

View File

@@ -10,7 +10,8 @@ INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from database import get_temp_db_connection
from helper import is_random_mac, format_date, get_setting_value
from helper import is_random_mac, get_setting_value
from utils.datetime_utils import timeNowDB, format_date
from db.db_helper import row_to_json, get_date_from_period
# --------------------------
@@ -25,9 +26,11 @@ def get_device_data(mac):
conn = get_temp_db_connection()
cur = conn.cursor()
now = timeNowDB()
# Special case for new device
if mac.lower() == "new":
now = datetime.now().strftime("%Y-%m-%d %H:%M")
device_data = {
"devMac": "",
"devName": "",
@@ -75,7 +78,6 @@ def get_device_data(mac):
# Compute period date for sessions/events
period = request.args.get("period", "") # e.g., '7 days', '1 month', etc.
period_date_sql = get_date_from_period(period)
current_date = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
# Fetch device info + computed fields
sql = f"""
@@ -103,7 +105,7 @@ def get_device_data(mac):
AND eve_EventType = 'Device Down') AS devDownAlerts,
(SELECT CAST(MAX(0, SUM(
julianday(IFNULL(ses_DateTimeDisconnection,'{current_date}')) -
julianday(IFNULL(ses_DateTimeDisconnection,'{now}')) -
julianday(CASE WHEN ses_DateTimeConnection < {period_date_sql}
THEN {period_date_sql} ELSE ses_DateTimeConnection END)
) * 24) AS INT)
@@ -186,10 +188,8 @@ def set_device_data(mac, data):
data.get("devSkipRepeated", 0),
data.get("devIsNew", 0),
data.get("devIsArchived", 0),
data.get("devLastConnection", datetime.now().strftime("%Y-%m-%d %H:%M:%S")),
data.get(
"devFirstConnection", datetime.now().strftime("%Y-%m-%d %H:%M:%S")
),
data.get("devLastConnection", timeNowDB()),
data.get("devFirstConnection", timeNowDB()),
data.get("devLastIP", ""),
data.get("devGUID", ""),
data.get("devCustomProps", ""),

View File

@@ -16,6 +16,7 @@ sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from database import get_temp_db_connection
from db.db_helper import get_table_json, get_device_condition_by_status
from utils.datetime_utils import format_date
# --------------------------

View File

@@ -10,11 +10,9 @@ INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from database import get_temp_db_connection
from helper import (
mylog,
ensure_datetime,
)
from helper import is_random_mac, mylog
from db.db_helper import row_to_json, get_date_from_period
from utils.datetime_utils import format_date, format_date_iso, format_event_date, ensure_datetime
# --------------------------

View File

@@ -1,5 +1,5 @@
import graphene
from graphene import ObjectType, String, Int, Boolean, List, Field, InputObjectType
from graphene import ObjectType, String, Int, Boolean, List, Field, InputObjectType, Argument
import json
import sys
import os
@@ -111,6 +111,22 @@ class SettingResult(ObjectType):
settings = List(Setting)
count = Int()
# --- LANGSTRINGS ---
# In-memory cache for lang strings
_langstrings_cache = {} # caches lists per file (core JSON or plugin)
_langstrings_cache_mtime = {} # tracks last modified times
# LangString ObjectType
class LangString(ObjectType):
langCode = String()
langStringKey = String()
langStringText = String()
class LangStringResult(ObjectType):
langStrings = List(LangString)
count = Int()
# Define Query Type with Pagination Support
class Query(ObjectType):
@@ -324,6 +340,107 @@ class Query(ObjectType):
return SettingResult(settings=settings, count=len(settings))
# --- LANGSTRINGS ---
langStrings = Field(
LangStringResult,
langCode=Argument(String, required=False),
langStringKey=Argument(String, required=False)
)
def resolve_langStrings(self, info, langCode=None, langStringKey=None, fallback_to_en=True):
"""
Collect language strings, optionally filtered by language code and/or string key.
Caches results in memory for performance and can fall back to 'en_us' if a string is missing.
"""
global _langstrings_cache, _langstrings_cache_mtime
langStrings = []
# --- CORE JSON FILES ---
language_folder = '/app/front/php/templates/language/'
if os.path.exists(language_folder):
for filename in os.listdir(language_folder):
if filename.endswith('.json'):
file_lang_code = filename.replace('.json', '')
# Filter by langCode if provided
if langCode and file_lang_code != langCode:
continue
file_path = os.path.join(language_folder, filename)
file_mtime = os.path.getmtime(file_path)
cache_key = f'core_{file_lang_code}'
# Use cached data if available and not modified
if cache_key in _langstrings_cache_mtime and _langstrings_cache_mtime[cache_key] == file_mtime:
lang_list = _langstrings_cache[cache_key]
else:
try:
with open(file_path, 'r', encoding='utf-8') as f:
data = json.load(f)
lang_list = [
LangString(
langCode=file_lang_code,
langStringKey=key,
langStringText=value
) for key, value in data.items()
]
_langstrings_cache[cache_key] = lang_list
_langstrings_cache_mtime[cache_key] = file_mtime
except (FileNotFoundError, json.JSONDecodeError) as e:
mylog('none', f'[graphql_schema] Error loading core language strings from {filename}: {e}')
lang_list = []
langStrings.extend(lang_list)
# --- PLUGIN STRINGS ---
plugin_file = folder + 'table_plugins_language_strings.json'
try:
file_mtime = os.path.getmtime(plugin_file)
cache_key = 'plugin'
if cache_key in _langstrings_cache_mtime and _langstrings_cache_mtime[cache_key] == file_mtime:
plugin_list = _langstrings_cache[cache_key]
else:
with open(plugin_file, 'r', encoding='utf-8') as f:
plugin_data = json.load(f).get("data", [])
plugin_list = [
LangString(
langCode=entry.get("Language_Code"),
langStringKey=entry.get("String_Key"),
langStringText=entry.get("String_Value")
) for entry in plugin_data
]
_langstrings_cache[cache_key] = plugin_list
_langstrings_cache_mtime[cache_key] = file_mtime
except (FileNotFoundError, json.JSONDecodeError) as e:
mylog('none', f'[graphql_schema] Error loading plugin language strings from {plugin_file}: {e}')
plugin_list = []
# Filter plugin strings by langCode if provided
if langCode:
plugin_list = [p for p in plugin_list if p.langCode == langCode]
langStrings.extend(plugin_list)
# --- Filter by string key if requested ---
if langStringKey:
langStrings = [ls for ls in langStrings if ls.langStringKey == langStringKey]
# --- Fallback to en_us if enabled and requested lang is missing ---
if fallback_to_en and langCode and langCode != "en_us":
for i, ls in enumerate(langStrings):
if not ls.langStringText: # empty string triggers fallback
# try to get en_us version
en_list = _langstrings_cache.get("core_en_us", [])
en_list += [p for p in _langstrings_cache.get("plugin", []) if p.langCode == "en_us"]
en_fallback = [e for e in en_list if e.langStringKey == ls.langStringKey]
if en_fallback:
langStrings[i] = en_fallback[0]
mylog('trace', f'[graphql_schema] Collected {len(langStrings)} language strings '
f'(langCode={langCode}, key={langStringKey}, fallback_to_en={fallback_to_en})')
return LangStringResult(langStrings=langStrings, count=len(langStrings))
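A hedged example query against the new langStrings field (the port, auth header, and string key are hypothetical; the field names match the schema above):
# Hedged sketch - illustrative only.
import requests
query = """
{
  langStrings(langCode: "en_us", langStringKey: "Gen_Save") {
    count
    langStrings { langCode langStringKey langStringText }
  }
}
"""
resp = requests.post(
    "http://localhost:20212/graphql",
    headers={"Authorization": "Bearer <API_TOKEN>"},
    json={"query": query},
)
print(resp.json())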
# helps sort inconsistent datasets that mix integers and strings
def mixed_type_sort_key(value):

View File

@@ -0,0 +1,58 @@
import os
import sys
from flask import jsonify
# Register NetAlertX directories
INSTALL_PATH="/app"
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from const import logPath
from logger import mylog, Logger
from helper import get_setting_value
from utils.datetime_utils import timeNowDB
from messaging.in_app import write_notification
# Make sure log level is initialized correctly
Logger(get_setting_value('LOG_LEVEL'))
def clean_log(log_file):
"""
Purge the content of an allowed log file within the /app/log/ directory.
Args:
log_file (str): Name of the log file to purge.
Returns:
flask.Response: JSON response with success and message keys
"""
allowed_files = [
'app.log', 'app_front.log', 'IP_changes.log', 'stdout.log', 'stderr.log',
'app.php_errors.log', 'execution_queue.log', 'db_is_locked.log'
]
# Validate that the requested filename is allowed to be purged
if log_file not in allowed_files:
msg = f"[clean_log] File {log_file} is not allowed to be purged"
mylog('none', [msg])
write_notification(msg, 'interrupt')
return jsonify({"success": False, "message": msg}), 400
log_path = os.path.join(logPath, log_file)
try:
# Purge content
with open(log_path, "w") as f:
f.write("File manually purged\n")
msg = f"[clean_log] File {log_file} purged successfully"
mylog('minimal', [msg])
write_notification(msg, 'interrupt')
return jsonify({"success": True, "message": msg}), 200
except Exception as e:
msg = f"[clean_log] ERROR Failed to purge {log_file}: {e}"
mylog('none', [msg])
write_notification(msg, 'interrupt')
return jsonify({"success": False, "message": msg}), 500

View File

@@ -10,14 +10,9 @@ INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from database import get_temp_db_connection
from helper import (
format_date,
format_date_iso,
format_event_date,
format_date_diff,
format_ip_long,
)
from db.db_helper import get_date_from_period
from helper import is_random_mac, get_setting_value, mylog, format_ip_long
from db.db_helper import row_to_json, get_date_from_period
from utils.datetime_utils import format_date_iso, format_event_date, format_date_diff, parse_datetime, format_date
# --------------------------
@@ -231,6 +226,7 @@ def get_device_sessions(mac, period):
cur.execute(sql, (mac,))
rows = cur.fetchall()
conn.close()
tz_name = get_setting_value("TIMEZONE") or "UTC"
table_data = {"data": []}
@@ -255,11 +251,9 @@ def get_device_sessions(mac, period):
] in ("<missing event>", None):
dur = "..."
elif row["ses_StillConnected"]:
dur = format_date_diff(row["ses_DateTimeConnection"], None)["text"]
dur = format_date_diff(row["ses_DateTimeConnection"], None, tz_name)["text"]
else:
dur = format_date_diff(
row["ses_DateTimeConnection"], row["ses_DateTimeDisconnection"]
)["text"]
dur = format_date_diff(row["ses_DateTimeConnection"], row["ses_DateTimeDisconnection"], tz_name)["text"]
# Additional Info
info = row["ses_AdditionalInfo"]
@@ -295,6 +289,7 @@ def get_session_events(event_type, period_date):
conn = get_temp_db_connection()
conn.row_factory = sqlite3.Row
cur = conn.cursor()
tz_name = get_setting_value("TIMEZONE") or "UTC"
# Base SQLs
sql_events = f"""
@@ -382,11 +377,11 @@ def get_session_events(event_type, period_date):
if event_type in ("sessions", "missing"):
# Duration
if row[5] and row[6]:
delta = format_date_diff(row[5], row[6])
delta = format_date_diff(row[5], row[6], tz_name)
row[7] = delta["text"]
row[8] = int(delta["total_minutes"] * 60) # seconds
elif row[12] == 1:
delta = format_date_diff(row[5], None)
delta = format_date_diff(row[5], None, tz_name)
row[7] = delta["text"]
row[8] = int(delta["total_minutes"] * 60) # seconds
else:

View File

@@ -2,7 +2,8 @@ import os
import base64
from flask import jsonify, request
from logger import mylog
from helper import get_setting_value, timeNowTZ
from helper import get_setting_value
from utils.datetime_utils import timeNowDB
from messaging.in_app import write_notification
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
@@ -17,22 +18,20 @@ def handle_sync_get():
raw_data = f.read()
except FileNotFoundError:
msg = f"[Plugin: SYNC] Data file not found: {file_path}"
write_notification(msg, "alert", timeNowTZ())
write_notification(msg, "alert", timeNowDB())
mylog("verbose", [msg])
return jsonify({"error": msg}), 500
response_data = base64.b64encode(raw_data).decode("utf-8")
write_notification("[Plugin: SYNC] Data sent", "info", timeNowTZ())
return jsonify(
{
"node_name": get_setting_value("SYNC_node_name"),
"status": 200,
"message": "OK",
"data_base64": response_data,
"timestamp": timeNowTZ(),
}
), 200
write_notification("[Plugin: SYNC] Data sent", "info", timeNowDB())
return jsonify({
"node_name": get_setting_value("SYNC_node_name"),
"status": 200,
"message": "OK",
"data_base64": response_data,
"timestamp": timeNowDB()
}), 200
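For context, a hedged sketch of consuming this GET response on a peer node (port and auth header are assumptions; the keys match the jsonify call above):
# Hedged sketch - illustrative only.
import base64
import requests
resp = requests.get(
    "http://localhost:20212/sync",
    headers={"Authorization": "Bearer <API_TOKEN>"},
)
payload = resp.json()
raw = base64.b64decode(payload["data_base64"])  # original file bytes
print(payload["node_name"], payload["timestamp"], len(raw))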
def handle_sync_post():
@@ -65,11 +64,11 @@ def handle_sync_post():
f.write(data)
except Exception as e:
msg = f"[Plugin: SYNC] Failed to store data: {e}"
write_notification(msg, "alert", timeNowTZ())
write_notification(msg, "alert", timeNowDB())
mylog("verbose", [msg])
return jsonify({"error": msg}), 500
msg = f"[Plugin: SYNC] Data received ({file_path_new})"
write_notification(msg, "info", timeNowTZ())
write_notification(msg, "info", timeNowDB())
mylog("verbose", [msg])
return jsonify({"message": "Data received and stored successfully"}), 200

View File

@@ -3,7 +3,8 @@ import json
from const import *
from logger import mylog
from helper import timeNowTZ, timeNow, checkNewVersion
from helper import checkNewVersion
from utils.datetime_utils import timeNowDB, timeNow
# Register NetAlertX directories using runtime configuration
INSTALL_PATH = applicationPath
@@ -31,16 +32,14 @@ class app_state_class:
isNewVersionChecked (int): Timestamp of last version check.
"""
def __init__(
self,
currentState=None,
settingsSaved=None,
settingsImported=None,
showSpinner=None,
graphQLServerStarted=0,
processScan=False,
pluginsStates=None,
):
def __init__(self, currentState=None,
settingsSaved=None,
settingsImported=None,
showSpinner=None,
graphQLServerStarted=0,
processScan=False,
pluginsStates=None,
appVersion=None):
"""
Initialize the application state, optionally overwriting previous values.
@@ -55,14 +54,15 @@ class app_state_class:
graphQLServerStarted (int, optional): Initial GraphQL server timestamp.
processScan (bool, optional): Initial processScan flag.
pluginsStates (dict, optional): Initial plugin states to merge with previous state.
appVersion (str, optional): Application version.
"""
# json file containing the state to communicate with the frontend
stateFile = apiPath + "app_state.json"
previousState = ""
# Update self
self.lastUpdated = str(timeNowTZ())
self.lastUpdated = str(timeNowDB())
if os.path.exists(stateFile):
try:
with open(stateFile, "r") as json_file:
@@ -73,26 +73,28 @@ class app_state_class:
)
# Check if the file exists and recover previous values
if previousState != "":
self.settingsSaved = previousState.get("settingsSaved", 0)
self.settingsImported = previousState.get("settingsImported", 0)
self.processScan = previousState.get("processScan", False)
self.showSpinner = previousState.get("showSpinner", False)
self.isNewVersion = previousState.get("isNewVersion", False)
self.isNewVersionChecked = previousState.get("isNewVersionChecked", 0)
self.graphQLServerStarted = previousState.get("graphQLServerStarted", 0)
self.currentState = previousState.get("currentState", "Init")
self.pluginsStates = previousState.get("pluginsStates", {})
else: # init first time values
self.settingsSaved = 0
self.settingsImported = 0
self.showSpinner = False
self.processScan = False
self.isNewVersion = checkNewVersion()
self.isNewVersionChecked = int(timeNow().timestamp())
self.graphQLServerStarted = 0
self.currentState = "Init"
self.pluginsStates = {}
if previousState != "":
self.settingsSaved = previousState.get("settingsSaved", 0)
self.settingsImported = previousState.get("settingsImported", 0)
self.processScan = previousState.get("processScan", False)
self.showSpinner = previousState.get("showSpinner", False)
self.isNewVersion = previousState.get("isNewVersion", False)
self.isNewVersionChecked = previousState.get("isNewVersionChecked", 0)
self.graphQLServerStarted = previousState.get("graphQLServerStarted", 0)
self.currentState = previousState.get("currentState", "Init")
self.pluginsStates = previousState.get("pluginsStates", {})
self.appVersion = previousState.get("appVersion", "")
else: # init first time values
self.settingsSaved = 0
self.settingsImported = 0
self.showSpinner = False
self.processScan = False
self.isNewVersion = checkNewVersion()
self.isNewVersionChecked = int(timeNow().timestamp())
self.graphQLServerStarted = 0
self.currentState = "Init"
self.pluginsStates = {}
self.appVersion = ""
# Overwrite with provided parameters if supplied
if settingsSaved is not None:
@@ -112,9 +114,7 @@ class app_state_class:
for plugin, state in pluginsStates.items():
if plugin in self.pluginsStates:
# Only update existing keys if both are dicts
if isinstance(self.pluginsStates[plugin], dict) and isinstance(
state, dict
):
if isinstance(self.pluginsStates[plugin], dict) and isinstance(state, dict):
self.pluginsStates[plugin].update(state)
else:
# Replace if types don't match
@@ -123,7 +123,8 @@ class app_state_class:
# Optionally ignore or add new plugin entries
# To ignore new plugins, comment out the next line
self.pluginsStates[plugin] = state
if appVersion is not None:
self.appVersion = appVersion
# check for a new version every hour if not already running a new version
if self.isNewVersion is False and self.isNewVersionChecked + 3600 < int(
timeNow().timestamp()
@@ -157,15 +158,14 @@ class app_state_class:
# -------------------------------------------------------------------------------
# method to update the state
def updateState(
newState=None,
settingsSaved=None,
settingsImported=None,
showSpinner=None,
graphQLServerStarted=None,
processScan=None,
pluginsStates=None,
):
def updateState(newState = None,
settingsSaved = None,
settingsImported = None,
showSpinner = None,
graphQLServerStarted = None,
processScan = None,
pluginsStates=None,
appVersion=None):
"""
Convenience method to create or update the app state.
@@ -177,19 +177,19 @@ def updateState(
graphQLServerStarted (int, optional): Timestamp of GraphQL server start.
processScan (bool, optional): Flag indicating if a scan is active.
pluginsStates (dict, optional): Plugin state updates.
appVersion (str, optional): Application version.
Returns:
app_state_class: Updated state object.
"""
return app_state_class(
newState,
settingsSaved,
settingsImported,
showSpinner,
graphQLServerStarted,
processScan,
pluginsStates,
)
return app_state_class( newState,
settingsSaved,
settingsImported,
showSpinner,
graphQLServerStarted,
processScan,
pluginsStates,
appVersion)
# -------------------------------------------------------------------------------

View File

@@ -7,7 +7,6 @@ import os
import re
import unicodedata
import subprocess
from typing import Union
import pytz
import json
import requests
@@ -26,151 +25,6 @@ from logger import mylog, logResult
INSTALL_PATH = applicationPath
# -------------------------------------------------------------------------------
# DateTime
# -------------------------------------------------------------------------------
# Get the current time in the current TimeZone
def timeNowTZ():
if conf.tz:
return datetime.datetime.now(conf.tz).replace(microsecond=0)
else:
return datetime.datetime.now().replace(microsecond=0)
# if isinstance(conf.TIMEZONE, str):
# tz = pytz.timezone(conf.TIMEZONE)
# else:
# tz = conf.TIMEZONE
# return datetime.datetime.now(tz).replace(microsecond=0)
def timeNow():
return datetime.datetime.now().replace(microsecond=0)
def get_timezone_offset():
now = datetime.datetime.now(conf.tz)
offset_hours = now.utcoffset().total_seconds() / 3600
offset_formatted = "{:+03d}:{:02d}".format(
int(offset_hours), int((offset_hours % 1) * 60)
)
return offset_formatted
# -------------------------------------------------------------------------------
# Date and time methods
# -------------------------------------------------------------------------------
# # -------------------------------------------------------------------------------------------
# def format_date(date_str: str) -> str:
# """Format a date string as 'YYYY-MM-DD HH:MM'"""
# dt = datetime.datetime.fromisoformat(date_str) if isinstance(date_str, str) else date_str
# return dt.strftime('%Y-%m-%d %H:%M')
# # -------------------------------------------------------------------------------------------
# def format_date_diff(date1: str, date2: str) -> str:
# """Return difference between two dates formatted as 'Xd HH:MM'"""
# dt1 = datetime.datetime.fromisoformat(date1) if isinstance(date1, str) else date1
# dt2 = datetime.datetime.fromisoformat(date2) if isinstance(date2, str) else date2
# delta = dt2 - dt1
# days = delta.days
# hours, remainder = divmod(delta.seconds, 3600)
# minutes = remainder // 60
# return f"{days}d {hours:02}:{minutes:02}"
# -------------------------------------------------------------------------------------------
def format_date_iso(date1: str) -> str:
"""Return ISO 8601 string for a date or None if empty"""
if date1 is None:
return None
dt = datetime.datetime.fromisoformat(date1) if isinstance(date1, str) else date1
return dt.isoformat()
# -------------------------------------------------------------------------------------------
def format_event_date(date_str: str, event_type: str) -> str:
"""Format event date with fallback rules."""
if date_str:
return format_date(date_str)
elif event_type == "<missing event>":
return "<missing event>"
else:
return "<still connected>"
# -------------------------------------------------------------------------------------------
def ensure_datetime(dt: Union[str, datetime.datetime, None]) -> datetime.datetime:
if dt is None:
return timeNowTZ()
if isinstance(dt, str):
return datetime.datetime.fromisoformat(dt)
return dt
def parse_datetime(dt_str):
if not dt_str:
return None
try:
# Try ISO8601 first
return datetime.datetime.fromisoformat(dt_str)
except ValueError:
# Try RFC1123 / HTTP format
try:
return datetime.datetime.strptime(dt_str, "%a, %d %b %Y %H:%M:%S GMT")
except ValueError:
return None
def format_date(date_str: str) -> str:
dt = parse_datetime(date_str)
return dt.strftime("%Y-%m-%d %H:%M") if dt else "invalid"
def format_date_diff(date1, date2):
"""
Return difference between two datetimes as 'Xd HH:MM'.
Uses app timezone if datetime is naive.
date2 can be None (uses now).
"""
# Get timezone from settings
tz_name = get_setting_value("TIMEZONE") or "UTC"
tz = pytz.timezone(tz_name)
def parse_dt(dt):
if dt is None:
return datetime.datetime.now(tz)
if isinstance(dt, str):
try:
dt_parsed = email.utils.parsedate_to_datetime(dt)
except Exception:
# fallback: parse ISO string
dt_parsed = datetime.datetime.fromisoformat(dt)
# convert naive GMT/UTC to app timezone
if dt_parsed.tzinfo is None:
dt_parsed = tz.localize(dt_parsed)
else:
dt_parsed = dt_parsed.astimezone(tz)
return dt_parsed
return dt if dt.tzinfo else tz.localize(dt)
dt1 = parse_dt(date1)
dt2 = parse_dt(date2)
delta = dt2 - dt1
total_minutes = int(delta.total_seconds() // 60)
days, rem_minutes = divmod(total_minutes, 1440) # 1440 mins in a day
hours, minutes = divmod(rem_minutes, 60)
return {
"text": f"{days}d {hours:02}:{minutes:02}",
"days": days,
"hours": hours,
"minutes": minutes,
"total_minutes": total_minutes,
}
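Since the sessions endpoints above consume this return shape via the relocated utils.datetime_utils.format_date_diff, a quick worked example (dates illustrative; assumes the relocated function keeps this return shape and takes the tz_name argument seen at the call sites):
# Hedged sketch - illustrative only.
from utils.datetime_utils import format_date_diff
delta = format_date_diff("2025-11-09 08:00:00", "2025-11-10 09:30:00", "UTC")
print(delta["text"])           # 1d 01:30
print(delta["total_minutes"])  # 1530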
# -------------------------------------------------------------------------------
# File system permission handling
@@ -438,10 +292,12 @@ def get_setting_value(key):
value = setting_value_to_python_type(set_type, set_value)
else:
value = setting_value_to_python_type(set_type, str(set_value))
SETTINGS_SECONDARYCACHE[key] = value
return value
# Otherwise fall back to retrive from json
# Otherwise fall back to retrieve from json
setting = get_setting(key)
if setting is not None:
@@ -525,11 +381,8 @@ def setting_value_to_python_type(set_type, set_value):
elif dataType == "array" and elementType == "select":
if isinstance(set_value, str):
try:
value = json.loads(set_value.replace("'", '"'))
# reverse transformations to all entries
value = reverseTransformers(value, transformers)
value = json.loads(set_value.replace("'", "\""))
except json.JSONDecodeError as e:
mylog(
"none",
@@ -541,7 +394,10 @@ def setting_value_to_python_type(set_type, set_value):
elif isinstance(set_value, list):
value = set_value
elif dataType == "object" and elementType == "input":
# Always apply transformers (base64, etc.) to array entries
value = reverseTransformers(value, transformers)
elif dataType == 'object' and elementType == 'input':
if isinstance(set_value, str):
try:
value = reverseTransformers(json.loads(set_value), transformers)
@@ -887,38 +743,42 @@ def collect_lang_strings(json, pref, stringSqlParams):
# -------------------------------------------------------------------------------
# Get the value from the buildtimestamp.txt and initialize it if missing
def getBuildTimeStamp():
def getBuildTimeStampAndVersion():
"""
Retrieves the build timestamp from 'front/buildtimestamp.txt' within the
application directory.
If the file does not exist, it is created and initialized with the value '0'.
Retrieves the build timestamp and version from files within the
application directory. Initializes them if missing.
Returns:
int: The integer value of the build timestamp read from the file.
Returns 0 if the file is empty or just initialized.
tuple: (int buildTimestamp, str version)
"""
buildTimestamp = 0
build_timestamp_path = os.path.join(applicationPath, "front/buildtimestamp.txt")
files_defaults = [
('front/buildtimestamp.txt', '0'),
('.VERSION', 'unknown')
]
# Ensure file exists, initialize if missing
if not os.path.exists(build_timestamp_path):
with open(build_timestamp_path, "w") as f:
f.write("0")
results = []
# Now safely read the timestamp
with open(build_timestamp_path, "r") as f:
buildTimestamp = int(f.read().strip() or 0)
for filename, default in files_defaults:
path = os.path.join(applicationPath, filename)
if not os.path.exists(path):
with open(path, 'w') as f:
f.write(default)
with open(path, 'r') as f:
content = f.read().strip() or default
# Convert buildtimestamp to int, leave version as string
value = int(content) if filename.endswith('buildtimestamp.txt') else content
results.append(value)
return tuple(results)
return buildTimestamp
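A hedged sketch of the new return contract (values illustrative; the real call sites appear in checkNewVersion and initialise.py below):
# Hedged sketch - illustrative only.
buildTimestamp, version = getBuildTimeStampAndVersion()
# e.g. buildTimestamp == 1731196800, version == "25.11.1"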
# -------------------------------------------------------------------------------
def checkNewVersion():
mylog("debug", ["[Version check] Checking if new version available"])
newVersion = False
buildTimestamp = getBuildTimeStamp()
buildTimestamp, _version = getBuildTimeStampAndVersion()
try:
response = requests.get(
@@ -946,8 +806,8 @@ def checkNewVersion():
)
if releaseTimestamp > buildTimestamp + 600:
mylog("none", ["[Version check] New version of the container available!"])
newVersion = True
mylog('none', ["[Version check] New version of the container available!"])
return True
else:
mylog("none", ["[Version check] Running the latest version."])
else:
@@ -956,7 +816,7 @@ def checkNewVersion():
["[Version check] ⚠ ERROR: Received unexpected response from GitHub."],
)
return newVersion
return False
# -------------------------------------------------------------------------------

View File

@@ -8,23 +8,18 @@ import shutil
import re
# Register NetAlertX libraries
import conf
from const import fullConfPath, fullConfFolder, default_tz
from helper import (
getBuildTimeStamp,
fixPermissions,
collect_lang_strings,
updateSubnets,
timeNowTZ,
generate_random_string,
)
import conf
from const import fullConfPath, applicationPath, fullConfFolder, default_tz
from helper import getBuildTimeStampAndVersion, fixPermissions, collect_lang_strings, updateSubnets, isJsonObject, setting_value_to_python_type, get_setting_value, generate_random_string
from utils.datetime_utils import timeNowDB
from app_state import updateState
from logger import mylog
from api import update_api
from scheduler import schedule_class
from plugin import plugin_manager, print_plugin_info
from plugin_utils import get_plugins_configs, get_set_value_for_init
from utils.plugin_utils import get_plugins_configs, get_set_value_for_init
from messaging.in_app import write_notification
from utils.crypto_utils import get_random_bytes
# ===============================================================================
# Initialise user defined values
@@ -674,39 +669,23 @@ def importConfigs(pm, db, all_plugins):
# -----------------
# HANDLE APP was upgraded message - clear cache
# Check if app was upgraded
buildTimestamp = getBuildTimeStamp()
cur_version = conf.VERSION
mylog("debug", [f"[Config] buildTimestamp: '{buildTimestamp}'"])
mylog("debug", [f"[Config] conf.VERSION : '{cur_version}'"])
if str(cur_version) != str(buildTimestamp):
mylog("none", ["[Config] App upgraded 🚀"])
buildTimestamp, new_version = getBuildTimeStampAndVersion()
prev_version = conf.VERSION
mylog('debug', [f"[Config] buildTimestamp | prev_version | .VERSION file: '{buildTimestamp}|{prev_version}|{new_version}'"])
if str(prev_version) != str(new_version):
mylog('none', ['[Config] App upgraded 🚀'])
# ccd(key, default, config_dir, name, inputtype, options, group, events=None, desc="", setJsonMetadata=None, overrideTemplate=None, forceDefault=False)
ccd(
"VERSION",
buildTimestamp,
c_d,
"_KEEP_",
"_KEEP_",
"_KEEP_",
"_KEEP_",
None,
"_KEEP_",
None,
None,
True,
)
write_notification(
'[Upgrade] : App upgraded 🚀 Please clear the cache: <ol> <li>Click OK below</li> <li>Clear the browser cache (shift + browser refresh button)</li> <li> Clear app cache with the <i class="fa-solid fa-rotate"></i> (reload) button in the header</li><li>Go to Settings and click Save</li> </ol> Check out new features and what has changed in the <a href="https://github.com/jokob-sk/NetAlertX/releases" target="_blank">📓 release notes</a>.',
"interrupt",
timeNowTZ(),
)
ccd('VERSION', new_version , c_d, '_KEEP_', '_KEEP_', '_KEEP_', '_KEEP_', None, "_KEEP_", None, None, True)
write_notification(f'[Upgrade] : App upgraded from {prev_version} to {new_version} 🚀 Please clear the cache: <ol> <li>Click OK below</li> <li>Clear the browser cache (shift + browser refresh button)</li> <li> Clear app cache with the <i class="fa-solid fa-rotate"></i> (reload) button in the header</li><li>Go to Settings and click Save</li> </ol> Check out new features and what has changed in the <a href="https://github.com/jokob-sk/NetAlertX/releases" target="_blank">📓 release notes</a>.', 'interrupt', timeNowDB())
# -----------------
# Initialization finished, update DB and API endpoints
@@ -738,19 +717,13 @@ def importConfigs(pm, db, all_plugins):
# settingsImported = None (timestamp),
# showSpinner = False (1/0),
# graphQLServerStarted = 1 (1/0))
updateState(
"Config imported",
conf.lastImportedConfFile,
conf.lastImportedConfFile,
False,
1,
)
msg = "[Config] Imported new settings config"
mylog("minimal", msg)
updateState("Config imported", conf.lastImportedConfFile, conf.lastImportedConfFile, False, 1, None, None, new_version)
msg = '[Config] Imported new settings config'
mylog('minimal', msg)
# front-end app log logging
write_notification(msg, "info", timeNowTZ())
write_notification(msg, 'info', timeNowDB())
return pm, all_plugins, True

View File

@@ -4,20 +4,17 @@ import datetime
import threading
import queue
import logging
from zoneinfo import ZoneInfo
# Register NetAlertX directories
INSTALL_PATH="/app"
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
# NetAlertX imports
import conf
from const import *
# -------------------------------------------------------------------------------
# duplicated from helper to avoid a circular import
# -------------------------------------------------------------------------------
def timeNowTZ():
if conf.tz:
return datetime.datetime.now(conf.tz).replace(microsecond=0)
else:
return datetime.datetime.now().replace(microsecond=0)
from utils.datetime_utils import timeNowTZ
# -------------------------------------------------------------------------------

View File

@@ -17,7 +17,10 @@ from helper import (
timeNowTZ,
)
NOTIFICATION_API_FILE = apiPath + "user_notifications.json"
import conf
from const import applicationPath, logPath, apiPath, confFileName, reportTemplatesPath
from logger import mylog
from utils.datetime_utils import timeNowDB
# Show Frontend User Notification
@@ -35,7 +38,7 @@ def write_notification(content, level="alert", timestamp=None):
None
"""
if timestamp is None:
timestamp = timeNowTZ()
timestamp = timeNowDB()
# Generate GUID
guid = str(uuid.uuid4())

View File

@@ -24,6 +24,7 @@ from helper import (
)
from logger import mylog
from db.sql_safe_builder import create_safe_condition_builder
from utils.datetime_utils import get_timezone_offset
# ===============================================================================
# REPORTING

View File

@@ -12,12 +12,12 @@ from logger import mylog, Logger
from helper import (
generate_mac_links,
removeDuplicateNewLines,
timeNowTZ,
write_file,
get_setting_value,
get_timezone_offset,
)
from messaging.in_app import write_notification
from utils.datetime_utils import timeNowDB, get_timezone_offset
# -----------------------------------------------------------------------------
@@ -71,15 +71,15 @@ class NotificationInstance:
else:
self.HasNotifications = True
self.GUID = str(uuid.uuid4())
self.DateTimeCreated = timeNowTZ()
self.DateTimePushed = ""
self.Status = "new"
self.JSON = JSON
self.Text = ""
self.HTML = ""
self.PublishedVia = ""
self.Extra = Extra
self.GUID = str(uuid.uuid4())
self.DateTimeCreated = timeNowDB()
self.DateTimePushed = ""
self.Status = "new"
self.JSON = JSON
self.Text = ""
self.HTML = ""
self.PublishedVia = ""
self.Extra = Extra
if self.HasNotifications:
# if not notiStruc.json['data'] and not notiStruc.text and not notiStruc.html:
@@ -113,9 +113,9 @@ class NotificationInstance:
mail_html = mail_html.replace("<NEW_VERSION>", newVersionText)
# Report "REPORT_DATE" in Header & footer
timeFormated = timeNowTZ().strftime("%Y-%m-%d %H:%M")
mail_text = mail_text.replace("<REPORT_DATE>", timeFormated)
mail_html = mail_html.replace("<REPORT_DATE>", timeFormated)
timeFormated = timeNowDB()
mail_text = mail_text.replace('<REPORT_DATE>', timeFormated)
mail_html = mail_html.replace('<REPORT_DATE>', timeFormated)
# Report "SERVER_NAME" in Header & footer
mail_text = mail_text.replace("<SERVER_NAME>", socket.gethostname())
@@ -226,7 +226,7 @@ class NotificationInstance:
# Updates the Published properties
def updatePublishedVia(self, newPublishedVia):
self.PublishedVia = newPublishedVia
self.DateTimePushed = timeNowTZ()
self.DateTimePushed = timeNowDB()
self.upsert()
# create or update a notification
@@ -284,17 +284,15 @@ class NotificationInstance:
# Clear the Pending Email flag from all events and devices
def clearPendingEmailFlag(self):
# Clean Pending Alert Events
self.db.sql.execute(
"""
# Clean Pending Alert Events
self.db.sql.execute("""
UPDATE Devices SET devLastNotification = ?
WHERE devMac IN (
SELECT eve_MAC FROM Events
WHERE eve_PendingAlertEmail = 1
)
""",
(timeNowTZ(),),
)
""", (timeNowDB(),))
self.db.sql.execute("""
UPDATE Events SET eve_PendingAlertEmail = 0

View File

@@ -1,7 +1,9 @@
import os
import uuid
from const import logPath
from logger import mylog
from utils.datetime_utils import timeNowDB
class UserEventsQueueInstance:
@@ -78,3 +80,44 @@ class UserEventsQueueInstance:
mylog("minimal", ["[UserEventsQueueInstance] Processed event: ", event])
return removed
def add_event(self, action):
"""
Append an action to the execution queue log file.
Args:
action (str): Description of the action to queue.
Returns:
tuple: (success: bool, message: str)
success - True if the event was successfully added.
message - Log message describing the result.
"""
timestamp = timeNowDB()
# Generate GUID
guid = str(uuid.uuid4())
if not action or not isinstance(action, str):
msg = "[UserEventsQueueInstance] Invalid or missing action"
mylog('none', [msg])
return False, msg
try:
with open(self.log_file, "a") as f:
f.write(f"[{timestamp}]|{guid}|{action}\n")
msg = f'[UserEventsQueueInstance] Action "{action}" added to the execution queue.'
mylog('minimal', [msg])
return True, msg
except Exception as e:
msg = f"[UserEventsQueueInstance] ERROR Failed to write to {self.log_file}: {e}"
mylog('none', [msg])
return False, msg
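A hedged usage sketch (the action string is hypothetical; this mirrors what the /logs/add-to-execution-queue endpoint does):
# Hedged sketch - illustrative only.
queue = UserEventsQueueInstance()
ok, msg = queue.add_event("run|ARPSCAN")  # hypothetical action string
if not ok:
    mylog("none", [msg])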

View File

@@ -8,30 +8,17 @@ from concurrent.futures import ThreadPoolExecutor, as_completed
# Register NetAlertX modules
import conf
from const import logPath, reportTemplatesPath, pluginsPath, applicationPath
from logger import mylog, Logger
from helper import (
timeNowTZ,
get_file_content,
get_setting,
get_setting_value,
)
from const import pluginsPath, logPath, applicationPath, reportTemplatesPath
from logger import mylog, Logger
from helper import get_file_content, write_file, get_setting, get_setting_value
from utils.datetime_utils import timeNowTZ, timeNowDB
from app_state import updateState
from api import update_api
from plugin_utils import (
logEventStatusCounts,
get_plugin_setting_obj,
print_plugin_info,
list_to_csv,
combine_plugin_objects,
resolve_wildcards_arr,
handle_empty,
decode_and_rename_files,
)
from utils.plugin_utils import logEventStatusCounts, get_plugin_string, get_plugin_setting_obj, print_plugin_info, list_to_csv, combine_plugin_objects, resolve_wildcards_arr, handle_empty, custom_plugin_decoder, decode_and_rename_files
from models.notification_instance import NotificationInstance
from messaging.in_app import write_notification
from models.user_events_queue_instance import UserEventsQueueInstance
from crypto_utils import generate_deterministic_guid
from utils.crypto_utils import generate_deterministic_guid
# -------------------------------------------------------------------------------
@@ -40,7 +27,7 @@ class plugin_manager:
self.db = db
self.all_plugins = all_plugins
self.plugin_states = {}
self.name_plugins_checked = None
self.plugin_checks = {}
# object cache of settings and schedules for faster lookups
self._cache = {}
@@ -112,12 +99,11 @@ class plugin_manager:
execute_plugin(self.db, self.all_plugins, plugin)
# Update plugin states in app_state
current_plugin_state = self.get_plugin_states(
prefix
) # get latest plugin state
updateState(
pluginsStates={prefix: current_plugin_state.get(prefix, {})}
)
current_plugin_state = self.get_plugin_states(prefix) # get latest plugin state
# mylog('debug', f'current_plugin_state: {current_plugin_state}')
updateState(pluginsStates={prefix: current_plugin_state.get(prefix, {})})
# update last run time
if runType == "schedule":
@@ -189,26 +175,17 @@ class plugin_manager:
# Notify user about executed events (if applicable)
if len(executed_events) > 0 and executed_events:
executed_events_message = ", ".join(executed_events)
mylog(
"minimal",
[
"[check_and_run_user_event] INFO: Executed events: ",
executed_events_message,
],
)
write_notification(
f"[Ad-hoc events] Events executed: {executed_events_message}",
"interrupt",
timeNowTZ(),
)
executed_events_message = ', '.join(executed_events)
mylog('minimal', ['[check_and_run_user_event] INFO: Executed events: ', executed_events_message])
write_notification(f"[Ad-hoc events] Events executed: {executed_events_message}", "interrupt", timeNowDB())
return
# -------------------------------------------------------------------------------
def handle_run(self, runType):
mylog("minimal", ["[", timeNowTZ(), "] START Run: ", runType])
mylog('minimal', ['[', timeNowDB(), '] START Run: ', runType])
# run the plugin
for plugin in self.all_plugins:
if plugin["unique_prefix"] == runType:
@@ -224,7 +201,7 @@ class plugin_manager:
pluginsStates={pluginName: current_plugin_state.get(pluginName, {})}
)
mylog("minimal", ["[", timeNowTZ(), "] END Run: ", runType])
mylog('minimal', ['[', timeNowDB(), '] END Run: ', runType])
return
@@ -232,6 +209,8 @@ class plugin_manager:
def handle_test(self, runType):
mylog("minimal", ["[", timeNowTZ(), "] [Test] START Test: ", runType])
mylog('minimal', ['[', timeNowDB(), '] [Test] START Test: ', runType])
# Prepare test samples
sample_json = json.loads(
get_file_content(reportTemplatesPath + "webhook_json_sample.json")
@@ -258,7 +237,7 @@ class plugin_manager:
If plugin_name is provided, only calculates stats for that plugin.
Structure per plugin:
{
"lastChanged": str,
"lastDataChange": str,
"totalObjects": int,
"newObjects": int,
"changedObjects": int,
@@ -267,32 +246,30 @@ class plugin_manager:
"""
sql = self.db.sql
plugin_states = {}
now_str = timeNowDB()
if plugin_name: # Only compute for single plugin
sql.execute(
"""
SELECT MAX(DateTimeChanged) AS last_changed,
COUNT(*) AS total_objects,
SUM(CASE WHEN DateTimeCreated = DateTimeChanged THEN 1 ELSE 0 END) AS new_objects,
CURRENT_TIMESTAMP AS state_updated
SUM(CASE WHEN DateTimeCreated = DateTimeChanged THEN 1 ELSE 0 END) AS new_objects
FROM Plugins_Objects
WHERE Plugin = ?
""",
(plugin_name,),
)
row = sql.fetchone()
last_changed, total_objects, new_objects, state_updated = (
row if row else ("", 0, 0, "")
)
last_changed, total_objects, new_objects = row if row else ("", 0, 0)
new_objects = new_objects or 0 # ensure it's int
changed_objects = total_objects - new_objects
plugin_states[plugin_name] = {
"lastChanged": last_changed or "",
"lastDataChange": last_changed or "",
"totalObjects": total_objects or 0,
"newObjects": new_objects or 0,
"changedObjects": changed_objects or 0,
"stateUpdated": state_updated or "",
"stateUpdated": now_str
}
# Save in memory
@@ -303,26 +280,19 @@ class plugin_manager:
SELECT Plugin,
MAX(DateTimeChanged) AS last_changed,
COUNT(*) AS total_objects,
SUM(CASE WHEN DateTimeCreated = DateTimeChanged THEN 1 ELSE 0 END) AS new_objects,
CURRENT_TIMESTAMP AS state_updated
SUM(CASE WHEN DateTimeCreated = DateTimeChanged THEN 1 ELSE 0 END) AS new_objects
FROM Plugins_Objects
GROUP BY Plugin
""")
for (
plugin,
last_changed,
total_objects,
new_objects,
state_updated,
) in sql.fetchall():
for plugin, last_changed, total_objects, new_objects in sql.fetchall():
new_objects = new_objects or 0 # ensure it's int
changed_objects = total_objects - new_objects
plugin_states[plugin] = {
"lastChanged": last_changed or "",
"lastDataChange": last_changed or "",
"totalObjects": total_objects or 0,
"newObjects": new_objects or 0,
"changedObjects": changed_objects or 0,
"stateUpdated": state_updated or "",
"stateUpdated": now_str
}
# Save in memory
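For reference, a minimal sketch of one per-plugin entry produced by the stats pass above; the plugin prefix and all values are illustrative:

plugin_states["ARPSCAN"] = {
    "lastDataChange": "2025-11-04 18:09:11",  # MAX(DateTimeChanged) in Plugins_Objects
    "totalObjects": 42,
    "newObjects": 5,         # rows where DateTimeCreated == DateTimeChanged
    "changedObjects": 37,    # totalObjects - newObjects
    "stateUpdated": "2025-11-04 18:10:00",    # now_str, i.e. timeNowDB() at stats time
}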
@@ -908,8 +878,8 @@ def process_plugin_events(db, plugin, plugEventsArr):
if isMissing:
# if wasn't missing before, mark as changed
if tmpObj.status != "missing-in-last-scan":
tmpObj.changed = timeNowTZ().strftime("%Y-%m-%d %H:%M:%S")
tmpObj.status = "missing-in-last-scan"
tmpObj.changed = timeNowDB()
tmpObj.status = "missing-in-last-scan"
# mylog('debug', [f'[Plugins] Missing from last scan (PrimaryID | SecondaryID): {tmpObj.primaryId} | {tmpObj.secondaryId}'])
# Merge existing plugin objects with newly discovered ones and update existing ones with new values

View File

@@ -2,13 +2,15 @@ import sys
import subprocess
import os
import re
import datetime
from dateutil import parser
# Register NetAlertX directories
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
sys.path.extend([f"{INSTALL_PATH}/server"])
from helper import timeNowTZ, get_setting_value, check_IP_format
from helper import get_setting_value, check_IP_format
from utils.datetime_utils import timeNowDB, normalizeTimeStamp
from logger import mylog, Logger
from const import vendorsPath, vendorsPathNewest, sql_generateGuid
from models.device_instance import DeviceInstance
@@ -55,11 +57,10 @@ def exclude_ignored_devices(db):
sql.execute(query)
# -------------------------------------------------------------------------------
def update_devices_data_from_scan(db):
sql = db.sql # TO-DO
startTime = timeNowTZ().strftime("%Y-%m-%d %H:%M:%S")
#-------------------------------------------------------------------------------
def update_devices_data_from_scan (db):
sql = db.sql #TO-DO
startTime = timeNowDB()
# Update Last Connection
mylog("debug", "[Update Devices] 1 Last Connection")
@@ -424,10 +425,10 @@ def print_scan_stats(db):
mylog("verbose", f" {row['cur_ScanMethod']}: {row['scan_method_count']}")
# -------------------------------------------------------------------------------
def create_new_devices(db):
sql = db.sql # TO-DO
startTime = timeNowTZ()
#-------------------------------------------------------------------------------
def create_new_devices (db):
sql = db.sql # TO-DO
startTime = timeNowDB()
# Insert events for new devices from CurrentScan (not yet in Devices)
@@ -597,43 +598,86 @@ def create_new_devices(db):
mylog("debug", "[New Devices] New Devices end")
db.commitDB()
Removed:

# -------------------------------------------------------------------------------
def update_devices_names(pm):
    sql = pm.db.sql
    resolver = NameResolver(pm.db)
    device_handler = DeviceInstance(pm.db)

    # --- Short-circuit if no name-resolution plugin has changed ---
    name_plugins = ["DIGSCAN", "NSLOOKUP", "NBTSCAN", "AVAHISCAN"]

    # Retrieve last time name resolution was checked (string or datetime)
    last_checked_str = pm.name_plugins_checked
    last_checked_dt = (
        parser.parse(last_checked_str)
        if isinstance(last_checked_str, str)
        else last_checked_str
    )

    # Collect valid state update timestamps for name-related plugins
    state_times = []
    for p in name_plugins:
        state_updated = pm.plugin_states.get(p, {}).get("stateUpdated")
        if state_updated and state_updated.strip():  # skip empty or None
            state_times.append(state_updated)

    # Determine the latest valid stateUpdated timestamp
    latest_state_str = max(state_times, default=None)
    latest_state_dt = parser.parse(latest_state_str) if latest_state_str else None

    # Skip if no plugin state changed since last check
    if last_checked_dt and latest_state_dt and latest_state_dt <= last_checked_dt:
        mylog(
            "debug",
            "[Update Device Name] No relevant name plugin changes since last check — skipping update.",
        )
        return

Added:

#-------------------------------------------------------------------------------
# Check if plugins data changed
def check_plugin_data_changed(pm, plugins_to_check):
    """
    Checks whether any of the specified plugins have updated data since their
    last recorded check time.

    This function compares each plugin's `lastDataChange` timestamp from
    `pm.plugin_states` with its last check timestamp from `pm.plugin_checks`.
    If a plugin's data has changed more recently than it was last checked,
    it is flagged as changed.

    Args:
        pm (object): Plugin manager or state object containing:
            - plugin_states (dict): Per-plugin metadata with "lastDataChange".
            - plugin_checks (dict): Per-plugin last check timestamps.
        plugins_to_check (list[str]): List of plugin names to validate.

    Returns:
        bool: True if any plugin data has changed since last check,
        otherwise False.

    Logging:
        - Logs unexpected or invalid timestamps at level 'none'.
        - Logs when no changes are detected at level 'debug'.
        - Logs each changed plugin at level 'debug'.
    """

    plugins_changed = []

    for plugin_name in plugins_to_check:
        last_data_change = pm.plugin_states.get(plugin_name, {}).get("lastDataChange")
        last_data_check = pm.plugin_checks.get(plugin_name, "")

        if not last_data_change:
            continue

        # Normalize and validate last_data_change timestamp
        last_changed_ts = normalizeTimeStamp(last_data_change)
        if last_changed_ts is None:
            mylog('none', f'[check_plugin_data_changed] Unexpected last_data_change timestamp for {plugin_name} (input|output): ({last_data_change}|{last_changed_ts})')

        # Normalize and validate last_data_check timestamp
        last_data_check_ts = normalizeTimeStamp(last_data_check)
        if last_data_check_ts is None:
            mylog('none', f'[check_plugin_data_changed] Unexpected last_data_check timestamp for {plugin_name} (input|output): ({last_data_check}|{last_data_check_ts})')

        # Track which plugins have newer data than their last check
        if last_data_check_ts is None or last_changed_ts is None or last_changed_ts > last_data_check_ts:
            mylog('debug', f'[check_plugin_data_changed] {plugin_name} changed (last_changed_ts|last_data_check_ts): ({last_changed_ts}|{last_data_check_ts})')
            plugins_changed.append(plugin_name)

    # Skip if no plugin data changed since last check
    if len(plugins_changed) == 0:
        mylog('debug', f'[check_plugin_data_changed] No relevant plugin changes since last check for {plugins_to_check}')
        return False

    # Continue if changes detected
    for p in plugins_changed:
        mylog('debug', f'[check_plugin_data_changed] {p} changed (last_data_change|last_data_check): ({pm.plugin_states.get(p, {}).get("lastDataChange")}|{pm.plugin_checks.get(p)})')
    return True

#-------------------------------------------------------------------------------
def update_devices_names(pm):
    # --- Short-circuit if no name-resolution plugin has changed ---
    if not check_plugin_data_changed(pm, ["DIGSCAN", "NSLOOKUP", "NBTSCAN", "AVAHISCAN"]):
        mylog('debug', '[Update Device Name] No relevant plugin changes since last check.')
        return

    mylog('debug', '[Update Device Name] Check if unknown devices present to resolve names for or if REFRESH_FQDN enabled.')

    sql = pm.db.sql
    resolver = NameResolver(pm.db)
    device_handler = DeviceInstance(pm.db)
nameNotFound = "(name not found)"
# Define resolution strategies in priority order
@@ -759,10 +803,7 @@ def update_devices_names(pm):
# --- Step 3: Log last checked time ---
# After resolving names, update last checked
sql = pm.db.sql
sql.execute("SELECT CURRENT_TIMESTAMP")
row = sql.fetchone()
pm.name_plugins_checked = row[0] if row else None
pm.plugin_checks = {"DIGSCAN": timeNowDB(), "AVAHISCAN": timeNowDB(), "NSLOOKUP": timeNowDB(), "NBTSCAN": timeNowDB() }
# -------------------------------------------------------------------------------
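A minimal sketch of how this change-detection gate behaves end to end, assuming `pm.plugin_states` and `pm.plugin_checks` hold the timestamp strings described above (all values illustrative):

pm.plugin_states = {"NSLOOKUP": {"lastDataChange": "2025-11-04 18:09:11"}}
pm.plugin_checks = {"NSLOOKUP": "2025-11-04 18:00:00"}

check_plugin_data_changed(pm, ["NSLOOKUP"])   # True  -> names get re-resolved
pm.plugin_checks["NSLOOKUP"] = timeNowDB()    # recorded after the resolution run
check_plugin_data_changed(pm, ["NSLOOKUP"])   # False -> update_devices_names() returns early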

View File

@@ -71,7 +71,8 @@ class NameResolver:
if match_ip:
name += " (IP match)"
regexes = get_setting_value("NEWDEV_NAME_CLEANUP_REGEX") or []
regexes = get_setting_value('NEWDEV_NAME_CLEANUP_REGEX') or []
mylog('trace', [f"[cleanDeviceName] applying regexes: {regexes}"])
for rgx in regexes:
mylog("trace", [f"[cleanDeviceName] applying regex: {rgx}"])
name = re.sub(rgx, "", name)

View File

@@ -12,8 +12,9 @@ from scan.device_handling import (
exclude_ignored_devices,
update_devices_data_from_scan,
)
from helper import timeNowTZ, get_setting_value
from helper import get_setting_value
from db.db_helper import print_table_schema
from utils.datetime_utils import timeNowDB, timeNowTZ
from logger import mylog, Logger
from messaging.reporting import skip_repeated_notifications
@@ -132,11 +133,11 @@ def create_sessions_snapshot(db):
db.commitDB()
# -------------------------------------------------------------------------------
def insert_events(db):
sql = db.sql # TO-DO
startTime = timeNowTZ()
#-------------------------------------------------------------------------------
def insert_events (db):
sql = db.sql #TO-DO
startTime = timeNowDB()
# Check device down
mylog("debug", "[Events] - 1 - Devices down")
sql.execute(f"""INSERT INTO Events (eve_MAC, eve_IP, eve_DateTime,
@@ -198,7 +199,7 @@ def insert_events(db):
def insertOnlineHistory(db):
sql = db.sql # TO-DO: Implement sql object
scanTimestamp = timeNowTZ()
scanTimestamp = timeNowDB()
# Query to fetch all relevant device counts in one go
query = """

View File

@@ -0,0 +1,211 @@
#!/usr/bin/env python
import os
import pathlib
import sys
from dateutil import parser
import datetime
import re
import pytz
from pytz import timezone
from typing import Union
from zoneinfo import ZoneInfo
import email.utils
# Register NetAlertX directories
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
import conf
from const import *
#-------------------------------------------------------------------------------
# DateTime
#-------------------------------------------------------------------------------
DATETIME_PATTERN = "%Y-%m-%d %H:%M:%S"
DATETIME_REGEX = re.compile(r'^\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}$')
def timeNowTZ():
if conf.tz:
return datetime.datetime.now(conf.tz).replace(microsecond=0)
else:
return datetime.datetime.now().replace(microsecond=0)
def timeNow():
return datetime.datetime.now().replace(microsecond=0)
def get_timezone_offset():
now = datetime.datetime.now(conf.tz)
offset_hours = now.utcoffset().total_seconds() / 3600
offset_formatted = "{:+03d}:{:02d}".format(int(offset_hours), int((offset_hours % 1) * 60))
return offset_formatted
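# Illustrative: with the configured timezone at UTC+11 (e.g. Sydney in DST) this
# returns '+11:00'; a -3.5 h offset (e.g. 'America/St_Johns' in winter) yields '-03:30'.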
def timeNowDB(local=True):
"""
Return the current time (local or UTC) as ISO 8601 for DB storage.
Safe for SQLite, PostgreSQL, etc.
Example local: '2025-11-04 18:09:11'
Example UTC: '2025-11-04 07:09:11'
"""
if local:
try:
if isinstance(conf.tz, datetime.tzinfo):
tz = conf.tz
elif conf.tz:
tz = ZoneInfo(conf.tz)
else:
tz = None
except Exception:
tz = None
return datetime.datetime.now(tz).strftime(DATETIME_PATTERN)
else:
return datetime.datetime.now(datetime.UTC).strftime(DATETIME_PATTERN)
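# Illustrative values, assuming the configured timezone is UTC+11 (e.g. Sydney in DST):
#   timeNowDB()             -> '2025-11-10 10:11:34'  (local time)
#   timeNowDB(local=False)  -> '2025-11-09 23:11:34'  (UTC)
# Note: datetime.UTC requires Python 3.11+; datetime.timezone.utc is the older spelling.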
#-------------------------------------------------------------------------------
# Date and time methods
#-------------------------------------------------------------------------------
def normalizeTimeStamp(inputTimeStamp):
"""
Normalize various timestamp formats into a datetime.datetime object.
Supports:
- SQLite-style 'YYYY-MM-DD HH:MM:SS'
- ISO 8601 'YYYY-MM-DDTHH:MM:SSZ'
- Epoch timestamps (int or float)
- datetime.datetime objects (returned as-is)
- Empty or invalid values (returns None)
"""
if inputTimeStamp is None:
return None
# Already a datetime
if isinstance(inputTimeStamp, datetime.datetime):
return inputTimeStamp
# Epoch timestamp (integer or float)
if isinstance(inputTimeStamp, (int, float)):
try:
return datetime.datetime.fromtimestamp(inputTimeStamp)
except (OSError, OverflowError, ValueError):
return None
# String formats (SQLite / ISO8601)
if isinstance(inputTimeStamp, str):
inputTimeStamp = inputTimeStamp.strip()
if not inputTimeStamp:
return None
try:
# match the "2025-11-08 14:32:10" format
pattern = DATETIME_REGEX
if pattern.match(inputTimeStamp):
return datetime.datetime.strptime(inputTimeStamp, DATETIME_PATTERN)
else:
# Handles SQLite and ISO8601 automatically
return parser.parse(inputTimeStamp)
except Exception:
return None
# Unrecognized type
return None
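# Illustrative behaviour:
#   normalizeTimeStamp('2025-11-08 14:32:10')  -> datetime.datetime(2025, 11, 8, 14, 32, 10)
#   normalizeTimeStamp('2025-11-08T14:32:10Z') -> tz-aware datetime parsed by dateutil
#   normalizeTimeStamp(1731076330)             -> local datetime from the epoch value
#   normalizeTimeStamp('') / None              -> None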
# -------------------------------------------------------------------------------------------
def format_date_iso(date1: str) -> str:
"""Return ISO 8601 string for a date or None if empty"""
if date1 is None:
return None
dt = datetime.datetime.fromisoformat(date1) if isinstance(date1, str) else date1
return dt.isoformat()
# -------------------------------------------------------------------------------------------
def format_event_date(date_str: str, event_type: str) -> str:
"""Format event date with fallback rules."""
if date_str:
return format_date(date_str)
elif event_type == "<missing event>":
return "<missing event>"
else:
return "<still connected>"
# -------------------------------------------------------------------------------------------
def ensure_datetime(dt: Union[str, datetime.datetime, None]) -> datetime.datetime:
if dt is None:
return timeNowTZ()
if isinstance(dt, str):
return datetime.datetime.fromisoformat(dt)
return dt
def parse_datetime(dt_str):
if not dt_str:
return None
try:
# Try ISO8601 first
return datetime.datetime.fromisoformat(dt_str)
except ValueError:
# Try RFC1123 / HTTP format
try:
return datetime.datetime.strptime(dt_str, '%a, %d %b %Y %H:%M:%S GMT')
except ValueError:
return None
def format_date(date_str: str) -> str:
try:
dt = parse_datetime(date_str)
if dt.tzinfo is None:
# Set timezone if missing — change to timezone.utc if you prefer UTC
now = datetime.datetime.now(conf.tz)
dt = dt.replace(tzinfo=now.astimezone().tzinfo)
return dt.astimezone().isoformat()
except (ValueError, AttributeError, TypeError):
return "invalid"
def format_date_diff(date1, date2, tz_name):
"""
Return difference between two datetimes as 'Xd HH:MM'.
Uses app timezone if datetime is naive.
date2 can be None (uses now).
"""
# Get timezone from settings
tz = pytz.timezone(tz_name)
def parse_dt(dt):
if dt is None:
return datetime.datetime.now(tz)
if isinstance(dt, str):
try:
dt_parsed = email.utils.parsedate_to_datetime(dt)
except (ValueError, TypeError):
# fallback: parse ISO string
dt_parsed = datetime.datetime.fromisoformat(dt)
# convert naive GMT/UTC to app timezone
if dt_parsed.tzinfo is None:
dt_parsed = tz.localize(dt_parsed)
else:
dt_parsed = dt_parsed.astimezone(tz)
return dt_parsed
return dt if dt.tzinfo else tz.localize(dt)
dt1 = parse_dt(date1)
dt2 = parse_dt(date2)
delta = dt2 - dt1
total_minutes = int(delta.total_seconds() // 60)
days, rem_minutes = divmod(total_minutes, 1440) # 1440 mins in a day
hours, minutes = divmod(rem_minutes, 60)
return {
"text": f"{days}d {hours:02}:{minutes:02}",
"days": days,
"hours": hours,
"minutes": minutes,
"total_minutes": total_minutes
}
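# Illustrative usage (naive inputs are localized to the given timezone):
#   format_date_diff('2025-11-04 10:00:00', '2025-11-05 12:30:00', 'UTC')
#   -> {'text': '1d 02:30', 'days': 1, 'hours': 2, 'minutes': 30, 'total_minutes': 1590}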

View File

@@ -3,13 +3,13 @@ import json
import conf
from logger import mylog
from utils.crypto_utils import decrypt_data
from const import pluginsPath, apiPath
from helper import (
get_file_content,
get_setting_value,
setting_value_to_python_type,
)
from crypto_utils import decrypt_data
module_name = "Plugin utils"

View File

@@ -7,7 +7,8 @@ import pytest
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from helper import get_setting_value, timeNowTZ
from helper import get_setting_value
from utils.datetime_utils import timeNowDB
from api_server.api_server_start import app
@@ -41,9 +42,12 @@ def b64(sql: str) -> str:
# Device lifecycle via dbquery endpoints
# -----------------------------
def test_dbquery_create_device(client, api_token, test_mac):
now = timeNowDB()
sql = f"""
INSERT INTO Devices (devMac, devName, devVendor, devOwner, devFirstConnection, devLastConnection, devLastIP)
VALUES ('{test_mac}', 'UnitTestDevice', 'TestVendor', 'UnitTest', '{timeNowTZ()}', '{timeNowTZ()}', '192.168.100.22' )
VALUES ('{test_mac}', 'UnitTestDevice', 'TestVendor', 'UnitTest', '{now}', '{now}', '192.168.100.22' )
"""
resp = client.post("/dbquery/write", json={"rawSql": b64(sql)}, headers=auth_headers(api_token))
print(resp.json)

View File

@@ -10,7 +10,7 @@ import pytest
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from helper import timeNowTZ, get_setting_value
from helper import get_setting_value
from api_server.api_server_start import app

View File

@@ -11,7 +11,7 @@ import pytest
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from helper import timeNowTZ, get_setting_value
from helper import get_setting_value
from api_server.api_server_start import app
@pytest.fixture(scope="session")

View File

@@ -11,7 +11,8 @@ from datetime import datetime, timedelta
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from helper import timeNowTZ, get_setting_value
from helper import get_setting_value
from utils.datetime_utils import timeNowTZ
from api_server.api_server_start import app
@pytest.fixture(scope="session")

View File

@@ -0,0 +1,170 @@
import sys
import pathlib
import sqlite3
import random
import string
import uuid
import pytest
from datetime import datetime, timedelta
INSTALL_PATH = "/app"
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from helper import get_setting_value
from api_server.api_server_start import app
@pytest.fixture(scope="session")
def api_token():
return get_setting_value("API_TOKEN")
@pytest.fixture
def client():
with app.test_client() as client:
yield client
@pytest.fixture
def test_mac():
# Generate a unique MAC for each test run
return "AA:BB:CC:" + ":".join(f"{random.randint(0,255):02X}" for _ in range(3))
def auth_headers(token):
return {"Authorization": f"Bearer {token}"}
def test_graphql_debug_get(client):
"""GET /graphql should return the debug string"""
resp = client.get("/graphql")
assert resp.status_code == 200
assert resp.data.decode() == "NetAlertX GraphQL server running."
def test_graphql_post_unauthorized(client):
"""POST /graphql without token should return 401"""
query = {"query": "{ devices { devName devMac } }"}
resp = client.post("/graphql", json=query)
assert resp.status_code == 401
assert "Unauthorized access attempt" in resp.json.get("error", "")
# --- DEVICES TESTS ---
def test_graphql_post_devices(client, api_token):
"""POST /graphql with a valid token should return device data"""
query = {
"query": """
{
devices {
devices {
devGUID
devGroup
devIsRandomMac
devParentChildrenCount
}
count
}
}
"""
}
resp = client.post("/graphql", json=query, headers=auth_headers(api_token))
assert resp.status_code == 200
body = resp.get_json()
# GraphQL spec: response always under "data"
assert "data" in body
data = body["data"]
assert "devices" in data
assert isinstance(data["devices"]["devices"], list)
assert isinstance(data["devices"]["count"], int)
# --- SETTINGS TESTS ---
def test_graphql_post_settings(client, api_token):
"""POST /graphql should return settings data"""
query = {
"query": """
{
settings {
settings { setKey setValue setGroup }
count
}
}
"""
}
resp = client.post("/graphql", json=query, headers=auth_headers(api_token))
assert resp.status_code == 200
data = resp.json.get("data", {})
assert "settings" in data
assert isinstance(data["settings"]["settings"], list)
# --- LANGSTRINGS TESTS ---
def test_graphql_post_langstrings_specific(client, api_token):
"""Retrieve a specific langString in a given language"""
query = {
"query": """
{
langStrings(langCode: "en_us", langStringKey: "settings_other_scanners") {
langStrings { langCode langStringKey langStringText }
count
}
}
"""
}
resp = client.post("/graphql", json=query, headers=auth_headers(api_token))
assert resp.status_code == 200
data = resp.json.get("data", {}).get("langStrings", {})
assert data["count"] >= 1
for entry in data["langStrings"]:
assert entry["langCode"] == "en_us"
assert entry["langStringKey"] == "settings_other_scanners"
assert isinstance(entry["langStringText"], str)
def test_graphql_post_langstrings_fallback(client, api_token):
"""Fallback to en_us if requested language string is empty"""
query = {
"query": """
{
langStrings(langCode: "de_de", langStringKey: "settings_other_scanners") {
langStrings { langCode langStringKey langStringText }
count
}
}
"""
}
resp = client.post("/graphql", json=query, headers=auth_headers(api_token))
assert resp.status_code == 200
data = resp.json.get("data", {}).get("langStrings", {})
assert data["count"] >= 1
# Ensure fallback occurred if de_de text is empty
for entry in data["langStrings"]:
assert entry["langStringText"] != ""
def test_graphql_post_langstrings_all_languages(client, api_token):
"""Retrieve all languages for a given key"""
query = {
"query": """
{
enStrings: langStrings(langCode: "en_us", langStringKey: "settings_other_scanners") {
langStrings { langCode langStringKey langStringText }
count
}
deStrings: langStrings(langCode: "de_de", langStringKey: "settings_other_scanners") {
langStrings { langCode langStringKey langStringText }
count
}
}
"""
}
resp = client.post("/graphql", json=query, headers=auth_headers(api_token))
assert resp.status_code == 200
data = resp.json.get("data", {})
assert "enStrings" in data
assert "deStrings" in data
# At least one string in each language
assert data["enStrings"]["count"] >= 1
assert data["deStrings"]["count"] >= 1
# Ensure langCode matches
assert all(e["langCode"] == "en_us" for e in data["enStrings"]["langStrings"])
assert all(e["langCode"] == "de_de" for e in data["deStrings"]["langStrings"])

View File

@@ -10,7 +10,7 @@ import pytest
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from helper import timeNowTZ, get_setting_value
from helper import get_setting_value
from api_server.api_server_start import app

View File

@@ -0,0 +1,61 @@
import sys
import random
import pytest
INSTALL_PATH = "/app"
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from helper import get_setting_value
from api_server.api_server_start import app
# ----------------------------
# Fixtures
# ----------------------------
@pytest.fixture(scope="session")
def api_token():
return get_setting_value("API_TOKEN")
@pytest.fixture
def client():
with app.test_client() as client:
yield client
def auth_headers(token):
return {"Authorization": f"Bearer {token}"}
# ----------------------------
# Logs Endpoint Tests
# ----------------------------
def test_clean_log(client, api_token):
resp = client.delete("/logs?file=app.log", headers=auth_headers(api_token))
assert resp.status_code == 200
assert resp.json.get("success") is True
def test_clean_log_not_allowed(client, api_token):
resp = client.delete("/logs?file=not_allowed.log", headers=auth_headers(api_token))
assert resp.status_code == 400
assert resp.json.get("success") is False
# ----------------------------
# Execution Queue Endpoint Tests
# ----------------------------
def test_add_to_execution_queue(client, api_token):
action_name = f"test_action_{random.randint(0,9999)}"
resp = client.post(
"/logs/add-to-execution-queue",
json={"action": action_name},
headers=auth_headers(api_token)
)
assert resp.status_code == 200
assert resp.json.get("success") is True
assert action_name in resp.json.get("message", "")
def test_add_to_execution_queue_missing_action(client, api_token):
resp = client.post(
"/logs/add-to-execution-queue",
json={},
headers=auth_headers(api_token)
)
assert resp.status_code == 400
assert resp.json.get("success") is False
assert "Missing required 'action'" in resp.json.get("error", "")

View File

@@ -11,7 +11,7 @@ import pytest
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from helper import timeNowTZ, get_setting_value
from helper import get_setting_value
from api_server.api_server_start import app
@pytest.fixture(scope="session")

View File

@@ -11,7 +11,8 @@ from datetime import datetime, timedelta
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from helper import timeNowTZ, get_setting_value
from helper import get_setting_value
from utils.datetime_utils import timeNowTZ, timeNowDB
from api_server.api_server_start import app
@pytest.fixture(scope="session")
@@ -49,7 +50,7 @@ def test_create_session(client, api_token, test_mac):
payload = {
"mac": test_mac,
"ip": "192.168.1.100",
"start_time": timeNowTZ(),
"start_time": timeNowDB(),
"event_type_conn": "Connected",
"event_type_disc": "Disconnected"
}
@@ -64,7 +65,7 @@ def test_list_sessions(client, api_token, test_mac):
payload = {
"mac": test_mac,
"ip": "192.168.1.100",
"start_time": timeNowTZ()
"start_time": timeNowDB()
}
client.post("/sessions/create", json=payload, headers=auth_headers(api_token))
@@ -81,7 +82,7 @@ def test_device_sessions_by_period(client, api_token, test_mac):
payload = {
"mac": test_mac,
"ip": "192.168.1.200",
"start_time": timeNowTZ()
"start_time": timeNowDB()
}
resp_create = client.post("/sessions/create", json=payload, headers=auth_headers(api_token))
assert resp_create.status_code == 200
@@ -116,7 +117,7 @@ def test_device_session_events(client, api_token, test_mac):
payload = {
"mac": test_mac,
"ip": "192.168.1.250",
"start_time": timeNowTZ()
"start_time": timeNowDB()
}
resp_create = client.post(
"/sessions/create",
@@ -164,7 +165,7 @@ def test_delete_session(client, api_token, test_mac):
payload = {
"mac": test_mac,
"ip": "192.168.1.100",
"start_time": timeNowTZ()
"start_time": timeNowDB()
}
client.post("/sessions/create", json=payload, headers=auth_headers(api_token))

View File

@@ -11,7 +11,7 @@ from datetime import datetime, timedelta
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from helper import timeNowTZ, get_setting_value
from helper import get_setting_value
from api_server.api_server_start import app
@pytest.fixture(scope="session")

View File