Mirror of https://github.com/jokob-sk/NetAlertX.git (synced 2025-12-07 09:36:05 -08:00)

Compare commits: `067336dcc1 ... fix-pr-130` (48 commits)
Commits in this range (SHA1):

1dee812ce6, 5c44fd8fea, bd691f01b1, 624fd87ee7, 5d1c63375b, 8c982cd476, 36e5751221, 5af760f5ee,
dfd836527e, 8d5a663817, fbb4a2f8b4, 54bce6505b, 6da47cc830, 9cabbf3622, 6c28a08bee, 86e3decd4e,
e14e0bb9e8, b6023d1373, 1812cc8ef8, e64c490c8a, 5df39f984a, d007ed711a, 61824abb9f, 33c5548fe1,
fd41c395ae, 1a980844f0, 82e018e284, e0e1233b1c, 74677f940e, 21a4d20579, 9634e4e0f7, 00a47ab5d3,
59b417705e, 525d082f3d, ba3481759b, 7125cea29b, 8586c5a307, 0d81315809, 8f193f1e2c, b1eef8aa09,
531b66effe, 5e4ad10fe0, 541b932b6d, 2bf3ff9f00, 2da17f272c, 7bcb4586b2, d3326b3362, b9d3f430fe
**.github/copilot-instructions.md** (vendored, 3 changed lines)
@@ -39,10 +39,11 @@ Backend loop phases (see `server/__main__.py` and `server/plugin.py`): `once`, `

## API/Endpoints quick map

- Flask app: `server/api_server/api_server_start.py` exposes routes like `/device/<mac>`, `/devices`, `/devices/export/{csv,json}`, `/devices/import`, `/devices/totals`, `/devices/by-status`, plus `nettools`, `events`, `sessions`, `dbquery`, `metrics`, `sync`.
- Authorization: all routes expect the header `Authorization: Bearer <API_TOKEN>` via `get_setting_value('API_TOKEN')`.
- All responses need to return `"success": <True|False>`; if `False`, an `"error"` message needs to be returned as well, e.g. `{"success": False, "error": f"No stored open ports for Device"}`.

## Conventions & helpers to reuse

- Settings: add/modify via `ccd()` in `server/initialise.py` or per‑plugin manifest. Never hardcode ports or secrets; use `get_setting_value()`.
- Logging: use `mylog(level, [message])`; levels: none/minimal/verbose/debug/trace. `none` is used for the most important messages that should always appear, such as exceptions.
- Time/MAC/strings: `helper.py` (`timeNowDB`, `normalize_mac`, sanitizers). Validate MACs before DB writes.
- DB helpers: prefer `server/db/db_helper.py` functions (e.g., `get_table_json`, device condition helpers) over raw SQL in new paths.
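A minimal sketch of the success/error response convention and the logging helper described above; only `mylog`, the level names, and the response shape come from the excerpt, while the import paths and the helper function are assumptions.

```python
# Illustrative only: import paths are assumed, not taken from the repository.
from logger import mylog          # assumed module for mylog()

def open_ports_response(ports):
    """Build a response dict following the "success"/"error" convention."""
    if not ports:
        # 'verbose' is one of the documented levels: none/minimal/verbose/debug/trace
        mylog("verbose", ["No stored open ports for Device"])
        return {"success": False, "error": "No stored open ports for Device"}
    return {"success": True, "ports": ports}
```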
**.github/workflows/docker_dev.yml** (vendored, 6 changed lines)
@@ -47,6 +47,12 @@ jobs:

```yaml
      id: get_version
      run: echo "version=Dev" >> $GITHUB_OUTPUT

    # --- debug output
    - name: Debug version
      run: |
        echo "GITHUB_REF: $GITHUB_REF"
        echo "Version: '${{ steps.get_version.outputs.version }}'"

    # --- Write the timestamped version to .VERSION file
    - name: Create .VERSION file
      run: echo "${{ steps.timestamp.outputs.version }}" > .VERSION
```
**.github/workflows/docker_prod.yml** (vendored, 22 changed lines)
@@ -32,14 +32,34 @@ jobs:

```yaml
    - name: Set up Docker Buildx
      uses: docker/setup-buildx-action@v3

    # --- Previous approach: Get release version from tag
    - name: Set up dynamic build ARGs
      id: getargs
      run: echo "version=$(cat ./stable/VERSION)" >> $GITHUB_OUTPUT

    - name: Get release version
      id: get_version_prev
      run: echo "::set-output name=version::${GITHUB_REF#refs/tags/}"

    - name: Create .VERSION file
      run: echo "${{ steps.get_version.outputs.version }}" >> .VERSION_PREV

    # --- Get release version from tag
    - name: Get release version
      id: get_version
      run: echo "version=${GITHUB_REF#refs/tags/}" >> $GITHUB_OUTPUT

    # --- debug output
    - name: Debug version
      run: |
        echo "GITHUB_REF: $GITHUB_REF"
        echo "Version: '${{ steps.get_version.outputs.version }}'"
        echo "Version prev: '${{ steps.get_version_prev.outputs.version }}'"

    # --- Write version to .VERSION file
    - name: Create .VERSION file
      run: echo -n "${{ steps.get_version.outputs.version }}" > .VERSION

    # --- Generate Docker metadata and tags
    - name: Docker meta
```
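The two `Get release version` steps above show the older and the current way of exporting a step output in GitHub Actions; a minimal contrast (step ids are illustrative):

```yaml
    # Deprecated workflow-command syntax
    - id: version_old_style
      run: echo "::set-output name=version::${GITHUB_REF#refs/tags/}"

    # Current syntax: append key=value to the file in $GITHUB_OUTPUT
    - id: version_new_style
      run: echo "version=${GITHUB_REF#refs/tags/}" >> $GITHUB_OUTPUT
```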
**.gitignore** (vendored, 1 changed line)
@@ -11,6 +11,7 @@ nohup.out

```
config/*
.ash_history
.VERSION
.VERSION_PREV
config/pialert.conf
config/app.conf
db/*
```
**Dockerfile** (33 changed lines)
@@ -26,13 +26,23 @@ ENV PATH="/opt/venv/bin:$PATH"

```dockerfile
# Install build dependencies
COPY requirements.txt /tmp/requirements.txt
RUN apk add --no-cache \
    bash \
    shadow \
    python3 \
    python3-dev \
    gcc \
    musl-dev \
    libffi-dev \
    openssl-dev \
    git \
    rust \
    cargo \
    && python -m venv /opt/venv

# Create virtual environment owned by root, but readable by everyone else. This makes it easy to copy
# into hardened stage without worrying about permissions and keeps image size small. Keeping the commands
# together makes for a slightly smaller image size.
# Upgrade pip/wheel/setuptools and install Python packages
RUN python -m pip install --upgrade pip setuptools wheel && \
    pip install --no-cache-dir -r /tmp/requirements.txt && \
    chmod -R u-rwx,g-rwx /opt

# second stage is the main runtime stage with just the minimum required to run the application
```

@@ -138,6 +148,7 @@ RUN install -d -o ${NETALERTX_USER} -g ${NETALERTX_GROUP} -m 700 ${READ_WRITE_FO

```dockerfile
# Copy version information into the image
COPY --chown=${NETALERTX_USER}:${NETALERTX_GROUP} .[V]ERSION ${NETALERTX_APP}/.VERSION
COPY --chown=${NETALERTX_USER}:${NETALERTX_GROUP} .[V]ERSION ${NETALERTX_APP}/.VERSION_PREV

# Copy the virtualenv from the builder stage
COPY --from=builder --chown=20212:20212 ${VIRTUAL_ENV} ${VIRTUAL_ENV}
```

@@ -147,12 +158,12 @@ COPY --from=builder --chown=20212:20212 ${VIRTUAL_ENV} ${VIRTUAL_ENV}

```dockerfile
# This is done after the copy of the venv to ensure the venv is in place
# although it may be quicker to do it before the copy, it keeps the image
# layers smaller to do it after.
RUN for vfile in .VERSION .VERSION_PREV; do \
        if [ ! -f "${NETALERTX_APP}/${vfile}" ]; then \
            echo "DEVELOPMENT 00000000" > "${NETALERTX_APP}/${vfile}"; \
        fi; \
        chown 20212:20212 "${NETALERTX_APP}/${vfile}"; \
    done && \
    apk add --no-cache libcap && \
    setcap cap_net_raw+ep /bin/busybox && \
    setcap cap_net_raw,cap_net_admin+eip /usr/bin/nmap && \
```
@@ -34,9 +34,7 @@ Get visibility of what's going on on your WIFI/LAN network and enable presence d

## 🚀 Quick Start

> [!WARNING]
> ⚠️ **Important:** The documentation has been recently updated and some instructions may have changed.
> If you are using the currently live production image, please follow the instructions on [Docker Hub](https://hub.docker.com/r/jokobsk/netalertx) for building and running the container.
> These docs reflect the latest development version and may differ from the production image.
> ⚠️ **Important:** The docker-compose has recently changed. Carefully read the [Migration guide](https://jokob-sk.github.io/NetAlertX/MIGRATION/?h=migrat#12-migration-from-netalertx-v25524) for detailed instructions.

Start NetAlertX in seconds with Docker:

@@ -44,8 +42,7 @@ Start NetAlertX in seconds with Docker:
@@ -53,6 +50,8 @@ docker run -d \

```bash
docker run -d \
  --network=host \
  --restart unless-stopped \
  -v /local_data_dir:/data \
  -v /etc/localtime:/etc/localtime:ro \
  --tmpfs /tmp:uid=20211,gid=20211,mode=1700 \
  -e PORT=20211 \
  ghcr.io/jokob-sk/netalertx:latest
```

Note: Your `/local_data_dir` should contain a `config` and `db` folder.

To deploy a containerized instance directly from the source repository, execute the following BASH sequence:

```bash
git clone https://github.com/jokob-sk/NetAlertX.git
```
**docs/API.md** (24 changed lines)
@@ -1,4 +1,4 @@

# API Documentation

This API provides programmatic access to **devices, events, sessions, metrics, network tools, and sync** in NetAlertX. It is implemented as a **REST and GraphQL server**. All requests require authentication via an **API Token** (`API_TOKEN` setting) unless explicitly noted. For example, to authorize a GraphQL request, use an `Authorization: Bearer API_TOKEN` header as in the example below:

@@ -36,9 +36,15 @@ Authorization: Bearer <API_TOKEN>

If the token is missing or invalid, the server will return:

```json
{
  "success": false,
  "message": "ERROR: Not authorized",
  "error": "Forbidden"
}
```

HTTP Status: **403 Forbidden**
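A quick smoke test of the Bearer-token authorization described above; host, port, and token are placeholders, and `/devices/totals` is one of the documented read-only endpoints:

```sh
curl -s "http://<server>:<GRAPHQL_PORT>/devices/totals" \
  -H "Authorization: Bearer <API_TOKEN>"
```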
---

## Base URL

@@ -54,6 +60,8 @@ http://<server>:<GRAPHQL_PORT>/

> [!TIP]
> When retrieving devices or settings, try the GraphQL API endpoint first, as it is read-optimized.

### Standard REST Endpoints

* [Device API Endpoints](API_DEVICE.md) – Manage individual devices
* [Devices Collection](API_DEVICES.md) – Bulk operations on multiple devices
* [Events](API_EVENTS.md) – Device event logging and management

@@ -69,6 +77,18 @@

* [Logs](API_LOGS.md) – Purging of logs and adding to the event execution queue for user-triggered events
* [DB query](API_DBQUERY.md) (⚠ Internal) – Low-level database access; use other endpoints if possible

### MCP Server Bridge

NetAlertX includes an **MCP (Model Context Protocol) Server Bridge** that provides AI assistants with access to NetAlertX functionality through standardized tools. MCP endpoints are available at `/mcp/sse/*` paths and mirror the functionality of standard REST endpoints:

* `/mcp/sse` – Server-Sent Events endpoint for MCP client connections
* `/mcp/sse/openapi.json` – OpenAPI specification for available MCP tools
* `/mcp/sse/device/*`, `/mcp/sse/devices/*`, `/mcp/sse/nettools/*`, `/mcp/sse/events/*` – MCP-enabled versions of REST endpoints

MCP endpoints require the same Bearer token authentication as REST endpoints.

**📖 See [MCP Server Bridge API](API_MCP.md) for complete documentation, tool specifications, and integration examples.**

See [Testing](API_TESTS.md) for example requests and usage.

---
@@ -16,10 +16,14 @@ All `/dbquery/*` endpoints require an API token in the HTTP headers:

```
Authorization: Bearer <API_TOKEN>
```

If the token is missing or invalid (HTTP 403):

```json
{
  "success": false,
  "message": "ERROR: Not authorized",
  "error": "Forbidden"
}
```

---
@@ -41,6 +41,8 @@ Manage a **single device** by its MAC address. Operations include retrieval, upd

* Device not found → HTTP 404
* Unauthorized → HTTP 403

**MCP Integration**: Available as `get_device_info` and `set_device_alias` tools. See [MCP Server Bridge API](API_MCP.md).

---

## 2. Update Device Fields
@@ -207,6 +207,93 @@ The Devices Collection API provides operations to **retrieve, manage, import/exp

---

### 9. Search Devices

* **POST** `/devices/search`
  Search for devices by MAC, name, or IP address.

**Request Body** (JSON):

```json
{
  "query": ".50"
}
```

**Response**:

```json
{
  "success": true,
  "devices": [
    {
      "devName": "Test Device",
      "devMac": "AA:BB:CC:DD:EE:FF",
      "devLastIP": "192.168.1.50"
    }
  ]
}
```

---

### 10. Get Latest Device

* **GET** `/devices/latest`
  Get the most recently connected device.

**Response**:

```json
[
  {
    "devName": "Latest Device",
    "devMac": "AA:BB:CC:DD:EE:FF",
    "devLastIP": "192.168.1.100",
    "devFirstConnection": "2025-12-07 10:30:00"
  }
]
```

---

### 11. Get Network Topology

* **GET** `/devices/network/topology`
  Get network topology showing device relationships.

**Response**:

```json
{
  "nodes": [
    {
      "id": "AA:AA:AA:AA:AA:AA",
      "name": "Router",
      "vendor": "VendorA"
    }
  ],
  "links": [
    {
      "source": "AA:AA:AA:AA:AA:AA",
      "target": "BB:BB:BB:BB:BB:BB",
      "port": "eth1"
    }
  ]
}
```

---

## MCP Tools

These endpoints are also available as **MCP Tools** for AI assistant integration:

- `list_devices`, `search_devices`, `get_latest_device`, `get_network_topology`, `set_device_alias`

📖 See [MCP Server Bridge API](API_MCP.md) for AI integration details.

---

## Example `curl` Requests

**Get All Devices**:
@@ -247,3 +334,26 @@ curl -X GET "http://<server_ip>:<GRAPHQL_PORT>/devices/by-status?status=online"

```sh
  -H "Authorization: Bearer <API_TOKEN>"
```

**Search Devices**:

```sh
curl -X POST "http://<server_ip>:<GRAPHQL_PORT>/devices/search" \
  -H "Authorization: Bearer <API_TOKEN>" \
  -H "Content-Type: application/json" \
  --data '{"query": "192.168.1"}'
```

**Get Latest Device**:

```sh
curl -X GET "http://<server_ip>:<GRAPHQL_PORT>/devices/latest" \
  -H "Authorization: Bearer <API_TOKEN>"
```

**Get Network Topology**:

```sh
curl -X GET "http://<server_ip>:<GRAPHQL_PORT>/devices/network/topology" \
  -H "Authorization: Bearer <API_TOKEN>"
```
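The same search call from a script; a minimal sketch using the Python `requests` library, with the server, port, and token placeholders from the curl examples above:

```python
import requests

BASE = "http://<server_ip>:<GRAPHQL_PORT>"          # placeholder, as in the curl examples
HEADERS = {"Authorization": "Bearer <API_TOKEN>"}   # placeholder API token

# POST /devices/search with a JSON body, mirroring the curl example above
resp = requests.post(f"{BASE}/devices/search", headers=HEADERS, json={"query": "192.168.1"})
resp.raise_for_status()
for dev in resp.json().get("devices", []):
    print(dev["devMac"], dev.get("devName"), dev.get("devLastIP"))
```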
@@ -88,7 +88,56 @@ The Events API provides access to **device event logs**, allowing creation, retr

---

### 4. Get Recent Events

* **GET** `/events/recent` → Get events from the last 24 hours
* **GET** `/events/<hours>` → Get events from the last N hours

**Response** (JSON):

```json
{
  "success": true,
  "hours": 24,
  "count": 5,
  "events": [
    {
      "eve_DateTime": "2025-12-07 12:00:00",
      "eve_EventType": "New Device",
      "eve_MAC": "AA:BB:CC:DD:EE:FF",
      "eve_IP": "192.168.1.100",
      "eve_AdditionalInfo": "Device detected"
    }
  ]
}
```

---

### 5. Get Latest Events

* **GET** `/events/last`
  Get the 10 most recent events.

**Response** (JSON):

```json
{
  "success": true,
  "count": 10,
  "events": [
    {
      "eve_DateTime": "2025-12-07 12:00:00",
      "eve_EventType": "Device Down",
      "eve_MAC": "AA:BB:CC:DD:EE:FF"
    }
  ]
}
```

---

### 6. Event Totals Over a Period

* **GET** `/sessions/totals?period=<period>`
  Return event and session totals over a given period.
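An example request for the recent-events endpoint described in section 4 above; host, port, and token are placeholders:

```sh
curl -X GET "http://<server_ip>:<GRAPHQL_PORT>/events/recent" \
  -H "Authorization: Bearer <API_TOKEN>"
```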
@@ -116,12 +165,25 @@ The Events API provides access to **device event logs**, allowing creation, retr

---

## MCP Tools

Event endpoints are available as **MCP Tools** for AI assistant integration:

- `get_recent_alerts`, `get_last_events`

📖 See [MCP Server Bridge API](API_MCP.md) for AI integration details.

---

## Notes

* All endpoints require **authorization** (Bearer token). Unauthorized requests return HTTP 403:

```json
{
  "success": false,
  "message": "ERROR: Not authorized",
  "error": "Forbidden"
}
```

* Events are stored in the **Events table** with the following fields:
**docs/API_MCP.md** (new file, 326 lines)
@@ -0,0 +1,326 @@

# MCP Server Bridge API

The **MCP (Model Context Protocol) Server Bridge** provides AI assistants with standardized access to NetAlertX functionality through tools and server-sent events. This enables AI systems to interact with your network monitoring data in real-time.

---

## Overview

The MCP Server Bridge exposes NetAlertX functionality as **MCP Tools** that AI assistants can call to:

- Search and retrieve device information
- Trigger network scans
- Get network topology and events
- Wake devices via Wake-on-LAN
- Access open port information
- Set device aliases

All MCP endpoints mirror the functionality of standard REST endpoints but are optimized for AI assistant integration.

---

## Authentication

MCP endpoints use the same **Bearer token authentication** as REST endpoints:

```http
Authorization: Bearer <API_TOKEN>
```

Unauthorized requests return HTTP 403:

```json
{
  "success": false,
  "message": "ERROR: Not authorized",
  "error": "Forbidden"
}
```

---

## MCP Connection Endpoint

### Server-Sent Events (SSE)

* **GET/POST** `/mcp/sse`

Main MCP connection endpoint for AI clients. Establishes a persistent connection using Server-Sent Events for real-time communication between AI assistants and NetAlertX.

**Connection Example**:

```javascript
const eventSource = new EventSource('/mcp/sse', {
  headers: {
    'Authorization': 'Bearer <API_TOKEN>'
  }
});

eventSource.onmessage = function(event) {
  const response = JSON.parse(event.data);
  console.log('MCP Response:', response);
};
```

---

## OpenAPI Specification

### Get MCP Tools Specification

* **GET** `/mcp/sse/openapi.json`

Returns the OpenAPI specification for all available MCP tools, describing the parameters and schemas for each tool.

**Response**:

```json
{
  "openapi": "3.0.0",
  "info": {
    "title": "NetAlertX Tools",
    "version": "1.1.0"
  },
  "servers": [{"url": "/"}],
  "paths": {
    "/devices/by-status": {
      "post": {"operationId": "list_devices"}
    },
    "/device/{mac}": {
      "post": {"operationId": "get_device_info"}
    },
    "/devices/search": {
      "post": {"operationId": "search_devices"}
    }
  }
}
```
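To inspect the available tools without an MCP client, the specification above can also be fetched directly; host, port, and token are placeholders:

```sh
curl -s "http://<server>:<GRAPHQL_PORT>/mcp/sse/openapi.json" \
  -H "Authorization: Bearer <API_TOKEN>"
```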
---

## Available MCP Tools

### Device Management Tools

| Tool | Endpoint | Description |
|------|----------|-------------|
| `list_devices` | `/mcp/sse/devices/by-status` | List devices by online status |
| `get_device_info` | `/mcp/sse/device/<mac>` | Get detailed device information |
| `search_devices` | `/mcp/sse/devices/search` | Search devices by MAC, name, or IP |
| `get_latest_device` | `/mcp/sse/devices/latest` | Get most recently connected device |
| `set_device_alias` | `/mcp/sse/device/<mac>/set-alias` | Set device friendly name |

### Network Tools

| Tool | Endpoint | Description |
|------|----------|-------------|
| `trigger_scan` | `/mcp/sse/nettools/trigger-scan` | Trigger network discovery scan |
| `get_open_ports` | `/mcp/sse/device/open_ports` | Get stored NMAP open ports for device |
| `wol_wake_device` | `/mcp/sse/nettools/wakeonlan` | Wake device using Wake-on-LAN |
| `get_network_topology` | `/mcp/sse/devices/network/topology` | Get network topology map |

### Event & Monitoring Tools

| Tool | Endpoint | Description |
|------|----------|-------------|
| `get_recent_alerts` | `/mcp/sse/events/recent` | Get events from last 24 hours |
| `get_last_events` | `/mcp/sse/events/last` | Get 10 most recent events |

---

## Tool Usage Examples

### Search Devices Tool

**Tool Call**:
```json
{
  "jsonrpc": "2.0",
  "id": "1",
  "method": "tools/call",
  "params": {
    "name": "search_devices",
    "arguments": {
      "query": "192.168.1"
    }
  }
}
```

**Response**:
```json
{
  "jsonrpc": "2.0",
  "id": "1",
  "result": {
    "content": [
      {
        "type": "text",
        "text": "{\n \"success\": true,\n \"devices\": [\n {\n \"devName\": \"Router\",\n \"devMac\": \"AA:BB:CC:DD:EE:FF\",\n \"devLastIP\": \"192.168.1.1\"\n }\n ]\n}"
      }
    ],
    "isError": false
  }
}
```

### Trigger Network Scan Tool

**Tool Call**:
```json
{
  "jsonrpc": "2.0",
  "id": "2",
  "method": "tools/call",
  "params": {
    "name": "trigger_scan",
    "arguments": {
      "type": "ARPSCAN"
    }
  }
}
```

**Response**:
```json
{
  "jsonrpc": "2.0",
  "id": "2",
  "result": {
    "content": [
      {
        "type": "text",
        "text": "{\n \"success\": true,\n \"message\": \"Scan triggered for type: ARPSCAN\"\n}"
      }
    ],
    "isError": false
  }
}
```

### Wake-on-LAN Tool

**Tool Call**:
```json
{
  "jsonrpc": "2.0",
  "id": "3",
  "method": "tools/call",
  "params": {
    "name": "wol_wake_device",
    "arguments": {
      "devMac": "AA:BB:CC:DD:EE:FF"
    }
  }
}
```

---

## Integration with AI Assistants

### Claude Desktop Integration

Add to your Claude Desktop `mcp.json` configuration:

```json
{
  "mcp": {
    "servers": {
      "netalertx": {
        "command": "node",
        "args": ["/path/to/mcp-client.js"],
        "env": {
          "NETALERTX_URL": "http://your-server:<GRAPHQL_PORT>",
          "NETALERTX_TOKEN": "your-api-token"
        }
      }
    }
  }
}
```

### Generic MCP Client

```python
import asyncio
import json
from mcp import ClientSession, StdioServerParameters
from mcp.client.stdio import stdio_client

async def main():
    # Connect to NetAlertX MCP server
    server_params = StdioServerParameters(
        command="curl",
        args=[
            "-N", "-H", "Authorization: Bearer <API_TOKEN>",
            "http://your-server:<GRAPHQL_PORT>/mcp/sse"
        ]
    )

    async with stdio_client(server_params) as (read, write):
        async with ClientSession(read, write) as session:
            # Initialize connection
            await session.initialize()

            # List available tools
            tools = await session.list_tools()
            print(f"Available tools: {[t.name for t in tools.tools]}")

            # Call a tool
            result = await session.call_tool("search_devices", {"query": "router"})
            print(f"Search result: {result}")

if __name__ == "__main__":
    asyncio.run(main())
```

---

## Error Handling

MCP tool calls return structured error information:

**Error Response**:
```json
{
  "jsonrpc": "2.0",
  "id": "1",
  "result": {
    "content": [
      {
        "type": "text",
        "text": "Error calling tool: Device not found"
      }
    ],
    "isError": true
  }
}
```

**Common Error Types**:
- `401/403` - Authentication failure
- `400` - Invalid parameters or missing required fields
- `404` - Resource not found (device, scan results, etc.)
- `500` - Internal server error

---

## Notes

* MCP endpoints require the same API token authentication as REST endpoints
* All MCP tools return JSON responses wrapped in MCP protocol format
* Server-Sent Events maintain persistent connections for real-time updates
* Tool parameters match their REST endpoint equivalents
* Error responses include both HTTP status codes and descriptive messages
* The MCP bridge automatically handles request/response serialization

---

## Related Documentation

* [Main API Overview](API.md) - Core REST API documentation
* [Device API](API_DEVICE.md) - Individual device management
* [Devices Collection API](API_DEVICES.md) - Bulk device operations
* [Network Tools API](API_NETTOOLS.md) - Wake-on-LAN, scans, network utilities
* [Events API](API_EVENTS.md) - Event logging and monitoring
@@ -241,3 +241,12 @@ curl -X POST "http://<server_ip>:<GRAPHQL_PORT>/nettools/nmap" \

```sh
curl "http://<server_ip>:<GRAPHQL_PORT>/nettools/internetinfo" \
  -H "Authorization: Bearer <API_TOKEN>"
```

---

## MCP Tools

Network tools are available as **MCP Tools** for AI assistant integration:

- `wol_wake_device`, `trigger_scan`, `get_open_ports`

📖 See [MCP Server Bridge API](API_MCP.md) for AI integration details.
@@ -16,8 +16,7 @@ Start the container via the **terminal** with a command similar to this one:
@@ -26,6 +25,8 @@ docker run \

```bash
docker run \
  --network=host \
  --restart unless-stopped \
  -v /local_data_dir:/data \
  -v /etc/localtime:/etc/localtime:ro \
  --tmpfs /tmp:uid=20211,gid=20211,mode=1700 \
  -e PORT=20211 \
```

Note: Your `/local_data_dir` should contain a `config` and `db` folder.

> [!NOTE]
> ⚠ The most important part is NOT to use the `-d` parameter, so that you can see the error when the container crashes. Use this error in your issue description.
@@ -1,4 +1,4 @@

# Device Management

The Main Info section is where most of a device's identifiable information is stored and edited. Some of the information is autodetected via various plugins. Initial values for most of the fields can be specified in the `NEWDEV` plugin.
@@ -1,9 +1,7 @@

# NetAlertX and Docker Compose

> [!WARNING]
> ⚠️ **Important:** The documentation has been recently updated and some instructions may have changed.
> If you are using the currently live production image, please follow the instructions on [Docker Hub](https://hub.docker.com/r/jokobsk/netalertx) for building and running the container.
> These docs reflect the latest development version and may differ from the production image.
> ⚠️ **Important:** The docker-compose has recently changed. Carefully read the [Migration guide](https://jokob-sk.github.io/NetAlertX/MIGRATION/?h=migrat#12-migration-from-netalertx-v25524) for detailed instructions.

Great care is taken to ensure NetAlertX meets the needs of everyone while being flexible enough for anyone. This document outlines how you can configure your docker-compose. There are many settings, so we recommend using the Baseline Docker Compose as-is, or modifying it for your system.

@@ -125,9 +123,9 @@ docker compose up

### Modification 1: Use a Local Folder (Bind Mount)

By default, the baseline compose file uses a single named volume (`netalertx_data`) mounted at `/data`. This single-volume layout is preferred because NetAlertX manages both configuration and the database under `/data` (for example, `/data/config` and `/data/db`) via its web UI. Using one named volume simplifies permissions and portability: Docker manages the storage and NetAlertX manages the files inside `/data`.

A two-volume layout that mounts `/data/config` and `/data/db` separately (for example, `netalertx_config` and `netalertx_db`) is supported for backward compatibility and some advanced workflows, but it is an abnormal/legacy layout and not recommended for new deployments.

However, if you prefer to have direct, file-level access to your configuration for manual editing, a "bind mount" is a simple alternative. This tells Docker to use a specific folder from your computer (the "host") inside the container, as shown in the sketch below.
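A minimal sketch of that bind-mount variant, assuming the host folder `/local_data_dir` used elsewhere in these docs; only the `volumes` section differs from the baseline compose file:

```yaml
services:
  netalertx:
    # ...same as the baseline docker-compose.yml...
    volumes:
      # Bind mount a host folder instead of the named volume netalertx_data,
      # so /data/config and /data/db can be edited directly on the host.
      - /local_data_dir:/data
```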
@@ -25,8 +25,7 @@ Head to [https://netalertx.com/](https://netalertx.com/) for more gifs and scree

```bash
docker run -d --rm --network=host \
  -v /local_data_dir:/data \
  -v /etc/localtime:/etc/localtime \
  --tmpfs /tmp:uid=20211,gid=20211,mode=1700 \
  -e PORT=20211 \
```

@@ -62,8 +61,7 @@ See alternative [docker-compose examples](https://github.com/jokob-sk/NetAlertX/

| Required | Path | Description |
| :------------- | :------------- | :-------------|
| ✅ | `:/data` | Folder which will contain the `/db/app.db`, `/config/app.conf` & `/config/devices.csv` ([read about devices.csv](https://github.com/jokob-sk/NetAlertX/blob/main/docs/DEVICES_BULK_EDITING.md)) files |
| ✅ | `/etc/localtime:/etc/localtime:ro` | Ensures the timezone is the same as on the server. |
| | `:/tmp/log` | Logs folder useful for debugging if you have issues setting up the container |
| | `:/tmp/api` | The [API endpoint](https://github.com/jokob-sk/NetAlertX/blob/main/docs/API.md) containing static (but regularly updated) json and other files. Path configurable via `NETALERTX_API` environment variable. |
@@ -1,9 +1,7 @@

# The NetAlertX Container Operator's Guide

> [!WARNING]
> ⚠️ **Important:** The documentation has been recently updated and some instructions may have changed.
> If you are using the currently live production image, please follow the instructions on [Docker Hub](https://hub.docker.com/r/jokobsk/netalertx) for building and running the container.
> These docs reflect the latest development version and may differ from the production image.
> ⚠️ **Important:** The docker-compose has recently changed. Carefully read the [Migration guide](https://jokob-sk.github.io/NetAlertX/MIGRATION/?h=migrat#12-migration-from-netalertx-v25524) for detailed instructions.

This guide assumes you are starting with the official `docker-compose.yml` file provided with the project. We strongly recommend you start with or migrate to this file as your baseline and modify it to suit your specific needs (e.g., changing file paths). While there are many ways to configure NetAlertX, the default file is designed to meet the mandatory security baseline with layer-2 networking capabilities while operating securely and without startup warnings.

@@ -78,7 +78,7 @@ In the **Environment variables** section of Portainer, add the following:

>
> `sudo chown -R 20211:20211 /local_data_dir`
>
> `sudo chmod -R a+rwx /local_data_dir`
>
@@ -46,8 +46,7 @@ NetAlertX requires certain paths to be writable at runtime. These paths should b

```bash
docker run -it --rm --name netalertx --user "0" \
  -v /local_data_dir:/data \
  --tmpfs /tmp:uid=20211,gid=20211,mode=1700 \
  ghcr.io/jokob-sk/netalertx:latest
```

@@ -63,7 +62,7 @@ docker run -it --rm --name netalertx --user "0" \

>
> `sudo chown -R 20211:20211 /local_data_dir`
>
> `sudo chmod -R a+rwx /local_data_dir`
>

---

@@ -84,8 +83,7 @@ services:

```yaml
      - NET_BIND_SERVICE # Required to bind to privileged ports (nbtscan)
    restart: unless-stopped
    volumes:
      - /local_data_dir:/data
      - /etc/localtime:/etc/localtime
    environment:
      - PORT=20211
```
@@ -1,4 +1,4 @@

# Community Helper Scripts Overview

This page provides an overview of community-contributed scripts for NetAlertX. These scripts are not actively maintained and are provided as-is.
@@ -13,8 +13,7 @@ To download and install NetAlertX on the hardware/server directly use the `curl`

> Data loss is a possibility, **it is recommended to install NetAlertX using the supplied Docker image**.

> [!WARNING]
> A warning to the installation method below: Piping to bash is [controversial](https://pi-hole.net/2016/07/25/curling-and-piping-to-bash) and may be dangerous, as you cannot see the code that's about to be executed on your system.

If you trust this repo, you can download the install script via one of the methods (curl/wget) below and it will do its best to install NetAlertX on your system.
@@ -1,11 +1,5 @@

# Migration

> [!WARNING]
> ⚠️ **Important:** The documentation has been recently updated and some instructions may have changed.
> If you are using the currently live production image, please follow the instructions on [Docker Hub](https://hub.docker.com/r/jokobsk/netalertx) for building and running the container.
> These docs reflect the latest development version and may differ from the production image.

When upgrading from older versions of NetAlertX (or PiAlert by jokob-sk), follow the migration steps below to ensure your data and configuration are properly transferred.

> [!TIP]

@@ -218,7 +212,7 @@ services:

### 1.3 Migration from NetAlertX `v25.10.1`

Starting from v25.10.1, the container uses a [more secure, read-only runtime environment](./SECURITY_FEATURES.md), which requires all writable paths (e.g., logs, API cache, temporary data) to be mounted as `tmpfs` or permanent writable volumes, with sufficient access [permissions](./FILE_PERMISSIONS.md). The data location has also changed from `/app/db` and `/app/config` to `/data/db` and `/data/config`. See the detailed steps below.

#### STEPS:

@@ -234,8 +228,8 @@ services:

```yaml
    network_mode: "host"
    restart: unless-stopped
    volumes:
      - /local_data_dir/config:/data/config
      - /local_data_dir/db:/data/db
      - /local_data_dir/config:/app/config
      - /local_data_dir/db:/app/db
      # (optional) useful for debugging if you have issues setting up the container
      - /local_data_dir/logs:/tmp/log
    environment:
```

@@ -245,30 +239,7 @@ services:

4. Start the container and verify everything works as expected.
5. Stop the container.
6. Perform a one-off migration to the latest `netalertx` image and `20211` user:

> [!NOTE]
> The example below assumes your `/config` and `/db` folders are stored in `local_data_dir`.
> Replace this path with your actual configuration directory. `netalertx` is the container name, which might differ from your setup.

```sh
docker run -it --rm --name netalertx --user "0" \
  -v /local_data_dir/config:/data/config \
  -v /local_data_dir/db:/data/db \
  --tmpfs /tmp:uid=20211,gid=20211,mode=1700 \
  ghcr.io/jokob-sk/netalertx:latest
```

...or alternatively execute:

```bash
sudo chown -R 20211:20211 /local_data_dir/config
sudo chown -R 20211:20211 /local_data_dir/db
sudo chmod -R a+rwx /local_data_dir/
```

7. Stop the container.
6. Update the `docker-compose.yml` as per the example below.

```yaml
services:
```

@@ -284,10 +255,7 @@ services:

```yaml
      - NET_BIND_SERVICE # 🆕 New line
    restart: unless-stopped
    volumes:
      # (optional) useful for debugging if you have issues setting up the container
      #- /local_data_dir/logs:/tmp/log
      - /local_data_dir:/data # 🆕 This folder contains your /db and /config directories; the parent changed from /app to /data
      # Ensuring the timezone is the same as on the server - make sure the TIMEZONE setting is also configured
      - /etc/localtime:/etc/localtime:ro # 🆕 New line
    environment:
```

@@ -298,5 +266,33 @@ services:

```yaml
      - "/tmp:uid=20211,gid=20211,mode=1700,rw,noexec,nosuid,nodev,async,noatime,nodiratime"
      # 🆕 New "tmpfs" section END 🔼
```

7. Perform a one-off migration to the latest `netalertx` image and `20211` user.

> [!NOTE]
> The examples below assume your `/config` and `/db` folders are stored in `local_data_dir`.
> Replace this path with your actual configuration directory. `netalertx` is the container name, which might differ from your setup.

**Automated approach**:

Run the container with the `--user "0"` parameter. Please note, some systems will require the manual approach below.

```sh
docker run -it --rm --name netalertx --user "0" \
  -v /local_data_dir/config:/data/config \
  -v /local_data_dir/db:/data/db \
  --tmpfs /tmp:uid=20211,gid=20211,mode=1700 \
  ghcr.io/jokob-sk/netalertx:latest
```

Stop the container and run it as you would normally.

**Manual approach**:

Use the manual approach if the automated approach fails. Execute the commands below:

```bash
sudo chown -R 20211:20211 /local_data_dir
sudo chmod -R a+rwx /local_data_dir
```

8. Start the container and verify everything works as expected.
@@ -63,7 +63,6 @@ There is also an in-app Help / FAQ section that should be answering frequently a

#### ♻ Misc

- [Version history (legacy)](./VERSIONS_HISTORY.md)
- [Reverse proxy (Nginx, Apache, SWAG)](./REVERSE_PROXY.md)
- [Installing Updates](./UPDATES.md)
- [Setting up Authelia](./AUTHELIA.md) (DRAFT)
@@ -1,62 +1,64 @@

# Sessions Section – Device View

The **Sessions Section** shows a device's connection history. All data is automatically detected and **cannot be edited**.



---

## Key Fields

| Field | Description | Editable? |
| ------------------------------ | ------------------------------------------------------------------------------------------------ | --------------- |
| **First Connection** | The first time the device was detected on the network. | ❌ Auto-detected |
| **Last Connection** | The most recent time the device was online. | ❌ Auto-detected |

---

## How Session Information Works

### 1. Detecting New Devices

When a device is first detected in the network, the system logs it in the events table:

`INSERT INTO Events (eve_MAC, eve_IP, eve_DateTime, eve_EventType, eve_AdditionalInfo, eve_PendingAlertEmail) SELECT cur_MAC, cur_IP, '{startTime}', 'New Device', cur_Vendor, 1 FROM CurrentScan WHERE NOT EXISTS (SELECT 1 FROM Devices WHERE devMac = cur_MAC)`

* Devices scanned in the current cycle (**CurrentScan**) are checked against the **Devices** table.
* If a device is new, a **New Device** event is logged, capturing the MAC, IP, vendor, and detection time.

### 2. Logging Connection Sessions

When a new connection is detected, the system creates a session record:

`INSERT INTO Sessions (ses_MAC, ses_IP, ses_EventTypeConnection, ses_DateTimeConnection, ses_EventTypeDisconnection, ses_DateTimeDisconnection, ses_StillConnected, ses_AdditionalInfo) SELECT cur_MAC, cur_IP, 'Connected', '{startTime}', NULL, NULL, 1, cur_Vendor FROM CurrentScan WHERE NOT EXISTS (SELECT 1 FROM Sessions WHERE ses_MAC = cur_MAC)`

* A new session is logged in the **Sessions** table if no prior session exists.
* Captured details include the connection type (wired or wireless), connection time, and device details (MAC, IP, vendor).
* The `Still Connected` flag is set to `1` (active connection).

### 3. Handling Missing or Conflicting Data

* **Triggers:**
  Devices are flagged when session data is incomplete, inconsistent, or conflicting. Examples include:

  * Missing first or last connection timestamps
  * Overlapping session records
  * Sessions showing a device as connected and disconnected at the same time

* **System response:**

  * Automatically highlights affected devices in the **Sessions Section**.
  * Attempts to **infer missing information** from available data, such as:

    * Estimating first or last connection times from nearby session events
    * Correcting overlapping session periods
    * Reconciling conflicting connection statuses

* **User impact:**

  * Users do **not** need to manually fix session data.
  * The system ensures the device's connection history remains as accurate as possible for monitoring and reporting.

### 4. Updating Sessions

* **Reconnect:** The session is updated with the new connection timestamp.
* **Disconnect:** The **Disconnection Time** is recorded, the `Still Connected` flag is set to `0`, and the device is marked as offline (see the sketch after this section).

This session information feeds directly into **Monitoring → Presence**, providing a live view of which devices are currently online.


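A hedged sketch of the kind of statement the disconnect step implies, using the column names from the `Sessions` INSERT above; the actual query used by NetAlertX may differ:

```sql
-- Illustrative only: close the open session for a device that went offline
UPDATE Sessions
SET ses_EventTypeDisconnection = 'Disconnected',
    ses_DateTimeDisconnection  = '{stopTime}',   -- placeholder, like '{startTime}' above
    ses_StillConnected         = 0
WHERE ses_MAC = 'AA:BB:CC:DD:EE:FF'
  AND ses_StillConnected = 1;
```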
@@ -47,8 +47,7 @@ services:

```yaml
      - NET_ADMIN
      - NET_BIND_SERVICE
    volumes:
      - /app_storage/netalertx:/data
      # to sync with system time
      - /etc/localtime:/etc/localtime:ro
    tmpfs:
```

@@ -66,10 +65,7 @@ services:

```yaml
    volumes:
      # (optional) useful for debugging if you have issues setting up the container
      # - local/path/logs:/tmp/log <- commented out with # ⚠
      - /volume1/app_storage/netalertx:/data
```



@@ -88,5 +84,5 @@ services:

>
> `sudo chown -R 20211:20211 /local_data_dir`
>
> `sudo chmod -R a+rwx /local_data_dir`
>
@@ -2,6 +2,7 @@

> [!WARNING]
> For versions prior to `v25.6.7` upgrade to version `v25.5.24` first (`docker pull ghcr.io/jokob-sk/netalertx:25.5.24`) as later versions don't support a full upgrade. Alternatively, devices and settings can be migrated manually, e.g. via [CSV import](./DEVICES_BULK_EDITING.md).
> See the [Migration guide](./MIGRATION.md) for details.

This guide outlines approaches for updating Docker containers, usually when upgrading to a newer version of NetAlertX. Each method offers different benefits depending on the situation. Here are the methods:
@@ -70,6 +70,11 @@ a[target="_blank"] {

```css
  opacity: 1;
}

[data-is-valid="0"] {
  /* border: 1px solid red; */
  background-color: #ff4b4b !important;
}

/* -----------------------------------------------------------------------------
    Helper Classes
----------------------------------------------------------------------------- */
```
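For context, the new rule highlights any element that the settings validation code (see `validateRegex` further below) marks as invalid; a hypothetical example of the markup it targets:

```html
<!-- validateRegex() sets data-is-valid="0" on inputs that fail their regex,
     which this CSS rule then paints red; the id and value are illustrative -->
<input id="SOME_SETTING" type="text" value="not-a-valid-value" data-is-valid="0">
```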
@@ -1,6 +1,6 @@

```php
<!--
#---------------------------------------------------------------------------------#
#                                    NetAlertX                                     #
#                Open Source Network Guard / WIFI & LAN intrusion detector         #
#                                                                                  #
#  devices.php - Front module. Devices list page                                   #
```

@@ -136,7 +136,7 @@

```php
<!-- page script ----------------------------------------------------------- -->
<script>
  var deviceStatus = 'all';
  var tableRows = getCache ("nax_parTableRows") == "" ? parseInt(getSetting("UI_DEFAULT_PAGE_SIZE")) : getCache ("nax_parTableRows") ;

  var tableOrder = getCache ("nax_parTableOrder") == "" ? [[3,'desc'], [0,'asc']] : JSON.parse(getCache ("nax_parTableOrder")) ;

  var tableColumnHide = [];
```

@@ -563,6 +563,9 @@ function initializeDatatable (status) {

```js
    status = 'my_devices'
  }

  // retrieve page size
  var tableRows = getCache ("nax_parTableRows") == "" ? parseInt(getSetting("UI_DEFAULT_PAGE_SIZE")) : getCache ("nax_parTableRows") ;

  // Save status selected
  deviceStatus = status;
```
@@ -378,7 +378,7 @@ function localizeTimestamp(input) {

```js
  let tz = getSetting("TIMEZONE") || 'Europe/Berlin';
  input = String(input || '').trim();

  // 1. Unix timestamps (10 or 13 digits)
  if (/^\d+$/.test(input)) {
    const ms = input.length === 10 ? parseInt(input, 10) * 1000 : parseInt(input, 10);
    return new Intl.DateTimeFormat('default', {
```

@@ -389,39 +389,59 @@

```js
    }).format(new Date(ms));
  }

  // 2. European DD/MM/YYYY
  let match = input.match(/^(\d{1,2})\/(\d{1,2})\/(\d{4})(?:[ ,]+(\d{1,2}:\d{2}(?::\d{2})?))?$/);
  if (match) {
    let [, d, m, y, t = "00:00:00", tzPart = ""] = match;
    const dNum = parseInt(d, 10);
    const mNum = parseInt(m, 10);

    if (dNum <= 12 && mNum > 12) {
    } else {
      const iso = `${y}-${m.padStart(2,'0')}-${d.padStart(2,'0')}T${t.length===5 ? t + ":00" : t}${tzPart}`;
      return formatSafe(iso, tz);
    }
  }

  // 3. US MM/DD/YYYY
  match = input.match(/^(\d{1,2})\/(\d{1,2})\/(\d{4})(?:[ ,]+(\d{1,2}:\d{2}(?::\d{2})?))?(.*)$/);
  if (match) {
    let [, m, d, y, t = "00:00:00", tzPart = ""] = match;
    const iso = `${y}-${m.padStart(2,'0')}-${d.padStart(2,'0')}T${t.length===5?t+":00":t}${tzPart}`;
    return formatSafe(iso, tz);
  }

  // 4. ISO YYYY-MM-DD with optional Z/+offset
  match = input.match(/^(\d{4})-(0[1-9]|1[0-2])-(0[1-9]|[12]\d|3[01])[ T](\d{1,2}:\d{2}(?::\d{2})?)(Z|[+-]\d{2}:?\d{2})?$/);
  if (match) {
    let [, y, m, d, time, offset = ""] = match;
    const iso = `${y}-${m}-${d}T${time.length===5?time+":00":time}${offset}`;
    return formatSafe(iso, tz);
  }

  // 5. RFC2822 / "25 Aug 2025 13:45:22 +0200"
  match = input.match(/^\d{1,2} [A-Za-z]{3,} \d{4}/);
  if (match) {
    return formatSafe(input, tz);
  }

  // 6. DD-MM-YYYY with optional time
  match = input.match(/^(\d{1,2})-(\d{1,2})-(\d{4})(?:[ T](\d{1,2}:\d{2}(?::\d{2})?))?$/);
  if (match) {
    let [, d, m, y, time = "00:00:00"] = match;
    const iso = `${y}-${m.padStart(2,'0')}-${d.padStart(2,'0')}T${time.length===5?time+":00":time}`;
    return formatSafe(iso, tz);
  }

  // 7. Strict YYYY-DD-MM with optional time
  match = input.match(/^(\d{4})-(0[1-9]|[12]\d|3[01])-(0[1-9]|1[0-2])(?:[ T](\d{1,2}:\d{2}(?::\d{2})?))?$/);
  if (match) {
    let [, y, d, m, time = "00:00:00"] = match;
    const iso = `${y}-${m}-${d}T${time.length === 5 ? time + ":00" : time}`;
    return formatSafe(iso, tz);
  }

  // 8. Fallback
  return formatSafe(input, tz);

  function formatSafe(str, tz) {
```
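Illustrative calls into the rewritten function above; the exact output depends on the configured `TIMEZONE`, so only the branch that handles each input is noted:

```js
// Assuming localizeTimestamp() and its helpers are loaded on the page
localizeTimestamp("1733570400");                 // 1. Unix timestamp (seconds)
localizeTimestamp("07/12/2025 14:30");           // 2. European DD/MM/YYYY
localizeTimestamp("2025-12-07T14:30:00Z");       // 4. ISO date with Z offset
localizeTimestamp("25 Aug 2025 13:45:22 +0200"); // 5. RFC2822-style string
localizeTimestamp("07-12-2025 14:30");           // 6. DD-MM-YYYY with time
```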
@@ -440,6 +460,7 @@ function localizeTimestamp(input) {

```js
}


// ----------------------------------------------------
/**
 * Replaces double quotes within single-quoted strings, then converts all single quotes to double quotes,
```

@@ -1622,7 +1643,6 @@ async function executeOnce() {

```js
  if (!isAppInitialized()) {
    try {

      await waitForGraphQLServer(); // Wait for the server to start
```

@@ -1630,7 +1650,7 @@

```js
      await cacheSettings();
      await cacheStrings();

      console.log("All AJAX callbacks have completed");
      onAllCallsComplete();
    } catch (error) {
      console.error("Error:", error);
```
@@ -247,12 +247,19 @@ function showModalPopupForm(

```js
  let settingsArray = [];
  if (Array.isArray(popupFormJson)) {
    popupFormJson.forEach(field => {
      const result = collectSetting(
        `${parentSettingKey}_popupform`, // prefix
        field.function,                  // setCodeName
        field.type,                      // setType (object)
        settingsArray
      );
      settingsArray = result.settingsArray;

      if (!result.dataIsValid) {
        msg = getString("Gen_Invalid_Value") + ":" + result.failedSettingKey;
        console.error(msg);
        showModalOk("ERROR", msg);
      }
    });
  }
```
@@ -779,6 +779,7 @@ const handleElementOptions = (setKey, elementOptions, transformers, val) => {
  let getStringKey = "";
  let onClick = "console.log('onClick - Not implemented');";
  let onChange = "console.log('onChange - Not implemented');";
  let focusout = "console.log('focusout - Not implemented');";
  let customParams = "";
  let customId = "";
  let columns = [];

@@ -830,6 +831,9 @@ const handleElementOptions = (setKey, elementOptions, transformers, val) => {
      if (option.onChange) {
        onChange = option.onChange;
      }
      if (option.focusout) {
        focusout = option.focusout;
      }
      if (option.customParams) {
        customParams = option.customParams;
      }

@@ -867,7 +871,8 @@ const handleElementOptions = (setKey, elementOptions, transformers, val) => {
    customId,
    columns,
    base64Regex,
    elementOptionsBase64,
    focusout
  };
};
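An illustrative `elementOptions` payload that exercises the new `focusout` option (it mirrors the plugin `config.json` changes further below; the base64 pattern is an example, not one of the project's):

```
const elementOptions = [
  { "focusout": "validateRegex(this)" },   // fires validation when the field loses focus
  { "base64Regex": "XihbMC05XSspJA==" }    // base64 of "^([0-9]+)$" – illustrative pattern only
];
// handleElementOptions(...) copies option.focusout into the returned object, and the
// generated <input>/<select>/<textarea> markup wires it up as onfocusout="...".
```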
@@ -1015,6 +1020,20 @@ function collectSetting(prefix, setCodeName, setType, settingsArray) {

  const { elementType, elementOptions = [], transformers = [] } = elementWithInputValue;

  // Check if validation failed
  if (
    $(`#${setCodeName}`)
    && $(`#${setCodeName}`).attr("data-is-valid")
    && $(`#${setCodeName}`).attr("data-is-valid") == 0
  )
  {
    return {
      "settingsArray": settingsArray,
      "dataIsValid": false,
      "failedSettingKey": setCodeName
    };
  }

  const opts = handleElementOptions('none', elementOptions, transformers, val = "");

  // Map of handlers

@@ -1084,7 +1103,11 @@ function collectSetting(prefix, setCodeName, setType, settingsArray) {
  const value = handlers[handlerKey]();
  settingsArray.push([prefix, setCodeName, dataType, value]);

  return {
    "settingsArray": settingsArray,
    "dataIsValid": true,
    "failedSettingKey": ""
  };
}
@@ -1137,7 +1160,8 @@ function generateFormHtml(settingsData, set, overrideValue, overrideOptions, ori
    customId,
    columns,
    base64Regex,
    elementOptionsBase64,
    focusout
  } = handleElementOptions(setKey, elementOptions, transformers, inVal);

  // Override value

@@ -1160,6 +1184,7 @@ function generateFormHtml(settingsData, set, overrideValue, overrideOptions, ori
      const addCss = isOrdeable ? "select2 select2-hidden-accessible" : "";

      inputHtml += `<select onChange="settingsChanged();${onChange}"
                      onfocusout="${focusout}"
                      my-data-type="${dataType}"
                      my-editable="${editable}"
                      class="form-control ${addCss} ${cssClasses}"

@@ -1185,6 +1210,7 @@ function generateFormHtml(settingsData, set, overrideValue, overrideOptions, ori
      inputHtml += `<input
                      class="${inputClass} ${cssClasses}"
                      onChange="settingsChanged();${onChange}"
                      onfocusout="${focusout}"
                      my-data-type="${dataType}"
                      my-customparams="${customParams}"
                      my-customid="${customId}"

@@ -1216,6 +1242,8 @@ function generateFormHtml(settingsData, set, overrideValue, overrideOptions, ori
    case 'textarea':
      inputHtml += `<textarea
                      class="form-control input"
                      onChange="settingsChanged();${onChange}"
                      onfocusout="${focusout}"
                      my-customparams="${customParams}"
                      my-customid="${customId}"
                      my-originalSetKey="${originalSetKey}"
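For orientation, roughly what the generated input markup looks like once `focusout` is wired in (attribute values are placeholders, not real settings):

```
const exampleMarkup = `<input
    class="form-control input"
    onChange="settingsChanged();validateRegex(this)"
    onfocusout="validateRegex(this)"
    my-data-type="string"
    my-customparams=""
    my-customid="">`;
console.log(exampleMarkup);
```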
@@ -140,8 +140,11 @@ function validateRegex(elem) {
  // Validate against regex
  if (regex.test(value)) {
    iconSpan.html("<i class='fa fa-check'></i>");
    inputElem.attr("data-is-valid", "1");
  } else {
    iconSpan.html("<i class='fa fa-xmark'></i>");
    showModalOk('WARNING', getString("Gen_Invalid_Value"));
    inputElem.attr("data-is-valid", "0");
  }
}
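A minimal DOM sketch of the round trip: `validateRegex()` stamps `data-is-valid` on the field, and `collectSetting()` later refuses to persist when it reads `0`. Plain DOM is used here for brevity; the real code uses jQuery, and the setting key and pattern are hypothetical.

```
const input = document.createElement("input");
input.id = "EXAMPLE_SCHEDULE";            // hypothetical setting key
input.value = "not-a-cron-expression";

function markValidity(el, pattern) {
  el.setAttribute("data-is-valid", pattern.test(el.value) ? "1" : "0");
}

markValidity(input, /^\S+(\s+\S+){4}$/);  // crude 5-field cron shape, illustrative only
console.log(input.getAttribute("data-is-valid")); // "0" → collectSetting() would bail out
```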
@@ -137,7 +137,8 @@
    customId,
    columns,
    base64Regex,
    elementOptionsBase64,
    focusout
  } = handleElementOptions('none', elementOptions, transformers, val = "");

  // render based on element type
@@ -521,13 +521,17 @@ function getChildren(node, list, path, visited = [])

  // Loop through all items to find children of the current node
  for (var i in list) {
    const item = list[i];
    const parentMac = item.devParentMAC || ""; // null-safe
    const nodeMac = node.devMac || "";         // null-safe

    if (parentMac != "" && parentMac.toLowerCase() == nodeMac.toLowerCase() && !hiddenMacs.includes(parentMac)) {

      visibleNodesCount++;

      // Process children recursively, passing a copy of the visited list
      children.push(getChildren(list[i], list, path + ((path == "") ? "" : '|') + parentMac, visited));
    }
  }

  // Track leaf and parent node counts
@@ -565,14 +569,27 @@ function getChildren(node, list, path, visited = [])
// ---------------------------------------------------------------------------
function getHierarchy()
{
  let internetNode = null;

  for(i in deviceListGlobal)
  {
    if(deviceListGlobal[i].devMac == 'Internet')
    {
      internetNode = deviceListGlobal[i];

      return (getChildren(internetNode, deviceListGlobal, ''))
      break;
    }
  }

  if (!internetNode) {
    showModalOk(
      getString('Network_Configuration_Error'),
      getString('Network_Root_Not_Configured')
    );
    console.error("getHierarchy(): Internet node not found");
    return null;
  }
}

//---------------------------------------------------------------------------
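Functionally, the new guard is equivalent to a compact lookup like the following; shown only to illustrate the intent, not as a replacement for the code above:

```
// Compact equivalent of the root-node lookup and its error path.
function findInternetNode(devices) {
  const internetNode = devices.find(d => d.devMac === 'Internet');
  if (!internetNode) {
    console.error("getHierarchy(): Internet node not found");
    return null;                 // the real code also shows a modal to the user
  }
  return internetNode;           // caller then builds the tree via getChildren(...)
}
```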
@@ -671,8 +688,6 @@ function handleNodeClick(el)

// ---------------------------------------------------------------------------
var myTree;

var emSize;
var nodeHeight;
// var sizeCoefficient = 1.4
@@ -689,140 +704,139 @@ function emToPx(em, element) {
|
||||
|
||||
function initTree(myHierarchy)
|
||||
{
|
||||
// calculate the drawing area based on teh tree width and available screen size
|
||||
|
||||
let baseFontSize = parseFloat($('html').css('font-size'));
|
||||
let treeAreaHeight = ($(window).height() - 155); ;
|
||||
// calculate the font size of the leaf nodes to fit everything into the tree area
|
||||
leafNodesCount == 0 ? 1 : leafNodesCount;
|
||||
|
||||
emSize = pxToEm((treeAreaHeight/(leafNodesCount)).toFixed(2));
|
||||
|
||||
let screenWidthEm = pxToEm($('.networkTable').width()-15);
|
||||
|
||||
// init the drawing area size
|
||||
$("#networkTree").attr('style', `height:${treeAreaHeight}px; width:${emToPx(screenWidthEm)}px`)
|
||||
|
||||
if(myHierarchy.type == "")
|
||||
if(myHierarchy && myHierarchy.type !== "")
|
||||
{
|
||||
showModalOk(getString('Network_Configuration_Error'), getString('Network_Root_Not_Configured'))
|
||||
// calculate the drawing area based on the tree width and available screen size
|
||||
let baseFontSize = parseFloat($('html').css('font-size'));
|
||||
let treeAreaHeight = ($(window).height() - 155); ;
|
||||
|
||||
return;
|
||||
}
|
||||
// calculate the font size of the leaf nodes to fit everything into the tree area
|
||||
leafNodesCount == 0 ? 1 : leafNodesCount;
|
||||
|
||||
// handle canvas and node size if only a few nodes
|
||||
emSize > 1 ? emSize = 1 : emSize = emSize;
|
||||
emSize = pxToEm((treeAreaHeight/(leafNodesCount)).toFixed(2));
|
||||
|
||||
let nodeHeightPx = emToPx(emSize*1);
|
||||
let nodeWidthPx = emToPx(screenWidthEm / (parentNodesCount));
|
||||
let screenWidthEm = pxToEm($('.networkTable').width()-15);
|
||||
|
||||
// handle if only a few nodes
|
||||
nodeWidthPx > 160 ? nodeWidthPx = 160 : nodeWidthPx = nodeWidthPx;
|
||||
// init the drawing area size
|
||||
$("#networkTree").attr('style', `height:${treeAreaHeight}px; width:${emToPx(screenWidthEm)}px`)
|
||||
|
||||
console.log(Treeviz);
|
||||
// handle canvas and node size if only a few nodes
|
||||
emSize > 1 ? emSize = 1 : emSize = emSize;
|
||||
|
||||
myTree = Treeviz.create({
|
||||
htmlId: "networkTree",
|
||||
renderNode: nodeData => {
|
||||
let nodeHeightPx = emToPx(emSize*1);
|
||||
let nodeWidthPx = emToPx(screenWidthEm / (parentNodesCount));
|
||||
|
||||
(!emptyArr.includes(nodeData.data.port )) ? port = nodeData.data.port : port = "";
|
||||
// handle if only a few nodes
|
||||
nodeWidthPx > 160 ? nodeWidthPx = 160 : nodeWidthPx = nodeWidthPx;
|
||||
|
||||
(port == "" || port == 0 || port == 'None' ) ? portBckgIcon = `<i class="fa fa-wifi"></i>` : portBckgIcon = `<i class="fa fa-ethernet"></i>`;
|
||||
console.log(Treeviz);
|
||||
|
||||
portHtml = (port == "" || port == 0 || port == 'None' ) ? "   " : port;
|
||||
myTree = Treeviz.create({
|
||||
htmlId: "networkTree",
|
||||
renderNode: nodeData => {
|
||||
|
||||
// Build HTML for individual nodes in the network diagram
|
||||
deviceIcon = (!emptyArr.includes(nodeData.data.icon )) ?
|
||||
`<div class="netIcon">
|
||||
${atob(nodeData.data.icon)}
|
||||
</div>` : "";
|
||||
devicePort = `<div class="netPort"
|
||||
style="width:${emSize}em;height:${emSize}em">
|
||||
${portHtml}</div>
|
||||
<div class="portBckgIcon"
|
||||
style="margin-left:-${emSize*0.7}em;">
|
||||
${portBckgIcon}
|
||||
</div>`;
|
||||
collapseExpandIcon = nodeData.data.hiddenChildren ?
|
||||
"square-plus" : "square-minus";
|
||||
(!emptyArr.includes(nodeData.data.port )) ? port = nodeData.data.port : port = "";
|
||||
|
||||
// generate +/- icon if node has children nodes
|
||||
collapseExpandHtml = nodeData.data.hasChildren ?
|
||||
`<div class="netCollapse"
|
||||
style="font-size:${nodeHeightPx/2}px;top:${Math.floor(nodeHeightPx / 4)}px"
|
||||
data-mytreepath="${nodeData.data.path}"
|
||||
data-mytreemac="${nodeData.data.mac}">
|
||||
<i class="fa fa-${collapseExpandIcon} pointer"></i>
|
||||
</div>` : "";
|
||||
(port == "" || port == 0 || port == 'None' ) ? portBckgIcon = `<i class="fa fa-wifi"></i>` : portBckgIcon = `<i class="fa fa-ethernet"></i>`;
|
||||
|
||||
selectedNodeMac = $(".nav-tabs-custom .active a").attr('data-mytabmac')
|
||||
portHtml = (port == "" || port == 0 || port == 'None' ) ? "   " : port;
|
||||
|
||||
highlightedCss = nodeData.data.mac == selectedNodeMac ?
|
||||
" highlightedNode " : "";
|
||||
cssNodeType = nodeData.data.devIsNetworkNodeDynamic ?
|
||||
" node-network-device " : " node-standard-device ";
|
||||
// Build HTML for individual nodes in the network diagram
|
||||
deviceIcon = (!emptyArr.includes(nodeData.data.icon )) ?
|
||||
`<div class="netIcon">
|
||||
${atob(nodeData.data.icon)}
|
||||
</div>` : "";
|
||||
devicePort = `<div class="netPort"
|
||||
style="width:${emSize}em;height:${emSize}em">
|
||||
${portHtml}</div>
|
||||
<div class="portBckgIcon"
|
||||
style="margin-left:-${emSize*0.7}em;">
|
||||
${portBckgIcon}
|
||||
</div>`;
|
||||
collapseExpandIcon = nodeData.data.hiddenChildren ?
|
||||
"square-plus" : "square-minus";
|
||||
|
||||
networkHardwareIcon = nodeData.data.devIsNetworkNodeDynamic ? `<span class="network-hw-icon">
|
||||
<i class="fa-solid fa-hard-drive"></i>
|
||||
</span>` : "";
|
||||
// generate +/- icon if node has children nodes
|
||||
collapseExpandHtml = nodeData.data.hasChildren ?
|
||||
`<div class="netCollapse"
|
||||
style="font-size:${nodeHeightPx/2}px;top:${Math.floor(nodeHeightPx / 4)}px"
|
||||
data-mytreepath="${nodeData.data.path}"
|
||||
data-mytreemac="${nodeData.data.mac}">
|
||||
<i class="fa fa-${collapseExpandIcon} pointer"></i>
|
||||
</div>` : "";
|
||||
|
||||
const badgeConf = getStatusBadgeParts(nodeData.data.presentLastScan, nodeData.data.alertDown, nodeData.data.mac, statusText = '')
|
||||
selectedNodeMac = $(".nav-tabs-custom .active a").attr('data-mytabmac')
|
||||
|
||||
return result = `<div
|
||||
class="node-inner hover-node-info box pointer ${highlightedCss} ${cssNodeType}"
|
||||
style="height:${nodeHeightPx}px;font-size:${nodeHeightPx-5}px;"
|
||||
onclick="handleNodeClick(this)"
|
||||
data-mac="${nodeData.data.mac}"
|
||||
data-parentMac="${nodeData.data.parentMac}"
|
||||
data-name="${nodeData.data.name}"
|
||||
data-ip="${nodeData.data.ip}"
|
||||
data-mac="${nodeData.data.mac}"
|
||||
data-vendor="${nodeData.data.vendor}"
|
||||
data-type="${nodeData.data.type}"
|
||||
data-devIsNetworkNodeDynamic="${nodeData.data.devIsNetworkNodeDynamic}"
|
||||
data-lastseen="${nodeData.data.lastseen}"
|
||||
data-firstseen="${nodeData.data.firstseen}"
|
||||
data-relationship="${nodeData.data.relType}"
|
||||
data-status="${nodeData.data.status}"
|
||||
data-present="${nodeData.data.presentLastScan}"
|
||||
data-alert="${nodeData.data.alertDown}"
|
||||
data-icon="${nodeData.data.icon}"
|
||||
>
|
||||
<div class="netNodeText">
|
||||
<strong><span>${devicePort} <span class="${badgeConf.cssText}">${deviceIcon}</span></span>
|
||||
<span class="spanNetworkTree anonymizeDev" style="width:${nodeWidthPx-50}px">${nodeData.data.name}</span>
|
||||
${networkHardwareIcon}
|
||||
</strong>
|
||||
highlightedCss = nodeData.data.mac == selectedNodeMac ?
|
||||
" highlightedNode " : "";
|
||||
cssNodeType = nodeData.data.devIsNetworkNodeDynamic ?
|
||||
" node-network-device " : " node-standard-device ";
|
||||
|
||||
networkHardwareIcon = nodeData.data.devIsNetworkNodeDynamic ? `<span class="network-hw-icon">
|
||||
<i class="fa-solid fa-hard-drive"></i>
|
||||
</span>` : "";
|
||||
|
||||
const badgeConf = getStatusBadgeParts(nodeData.data.presentLastScan, nodeData.data.alertDown, nodeData.data.mac, statusText = '')
|
||||
|
||||
return result = `<div
|
||||
class="node-inner hover-node-info box pointer ${highlightedCss} ${cssNodeType}"
|
||||
style="height:${nodeHeightPx}px;font-size:${nodeHeightPx-5}px;"
|
||||
onclick="handleNodeClick(this)"
|
||||
data-mac="${nodeData.data.mac}"
|
||||
data-parentMac="${nodeData.data.parentMac}"
|
||||
data-name="${nodeData.data.name}"
|
||||
data-ip="${nodeData.data.ip}"
|
||||
data-mac="${nodeData.data.mac}"
|
||||
data-vendor="${nodeData.data.vendor}"
|
||||
data-type="${nodeData.data.type}"
|
||||
data-devIsNetworkNodeDynamic="${nodeData.data.devIsNetworkNodeDynamic}"
|
||||
data-lastseen="${nodeData.data.lastseen}"
|
||||
data-firstseen="${nodeData.data.firstseen}"
|
||||
data-relationship="${nodeData.data.relType}"
|
||||
data-status="${nodeData.data.status}"
|
||||
data-present="${nodeData.data.presentLastScan}"
|
||||
data-alert="${nodeData.data.alertDown}"
|
||||
data-icon="${nodeData.data.icon}"
|
||||
>
|
||||
<div class="netNodeText">
|
||||
<strong><span>${devicePort} <span class="${badgeConf.cssText}">${deviceIcon}</span></span>
|
||||
<span class="spanNetworkTree anonymizeDev" style="width:${nodeWidthPx-50}px">${nodeData.data.name}</span>
|
||||
${networkHardwareIcon}
|
||||
</strong>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
${collapseExpandHtml}`;
|
||||
},
|
||||
mainAxisNodeSpacing: 'auto',
|
||||
// secondaryAxisNodeSpacing: 0.3,
|
||||
nodeHeight: nodeHeightPx,
|
||||
nodeWidth: nodeWidthPx,
|
||||
marginTop: '5',
|
||||
isHorizontal : true,
|
||||
hasZoom: true,
|
||||
hasPan: true,
|
||||
marginLeft: '10',
|
||||
marginRight: '10',
|
||||
idKey: "mac",
|
||||
hasFlatData: false,
|
||||
relationnalField: "children",
|
||||
linkWidth: (nodeData) => 2,
|
||||
linkColor: (nodeData) => {
|
||||
relConf = getRelationshipConf(nodeData.data.relType)
|
||||
return relConf.color;
|
||||
}
|
||||
// onNodeClick: (nodeData) => handleNodeClick(nodeData),
|
||||
});
|
||||
${collapseExpandHtml}`;
|
||||
},
|
||||
mainAxisNodeSpacing: 'auto',
|
||||
// secondaryAxisNodeSpacing: 0.3,
|
||||
nodeHeight: nodeHeightPx,
|
||||
nodeWidth: nodeWidthPx,
|
||||
marginTop: '5',
|
||||
isHorizontal : true,
|
||||
hasZoom: true,
|
||||
hasPan: true,
|
||||
marginLeft: '10',
|
||||
marginRight: '10',
|
||||
idKey: "mac",
|
||||
hasFlatData: false,
|
||||
relationnalField: "children",
|
||||
linkWidth: (nodeData) => 2,
|
||||
linkColor: (nodeData) => {
|
||||
relConf = getRelationshipConf(nodeData.data.relType)
|
||||
return relConf.color;
|
||||
}
|
||||
// onNodeClick: (nodeData) => handleNodeClick(nodeData),
|
||||
});
|
||||
|
||||
console.log(deviceListGlobal);
|
||||
myTree.refresh(myHierarchy);
|
||||
console.log(deviceListGlobal);
|
||||
myTree.refresh(myHierarchy);
|
||||
|
||||
// hide spinning icon
|
||||
hideSpinner()
|
||||
// hide spinning icon
|
||||
hideSpinner()
|
||||
} else
|
||||
{
|
||||
console.error("getHierarchy() not returning expected result");
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
@@ -303,7 +303,7 @@ function saveSettings()

  // save to the file
  $new_name = $config_file.'_'.$timestamp.'.backup';
  $new_location = $configFolderPath.'/'.$new_name;

  if(file_exists( $fullConfPath) != 1)
  {
@@ -311,6 +311,7 @@
|
||||
"Gen_Filter": "تصفية",
|
||||
"Gen_Generate": "إنشاء",
|
||||
"Gen_InvalidMac": "",
|
||||
"Gen_Invalid_Value": "",
|
||||
"Gen_LockedDB": "قاعدة البيانات مقفلة",
|
||||
"Gen_NetworkMask": "",
|
||||
"Gen_Offline": "غير متصل",
|
||||
|
||||
@@ -311,6 +311,7 @@
|
||||
"Gen_Filter": "Filtrar",
|
||||
"Gen_Generate": "Generar",
|
||||
"Gen_InvalidMac": "Mac address invàlida.",
|
||||
"Gen_Invalid_Value": "",
|
||||
"Gen_LockedDB": "ERROR - DB podria estar bloquejada - Fes servir F12 Eines desenvolupament -> Consola o provar-ho més tard.",
|
||||
"Gen_NetworkMask": "Màscara de xarxa",
|
||||
"Gen_Offline": "Fora de línia",
|
||||
|
||||
@@ -311,6 +311,7 @@
|
||||
"Gen_Filter": "Filtr",
|
||||
"Gen_Generate": "Vygenerovat",
|
||||
"Gen_InvalidMac": "",
|
||||
"Gen_Invalid_Value": "",
|
||||
"Gen_LockedDB": "CHYBA - Databáze je možná zamčená - Zkontrolujte F12 -> Nástroje pro vývojáře -> Konzole. nebo to zkuste později.",
|
||||
"Gen_NetworkMask": "",
|
||||
"Gen_Offline": "Offline",
|
||||
|
||||
@@ -315,6 +315,7 @@
|
||||
"Gen_Filter": "Filter",
|
||||
"Gen_Generate": "Generieren",
|
||||
"Gen_InvalidMac": "Ungültige MAC-Adresse.",
|
||||
"Gen_Invalid_Value": "",
|
||||
"Gen_LockedDB": "ERROR - DB eventuell gesperrt - Nutze die Konsole in den Entwickler Werkzeugen (F12) zur Überprüfung oder probiere es später erneut.",
|
||||
"Gen_NetworkMask": "",
|
||||
"Gen_Offline": "Offline",
|
||||
|
||||
@@ -311,6 +311,7 @@
|
||||
"Gen_Filter": "Filter",
|
||||
"Gen_Generate": "Generate",
|
||||
"Gen_InvalidMac": "Invalid Mac address.",
|
||||
"Gen_Invalid_Value": "An invalid value was entered",
|
||||
"Gen_LockedDB": "ERROR - DB might be locked - Check F12 Dev tools -> Console or try later.",
|
||||
"Gen_NetworkMask": "Network mask",
|
||||
"Gen_Offline": "Offline",
|
||||
|
||||
@@ -313,6 +313,7 @@
|
||||
"Gen_Filter": "Filtro",
|
||||
"Gen_Generate": "Generar",
|
||||
"Gen_InvalidMac": "",
|
||||
"Gen_Invalid_Value": "",
|
||||
"Gen_LockedDB": "Fallo - La base de datos puede estar bloqueada - Pulsa F1 -> Ajustes de desarrolladores -> Consola o prueba más tarde.",
|
||||
"Gen_NetworkMask": "",
|
||||
"Gen_Offline": "Desconectado",
|
||||
|
||||
@@ -311,6 +311,7 @@
|
||||
"Gen_Filter": "",
|
||||
"Gen_Generate": "",
|
||||
"Gen_InvalidMac": "",
|
||||
"Gen_Invalid_Value": "",
|
||||
"Gen_LockedDB": "",
|
||||
"Gen_NetworkMask": "",
|
||||
"Gen_Offline": "",
|
||||
|
||||
front/php/templates/language/fr_fr.json (Executable file → Normal file)
@@ -311,6 +311,7 @@
|
||||
"Gen_Filter": "Filtrer",
|
||||
"Gen_Generate": "Générer",
|
||||
"Gen_InvalidMac": "Adresse MAC invalide.",
|
||||
"Gen_Invalid_Value": "Une valeur invalide a été renseignée",
|
||||
"Gen_LockedDB": "Erreur - La base de données est peut-être verrouillée - Vérifier avec les outils de dév via F12 -> Console ou essayer plus tard.",
|
||||
"Gen_NetworkMask": "Masque réseau",
|
||||
"Gen_Offline": "Hors ligne",
|
||||
|
||||
front/php/templates/language/it_it.json (Executable file → Normal file)
@@ -311,6 +311,7 @@
|
||||
"Gen_Filter": "Filtro",
|
||||
"Gen_Generate": "Genera",
|
||||
"Gen_InvalidMac": "Indirizzo Mac non valido.",
|
||||
"Gen_Invalid_Value": "È stato inserito un valore non valido",
|
||||
"Gen_LockedDB": "ERRORE: il DB potrebbe essere bloccato, controlla F12 Strumenti di sviluppo -> Console o riprova più tardi.",
|
||||
"Gen_NetworkMask": "Maschera di rete",
|
||||
"Gen_Offline": "Offline",
|
||||
|
||||
File diff suppressed because it is too large
@@ -311,6 +311,7 @@
|
||||
"Gen_Filter": "Filter",
|
||||
"Gen_Generate": "",
|
||||
"Gen_InvalidMac": "",
|
||||
"Gen_Invalid_Value": "",
|
||||
"Gen_LockedDB": "FEIL - DB kan være låst - Sjekk F12 Dev tools -> Konsoll eller prøv senere.",
|
||||
"Gen_NetworkMask": "",
|
||||
"Gen_Offline": "Frakoblet",
|
||||
|
||||
@@ -311,6 +311,7 @@
|
||||
"Gen_Filter": "Filtr",
|
||||
"Gen_Generate": "Wygeneruj",
|
||||
"Gen_InvalidMac": "",
|
||||
"Gen_Invalid_Value": "",
|
||||
"Gen_LockedDB": "Błąd - Baza danych może być zablokowana - Sprawdź narzędzia deweloperskie F12 -> Konsola lub spróbuj później.",
|
||||
"Gen_NetworkMask": "",
|
||||
"Gen_Offline": "Niedostępne",
|
||||
|
||||
@@ -311,6 +311,7 @@
|
||||
"Gen_Filter": "Filtro",
|
||||
"Gen_Generate": "Gerar",
|
||||
"Gen_InvalidMac": "",
|
||||
"Gen_Invalid_Value": "",
|
||||
"Gen_LockedDB": "ERRO - O banco de dados pode estar bloqueado - Verifique F12 Ferramentas de desenvolvimento -> Console ou tente mais tarde.",
|
||||
"Gen_NetworkMask": "",
|
||||
"Gen_Offline": "Offline",
|
||||
|
||||
front/php/templates/language/pt_pt.json (Normal file → Executable file)
@@ -310,6 +310,7 @@
|
||||
"Gen_Error": "Erro",
|
||||
"Gen_Filter": "Filtro",
|
||||
"Gen_Generate": "Gerar",
|
||||
"Gen_Invalid_Value": "",
|
||||
"Gen_InvalidMac": "Endereço MAC Inválido.",
|
||||
"Gen_LockedDB": "ERRO - A base de dados pode estar bloqueada - Verifique F12 Ferramentas de desenvolvimento -> Console ou tente mais tarde.",
|
||||
"Gen_NetworkMask": "Máscara de Rede",
|
||||
|
||||
@@ -311,6 +311,7 @@
|
||||
"Gen_Filter": "Фильтр",
|
||||
"Gen_Generate": "Генерировать",
|
||||
"Gen_InvalidMac": "Неверный Mac-адрес.",
|
||||
"Gen_Invalid_Value": "Введено некорректное значение",
|
||||
"Gen_LockedDB": "ОШИБКА - Возможно, база данных заблокирована. Проверьте инструменты разработчика F12 -> Консоль или повторите попытку позже.",
|
||||
"Gen_NetworkMask": "Маска сети",
|
||||
"Gen_Offline": "Оффлайн",
|
||||
|
||||
@@ -311,6 +311,7 @@
|
||||
"Gen_Filter": "",
|
||||
"Gen_Generate": "",
|
||||
"Gen_InvalidMac": "",
|
||||
"Gen_Invalid_Value": "",
|
||||
"Gen_LockedDB": "",
|
||||
"Gen_NetworkMask": "",
|
||||
"Gen_Offline": "",
|
||||
|
||||
@@ -311,6 +311,7 @@
|
||||
"Gen_Filter": "Filtre",
|
||||
"Gen_Generate": "Oluştur",
|
||||
"Gen_InvalidMac": "",
|
||||
"Gen_Invalid_Value": "",
|
||||
"Gen_LockedDB": "HATA - Veritabanı kilitlenmiş olabilir - F12 Geliştirici araçlarını -> Konsol kısmını kontrol edin veya daha sonra tekrar deneyin.",
|
||||
"Gen_NetworkMask": "",
|
||||
"Gen_Offline": "Çevrimdışı",
|
||||
|
||||
front/php/templates/language/uk_ua.json (Executable file → Normal file)
@@ -311,6 +311,7 @@
|
||||
"Gen_Filter": "Фільтр",
|
||||
"Gen_Generate": "Генерувати",
|
||||
"Gen_InvalidMac": "Недійсна Mac-адреса.",
|
||||
"Gen_Invalid_Value": "Введено недійсне значення",
|
||||
"Gen_LockedDB": "ПОМИЛКА – БД може бути заблоковано – перевірте F12 Інструменти розробника -> Консоль або спробуйте пізніше.",
|
||||
"Gen_NetworkMask": "Маска мережі",
|
||||
"Gen_Offline": "Офлайн",
|
||||
|
||||
@@ -311,6 +311,7 @@
|
||||
"Gen_Filter": "筛选",
|
||||
"Gen_Generate": "生成",
|
||||
"Gen_InvalidMac": "无效的 Mac 地址。",
|
||||
"Gen_Invalid_Value": "",
|
||||
"Gen_LockedDB": "错误 - DB 可能被锁定 - 检查 F12 开发工具 -> 控制台或稍后重试。",
|
||||
"Gen_NetworkMask": "网络掩码",
|
||||
"Gen_Offline": "离线",
|
||||
|
||||
@@ -91,7 +91,7 @@
|
||||
"elementType": "input",
|
||||
"elementOptions": [
|
||||
{
|
||||
"onChange": "validateRegex(this)"
|
||||
"focusout": "validateRegex(this)"
|
||||
},
|
||||
{
|
||||
"base64Regex": "Xig/OlwqfCg/OlswLTldfFsxLTVdWzAtOV18WzAtOV0rLVswLTldK3xcKi9bMC05XSspKVxzKyg/OlwqfCg/OlswLTldfDFbMC05XXwyWzAtM118WzAtOV0rLVswLTldK3xcKi9bMC05XSspKVxzKyg/OlwqfCg/OlsxLTldfFsxMl1bMC05XXwzWzAxXXxbMC05XSstWzAtOV0rfFwqL1swLTldKykpXHMrKD86XCp8KD86WzEtOV18MVswLTJdfFswLTldKy1bMC05XSt8XCovWzAtOV0rKSlccysoPzpcKnwoPzpbMC02XXxbMC02XS1bMC02XXxcKi9bMC05XSspKSQ="
|
||||
|
||||
@@ -377,7 +377,7 @@
|
||||
"elementType": "input",
|
||||
"elementOptions": [
|
||||
{
|
||||
"onChange": "validateRegex(this)"
|
||||
"focusout": "validateRegex(this)"
|
||||
},
|
||||
{
|
||||
"base64Regex": "Xig/OlwqfCg/OlswLTldfFsxLTVdWzAtOV18WzAtOV0rLVswLTldK3xcKi9bMC05XSspKVxzKyg/OlwqfCg/OlswLTldfDFbMC05XXwyWzAtM118WzAtOV0rLVswLTldK3xcKi9bMC05XSspKVxzKyg/OlwqfCg/OlsxLTldfFsxMl1bMC05XXwzWzAxXXxbMC05XSstWzAtOV0rfFwqL1swLTldKykpXHMrKD86XCp8KD86WzEtOV18MVswLTJdfFswLTldKy1bMC05XSt8XCovWzAtOV0rKSlccysoPzpcKnwoPzpbMC02XXxbMC02XS1bMC02XXxcKi9bMC05XSspKSQ="
|
||||
|
||||
@@ -225,7 +225,7 @@
|
||||
"elementType": "input",
|
||||
"elementOptions": [
|
||||
{
|
||||
"onChange": "validateRegex(this)"
|
||||
"focusout": "validateRegex(this)"
|
||||
},
|
||||
{
|
||||
"base64Regex": "Xig/OlwqfCg/OlswLTldfFsxLTVdWzAtOV18WzAtOV0rLVswLTldK3xcKi9bMC05XSspKVxzKyg/OlwqfCg/OlswLTldfDFbMC05XXwyWzAtM118WzAtOV0rLVswLTldK3xcKi9bMC05XSspKVxzKyg/OlwqfCg/OlsxLTldfFsxMl1bMC05XXwzWzAxXXxbMC05XSstWzAtOV0rfFwqL1swLTldKykpXHMrKD86XCp8KD86WzEtOV18MVswLTJdfFswLTldKy1bMC05XSt8XCovWzAtOV0rKSlccysoPzpcKnwoPzpbMC02XXxbMC02XS1bMC02XXxcKi9bMC05XSspKSQ="
|
||||
|
||||
@@ -100,7 +100,7 @@
|
||||
"elementType": "input",
|
||||
"elementOptions": [
|
||||
{
|
||||
"onChange": "validateRegex(this)"
|
||||
"focusout": "validateRegex(this)"
|
||||
},
|
||||
{
|
||||
"base64Regex": "Xig/OlwqfCg/OlswLTldfFsxLTVdWzAtOV18WzAtOV0rLVswLTldK3xcKi9bMC05XSspKVxzKyg/OlwqfCg/OlswLTldfDFbMC05XXwyWzAtM118WzAtOV0rLVswLTldK3xcKi9bMC05XSspKVxzKyg/OlwqfCg/OlsxLTldfFsxMl1bMC05XXwzWzAxXXxbMC05XSstWzAtOV0rfFwqL1swLTldKykpXHMrKD86XCp8KD86WzEtOV18MVswLTJdfFswLTldKy1bMC05XSt8XCovWzAtOV0rKSlccysoPzpcKnwoPzpbMC02XXxbMC02XS1bMC02XXxcKi9bMC05XSspKSQ="
|
||||
|
||||
@@ -12,7 +12,6 @@ from plugin_helper import Plugin_Objects  # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger  # noqa: E402 [flake8 lint suppression]
from const import logPath  # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value  # noqa: E402 [flake8 lint suppression]
from models.device_instance import DeviceInstance  # noqa: E402 [flake8 lint suppression]
import conf  # noqa: E402 [flake8 lint suppression]
from pytz import timezone  # noqa: E402 [flake8 lint suppression]

@@ -98,9 +97,7 @@ def main():
            {"devMac": "00:11:22:33:44:57", "devLastIP": "192.168.1.82"},
        ]
    else:
        device_handler = DeviceInstance()
        devices = (
            device_handler.getAll()
            if get_setting_value("REFRESH_FQDN")
@@ -148,7 +148,7 @@
|
||||
"elementType": "input",
|
||||
"elementOptions": [
|
||||
{
|
||||
"onChange": "validateRegex(this)"
|
||||
"focusout": "validateRegex(this)"
|
||||
},
|
||||
{
|
||||
"base64Regex": "Xig/OlwqfCg/OlswLTldfFsxLTVdWzAtOV18WzAtOV0rLVswLTldK3xcKi9bMC05XSspKVxzKyg/OlwqfCg/OlswLTldfDFbMC05XXwyWzAtM118WzAtOV0rLVswLTldK3xcKi9bMC05XSspKVxzKyg/OlwqfCg/OlsxLTldfFsxMl1bMC05XXwzWzAxXXxbMC05XSstWzAtOV0rfFwqL1swLTldKykpXHMrKD86XCp8KD86WzEtOV18MVswLTJdfFswLTldKy1bMC05XSt8XCovWzAtOV0rKSlccysoPzpcKnwoPzpbMC02XXxbMC02XS1bMC02XXxcKi9bMC05XSspKSQ="
|
||||
|
||||
@@ -185,7 +185,7 @@
|
||||
"elementType": "input",
|
||||
"elementOptions": [
|
||||
{
|
||||
"onChange": "validateRegex(this)"
|
||||
"focusout": "validateRegex(this)"
|
||||
},
|
||||
{
|
||||
"base64Regex": "Xig/OlwqfCg/OlswLTldfFsxLTVdWzAtOV18WzAtOV0rLVswLTldK3xcKi9bMC05XSspKVxzKyg/OlwqfCg/OlswLTldfDFbMC05XXwyWzAtM118WzAtOV0rLVswLTldK3xcKi9bMC05XSspKVxzKyg/OlwqfCg/OlsxLTldfFsxMl1bMC05XXwzWzAxXXxbMC05XSstWzAtOV0rfFwqL1swLTldKykpXHMrKD86XCp8KD86WzEtOV18MVswLTJdfFswLTldKy1bMC05XSt8XCovWzAtOV0rKSlccysoPzpcKnwoPzpbMC02XXxbMC02XS1bMC02XXxcKi9bMC05XSspKSQ="
|
||||
|
||||
@@ -125,7 +125,7 @@
|
||||
"elementType": "input",
|
||||
"elementOptions": [
|
||||
{
|
||||
"onChange": "validateRegex(this)"
|
||||
"focusout": "validateRegex(this)"
|
||||
},
|
||||
{
|
||||
"base64Regex": "Xig/OlwqfCg/OlswLTldfFsxLTVdWzAtOV18WzAtOV0rLVswLTldK3xcKi9bMC05XSspKVxzKyg/OlwqfCg/OlswLTldfDFbMC05XXwyWzAtM118WzAtOV0rLVswLTldK3xcKi9bMC05XSspKVxzKyg/OlwqfCg/OlsxLTldfFsxMl1bMC05XXwzWzAxXXxbMC05XSstWzAtOV0rfFwqL1swLTldKykpXHMrKD86XCp8KD86WzEtOV18MVswLTJdfFswLTldKy1bMC05XSt8XCovWzAtOV0rKSlccysoPzpcKnwoPzpbMC02XXxbMC02XS1bMC02XXxcKi9bMC05XSspKSQ="
|
||||
|
||||
@@ -175,7 +175,7 @@
|
||||
"elementType": "input",
|
||||
"elementOptions": [
|
||||
{
|
||||
"onChange": "validateRegex(this)"
|
||||
"focusout": "validateRegex(this)"
|
||||
},
|
||||
{
|
||||
"base64Regex": "Xig/OlwqfCg/OlswLTldfFsxLTVdWzAtOV18WzAtOV0rLVswLTldK3xcKi9bMC05XSspKVxzKyg/OlwqfCg/OlswLTldfDFbMC05XXwyWzAtM118WzAtOV0rLVswLTldK3xcKi9bMC05XSspKVxzKyg/OlwqfCg/OlsxLTldfFsxMl1bMC05XXwzWzAxXXxbMC05XSstWzAtOV0rfFwqL1swLTldKykpXHMrKD86XCp8KD86WzEtOV18MVswLTJdfFswLTldKy1bMC05XSt8XCovWzAtOV0rKSlccysoPzpcKnwoPzpbMC02XXxbMC02XS1bMC02XXxcKi9bMC05XSspKSQ="
|
||||
|
||||
@@ -596,7 +596,7 @@
|
||||
"elementType": "input",
|
||||
"elementOptions": [
|
||||
{
|
||||
"onChange": "validateRegex(this)"
|
||||
"focusout": "validateRegex(this)"
|
||||
},
|
||||
{
|
||||
"base64Regex": "Xig/OlwqfCg/OlswLTldfFsxLTVdWzAtOV18WzAtOV0rLVswLTldK3xcKi9bMC05XSspKVxzKyg/OlwqfCg/OlswLTldfDFbMC05XXwyWzAtM118WzAtOV0rLVswLTldK3xcKi9bMC05XSspKVxzKyg/OlwqfCg/OlsxLTldfFsxMl1bMC05XXwzWzAxXXxbMC05XSstWzAtOV0rfFwqL1swLTldKykpXHMrKD86XCp8KD86WzEtOV18MVswLTJdfFswLTldKy1bMC05XSt8XCovWzAtOV0rKSlccysoPzpcKnwoPzpbMC02XXxbMC02XS1bMC02XXxcKi9bMC05XSspKSQ="
|
||||
|
||||
@@ -397,7 +397,7 @@
|
||||
"elementType": "input",
|
||||
"elementOptions": [
|
||||
{
|
||||
"onChange": "validateRegex(this)"
|
||||
"focusout": "validateRegex(this)"
|
||||
},
|
||||
{
|
||||
"base64Regex": "Xig/OlwqfCg/OlswLTldfFsxLTVdWzAtOV18WzAtOV0rLVswLTldK3xcKi9bMC05XSspKVxzKyg/OlwqfCg/OlswLTldfDFbMC05XXwyWzAtM118WzAtOV0rLVswLTldK3xcKi9bMC05XSspKVxzKyg/OlwqfCg/OlsxLTldfFsxMl1bMC05XXwzWzAxXXxbMC05XSstWzAtOV0rfFwqL1swLTldKykpXHMrKD86XCp8KD86WzEtOV18MVswLTJdfFswLTldKy1bMC05XSt8XCovWzAtOV0rKSlccysoPzpcKnwoPzpbMC02XXxbMC02XS1bMC02XXxcKi9bMC05XSspKSQ="
|
||||
|
||||
@@ -148,7 +148,7 @@
|
||||
"elementType": "input",
|
||||
"elementOptions": [
|
||||
{
|
||||
"onChange": "validateRegex(this)"
|
||||
"focusout": "validateRegex(this)"
|
||||
},
|
||||
{
|
||||
"base64Regex": "Xig/OlwqfCg/OlswLTldfFsxLTVdWzAtOV18WzAtOV0rLVswLTldK3xcKi9bMC05XSspKVxzKyg/OlwqfCg/OlswLTldfDFbMC05XXwyWzAtM118WzAtOV0rLVswLTldK3xcKi9bMC05XSspKVxzKyg/OlwqfCg/OlsxLTldfFsxMl1bMC05XXwzWzAxXXxbMC05XSstWzAtOV0rfFwqL1swLTldKykpXHMrKD86XCp8KD86WzEtOV18MVswLTJdfFswLTldKy1bMC05XSt8XCovWzAtOV0rKSlccysoPzpcKnwoPzpbMC02XXxbMC02XS1bMC02XXxcKi9bMC05XSspKSQ="
|
||||
|
||||
@@ -11,7 +11,6 @@ from plugin_helper import Plugin_Objects # noqa: E402 [flake8 lint suppression]
|
||||
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
|
||||
from const import logPath # noqa: E402 [flake8 lint suppression]
|
||||
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
|
||||
from database import DB # noqa: E402 [flake8 lint suppression]
|
||||
from models.device_instance import DeviceInstance # noqa: E402 [flake8 lint suppression]
|
||||
import conf # noqa: E402 [flake8 lint suppression]
|
||||
from pytz import timezone # noqa: E402 [flake8 lint suppression]
|
||||
@@ -38,15 +37,11 @@ def main():
|
||||
|
||||
timeout = get_setting_value('DIGSCAN_RUN_TIMEOUT')
|
||||
|
||||
# Create a database connection
|
||||
db = DB() # instance of class DB
|
||||
db.open()
|
||||
|
||||
# Initialize the Plugin obj output file
|
||||
plugin_objects = Plugin_Objects(RESULT_FILE)
|
||||
|
||||
# Create a DeviceInstance instance
|
||||
device_handler = DeviceInstance(db)
|
||||
device_handler = DeviceInstance()
|
||||
|
||||
# Retrieve devices
|
||||
if get_setting_value("REFRESH_FQDN"):
|
||||
|
||||
@@ -103,7 +103,7 @@
|
||||
"elementType": "input",
|
||||
"elementOptions": [
|
||||
{
|
||||
"onChange": "validateRegex(this)"
|
||||
"focusout": "validateRegex(this)"
|
||||
},
|
||||
{
|
||||
"base64Regex": "Xig/OlwqfCg/OlswLTldfFsxLTVdWzAtOV18WzAtOV0rLVswLTldK3xcKi9bMC05XSspKVxzKyg/OlwqfCg/OlswLTldfDFbMC05XXwyWzAtM118WzAtOV0rLVswLTldK3xcKi9bMC05XSspKVxzKyg/OlwqfCg/OlsxLTldfFsxMl1bMC05XXwzWzAxXXxbMC05XSstWzAtOV0rfFwqL1swLTldKykpXHMrKD86XCp8KD86WzEtOV18MVswLTJdfFswLTldKy1bMC05XSt8XCovWzAtOV0rKSlccysoPzpcKnwoPzpbMC02XXxbMC02XS1bMC02XXxcKi9bMC05XSspKSQ="
|
||||
|
||||
@@ -180,7 +180,7 @@
|
||||
"elementType": "input",
|
||||
"elementOptions": [
|
||||
{
|
||||
"onChange": "validateRegex(this)"
|
||||
"focusout": "validateRegex(this)"
|
||||
},
|
||||
{
|
||||
"base64Regex": "Xig/OlwqfCg/OlswLTldfFsxLTVdWzAtOV18WzAtOV0rLVswLTldK3xcKi9bMC05XSspKVxzKyg/OlwqfCg/OlswLTldfDFbMC05XXwyWzAtM118WzAtOV0rLVswLTldK3xcKi9bMC05XSspKVxzKyg/OlwqfCg/OlsxLTldfFsxMl1bMC05XXwzWzAxXXxbMC05XSstWzAtOV0rfFwqL1swLTldKykpXHMrKD86XCp8KD86WzEtOV18MVswLTJdfFswLTldKy1bMC05XSt8XCovWzAtOV0rKSlccysoPzpcKnwoPzpbMC02XXxbMC02XS1bMC02XXxcKi9bMC05XSspKSQ="
|
||||
|
||||
@@ -15,7 +15,6 @@ from plugin_helper import Plugin_Objects # noqa: E402 [flake8 lint suppression]
|
||||
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
|
||||
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
|
||||
from const import logPath # noqa: E402 [flake8 lint suppression]
|
||||
from database import DB # noqa: E402 [flake8 lint suppression]
|
||||
from models.device_instance import DeviceInstance # noqa: E402 [flake8 lint suppression]
|
||||
import conf # noqa: E402 [flake8 lint suppression]
|
||||
from pytz import timezone # noqa: E402 [flake8 lint suppression]
|
||||
@@ -41,15 +40,11 @@ def main():
|
||||
args = get_setting_value('ICMP_ARGS')
|
||||
in_regex = get_setting_value('ICMP_IN_REGEX')
|
||||
|
||||
# Create a database connection
|
||||
db = DB() # instance of class DB
|
||||
db.open()
|
||||
|
||||
# Initialize the Plugin obj output file
|
||||
plugin_objects = Plugin_Objects(RESULT_FILE)
|
||||
|
||||
# Create a DeviceInstance instance
|
||||
device_handler = DeviceInstance(db)
|
||||
device_handler = DeviceInstance()
|
||||
|
||||
# Retrieve devices
|
||||
all_devices = device_handler.getAll()
|
||||
|
||||
@@ -203,7 +203,7 @@
|
||||
"elementType": "input",
|
||||
"elementOptions": [
|
||||
{
|
||||
"onChange": "validateRegex(this)"
|
||||
"focusout": "validateRegex(this)"
|
||||
},
|
||||
{
|
||||
"base64Regex": "Xig/OlwqfCg/OlswLTldfFsxLTVdWzAtOV18WzAtOV0rLVswLTldK3xcKi9bMC05XSspKVxzKyg/OlwqfCg/OlswLTldfDFbMC05XXwyWzAtM118WzAtOV0rLVswLTldK3xcKi9bMC05XSspKVxzKyg/OlwqfCg/OlsxLTldfFsxMl1bMC05XXwzWzAxXXxbMC05XSstWzAtOV0rfFwqL1swLTldKykpXHMrKD86XCp8KD86WzEtOV18MVswLTJdfFswLTldKy1bMC05XSt8XCovWzAtOV0rKSlccysoPzpcKnwoPzpbMC02XXxbMC02XS1bMC02XXxcKi9bMC05XSspKSQ="
|
||||
|
||||
@@ -468,7 +468,7 @@
|
||||
"elementType": "input",
|
||||
"elementOptions": [
|
||||
{
|
||||
"onChange": "validateRegex(this)"
|
||||
"focusout": "validateRegex(this)"
|
||||
},
|
||||
{
|
||||
"base64Regex": "Xig/OlwqfCg/OlswLTldfFsxLTVdWzAtOV18WzAtOV0rLVswLTldK3xcKi9bMC05XSspKVxzKyg/OlwqfCg/OlswLTldfDFbMC05XXwyWzAtM118WzAtOV0rLVswLTldK3xcKi9bMC05XSspKVxzKyg/OlwqfCg/OlsxLTldfFsxMl1bMC05XXwzWzAxXXxbMC05XSstWzAtOV0rfFwqL1swLTldKykpXHMrKD86XCp8KD86WzEtOV18MVswLTJdfFswLTldKy1bMC05XSt8XCovWzAtOV0rKSlccysoPzpcKnwoPzpbMC02XXxbMC02XS1bMC02XXxcKi9bMC05XSspKSQ="
|
||||
|
||||
@@ -103,7 +103,7 @@
|
||||
"elementType": "input",
|
||||
"elementOptions": [
|
||||
{
|
||||
"onChange": "validateRegex(this)"
|
||||
"focusout": "validateRegex(this)"
|
||||
},
|
||||
{
|
||||
"base64Regex": "Xig/OlwqfCg/OlswLTldfFsxLTVdWzAtOV18WzAtOV0rLVswLTldK3xcKi9bMC05XSspKVxzKyg/OlwqfCg/OlswLTldfDFbMC05XXwyWzAtM118WzAtOV0rLVswLTldK3xcKi9bMC05XSspKVxzKyg/OlwqfCg/OlsxLTldfFsxMl1bMC05XXwzWzAxXXxbMC05XSstWzAtOV0rfFwqL1swLTldKykpXHMrKD86XCp8KD86WzEtOV18MVswLTJdfFswLTldKy1bMC05XSt8XCovWzAtOV0rKSlccysoPzpcKnwoPzpbMC02XXxbMC02XS1bMC02XXxcKi9bMC05XSspKSQ="
|
||||
|
||||
@@ -130,7 +130,7 @@
|
||||
"elementType": "input",
|
||||
"elementOptions": [
|
||||
{
|
||||
"onChange": "validateRegex(this)"
|
||||
"focusout": "validateRegex(this)"
|
||||
},
|
||||
{
|
||||
"base64Regex": "Xig/OlwqfCg/OlswLTldfFsxLTVdWzAtOV18WzAtOV0rLVswLTldK3xcKi9bMC05XSspKVxzKyg/OlwqfCg/OlswLTldfDFbMC05XXwyWzAtM118WzAtOV0rLVswLTldK3xcKi9bMC05XSspKVxzKyg/OlwqfCg/OlsxLTldfFsxMl1bMC05XXwzWzAxXXxbMC05XSstWzAtOV0rfFwqL1swLTldKykpXHMrKD86XCp8KD86WzEtOV18MVswLTJdfFswLTldKy1bMC05XSt8XCovWzAtOV0rKSlccysoPzpcKnwoPzpbMC02XXxbMC02XS1bMC02XXxcKi9bMC05XSspKSQ="
|
||||
|
||||
@@ -130,7 +130,7 @@
|
||||
"elementType": "input",
|
||||
"elementOptions": [
|
||||
{
|
||||
"onChange": "validateRegex(this)"
|
||||
"focusout": "validateRegex(this)"
|
||||
},
|
||||
{
|
||||
"base64Regex": "Xig/OlwqfCg/OlswLTldfFsxLTVdWzAtOV18WzAtOV0rLVswLTldK3xcKi9bMC05XSspKVxzKyg/OlwqfCg/OlswLTldfDFbMC05XXwyWzAtM118WzAtOV0rLVswLTldK3xcKi9bMC05XSspKVxzKyg/OlwqfCg/OlsxLTldfFsxMl1bMC05XXwzWzAxXXxbMC05XSstWzAtOV0rfFwqL1swLTldKykpXHMrKD86XCp8KD86WzEtOV18MVswLTJdfFswLTldKy1bMC05XSt8XCovWzAtOV0rKSlccysoPzpcKnwoPzpbMC02XXxbMC02XS1bMC02XXxcKi9bMC05XSspKSQ="
|
||||
|
||||
@@ -148,7 +148,7 @@
|
||||
"elementType": "input",
|
||||
"elementOptions": [
|
||||
{
|
||||
"onChange": "validateRegex(this)"
|
||||
"focusout": "validateRegex(this)"
|
||||
},
|
||||
{
|
||||
"base64Regex": "Xig/OlwqfCg/OlswLTldfFsxLTVdWzAtOV18WzAtOV0rLVswLTldK3xcKi9bMC05XSspKVxzKyg/OlwqfCg/OlswLTldfDFbMC05XXwyWzAtM118WzAtOV0rLVswLTldK3xcKi9bMC05XSspKVxzKyg/OlwqfCg/OlsxLTldfFsxMl1bMC05XXwzWzAxXXxbMC05XSstWzAtOV0rfFwqL1swLTldKykpXHMrKD86XCp8KD86WzEtOV18MVswLTJdfFswLTldKy1bMC05XSt8XCovWzAtOV0rKSlccysoPzpcKnwoPzpbMC02XXxbMC02XS1bMC02XXxcKi9bMC05XSspKSQ="
|
||||
|
||||
@@ -12,7 +12,6 @@ from plugin_helper import Plugin_Objects # noqa: E402 [flake8 lint suppression]
|
||||
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
|
||||
from const import logPath # noqa: E402 [flake8 lint suppression]
|
||||
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
|
||||
from database import DB # noqa: E402 [flake8 lint suppression]
|
||||
from models.device_instance import DeviceInstance # noqa: E402 [flake8 lint suppression]
|
||||
import conf # noqa: E402 [flake8 lint suppression]
|
||||
from pytz import timezone # noqa: E402 [flake8 lint suppression]
|
||||
@@ -40,15 +39,11 @@ def main():
|
||||
# timeout = get_setting_value('NBLOOKUP_RUN_TIMEOUT')
|
||||
timeout = 20
|
||||
|
||||
# Create a database connection
|
||||
db = DB() # instance of class DB
|
||||
db.open()
|
||||
|
||||
# Initialize the Plugin obj output file
|
||||
plugin_objects = Plugin_Objects(RESULT_FILE)
|
||||
|
||||
# Create a DeviceInstance instance
|
||||
device_handler = DeviceInstance(db)
|
||||
device_handler = DeviceInstance()
|
||||
|
||||
# Retrieve devices
|
||||
if get_setting_value("REFRESH_FQDN"):
|
||||
|
||||
@@ -227,7 +227,7 @@
|
||||
"elementType": "input",
|
||||
"elementOptions": [
|
||||
{
|
||||
"onChange": "validateRegex(this)"
|
||||
"focusout": "validateRegex(this)"
|
||||
},
|
||||
{
|
||||
"base64Regex": "Xig/OlwqfCg/OlswLTldfFsxLTVdWzAtOV18WzAtOV0rLVswLTldK3xcKi9bMC05XSspKVxzKyg/OlwqfCg/OlswLTldfDFbMC05XXwyWzAtM118WzAtOV0rLVswLTldK3xcKi9bMC05XSspKVxzKyg/OlwqfCg/OlsxLTldfFsxMl1bMC05XXwzWzAxXXxbMC05XSstWzAtOV0rfFwqL1swLTldKykpXHMrKD86XCp8KD86WzEtOV18MVswLTJdfFswLTldKy1bMC05XSt8XCovWzAtOV0rKSlccysoPzpcKnwoPzpbMC02XXxbMC02XS1bMC02XXxcKi9bMC05XSspKSQ="
|
||||
|
||||
@@ -5,7 +5,6 @@
|
||||
import os
|
||||
import subprocess
|
||||
import sys
|
||||
import hashlib
|
||||
import re
|
||||
import nmap
|
||||
|
||||
@@ -17,6 +16,7 @@ from plugin_helper import Plugin_Objects # noqa: E402 [flake8 lint suppression]
|
||||
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
|
||||
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
|
||||
from const import logPath # noqa: E402 [flake8 lint suppression]
|
||||
from utils.crypto_utils import string_to_mac_hash # noqa: E402 [flake8 lint suppression]
|
||||
import conf # noqa: E402 [flake8 lint suppression]
|
||||
from pytz import timezone # noqa: E402 [flake8 lint suppression]
|
||||
|
||||
@@ -177,16 +177,6 @@ def parse_nmap_xml(xml_output, interface, fakeMac):
|
||||
return devices_list
|
||||
|
||||
|
||||
def string_to_mac_hash(input_string):
|
||||
# Calculate a hash using SHA-256
|
||||
sha256_hash = hashlib.sha256(input_string.encode()).hexdigest()
|
||||
|
||||
# Take the first 12 characters of the hash and format as a MAC address
|
||||
mac_hash = ':'.join(sha256_hash[i:i + 2] for i in range(0, 12, 2))
|
||||
|
||||
return mac_hash
|
||||
|
||||
|
||||
# ===============================================================================
|
||||
# BEGIN
|
||||
# ===============================================================================
|
||||
|
||||
@@ -476,7 +476,7 @@
|
||||
"elementType": "input",
|
||||
"elementOptions": [
|
||||
{
|
||||
"onChange": "validateRegex(this)"
|
||||
"focusout": "validateRegex(this)"
|
||||
},
|
||||
{
|
||||
"base64Regex": "Xig/OlwqfCg/OlswLTldfFsxLTVdWzAtOV18WzAtOV0rLVswLTldK3xcKi9bMC05XSspKVxzKyg/OlwqfCg/OlswLTldfDFbMC05XXwyWzAtM118WzAtOV0rLVswLTldK3xcKi9bMC05XSspKVxzKyg/OlwqfCg/OlsxLTldfFsxMl1bMC05XXwzWzAxXXxbMC05XSstWzAtOV0rfFwqL1swLTldKykpXHMrKD86XCp8KD86WzEtOV18MVswLTJdfFswLTldKy1bMC05XSt8XCovWzAtOV0rKSlccysoPzpcKnwoPzpbMC02XXxbMC02XS1bMC02XXxcKi9bMC05XSspKSQ="
|
||||
|
||||
@@ -148,7 +148,7 @@
|
||||
"elementType": "input",
|
||||
"elementOptions": [
|
||||
{
|
||||
"onChange": "validateRegex(this)"
|
||||
"focusout": "validateRegex(this)"
|
||||
},
|
||||
{
|
||||
"base64Regex": "Xig/OlwqfCg/OlswLTldfFsxLTVdWzAtOV18WzAtOV0rLVswLTldK3xcKi9bMC05XSspKVxzKyg/OlwqfCg/OlswLTldfDFbMC05XXwyWzAtM118WzAtOV0rLVswLTldK3xcKi9bMC05XSspKVxzKyg/OlwqfCg/OlsxLTldfFsxMl1bMC05XXwzWzAxXXxbMC05XSstWzAtOV0rfFwqL1swLTldKykpXHMrKD86XCp8KD86WzEtOV18MVswLTJdfFswLTldKy1bMC05XSt8XCovWzAtOV0rKSlccysoPzpcKnwoPzpbMC02XXxbMC02XS1bMC02XXxcKi9bMC05XSspKSQ="
|
||||
|
||||
@@ -15,7 +15,6 @@ from plugin_helper import Plugin_Objects # noqa: E402 [flake8 lint suppression]
|
||||
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
|
||||
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
|
||||
from const import logPath # noqa: E402 [flake8 lint suppression]
|
||||
from database import DB # noqa: E402 [flake8 lint suppression]
|
||||
from models.device_instance import DeviceInstance # noqa: E402 [flake8 lint suppression]
|
||||
import conf # noqa: E402 [flake8 lint suppression]
|
||||
from pytz import timezone # noqa: E402 [flake8 lint suppression]
|
||||
@@ -39,15 +38,11 @@ def main():
|
||||
|
||||
timeout = get_setting_value('NSLOOKUP_RUN_TIMEOUT')
|
||||
|
||||
# Create a database connection
|
||||
db = DB() # instance of class DB
|
||||
db.open()
|
||||
|
||||
# Initialize the Plugin obj output file
|
||||
plugin_objects = Plugin_Objects(RESULT_FILE)
|
||||
|
||||
# Create a DeviceInstance instance
|
||||
device_handler = DeviceInstance(db)
|
||||
device_handler = DeviceInstance()
|
||||
|
||||
# Retrieve devices
|
||||
if get_setting_value("REFRESH_FQDN"):
|
||||
|
||||
@@ -92,7 +92,7 @@
|
||||
"elementType": "input",
|
||||
"elementOptions": [
|
||||
{
|
||||
"onChange": "validateRegex(this)"
|
||||
"focusout": "validateRegex(this)"
|
||||
},
|
||||
{
|
||||
"base64Regex": "Xig/OlwqfCg/OlswLTldfFsxLTVdWzAtOV18WzAtOV0rLVswLTldK3xcKi9bMC05XSspKVxzKyg/OlwqfCg/OlswLTldfDFbMC05XXwyWzAtM118WzAtOV0rLVswLTldK3xcKi9bMC05XSspKVxzKyg/OlwqfCg/OlsxLTldfFsxMl1bMC05XXwzWzAxXXxbMC05XSstWzAtOV0rfFwqL1swLTldKykpXHMrKD86XCp8KD86WzEtOV18MVswLTJdfFswLTldKy1bMC05XSt8XCovWzAtOV0rKSlccysoPzpcKnwoPzpbMC02XXxbMC02XS1bMC02XXxcKi9bMC05XSspKSQ="
|
||||
|
||||
@@ -256,13 +256,11 @@ def main():
|
||||
start_time = time.time()
|
||||
|
||||
mylog("verbose", [f"[{pluginName}] starting execution"])
|
||||
from database import DB
|
||||
|
||||
from models.device_instance import DeviceInstance
|
||||
|
||||
db = DB() # instance of class DB
|
||||
db.open()
|
||||
# Create a DeviceInstance instance
|
||||
device_handler = DeviceInstance(db)
|
||||
device_handler = DeviceInstance()
|
||||
# Retrieve configuration settings
|
||||
# these should be self-explanatory
|
||||
omada_sites = []
|
||||
|
||||
@@ -84,7 +84,7 @@
|
||||
"elementType": "input",
|
||||
"elementOptions": [
|
||||
{
|
||||
"onChange": "validateRegex(this)"
|
||||
"focusout": "validateRegex(this)"
|
||||
},
|
||||
{
|
||||
"base64Regex": "Xig/OlwqfCg/OlswLTldfFsxLTVdWzAtOV18WzAtOV0rLVswLTldK3xcKi9bMC05XSspKVxzKyg/OlwqfCg/OlswLTldfDFbMC05XXwyWzAtM118WzAtOV0rLVswLTldK3xcKi9bMC05XSspKVxzKyg/OlwqfCg/OlsxLTldfFsxMl1bMC05XXwzWzAxXXxbMC05XSstWzAtOV0rfFwqL1swLTldKykpXHMrKD86XCp8KD86WzEtOV18MVswLTJdfFswLTldKy1bMC05XSt8XCovWzAtOV0rKSlccysoPzpcKnwoPzpbMC02XXxbMC02XS1bMC02XXxcKi9bMC05XSspKSQ="
|
||||
|
||||
@@ -13,9 +13,6 @@ The plugin connects to your Pi-hole’s API and retrieves:

NetAlertX then uses this information to match or create devices in your system.

### Quick setup guide

* You are running **Pi-hole v6** or newer.
@@ -30,21 +27,13 @@ No additional Pi-hole configuration is required.

| Setting Key                  | Description                                                                      |
| ---------------------------- | -------------------------------------------------------------------------------- |
| **PIHOLEAPI_URL**            | Your Pi-hole base URL.                                                           |
| **PIHOLEAPI_PASSWORD**       | The Web UI admin password, base64 encoded (en-/decoding is handled by the app).  |
| **PIHOLEAPI_SSL_VERIFY**     | Whether to verify HTTPS certificates. Disable only for self-signed certificates. |
| **PIHOLEAPI_RUN_TIMEOUT**    | Request timeout in seconds.                                                      |
| **PIHOLEAPI_API_MAXCLIENTS** | Maximum number of devices to request from Pi-hole. Defaults are usually fine.    |
| **PIHOLEAPI_FAKE_MAC**       | Generate a fake MAC from the IP for devices that report no MAC.                  |

### Example Configuration

| Setting Key                  | Sample Value                                        |
| ---------------------------- | --------------------------------------------------- |
| **PIHOLEAPI_URL**            | `http://pi.hole/`                                   |
| **PIHOLEAPI_PASSWORD**       | `passw0rd`                                          |
| **PIHOLEAPI_SSL_VERIFY**     | `true`                                              |
| **PIHOLEAPI_RUN_TIMEOUT**    | `30`                                                |
| **PIHOLEAPI_API_MAXCLIENTS** | `500`                                               |

### ⚠️ Troubleshooting
@@ -110,6 +99,32 @@ Then re-run the plugin.

---

#### ❌ Some devices are missing

Check:

* Pi-hole shows devices under **Settings → Network**
* NetAlertX logs contain:

```
[PIHOLEAPI] Skipping invalid MAC (see PIHOLEAPI_FAKE_MAC setting) ...
```

If devices are missing:

* The app skips devices with invalid MACs.
* Enable PIHOLEAPI_FAKE_MAC if you want to import these devices with a fake MAC and you are not concerned about data inconsistencies later on.

Try enabling PIHOLEAPI_FAKE_MAC:

```
PIHOLEAPI_FAKE_MAC = 1
```

Then re-run the plugin.

---

#### ❌ Wrong or missing hostnames

Pi-hole only reports names it knows from:
@@ -89,7 +89,7 @@
|
||||
"elementType": "input",
|
||||
"elementOptions": [
|
||||
{
|
||||
"onChange": "validateRegex(this)"
|
||||
"focusout": "validateRegex(this)"
|
||||
},
|
||||
{
|
||||
"base64Regex": "Xig/OlwqfCg/OlswLTldfFsxLTVdWzAtOV18WzAtOV0rLVswLTldK3xcKi9bMC05XSspKVxzKyg/OlwqfCg/OlswLTldfDFbMC05XXwyWzAtM118WzAtOV0rLVswLTldK3xcKi9bMC05XSspKVxzKyg/OlwqfCg/OlsxLTldfFsxMl1bMC05XXwzWzAxXXxbMC05XSstWzAtOV0rfFwqL1swLTldKykpXHMrKD86XCp8KD86WzEtOV18MVswLTJdfFswLTldKy1bMC05XSt8XCovWzAtOV0rKSlccysoPzpcKnwoPzpbMC02XXxbMC02XS1bMC02XXxcKi9bMC05XSspKSQ="
|
||||
@@ -279,6 +279,41 @@
|
||||
"string": "Maximum time in seconds to wait for the script to finish. If this time is exceeded the script is aborted."
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"function": "FAKE_MAC",
|
||||
"type": {
|
||||
"dataType": "boolean",
|
||||
"elements": [
|
||||
{
|
||||
"elementType": "input",
|
||||
"elementOptions": [
|
||||
{
|
||||
"type": "checkbox"
|
||||
}
|
||||
],
|
||||
"transformers": []
|
||||
}
|
||||
]
|
||||
},
|
||||
"default_value": false,
|
||||
"options": [],
|
||||
"localized": [
|
||||
"name",
|
||||
"description"
|
||||
],
|
||||
"name": [
|
||||
{
|
||||
"language_code": "en_us",
|
||||
"string": "Fake MAC if empty"
|
||||
}
|
||||
],
|
||||
"description": [
|
||||
{
|
||||
"language_code": "en_us",
|
||||
"string": "Some PiHole devices don't have a MAC assigned. Enabling the FAKE_MAC setting generates a fake MAC address from the IP address to track devices, but it may cause inconsistencies if IPs change or devices are re-discovered with a different MAC. Static IPs are recommended. Device type and icon might not be detected correctly and some plugins might fail if they depend on a valid MAC address. When unchecked, devices with empty MAC addresses are skipped."
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
"database_column_definitions": [
|
||||
|
||||
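The fake-MAC derivation this setting enables is the same hashing trick as the project's `string_to_mac_hash()` helper (SHA-256 of the input, first 12 hex characters formatted as a MAC). A JavaScript sketch of the idea, for illustration only:

```
// Node.js sketch: hash the IP, keep the first 12 hex characters, format as aa:bb:cc:dd:ee:ff.
const { createHash } = require("node:crypto");

function ipToFakeMac(ip) {
  const hex = createHash("sha256").update(ip).digest("hex").slice(0, 12);
  return hex.match(/.{2}/g).join(":");
}

console.log(ipToFakeMac("192.168.1.82")); // deterministic pseudo-MAC for this IP
```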
@@ -23,6 +23,7 @@ from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
|
||||
from const import logPath # noqa: E402 [flake8 lint suppression]
|
||||
import conf # noqa: E402 [flake8 lint suppression]
|
||||
from pytz import timezone # noqa: E402 [flake8 lint suppression]
|
||||
from utils.crypto_utils import string_to_mac_hash # noqa: E402 [flake8 lint suppression]
|
||||
|
||||
# Setup timezone & logger using standard NAX helpers
|
||||
conf.tz = timezone(get_setting_value('TIMEZONE'))
|
||||
@@ -42,6 +43,7 @@ PIHOLEAPI_SES_CSRF = None
|
||||
PIHOLEAPI_API_MAXCLIENTS = None
|
||||
PIHOLEAPI_VERIFY_SSL = True
|
||||
PIHOLEAPI_RUN_TIMEOUT = 10
|
||||
PIHOLEAPI_FAKE_MAC = get_setting_value('PIHOLEAPI_FAKE_MAC')
|
||||
VERSION_DATE = "NAX-PIHOLEAPI-1.0"
|
||||
|
||||
|
||||
@@ -222,8 +224,14 @@ def gather_device_entries():
|
||||
if ip in iplist:
|
||||
lastQuery = str(now_ts)
|
||||
|
||||
tmpMac = hwaddr.lower()
|
||||
|
||||
# ensure fake mac if enabled
|
||||
if PIHOLEAPI_FAKE_MAC and is_mac(tmpMac) is False:
|
||||
tmpMac = string_to_mac_hash(ip)
|
||||
|
||||
entries.append({
|
||||
'mac': hwaddr.lower(),
|
||||
'mac': tmpMac,
|
||||
'ip': ip,
|
||||
'name': name,
|
||||
'macVendor': macVendor,
|
||||
@@ -281,7 +289,7 @@ def main():
|
||||
foreignKey=str(entry['mac'])
|
||||
)
|
||||
else:
|
||||
mylog('verbose', [f"[{pluginName}] Skipping invalid MAC: {entry['name']}|{entry['mac']}|{entry['ip']}"])
|
||||
mylog('verbose', [f"[{pluginName}] Skipping invalid MAC (see PIHOLEAPI_FAKE_MAC setting): {entry['name']}|{entry['mac']}|{entry['ip']}"])
|
||||
|
||||
# Write result file for NetAlertX to ingest
|
||||
plugin_objects.write_result_file()
|
||||
|
||||
@@ -182,7 +182,7 @@
|
||||
"elementType": "input",
|
||||
"elementOptions": [
|
||||
{
|
||||
"onChange": "validateRegex(this)"
|
||||
"focusout": "validateRegex(this)"
|
||||
},
|
||||
{
|
||||
"base64Regex": "Xig/OlwqfCg/OlswLTldfFsxLTVdWzAtOV18WzAtOV0rLVswLTldK3xcKi9bMC05XSspKVxzKyg/OlwqfCg/OlswLTldfDFbMC05XXwyWzAtM118WzAtOV0rLVswLTldK3xcKi9bMC05XSspKVxzKyg/OlwqfCg/OlsxLTldfFsxMl1bMC05XXwzWzAxXXxbMC05XSstWzAtOV0rfFwqL1swLTldKykpXHMrKD86XCp8KD86WzEtOV18MVswLTJdfFswLTldKy1bMC05XSt8XCovWzAtOV0rKSlccysoPzpcKnwoPzpbMC02XXxbMC02XS1bMC02XXxcKi9bMC05XSspKSQ="
|
||||
|
||||
@@ -14,6 +14,14 @@ Specify the following settings in the Settings section of NetAlertX:

If unsure, please check [snmpwalk examples](https://www.comparitech.com/net-admin/snmpwalk-examples-windows-linux/).

Supported output formats:

```
ipNetToMediaPhysAddress[3][192.168.1.9] 6C:6C:6C:6C:6C:b6C1
IP-MIB::ipNetToMediaPhysAddress.17.10.10.3.202 = STRING: f8:81:1a:ef:ef:ef
mib-2.3.1.1.2.15.1.192.168.1.14 "2C F4 32 18 61 43 "
```
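As an illustration of how the second format above maps to an (IP, MAC) pair, here is a small JavaScript sketch that mirrors the plugin's fallback parser (the plugin itself is Python):

```
// Parse "IP-MIB::ipNetToMediaPhysAddress.<idx>.<a>.<b>.<c>.<d> = STRING: aa:bb:cc:dd:ee:ff"
function parseIpMibLine(line) {
  if (!line.includes("ipNetToMediaPhysAddress") || !line.includes("STRING:")) return null;
  const [left, right] = line.split("=", 2);
  const mac = right.split("STRING:").pop().trim().toLowerCase();
  const ip = left.trim().split(".").slice(-4).join(".");   // last four OID components
  return { ip, mac };
}

console.log(parseIpMibLine(
  "IP-MIB::ipNetToMediaPhysAddress.17.10.10.3.202 = STRING: f8:81:1a:ef:ef:ef"
)); // { ip: '10.10.3.202', mac: 'f8:81:1a:ef:ef:ef' }
```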
### Setup Cisco IOS
|
||||
|
||||
Enable IOS SNMP service and restrict to selected (internal) IP/Subnet.
|
||||
|
||||
@@ -512,7 +512,7 @@
|
||||
"elementType": "input",
|
||||
"elementOptions": [
|
||||
{
|
||||
"onChange": "validateRegex(this)"
|
||||
"focusout": "validateRegex(this)"
|
||||
},
|
||||
{
|
||||
"base64Regex": "Xig/OlwqfCg/OlswLTldfFsxLTVdWzAtOV18WzAtOV0rLVswLTldK3xcKi9bMC05XSspKVxzKyg/OlwqfCg/OlswLTldfDFbMC05XXwyWzAtM118WzAtOV0rLVswLTldK3xcKi9bMC05XSspKVxzKyg/OlwqfCg/OlsxLTldfFsxMl1bMC05XXwzWzAxXXxbMC05XSstWzAtOV0rfFwqL1swLTldKykpXHMrKD86XCp8KD86WzEtOV18MVswLTJdfFswLTldKy1bMC05XSt8XCovWzAtOV0rKSlccysoPzpcKnwoPzpbMC02XXxbMC02XS1bMC02XXxcKi9bMC05XSspKSQ="
@@ -30,7 +30,7 @@ RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')

def main():

    mylog('verbose', ['[SNMPDSC] In script '])
    mylog('verbose', f"[{pluginName}] In script ")

    # init global variables
    global snmpWalkCmds
@@ -57,7 +57,7 @@ def main():
    commands = [snmpWalkCmds]

    for cmd in commands:
        mylog('verbose', ['[SNMPDSC] Router snmpwalk command: ', cmd])
        mylog('verbose', [f"[{pluginName}] Router snmpwalk command: ", cmd])
        # split the string, remove white spaces around each item, and exclude any empty strings
        snmpwalkArgs = [arg.strip() for arg in cmd.split(' ') if arg.strip()]

@@ -72,7 +72,7 @@ def main():
            timeout=(timeoutSetting)
        )

        mylog('verbose', ['[SNMPDSC] output: ', output])
        mylog('verbose', [f"[{pluginName}] output: ", output])

        lines = output.split('\n')

@@ -80,6 +80,8 @@ def main():

            tmpSplt = line.split('"')

            # Expected Format:
            # mib-2.3.1.1.2.15.1.192.168.1.14 "2C F4 32 18 61 43 "
            if len(tmpSplt) == 3:

                ipStr = tmpSplt[0].split('.')[-4:] # Get the last 4 elements to extract the IP
@@ -89,7 +91,7 @@ def main():
                    macAddress = ':'.join(macStr)
                    ipAddress = '.'.join(ipStr)

                    mylog('verbose', [f'[SNMPDSC] IP: {ipAddress} MAC: {macAddress}'])
                    mylog('verbose', [f"[{pluginName}] IP: {ipAddress} MAC: {macAddress}"])

                    plugin_objects.add_object(
                        primaryId = handleEmpty(macAddress),
@@ -100,8 +102,40 @@ def main():
                        foreignKey = handleEmpty(macAddress) # Use the primary ID as the foreign key
                    )
                else:
                    mylog('verbose', ['[SNMPDSC] ipStr does not seem to contain a valid IP:', ipStr])
                    mylog('verbose', [f"[{pluginName}] ipStr does not seem to contain a valid IP:", ipStr])

            # Expected Format:
            # IP-MIB::ipNetToMediaPhysAddress.17.10.10.3.202 = STRING: f8:81:1a:ef:ef:ef
            elif "ipNetToMediaPhysAddress" in line and "=" in line and "STRING:" in line:

                # Split on "=" → ["IP-MIB::ipNetToMediaPhysAddress.xxx.xxx.xxx.xxx ", " STRING: aa:bb:cc:dd:ee:ff"]
                left, right = line.split("=", 1)

                # Extract the MAC (right side)
                macAddress = right.split("STRING:")[-1].strip()
                macAddress = normalize_mac(macAddress)

                # Extract IP address from the left side
                # tail of the OID: last 4 integers = IPv4 address
                oid_parts = left.strip().split('.')
                ip_parts = oid_parts[-4:]
                ipAddress = ".".join(ip_parts)

                mylog('verbose', [f"[{pluginName}] (fallback) IP: {ipAddress} MAC: {macAddress}"])

                plugin_objects.add_object(
                    primaryId = handleEmpty(macAddress),
                    secondaryId = handleEmpty(ipAddress),
                    watched1 = '(unknown)',
                    watched2 = handleEmpty(snmpwalkArgs[6]),
                    extra = handleEmpty(line),
                    foreignKey = handleEmpty(macAddress)
                )

                continue

            # Expected Format:
            # ipNetToMediaPhysAddress[3][192.168.1.9] 6C:6C:6C:6C:6C:b6C1
            elif line.startswith('ipNetToMediaPhysAddress'):
                # Format: snmpwalk -OXsq output
                parts = line.split()
@@ -110,7 +144,7 @@ def main():
                ipAddress = parts[0].split('[')[-1][:-1]
                macAddress = normalize_mac(parts[1])

                mylog('verbose', [f'[SNMPDSC] IP: {ipAddress} MAC: {macAddress}'])
                mylog('verbose', [f"[{pluginName}] IP: {ipAddress} MAC: {macAddress}"])

                plugin_objects.add_object(
                    primaryId = handleEmpty(macAddress),
@@ -121,7 +155,7 @@ def main():
                    foreignKey = handleEmpty(macAddress)
                )

    mylog('verbose', ['[SNMPDSC] Entries found: ', len(plugin_objects)])
    mylog('verbose', [f"[{pluginName}] Entries found: ", len(plugin_objects)])

    plugin_objects.write_result_file()
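For context, a minimal, self-contained sketch of the fallback parsing introduced above. The sample line and the local `normalize` helper are illustrative stand-ins; the plugin itself uses `normalize_mac`, `handleEmpty` and `mylog` from its own helpers.

```python
# Self-contained sketch of the new ipNetToMediaPhysAddress fallback branch.

def normalize(mac: str) -> str:
    # stand-in for helper.normalize_mac: trim and lower-case the address
    return mac.strip().lower()


line = 'IP-MIB::ipNetToMediaPhysAddress.17.10.10.3.202 = STRING: f8:81:1a:ef:ef:ef'

if "ipNetToMediaPhysAddress" in line and "=" in line and "STRING:" in line:
    left, right = line.split("=", 1)
    mac_address = normalize(right.split("STRING:")[-1].strip())
    ip_address = ".".join(left.strip().split(".")[-4:])  # last 4 OID integers = IPv4
    print(ip_address, mac_address)  # -> 10.10.3.202 f8:81:1a:ef:ef:ef
```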
@@ -98,7 +98,7 @@
            "elementType": "input",
            "elementOptions": [
                {
                    "onChange": "validateRegex(this)"
                    "focusout": "validateRegex(this)"
                },
                {
                    "base64Regex": "Xig/OlwqfCg/OlswLTldfFsxLTVdWzAtOV18WzAtOV0rLVswLTldK3xcKi9bMC05XSspKVxzKyg/OlwqfCg/OlswLTldfDFbMC05XXwyWzAtM118WzAtOV0rLVswLTldK3xcKi9bMC05XSspKVxzKyg/OlwqfCg/OlsxLTldfFsxMl1bMC05XXwzWzAxXXxbMC05XSstWzAtOV0rfFwqL1swLTldKykpXHMrKD86XCp8KD86WzEtOV18MVswLTJdfFswLTldKy1bMC05XSt8XCovWzAtOV0rKSlccysoPzpcKnwoPzpbMC02XXxbMC02XS1bMC02XXxcKi9bMC05XSspKSQ="
@@ -100,7 +100,7 @@
            "elementType": "input",
            "elementOptions": [
                {
                    "onChange": "validateRegex(this)"
                    "focusout": "validateRegex(this)"
                },
                {
                    "base64Regex": "Xig/OlwqfCg/OlswLTldfFsxLTVdWzAtOV18WzAtOV0rLVswLTldK3xcKi9bMC05XSspKVxzKyg/OlwqfCg/OlswLTldfDFbMC05XXwyWzAtM118WzAtOV0rLVswLTldK3xcKi9bMC05XSspKVxzKyg/OlwqfCg/OlsxLTldfFsxMl1bMC05XXwzWzAxXXxbMC05XSstWzAtOV0rfFwqL1swLTldKykpXHMrKD86XCp8KD86WzEtOV18MVswLTJdfFswLTldKy1bMC05XSt8XCovWzAtOV0rKSlccysoPzpcKnwoPzpbMC02XXxbMC02XS1bMC02XXxcKi9bMC05XSspKSQ="
@@ -140,7 +140,7 @@
            "elementType": "input",
            "elementOptions": [
                {
                    "onChange": "validateRegex(this)"
                    "focusout": "validateRegex(this)"
                },
                {
                    "base64Regex": "Xig/OlwqfCg/OlswLTldfFsxLTVdWzAtOV18WzAtOV0rLVswLTldK3xcKi9bMC05XSspKVxzKyg/OlwqfCg/OlswLTldfDFbMC05XXwyWzAtM118WzAtOV0rLVswLTldK3xcKi9bMC05XSspKVxzKyg/OlwqfCg/OlsxLTldfFsxMl1bMC05XXwzWzAxXXxbMC05XSstWzAtOV0rfFwqL1swLTldKykpXHMrKD86XCp8KD86WzEtOV18MVswLTJdfFswLTldKy1bMC05XSt8XCovWzAtOV0rKSlccysoPzpcKnwoPzpbMC02XXxbMC02XS1bMC02XXxcKi9bMC05XSspKSQ="
@@ -90,7 +90,7 @@
            "elementType": "input",
            "elementOptions": [
                {
                    "onChange": "validateRegex(this)"
                    "focusout": "validateRegex(this)"
                },
                {
                    "base64Regex": "Xig/OlwqfCg/OlswLTldfFsxLTVdWzAtOV18WzAtOV0rLVswLTldK3xcKi9bMC05XSspKVxzKyg/OlwqfCg/OlswLTldfDFbMC05XXwyWzAtM118WzAtOV0rLVswLTldK3xcKi9bMC05XSspKVxzKyg/OlwqfCg/OlsxLTldfFsxMl1bMC05XXwzWzAxXXxbMC05XSstWzAtOV0rfFwqL1swLTldKykpXHMrKD86XCp8KD86WzEtOV18MVswLTJdfFswLTldKy1bMC05XSt8XCovWzAtOV0rKSlccysoPzpcKnwoPzpbMC02XXxbMC02XS1bMC02XXxcKi9bMC05XSspKSQ="
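The `base64Regex` value attached to these schedule fields appears to decode to a five-field cron-style validation pattern. A small sketch, in Python purely to make the encoded pattern concrete (the UI itself evaluates it in JavaScript via `validateRegex`):

```python
import base64
import re

# Decode the manifest's base64Regex and try it against sample schedule strings.
B64 = "Xig/OlwqfCg/OlswLTldfFsxLTVdWzAtOV18WzAtOV0rLVswLTldK3xcKi9bMC05XSspKVxzKyg/OlwqfCg/OlswLTldfDFbMC05XXwyWzAtM118WzAtOV0rLVswLTldK3xcKi9bMC05XSspKVxzKyg/OlwqfCg/OlsxLTldfFsxMl1bMC05XXwzWzAxXXxbMC05XSstWzAtOV0rfFwqL1swLTldKykpXHMrKD86XCp8KD86WzEtOV18MVswLTJdfFswLTldKy1bMC05XSt8XCovWzAtOV0rKSlccysoPzpcKnwoPzpbMC02XXxbMC02XS1bMC02XXxcKi9bMC05XSspKSQ="

pattern = re.compile(base64.b64decode(B64).decode("utf-8"))
print(pattern.pattern)                      # the decoded cron-style regex
print(bool(pattern.match("*/30 * * * *")))  # True  - every 30 minutes
print(bool(pattern.match("61 * * * *")))    # False - minute field out of range
```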
@@ -13,7 +13,6 @@ from plugin_helper import Plugin_Objects # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from const import logPath # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
from database import DB # noqa: E402 [flake8 lint suppression]
from models.device_instance import DeviceInstance # noqa: E402 [flake8 lint suppression]
import conf # noqa: E402 [flake8 lint suppression]

@@ -44,12 +43,8 @@ def main():

    mylog('verbose', [f'[{pluginName}] broadcast_ips value {broadcast_ips}'])

    # Create a database connection
    db = DB() # instance of class DB
    db.open()

    # Create a DeviceInstance instance
    device_handler = DeviceInstance(db)
    device_handler = DeviceInstance()

    # Retrieve devices
    if 'offline' in devices_to_wake:
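The change above removes the injected `DB` handle: `DeviceInstance()` is now expected to manage its own database access. A hypothetical sketch of that design; class, method and column names are illustrative, not NetAlertX's actual API.

```python
import sqlite3


class DeviceStore:
    """Illustrative stand-in for a DeviceInstance-style accessor.

    Hypothetical names throughout -- this only sketches the design change:
    the accessor owns its database handle, so callers no longer build and
    pass a DB object of their own.
    """

    def __init__(self, db_path: str = ":memory:") -> None:
        # the instance opens (and owns) its connection internally
        self._conn = sqlite3.connect(db_path)

    def get_offline(self) -> list:
        # hypothetical query; real table/column names will differ
        return self._conn.execute(
            "SELECT devMac, devLastIP FROM Devices WHERE devPresentLastScan = 0"
        ).fetchall()


# caller side: no more `db = DB(); db.open()` boilerplate
device_handler = DeviceStore()
```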
@@ -425,7 +425,7 @@
            "elementType": "input",
            "elementOptions": [
                {
                    "onChange": "validateRegex(this)"
                    "focusout": "validateRegex(this)"
                },
                {
                    "base64Regex": "Xig/OlwqfCg/OlswLTldfFsxLTVdWzAtOV18WzAtOV0rLVswLTldK3xcKi9bMC05XSspKVxzKyg/OlwqfCg/OlswLTldfDFbMC05XXwyWzAtM118WzAtOV0rLVswLTldK3xcKi9bMC05XSspKVxzKyg/OlwqfCg/OlsxLTldfFsxMl1bMC05XXwzWzAxXXxbMC05XSstWzAtOV0rfFwqL1swLTldKykpXHMrKD86XCp8KD86WzEtOV18MVswLTJdfFswLTldKy1bMC05XSt8XCovWzAtOV0rKSlccysoPzpcKnwoPzpbMC02XXxbMC02XS1bMC02XXxcKi9bMC05XSspKSQ="
@@ -578,7 +578,17 @@ $settingsJSON_DB = json_encode($settings, JSON_HEX_TAG | JSON_HEX_AMP | JSON_HEX
      setType = set["setType"]
      setCodeName = set["setKey"]

      settingsArray = collectSetting(prefix, setCodeName, setType, settingsArray)
      // settingsArray = collectSetting(prefix, setCodeName, setType, settingsArray)
      const collectSettingResult = collectSetting(prefix, setCodeName, setType, settingsArray);
      settingsArray = collectSettingResult.settingsArray;

      if (!collectSettingResult.dataIsValid) {
        msg = getString("Gen_Invalid_Value") + ": " + collectSettingResult.failedSettingKey;
        console.error(msg);
        showMessage (msg, 3000, "modal_red");
        // return early
        return;
      }
    });

    // sanity check to make sure settings were loaded & collected correctly
@@ -586,6 +596,7 @@ $settingsJSON_DB = json_encode($settings, JSON_HEX_TAG | JSON_HEX_AMP | JSON_HEX
    {

      console.log(settingsArray);
      console.log(settingsJSON_DB);
      console.log( JSON.stringify(settingsArray));
      // return; // 🐛 🔺
      // trigger a save settings event in the backend
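The reworked call assumes `collectSetting` now returns a result object (`settingsArray`, `dataIsValid`, `failedSettingKey`) rather than just the updated array. A sketch of that contract, written in Python only to illustrate the shape; the real implementation is the front-end JavaScript shown above, and the validator here is hypothetical.

```python
from dataclasses import dataclass


@dataclass
class CollectSettingResult:
    # mirrors the fields the new front-end code reads from collectSetting()
    settings_array: list
    data_is_valid: bool = True
    failed_setting_key: str = ""


def collect_setting(key: str, value: str, settings_array: list) -> CollectSettingResult:
    # hypothetical validator: reject empty values, otherwise append and accept
    if value == "":
        return CollectSettingResult(settings_array, data_is_valid=False, failed_setting_key=key)
    settings_array.append((key, value))
    return CollectSettingResult(settings_array)


result = collect_setting("SCAN_SUBNETS", "", [])
if not result.data_is_valid:
    print(f"Invalid value: {result.failed_setting_key}")  # abort the save early
```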