mirror of
https://github.com/jokob-sk/NetAlertX.git
synced 2025-12-07 09:36:05 -08:00
Compare commits
46 Commits
d3326b3362
...
fix-pr-130
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
1dee812ce6 | ||
|
|
5c44fd8fea | ||
|
|
bd691f01b1 | ||
|
|
624fd87ee7 | ||
|
|
5d1c63375b | ||
|
|
8c982cd476 | ||
|
|
36e5751221 | ||
|
|
5af760f5ee | ||
|
|
dfd836527e | ||
|
|
8d5a663817 | ||
|
|
fbb4a2f8b4 | ||
|
|
54bce6505b | ||
|
|
6da47cc830 | ||
|
|
9cabbf3622 | ||
|
|
6c28a08bee | ||
|
|
86e3decd4e | ||
|
|
e14e0bb9e8 | ||
|
|
b6023d1373 | ||
|
|
1812cc8ef8 | ||
|
|
e64c490c8a | ||
|
|
5df39f984a | ||
|
|
d007ed711a | ||
|
|
61824abb9f | ||
|
|
33c5548fe1 | ||
|
|
fd41c395ae | ||
|
|
1a980844f0 | ||
|
|
82e018e284 | ||
|
|
e0e1233b1c | ||
|
|
74677f940e | ||
|
|
21a4d20579 | ||
|
|
9634e4e0f7 | ||
|
|
00a47ab5d3 | ||
|
|
59b417705e | ||
|
|
525d082f3d | ||
|
|
ba3481759b | ||
|
|
7125cea29b | ||
|
|
8586c5a307 | ||
|
|
0d81315809 | ||
|
|
8f193f1e2c | ||
|
|
b1eef8aa09 | ||
|
|
531b66effe | ||
|
|
5e4ad10fe0 | ||
|
|
541b932b6d | ||
|
|
2bf3ff9f00 | ||
|
|
2da17f272c | ||
|
|
7bcb4586b2 |
@@ -25,7 +25,7 @@
|
|||||||
// even within this container and connect to them as needed.
|
// even within this container and connect to them as needed.
|
||||||
// "--network=host",
|
// "--network=host",
|
||||||
],
|
],
|
||||||
"mounts": [
|
"mounts": [
|
||||||
"source=/var/run/docker.sock,target=/var/run/docker.sock,type=bind" //used for testing various conditions in docker
|
"source=/var/run/docker.sock,target=/var/run/docker.sock,type=bind" //used for testing various conditions in docker
|
||||||
],
|
],
|
||||||
// ATTENTION: If running with --network=host, COMMENT `forwardPorts` OR ELSE THERE WILL BE NO WEBUI!
|
// ATTENTION: If running with --network=host, COMMENT `forwardPorts` OR ELSE THERE WILL BE NO WEBUI!
|
||||||
@@ -88,7 +88,7 @@
|
|||||||
}
|
}
|
||||||
},
|
},
|
||||||
"terminal.integrated.defaultProfile.linux": "zsh",
|
"terminal.integrated.defaultProfile.linux": "zsh",
|
||||||
|
|
||||||
// Python testing configuration
|
// Python testing configuration
|
||||||
"python.testing.pytestEnabled": true,
|
"python.testing.pytestEnabled": true,
|
||||||
"python.testing.unittestEnabled": false,
|
"python.testing.unittestEnabled": false,
|
||||||
|
|||||||
7
.github/copilot-instructions.md
vendored
7
.github/copilot-instructions.md
vendored
@@ -39,10 +39,11 @@ Backend loop phases (see `server/__main__.py` and `server/plugin.py`): `once`, `
|
|||||||
## API/Endpoints quick map
|
## API/Endpoints quick map
|
||||||
- Flask app: `server/api_server/api_server_start.py` exposes routes like `/device/<mac>`, `/devices`, `/devices/export/{csv,json}`, `/devices/import`, `/devices/totals`, `/devices/by-status`, plus `nettools`, `events`, `sessions`, `dbquery`, `metrics`, `sync`.
|
- Flask app: `server/api_server/api_server_start.py` exposes routes like `/device/<mac>`, `/devices`, `/devices/export/{csv,json}`, `/devices/import`, `/devices/totals`, `/devices/by-status`, plus `nettools`, `events`, `sessions`, `dbquery`, `metrics`, `sync`.
|
||||||
- Authorization: all routes expect header `Authorization: Bearer <API_TOKEN>` via `get_setting_value('API_TOKEN')`.
|
- Authorization: all routes expect header `Authorization: Bearer <API_TOKEN>` via `get_setting_value('API_TOKEN')`.
|
||||||
|
- All responses need to return `"success":<False:True>` and if `False` an "error" message needs to be returned, e.g. `{"success": False, "error": f"No stored open ports for Device"}`
|
||||||
|
|
||||||
## Conventions & helpers to reuse
|
## Conventions & helpers to reuse
|
||||||
- Settings: add/modify via `ccd()` in `server/initialise.py` or per‑plugin manifest. Never hardcode ports or secrets; use `get_setting_value()`.
|
- Settings: add/modify via `ccd()` in `server/initialise.py` or per‑plugin manifest. Never hardcode ports or secrets; use `get_setting_value()`.
|
||||||
- Logging: use `logger.mylog(level, [message])`; levels: none/minimal/verbose/debug/trace.
|
- Logging: use `mylog(level, [message])`; levels: none/minimal/verbose/debug/trace. `none` is used for most important messages that should always appear, such as exceptions.
|
||||||
- Time/MAC/strings: `helper.py` (`timeNowDB`, `normalize_mac`, sanitizers). Validate MACs before DB writes.
|
- Time/MAC/strings: `helper.py` (`timeNowDB`, `normalize_mac`, sanitizers). Validate MACs before DB writes.
|
||||||
- DB helpers: prefer `server/db/db_helper.py` functions (e.g., `get_table_json`, device condition helpers) over raw SQL in new paths.
|
- DB helpers: prefer `server/db/db_helper.py` functions (e.g., `get_table_json`, device condition helpers) over raw SQL in new paths.
|
||||||
|
|
||||||
@@ -85,7 +86,7 @@ Backend loop phases (see `server/__main__.py` and `server/plugin.py`): `once`, `
|
|||||||
- Above all, use the simplest possible code that meets the need so it can be easily audited and maintained.
|
- Above all, use the simplest possible code that meets the need so it can be easily audited and maintained.
|
||||||
- Always leave logging enabled. If there is a possiblity it will be difficult to debug with current logging, add more logging.
|
- Always leave logging enabled. If there is a possiblity it will be difficult to debug with current logging, add more logging.
|
||||||
- Always run the testFailure tool before executing any tests to gather current failure information and avoid redundant runs.
|
- Always run the testFailure tool before executing any tests to gather current failure information and avoid redundant runs.
|
||||||
- Always prioritize using the appropriate tools in the environment first. As an example if a test is failing use `testFailure` then `runTests`. Never `runTests` first.
|
- Always prioritize using the appropriate tools in the environment first. As an example if a test is failing use `testFailure` then `runTests`. Never `runTests` first.
|
||||||
- Docker tests take an extremely long time to run. Avoid changes to docker or tests until you've examined the exisiting testFailures and runTests results.
|
- Docker tests take an extremely long time to run. Avoid changes to docker or tests until you've examined the exisiting testFailures and runTests results.
|
||||||
- Environment tools are designed specifically for your use in this project and running them in this order will give you the best results.
|
- Environment tools are designed specifically for your use in this project and running them in this order will give you the best results.
|
||||||
|
|
||||||
|
|||||||
6
.github/workflows/docker_dev.yml
vendored
6
.github/workflows/docker_dev.yml
vendored
@@ -47,6 +47,12 @@ jobs:
|
|||||||
id: get_version
|
id: get_version
|
||||||
run: echo "version=Dev" >> $GITHUB_OUTPUT
|
run: echo "version=Dev" >> $GITHUB_OUTPUT
|
||||||
|
|
||||||
|
# --- debug output
|
||||||
|
- name: Debug version
|
||||||
|
run: |
|
||||||
|
echo "GITHUB_REF: $GITHUB_REF"
|
||||||
|
echo "Version: '${{ steps.get_version.outputs.version }}'"
|
||||||
|
|
||||||
# --- Write the timestamped version to .VERSION file
|
# --- Write the timestamped version to .VERSION file
|
||||||
- name: Create .VERSION file
|
- name: Create .VERSION file
|
||||||
run: echo "${{ steps.timestamp.outputs.version }}" > .VERSION
|
run: echo "${{ steps.timestamp.outputs.version }}" > .VERSION
|
||||||
|
|||||||
22
.github/workflows/docker_prod.yml
vendored
22
.github/workflows/docker_prod.yml
vendored
@@ -32,14 +32,34 @@ jobs:
|
|||||||
- name: Set up Docker Buildx
|
- name: Set up Docker Buildx
|
||||||
uses: docker/setup-buildx-action@v3
|
uses: docker/setup-buildx-action@v3
|
||||||
|
|
||||||
|
# --- Previous approach Get release version from tag
|
||||||
|
- name: Set up dynamic build ARGs
|
||||||
|
id: getargs
|
||||||
|
run: echo "version=$(cat ./stable/VERSION)" >> $GITHUB_OUTPUT
|
||||||
|
|
||||||
|
- name: Get release version
|
||||||
|
id: get_version_prev
|
||||||
|
run: echo "::set-output name=version::${GITHUB_REF#refs/tags/}"
|
||||||
|
|
||||||
|
- name: Create .VERSION file
|
||||||
|
run: echo "${{ steps.get_version.outputs.version }}" >> .VERSION_PREV
|
||||||
|
|
||||||
# --- Get release version from tag
|
# --- Get release version from tag
|
||||||
- name: Get release version
|
- name: Get release version
|
||||||
id: get_version
|
id: get_version
|
||||||
run: echo "version=${GITHUB_REF#refs/tags/}" >> $GITHUB_OUTPUT
|
run: echo "version=${GITHUB_REF#refs/tags/}" >> $GITHUB_OUTPUT
|
||||||
|
|
||||||
|
|
||||||
|
# --- debug output
|
||||||
|
- name: Debug version
|
||||||
|
run: |
|
||||||
|
echo "GITHUB_REF: $GITHUB_REF"
|
||||||
|
echo "Version: '${{ steps.get_version.outputs.version }}'"
|
||||||
|
echo "Version prev: '${{ steps.get_version_prev.outputs.version }}'"
|
||||||
|
|
||||||
# --- Write version to .VERSION file
|
# --- Write version to .VERSION file
|
||||||
- name: Create .VERSION file
|
- name: Create .VERSION file
|
||||||
run: echo "${{ steps.get_version.outputs.version }}" > .VERSION
|
run: echo -n "${{ steps.get_version.outputs.version }}" > .VERSION
|
||||||
|
|
||||||
# --- Generate Docker metadata and tags
|
# --- Generate Docker metadata and tags
|
||||||
- name: Docker meta
|
- name: Docker meta
|
||||||
|
|||||||
1
.gitignore
vendored
1
.gitignore
vendored
@@ -11,6 +11,7 @@ nohup.out
|
|||||||
config/*
|
config/*
|
||||||
.ash_history
|
.ash_history
|
||||||
.VERSION
|
.VERSION
|
||||||
|
.VERSION_PREV
|
||||||
config/pialert.conf
|
config/pialert.conf
|
||||||
config/app.conf
|
config/app.conf
|
||||||
db/*
|
db/*
|
||||||
|
|||||||
49
Dockerfile
49
Dockerfile
@@ -1,16 +1,16 @@
|
|||||||
# The NetAlertX Dockerfile has 3 stages:
|
# The NetAlertX Dockerfile has 3 stages:
|
||||||
#
|
#
|
||||||
# Stage 1. Builder - NetAlertX Requires special tools and packages to build our virtual environment, but
|
# Stage 1. Builder - NetAlertX Requires special tools and packages to build our virtual environment, but
|
||||||
# which are not needed in future stages. We build the builder and extract the venv for runner to use as
|
# which are not needed in future stages. We build the builder and extract the venv for runner to use as
|
||||||
# a base.
|
# a base.
|
||||||
#
|
#
|
||||||
# Stage 2. Runner builds the bare minimum requirements to create an operational NetAlertX. The primary
|
# Stage 2. Runner builds the bare minimum requirements to create an operational NetAlertX. The primary
|
||||||
# reason for breaking at this stage is it leaves the system in a proper state for devcontainer operation
|
# reason for breaking at this stage is it leaves the system in a proper state for devcontainer operation
|
||||||
# This image also provides a break-out point for uses who wish to execute the anti-pattern of using a
|
# This image also provides a break-out point for uses who wish to execute the anti-pattern of using a
|
||||||
# docker container as a VM for experimentation and various development patterns.
|
# docker container as a VM for experimentation and various development patterns.
|
||||||
#
|
#
|
||||||
# Stage 3. Hardened removes root, sudoers, folders, permissions, and locks the system down into a read-only
|
# Stage 3. Hardened removes root, sudoers, folders, permissions, and locks the system down into a read-only
|
||||||
# compatible image. While NetAlertX does require some read-write operations, this image can guarantee the
|
# compatible image. While NetAlertX does require some read-write operations, this image can guarantee the
|
||||||
# code pushed out by the project is the only code which will run on the system after each container restart.
|
# code pushed out by the project is the only code which will run on the system after each container restart.
|
||||||
# It reduces the chance of system hijacking and operates with all modern security protocols in place as is
|
# It reduces the chance of system hijacking and operates with all modern security protocols in place as is
|
||||||
# expected from a security appliance.
|
# expected from a security appliance.
|
||||||
@@ -26,15 +26,25 @@ ENV PATH="/opt/venv/bin:$PATH"
|
|||||||
|
|
||||||
# Install build dependencies
|
# Install build dependencies
|
||||||
COPY requirements.txt /tmp/requirements.txt
|
COPY requirements.txt /tmp/requirements.txt
|
||||||
RUN apk add --no-cache bash shadow python3 python3-dev gcc musl-dev libffi-dev openssl-dev git \
|
RUN apk add --no-cache \
|
||||||
|
bash \
|
||||||
|
shadow \
|
||||||
|
python3 \
|
||||||
|
python3-dev \
|
||||||
|
gcc \
|
||||||
|
musl-dev \
|
||||||
|
libffi-dev \
|
||||||
|
openssl-dev \
|
||||||
|
git \
|
||||||
|
rust \
|
||||||
|
cargo \
|
||||||
&& python -m venv /opt/venv
|
&& python -m venv /opt/venv
|
||||||
|
|
||||||
# Create virtual environment owned by root, but readable by everyone else. This makes it easy to copy
|
# Upgrade pip/wheel/setuptools and install Python packages
|
||||||
# into hardened stage without worrying about permissions and keeps image size small. Keeping the commands
|
RUN python -m pip install --upgrade pip setuptools wheel && \
|
||||||
# together makes for a slightly smaller image size.
|
pip install --no-cache-dir -r /tmp/requirements.txt && \
|
||||||
RUN pip install --no-cache-dir -r /tmp/requirements.txt && \
|
|
||||||
chmod -R u-rwx,g-rwx /opt
|
chmod -R u-rwx,g-rwx /opt
|
||||||
|
|
||||||
# second stage is the main runtime stage with just the minimum required to run the application
|
# second stage is the main runtime stage with just the minimum required to run the application
|
||||||
# The runner is used for both devcontainer, and as a base for the hardened stage.
|
# The runner is used for both devcontainer, and as a base for the hardened stage.
|
||||||
FROM alpine:3.22 AS runner
|
FROM alpine:3.22 AS runner
|
||||||
@@ -95,11 +105,11 @@ ENV READ_WRITE_FOLDERS="${NETALERTX_DATA} ${NETALERTX_CONFIG} ${NETALERTX_DB} ${
|
|||||||
${SYSTEM_SERVICES_ACTIVE_CONFIG}"
|
${SYSTEM_SERVICES_ACTIVE_CONFIG}"
|
||||||
|
|
||||||
#Python environment
|
#Python environment
|
||||||
ENV PYTHONUNBUFFERED=1
|
ENV PYTHONUNBUFFERED=1
|
||||||
ENV VIRTUAL_ENV=/opt/venv
|
ENV VIRTUAL_ENV=/opt/venv
|
||||||
ENV VIRTUAL_ENV_BIN=/opt/venv/bin
|
ENV VIRTUAL_ENV_BIN=/opt/venv/bin
|
||||||
ENV PYTHONPATH=${NETALERTX_APP}:${NETALERTX_SERVER}:${NETALERTX_PLUGINS}:${VIRTUAL_ENV}/lib/python3.12/site-packages
|
ENV PYTHONPATH=${NETALERTX_APP}:${NETALERTX_SERVER}:${NETALERTX_PLUGINS}:${VIRTUAL_ENV}/lib/python3.12/site-packages
|
||||||
ENV PATH="${SYSTEM_SERVICES}:${VIRTUAL_ENV_BIN}:$PATH"
|
ENV PATH="${SYSTEM_SERVICES}:${VIRTUAL_ENV_BIN}:$PATH"
|
||||||
|
|
||||||
# App Environment
|
# App Environment
|
||||||
ENV LISTEN_ADDR=0.0.0.0
|
ENV LISTEN_ADDR=0.0.0.0
|
||||||
@@ -110,7 +120,7 @@ ENV VENDORSPATH_NEWEST=${SYSTEM_SERVICES_RUN_TMP}/ieee-oui.txt
|
|||||||
ENV ENVIRONMENT=alpine
|
ENV ENVIRONMENT=alpine
|
||||||
ENV READ_ONLY_USER=readonly READ_ONLY_GROUP=readonly
|
ENV READ_ONLY_USER=readonly READ_ONLY_GROUP=readonly
|
||||||
ENV NETALERTX_USER=netalertx NETALERTX_GROUP=netalertx
|
ENV NETALERTX_USER=netalertx NETALERTX_GROUP=netalertx
|
||||||
ENV LANG=C.UTF-8
|
ENV LANG=C.UTF-8
|
||||||
|
|
||||||
|
|
||||||
RUN apk add --no-cache bash mtr libbsd zip lsblk tzdata curl arp-scan iproute2 iproute2-ss nmap \
|
RUN apk add --no-cache bash mtr libbsd zip lsblk tzdata curl arp-scan iproute2 iproute2-ss nmap \
|
||||||
@@ -138,6 +148,7 @@ RUN install -d -o ${NETALERTX_USER} -g ${NETALERTX_GROUP} -m 700 ${READ_WRITE_FO
|
|||||||
|
|
||||||
# Copy version information into the image
|
# Copy version information into the image
|
||||||
COPY --chown=${NETALERTX_USER}:${NETALERTX_GROUP} .[V]ERSION ${NETALERTX_APP}/.VERSION
|
COPY --chown=${NETALERTX_USER}:${NETALERTX_GROUP} .[V]ERSION ${NETALERTX_APP}/.VERSION
|
||||||
|
COPY --chown=${NETALERTX_USER}:${NETALERTX_GROUP} .[V]ERSION ${NETALERTX_APP}/.VERSION_PREV
|
||||||
|
|
||||||
# Copy the virtualenv from the builder stage
|
# Copy the virtualenv from the builder stage
|
||||||
COPY --from=builder --chown=20212:20212 ${VIRTUAL_ENV} ${VIRTUAL_ENV}
|
COPY --from=builder --chown=20212:20212 ${VIRTUAL_ENV} ${VIRTUAL_ENV}
|
||||||
@@ -147,12 +158,12 @@ COPY --from=builder --chown=20212:20212 ${VIRTUAL_ENV} ${VIRTUAL_ENV}
|
|||||||
# This is done after the copy of the venv to ensure the venv is in place
|
# This is done after the copy of the venv to ensure the venv is in place
|
||||||
# although it may be quicker to do it before the copy, it keeps the image
|
# although it may be quicker to do it before the copy, it keeps the image
|
||||||
# layers smaller to do it after.
|
# layers smaller to do it after.
|
||||||
RUN if [ -f '.VERSION' ]; then \
|
RUN for vfile in .VERSION .VERSION_PREV; do \
|
||||||
cp '.VERSION' "${NETALERTX_APP}/.VERSION"; \
|
if [ ! -f "${NETALERTX_APP}/${vfile}" ]; then \
|
||||||
else \
|
echo "DEVELOPMENT 00000000" > "${NETALERTX_APP}/${vfile}"; \
|
||||||
echo "DEVELOPMENT 00000000" > "${NETALERTX_APP}/.VERSION"; \
|
fi; \
|
||||||
fi && \
|
chown 20212:20212 "${NETALERTX_APP}/${vfile}"; \
|
||||||
chown 20212:20212 "${NETALERTX_APP}/.VERSION" && \
|
done && \
|
||||||
apk add --no-cache libcap && \
|
apk add --no-cache libcap && \
|
||||||
setcap cap_net_raw+ep /bin/busybox && \
|
setcap cap_net_raw+ep /bin/busybox && \
|
||||||
setcap cap_net_raw,cap_net_admin+eip /usr/bin/nmap && \
|
setcap cap_net_raw,cap_net_admin+eip /usr/bin/nmap && \
|
||||||
@@ -180,7 +191,7 @@ ENV UMASK=0077
|
|||||||
|
|
||||||
# Create readonly user and group with no shell access.
|
# Create readonly user and group with no shell access.
|
||||||
# Readonly user marks folders that are created by NetAlertX, but should not be modified.
|
# Readonly user marks folders that are created by NetAlertX, but should not be modified.
|
||||||
# AI may claim this is stupid, but it's actually least possible permissions as
|
# AI may claim this is stupid, but it's actually least possible permissions as
|
||||||
# read-only user cannot login, cannot sudo, has no write permission, and cannot even
|
# read-only user cannot login, cannot sudo, has no write permission, and cannot even
|
||||||
# read the files it owns. The read-only user is ownership-as-a-lock hardening pattern.
|
# read the files it owns. The read-only user is ownership-as-a-lock hardening pattern.
|
||||||
RUN addgroup -g 20212 "${READ_ONLY_GROUP}" && \
|
RUN addgroup -g 20212 "${READ_ONLY_GROUP}" && \
|
||||||
|
|||||||
@@ -34,9 +34,7 @@ Get visibility of what's going on on your WIFI/LAN network and enable presence d
|
|||||||
## 🚀 Quick Start
|
## 🚀 Quick Start
|
||||||
|
|
||||||
> [!WARNING]
|
> [!WARNING]
|
||||||
> ⚠️ **Important:** The documentation has been recently updated and some instructions may have changed.
|
> ⚠️ **Important:** The docker-compose has recently changed. Carefully read the [Migration guide](https://jokob-sk.github.io/NetAlertX/MIGRATION/?h=migrat#12-migration-from-netalertx-v25524) for detailed instructions.
|
||||||
> If you are using the currently live production image, please follow the instructions on [Docker Hub](https://hub.docker.com/r/jokobsk/netalertx) for building and running the container.
|
|
||||||
> These docs reflect the latest development version and may differ from the production image.
|
|
||||||
|
|
||||||
Start NetAlertX in seconds with Docker:
|
Start NetAlertX in seconds with Docker:
|
||||||
|
|
||||||
@@ -44,8 +42,7 @@ Start NetAlertX in seconds with Docker:
|
|||||||
docker run -d \
|
docker run -d \
|
||||||
--network=host \
|
--network=host \
|
||||||
--restart unless-stopped \
|
--restart unless-stopped \
|
||||||
-v /local_data_dir/config:/data/config \
|
-v /local_data_dir:/data \
|
||||||
-v /local_data_dir/db:/data/db \
|
|
||||||
-v /etc/localtime:/etc/localtime:ro \
|
-v /etc/localtime:/etc/localtime:ro \
|
||||||
--tmpfs /tmp:uid=20211,gid=20211,mode=1700 \
|
--tmpfs /tmp:uid=20211,gid=20211,mode=1700 \
|
||||||
-e PORT=20211 \
|
-e PORT=20211 \
|
||||||
@@ -53,6 +50,8 @@ docker run -d \
|
|||||||
ghcr.io/jokob-sk/netalertx:latest
|
ghcr.io/jokob-sk/netalertx:latest
|
||||||
```
|
```
|
||||||
|
|
||||||
|
Note: Your `/local_data_dir` should contain a `config` and `db` folder.
|
||||||
|
|
||||||
To deploy a containerized instance directly from the source repository, execute the following BASH sequence:
|
To deploy a containerized instance directly from the source repository, execute the following BASH sequence:
|
||||||
```bash
|
```bash
|
||||||
git clone https://github.com/jokob-sk/NetAlertX.git
|
git clone https://github.com/jokob-sk/NetAlertX.git
|
||||||
|
|||||||
22
docs/API.md
22
docs/API.md
@@ -36,9 +36,15 @@ Authorization: Bearer <API_TOKEN>
|
|||||||
If the token is missing or invalid, the server will return:
|
If the token is missing or invalid, the server will return:
|
||||||
|
|
||||||
```json
|
```json
|
||||||
{ "error": "Forbidden" }
|
{
|
||||||
|
"success": false,
|
||||||
|
"message": "ERROR: Not authorized",
|
||||||
|
"error": "Forbidden"
|
||||||
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
|
HTTP Status: **403 Forbidden**
|
||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
## Base URL
|
## Base URL
|
||||||
@@ -54,6 +60,8 @@ http://<server>:<GRAPHQL_PORT>/
|
|||||||
> [!TIP]
|
> [!TIP]
|
||||||
> When retrieving devices or settings try using the GraphQL API endpoint first as it is read-optimized.
|
> When retrieving devices or settings try using the GraphQL API endpoint first as it is read-optimized.
|
||||||
|
|
||||||
|
### Standard REST Endpoints
|
||||||
|
|
||||||
* [Device API Endpoints](API_DEVICE.md) – Manage individual devices
|
* [Device API Endpoints](API_DEVICE.md) – Manage individual devices
|
||||||
* [Devices Collection](API_DEVICES.md) – Bulk operations on multiple devices
|
* [Devices Collection](API_DEVICES.md) – Bulk operations on multiple devices
|
||||||
* [Events](API_EVENTS.md) – Device event logging and management
|
* [Events](API_EVENTS.md) – Device event logging and management
|
||||||
@@ -69,6 +77,18 @@ http://<server>:<GRAPHQL_PORT>/
|
|||||||
* [Logs](API_LOGS.md) – Purging of logs and adding to the event execution queue for user triggered events
|
* [Logs](API_LOGS.md) – Purging of logs and adding to the event execution queue for user triggered events
|
||||||
* [DB query](API_DBQUERY.md) (⚠ Internal) - Low level database access - use other endpoints if possible
|
* [DB query](API_DBQUERY.md) (⚠ Internal) - Low level database access - use other endpoints if possible
|
||||||
|
|
||||||
|
### MCP Server Bridge
|
||||||
|
|
||||||
|
NetAlertX includes an **MCP (Model Context Protocol) Server Bridge** that provides AI assistants access to NetAlertX functionality through standardized tools. MCP endpoints are available at `/mcp/sse/*` paths and mirror the functionality of standard REST endpoints:
|
||||||
|
|
||||||
|
* `/mcp/sse` - Server-Sent Events endpoint for MCP client connections
|
||||||
|
* `/mcp/sse/openapi.json` - OpenAPI specification for available MCP tools
|
||||||
|
* `/mcp/sse/device/*`, `/mcp/sse/devices/*`, `/mcp/sse/nettools/*`, `/mcp/sse/events/*` - MCP-enabled versions of REST endpoints
|
||||||
|
|
||||||
|
MCP endpoints require the same Bearer token authentication as REST endpoints.
|
||||||
|
|
||||||
|
**📖 See [MCP Server Bridge API](API_MCP.md) for complete documentation, tool specifications, and integration examples.**
|
||||||
|
|
||||||
See [Testing](API_TESTS.md) for example requests and usage.
|
See [Testing](API_TESTS.md) for example requests and usage.
|
||||||
|
|
||||||
---
|
---
|
||||||
|
|||||||
@@ -2,7 +2,7 @@
|
|||||||
|
|
||||||
The **Database Query API** provides direct, low-level access to the NetAlertX database. It allows **read, write, update, and delete** operations against tables, using **base64-encoded** SQL or structured parameters.
|
The **Database Query API** provides direct, low-level access to the NetAlertX database. It allows **read, write, update, and delete** operations against tables, using **base64-encoded** SQL or structured parameters.
|
||||||
|
|
||||||
> [!Warning]
|
> [!Warning]
|
||||||
> This API is primarily used internally to generate and render the application UI. These endpoints are low-level and powerful, and should be used with caution. Wherever possible, prefer the [standard API endpoints](API.md). Invalid or unsafe queries can corrupt data.
|
> This API is primarily used internally to generate and render the application UI. These endpoints are low-level and powerful, and should be used with caution. Wherever possible, prefer the [standard API endpoints](API.md). Invalid or unsafe queries can corrupt data.
|
||||||
> If you need data in a specific format that is not already provided, please open an issue or pull request with a clear, broadly useful use case. This helps ensure new endpoints benefit the wider community rather than relying on raw database queries.
|
> If you need data in a specific format that is not already provided, please open an issue or pull request with a clear, broadly useful use case. This helps ensure new endpoints benefit the wider community rather than relying on raw database queries.
|
||||||
|
|
||||||
@@ -16,10 +16,14 @@ All `/dbquery/*` endpoints require an API token in the HTTP headers:
|
|||||||
Authorization: Bearer <API_TOKEN>
|
Authorization: Bearer <API_TOKEN>
|
||||||
```
|
```
|
||||||
|
|
||||||
If the token is missing or invalid:
|
If the token is missing or invalid (HTTP 403):
|
||||||
|
|
||||||
```json
|
```json
|
||||||
{ "error": "Forbidden" }
|
{
|
||||||
|
\"success\": false,
|
||||||
|
\"message\": \"ERROR: Not authorized\",
|
||||||
|
\"error\": \"Forbidden\"
|
||||||
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
---
|
---
|
||||||
|
|||||||
@@ -41,6 +41,8 @@ Manage a **single device** by its MAC address. Operations include retrieval, upd
|
|||||||
* Device not found → HTTP 404
|
* Device not found → HTTP 404
|
||||||
* Unauthorized → HTTP 403
|
* Unauthorized → HTTP 403
|
||||||
|
|
||||||
|
**MCP Integration**: Available as `get_device_info` and `set_device_alias` tools. See [MCP Server Bridge API](API_MCP.md).
|
||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
## 2. Update Device Fields
|
## 2. Update Device Fields
|
||||||
|
|||||||
@@ -170,7 +170,7 @@ The Devices Collection API provides operations to **retrieve, manage, import/exp
|
|||||||
**Response**:
|
**Response**:
|
||||||
|
|
||||||
```json
|
```json
|
||||||
[
|
[
|
||||||
120, // Total devices
|
120, // Total devices
|
||||||
85, // Connected
|
85, // Connected
|
||||||
5, // Favorites
|
5, // Favorites
|
||||||
@@ -207,6 +207,93 @@ The Devices Collection API provides operations to **retrieve, manage, import/exp
|
|||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
|
### 9. Search Devices
|
||||||
|
|
||||||
|
* **POST** `/devices/search`
|
||||||
|
Search for devices by MAC, name, or IP address.
|
||||||
|
|
||||||
|
**Request Body** (JSON):
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"query": ".50"
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
**Response**:
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"success": true,
|
||||||
|
"devices": [
|
||||||
|
{
|
||||||
|
"devName": "Test Device",
|
||||||
|
"devMac": "AA:BB:CC:DD:EE:FF",
|
||||||
|
"devLastIP": "192.168.1.50"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### 10. Get Latest Device
|
||||||
|
|
||||||
|
* **GET** `/devices/latest`
|
||||||
|
Get the most recently connected device.
|
||||||
|
|
||||||
|
**Response**:
|
||||||
|
|
||||||
|
```json
|
||||||
|
[
|
||||||
|
{
|
||||||
|
"devName": "Latest Device",
|
||||||
|
"devMac": "AA:BB:CC:DD:EE:FF",
|
||||||
|
"devLastIP": "192.168.1.100",
|
||||||
|
"devFirstConnection": "2025-12-07 10:30:00"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### 11. Get Network Topology
|
||||||
|
|
||||||
|
* **GET** `/devices/network/topology`
|
||||||
|
Get network topology showing device relationships.
|
||||||
|
|
||||||
|
**Response**:
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"nodes": [
|
||||||
|
{
|
||||||
|
"id": "AA:AA:AA:AA:AA:AA",
|
||||||
|
"name": "Router",
|
||||||
|
"vendor": "VendorA"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"links": [
|
||||||
|
{
|
||||||
|
"source": "AA:AA:AA:AA:AA:AA",
|
||||||
|
"target": "BB:BB:BB:BB:BB:BB",
|
||||||
|
"port": "eth1"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## MCP Tools
|
||||||
|
|
||||||
|
These endpoints are also available as **MCP Tools** for AI assistant integration:
|
||||||
|
- `list_devices`, `search_devices`, `get_latest_device`, `get_network_topology`, `set_device_alias`
|
||||||
|
|
||||||
|
📖 See [MCP Server Bridge API](API_MCP.md) for AI integration details.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
## Example `curl` Requests
|
## Example `curl` Requests
|
||||||
|
|
||||||
**Get All Devices**:
|
**Get All Devices**:
|
||||||
@@ -247,3 +334,26 @@ curl -X GET "http://<server_ip>:<GRAPHQL_PORT>/devices/by-status?status=online"
|
|||||||
-H "Authorization: Bearer <API_TOKEN>"
|
-H "Authorization: Bearer <API_TOKEN>"
|
||||||
```
|
```
|
||||||
|
|
||||||
|
**Search Devices**:
|
||||||
|
|
||||||
|
```sh
|
||||||
|
curl -X POST "http://<server_ip>:<GRAPHQL_PORT>/devices/search" \
|
||||||
|
-H "Authorization: Bearer <API_TOKEN>" \
|
||||||
|
-H "Content-Type: application/json" \
|
||||||
|
--data '{"query": "192.168.1"}'
|
||||||
|
```
|
||||||
|
|
||||||
|
**Get Latest Device**:
|
||||||
|
|
||||||
|
```sh
|
||||||
|
curl -X GET "http://<server_ip>:<GRAPHQL_PORT>/devices/latest" \
|
||||||
|
-H "Authorization: Bearer <API_TOKEN>"
|
||||||
|
```
|
||||||
|
|
||||||
|
**Get Network Topology**:
|
||||||
|
|
||||||
|
```sh
|
||||||
|
curl -X GET "http://<server_ip>:<GRAPHQL_PORT>/devices/network/topology" \
|
||||||
|
-H "Authorization: Bearer <API_TOKEN>"
|
||||||
|
```
|
||||||
|
|
||||||
|
|||||||
@@ -88,7 +88,56 @@ The Events API provides access to **device event logs**, allowing creation, retr
|
|||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
### 4. Event Totals Over a Period
|
### 4. Get Recent Events
|
||||||
|
|
||||||
|
* **GET** `/events/recent` → Get events from the last 24 hours
|
||||||
|
* **GET** `/events/<hours>` → Get events from the last N hours
|
||||||
|
|
||||||
|
**Response** (JSON):
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"success": true,
|
||||||
|
"hours": 24,
|
||||||
|
"count": 5,
|
||||||
|
"events": [
|
||||||
|
{
|
||||||
|
"eve_DateTime": "2025-12-07 12:00:00",
|
||||||
|
"eve_EventType": "New Device",
|
||||||
|
"eve_MAC": "AA:BB:CC:DD:EE:FF",
|
||||||
|
"eve_IP": "192.168.1.100",
|
||||||
|
"eve_AdditionalInfo": "Device detected"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### 5. Get Latest Events
|
||||||
|
|
||||||
|
* **GET** `/events/last`
|
||||||
|
Get the 10 most recent events.
|
||||||
|
|
||||||
|
**Response** (JSON):
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"success": true,
|
||||||
|
"count": 10,
|
||||||
|
"events": [
|
||||||
|
{
|
||||||
|
"eve_DateTime": "2025-12-07 12:00:00",
|
||||||
|
"eve_EventType": "Device Down",
|
||||||
|
"eve_MAC": "AA:BB:CC:DD:EE:FF"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### 6. Event Totals Over a Period
|
||||||
|
|
||||||
* **GET** `/sessions/totals?period=<period>`
|
* **GET** `/sessions/totals?period=<period>`
|
||||||
Return event and session totals over a given period.
|
Return event and session totals over a given period.
|
||||||
@@ -116,12 +165,25 @@ The Events API provides access to **device event logs**, allowing creation, retr
|
|||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
|
## MCP Tools
|
||||||
|
|
||||||
|
Event endpoints are available as **MCP Tools** for AI assistant integration:
|
||||||
|
- `get_recent_alerts`, `get_last_events`
|
||||||
|
|
||||||
|
📖 See [MCP Server Bridge API](API_MCP.md) for AI integration details.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
## Notes
|
## Notes
|
||||||
|
|
||||||
* All endpoints require **authorization** (Bearer token). Unauthorized requests return:
|
* All endpoints require **authorization** (Bearer token). Unauthorized requests return HTTP 403:
|
||||||
|
|
||||||
```json
|
```json
|
||||||
{ "error": "Forbidden" }
|
{
|
||||||
|
"success": false,
|
||||||
|
"message": "ERROR: Not authorized",
|
||||||
|
"error": "Forbidden"
|
||||||
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
* Events are stored in the **Events table** with the following fields:
|
* Events are stored in the **Events table** with the following fields:
|
||||||
|
|||||||
326
docs/API_MCP.md
Normal file
326
docs/API_MCP.md
Normal file
@@ -0,0 +1,326 @@
|
|||||||
|
# MCP Server Bridge API
|
||||||
|
|
||||||
|
The **MCP (Model Context Protocol) Server Bridge** provides AI assistants with standardized access to NetAlertX functionality through tools and server-sent events. This enables AI systems to interact with your network monitoring data in real-time.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Overview
|
||||||
|
|
||||||
|
The MCP Server Bridge exposes NetAlertX functionality as **MCP Tools** that AI assistants can call to:
|
||||||
|
|
||||||
|
- Search and retrieve device information
|
||||||
|
- Trigger network scans
|
||||||
|
- Get network topology and events
|
||||||
|
- Wake devices via Wake-on-LAN
|
||||||
|
- Access open port information
|
||||||
|
- Set device aliases
|
||||||
|
|
||||||
|
All MCP endpoints mirror the functionality of standard REST endpoints but are optimized for AI assistant integration.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Authentication
|
||||||
|
|
||||||
|
MCP endpoints use the same **Bearer token authentication** as REST endpoints:
|
||||||
|
|
||||||
|
```http
|
||||||
|
Authorization: Bearer <API_TOKEN>
|
||||||
|
```
|
||||||
|
|
||||||
|
Unauthorized requests return HTTP 403:
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"success": false,
|
||||||
|
"message": "ERROR: Not authorized",
|
||||||
|
"error": "Forbidden"
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## MCP Connection Endpoint
|
||||||
|
|
||||||
|
### Server-Sent Events (SSE)
|
||||||
|
|
||||||
|
* **GET/POST** `/mcp/sse`
|
||||||
|
|
||||||
|
Main MCP connection endpoint for AI clients. Establishes a persistent connection using Server-Sent Events for real-time communication between AI assistants and NetAlertX.
|
||||||
|
|
||||||
|
**Connection Example**:
|
||||||
|
|
||||||
|
```javascript
|
||||||
|
const eventSource = new EventSource('/mcp/sse', {
|
||||||
|
headers: {
|
||||||
|
'Authorization': 'Bearer <API_TOKEN>'
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
eventSource.onmessage = function(event) {
|
||||||
|
const response = JSON.parse(event.data);
|
||||||
|
console.log('MCP Response:', response);
|
||||||
|
};
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## OpenAPI Specification
|
||||||
|
|
||||||
|
### Get MCP Tools Specification
|
||||||
|
|
||||||
|
* **GET** `/mcp/sse/openapi.json`
|
||||||
|
|
||||||
|
Returns the OpenAPI specification for all available MCP tools, describing the parameters and schemas for each tool.
|
||||||
|
|
||||||
|
**Response**:
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"openapi": "3.0.0",
|
||||||
|
"info": {
|
||||||
|
"title": "NetAlertX Tools",
|
||||||
|
"version": "1.1.0"
|
||||||
|
},
|
||||||
|
"servers": [{"url": "/"}],
|
||||||
|
"paths": {
|
||||||
|
"/devices/by-status": {
|
||||||
|
"post": {"operationId": "list_devices"}
|
||||||
|
},
|
||||||
|
"/device/{mac}": {
|
||||||
|
"post": {"operationId": "get_device_info"}
|
||||||
|
},
|
||||||
|
"/devices/search": {
|
||||||
|
"post": {"operationId": "search_devices"}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Available MCP Tools
|
||||||
|
|
||||||
|
### Device Management Tools
|
||||||
|
|
||||||
|
| Tool | Endpoint | Description |
|
||||||
|
|------|----------|-------------|
|
||||||
|
| `list_devices` | `/mcp/sse/devices/by-status` | List devices by online status |
|
||||||
|
| `get_device_info` | `/mcp/sse/device/<mac>` | Get detailed device information |
|
||||||
|
| `search_devices` | `/mcp/sse/devices/search` | Search devices by MAC, name, or IP |
|
||||||
|
| `get_latest_device` | `/mcp/sse/devices/latest` | Get most recently connected device |
|
||||||
|
| `set_device_alias` | `/mcp/sse/device/<mac>/set-alias` | Set device friendly name |
|
||||||
|
|
||||||
|
### Network Tools
|
||||||
|
|
||||||
|
| Tool | Endpoint | Description |
|
||||||
|
|------|----------|-------------|
|
||||||
|
| `trigger_scan` | `/mcp/sse/nettools/trigger-scan` | Trigger network discovery scan |
|
||||||
|
| `get_open_ports` | `/mcp/sse/device/open_ports` | Get stored NMAP open ports for device |
|
||||||
|
| `wol_wake_device` | `/mcp/sse/nettools/wakeonlan` | Wake device using Wake-on-LAN |
|
||||||
|
| `get_network_topology` | `/mcp/sse/devices/network/topology` | Get network topology map |
|
||||||
|
|
||||||
|
### Event & Monitoring Tools
|
||||||
|
|
||||||
|
| Tool | Endpoint | Description |
|
||||||
|
|------|----------|-------------|
|
||||||
|
| `get_recent_alerts` | `/mcp/sse/events/recent` | Get events from last 24 hours |
|
||||||
|
| `get_last_events` | `/mcp/sse/events/last` | Get 10 most recent events |
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Tool Usage Examples
|
||||||
|
|
||||||
|
### Search Devices Tool
|
||||||
|
|
||||||
|
**Tool Call**:
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"jsonrpc": "2.0",
|
||||||
|
"id": "1",
|
||||||
|
"method": "tools/call",
|
||||||
|
"params": {
|
||||||
|
"name": "search_devices",
|
||||||
|
"arguments": {
|
||||||
|
"query": "192.168.1"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
**Response**:
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"jsonrpc": "2.0",
|
||||||
|
"id": "1",
|
||||||
|
"result": {
|
||||||
|
"content": [
|
||||||
|
{
|
||||||
|
"type": "text",
|
||||||
|
"text": "{\n \"success\": true,\n \"devices\": [\n {\n \"devName\": \"Router\",\n \"devMac\": \"AA:BB:CC:DD:EE:FF\",\n \"devLastIP\": \"192.168.1.1\"\n }\n ]\n}"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"isError": false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### Trigger Network Scan Tool
|
||||||
|
|
||||||
|
**Tool Call**:
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"jsonrpc": "2.0",
|
||||||
|
"id": "2",
|
||||||
|
"method": "tools/call",
|
||||||
|
"params": {
|
||||||
|
"name": "trigger_scan",
|
||||||
|
"arguments": {
|
||||||
|
"type": "ARPSCAN"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
**Response**:
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"jsonrpc": "2.0",
|
||||||
|
"id": "2",
|
||||||
|
"result": {
|
||||||
|
"content": [
|
||||||
|
{
|
||||||
|
"type": "text",
|
||||||
|
"text": "{\n \"success\": true,\n \"message\": \"Scan triggered for type: ARPSCAN\"\n}"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"isError": false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### Wake-on-LAN Tool
|
||||||
|
|
||||||
|
**Tool Call**:
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"jsonrpc": "2.0",
|
||||||
|
"id": "3",
|
||||||
|
"method": "tools/call",
|
||||||
|
"params": {
|
||||||
|
"name": "wol_wake_device",
|
||||||
|
"arguments": {
|
||||||
|
"devMac": "AA:BB:CC:DD:EE:FF"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Integration with AI Assistants
|
||||||
|
|
||||||
|
### Claude Desktop Integration
|
||||||
|
|
||||||
|
Add to your Claude Desktop `mcp.json` configuration:
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"mcp": {
|
||||||
|
"servers": {
|
||||||
|
"netalertx": {
|
||||||
|
"command": "node",
|
||||||
|
"args": ["/path/to/mcp-client.js"],
|
||||||
|
"env": {
|
||||||
|
"NETALERTX_URL": "http://your-server:<GRAPHQL_PORT>",
|
||||||
|
"NETALERTX_TOKEN": "your-api-token"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### Generic MCP Client
|
||||||
|
|
||||||
|
```python
|
||||||
|
import asyncio
|
||||||
|
import json
|
||||||
|
from mcp import ClientSession, StdioServerParameters
|
||||||
|
from mcp.client.stdio import stdio_client
|
||||||
|
|
||||||
|
async def main():
|
||||||
|
# Connect to NetAlertX MCP server
|
||||||
|
server_params = StdioServerParameters(
|
||||||
|
command="curl",
|
||||||
|
args=[
|
||||||
|
"-N", "-H", "Authorization: Bearer <API_TOKEN>",
|
||||||
|
"http://your-server:<GRAPHQL_PORT>/mcp/sse"
|
||||||
|
]
|
||||||
|
)
|
||||||
|
|
||||||
|
async with stdio_client(server_params) as (read, write):
|
||||||
|
async with ClientSession(read, write) as session:
|
||||||
|
# Initialize connection
|
||||||
|
await session.initialize()
|
||||||
|
|
||||||
|
# List available tools
|
||||||
|
tools = await session.list_tools()
|
||||||
|
print(f"Available tools: {[t.name for t in tools.tools]}")
|
||||||
|
|
||||||
|
# Call a tool
|
||||||
|
result = await session.call_tool("search_devices", {"query": "router"})
|
||||||
|
print(f"Search result: {result}")
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
asyncio.run(main())
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Error Handling
|
||||||
|
|
||||||
|
MCP tool calls return structured error information:
|
||||||
|
|
||||||
|
**Error Response**:
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"jsonrpc": "2.0",
|
||||||
|
"id": "1",
|
||||||
|
"result": {
|
||||||
|
"content": [
|
||||||
|
{
|
||||||
|
"type": "text",
|
||||||
|
"text": "Error calling tool: Device not found"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"isError": true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
**Common Error Types**:
|
||||||
|
- `401/403` - Authentication failure
|
||||||
|
- `400` - Invalid parameters or missing required fields
|
||||||
|
- `404` - Resource not found (device, scan results, etc.)
|
||||||
|
- `500` - Internal server error
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Notes
|
||||||
|
|
||||||
|
* MCP endpoints require the same API token authentication as REST endpoints
|
||||||
|
* All MCP tools return JSON responses wrapped in MCP protocol format
|
||||||
|
* Server-Sent Events maintain persistent connections for real-time updates
|
||||||
|
* Tool parameters match their REST endpoint equivalents
|
||||||
|
* Error responses include both HTTP status codes and descriptive messages
|
||||||
|
* MCP bridge automatically handles request/response serialization
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Related Documentation
|
||||||
|
|
||||||
|
* [Main API Overview](API.md) - Core REST API documentation
|
||||||
|
* [Device API](API_DEVICE.md) - Individual device management
|
||||||
|
* [Devices Collection API](API_DEVICES.md) - Bulk device operations
|
||||||
|
* [Network Tools API](API_NETTOOLS.md) - Wake-on-LAN, scans, network utilities
|
||||||
|
* [Events API](API_EVENTS.md) - Event logging and monitoring
|
||||||
@@ -241,3 +241,12 @@ curl -X POST "http://<server_ip>:<GRAPHQL_PORT>/nettools/nmap" \
|
|||||||
curl "http://<server_ip>:<GRAPHQL_PORT>/nettools/internetinfo" \
|
curl "http://<server_ip>:<GRAPHQL_PORT>/nettools/internetinfo" \
|
||||||
-H "Authorization: Bearer <API_TOKEN>"
|
-H "Authorization: Bearer <API_TOKEN>"
|
||||||
```
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## MCP Tools
|
||||||
|
|
||||||
|
Network tools are available as **MCP Tools** for AI assistant integration:
|
||||||
|
- `wol_wake_device`, `trigger_scan`, `get_open_ports`
|
||||||
|
|
||||||
|
📖 See [MCP Server Bridge API](API_MCP.md) for AI integration details.
|
||||||
|
|||||||
@@ -16,8 +16,7 @@ Start the container via the **terminal** with a command similar to this one:
|
|||||||
docker run \
|
docker run \
|
||||||
--network=host \
|
--network=host \
|
||||||
--restart unless-stopped \
|
--restart unless-stopped \
|
||||||
-v /local_data_dir/config:/data/config \
|
-v /local_data_dir:/data \
|
||||||
-v /local_data_dir/db:/data/db \
|
|
||||||
-v /etc/localtime:/etc/localtime:ro \
|
-v /etc/localtime:/etc/localtime:ro \
|
||||||
--tmpfs /tmp:uid=20211,gid=20211,mode=1700 \
|
--tmpfs /tmp:uid=20211,gid=20211,mode=1700 \
|
||||||
-e PORT=20211 \
|
-e PORT=20211 \
|
||||||
@@ -26,6 +25,8 @@ docker run \
|
|||||||
|
|
||||||
```
|
```
|
||||||
|
|
||||||
|
Note: Your `/local_data_dir` should contain a `config` and `db` folder.
|
||||||
|
|
||||||
> [!NOTE]
|
> [!NOTE]
|
||||||
> ⚠ The most important part is NOT to use the `-d` parameter so you see the error when the container crashes. Use this error in your issue description.
|
> ⚠ The most important part is NOT to use the `-d` parameter so you see the error when the container crashes. Use this error in your issue description.
|
||||||
|
|
||||||
|
|||||||
@@ -1,18 +1,16 @@
|
|||||||
# NetAlertX and Docker Compose
|
# NetAlertX and Docker Compose
|
||||||
|
|
||||||
> [!WARNING]
|
> [!WARNING]
|
||||||
> ⚠️ **Important:** The documentation has been recently updated and some instructions may have changed.
|
> ⚠️ **Important:** The docker-compose has recently changed. Carefully read the [Migration guide](https://jokob-sk.github.io/NetAlertX/MIGRATION/?h=migrat#12-migration-from-netalertx-v25524) for detailed instructions.
|
||||||
> If you are using the currently live production image, please follow the instructions on [Docker Hub](https://hub.docker.com/r/jokobsk/netalertx) for building and running the container.
|
|
||||||
> These docs reflect the latest development version and may differ from the production image.
|
|
||||||
|
|
||||||
Great care is taken to ensure NetAlertX meets the needs of everyone while being flexible enough for anyone. This document outlines how you can configure your docker-compose. There are many settings, so we recommend using the Baseline Docker Compose as-is, or modifying it for your system.
|
Great care is taken to ensure NetAlertX meets the needs of everyone while being flexible enough for anyone. This document outlines how you can configure your docker-compose. There are many settings, so we recommend using the Baseline Docker Compose as-is, or modifying it for your system.
|
||||||
|
|
||||||
> [!NOTE]
|
> [!NOTE]
|
||||||
> The container needs to run in `network_mode:"host"` to access Layer 2 networking such as arp, nmap and others. Due to lack of support for this feature, Windows host is not a supported operating system.
|
> The container needs to run in `network_mode:"host"` to access Layer 2 networking such as arp, nmap and others. Due to lack of support for this feature, Windows host is not a supported operating system.
|
||||||
|
|
||||||
## Baseline Docker Compose
|
## Baseline Docker Compose
|
||||||
|
|
||||||
There is one baseline for NetAlertX. That's the default security-enabled official distribution.
|
There is one baseline for NetAlertX. That's the default security-enabled official distribution.
|
||||||
|
|
||||||
```yaml
|
```yaml
|
||||||
services:
|
services:
|
||||||
@@ -45,7 +43,7 @@ services:
|
|||||||
# - /home/user/netalertx_data:/data:rw
|
# - /home/user/netalertx_data:/data:rw
|
||||||
|
|
||||||
- type: bind # Bind mount for timezone consistency
|
- type: bind # Bind mount for timezone consistency
|
||||||
source: /etc/localtime
|
source: /etc/localtime
|
||||||
target: /etc/localtime
|
target: /etc/localtime
|
||||||
read_only: true
|
read_only: true
|
||||||
|
|
||||||
@@ -125,9 +123,9 @@ docker compose up
|
|||||||
|
|
||||||
### Modification 1: Use a Local Folder (Bind Mount)
|
### Modification 1: Use a Local Folder (Bind Mount)
|
||||||
|
|
||||||
By default, the baseline compose file uses a single named volume (netalertx_data) mounted at /data. This single-volume layout is preferred because NetAlertX manages both configuration and the database under /data (for example, /data/config and /data/db) via its web UI. Using one named volume simplifies permissions and portability: Docker manages the storage and NetAlertX manages the files inside /data.
|
By default, the baseline compose file uses a single named volume (netalertx_data) mounted at `/data`. This single-volume layout is preferred because NetAlertX manages both configuration and the database under `/data` (for example, `/data/config` and `/data/db`) via its web UI. Using one named volume simplifies permissions and portability: Docker manages the storage and NetAlertX manages the files inside `/data`.
|
||||||
|
|
||||||
A two-volume layout that mounts /data/config and /data/db separately (for example, netalertx_config and netalertx_db) is supported for backward compatibility and some advanced workflows, but it is an abnormal/legacy layout and not recommended for new deployments.
|
A two-volume layout that mounts `/data/config` and `/data/db` separately (for example, `netalertx_config` and `netalertx_db`) is supported for backward compatibility and some advanced workflows, but it is an abnormal/legacy layout and not recommended for new deployments.
|
||||||
|
|
||||||
However, if you prefer to have direct, file-level access to your configuration for manual editing, a "bind mount" is a simple alternative. This tells Docker to use a specific folder from your computer (the "host") inside the container.
|
However, if you prefer to have direct, file-level access to your configuration for manual editing, a "bind mount" is a simple alternative. This tells Docker to use a specific folder from your computer (the "host") inside the container.
|
||||||
|
|
||||||
@@ -187,7 +185,7 @@ services:
|
|||||||
environment:
|
environment:
|
||||||
- PORT=${PORT}
|
- PORT=${PORT}
|
||||||
- GRAPHQL_PORT=${GRAPHQL_PORT}
|
- GRAPHQL_PORT=${GRAPHQL_PORT}
|
||||||
|
|
||||||
...
|
...
|
||||||
```
|
```
|
||||||
|
|
||||||
|
|||||||
@@ -25,8 +25,7 @@ Head to [https://netalertx.com/](https://netalertx.com/) for more gifs and scree
|
|||||||
|
|
||||||
```bash
|
```bash
|
||||||
docker run -d --rm --network=host \
|
docker run -d --rm --network=host \
|
||||||
-v /local_data_dir/config:/data/config \
|
-v /local_data_dir:/data \
|
||||||
-v /local_data_dir/db:/data/db \
|
|
||||||
-v /etc/localtime:/etc/localtime \
|
-v /etc/localtime:/etc/localtime \
|
||||||
--tmpfs /tmp:uid=20211,gid=20211,mode=1700 \
|
--tmpfs /tmp:uid=20211,gid=20211,mode=1700 \
|
||||||
-e PORT=20211 \
|
-e PORT=20211 \
|
||||||
@@ -62,8 +61,7 @@ See alternative [docked-compose examples](https://github.com/jokob-sk/NetAlertX/
|
|||||||
|
|
||||||
| Required | Path | Description |
|
| Required | Path | Description |
|
||||||
| :------------- | :------------- | :-------------|
|
| :------------- | :------------- | :-------------|
|
||||||
| ✅ | `:/data/config` | Folder which will contain the `app.conf` & `devices.csv` ([read about devices.csv](https://github.com/jokob-sk/NetAlertX/blob/main/docs/DEVICES_BULK_EDITING.md)) files |
|
| ✅ | `:/data` | Folder which will contain the `/db/app.db`, `/config/app.conf` & `/config/devices.csv` ([read about devices.csv](https://github.com/jokob-sk/NetAlertX/blob/main/docs/DEVICES_BULK_EDITING.md)) files |
|
||||||
| ✅ | `:/data/db` | Folder which will contain the `app.db` database file |
|
|
||||||
| ✅ | `/etc/localtime:/etc/localtime:ro` | Ensuring the timezone is the same as on the server. |
|
| ✅ | `/etc/localtime:/etc/localtime:ro` | Ensuring the timezone is the same as on the server. |
|
||||||
| | `:/tmp/log` | Logs folder useful for debugging if you have issues setting up the container |
|
| | `:/tmp/log` | Logs folder useful for debugging if you have issues setting up the container |
|
||||||
| | `:/tmp/api` | The [API endpoint](https://github.com/jokob-sk/NetAlertX/blob/main/docs/API.md) containing static (but regularly updated) json and other files. Path configurable via `NETALERTX_API` environment variable. |
|
| | `:/tmp/api` | The [API endpoint](https://github.com/jokob-sk/NetAlertX/blob/main/docs/API.md) containing static (but regularly updated) json and other files. Path configurable via `NETALERTX_API` environment variable. |
|
||||||
|
|||||||
@@ -1,20 +1,18 @@
|
|||||||
# The NetAlertX Container Operator's Guide
|
# The NetAlertX Container Operator's Guide
|
||||||
|
|
||||||
> [!WARNING]
|
> [!WARNING]
|
||||||
> ⚠️ **Important:** The documentation has been recently updated and some instructions may have changed.
|
> ⚠️ **Important:** The docker-compose has recently changed. Carefully read the [Migration guide](https://jokob-sk.github.io/NetAlertX/MIGRATION/?h=migrat#12-migration-from-netalertx-v25524) for detailed instructions.
|
||||||
> If you are using the currently live production image, please follow the instructions on [Docker Hub](https://hub.docker.com/r/jokobsk/netalertx) for building and running the container.
|
|
||||||
> These docs reflect the latest development version and may differ from the production image.
|
|
||||||
|
|
||||||
This guide assumes you are starting with the official `docker-compose.yml` file provided with the project. We strongly recommend you start with or migrate to this file as your baseline and modify it to suit your specific needs (e.g., changing file paths). While there are many ways to configure NetAlertX, the default file is designed to meet the mandatory security baseline with layer-2 networking capabilities while operating securely and without startup warnings.
|
This guide assumes you are starting with the official `docker-compose.yml` file provided with the project. We strongly recommend you start with or migrate to this file as your baseline and modify it to suit your specific needs (e.g., changing file paths). While there are many ways to configure NetAlertX, the default file is designed to meet the mandatory security baseline with layer-2 networking capabilities while operating securely and without startup warnings.
|
||||||
|
|
||||||
This guide provides direct, concise solutions for common NetAlertX administrative tasks. It is structured to help you identify a problem, implement the solution, and understand the details.
|
This guide provides direct, concise solutions for common NetAlertX administrative tasks. It is structured to help you identify a problem, implement the solution, and understand the details.
|
||||||
|
|
||||||
## Guide Contents
|
## Guide Contents
|
||||||
|
|
||||||
- Using a Local Folder for Configuration
|
- Using a Local Folder for Configuration
|
||||||
- Migrating from a Local Folder to a Docker Volume
|
- Migrating from a Local Folder to a Docker Volume
|
||||||
- Applying a Custom Nginx Configuration
|
- Applying a Custom Nginx Configuration
|
||||||
- Mounting Additional Files for Plugins
|
- Mounting Additional Files for Plugins
|
||||||
|
|
||||||
|
|
||||||
> [!NOTE]
|
> [!NOTE]
|
||||||
|
|||||||
@@ -78,7 +78,7 @@ In the **Environment variables** section of Portainer, add the following:
|
|||||||
>
|
>
|
||||||
> `sudo chown -R 20211:20211 /local_data_dir`
|
> `sudo chown -R 20211:20211 /local_data_dir`
|
||||||
>
|
>
|
||||||
> `sudo chmod -R a+rwx /local_data_dir1`
|
> `sudo chmod -R a+rwx /local_data_dir`
|
||||||
>
|
>
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@@ -46,8 +46,7 @@ NetAlertX requires certain paths to be writable at runtime. These paths should b
|
|||||||
|
|
||||||
```bash
|
```bash
|
||||||
docker run -it --rm --name netalertx --user "0" \
|
docker run -it --rm --name netalertx --user "0" \
|
||||||
-v /local_data_dir/config:/data/config \
|
-v /local_data_dir:/data \
|
||||||
-v /local_data_dir/db:/data/db \
|
|
||||||
--tmpfs /tmp:uid=20211,gid=20211,mode=1700 \
|
--tmpfs /tmp:uid=20211,gid=20211,mode=1700 \
|
||||||
ghcr.io/jokob-sk/netalertx:latest
|
ghcr.io/jokob-sk/netalertx:latest
|
||||||
```
|
```
|
||||||
@@ -63,7 +62,7 @@ docker run -it --rm --name netalertx --user "0" \
|
|||||||
>
|
>
|
||||||
> `sudo chown -R 20211:20211 /local_data_dir`
|
> `sudo chown -R 20211:20211 /local_data_dir`
|
||||||
>
|
>
|
||||||
> `sudo chmod -R a+rwx /local_data_dir1`
|
> `sudo chmod -R a+rwx /local_data_dir`
|
||||||
>
|
>
|
||||||
|
|
||||||
---
|
---
|
||||||
@@ -84,8 +83,7 @@ services:
|
|||||||
- NET_BIND_SERVICE # Required to bind to privileged ports (nbtscan)
|
- NET_BIND_SERVICE # Required to bind to privileged ports (nbtscan)
|
||||||
restart: unless-stopped
|
restart: unless-stopped
|
||||||
volumes:
|
volumes:
|
||||||
- /local_data_dir/config:/data/config
|
- /local_data_dir:/data
|
||||||
- /local_data_dir/db:/data/db
|
|
||||||
- /etc/localtime:/etc/localtime
|
- /etc/localtime:/etc/localtime
|
||||||
environment:
|
environment:
|
||||||
- PORT=20211
|
- PORT=20211
|
||||||
|
|||||||
@@ -1,11 +1,5 @@
|
|||||||
# Migration
|
# Migration
|
||||||
|
|
||||||
> [!WARNING]
|
|
||||||
> ⚠️ **Important:** The documentation has been recently updated and some instructions may have changed.
|
|
||||||
> If you are using the currently live production image, please follow the instructions on [Docker Hub](https://hub.docker.com/r/jokobsk/netalertx) for building and running the container.
|
|
||||||
> These docs reflect the latest development version and may differ from the production image.
|
|
||||||
|
|
||||||
|
|
||||||
When upgrading from older versions of NetAlertX (or PiAlert by jokob-sk), follow the migration steps below to ensure your data and configuration are properly transferred.
|
When upgrading from older versions of NetAlertX (or PiAlert by jokob-sk), follow the migration steps below to ensure your data and configuration are properly transferred.
|
||||||
|
|
||||||
> [!TIP]
|
> [!TIP]
|
||||||
@@ -245,30 +239,7 @@ services:
|
|||||||
|
|
||||||
4. Start the container and verify everything works as expected.
|
4. Start the container and verify everything works as expected.
|
||||||
5. Stop the container.
|
5. Stop the container.
|
||||||
6. Perform a one-off migration to the latest `netalertx` image and `20211` user:
|
6. Update the `docker-compose.yml` as per example below.
|
||||||
|
|
||||||
> [!NOTE]
|
|
||||||
> The example below assumes your `/config` and `/db` folders are stored in `local_data_dir`.
|
|
||||||
> Replace this path with your actual configuration directory. `netalertx` is the container name, which might differ from your setup.
|
|
||||||
|
|
||||||
```sh
|
|
||||||
docker run -it --rm --name netalertx --user "0" \
|
|
||||||
-v /local_data_dir/config:/data/config \
|
|
||||||
-v /local_data_dir/db:/data/db \
|
|
||||||
--tmpfs /tmp:uid=20211,gid=20211,mode=1700 \
|
|
||||||
ghcr.io/jokob-sk/netalertx:latest
|
|
||||||
```
|
|
||||||
|
|
||||||
..or alternatively execute:
|
|
||||||
|
|
||||||
```bash
|
|
||||||
sudo chown -R 20211:20211 /local_data_dir/config
|
|
||||||
sudo chown -R 20211:20211 /local_data_dir/db
|
|
||||||
sudo chmod -R a+rwx /local_data_dir/
|
|
||||||
```
|
|
||||||
|
|
||||||
7. Stop the container
|
|
||||||
8. Update the `docker-compose.yml` as per example below.
|
|
||||||
|
|
||||||
```yaml
|
```yaml
|
||||||
services:
|
services:
|
||||||
@@ -284,8 +255,7 @@ services:
|
|||||||
- NET_BIND_SERVICE # 🆕 New line
|
- NET_BIND_SERVICE # 🆕 New line
|
||||||
restart: unless-stopped
|
restart: unless-stopped
|
||||||
volumes:
|
volumes:
|
||||||
- /local_data_dir/config:/data/config # 🆕 This has changed from /app to /data
|
- /local_data_dir:/data # 🆕 This folder contains your /db and /config directories and the parent changed from /app to /data
|
||||||
- /local_data_dir/db:/data/db # 🆕 This has changed from /app to /data
|
|
||||||
# Ensuring the timezone is the same as on the server - make sure also the TIMEZONE setting is configured
|
# Ensuring the timezone is the same as on the server - make sure also the TIMEZONE setting is configured
|
||||||
- /etc/localtime:/etc/localtime:ro # 🆕 New line
|
- /etc/localtime:/etc/localtime:ro # 🆕 New line
|
||||||
environment:
|
environment:
|
||||||
@@ -296,5 +266,33 @@ services:
|
|||||||
- "/tmp:uid=20211,gid=20211,mode=1700,rw,noexec,nosuid,nodev,async,noatime,nodiratime"
|
- "/tmp:uid=20211,gid=20211,mode=1700,rw,noexec,nosuid,nodev,async,noatime,nodiratime"
|
||||||
# 🆕 New "tmpfs" section END 🔼
|
# 🆕 New "tmpfs" section END 🔼
|
||||||
```
|
```
|
||||||
|
7. Perform a one-off migration to the latest `netalertx` image and `20211` user.
|
||||||
|
|
||||||
9. Start the container and verify everything works as expected.
|
> [!NOTE]
|
||||||
|
> The examples below assume your `/config` and `/db` folders are stored in `local_data_dir`.
|
||||||
|
> Replace this path with your actual configuration directory. `netalertx` is the container name, which might differ from your setup.
|
||||||
|
|
||||||
|
**Automated approach**:
|
||||||
|
|
||||||
|
Run the container with the `--user "0"` parameter. Please note, some systems will require the manual approach below.
|
||||||
|
|
||||||
|
```sh
|
||||||
|
docker run -it --rm --name netalertx --user "0" \
|
||||||
|
-v /local_data_dir/config:/data/config \
|
||||||
|
-v /local_data_dir/db:/data/db \
|
||||||
|
--tmpfs /tmp:uid=20211,gid=20211,mode=1700 \
|
||||||
|
ghcr.io/jokob-sk/netalertx:latest
|
||||||
|
```
|
||||||
|
|
||||||
|
Stop the container and run it as you would normally.
|
||||||
|
|
||||||
|
**Manual approach**:
|
||||||
|
|
||||||
|
Use the manual approach if the Automated approach fails. Execute the below commands:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
sudo chown -R 20211:20211 /local_data_dir
|
||||||
|
sudo chmod -R a+rwx /local_data_dir
|
||||||
|
```
|
||||||
|
|
||||||
|
8. Start the container and verify everything works as expected.
|
||||||
@@ -13,13 +13,13 @@ There is also an in-app Help / FAQ section that should be answering frequently a
|
|||||||
|
|
||||||
#### 🐳 Docker (Fully supported)
|
#### 🐳 Docker (Fully supported)
|
||||||
|
|
||||||
- The main installation method is as a [docker container - follow these instructions here](./DOCKER_INSTALLATION.md).
|
- The main installation method is as a [docker container - follow these instructions here](./DOCKER_INSTALLATION.md).
|
||||||
|
|
||||||
#### 💻 Bare-metal / On-server (Experimental/community supported 🧪)
|
#### 💻 Bare-metal / On-server (Experimental/community supported 🧪)
|
||||||
|
|
||||||
- [(Experimental 🧪) On-hardware instructions](./HW_INSTALL.md)
|
- [(Experimental 🧪) On-hardware instructions](./HW_INSTALL.md)
|
||||||
|
|
||||||
- Alternative bare-metal install forks:
|
- Alternative bare-metal install forks:
|
||||||
- [leiweibau's fork](https://github.com/leiweibau/Pi.Alert/) (maintained)
|
- [leiweibau's fork](https://github.com/leiweibau/Pi.Alert/) (maintained)
|
||||||
- [pucherot's original code](https://github.com/pucherot/Pi.Alert/) (un-maintained)
|
- [pucherot's original code](https://github.com/pucherot/Pi.Alert/) (un-maintained)
|
||||||
|
|
||||||
@@ -63,7 +63,6 @@ There is also an in-app Help / FAQ section that should be answering frequently a
|
|||||||
|
|
||||||
#### ♻ Misc
|
#### ♻ Misc
|
||||||
|
|
||||||
- [Version history (legacy)](./VERSIONS_HISTORY.md)
|
|
||||||
- [Reverse proxy (Nginx, Apache, SWAG)](./REVERSE_PROXY.md)
|
- [Reverse proxy (Nginx, Apache, SWAG)](./REVERSE_PROXY.md)
|
||||||
- [Installing Updates](./UPDATES.md)
|
- [Installing Updates](./UPDATES.md)
|
||||||
- [Setting up Authelia](./AUTHELIA.md) (DRAFT)
|
- [Setting up Authelia](./AUTHELIA.md) (DRAFT)
|
||||||
@@ -80,27 +79,27 @@ There is also an in-app Help / FAQ section that should be answering frequently a
|
|||||||
- [Frontend development tips](./FRONTEND_DEVELOPMENT.md)
|
- [Frontend development tips](./FRONTEND_DEVELOPMENT.md)
|
||||||
- [Webhook secrets](./WEBHOOK_SECRET.md)
|
- [Webhook secrets](./WEBHOOK_SECRET.md)
|
||||||
|
|
||||||
Feel free to suggest or submit new docs via a PR.
|
Feel free to suggest or submit new docs via a PR.
|
||||||
|
|
||||||
## 👨💻 Development priorities
|
## 👨💻 Development priorities
|
||||||
|
|
||||||
Priorities from highest to lowest:
|
Priorities from highest to lowest:
|
||||||
|
|
||||||
* 🔼 Fixing core functionality bugs not solvable with workarounds
|
* 🔼 Fixing core functionality bugs not solvable with workarounds
|
||||||
* 🔵 New core functionality unlocking other opportunities (e.g.: plugins)
|
* 🔵 New core functionality unlocking other opportunities (e.g.: plugins)
|
||||||
* 🔵 Refactoring enabling faster implementation of future functionality
|
* 🔵 Refactoring enabling faster implementation of future functionality
|
||||||
* 🔽 (low) UI functionality & improvements (PRs welcome 😉)
|
* 🔽 (low) UI functionality & improvements (PRs welcome 😉)
|
||||||
|
|
||||||
Design philosophy: Focus on core functionality and leverage existing apps and tools to make NetAlertX integrate into other workflows.
|
Design philosophy: Focus on core functionality and leverage existing apps and tools to make NetAlertX integrate into other workflows.
|
||||||
|
|
||||||
Examples:
|
Examples:
|
||||||
|
|
||||||
1. Supporting apprise makes more sense than implementing multiple individual notification gateways
|
1. Supporting apprise makes more sense than implementing multiple individual notification gateways
|
||||||
2. Implementing regular expression support across settings for validation makes more sense than validating one setting with a specific expression.
|
2. Implementing regular expression support across settings for validation makes more sense than validating one setting with a specific expression.
|
||||||
|
|
||||||
UI-specific requests are a low priority as the framework picked by the original developer is not very extensible (and afaik doesn't support components) and has limited mobile support. Also, I argue the value proposition is smaller than working on something else.
|
UI-specific requests are a low priority as the framework picked by the original developer is not very extensible (and afaik doesn't support components) and has limited mobile support. Also, I argue the value proposition is smaller than working on something else.
|
||||||
|
|
||||||
Feel free to submit PRs if interested. try to **keep the PRs small/on-topic** so they are easier to review and approve.
|
Feel free to submit PRs if interested. try to **keep the PRs small/on-topic** so they are easier to review and approve.
|
||||||
|
|
||||||
That being said, I'd reconsider if more people and or recurring sponsors file a request 😉.
|
That being said, I'd reconsider if more people and or recurring sponsors file a request 😉.
|
||||||
|
|
||||||
@@ -112,8 +111,8 @@ Please be as detailed as possible with **workarounds** you considered and why a
|
|||||||
|
|
||||||
If you submit a PR please:
|
If you submit a PR please:
|
||||||
|
|
||||||
1. Check that your changes are backward compatible with existing installations and with a blank setup.
|
1. Check that your changes are backward compatible with existing installations and with a blank setup.
|
||||||
2. Existing features should always be preserved.
|
2. Existing features should always be preserved.
|
||||||
3. Keep the PR small, on-topic and don't change code that is not necessary for the PR to work
|
3. Keep the PR small, on-topic and don't change code that is not necessary for the PR to work
|
||||||
4. New features code should ideally be re-usable for different purposes, not for a very narrow use case.
|
4. New features code should ideally be re-usable for different purposes, not for a very narrow use case.
|
||||||
5. New functionality should ideally be implemented via the Plugins system, if possible.
|
5. New functionality should ideally be implemented via the Plugins system, if possible.
|
||||||
@@ -131,13 +130,13 @@ Suggested test cases:
|
|||||||
Some additional context:
|
Some additional context:
|
||||||
|
|
||||||
* Permanent settings/config is stored in the `app.conf` file
|
* Permanent settings/config is stored in the `app.conf` file
|
||||||
* Currently temporary (session?) settings are stored in the `Parameters` DB table as key-value pairs. This table is wiped during a container rebuild/restart and its values are re-initialized from cookies/session data from the browser.
|
* Currently temporary (session?) settings are stored in the `Parameters` DB table as key-value pairs. This table is wiped during a container rebuild/restart and its values are re-initialized from cookies/session data from the browser.
|
||||||
|
|
||||||
## 🐛 Submitting an issue or bug
|
## 🐛 Submitting an issue or bug
|
||||||
|
|
||||||
Before submitting a new issue please spend a couple of minutes on research:
|
Before submitting a new issue please spend a couple of minutes on research:
|
||||||
|
|
||||||
* Check [🛑 Common issues](./DEBUG_TIPS.md#common-issues)
|
* Check [🛑 Common issues](./DEBUG_TIPS.md#common-issues)
|
||||||
* Check [💡 Closed issues](https://github.com/jokob-sk/NetAlertX/issues?q=is%3Aissue+is%3Aclosed) if a similar issue was solved in the past.
|
* Check [💡 Closed issues](https://github.com/jokob-sk/NetAlertX/issues?q=is%3Aissue+is%3Aclosed) if a similar issue was solved in the past.
|
||||||
* When submitting an issue ❗[enable debug](./DEBUG_TIPS.md)❗
|
* When submitting an issue ❗[enable debug](./DEBUG_TIPS.md)❗
|
||||||
|
|
||||||
|
|||||||
@@ -47,8 +47,7 @@ services:
|
|||||||
- NET_ADMIN
|
- NET_ADMIN
|
||||||
- NET_BIND_SERVICE
|
- NET_BIND_SERVICE
|
||||||
volumes:
|
volumes:
|
||||||
- /app_storage/netalertx/config:/data/config
|
- /app_storage/netalertx:/data
|
||||||
- /app_storage/netalertx/db:/data/db
|
|
||||||
# to sync with system time
|
# to sync with system time
|
||||||
- /etc/localtime:/etc/localtime:ro
|
- /etc/localtime:/etc/localtime:ro
|
||||||
tmpfs:
|
tmpfs:
|
||||||
@@ -66,10 +65,7 @@ services:
|
|||||||
|
|
||||||
```yaml
|
```yaml
|
||||||
volumes:
|
volumes:
|
||||||
- /volume1/app_storage/netalertx/config:/data/config
|
- /volume1/app_storage/netalertx:/data
|
||||||
- /volume1/app_storage/netalertx/db:/data/db
|
|
||||||
# (optional) useful for debugging if you have issues setting up the container
|
|
||||||
# - local/path/logs:/tmp/log <- commented out with # ⚠
|
|
||||||
```
|
```
|
||||||
|
|
||||||

|

|
||||||
@@ -88,5 +84,5 @@ services:
|
|||||||
>
|
>
|
||||||
> `sudo chown -R 20211:20211 /local_data_dir`
|
> `sudo chown -R 20211:20211 /local_data_dir`
|
||||||
>
|
>
|
||||||
> `sudo chmod -R a+rwx /local_data_dir1`
|
> `sudo chmod -R a+rwx /local_data_dir`
|
||||||
>
|
>
|
||||||
|
|||||||
@@ -72,7 +72,7 @@ a[target="_blank"] {
|
|||||||
|
|
||||||
[data-is-valid="0"] {
|
[data-is-valid="0"] {
|
||||||
/* border: 1px solid red; */
|
/* border: 1px solid red; */
|
||||||
background-color: #ff4b4b;
|
background-color: #ff4b4b !important;
|
||||||
}
|
}
|
||||||
|
|
||||||
/* -----------------------------------------------------------------------------
|
/* -----------------------------------------------------------------------------
|
||||||
|
|||||||
@@ -1,7 +1,7 @@
|
|||||||
<!--
|
<!--
|
||||||
#---------------------------------------------------------------------------------#
|
#---------------------------------------------------------------------------------#
|
||||||
# NetAlertX #
|
# NetAlertX #
|
||||||
# Open Source Network Guard / WIFI & LAN intrusion detector #
|
# Open Source Network Guard / WIFI & LAN intrusion detector #
|
||||||
# #
|
# #
|
||||||
# devices.php - Front module. Devices list page #
|
# devices.php - Front module. Devices list page #
|
||||||
#---------------------------------------------------------------------------------#
|
#---------------------------------------------------------------------------------#
|
||||||
@@ -15,7 +15,7 @@
|
|||||||
<?php
|
<?php
|
||||||
|
|
||||||
require 'php/templates/header.php';
|
require 'php/templates/header.php';
|
||||||
|
|
||||||
// check permissions
|
// check permissions
|
||||||
// Use environment-aware paths with fallback to legacy locations
|
// Use environment-aware paths with fallback to legacy locations
|
||||||
$dbFolderPath = rtrim(getenv('NETALERTX_DB') ?: '/data/db', '/');
|
$dbFolderPath = rtrim(getenv('NETALERTX_DB') ?: '/data/db', '/');
|
||||||
@@ -36,7 +36,7 @@
|
|||||||
?>
|
?>
|
||||||
|
|
||||||
<!-- ----------------------------------------------------------------------- -->
|
<!-- ----------------------------------------------------------------------- -->
|
||||||
|
|
||||||
|
|
||||||
<!-- Page ------------------------------------------------------------------ -->
|
<!-- Page ------------------------------------------------------------------ -->
|
||||||
<div class="content-wrapper">
|
<div class="content-wrapper">
|
||||||
@@ -55,15 +55,15 @@
|
|||||||
<div class="col-md-12">
|
<div class="col-md-12">
|
||||||
<div class="box" id="clients">
|
<div class="box" id="clients">
|
||||||
<div class="box-header ">
|
<div class="box-header ">
|
||||||
<h3 class="box-title col-md-12"><?= lang('Device_Shortcut_OnlineChart');?> </h3>
|
<h3 class="box-title col-md-12"><?= lang('Device_Shortcut_OnlineChart');?> </h3>
|
||||||
</div>
|
</div>
|
||||||
<div class="box-body">
|
<div class="box-body">
|
||||||
<div class="chart">
|
<div class="chart">
|
||||||
<script src="lib/chart.js/Chart.js?v=<?php include 'php/templates/version.php'; ?>"></script>
|
<script src="lib/chart.js/Chart.js?v=<?php include 'php/templates/version.php'; ?>"></script>
|
||||||
<!-- presence chart -->
|
<!-- presence chart -->
|
||||||
<?php
|
<?php
|
||||||
require 'php/components/graph_online_history.php';
|
require 'php/components/graph_online_history.php';
|
||||||
?>
|
?>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
<!-- /.box-body -->
|
<!-- /.box-body -->
|
||||||
@@ -74,7 +74,7 @@
|
|||||||
<!-- Device Filters ------------------------------------------------------- -->
|
<!-- Device Filters ------------------------------------------------------- -->
|
||||||
<div class="box box-aqua hidden" id="columnFiltersWrap">
|
<div class="box box-aqua hidden" id="columnFiltersWrap">
|
||||||
<div class="box-header ">
|
<div class="box-header ">
|
||||||
<h3 class="box-title col-md-12"><?= lang('Devices_Filters');?> </h3>
|
<h3 class="box-title col-md-12"><?= lang('Devices_Filters');?> </h3>
|
||||||
</div>
|
</div>
|
||||||
<!-- Placeholder ------------------------------------------------------- -->
|
<!-- Placeholder ------------------------------------------------------- -->
|
||||||
<div id="columnFilters" ></div>
|
<div id="columnFilters" ></div>
|
||||||
@@ -88,8 +88,8 @@
|
|||||||
<!-- box-header -->
|
<!-- box-header -->
|
||||||
<div class="box-header">
|
<div class="box-header">
|
||||||
<div class=" col-sm-8 ">
|
<div class=" col-sm-8 ">
|
||||||
<h3 id="tableDevicesTitle" class="box-title text-gray "></h3>
|
<h3 id="tableDevicesTitle" class="box-title text-gray "></h3>
|
||||||
</div>
|
</div>
|
||||||
<div class="dummyDevice col-sm-4 ">
|
<div class="dummyDevice col-sm-4 ">
|
||||||
<span id="multiEditPlc">
|
<span id="multiEditPlc">
|
||||||
<!-- multi edit button placeholder -->
|
<!-- multi edit button placeholder -->
|
||||||
@@ -104,8 +104,8 @@
|
|||||||
<div class="box-body table-responsive">
|
<div class="box-body table-responsive">
|
||||||
<table id="tableDevices" class="table table-bordered table-hover table-striped">
|
<table id="tableDevices" class="table table-bordered table-hover table-striped">
|
||||||
<thead>
|
<thead>
|
||||||
<tr>
|
<tr>
|
||||||
|
|
||||||
</tr>
|
</tr>
|
||||||
</thead>
|
</thead>
|
||||||
</table>
|
</table>
|
||||||
@@ -122,7 +122,7 @@
|
|||||||
<!-- ----------------------------------------------------------------------- -->
|
<!-- ----------------------------------------------------------------------- -->
|
||||||
</section>
|
</section>
|
||||||
<!-- /.content -->
|
<!-- /.content -->
|
||||||
|
|
||||||
</div>
|
</div>
|
||||||
<!-- /.content-wrapper -->
|
<!-- /.content-wrapper -->
|
||||||
|
|
||||||
@@ -136,9 +136,9 @@
|
|||||||
<!-- page script ----------------------------------------------------------- -->
|
<!-- page script ----------------------------------------------------------- -->
|
||||||
<script>
|
<script>
|
||||||
var deviceStatus = 'all';
|
var deviceStatus = 'all';
|
||||||
var tableRows = getCache ("nax_parTableRows") == "" ? parseInt(getSetting("UI_DEFAULT_PAGE_SIZE")) : getCache ("nax_parTableRows") ;
|
|
||||||
var tableOrder = getCache ("nax_parTableOrder") == "" ? [[3,'desc'], [0,'asc']] : JSON.parse(getCache ("nax_parTableOrder")) ;
|
var tableOrder = getCache ("nax_parTableOrder") == "" ? [[3,'desc'], [0,'asc']] : JSON.parse(getCache ("nax_parTableOrder")) ;
|
||||||
|
|
||||||
var tableColumnHide = [];
|
var tableColumnHide = [];
|
||||||
var tableColumnOrder = [];
|
var tableColumnOrder = [];
|
||||||
var tableColumnVisible = [];
|
var tableColumnVisible = [];
|
||||||
@@ -161,7 +161,7 @@ function main () {
|
|||||||
|
|
||||||
//initialize the table headers in the correct order
|
//initialize the table headers in the correct order
|
||||||
var availableColumns = getSettingOptions("UI_device_columns").split(",");
|
var availableColumns = getSettingOptions("UI_device_columns").split(",");
|
||||||
headersDefaultOrder = availableColumns.map(val => getString(val));
|
headersDefaultOrder = availableColumns.map(val => getString(val));
|
||||||
|
|
||||||
var selectedColumns = JSON.parse(getSetting("UI_device_columns").replace(/'/g, '"'));
|
var selectedColumns = JSON.parse(getSetting("UI_device_columns").replace(/'/g, '"'));
|
||||||
|
|
||||||
@@ -190,10 +190,10 @@ function main () {
|
|||||||
|
|
||||||
// Initialize components with parameters
|
// Initialize components with parameters
|
||||||
initializeDatatable(getUrlAnchor('my_devices'));
|
initializeDatatable(getUrlAnchor('my_devices'));
|
||||||
|
|
||||||
// check if data outdated and show spinner if so
|
// check if data outdated and show spinner if so
|
||||||
handleLoadingDialog()
|
handleLoadingDialog()
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// -----------------------------------------------------------------------------
|
// -----------------------------------------------------------------------------
|
||||||
@@ -202,7 +202,7 @@ function mapIndx(oldIndex)
|
|||||||
{
|
{
|
||||||
// console.log(oldIndex);
|
// console.log(oldIndex);
|
||||||
// console.log(tableColumnOrder);
|
// console.log(tableColumnOrder);
|
||||||
|
|
||||||
for(i=0;i<tableColumnOrder.length;i++)
|
for(i=0;i<tableColumnOrder.length;i++)
|
||||||
{
|
{
|
||||||
if(tableColumnOrder[i] == oldIndex)
|
if(tableColumnOrder[i] == oldIndex)
|
||||||
@@ -311,7 +311,7 @@ function processDeviceTotals(devicesData) {
|
|||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
// Render info boxes/tile cards
|
// Render info boxes/tile cards
|
||||||
renderInfoboxes(dataArray);
|
renderInfoboxes(dataArray);
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -350,9 +350,9 @@ function initFilters() {
|
|||||||
nocache: Date.now() // Prevent caching with a timestamp
|
nocache: Date.now() // Prevent caching with a timestamp
|
||||||
},
|
},
|
||||||
success: function(response) {
|
success: function(response) {
|
||||||
if (response && response.data) {
|
if (response && response.data) {
|
||||||
|
|
||||||
let resultJSON = response.data;
|
let resultJSON = response.data;
|
||||||
|
|
||||||
// Save the result to cache
|
// Save the result to cache
|
||||||
setCache("devicesFilters", JSON.stringify(resultJSON));
|
setCache("devicesFilters", JSON.stringify(resultJSON));
|
||||||
@@ -381,7 +381,7 @@ function initFilters() {
|
|||||||
});
|
});
|
||||||
|
|
||||||
// Filter resultJSON to include only entries with columnName in columnFilters
|
// Filter resultJSON to include only entries with columnName in columnFilters
|
||||||
resultJSON = resultJSON.filter(entry =>
|
resultJSON = resultJSON.filter(entry =>
|
||||||
columnFilters.some(filter => filter[0] === entry.columnName)
|
columnFilters.some(filter => filter[0] === entry.columnName)
|
||||||
);
|
);
|
||||||
|
|
||||||
@@ -451,7 +451,7 @@ function initFilters() {
|
|||||||
function renderFilters(customData) {
|
function renderFilters(customData) {
|
||||||
|
|
||||||
// console.log(JSON.stringify(customData));
|
// console.log(JSON.stringify(customData));
|
||||||
|
|
||||||
// Load filter data from the JSON file
|
// Load filter data from the JSON file
|
||||||
$.ajax({
|
$.ajax({
|
||||||
url: 'php/components/devices_filters.php', // PHP script URL
|
url: 'php/components/devices_filters.php', // PHP script URL
|
||||||
@@ -471,7 +471,7 @@ function renderFilters(customData) {
|
|||||||
|
|
||||||
// Update DataTable with the new filters or search value (if applicable)
|
// Update DataTable with the new filters or search value (if applicable)
|
||||||
$('#tableDevices').DataTable().draw();
|
$('#tableDevices').DataTable().draw();
|
||||||
|
|
||||||
// Optionally, apply column filters (if using filters for individual columns)
|
// Optionally, apply column filters (if using filters for individual columns)
|
||||||
const table = $('#tableDevices').DataTable();
|
const table = $('#tableDevices').DataTable();
|
||||||
table.columnFilters = columnFilters; // Apply your column filters logic
|
table.columnFilters = columnFilters; // Apply your column filters logic
|
||||||
@@ -493,11 +493,11 @@ function collectFilters() {
|
|||||||
// Loop through each filter group
|
// Loop through each filter group
|
||||||
document.querySelectorAll('.filter-group').forEach(filterGroup => {
|
document.querySelectorAll('.filter-group').forEach(filterGroup => {
|
||||||
const dropdown = filterGroup.querySelector('.filter-dropdown');
|
const dropdown = filterGroup.querySelector('.filter-dropdown');
|
||||||
|
|
||||||
if (dropdown) {
|
if (dropdown) {
|
||||||
const filterColumn = dropdown.getAttribute('data-column');
|
const filterColumn = dropdown.getAttribute('data-column');
|
||||||
const filterValue = dropdown.value;
|
const filterValue = dropdown.value;
|
||||||
|
|
||||||
if (filterValue && filterColumn) {
|
if (filterValue && filterColumn) {
|
||||||
columnFilters.push({
|
columnFilters.push({
|
||||||
filterColumn: filterColumn,
|
filterColumn: filterColumn,
|
||||||
@@ -548,7 +548,7 @@ function mapColumnIndexToFieldName(index, tableColumnVisible) {
|
|||||||
"devReqNicsOnline" // 29
|
"devReqNicsOnline" // 29
|
||||||
];
|
];
|
||||||
|
|
||||||
// console.log("OrderBy: " + columnNames[tableColumnOrder[index]]);
|
// console.log("OrderBy: " + columnNames[tableColumnOrder[index]]);
|
||||||
|
|
||||||
return columnNames[tableColumnOrder[index]] || null;
|
return columnNames[tableColumnOrder[index]] || null;
|
||||||
}
|
}
|
||||||
@@ -557,12 +557,15 @@ function mapColumnIndexToFieldName(index, tableColumnVisible) {
|
|||||||
// ---------------------------------------------------------
|
// ---------------------------------------------------------
|
||||||
// Initializes the main devices list datatable
|
// Initializes the main devices list datatable
|
||||||
function initializeDatatable (status) {
|
function initializeDatatable (status) {
|
||||||
|
|
||||||
if(!status)
|
if(!status)
|
||||||
{
|
{
|
||||||
status = 'my_devices'
|
status = 'my_devices'
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// retrieve page size
|
||||||
|
var tableRows = getCache ("nax_parTableRows") == "" ? parseInt(getSetting("UI_DEFAULT_PAGE_SIZE")) : getCache ("nax_parTableRows") ;
|
||||||
|
|
||||||
// Save status selected
|
// Save status selected
|
||||||
deviceStatus = status;
|
deviceStatus = status;
|
||||||
|
|
||||||
@@ -579,7 +582,7 @@ function initializeDatatable (status) {
|
|||||||
case 'all_devices': tableTitle = getString('Gen_All_Devices'); color = 'gray'; break;
|
case 'all_devices': tableTitle = getString('Gen_All_Devices'); color = 'gray'; break;
|
||||||
case 'network_devices': tableTitle = getString('Network_Devices'); color = 'aqua'; break;
|
case 'network_devices': tableTitle = getString('Network_Devices'); color = 'aqua'; break;
|
||||||
default: tableTitle = getString('Device_Shortcut_Devices'); color = 'gray'; break;
|
default: tableTitle = getString('Device_Shortcut_Devices'); color = 'gray'; break;
|
||||||
}
|
}
|
||||||
|
|
||||||
// Set title and color
|
// Set title and color
|
||||||
$('#tableDevicesTitle')[0].className = 'box-title text-'+ color;
|
$('#tableDevicesTitle')[0].className = 'box-title text-'+ color;
|
||||||
@@ -588,23 +591,23 @@ function initializeDatatable (status) {
|
|||||||
|
|
||||||
// render table headers
|
// render table headers
|
||||||
html = '';
|
html = '';
|
||||||
|
|
||||||
for(index = 0; index < tableColumnOrder.length; index++)
|
for(index = 0; index < tableColumnOrder.length; index++)
|
||||||
{
|
{
|
||||||
html += '<th>' + headersDefaultOrder[tableColumnOrder[index]] + '</th>';
|
html += '<th>' + headersDefaultOrder[tableColumnOrder[index]] + '</th>';
|
||||||
}
|
}
|
||||||
|
|
||||||
$('#tableDevices tr').html(html);
|
$('#tableDevices tr').html(html);
|
||||||
|
|
||||||
hideUIelements("UI_DEV_SECTIONS")
|
hideUIelements("UI_DEV_SECTIONS")
|
||||||
|
|
||||||
for(i = 0; i < tableColumnOrder.length; i++)
|
for(i = 0; i < tableColumnOrder.length; i++)
|
||||||
{
|
{
|
||||||
// hide this column if not in the tableColumnVisible variable (we need to keep the MAC address (index 11) for functionality reasons)
|
// hide this column if not in the tableColumnVisible variable (we need to keep the MAC address (index 11) for functionality reasons)
|
||||||
if(tableColumnVisible.includes(tableColumnOrder[i]) == false)
|
if(tableColumnVisible.includes(tableColumnOrder[i]) == false)
|
||||||
{
|
{
|
||||||
tableColumnHide.push(mapIndx(tableColumnOrder[i]));
|
tableColumnHide.push(mapIndx(tableColumnOrder[i]));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
var table = $('#tableDevices').DataTable({
|
var table = $('#tableDevices').DataTable({
|
||||||
@@ -690,7 +693,7 @@ function initializeDatatable (status) {
|
|||||||
"status": deviceStatus,
|
"status": deviceStatus,
|
||||||
"filters" : columnFilters
|
"filters" : columnFilters
|
||||||
}
|
}
|
||||||
|
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
@@ -766,8 +769,8 @@ function initializeDatatable (status) {
|
|||||||
|
|
||||||
// Parameters
|
// Parameters
|
||||||
'pageLength' : tableRows,
|
'pageLength' : tableRows,
|
||||||
'order' : tableOrder,
|
'order' : tableOrder,
|
||||||
'select' : true, // Enable selection
|
'select' : true, // Enable selection
|
||||||
|
|
||||||
'fixedHeader': true,
|
'fixedHeader': true,
|
||||||
'fixedHeader': {
|
'fixedHeader': {
|
||||||
@@ -776,19 +779,19 @@ function initializeDatatable (status) {
|
|||||||
},
|
},
|
||||||
|
|
||||||
'columnDefs' : [
|
'columnDefs' : [
|
||||||
{visible: false, targets: tableColumnHide },
|
{visible: false, targets: tableColumnHide },
|
||||||
{className: 'text-center', targets: [mapIndx(4), mapIndx(9), mapIndx(10), mapIndx(15), mapIndx(18)] },
|
{className: 'text-center', targets: [mapIndx(4), mapIndx(9), mapIndx(10), mapIndx(15), mapIndx(18)] },
|
||||||
{className: 'iconColumn text-center', targets: [mapIndx(3)]},
|
{className: 'iconColumn text-center', targets: [mapIndx(3)]},
|
||||||
{width: '80px', targets: [mapIndx(6), mapIndx(7), mapIndx(15), mapIndx(27)] },
|
{width: '80px', targets: [mapIndx(6), mapIndx(7), mapIndx(15), mapIndx(27)] },
|
||||||
{width: '85px', targets: [mapIndx(9)] },
|
{width: '85px', targets: [mapIndx(9)] },
|
||||||
{width: '30px', targets: [mapIndx(3), mapIndx(10), mapIndx(13), mapIndx(18)] },
|
{width: '30px', targets: [mapIndx(3), mapIndx(10), mapIndx(13), mapIndx(18)] },
|
||||||
{orderData: [mapIndx(12)], targets: mapIndx(8) },
|
{orderData: [mapIndx(12)], targets: mapIndx(8) },
|
||||||
|
|
||||||
// Device Name and FQDN
|
// Device Name and FQDN
|
||||||
{targets: [mapIndx(0), mapIndx(27)],
|
{targets: [mapIndx(0), mapIndx(27)],
|
||||||
'createdCell': function (td, cellData, rowData, row, col) {
|
'createdCell': function (td, cellData, rowData, row, col) {
|
||||||
|
|
||||||
// console.log(cellData)
|
// console.log(cellData)
|
||||||
$(td).html (
|
$(td).html (
|
||||||
`<b class="anonymizeDev "
|
`<b class="anonymizeDev "
|
||||||
>
|
>
|
||||||
@@ -811,9 +814,9 @@ function initializeDatatable (status) {
|
|||||||
);
|
);
|
||||||
} },
|
} },
|
||||||
|
|
||||||
// Connected Devices
|
// Connected Devices
|
||||||
{targets: [mapIndx(15)],
|
{targets: [mapIndx(15)],
|
||||||
'createdCell': function (td, cellData, rowData, row, col) {
|
'createdCell': function (td, cellData, rowData, row, col) {
|
||||||
// check if this is a network device
|
// check if this is a network device
|
||||||
if(getSetting("NETWORK_DEVICE_TYPES").includes(`'${rowData[mapIndx(2)]}'`) )
|
if(getSetting("NETWORK_DEVICE_TYPES").includes(`'${rowData[mapIndx(2)]}'`) )
|
||||||
{
|
{
|
||||||
@@ -823,13 +826,13 @@ function initializeDatatable (status) {
|
|||||||
{
|
{
|
||||||
$(td).html (`<i class="fa-solid fa-xmark" title="${getString("Device_Table_Not_Network_Device")}"></i>`)
|
$(td).html (`<i class="fa-solid fa-xmark" title="${getString("Device_Table_Not_Network_Device")}"></i>`)
|
||||||
}
|
}
|
||||||
|
|
||||||
} },
|
} },
|
||||||
|
|
||||||
// Icon
|
// Icon
|
||||||
{targets: [mapIndx(3)],
|
{targets: [mapIndx(3)],
|
||||||
'createdCell': function (td, cellData, rowData, row, col) {
|
'createdCell': function (td, cellData, rowData, row, col) {
|
||||||
|
|
||||||
if (!emptyArr.includes(cellData)){
|
if (!emptyArr.includes(cellData)){
|
||||||
$(td).html (atob(cellData));
|
$(td).html (atob(cellData));
|
||||||
} else {
|
} else {
|
||||||
@@ -837,7 +840,7 @@ function initializeDatatable (status) {
|
|||||||
}
|
}
|
||||||
} },
|
} },
|
||||||
|
|
||||||
// Full MAC
|
// Full MAC
|
||||||
{targets: [mapIndx(11)],
|
{targets: [mapIndx(11)],
|
||||||
'createdCell': function (td, cellData, rowData, row, col) {
|
'createdCell': function (td, cellData, rowData, row, col) {
|
||||||
if (!emptyArr.includes(cellData)){
|
if (!emptyArr.includes(cellData)){
|
||||||
@@ -846,8 +849,8 @@ function initializeDatatable (status) {
|
|||||||
$(td).html ('');
|
$(td).html ('');
|
||||||
}
|
}
|
||||||
} },
|
} },
|
||||||
|
|
||||||
// IP address
|
// IP address
|
||||||
{targets: [mapIndx(8)],
|
{targets: [mapIndx(8)],
|
||||||
'createdCell': function (td, cellData, rowData, row, col) {
|
'createdCell': function (td, cellData, rowData, row, col) {
|
||||||
if (!emptyArr.includes(cellData)){
|
if (!emptyArr.includes(cellData)){
|
||||||
@@ -864,9 +867,9 @@ function initializeDatatable (status) {
|
|||||||
} else {
|
} else {
|
||||||
$(td).html ('');
|
$(td).html ('');
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
// IP address (ordeable)
|
// IP address (ordeable)
|
||||||
{targets: [mapIndx(12)],
|
{targets: [mapIndx(12)],
|
||||||
'createdCell': function (td, cellData, rowData, row, col) {
|
'createdCell': function (td, cellData, rowData, row, col) {
|
||||||
if (!emptyArr.includes(cellData)){
|
if (!emptyArr.includes(cellData)){
|
||||||
@@ -874,10 +877,10 @@ function initializeDatatable (status) {
|
|||||||
} else {
|
} else {
|
||||||
$(td).html ('');
|
$(td).html ('');
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
|
||||||
// Custom Properties
|
// Custom Properties
|
||||||
{targets: [mapIndx(26)],
|
{targets: [mapIndx(26)],
|
||||||
'createdCell': function (td, cellData, rowData, row, col) {
|
'createdCell': function (td, cellData, rowData, row, col) {
|
||||||
if (!emptyArr.includes(cellData)){
|
if (!emptyArr.includes(cellData)){
|
||||||
@@ -885,10 +888,10 @@ function initializeDatatable (status) {
|
|||||||
} else {
|
} else {
|
||||||
$(td).html ('');
|
$(td).html ('');
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
|
||||||
// Favorite
|
// Favorite
|
||||||
{targets: [mapIndx(4)],
|
{targets: [mapIndx(4)],
|
||||||
'createdCell': function (td, cellData, rowData, row, col) {
|
'createdCell': function (td, cellData, rowData, row, col) {
|
||||||
if (cellData == 1){
|
if (cellData == 1){
|
||||||
@@ -897,8 +900,8 @@ function initializeDatatable (status) {
|
|||||||
$(td).html ('');
|
$(td).html ('');
|
||||||
}
|
}
|
||||||
} },
|
} },
|
||||||
|
|
||||||
// Dates
|
// Dates
|
||||||
{targets: [mapIndx(6), mapIndx(7)],
|
{targets: [mapIndx(6), mapIndx(7)],
|
||||||
'createdCell': function (td, cellData, rowData, row, col) {
|
'createdCell': function (td, cellData, rowData, row, col) {
|
||||||
var result = cellData.toString(); // Convert to string
|
var result = cellData.toString(); // Convert to string
|
||||||
@@ -908,7 +911,7 @@ function initializeDatatable (status) {
|
|||||||
$(td).html (translateHTMLcodes (result));
|
$(td).html (translateHTMLcodes (result));
|
||||||
} },
|
} },
|
||||||
|
|
||||||
// Random MAC
|
// Random MAC
|
||||||
{targets: [mapIndx(9)],
|
{targets: [mapIndx(9)],
|
||||||
'createdCell': function (td, cellData, rowData, row, col) {
|
'createdCell': function (td, cellData, rowData, row, col) {
|
||||||
// console.log(cellData)
|
// console.log(cellData)
|
||||||
@@ -919,7 +922,7 @@ function initializeDatatable (status) {
|
|||||||
}
|
}
|
||||||
} },
|
} },
|
||||||
|
|
||||||
// Parent Mac
|
// Parent Mac
|
||||||
{targets: [mapIndx(14)],
|
{targets: [mapIndx(14)],
|
||||||
'createdCell': function (td, cellData, rowData, row, col) {
|
'createdCell': function (td, cellData, rowData, row, col) {
|
||||||
if (!isValidMac(cellData)) {
|
if (!isValidMac(cellData)) {
|
||||||
@@ -938,13 +941,13 @@ function initializeDatatable (status) {
|
|||||||
|
|
||||||
const chipHtml = renderDeviceLink(data, spanWrap, true); // pass the td as container
|
const chipHtml = renderDeviceLink(data, spanWrap, true); // pass the td as container
|
||||||
|
|
||||||
$(spanWrap).append(chipHtml);
|
$(spanWrap).append(chipHtml);
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
// Status color
|
// Status color
|
||||||
{targets: [mapIndx(10)],
|
{targets: [mapIndx(10)],
|
||||||
'createdCell': function (td, cellData, rowData, row, col) {
|
'createdCell': function (td, cellData, rowData, row, col) {
|
||||||
|
|
||||||
tmp_devPresentLastScan = rowData[mapIndx(24)]
|
tmp_devPresentLastScan = rowData[mapIndx(24)]
|
||||||
tmp_devAlertDown = rowData[mapIndx(25)]
|
tmp_devAlertDown = rowData[mapIndx(25)]
|
||||||
|
|
||||||
@@ -954,11 +957,11 @@ function initializeDatatable (status) {
|
|||||||
rowData[mapIndx(11)], // MAC
|
rowData[mapIndx(11)], // MAC
|
||||||
cellData // optional text
|
cellData // optional text
|
||||||
);
|
);
|
||||||
|
|
||||||
$(td).html (`<a href="${badge.url}" class="badge ${badge.cssClass}">${badge.iconHtml} ${badge.text}</a>`);
|
$(td).html (`<a href="${badge.url}" class="badge ${badge.cssClass}">${badge.iconHtml} ${badge.text}</a>`);
|
||||||
} },
|
} },
|
||||||
],
|
],
|
||||||
|
|
||||||
// Processing
|
// Processing
|
||||||
'processing' : true,
|
'processing' : true,
|
||||||
'language' : {
|
'language' : {
|
||||||
@@ -978,7 +981,7 @@ function initializeDatatable (status) {
|
|||||||
$('#tableDevices').on( 'length.dt', function ( e, settings, len ) {
|
$('#tableDevices').on( 'length.dt', function ( e, settings, len ) {
|
||||||
setCache ("nax_parTableRows", len, 129600); // save for 90 days
|
setCache ("nax_parTableRows", len, 129600); // save for 90 days
|
||||||
} );
|
} );
|
||||||
|
|
||||||
$('#tableDevices').on( 'order.dt', function () {
|
$('#tableDevices').on( 'order.dt', function () {
|
||||||
setCache ("nax_parTableOrder", JSON.stringify (table.order()), 129600); // save for 90 days
|
setCache ("nax_parTableOrder", JSON.stringify (table.order()), 129600); // save for 90 days
|
||||||
} );
|
} );
|
||||||
@@ -998,7 +1001,7 @@ function initializeDatatable (status) {
|
|||||||
// Toggle visibility of element with ID 'multiEdit'
|
// Toggle visibility of element with ID 'multiEdit'
|
||||||
$('#multiEdit').toggle(anyRowSelected);
|
$('#multiEdit').toggle(anyRowSelected);
|
||||||
}, 100);
|
}, 100);
|
||||||
|
|
||||||
});
|
});
|
||||||
|
|
||||||
// search only after idle
|
// search only after idle
|
||||||
@@ -1014,59 +1017,59 @@ function initializeDatatable (status) {
|
|||||||
}, debounceTime);
|
}, debounceTime);
|
||||||
});
|
});
|
||||||
|
|
||||||
initHoverNodeInfo();
|
initHoverNodeInfo();
|
||||||
hideSpinner();
|
hideSpinner();
|
||||||
|
|
||||||
},
|
},
|
||||||
createdRow: function(row, data, dataIndex) {
|
createdRow: function(row, data, dataIndex) {
|
||||||
// add devMac to the table row
|
// add devMac to the table row
|
||||||
$(row).attr('my-devMac', data[mapIndx(11)]);
|
$(row).attr('my-devMac', data[mapIndx(11)]);
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
// -----------------------------------------------------------------------------
|
// -----------------------------------------------------------------------------
|
||||||
function handleLoadingDialog(needsReload = false)
|
function handleLoadingDialog(needsReload = false)
|
||||||
{
|
{
|
||||||
// console.log(`needsReload: ${needsReload}`);
|
// console.log(`needsReload: ${needsReload}`);
|
||||||
|
|
||||||
$.get('php/server/query_logs.php?file=execution_queue.log&nocache=' + Date.now(), function(data) {
|
$.get('php/server/query_logs.php?file=execution_queue.log&nocache=' + Date.now(), function(data) {
|
||||||
|
|
||||||
if(data.includes("update_api|devices"))
|
if(data.includes("update_api|devices"))
|
||||||
{
|
{
|
||||||
showSpinner("devices_old")
|
showSpinner("devices_old")
|
||||||
|
|
||||||
setTimeout(handleLoadingDialog(true), 1000);
|
setTimeout(handleLoadingDialog(true), 1000);
|
||||||
|
|
||||||
} else if (needsReload)
|
} else if (needsReload)
|
||||||
{
|
{
|
||||||
location.reload();
|
location.reload();
|
||||||
}else
|
}else
|
||||||
{
|
{
|
||||||
// hideSpinner();
|
// hideSpinner();
|
||||||
}
|
}
|
||||||
|
|
||||||
})
|
})
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// -----------------------------------------------------------------------------
|
// -----------------------------------------------------------------------------
|
||||||
// Function collects selected devices in the DataTable and redirects the user to
|
// Function collects selected devices in the DataTable and redirects the user to
|
||||||
// the Miantenance section with a 'macs' query string identifying selected devices
|
// the Miantenance section with a 'macs' query string identifying selected devices
|
||||||
function multiEditDevices()
|
function multiEditDevices()
|
||||||
{
|
{
|
||||||
// get selected devices
|
// get selected devices
|
||||||
var selectedDevicesDataTableData = $('#tableDevices').DataTable().rows({ selected: true, page: 'current' }).data().toArray();
|
var selectedDevicesDataTableData = $('#tableDevices').DataTable().rows({ selected: true, page: 'current' }).data().toArray();
|
||||||
|
|
||||||
console.log(selectedDevicesDataTableData);
|
console.log(selectedDevicesDataTableData);
|
||||||
|
|
||||||
macs = ""
|
macs = ""
|
||||||
|
|
||||||
for (var j = 0; j < selectedDevicesDataTableData.length; j++) {
|
for (var j = 0; j < selectedDevicesDataTableData.length; j++) {
|
||||||
macs += selectedDevicesDataTableData[j][mapIndx(11)] + ","; // [11] == MAC
|
macs += selectedDevicesDataTableData[j][mapIndx(11)] + ","; // [11] == MAC
|
||||||
}
|
}
|
||||||
|
|
||||||
// redirect to the Maintenance section
|
// redirect to the Maintenance section
|
||||||
@@ -1075,7 +1078,7 @@ function multiEditDevices()
|
|||||||
|
|
||||||
|
|
||||||
// -----------------------------------------------------------------------------
|
// -----------------------------------------------------------------------------
|
||||||
// Function collects shown devices from the DataTable
|
// Function collects shown devices from the DataTable
|
||||||
function getMacsOfShownDevices() {
|
function getMacsOfShownDevices() {
|
||||||
var table = $('#tableDevices').DataTable();
|
var table = $('#tableDevices').DataTable();
|
||||||
|
|
||||||
@@ -1096,15 +1099,15 @@ function getMacsOfShownDevices() {
|
|||||||
|
|
||||||
|
|
||||||
// -----------------------------------------------------------------------------
|
// -----------------------------------------------------------------------------
|
||||||
// Handle custom actions/properties on a device
|
// Handle custom actions/properties on a device
|
||||||
function renderCustomProps(custProps, mac) {
|
function renderCustomProps(custProps, mac) {
|
||||||
// Decode and parse the custom properties
|
// Decode and parse the custom properties
|
||||||
|
|
||||||
if (!isBase64(custProps)) {
|
if (!isBase64(custProps)) {
|
||||||
|
|
||||||
console.error(`Unable to decode CustomProps for ${mac}`);
|
console.error(`Unable to decode CustomProps for ${mac}`);
|
||||||
console.error(custProps);
|
console.error(custProps);
|
||||||
|
|
||||||
} else{
|
} else{
|
||||||
const props = JSON.parse(atob(custProps));
|
const props = JSON.parse(atob(custProps));
|
||||||
let html = "";
|
let html = "";
|
||||||
@@ -1150,13 +1153,13 @@ function renderCustomProps(custProps, mac) {
|
|||||||
|
|
||||||
|
|
||||||
// -----------------------------------------------------------------------------
|
// -----------------------------------------------------------------------------
|
||||||
// Update cache with shown devices before navigating away
|
// Update cache with shown devices before navigating away
|
||||||
window.addEventListener('beforeunload', function(event) {
|
window.addEventListener('beforeunload', function(event) {
|
||||||
// Call your function here
|
// Call your function here
|
||||||
macs = getMacsOfShownDevices();
|
macs = getMacsOfShownDevices();
|
||||||
|
|
||||||
setCache("ntx_visible_macs", macs)
|
setCache("ntx_visible_macs", macs)
|
||||||
|
|
||||||
});
|
});
|
||||||
|
|
||||||
</script>
|
</script>
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
/* -----------------------------------------------------------------------------
|
/* -----------------------------------------------------------------------------
|
||||||
* NetAlertX
|
* NetAlertX
|
||||||
* Open Source Network Guard / WIFI & LAN intrusion detector
|
* Open Source Network Guard / WIFI & LAN intrusion detector
|
||||||
*
|
*
|
||||||
* common.js - Front module. Common Javascript functions
|
* common.js - Front module. Common Javascript functions
|
||||||
*-------------------------------------------------------------------------------
|
*-------------------------------------------------------------------------------
|
||||||
@@ -35,16 +35,16 @@ function getCache(key, noCookie = false)
|
|||||||
// }
|
// }
|
||||||
}
|
}
|
||||||
|
|
||||||
return "";
|
return "";
|
||||||
}
|
}
|
||||||
|
|
||||||
// -----------------------------------------------------------------------------
|
// -----------------------------------------------------------------------------
|
||||||
function setCache(key, data, expirationMinutes='')
|
function setCache(key, data, expirationMinutes='')
|
||||||
{
|
{
|
||||||
localStorage.setItem(key, data);
|
localStorage.setItem(key, data);
|
||||||
|
|
||||||
// // create cookie if expiration set to handle refresh of data
|
// // create cookie if expiration set to handle refresh of data
|
||||||
// if (expirationMinutes != '')
|
// if (expirationMinutes != '')
|
||||||
// {
|
// {
|
||||||
// setCookie ('cache_session_expiry', 'OK', 1)
|
// setCookie ('cache_session_expiry', 'OK', 1)
|
||||||
// }
|
// }
|
||||||
@@ -57,7 +57,7 @@ function setCookie (cookie, value, expirationMinutes='') {
|
|||||||
var expires = '';
|
var expires = '';
|
||||||
if (typeof expirationMinutes === 'number') {
|
if (typeof expirationMinutes === 'number') {
|
||||||
expires = ';expires=' + new Date(Date.now() + expirationMinutes *60*1000).toUTCString();
|
expires = ';expires=' + new Date(Date.now() + expirationMinutes *60*1000).toUTCString();
|
||||||
}
|
}
|
||||||
|
|
||||||
// Save Cookie
|
// Save Cookie
|
||||||
document.cookie = cookie + "=" + value + expires;
|
document.cookie = cookie + "=" + value + expires;
|
||||||
@@ -107,42 +107,42 @@ function deleteAllCookies() {
|
|||||||
|
|
||||||
|
|
||||||
// -----------------------------------------------------------------------------
|
// -----------------------------------------------------------------------------
|
||||||
// Get settings from the .json file generated by the python backend
|
// Get settings from the .json file generated by the python backend
|
||||||
// and cache them, if available, with options
|
// and cache them, if available, with options
|
||||||
// -----------------------------------------------------------------------------
|
// -----------------------------------------------------------------------------
|
||||||
function cacheSettings()
|
function cacheSettings()
|
||||||
{
|
{
|
||||||
return new Promise((resolve, reject) => {
|
return new Promise((resolve, reject) => {
|
||||||
if(!getCache('cacheSettings_completed') === true)
|
if(!getCache('cacheSettings_completed') === true)
|
||||||
{
|
{
|
||||||
$.get('php/server/query_json.php', { file: 'table_settings.json', nocache: Date.now() }, function(resSet) {
|
$.get('php/server/query_json.php', { file: 'table_settings.json', nocache: Date.now() }, function(resSet) {
|
||||||
|
|
||||||
$.get('php/server/query_json.php', { file: 'plugins.json', nocache: Date.now() }, function(resPlug) {
|
$.get('php/server/query_json.php', { file: 'plugins.json', nocache: Date.now() }, function(resPlug) {
|
||||||
|
|
||||||
pluginsData = resPlug["data"];
|
|
||||||
settingsData = resSet["data"];
|
|
||||||
|
|
||||||
settingsData.forEach((set) => {
|
pluginsData = resPlug["data"];
|
||||||
|
settingsData = resSet["data"];
|
||||||
|
|
||||||
|
settingsData.forEach((set) => {
|
||||||
|
|
||||||
resolvedOptions = createArray(set.setOptions)
|
resolvedOptions = createArray(set.setOptions)
|
||||||
resolvedOptionsOld = resolvedOptions
|
resolvedOptionsOld = resolvedOptions
|
||||||
setPlugObj = {};
|
setPlugObj = {};
|
||||||
options_params = [];
|
options_params = [];
|
||||||
resolved = ""
|
resolved = ""
|
||||||
|
|
||||||
// proceed only if first option item contains something to resolve
|
// proceed only if first option item contains something to resolve
|
||||||
if( !set.setKey.includes("__metadata") &&
|
if( !set.setKey.includes("__metadata") &&
|
||||||
resolvedOptions.length != 0 &&
|
resolvedOptions.length != 0 &&
|
||||||
resolvedOptions[0].includes("{value}"))
|
resolvedOptions[0].includes("{value}"))
|
||||||
{
|
{
|
||||||
// get setting definition from the plugin config if available
|
// get setting definition from the plugin config if available
|
||||||
setPlugObj = getPluginSettingObject(pluginsData, set.setKey)
|
setPlugObj = getPluginSettingObject(pluginsData, set.setKey)
|
||||||
|
|
||||||
// check if options contains parameters and resolve
|
// check if options contains parameters and resolve
|
||||||
if(setPlugObj != {} && setPlugObj["options_params"])
|
if(setPlugObj != {} && setPlugObj["options_params"])
|
||||||
{
|
{
|
||||||
// get option_params for {value} resolution
|
// get option_params for {value} resolution
|
||||||
options_params = setPlugObj["options_params"]
|
options_params = setPlugObj["options_params"]
|
||||||
|
|
||||||
if(options_params != [])
|
if(options_params != [])
|
||||||
{
|
{
|
||||||
@@ -154,19 +154,19 @@ function cacheSettings()
|
|||||||
{
|
{
|
||||||
resolvedOptions = `[${resolved}]`
|
resolvedOptions = `[${resolved}]`
|
||||||
} else // one value only
|
} else // one value only
|
||||||
{
|
{
|
||||||
resolvedOptions = `["${resolved}"]`
|
resolvedOptions = `["${resolved}"]`
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
setCache(`nax_set_${set.setKey}`, set.setValue)
|
setCache(`nax_set_${set.setKey}`, set.setValue)
|
||||||
setCache(`nax_set_opt_${set.setKey}`, resolvedOptions)
|
setCache(`nax_set_opt_${set.setKey}`, resolvedOptions)
|
||||||
});
|
});
|
||||||
}).then(() => handleSuccess('cacheSettings', resolve())).catch(() => handleFailure('cacheSettings', reject("cacheSettings already completed"))); // handle AJAX synchronization
|
}).then(() => handleSuccess('cacheSettings', resolve())).catch(() => handleFailure('cacheSettings', reject("cacheSettings already completed"))); // handle AJAX synchronization
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -176,7 +176,7 @@ function getSettingOptions (key) {
|
|||||||
|
|
||||||
// handle initial load to make sure everything is set-up and cached
|
// handle initial load to make sure everything is set-up and cached
|
||||||
// handleFirstLoad()
|
// handleFirstLoad()
|
||||||
|
|
||||||
result = getCache(`nax_set_opt_${key}`, true);
|
result = getCache(`nax_set_opt_${key}`, true);
|
||||||
|
|
||||||
if (result == "")
|
if (result == "")
|
||||||
@@ -194,7 +194,7 @@ function getSetting (key) {
|
|||||||
|
|
||||||
// handle initial load to make sure everything is set-up and cached
|
// handle initial load to make sure everything is set-up and cached
|
||||||
// handleFirstLoad()
|
// handleFirstLoad()
|
||||||
|
|
||||||
result = getCache(`nax_set_${key}`, true);
|
result = getCache(`nax_set_${key}`, true);
|
||||||
|
|
||||||
if (result == "")
|
if (result == "")
|
||||||
@@ -210,7 +210,7 @@ function getSetting (key) {
|
|||||||
// -----------------------------------------------------------------------------
|
// -----------------------------------------------------------------------------
|
||||||
function cacheStrings() {
|
function cacheStrings() {
|
||||||
return new Promise((resolve, reject) => {
|
return new Promise((resolve, reject) => {
|
||||||
|
|
||||||
// Create a promise for each language (include en_us by default as fallback)
|
// Create a promise for each language (include en_us by default as fallback)
|
||||||
languagesToLoad = ['en_us']
|
languagesToLoad = ['en_us']
|
||||||
|
|
||||||
@@ -222,11 +222,11 @@ function cacheStrings() {
|
|||||||
}
|
}
|
||||||
|
|
||||||
console.log(languagesToLoad);
|
console.log(languagesToLoad);
|
||||||
|
|
||||||
const languagePromises = languagesToLoad.map((language_code) => {
|
const languagePromises = languagesToLoad.map((language_code) => {
|
||||||
return new Promise((resolveLang, rejectLang) => {
|
return new Promise((resolveLang, rejectLang) => {
|
||||||
// Fetch core strings and translations
|
// Fetch core strings and translations
|
||||||
|
|
||||||
$.get(`php/templates/language/${language_code}.json?nocache=${Date.now()}`)
|
$.get(`php/templates/language/${language_code}.json?nocache=${Date.now()}`)
|
||||||
.done((res) => {
|
.done((res) => {
|
||||||
// Iterate over each key-value pair and store the translations
|
// Iterate over each key-value pair and store the translations
|
||||||
@@ -238,7 +238,7 @@ function cacheStrings() {
|
|||||||
$.get('php/server/query_json.php', { file: 'table_plugins_language_strings.json', nocache: Date.now() })
|
$.get('php/server/query_json.php', { file: 'table_plugins_language_strings.json', nocache: Date.now() })
|
||||||
.done((pluginRes) => {
|
.done((pluginRes) => {
|
||||||
const data = pluginRes["data"];
|
const data = pluginRes["data"];
|
||||||
|
|
||||||
// Store plugin translations
|
// Store plugin translations
|
||||||
data.forEach((langString) => {
|
data.forEach((langString) => {
|
||||||
setCache(`pia_lang_${langString.String_Key}_${langString.Language_Code}`, langString.String_Value);
|
setCache(`pia_lang_${langString.String_Key}_${langString.Language_Code}`, langString.String_Value);
|
||||||
@@ -269,7 +269,7 @@ function cacheStrings() {
|
|||||||
// Handle failure in any of the language processing
|
// Handle failure in any of the language processing
|
||||||
handleFailure('cacheStrings', reject);
|
handleFailure('cacheStrings', reject);
|
||||||
});
|
});
|
||||||
|
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -278,7 +278,7 @@ function cacheStrings() {
|
|||||||
function getString(key) {
|
function getString(key) {
|
||||||
|
|
||||||
function fetchString(key) {
|
function fetchString(key) {
|
||||||
|
|
||||||
lang_code = getLangCode();
|
lang_code = getLangCode();
|
||||||
|
|
||||||
let result = getCache(`pia_lang_${key}_${lang_code}`, true);
|
let result = getCache(`pia_lang_${key}_${lang_code}`, true);
|
||||||
@@ -378,7 +378,7 @@ function localizeTimestamp(input) {
|
|||||||
let tz = getSetting("TIMEZONE") || 'Europe/Berlin';
|
let tz = getSetting("TIMEZONE") || 'Europe/Berlin';
|
||||||
input = String(input || '').trim();
|
input = String(input || '').trim();
|
||||||
|
|
||||||
// ✅ 1. Unix timestamps (10 or 13 digits)
|
// 1. Unix timestamps (10 or 13 digits)
|
||||||
if (/^\d+$/.test(input)) {
|
if (/^\d+$/.test(input)) {
|
||||||
const ms = input.length === 10 ? parseInt(input, 10) * 1000 : parseInt(input, 10);
|
const ms = input.length === 10 ? parseInt(input, 10) * 1000 : parseInt(input, 10);
|
||||||
return new Intl.DateTimeFormat('default', {
|
return new Intl.DateTimeFormat('default', {
|
||||||
@@ -389,39 +389,59 @@ function localizeTimestamp(input) {
|
|||||||
}).format(new Date(ms));
|
}).format(new Date(ms));
|
||||||
}
|
}
|
||||||
|
|
||||||
// ✅ 2. European DD/MM/YYYY
|
// 2. European DD/MM/YYYY
|
||||||
let match = input.match(/^(\d{1,2})\/(\d{1,2})\/(\d{4})(?:[ ,]+(\d{1,2}:\d{2}(?::\d{2})?))?(.*)$/);
|
let match = input.match(/^(\d{1,2})\/(\d{1,2})\/(\d{4})(?:[ ,]+(\d{1,2}:\d{2}(?::\d{2})?))?$/);
|
||||||
if (match) {
|
if (match) {
|
||||||
let [ , d, m, y, t = "00:00:00", tzPart = "" ] = match;
|
let [, d, m, y, t = "00:00:00", tzPart = ""] = match;
|
||||||
const iso = `${y}-${m.padStart(2,'0')}-${d.padStart(2,'0')}T${t.length===5?t+":00":t}${tzPart}`;
|
const dNum = parseInt(d, 10);
|
||||||
return formatSafe(iso, tz);
|
const mNum = parseInt(m, 10);
|
||||||
|
|
||||||
|
if (dNum <= 12 && mNum > 12) {
|
||||||
|
} else {
|
||||||
|
const iso = `${y}-${m.padStart(2,'0')}-${d.padStart(2,'0')}T${t.length===5 ? t + ":00" : t}${tzPart}`;
|
||||||
|
return formatSafe(iso, tz);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// ✅ 3. US MM/DD/YYYY
|
// 3. US MM/DD/YYYY
|
||||||
match = input.match(/^(\d{1,2})\/(\d{1,2})\/(\d{4})(?:[ ,]+(\d{1,2}:\d{2}(?::\d{2})?))?(.*)$/);
|
match = input.match(/^(\d{1,2})\/(\d{1,2})\/(\d{4})(?:[ ,]+(\d{1,2}:\d{2}(?::\d{2})?))?(.*)$/);
|
||||||
if (match) {
|
if (match) {
|
||||||
let [ , m, d, y, t = "00:00:00", tzPart = "" ] = match;
|
let [, m, d, y, t = "00:00:00", tzPart = ""] = match;
|
||||||
const iso = `${y}-${m.padStart(2,'0')}-${d.padStart(2,'0')}T${t.length===5?t+":00":t}${tzPart}`;
|
const iso = `${y}-${m.padStart(2,'0')}-${d.padStart(2,'0')}T${t.length===5?t+":00":t}${tzPart}`;
|
||||||
return formatSafe(iso, tz);
|
return formatSafe(iso, tz);
|
||||||
}
|
}
|
||||||
|
|
||||||
// ✅ 4. ISO-style (with T, Z, offsets)
|
// 4. ISO YYYY-MM-DD with optional Z/+offset
|
||||||
match = input.match(/^(\d{4}-\d{1,2}-\d{1,2})[ T](\d{1,2}:\d{2}(?::\d{2})?)(Z|[+-]\d{2}:?\d{2})?$/);
|
match = input.match(/^(\d{4})-(0[1-9]|1[0-2])-(0[1-9]|[12]\d|3[01])[ T](\d{1,2}:\d{2}(?::\d{2})?)(Z|[+-]\d{2}:?\d{2})?$/);
|
||||||
if (match) {
|
if (match) {
|
||||||
let [ , ymd, time, offset = "" ] = match;
|
let [, y, m, d, time, offset = ""] = match;
|
||||||
// normalize to YYYY-MM-DD
|
|
||||||
let [y, m, d] = ymd.split('-').map(x => x.padStart(2,'0'));
|
|
||||||
const iso = `${y}-${m}-${d}T${time.length===5?time+":00":time}${offset}`;
|
const iso = `${y}-${m}-${d}T${time.length===5?time+":00":time}${offset}`;
|
||||||
return formatSafe(iso, tz);
|
return formatSafe(iso, tz);
|
||||||
}
|
}
|
||||||
|
|
||||||
// ✅ 5. RFC2822 / "25 Aug 2025 13:45:22 +0200"
|
// 5. RFC2822 / "25 Aug 2025 13:45:22 +0200"
|
||||||
match = input.match(/^\d{1,2} [A-Za-z]{3,} \d{4}/);
|
match = input.match(/^\d{1,2} [A-Za-z]{3,} \d{4}/);
|
||||||
if (match) {
|
if (match) {
|
||||||
return formatSafe(input, tz);
|
return formatSafe(input, tz);
|
||||||
}
|
}
|
||||||
|
|
||||||
// ✅ 6. Fallback (whatever Date() can parse)
|
// 6. DD-MM-YYYY with optional time
|
||||||
|
match = input.match(/^(\d{1,2})-(\d{1,2})-(\d{4})(?:[ T](\d{1,2}:\d{2}(?::\d{2})?))?$/);
|
||||||
|
if (match) {
|
||||||
|
let [, d, m, y, time = "00:00:00"] = match;
|
||||||
|
const iso = `${y}-${m.padStart(2,'0')}-${d.padStart(2,'0')}T${time.length===5?time+":00":time}`;
|
||||||
|
return formatSafe(iso, tz);
|
||||||
|
}
|
||||||
|
|
||||||
|
// 7. Strict YYYY-DD-MM with optional time
|
||||||
|
match = input.match(/^(\d{4})-(0[1-9]|[12]\d|3[01])-(0[1-9]|1[0-2])(?:[ T](\d{1,2}:\d{2}(?::\d{2})?))?$/);
|
||||||
|
if (match) {
|
||||||
|
let [, y, d, m, time = "00:00:00"] = match;
|
||||||
|
const iso = `${y}-${m}-${d}T${time.length === 5 ? time + ":00" : time}`;
|
||||||
|
return formatSafe(iso, tz);
|
||||||
|
}
|
||||||
|
|
||||||
|
// 8. Fallback
|
||||||
return formatSafe(input, tz);
|
return formatSafe(input, tz);
|
||||||
|
|
||||||
function formatSafe(str, tz) {
|
function formatSafe(str, tz) {
|
||||||
@@ -440,6 +460,7 @@ function localizeTimestamp(input) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
// ----------------------------------------------------
|
// ----------------------------------------------------
|
||||||
/**
|
/**
|
||||||
* Replaces double quotes within single-quoted strings, then converts all single quotes to double quotes,
|
* Replaces double quotes within single-quoted strings, then converts all single quotes to double quotes,
|
||||||
@@ -509,7 +530,7 @@ function isBase64(value) {
|
|||||||
const base64Regex = /^[A-Za-z0-9+/]+={0,2}$/;
|
const base64Regex = /^[A-Za-z0-9+/]+={0,2}$/;
|
||||||
if (!base64Regex.test(value)) return false;
|
if (!base64Regex.test(value)) return false;
|
||||||
|
|
||||||
|
|
||||||
try {
|
try {
|
||||||
const decoded = atob(value);
|
const decoded = atob(value);
|
||||||
|
|
||||||
@@ -568,7 +589,7 @@ function decodeSpecialChars(str) {
|
|||||||
function utf8ToBase64(str) {
|
function utf8ToBase64(str) {
|
||||||
// Convert the string to a Uint8Array using TextEncoder
|
// Convert the string to a Uint8Array using TextEncoder
|
||||||
const utf8Bytes = new TextEncoder().encode(str);
|
const utf8Bytes = new TextEncoder().encode(str);
|
||||||
|
|
||||||
// Convert the Uint8Array to a base64-encoded string
|
// Convert the Uint8Array to a base64-encoded string
|
||||||
return btoa(String.fromCharCode(...utf8Bytes));
|
return btoa(String.fromCharCode(...utf8Bytes));
|
||||||
}
|
}
|
||||||
@@ -597,31 +618,31 @@ function handle_locked_DB(data)
|
|||||||
{
|
{
|
||||||
if(data.includes('database is locked'))
|
if(data.includes('database is locked'))
|
||||||
{
|
{
|
||||||
// console.log(data)
|
// console.log(data)
|
||||||
showSpinner()
|
showSpinner()
|
||||||
|
|
||||||
setTimeout(function() {
|
setTimeout(function() {
|
||||||
console.warn("Database locked - reload")
|
console.warn("Database locked - reload")
|
||||||
location.reload();
|
location.reload();
|
||||||
}, 5000);
|
}, 5000);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// -----------------------------------------------------------------------------
|
// -----------------------------------------------------------------------------
|
||||||
function numberArrayFromString(data)
|
function numberArrayFromString(data)
|
||||||
{
|
{
|
||||||
data = JSON.parse(sanitize(data));
|
data = JSON.parse(sanitize(data));
|
||||||
return data.replace(/\[|\]/g, '').split(',').map(Number);
|
return data.replace(/\[|\]/g, '').split(',').map(Number);
|
||||||
}
|
}
|
||||||
|
|
||||||
// -----------------------------------------------------------------------------
|
// -----------------------------------------------------------------------------
|
||||||
function saveData(functionName, id, value) {
|
function saveData(functionName, id, value) {
|
||||||
$.ajax({
|
$.ajax({
|
||||||
method: "GET",
|
method: "GET",
|
||||||
url: "php/server/devices.php",
|
url: "php/server/devices.php",
|
||||||
data: { action: functionName, id: id, value:value },
|
data: { action: functionName, id: id, value:value },
|
||||||
success: function(data) {
|
success: function(data) {
|
||||||
|
|
||||||
if(sanitize(data) == 'OK')
|
if(sanitize(data) == 'OK')
|
||||||
{
|
{
|
||||||
showMessage("Saved")
|
showMessage("Saved")
|
||||||
@@ -630,7 +651,7 @@ function saveData(functionName, id, value) {
|
|||||||
} else
|
} else
|
||||||
{
|
{
|
||||||
showMessage("ERROR")
|
showMessage("ERROR")
|
||||||
}
|
}
|
||||||
|
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
@@ -670,13 +691,13 @@ function sleep(milliseconds) {
|
|||||||
} while (currentDate - date < milliseconds);
|
} while (currentDate - date < milliseconds);
|
||||||
}
|
}
|
||||||
|
|
||||||
// ---------------------------------------------------------
|
// ---------------------------------------------------------
|
||||||
somethingChanged = false;
|
somethingChanged = false;
|
||||||
function settingsChanged()
|
function settingsChanged()
|
||||||
{
|
{
|
||||||
somethingChanged = true;
|
somethingChanged = true;
|
||||||
// Enable navigation prompt ... "Are you sure you want to leave..."
|
// Enable navigation prompt ... "Are you sure you want to leave..."
|
||||||
window.onbeforeunload = function() {
|
window.onbeforeunload = function() {
|
||||||
return true;
|
return true;
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
@@ -694,16 +715,16 @@ function getUrlAnchor(defaultValue){
|
|||||||
selectedTab = defaultValue
|
selectedTab = defaultValue
|
||||||
|
|
||||||
// the #target from the url
|
// the #target from the url
|
||||||
target = window.location.hash.substr(1)
|
target = window.location.hash.substr(1)
|
||||||
|
|
||||||
// get only the part between #...?
|
// get only the part between #...?
|
||||||
if(target.includes('?'))
|
if(target.includes('?'))
|
||||||
{
|
{
|
||||||
target = target.split('?')[0]
|
target = target.split('?')[0]
|
||||||
}
|
}
|
||||||
|
|
||||||
return target
|
return target
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
}
|
}
|
||||||
@@ -715,7 +736,7 @@ function getQueryString(key){
|
|||||||
get: (searchParams, prop) => searchParams.get(prop),
|
get: (searchParams, prop) => searchParams.get(prop),
|
||||||
});
|
});
|
||||||
|
|
||||||
tmp = params[key]
|
tmp = params[key]
|
||||||
|
|
||||||
if(emptyArr.includes(tmp))
|
if(emptyArr.includes(tmp))
|
||||||
{
|
{
|
||||||
@@ -726,17 +747,17 @@ function getQueryString(key){
|
|||||||
|
|
||||||
if (fullUrl.includes('?')) {
|
if (fullUrl.includes('?')) {
|
||||||
var queryString = fullUrl.split('?')[1];
|
var queryString = fullUrl.split('?')[1];
|
||||||
|
|
||||||
// Split the query string into individual parameters
|
// Split the query string into individual parameters
|
||||||
var paramsArray = queryString.split('&');
|
var paramsArray = queryString.split('&');
|
||||||
|
|
||||||
// Loop through the parameters array
|
// Loop through the parameters array
|
||||||
paramsArray.forEach(function(param) {
|
paramsArray.forEach(function(param) {
|
||||||
// Split each parameter into key and value
|
// Split each parameter into key and value
|
||||||
var keyValue = param.split('=');
|
var keyValue = param.split('=');
|
||||||
var keyTmp = decodeURIComponent(keyValue[0]);
|
var keyTmp = decodeURIComponent(keyValue[0]);
|
||||||
var value = decodeURIComponent(keyValue[1] || '');
|
var value = decodeURIComponent(keyValue[1] || '');
|
||||||
|
|
||||||
// Store key-value pair in the queryParams object
|
// Store key-value pair in the queryParams object
|
||||||
queryParams[keyTmp] = value;
|
queryParams[keyTmp] = value;
|
||||||
});
|
});
|
||||||
@@ -750,7 +771,7 @@ function getQueryString(key){
|
|||||||
result = emptyArr.includes(tmp) ? "" : tmp;
|
result = emptyArr.includes(tmp) ? "" : tmp;
|
||||||
|
|
||||||
return result
|
return result
|
||||||
}
|
}
|
||||||
// -----------------------------------------------------------------------------
|
// -----------------------------------------------------------------------------
|
||||||
function translateHTMLcodes (text) {
|
function translateHTMLcodes (text) {
|
||||||
if (text == null || emptyArr.includes(text)) {
|
if (text == null || emptyArr.includes(text)) {
|
||||||
@@ -769,14 +790,14 @@ function translateHTMLcodes (text) {
|
|||||||
// -----------------------------------------------------------------------------
|
// -----------------------------------------------------------------------------
|
||||||
function stopTimerRefreshData () {
|
function stopTimerRefreshData () {
|
||||||
try {
|
try {
|
||||||
clearTimeout (timerRefreshData);
|
clearTimeout (timerRefreshData);
|
||||||
} catch (e) {}
|
} catch (e) {}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
// -----------------------------------------------------------------------------
|
// -----------------------------------------------------------------------------
|
||||||
function newTimerRefreshData (refeshFunction, timeToRefresh) {
|
function newTimerRefreshData (refeshFunction, timeToRefresh) {
|
||||||
|
|
||||||
if(timeToRefresh && (timeToRefresh != 0 || timeToRefresh != ""))
|
if(timeToRefresh && (timeToRefresh != 0 || timeToRefresh != ""))
|
||||||
{
|
{
|
||||||
time = parseInt(timeToRefresh)
|
time = parseInt(timeToRefresh)
|
||||||
@@ -813,7 +834,7 @@ function openInNewTab (url) {
|
|||||||
window.open(url, "_blank");
|
window.open(url, "_blank");
|
||||||
}
|
}
|
||||||
|
|
||||||
// -----------------------------------------------------------------------------
|
// -----------------------------------------------------------------------------
|
||||||
// Navigate to URL if the current URL is not in the provided list of URLs
|
// Navigate to URL if the current URL is not in the provided list of URLs
|
||||||
function openUrl(urls) {
|
function openUrl(urls) {
|
||||||
var currentUrl = window.location.href;
|
var currentUrl = window.location.href;
|
||||||
@@ -844,21 +865,21 @@ function openUrl(urls) {
|
|||||||
function forceLoadUrl(relativeUrl) {
|
function forceLoadUrl(relativeUrl) {
|
||||||
|
|
||||||
window.location.replace(relativeUrl);
|
window.location.replace(relativeUrl);
|
||||||
window.location.reload()
|
window.location.reload()
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// -----------------------------------------------------------------------------
|
// -----------------------------------------------------------------------------
|
||||||
function navigateToDeviceWithIp (ip) {
|
function navigateToDeviceWithIp (ip) {
|
||||||
|
|
||||||
$.get('php/server/query_json.php', { file: 'table_devices.json', nocache: Date.now() }, function(res) {
|
$.get('php/server/query_json.php', { file: 'table_devices.json', nocache: Date.now() }, function(res) {
|
||||||
|
|
||||||
devices = res["data"];
|
devices = res["data"];
|
||||||
|
|
||||||
mac = ""
|
mac = ""
|
||||||
|
|
||||||
$.each(devices, function(index, obj) {
|
$.each(devices, function(index, obj) {
|
||||||
|
|
||||||
if(obj.devLastIP.trim() == ip.trim())
|
if(obj.devLastIP.trim() == ip.trim())
|
||||||
{
|
{
|
||||||
mac = obj.devMac;
|
mac = obj.devMac;
|
||||||
@@ -866,7 +887,7 @@ function navigateToDeviceWithIp (ip) {
|
|||||||
window.open('./deviceDetails.php?mac=' + mac , "_blank");
|
window.open('./deviceDetails.php?mac=' + mac , "_blank");
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -898,7 +919,7 @@ function getMac(){
|
|||||||
});
|
});
|
||||||
|
|
||||||
return params.mac
|
return params.mac
|
||||||
}
|
}
|
||||||
|
|
||||||
// -----------------------------------------------------------------------------
|
// -----------------------------------------------------------------------------
|
||||||
// A function used to make the IP address orderable
|
// A function used to make the IP address orderable
|
||||||
@@ -950,7 +971,7 @@ function isRandomMAC(mac)
|
|||||||
{
|
{
|
||||||
isRandom = false;
|
isRandom = false;
|
||||||
|
|
||||||
isRandom = ["2", "6", "A", "E", "a", "e"].includes(mac[1]);
|
isRandom = ["2", "6", "A", "E", "a", "e"].includes(mac[1]);
|
||||||
|
|
||||||
// if detected as random, make sure it doesn't start with a prefix which teh suer doesn't want to mark as random
|
// if detected as random, make sure it doesn't start with a prefix which teh suer doesn't want to mark as random
|
||||||
if(isRandom)
|
if(isRandom)
|
||||||
@@ -959,17 +980,17 @@ function isRandomMAC(mac)
|
|||||||
|
|
||||||
if(mac.startsWith(prefix))
|
if(mac.startsWith(prefix))
|
||||||
{
|
{
|
||||||
isRandom = false;
|
isRandom = false;
|
||||||
}
|
}
|
||||||
|
|
||||||
});
|
});
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
return isRandom;
|
return isRandom;
|
||||||
}
|
}
|
||||||
|
|
||||||
// ---------------------------------------------------------
|
// ---------------------------------------------------------
|
||||||
// Generate an array object from a string representation of an array
|
// Generate an array object from a string representation of an array
|
||||||
function createArray(input) {
|
function createArray(input) {
|
||||||
// Is already array, return
|
// Is already array, return
|
||||||
@@ -980,25 +1001,25 @@ function isRandomMAC(mac)
|
|||||||
if (input === '[]' || input === '') {
|
if (input === '[]' || input === '') {
|
||||||
return [];
|
return [];
|
||||||
}
|
}
|
||||||
// handle integer
|
// handle integer
|
||||||
if (typeof input === 'number') {
|
if (typeof input === 'number') {
|
||||||
input = input.toString();
|
input = input.toString();
|
||||||
}
|
}
|
||||||
|
|
||||||
// Regex pattern for brackets
|
// Regex pattern for brackets
|
||||||
const patternBrackets = /(^\s*\[)|(\]\s*$)/g;
|
const patternBrackets = /(^\s*\[)|(\]\s*$)/g;
|
||||||
const replacement = '';
|
const replacement = '';
|
||||||
|
|
||||||
// Remove brackets
|
// Remove brackets
|
||||||
const noBrackets = input.replace(patternBrackets, replacement);
|
const noBrackets = input.replace(patternBrackets, replacement);
|
||||||
|
|
||||||
const options = [];
|
const options = [];
|
||||||
|
|
||||||
// Detect the type of quote used after the opening bracket
|
// Detect the type of quote used after the opening bracket
|
||||||
const firstChar = noBrackets.trim()[0];
|
const firstChar = noBrackets.trim()[0];
|
||||||
const isDoubleQuoted = firstChar === '"';
|
const isDoubleQuoted = firstChar === '"';
|
||||||
const isSingleQuoted = firstChar === "'";
|
const isSingleQuoted = firstChar === "'";
|
||||||
|
|
||||||
// Create array while handling commas within quoted segments
|
// Create array while handling commas within quoted segments
|
||||||
let currentSegment = '';
|
let currentSegment = '';
|
||||||
let withinQuotes = false;
|
let withinQuotes = false;
|
||||||
@@ -1016,7 +1037,7 @@ function isRandomMAC(mac)
|
|||||||
}
|
}
|
||||||
// Push the last segment
|
// Push the last segment
|
||||||
options.push(currentSegment.trim());
|
options.push(currentSegment.trim());
|
||||||
|
|
||||||
// Remove quotes based on detected type
|
// Remove quotes based on detected type
|
||||||
options.forEach((item, index) => {
|
options.forEach((item, index) => {
|
||||||
let trimmedItem = item.trim();
|
let trimmedItem = item.trim();
|
||||||
@@ -1028,7 +1049,7 @@ function isRandomMAC(mac)
|
|||||||
}
|
}
|
||||||
options[index] = trimmedItem;
|
options[index] = trimmedItem;
|
||||||
});
|
});
|
||||||
|
|
||||||
return options;
|
return options;
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -1037,7 +1058,7 @@ function isRandomMAC(mac)
|
|||||||
// for the value to be returned
|
// for the value to be returned
|
||||||
function getDevDataByMac(macAddress, dbColumn) {
|
function getDevDataByMac(macAddress, dbColumn) {
|
||||||
|
|
||||||
const sessionDataKey = 'devicesListAll_JSON';
|
const sessionDataKey = 'devicesListAll_JSON';
|
||||||
const devicesCache = getCache(sessionDataKey);
|
const devicesCache = getCache(sessionDataKey);
|
||||||
|
|
||||||
if (!devicesCache || devicesCache == "") {
|
if (!devicesCache || devicesCache == "") {
|
||||||
@@ -1068,11 +1089,11 @@ function getDevDataByMac(macAddress, dbColumn) {
|
|||||||
// -----------------------------------------------------------------------------
|
// -----------------------------------------------------------------------------
|
||||||
// Cache the devices as one JSON
|
// Cache the devices as one JSON
|
||||||
function cacheDevices()
|
function cacheDevices()
|
||||||
{
|
{
|
||||||
return new Promise((resolve, reject) => {
|
return new Promise((resolve, reject) => {
|
||||||
|
|
||||||
$.get('php/server/query_json.php', { file: 'table_devices.json', nocache: Date.now() }, function(data) {
|
$.get('php/server/query_json.php', { file: 'table_devices.json', nocache: Date.now() }, function(data) {
|
||||||
|
|
||||||
// console.log(data)
|
// console.log(data)
|
||||||
|
|
||||||
devicesListAll_JSON = data["data"]
|
devicesListAll_JSON = data["data"]
|
||||||
@@ -1093,11 +1114,11 @@ function cacheDevices()
|
|||||||
|
|
||||||
// console.log(getCache('devicesListAll_JSON'))
|
// console.log(getCache('devicesListAll_JSON'))
|
||||||
}).then(() => handleSuccess('cacheDevices', resolve())).catch(() => handleFailure('cacheDevices', reject("cacheDevices already completed"))); // handle AJAX synchronization
|
}).then(() => handleSuccess('cacheDevices', resolve())).catch(() => handleFailure('cacheDevices', reject("cacheDevices already completed"))); // handle AJAX synchronization
|
||||||
}
|
}
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
var devicesListAll_JSON = []; // this will contain a list off all devices
|
var devicesListAll_JSON = []; // this will contain a list off all devices
|
||||||
|
|
||||||
// -----------------------------------------------------------------------------
|
// -----------------------------------------------------------------------------
|
||||||
function isEmpty(value)
|
function isEmpty(value)
|
||||||
@@ -1127,7 +1148,7 @@ function getGuid() {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// -----------------------------------------------------------------------------
|
// -----------------------------------------------------------------------------
|
||||||
// UI
|
// UI
|
||||||
// -----------------------------------------------------------------------------
|
// -----------------------------------------------------------------------------
|
||||||
// -----------------------------------------------------------------------------
|
// -----------------------------------------------------------------------------
|
||||||
|
|
||||||
@@ -1230,7 +1251,7 @@ function hideSpinner() {
|
|||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
// --------------------------------------------------------
|
// --------------------------------------------------------
|
||||||
// Calls a backend function to add a front-end event to an execution queue
|
// Calls a backend function to add a front-end event to an execution queue
|
||||||
function updateApi(apiEndpoints)
|
function updateApi(apiEndpoints)
|
||||||
@@ -1250,9 +1271,9 @@ function updateApi(apiEndpoints)
|
|||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
// -----------------------------------------------------------------------------
|
// -----------------------------------------------------------------------------
|
||||||
// handling smooth scrolling
|
// handling smooth scrolling
|
||||||
// -----------------------------------------------------------------------------
|
// -----------------------------------------------------------------------------
|
||||||
function setupSmoothScrolling() {
|
function setupSmoothScrolling() {
|
||||||
// Function to scroll to the element
|
// Function to scroll to the element
|
||||||
function scrollToElement(id) {
|
function scrollToElement(id) {
|
||||||
@@ -1310,17 +1331,17 @@ function getPluginSettingObject(pluginsData, setting_key, unique_prefix ) {
|
|||||||
|
|
||||||
result = {}
|
result = {}
|
||||||
unique_prefix == undefined ? unique_prefix = setting_key.split("_")[0] : unique_prefix = unique_prefix;
|
unique_prefix == undefined ? unique_prefix = setting_key.split("_")[0] : unique_prefix = unique_prefix;
|
||||||
|
|
||||||
$.each(pluginsData, function (i, plgnObj){
|
$.each(pluginsData, function (i, plgnObj){
|
||||||
// go thru plugins
|
// go thru plugins
|
||||||
if(plgnObj.unique_prefix == unique_prefix)
|
if(plgnObj.unique_prefix == unique_prefix)
|
||||||
{
|
{
|
||||||
// go thru plugin settings
|
// go thru plugin settings
|
||||||
$.each(plgnObj["settings"], function (j, setObj){
|
$.each(plgnObj["settings"], function (j, setObj){
|
||||||
|
|
||||||
if(`${unique_prefix}_${setObj.function}` == setting_key)
|
if(`${unique_prefix}_${setObj.function}` == setting_key)
|
||||||
{
|
{
|
||||||
result = setObj
|
result = setObj
|
||||||
}
|
}
|
||||||
|
|
||||||
});
|
});
|
||||||
@@ -1372,7 +1393,7 @@ function arraysContainSameValues(arr1, arr2) {
|
|||||||
if (!Array.isArray(arr1) || !Array.isArray(arr2)) {
|
if (!Array.isArray(arr1) || !Array.isArray(arr2)) {
|
||||||
return false;
|
return false;
|
||||||
} else
|
} else
|
||||||
{
|
{
|
||||||
// Sort and stringify arrays, then compare
|
// Sort and stringify arrays, then compare
|
||||||
return JSON.stringify(arr1.slice().sort()) === JSON.stringify(arr2.slice().sort());
|
return JSON.stringify(arr1.slice().sort()) === JSON.stringify(arr2.slice().sort());
|
||||||
}
|
}
|
||||||
@@ -1383,7 +1404,7 @@ function arraysContainSameValues(arr1, arr2) {
|
|||||||
function hideUIelements(setKey) {
|
function hideUIelements(setKey) {
|
||||||
|
|
||||||
hiddenSectionsSetting = getSetting(setKey)
|
hiddenSectionsSetting = getSetting(setKey)
|
||||||
|
|
||||||
if(hiddenSectionsSetting != "") // handle if settings not yet initialized
|
if(hiddenSectionsSetting != "") // handle if settings not yet initialized
|
||||||
{
|
{
|
||||||
|
|
||||||
@@ -1398,9 +1419,9 @@ function hideUIelements(setKey) {
|
|||||||
|
|
||||||
if($('#' + hiddenSection))
|
if($('#' + hiddenSection))
|
||||||
{
|
{
|
||||||
$('#' + hiddenSection).hide()
|
$('#' + hiddenSection).hide()
|
||||||
}
|
}
|
||||||
|
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -1411,7 +1432,7 @@ function getDevicesList()
|
|||||||
{
|
{
|
||||||
// Read cache (skip cookie expiry check)
|
// Read cache (skip cookie expiry check)
|
||||||
devicesList = getCache('devicesListAll_JSON', true);
|
devicesList = getCache('devicesListAll_JSON', true);
|
||||||
|
|
||||||
if (devicesList != '') {
|
if (devicesList != '') {
|
||||||
devicesList = JSON.parse (devicesList);
|
devicesList = JSON.parse (devicesList);
|
||||||
} else {
|
} else {
|
||||||
@@ -1468,7 +1489,7 @@ $(document).ready(function() {
|
|||||||
// Restart Backend Python Server
|
// Restart Backend Python Server
|
||||||
|
|
||||||
function askRestartBackend() {
|
function askRestartBackend() {
|
||||||
// Ask
|
// Ask
|
||||||
showModalWarning(getString('Maint_RestartServer'), getString('Maint_Restart_Server_noti_text'),
|
showModalWarning(getString('Maint_RestartServer'), getString('Maint_Restart_Server_noti_text'),
|
||||||
getString('Gen_Cancel'), getString('Maint_RestartServer'), 'restartBackend');
|
getString('Gen_Cancel'), getString('Maint_RestartServer'), 'restartBackend');
|
||||||
}
|
}
|
||||||
@@ -1477,7 +1498,7 @@ function askRestartBackend() {
|
|||||||
function restartBackend() {
|
function restartBackend() {
|
||||||
|
|
||||||
modalEventStatusId = 'modal-message-front-event'
|
modalEventStatusId = 'modal-message-front-event'
|
||||||
|
|
||||||
// Execute
|
// Execute
|
||||||
$.ajax({
|
$.ajax({
|
||||||
method: "POST",
|
method: "POST",
|
||||||
@@ -1523,7 +1544,7 @@ function clearCache() {
|
|||||||
// -----------------------------------------------------------------------------
|
// -----------------------------------------------------------------------------
|
||||||
// Function to check if cache needs to be refreshed because of setting changes
|
// Function to check if cache needs to be refreshed because of setting changes
|
||||||
function checkSettingChanges() {
|
function checkSettingChanges() {
|
||||||
$.get('php/server/query_json.php', { file: 'app_state.json', nocache: Date.now() }, function(appState) {
|
$.get('php/server/query_json.php', { file: 'app_state.json', nocache: Date.now() }, function(appState) {
|
||||||
const importedMilliseconds = parseInt(appState["settingsImported"] * 1000);
|
const importedMilliseconds = parseInt(appState["settingsImported"] * 1000);
|
||||||
const lastReloaded = parseInt(sessionStorage.getItem(sessionStorageKey + '_time'));
|
const lastReloaded = parseInt(sessionStorage.getItem(sessionStorageKey + '_time'));
|
||||||
|
|
||||||
@@ -1594,7 +1615,7 @@ function isAppInitialized() {
|
|||||||
|
|
||||||
lang_shouldBeCompletedCalls = getLangCode() == 'en_us' ? 1 : 2;
|
lang_shouldBeCompletedCalls = getLangCode() == 'en_us' ? 1 : 2;
|
||||||
|
|
||||||
// check if each ajax call completed succesfully
|
// check if each ajax call completed succesfully
|
||||||
$.each(completedCalls_final, function(index, call_name){
|
$.each(completedCalls_final, function(index, call_name){
|
||||||
|
|
||||||
if(getCache(call_name + "_completed") != "true")
|
if(getCache(call_name + "_completed") != "true")
|
||||||
@@ -1622,15 +1643,14 @@ async function executeOnce() {
|
|||||||
|
|
||||||
if (!isAppInitialized()) {
|
if (!isAppInitialized()) {
|
||||||
try {
|
try {
|
||||||
console.log("HERE");
|
|
||||||
|
|
||||||
await waitForGraphQLServer(); // Wait for the server to start
|
await waitForGraphQLServer(); // Wait for the server to start
|
||||||
|
|
||||||
await cacheDevices();
|
await cacheDevices();
|
||||||
await cacheSettings();
|
await cacheSettings();
|
||||||
await cacheStrings();
|
await cacheStrings();
|
||||||
|
|
||||||
console.log("✅ All AJAX callbacks have completed");
|
console.log("All AJAX callbacks have completed");
|
||||||
onAllCallsComplete();
|
onAllCallsComplete();
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
console.error("Error:", error);
|
console.error("Error:", error);
|
||||||
@@ -1680,7 +1700,7 @@ const onAllCallsComplete = () => {
|
|||||||
// setTimeout(() => {
|
// setTimeout(() => {
|
||||||
// location.reload()
|
// location.reload()
|
||||||
// }, 10);
|
// }, 10);
|
||||||
|
|
||||||
} else {
|
} else {
|
||||||
// If not all strings are initialized, retry initialization
|
// If not all strings are initialized, retry initialization
|
||||||
console.log('❌ Not all strings are initialized. Retrying...');
|
console.log('❌ Not all strings are initialized. Retrying...');
|
||||||
@@ -1702,7 +1722,7 @@ const areAllStringsInitialized = () => {
|
|||||||
// Call the function to execute the code
|
// Call the function to execute the code
|
||||||
executeOnce();
|
executeOnce();
|
||||||
|
|
||||||
// Set timer for regular UI refresh if enabled
|
// Set timer for regular UI refresh if enabled
|
||||||
setTimeout(() => {
|
setTimeout(() => {
|
||||||
|
|
||||||
// page refresh if configured
|
// page refresh if configured
|
||||||
|
|||||||
@@ -1,26 +1,26 @@
|
|||||||
<?php
|
<?php
|
||||||
require 'php/templates/header.php';
|
require 'php/templates/header.php';
|
||||||
require 'php/templates/modals.php';
|
require 'php/templates/modals.php';
|
||||||
?>
|
?>
|
||||||
|
|
||||||
<script>
|
<script>
|
||||||
// show spinning icon
|
// show spinning icon
|
||||||
showSpinner()
|
showSpinner()
|
||||||
</script>
|
</script>
|
||||||
|
|
||||||
<!-- Page ------------------------------------------------------------------ -->
|
<!-- Page ------------------------------------------------------------------ -->
|
||||||
<div class="content-wrapper">
|
<div class="content-wrapper">
|
||||||
<span class="helpIcon">
|
<span class="helpIcon">
|
||||||
<a target="_blank" href="https://github.com/jokob-sk/NetAlertX/blob/main/docs/NETWORK_TREE.md">
|
<a target="_blank" href="https://github.com/jokob-sk/NetAlertX/blob/main/docs/NETWORK_TREE.md">
|
||||||
<i class="fa fa-circle-question"></i>
|
<i class="fa fa-circle-question"></i>
|
||||||
</a>
|
</a>
|
||||||
</span>
|
</span>
|
||||||
|
|
||||||
<div id="toggleFilters" class="">
|
<div id="toggleFilters" class="">
|
||||||
<div class="checkbox icheck col-xs-12">
|
<div class="checkbox icheck col-xs-12">
|
||||||
<label>
|
<label>
|
||||||
<input type="checkbox" name="showOffline" checked>
|
<input type="checkbox" name="showOffline" checked>
|
||||||
<div style="margin-left: 10px; display: inline-block; vertical-align: top;">
|
<div style="margin-left: 10px; display: inline-block; vertical-align: top;">
|
||||||
<?= lang('Network_ShowOffline');?>
|
<?= lang('Network_ShowOffline');?>
|
||||||
<span id="showOfflineNumber">
|
<span id="showOfflineNumber">
|
||||||
<!-- placeholder -->
|
<!-- placeholder -->
|
||||||
@@ -31,14 +31,14 @@
|
|||||||
<div class="checkbox icheck col-xs-12">
|
<div class="checkbox icheck col-xs-12">
|
||||||
<label>
|
<label>
|
||||||
<input type="checkbox" name="showArchived">
|
<input type="checkbox" name="showArchived">
|
||||||
<div style="margin-left: 10px; display: inline-block; vertical-align: top;">
|
<div style="margin-left: 10px; display: inline-block; vertical-align: top;">
|
||||||
<?= lang('Network_ShowArchived');?>
|
<?= lang('Network_ShowArchived');?>
|
||||||
<span id="showArchivedNumber">
|
<span id="showArchivedNumber">
|
||||||
<!-- placeholder -->
|
<!-- placeholder -->
|
||||||
</span>
|
</span>
|
||||||
</div>
|
</div>
|
||||||
</label>
|
</label>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
<div id="networkTree" class="drag">
|
<div id="networkTree" class="drag">
|
||||||
@@ -55,8 +55,8 @@
|
|||||||
</div>
|
</div>
|
||||||
<div class="tab-content">
|
<div class="tab-content">
|
||||||
<!-- Placeholder -->
|
<!-- Placeholder -->
|
||||||
</div>
|
</div>
|
||||||
</section>
|
</section>
|
||||||
<section id="unassigned-devices-wrapper">
|
<section id="unassigned-devices-wrapper">
|
||||||
<!-- Placeholder -->
|
<!-- Placeholder -->
|
||||||
</section>
|
</section>
|
||||||
@@ -69,7 +69,7 @@
|
|||||||
require 'php/templates/footer.php';
|
require 'php/templates/footer.php';
|
||||||
?>
|
?>
|
||||||
|
|
||||||
<script src="lib/treeviz/bundle.js"></script>
|
<script src="lib/treeviz/bundle.js"></script>
|
||||||
|
|
||||||
<script defer>
|
<script defer>
|
||||||
|
|
||||||
@@ -78,12 +78,12 @@
|
|||||||
|
|
||||||
// Create Top level tabs (List of network devices), explanation of the terminology below:
|
// Create Top level tabs (List of network devices), explanation of the terminology below:
|
||||||
//
|
//
|
||||||
// Switch 1 (node)
|
// Switch 1 (node)
|
||||||
// /(p1) \ (p2) <----- port numbers
|
// /(p1) \ (p2) <----- port numbers
|
||||||
// / \
|
// / \
|
||||||
// Smart TV (leaf) Switch 2 (node (for the PC) and leaf (for Switch 1))
|
// Smart TV (leaf) Switch 2 (node (for the PC) and leaf (for Switch 1))
|
||||||
// \
|
// \
|
||||||
// PC (leaf) <------- leafs are not included in this SQL query
|
// PC (leaf) <------- leafs are not included in this SQL query
|
||||||
const rawSql = `
|
const rawSql = `
|
||||||
SELECT node_name, node_mac, online, node_type, node_ports_count, parent_mac, node_icon, node_alert
|
SELECT node_name, node_mac, online, node_type, node_ports_count, parent_mac, node_icon, node_alert
|
||||||
FROM (
|
FROM (
|
||||||
@@ -120,7 +120,7 @@
|
|||||||
|
|
||||||
const portLabel = node.node_ports_count ? ` (${node.node_ports_count})` : '';
|
const portLabel = node.node_ports_count ? ` (${node.node_ports_count})` : '';
|
||||||
const icon = atob(node.node_icon);
|
const icon = atob(node.node_icon);
|
||||||
const id = node.node_mac.replace(/:/g, '_');
|
const id = node.node_mac.replace(/:/g, '_');
|
||||||
|
|
||||||
html += `
|
html += `
|
||||||
<li class="networkNodeTabHeaders ${i === 0 ? 'active' : ''}">
|
<li class="networkNodeTabHeaders ${i === 0 ? 'active' : ''}">
|
||||||
@@ -137,13 +137,13 @@
|
|||||||
renderNetworkTabContent(nodes);
|
renderNetworkTabContent(nodes);
|
||||||
|
|
||||||
// init selected (first) tab
|
// init selected (first) tab
|
||||||
initTab();
|
initTab();
|
||||||
|
|
||||||
// init selected node highlighting
|
// init selected node highlighting
|
||||||
initSelectedNodeHighlighting()
|
initSelectedNodeHighlighting()
|
||||||
|
|
||||||
// Register events on tab change
|
// Register events on tab change
|
||||||
$('a[data-toggle="tab"]').on('shown.bs.tab', function (e) {
|
$('a[data-toggle="tab"]').on('shown.bs.tab', function (e) {
|
||||||
initSelectedNodeHighlighting()
|
initSelectedNodeHighlighting()
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
@@ -205,10 +205,10 @@
|
|||||||
<hr/>
|
<hr/>
|
||||||
<div class="box box-aqua box-body" id="connected">
|
<div class="box box-aqua box-body" id="connected">
|
||||||
<h5>
|
<h5>
|
||||||
<i class="fa fa-sitemap fa-rotate-270"></i>
|
<i class="fa fa-sitemap fa-rotate-270"></i>
|
||||||
${getString('Network_Connected')}
|
${getString('Network_Connected')}
|
||||||
</h5>
|
</h5>
|
||||||
|
|
||||||
<div id="leafs_${id}" class="table-responsive"></div>
|
<div id="leafs_${id}" class="table-responsive"></div>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
@@ -234,9 +234,9 @@
|
|||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
$container.html(wrapperHtml);
|
$container.html(wrapperHtml);
|
||||||
|
|
||||||
const $table = $(`#${tableId}`);
|
const $table = $(`#${tableId}`);
|
||||||
|
|
||||||
const columns = [
|
const columns = [
|
||||||
@@ -298,7 +298,7 @@
|
|||||||
title: getString('Device_TableHead_Vendor'),
|
title: getString('Device_TableHead_Vendor'),
|
||||||
data: 'devVendor',
|
data: 'devVendor',
|
||||||
width: '20%'
|
width: '20%'
|
||||||
}
|
}
|
||||||
].filter(Boolean);
|
].filter(Boolean);
|
||||||
|
|
||||||
|
|
||||||
@@ -356,7 +356,7 @@
|
|||||||
function loadConnectedDevices(node_mac) {
|
function loadConnectedDevices(node_mac) {
|
||||||
const sql = `
|
const sql = `
|
||||||
SELECT devName, devMac, devLastIP, devVendor, devPresentLastScan, devAlertDown, devParentPort,
|
SELECT devName, devMac, devLastIP, devVendor, devPresentLastScan, devAlertDown, devParentPort,
|
||||||
CASE
|
CASE
|
||||||
WHEN devIsNew = 1 THEN 'New'
|
WHEN devIsNew = 1 THEN 'New'
|
||||||
WHEN devPresentLastScan = 1 THEN 'On-line'
|
WHEN devPresentLastScan = 1 THEN 'On-line'
|
||||||
WHEN devPresentLastScan = 0 AND devAlertDown != 0 THEN 'Down'
|
WHEN devPresentLastScan = 0 AND devAlertDown != 0 THEN 'Down'
|
||||||
@@ -371,7 +371,7 @@
|
|||||||
|
|
||||||
const wrapperHtml = `
|
const wrapperHtml = `
|
||||||
<table class="table table-bordered table-striped node-leafs-table " id="table_leafs_${id}" data-node-mac="${node_mac}">
|
<table class="table table-bordered table-striped node-leafs-table " id="table_leafs_${id}" data-node-mac="${node_mac}">
|
||||||
|
|
||||||
</table>`;
|
</table>`;
|
||||||
|
|
||||||
loadDeviceTable({
|
loadDeviceTable({
|
||||||
@@ -414,12 +414,12 @@
|
|||||||
$.get(apiUrl, function (data) {
|
$.get(apiUrl, function (data) {
|
||||||
|
|
||||||
console.log(data);
|
console.log(data);
|
||||||
|
|
||||||
const parsed = JSON.parse(data);
|
const parsed = JSON.parse(data);
|
||||||
const allDevices = parsed;
|
const allDevices = parsed;
|
||||||
|
|
||||||
console.log(allDevices);
|
console.log(allDevices);
|
||||||
|
|
||||||
|
|
||||||
if (!allDevices || allDevices.length === 0) {
|
if (!allDevices || allDevices.length === 0) {
|
||||||
showModalOK(getString('Gen_Warning'), getString('Network_NoDevices'));
|
showModalOK(getString('Gen_Warning'), getString('Network_NoDevices'));
|
||||||
@@ -439,7 +439,7 @@
|
|||||||
{
|
{
|
||||||
$('#showArchivedNumber').text(`(${archivedCount})`);
|
$('#showArchivedNumber').text(`(${archivedCount})`);
|
||||||
}
|
}
|
||||||
|
|
||||||
if(offlineCount > 0)
|
if(offlineCount > 0)
|
||||||
{
|
{
|
||||||
$('#showOfflineNumber').text(`(${offlineCount})`);
|
$('#showOfflineNumber').text(`(${offlineCount})`);
|
||||||
@@ -501,7 +501,7 @@ var visibleNodesCount = 0;
|
|||||||
var parentNodesCount = 0;
|
var parentNodesCount = 0;
|
||||||
var hiddenMacs = []; // hidden children
|
var hiddenMacs = []; // hidden children
|
||||||
var hiddenChildren = [];
|
var hiddenChildren = [];
|
||||||
var deviceListGlobal = null;
|
var deviceListGlobal = null;
|
||||||
|
|
||||||
// ---------------------------------------------------------------------------
|
// ---------------------------------------------------------------------------
|
||||||
// Recursively get children nodes and build a tree
|
// Recursively get children nodes and build a tree
|
||||||
@@ -521,13 +521,17 @@ function getChildren(node, list, path, visited = [])
|
|||||||
|
|
||||||
// Loop through all items to find children of the current node
|
// Loop through all items to find children of the current node
|
||||||
for (var i in list) {
|
for (var i in list) {
|
||||||
if (list[i].devParentMAC.toLowerCase() == node.devMac.toLowerCase() && !hiddenMacs.includes(list[i].devParentMAC)) {
|
const item = list[i];
|
||||||
|
const parentMac = item.devParentMAC || ""; // null-safe
|
||||||
|
const nodeMac = node.devMac || ""; // null-safe
|
||||||
|
|
||||||
visibleNodesCount++;
|
if (parentMac != "" && parentMac.toLowerCase() == nodeMac.toLowerCase() && !hiddenMacs.includes(parentMac)) {
|
||||||
|
|
||||||
// Process children recursively, passing a copy of the visited list
|
visibleNodesCount++;
|
||||||
children.push(getChildren(list[i], list, path + ((path == "") ? "" : '|') + list[i].devParentMAC, visited));
|
|
||||||
}
|
// Process children recursively, passing a copy of the visited list
|
||||||
|
children.push(getChildren(list[i], list, path + ((path == "") ? "" : '|') + parentMac, visited));
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Track leaf and parent node counts
|
// Track leaf and parent node counts
|
||||||
@@ -537,7 +541,7 @@ function getChildren(node, list, path, visited = [])
|
|||||||
parentNodesCount++;
|
parentNodesCount++;
|
||||||
}
|
}
|
||||||
|
|
||||||
return {
|
return {
|
||||||
name: node.devName,
|
name: node.devName,
|
||||||
path: path,
|
path: path,
|
||||||
mac: node.devMac,
|
mac: node.devMac,
|
||||||
@@ -562,19 +566,32 @@ function getChildren(node, list, path, visited = [])
|
|||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
// ---------------------------------------------------------------------------
|
// ---------------------------------------------------------------------------
|
||||||
function getHierarchy()
|
function getHierarchy()
|
||||||
{
|
{
|
||||||
|
let internetNode = null;
|
||||||
|
|
||||||
for(i in deviceListGlobal)
|
for(i in deviceListGlobal)
|
||||||
{
|
{
|
||||||
if(deviceListGlobal[i].devMac == 'Internet')
|
if(deviceListGlobal[i].devMac == 'Internet')
|
||||||
{
|
{
|
||||||
return (getChildren(deviceListGlobal[i], deviceListGlobal, ''))
|
internetNode = deviceListGlobal[i];
|
||||||
|
|
||||||
|
return (getChildren(internetNode, deviceListGlobal, ''))
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if (!internetNode) {
|
||||||
|
showModalOk(
|
||||||
|
getString('Network_Configuration_Error'),
|
||||||
|
getString('Network_Root_Not_Configured')
|
||||||
|
);
|
||||||
|
console.error("getHierarchy(): Internet node not found");
|
||||||
|
return null;
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
//---------------------------------------------------------------------------
|
//---------------------------------------------------------------------------
|
||||||
function toggleSubTree(parentMac, treePath)
|
function toggleSubTree(parentMac, treePath)
|
||||||
{
|
{
|
||||||
@@ -593,33 +610,33 @@ function toggleSubTree(parentMac, treePath)
|
|||||||
myTree.refresh(updatedTree);
|
myTree.refresh(updatedTree);
|
||||||
|
|
||||||
// re-attach any onclick events
|
// re-attach any onclick events
|
||||||
attachTreeEvents();
|
attachTreeEvents();
|
||||||
}
|
}
|
||||||
|
|
||||||
// ---------------------------------------------------------------------------
|
// ---------------------------------------------------------------------------
|
||||||
function attachTreeEvents()
|
function attachTreeEvents()
|
||||||
{
|
{
|
||||||
// toggle subtree functionality
|
// toggle subtree functionality
|
||||||
$("div[data-mytreemac]").each(function(){
|
$("div[data-mytreemac]").each(function(){
|
||||||
$(this).attr('onclick', 'toggleSubTree("'+$(this).attr('data-mytreemac')+'","'+ $(this).attr('data-mytreepath')+'")')
|
$(this).attr('onclick', 'toggleSubTree("'+$(this).attr('data-mytreemac')+'","'+ $(this).attr('data-mytreepath')+'")')
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
// ---------------------------------------------------------------------------
|
// ---------------------------------------------------------------------------
|
||||||
// Handle network node click - select correct tab in the bottom table
|
// Handle network node click - select correct tab in the bottom table
|
||||||
function handleNodeClick(el)
|
function handleNodeClick(el)
|
||||||
{
|
{
|
||||||
|
|
||||||
isNetworkDevice = $(el).data("devisnetworknodedynamic") == 1;
|
isNetworkDevice = $(el).data("devisnetworknodedynamic") == 1;
|
||||||
targetTabMAC = ""
|
targetTabMAC = ""
|
||||||
thisDevMac= $(el).data("mac");
|
thisDevMac= $(el).data("mac");
|
||||||
|
|
||||||
if (isNetworkDevice == false)
|
if (isNetworkDevice == false)
|
||||||
{
|
{
|
||||||
targetTabMAC = $(el).data("parentmac");
|
targetTabMAC = $(el).data("parentmac");
|
||||||
} else
|
} else
|
||||||
{
|
{
|
||||||
targetTabMAC = thisDevMac;
|
targetTabMAC = thisDevMac;
|
||||||
}
|
}
|
||||||
|
|
||||||
var targetTab = $(`a[data-mytabmac="${targetTabMAC}"]`);
|
var targetTab = $(`a[data-mytabmac="${targetTabMAC}"]`);
|
||||||
@@ -628,8 +645,8 @@ function handleNodeClick(el)
|
|||||||
// Simulate a click event on the target tab
|
// Simulate a click event on the target tab
|
||||||
targetTab.click();
|
targetTab.click();
|
||||||
|
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
if (isNetworkDevice) {
|
if (isNetworkDevice) {
|
||||||
// Smooth scroll to the tab content
|
// Smooth scroll to the tab content
|
||||||
@@ -639,7 +656,7 @@ function handleNodeClick(el)
|
|||||||
} else {
|
} else {
|
||||||
$("tr.selected").removeClass("selected");
|
$("tr.selected").removeClass("selected");
|
||||||
$(`tr[data-mac="${thisDevMac}"]`).addClass("selected");
|
$(`tr[data-mac="${thisDevMac}"]`).addClass("selected");
|
||||||
|
|
||||||
const tableId = "table_leafs_" + targetTabMAC.replace(/:/g, '_');
|
const tableId = "table_leafs_" + targetTabMAC.replace(/:/g, '_');
|
||||||
const $table = $(`#${tableId}`).DataTable();
|
const $table = $(`#${tableId}`).DataTable();
|
||||||
|
|
||||||
@@ -669,10 +686,8 @@ function handleNodeClick(el)
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// ---------------------------------------------------------------------------
|
// ---------------------------------------------------------------------------
|
||||||
var myTree;
|
var myTree;
|
||||||
|
|
||||||
|
|
||||||
var emSize;
|
var emSize;
|
||||||
var nodeHeight;
|
var nodeHeight;
|
||||||
// var sizeCoefficient = 1.4
|
// var sizeCoefficient = 1.4
|
||||||
@@ -689,140 +704,139 @@ function emToPx(em, element) {
|
|||||||
|
|
||||||
function initTree(myHierarchy)
|
function initTree(myHierarchy)
|
||||||
{
|
{
|
||||||
// calculate the drawing area based on teh tree width and available screen size
|
if(myHierarchy && myHierarchy.type !== "")
|
||||||
|
|
||||||
let baseFontSize = parseFloat($('html').css('font-size'));
|
|
||||||
let treeAreaHeight = ($(window).height() - 155); ;
|
|
||||||
// calculate the font size of the leaf nodes to fit everything into the tree area
|
|
||||||
leafNodesCount == 0 ? 1 : leafNodesCount;
|
|
||||||
|
|
||||||
emSize = pxToEm((treeAreaHeight/(leafNodesCount)).toFixed(2));
|
|
||||||
|
|
||||||
let screenWidthEm = pxToEm($('.networkTable').width()-15);
|
|
||||||
|
|
||||||
// init the drawing area size
|
|
||||||
$("#networkTree").attr('style', `height:${treeAreaHeight}px; width:${emToPx(screenWidthEm)}px`)
|
|
||||||
|
|
||||||
if(myHierarchy.type == "")
|
|
||||||
{
|
{
|
||||||
showModalOk(getString('Network_Configuration_Error'), getString('Network_Root_Not_Configured'))
|
// calculate the drawing area based on the tree width and available screen size
|
||||||
|
let baseFontSize = parseFloat($('html').css('font-size'));
|
||||||
return;
|
let treeAreaHeight = ($(window).height() - 155); ;
|
||||||
|
|
||||||
|
// calculate the font size of the leaf nodes to fit everything into the tree area
|
||||||
|
leafNodesCount == 0 ? 1 : leafNodesCount;
|
||||||
|
|
||||||
|
emSize = pxToEm((treeAreaHeight/(leafNodesCount)).toFixed(2));
|
||||||
|
|
||||||
|
let screenWidthEm = pxToEm($('.networkTable').width()-15);
|
||||||
|
|
||||||
|
// init the drawing area size
|
||||||
|
$("#networkTree").attr('style', `height:${treeAreaHeight}px; width:${emToPx(screenWidthEm)}px`)
|
||||||
|
|
||||||
|
// handle canvas and node size if only a few nodes
|
||||||
|
emSize > 1 ? emSize = 1 : emSize = emSize;
|
||||||
|
|
||||||
|
let nodeHeightPx = emToPx(emSize*1);
|
||||||
|
let nodeWidthPx = emToPx(screenWidthEm / (parentNodesCount));
|
||||||
|
|
||||||
|
// handle if only a few nodes
|
||||||
|
nodeWidthPx > 160 ? nodeWidthPx = 160 : nodeWidthPx = nodeWidthPx;
|
||||||
|
|
||||||
|
console.log(Treeviz);
|
||||||
|
|
||||||
|
myTree = Treeviz.create({
|
||||||
|
htmlId: "networkTree",
|
||||||
|
renderNode: nodeData => {
|
||||||
|
|
||||||
|
(!emptyArr.includes(nodeData.data.port )) ? port = nodeData.data.port : port = "";
|
||||||
|
|
||||||
|
(port == "" || port == 0 || port == 'None' ) ? portBckgIcon = `<i class="fa fa-wifi"></i>` : portBckgIcon = `<i class="fa fa-ethernet"></i>`;
|
||||||
|
|
||||||
|
portHtml = (port == "" || port == 0 || port == 'None' ) ? "   " : port;
|
||||||
|
|
||||||
|
// Build HTML for individual nodes in the network diagram
|
||||||
|
deviceIcon = (!emptyArr.includes(nodeData.data.icon )) ?
|
||||||
|
`<div class="netIcon">
|
||||||
|
${atob(nodeData.data.icon)}
|
||||||
|
</div>` : "";
|
||||||
|
devicePort = `<div class="netPort"
|
||||||
|
style="width:${emSize}em;height:${emSize}em">
|
||||||
|
${portHtml}</div>
|
||||||
|
<div class="portBckgIcon"
|
||||||
|
style="margin-left:-${emSize*0.7}em;">
|
||||||
|
${portBckgIcon}
|
||||||
|
</div>`;
|
||||||
|
collapseExpandIcon = nodeData.data.hiddenChildren ?
|
||||||
|
"square-plus" : "square-minus";
|
||||||
|
|
||||||
|
// generate +/- icon if node has children nodes
|
||||||
|
collapseExpandHtml = nodeData.data.hasChildren ?
|
||||||
|
`<div class="netCollapse"
|
||||||
|
style="font-size:${nodeHeightPx/2}px;top:${Math.floor(nodeHeightPx / 4)}px"
|
||||||
|
data-mytreepath="${nodeData.data.path}"
|
||||||
|
data-mytreemac="${nodeData.data.mac}">
|
||||||
|
<i class="fa fa-${collapseExpandIcon} pointer"></i>
|
||||||
|
</div>` : "";
|
||||||
|
|
||||||
|
selectedNodeMac = $(".nav-tabs-custom .active a").attr('data-mytabmac')
|
||||||
|
|
||||||
|
highlightedCss = nodeData.data.mac == selectedNodeMac ?
|
||||||
|
" highlightedNode " : "";
|
||||||
|
cssNodeType = nodeData.data.devIsNetworkNodeDynamic ?
|
||||||
|
" node-network-device " : " node-standard-device ";
|
||||||
|
|
||||||
|
networkHardwareIcon = nodeData.data.devIsNetworkNodeDynamic ? `<span class="network-hw-icon">
|
||||||
|
<i class="fa-solid fa-hard-drive"></i>
|
||||||
|
</span>` : "";
|
||||||
|
|
||||||
|
const badgeConf = getStatusBadgeParts(nodeData.data.presentLastScan, nodeData.data.alertDown, nodeData.data.mac, statusText = '')
|
||||||
|
|
||||||
|
return result = `<div
|
||||||
|
class="node-inner hover-node-info box pointer ${highlightedCss} ${cssNodeType}"
|
||||||
|
style="height:${nodeHeightPx}px;font-size:${nodeHeightPx-5}px;"
|
||||||
|
onclick="handleNodeClick(this)"
|
||||||
|
data-mac="${nodeData.data.mac}"
|
||||||
|
data-parentMac="${nodeData.data.parentMac}"
|
||||||
|
data-name="${nodeData.data.name}"
|
||||||
|
data-ip="${nodeData.data.ip}"
|
||||||
|
data-mac="${nodeData.data.mac}"
|
||||||
|
data-vendor="${nodeData.data.vendor}"
|
||||||
|
data-type="${nodeData.data.type}"
|
||||||
|
data-devIsNetworkNodeDynamic="${nodeData.data.devIsNetworkNodeDynamic}"
|
||||||
|
data-lastseen="${nodeData.data.lastseen}"
|
||||||
|
data-firstseen="${nodeData.data.firstseen}"
|
||||||
|
data-relationship="${nodeData.data.relType}"
|
||||||
|
data-status="${nodeData.data.status}"
|
||||||
|
data-present="${nodeData.data.presentLastScan}"
|
||||||
|
data-alert="${nodeData.data.alertDown}"
|
||||||
|
data-icon="${nodeData.data.icon}"
|
||||||
|
>
|
||||||
|
<div class="netNodeText">
|
||||||
|
<strong><span>${devicePort} <span class="${badgeConf.cssText}">${deviceIcon}</span></span>
|
||||||
|
<span class="spanNetworkTree anonymizeDev" style="width:${nodeWidthPx-50}px">${nodeData.data.name}</span>
|
||||||
|
${networkHardwareIcon}
|
||||||
|
</strong>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
${collapseExpandHtml}`;
|
||||||
|
},
|
||||||
|
mainAxisNodeSpacing: 'auto',
|
||||||
|
// secondaryAxisNodeSpacing: 0.3,
|
||||||
|
nodeHeight: nodeHeightPx,
|
||||||
|
nodeWidth: nodeWidthPx,
|
||||||
|
marginTop: '5',
|
||||||
|
isHorizontal : true,
|
||||||
|
hasZoom: true,
|
||||||
|
hasPan: true,
|
||||||
|
marginLeft: '10',
|
||||||
|
marginRight: '10',
|
||||||
|
idKey: "mac",
|
||||||
|
hasFlatData: false,
|
||||||
|
relationnalField: "children",
|
||||||
|
linkWidth: (nodeData) => 2,
|
||||||
|
linkColor: (nodeData) => {
|
||||||
|
relConf = getRelationshipConf(nodeData.data.relType)
|
||||||
|
return relConf.color;
|
||||||
|
}
|
||||||
|
// onNodeClick: (nodeData) => handleNodeClick(nodeData),
|
||||||
|
});
|
||||||
|
|
||||||
|
console.log(deviceListGlobal);
|
||||||
|
myTree.refresh(myHierarchy);
|
||||||
|
|
||||||
|
// hide spinning icon
|
||||||
|
hideSpinner()
|
||||||
|
} else
|
||||||
|
{
|
||||||
|
console.error("getHierarchy() not returning expected result");
|
||||||
}
|
}
|
||||||
|
|
||||||
// handle canvas and node size if only a few nodes
|
|
||||||
emSize > 1 ? emSize = 1 : emSize = emSize;
|
|
||||||
|
|
||||||
let nodeHeightPx = emToPx(emSize*1);
|
|
||||||
let nodeWidthPx = emToPx(screenWidthEm / (parentNodesCount));
|
|
||||||
|
|
||||||
// handle if only a few nodes
|
|
||||||
nodeWidthPx > 160 ? nodeWidthPx = 160 : nodeWidthPx = nodeWidthPx;
|
|
||||||
|
|
||||||
console.log(Treeviz);
|
|
||||||
|
|
||||||
myTree = Treeviz.create({
|
|
||||||
htmlId: "networkTree",
|
|
||||||
renderNode: nodeData => {
|
|
||||||
|
|
||||||
(!emptyArr.includes(nodeData.data.port )) ? port = nodeData.data.port : port = "";
|
|
||||||
|
|
||||||
(port == "" || port == 0 || port == 'None' ) ? portBckgIcon = `<i class="fa fa-wifi"></i>` : portBckgIcon = `<i class="fa fa-ethernet"></i>`;
|
|
||||||
|
|
||||||
portHtml = (port == "" || port == 0 || port == 'None' ) ? "   " : port;
|
|
||||||
|
|
||||||
// Build HTML for individual nodes in the network diagram
|
|
||||||
deviceIcon = (!emptyArr.includes(nodeData.data.icon )) ?
|
|
||||||
`<div class="netIcon">
|
|
||||||
${atob(nodeData.data.icon)}
|
|
||||||
</div>` : "";
|
|
||||||
devicePort = `<div class="netPort"
|
|
||||||
style="width:${emSize}em;height:${emSize}em">
|
|
||||||
${portHtml}</div>
|
|
||||||
<div class="portBckgIcon"
|
|
||||||
style="margin-left:-${emSize*0.7}em;">
|
|
||||||
${portBckgIcon}
|
|
||||||
</div>`;
|
|
||||||
collapseExpandIcon = nodeData.data.hiddenChildren ?
|
|
||||||
"square-plus" : "square-minus";
|
|
||||||
|
|
||||||
// generate +/- icon if node has children nodes
|
|
||||||
collapseExpandHtml = nodeData.data.hasChildren ?
|
|
||||||
`<div class="netCollapse"
|
|
||||||
style="font-size:${nodeHeightPx/2}px;top:${Math.floor(nodeHeightPx / 4)}px"
|
|
||||||
data-mytreepath="${nodeData.data.path}"
|
|
||||||
data-mytreemac="${nodeData.data.mac}">
|
|
||||||
<i class="fa fa-${collapseExpandIcon} pointer"></i>
|
|
||||||
</div>` : "";
|
|
||||||
|
|
||||||
selectedNodeMac = $(".nav-tabs-custom .active a").attr('data-mytabmac')
|
|
||||||
|
|
||||||
highlightedCss = nodeData.data.mac == selectedNodeMac ?
|
|
||||||
" highlightedNode " : "";
|
|
||||||
cssNodeType = nodeData.data.devIsNetworkNodeDynamic ?
|
|
||||||
" node-network-device " : " node-standard-device ";
|
|
||||||
|
|
||||||
networkHardwareIcon = nodeData.data.devIsNetworkNodeDynamic ? `<span class="network-hw-icon">
|
|
||||||
<i class="fa-solid fa-hard-drive"></i>
|
|
||||||
</span>` : "";
|
|
||||||
|
|
||||||
const badgeConf = getStatusBadgeParts(nodeData.data.presentLastScan, nodeData.data.alertDown, nodeData.data.mac, statusText = '')
|
|
||||||
|
|
||||||
return result = `<div
|
|
||||||
class="node-inner hover-node-info box pointer ${highlightedCss} ${cssNodeType}"
|
|
||||||
style="height:${nodeHeightPx}px;font-size:${nodeHeightPx-5}px;"
|
|
||||||
onclick="handleNodeClick(this)"
|
|
||||||
data-mac="${nodeData.data.mac}"
|
|
||||||
data-parentMac="${nodeData.data.parentMac}"
|
|
||||||
data-name="${nodeData.data.name}"
|
|
||||||
data-ip="${nodeData.data.ip}"
|
|
||||||
data-mac="${nodeData.data.mac}"
|
|
||||||
data-vendor="${nodeData.data.vendor}"
|
|
||||||
data-type="${nodeData.data.type}"
|
|
||||||
data-devIsNetworkNodeDynamic="${nodeData.data.devIsNetworkNodeDynamic}"
|
|
||||||
data-lastseen="${nodeData.data.lastseen}"
|
|
||||||
data-firstseen="${nodeData.data.firstseen}"
|
|
||||||
data-relationship="${nodeData.data.relType}"
|
|
||||||
data-status="${nodeData.data.status}"
|
|
||||||
data-present="${nodeData.data.presentLastScan}"
|
|
||||||
data-alert="${nodeData.data.alertDown}"
|
|
||||||
data-icon="${nodeData.data.icon}"
|
|
||||||
>
|
|
||||||
<div class="netNodeText">
|
|
||||||
<strong><span>${devicePort} <span class="${badgeConf.cssText}">${deviceIcon}</span></span>
|
|
||||||
<span class="spanNetworkTree anonymizeDev" style="width:${nodeWidthPx-50}px">${nodeData.data.name}</span>
|
|
||||||
${networkHardwareIcon}
|
|
||||||
</strong>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
${collapseExpandHtml}`;
|
|
||||||
},
|
|
||||||
mainAxisNodeSpacing: 'auto',
|
|
||||||
// secondaryAxisNodeSpacing: 0.3,
|
|
||||||
nodeHeight: nodeHeightPx,
|
|
||||||
nodeWidth: nodeWidthPx,
|
|
||||||
marginTop: '5',
|
|
||||||
isHorizontal : true,
|
|
||||||
hasZoom: true,
|
|
||||||
hasPan: true,
|
|
||||||
marginLeft: '10',
|
|
||||||
marginRight: '10',
|
|
||||||
idKey: "mac",
|
|
||||||
hasFlatData: false,
|
|
||||||
relationnalField: "children",
|
|
||||||
linkWidth: (nodeData) => 2,
|
|
||||||
linkColor: (nodeData) => {
|
|
||||||
relConf = getRelationshipConf(nodeData.data.relType)
|
|
||||||
return relConf.color;
|
|
||||||
}
|
|
||||||
// onNodeClick: (nodeData) => handleNodeClick(nodeData),
|
|
||||||
});
|
|
||||||
|
|
||||||
console.log(deviceListGlobal);
|
|
||||||
myTree.refresh(myHierarchy);
|
|
||||||
|
|
||||||
// hide spinning icon
|
|
||||||
hideSpinner()
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
@@ -839,11 +853,11 @@ function initTab()
|
|||||||
selectedTab = "Internet_id"
|
selectedTab = "Internet_id"
|
||||||
|
|
||||||
// the #target from the url
|
// the #target from the url
|
||||||
target = getQueryString('mac')
|
target = getQueryString('mac')
|
||||||
|
|
||||||
// update cookie if target specified
|
// update cookie if target specified
|
||||||
if(target != "")
|
if(target != "")
|
||||||
{
|
{
|
||||||
setCache(key, target.replaceAll(":","_")+'_id') // _id is added so it doesn't conflict with AdminLTE tab behavior
|
setCache(key, target.replaceAll(":","_")+'_id') // _id is added so it doesn't conflict with AdminLTE tab behavior
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -860,12 +874,12 @@ function initTab()
|
|||||||
$('a[data-toggle="tab"]').on('shown.bs.tab', function (e) {
|
$('a[data-toggle="tab"]').on('shown.bs.tab', function (e) {
|
||||||
setCache(key, $(e.target).attr('id'))
|
setCache(key, $(e.target).attr('id'))
|
||||||
});
|
});
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// ---------------------------------------------------------------------------
|
// ---------------------------------------------------------------------------
|
||||||
function initSelectedNodeHighlighting()
|
function initSelectedNodeHighlighting()
|
||||||
{
|
{
|
||||||
|
|
||||||
var currentNodeMac = $(".networkNodeTabHeaders.active a").data("mytabmac");
|
var currentNodeMac = $(".networkNodeTabHeaders.active a").data("mytabmac");
|
||||||
|
|
||||||
@@ -882,7 +896,7 @@ function initSelectedNodeHighlighting()
|
|||||||
newSelNode = $("#networkTree div[data-mac='"+currentNodeMac+"']")[0]
|
newSelNode = $("#networkTree div[data-mac='"+currentNodeMac+"']")[0]
|
||||||
|
|
||||||
console.log(newSelNode)
|
console.log(newSelNode)
|
||||||
|
|
||||||
$(newSelNode).attr('class', $(newSelNode).attr('class') + ' highlightedNode')
|
$(newSelNode).attr('class', $(newSelNode).attr('class') + ' highlightedNode')
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -913,7 +927,7 @@ function updateLeaf(leafMac, action) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// ---------------------------------------------------------------------------
|
// ---------------------------------------------------------------------------
|
||||||
// showing icons or device names in tabs depending on available screen size
|
// showing icons or device names in tabs depending on available screen size
|
||||||
function checkTabsOverflow() {
|
function checkTabsOverflow() {
|
||||||
const $ul = $('.nav-tabs');
|
const $ul = $('.nav-tabs');
|
||||||
const $lis = $ul.find('li');
|
const $lis = $ul.find('li');
|
||||||
|
|||||||
@@ -1,7 +1,7 @@
|
|||||||
<?php
|
<?php
|
||||||
//------------------------------------------------------------------------------
|
//------------------------------------------------------------------------------
|
||||||
// NetAlertX
|
// NetAlertX
|
||||||
// Open Source Network Guard / WIFI & LAN intrusion detector
|
// Open Source Network Guard / WIFI & LAN intrusion detector
|
||||||
//
|
//
|
||||||
// util.php - Front module. Server side. Common generic functions
|
// util.php - Front module. Server side. Common generic functions
|
||||||
//------------------------------------------------------------------------------
|
//------------------------------------------------------------------------------
|
||||||
@@ -22,8 +22,8 @@ $ACTION = "";
|
|||||||
|
|
||||||
// init request params
|
// init request params
|
||||||
if(array_key_exists('function', $_REQUEST) != FALSE)
|
if(array_key_exists('function', $_REQUEST) != FALSE)
|
||||||
{
|
{
|
||||||
$FUNCTION = $_REQUEST['function'];
|
$FUNCTION = $_REQUEST['function'];
|
||||||
}
|
}
|
||||||
if(array_key_exists('settings', $_REQUEST) != FALSE)
|
if(array_key_exists('settings', $_REQUEST) != FALSE)
|
||||||
{
|
{
|
||||||
@@ -33,13 +33,13 @@ if(array_key_exists('settings', $_REQUEST) != FALSE)
|
|||||||
|
|
||||||
// call functions based on requested params
|
// call functions based on requested params
|
||||||
switch ($FUNCTION) {
|
switch ($FUNCTION) {
|
||||||
|
|
||||||
case 'savesettings':
|
case 'savesettings':
|
||||||
|
|
||||||
saveSettings();
|
saveSettings();
|
||||||
break;
|
break;
|
||||||
|
|
||||||
case 'cleanLog':
|
case 'cleanLog':
|
||||||
|
|
||||||
cleanLog($SETTINGS);
|
cleanLog($SETTINGS);
|
||||||
break;
|
break;
|
||||||
@@ -66,7 +66,7 @@ switch ($FUNCTION) {
|
|||||||
// Creates a PHP array from a string representing a python array (input format ['...','...'])
|
// Creates a PHP array from a string representing a python array (input format ['...','...'])
|
||||||
// Only supports:
|
// Only supports:
|
||||||
// - one level arrays, not nested ones
|
// - one level arrays, not nested ones
|
||||||
// - single quotes
|
// - single quotes
|
||||||
function createArray($input){
|
function createArray($input){
|
||||||
|
|
||||||
// empty array
|
// empty array
|
||||||
@@ -81,9 +81,9 @@ function createArray($input){
|
|||||||
$replacement = '';
|
$replacement = '';
|
||||||
|
|
||||||
// remove brackets
|
// remove brackets
|
||||||
$noBrackets = preg_replace($patternBrackets, $replacement, $input);
|
$noBrackets = preg_replace($patternBrackets, $replacement, $input);
|
||||||
|
|
||||||
$options = array();
|
$options = array();
|
||||||
|
|
||||||
// create array
|
// create array
|
||||||
$optionsTmp = explode(",", $noBrackets);
|
$optionsTmp = explode(",", $noBrackets);
|
||||||
@@ -99,7 +99,7 @@ function createArray($input){
|
|||||||
{
|
{
|
||||||
array_push($options, preg_replace($patternQuotes, $replacement, $item) );
|
array_push($options, preg_replace($patternQuotes, $replacement, $item) );
|
||||||
}
|
}
|
||||||
|
|
||||||
return $options;
|
return $options;
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -117,7 +117,7 @@ function printArray ($array) {
|
|||||||
{
|
{
|
||||||
echo $val.', ';
|
echo $val.', ';
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
echo ']<br/>';
|
echo ']<br/>';
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -171,9 +171,9 @@ function checkPermissions($files)
|
|||||||
if(file_exists($file) != 1)
|
if(file_exists($file) != 1)
|
||||||
{
|
{
|
||||||
$message = "File '".$file."' not found or inaccessible. Correct file permissions, create one yourself or generate a new one in 'Settings' by clicking the 'Save' button.";
|
$message = "File '".$file."' not found or inaccessible. Correct file permissions, create one yourself or generate a new one in 'Settings' by clicking the 'Save' button.";
|
||||||
displayMessage($message, TRUE);
|
displayMessage($message, TRUE);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// ----------------------------------------------------------------------------------------
|
// ----------------------------------------------------------------------------------------
|
||||||
@@ -189,8 +189,8 @@ function displayMessage($message, $logAlert = FALSE, $logConsole = TRUE, $logFil
|
|||||||
$message = str_replace(array("\n", "\r", PHP_EOL), '', $message);
|
$message = str_replace(array("\n", "\r", PHP_EOL), '', $message);
|
||||||
|
|
||||||
echo "<script>function escape(html, encode) {
|
echo "<script>function escape(html, encode) {
|
||||||
return html.replace(!encode ? /&(?!#?\w+;)/g : /&/g, '&')
|
return html.replace(!encode ? /&(?!#?\w+;)/g : /&/g, '&')
|
||||||
.replace(/\t/g, '')
|
.replace(/\t/g, '')
|
||||||
}</script>";
|
}</script>";
|
||||||
|
|
||||||
// Javascript Alert pop-up
|
// Javascript Alert pop-up
|
||||||
@@ -210,7 +210,7 @@ function displayMessage($message, $logAlert = FALSE, $logConsole = TRUE, $logFil
|
|||||||
{
|
{
|
||||||
|
|
||||||
if (is_writable($logFolderPath.$log_file)) {
|
if (is_writable($logFolderPath.$log_file)) {
|
||||||
|
|
||||||
|
|
||||||
if(file_exists($logFolderPath.$log_file) != 1) // file doesn't exist, create one
|
if(file_exists($logFolderPath.$log_file) != 1) // file doesn't exist, create one
|
||||||
{
|
{
|
||||||
@@ -219,7 +219,7 @@ function displayMessage($message, $logAlert = FALSE, $logConsole = TRUE, $logFil
|
|||||||
{
|
{
|
||||||
$log = fopen($logFolderPath.$log_file, "a") or die("Unable to open file - Permissions issue!");
|
$log = fopen($logFolderPath.$log_file, "a") or die("Unable to open file - Permissions issue!");
|
||||||
}
|
}
|
||||||
|
|
||||||
fwrite($log, "[".$timestamp. "] " . str_replace('<br>',"\n ",str_replace('<br/>',"\n ",$message)).PHP_EOL."" );
|
fwrite($log, "[".$timestamp. "] " . str_replace('<br>',"\n ",str_replace('<br/>',"\n ",$message)).PHP_EOL."" );
|
||||||
fclose($log);
|
fclose($log);
|
||||||
|
|
||||||
@@ -269,13 +269,13 @@ function addToExecutionQueue($action)
|
|||||||
// equivalent: /logs DELETE
|
// equivalent: /logs DELETE
|
||||||
// 🔺----- API ENDPOINTS SUPERSEDED -----🔺
|
// 🔺----- API ENDPOINTS SUPERSEDED -----🔺
|
||||||
function cleanLog($logFile)
|
function cleanLog($logFile)
|
||||||
{
|
{
|
||||||
global $logFolderPath, $timestamp;
|
global $logFolderPath, $timestamp;
|
||||||
|
|
||||||
$path = "";
|
$path = "";
|
||||||
|
|
||||||
$allowedFiles = ['app.log', 'app_front.log', 'IP_changes.log', 'stdout.log', 'stderr.log', 'app.php_errors.log', 'execution_queue.log', 'db_is_locked.log', 'nginx-error.log', 'cron.log'];
|
$allowedFiles = ['app.log', 'app_front.log', 'IP_changes.log', 'stdout.log', 'stderr.log', 'app.php_errors.log', 'execution_queue.log', 'db_is_locked.log', 'nginx-error.log', 'cron.log'];
|
||||||
|
|
||||||
if(in_array($logFile, $allowedFiles))
|
if(in_array($logFile, $allowedFiles))
|
||||||
{
|
{
|
||||||
$path = $logFolderPath.$logFile;
|
$path = $logFolderPath.$logFile;
|
||||||
@@ -287,11 +287,11 @@ function cleanLog($logFile)
|
|||||||
$file = fopen($path, "w") or die("Unable to open file!");
|
$file = fopen($path, "w") or die("Unable to open file!");
|
||||||
fwrite($file, "");
|
fwrite($file, "");
|
||||||
fclose($file);
|
fclose($file);
|
||||||
displayMessage('File <code>'.$logFile.'</code> purged.', FALSE, TRUE, TRUE, TRUE);
|
displayMessage('File <code>'.$logFile.'</code> purged.', FALSE, TRUE, TRUE, TRUE);
|
||||||
} else
|
} else
|
||||||
{
|
{
|
||||||
displayMessage('File <code>'.$logFile.'</code> is not allowed to be purged.', FALSE, TRUE, TRUE, TRUE);
|
displayMessage('File <code>'.$logFile.'</code> is not allowed to be purged.', FALSE, TRUE, TRUE, TRUE);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
@@ -299,23 +299,23 @@ function cleanLog($logFile)
|
|||||||
// ----------------------------------------------------------------------------------------
|
// ----------------------------------------------------------------------------------------
|
||||||
function saveSettings()
|
function saveSettings()
|
||||||
{
|
{
|
||||||
global $SETTINGS, $FUNCTION, $config_file, $fullConfPath, $configFolderPath, $timestamp;
|
global $SETTINGS, $FUNCTION, $config_file, $fullConfPath, $configFolderPath, $timestamp;
|
||||||
|
|
||||||
// save to the file
|
// save to the file
|
||||||
$new_name = $config_file.'_'.$timestamp.'.backup';
|
$new_name = $config_file.'_'.$timestamp.'.backup';
|
||||||
$new_location = $configFolderPath.$new_name;
|
$new_location = $configFolderPath.'/'.$new_name;
|
||||||
|
|
||||||
if(file_exists( $fullConfPath) != 1)
|
if(file_exists( $fullConfPath) != 1)
|
||||||
{
|
{
|
||||||
displayMessage('File "'.$fullConfPath.'" not found or missing read permissions. Creating a new <code>'.$config_file.'</code> file.', FALSE, TRUE, TRUE, TRUE);
|
displayMessage('File "'.$fullConfPath.'" not found or missing read permissions. Creating a new <code>'.$config_file.'</code> file.', FALSE, TRUE, TRUE, TRUE);
|
||||||
}
|
}
|
||||||
// create a backup copy
|
// create a backup copy
|
||||||
elseif (!copy($fullConfPath, $new_location))
|
elseif (!copy($fullConfPath, $new_location))
|
||||||
{
|
{
|
||||||
displayMessage("Failed to copy file ".$fullConfPath." to ".$new_location." <br/> Check your permissions to allow read/write access to the /config folder.", FALSE, TRUE, TRUE, TRUE);
|
displayMessage("Failed to copy file ".$fullConfPath." to ".$new_location." <br/> Check your permissions to allow read/write access to the /config folder.", FALSE, TRUE, TRUE, TRUE);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
// generate a clean .conf file
|
// generate a clean .conf file
|
||||||
$groups = [];
|
$groups = [];
|
||||||
|
|
||||||
@@ -339,12 +339,12 @@ function saveSettings()
|
|||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
foreach ($decodedSettings as $setting) {
|
foreach ($decodedSettings as $setting) {
|
||||||
if( in_array($setting[0] , $groups) == false) {
|
if( in_array($setting[0] , $groups) == false) {
|
||||||
array_push($groups ,$setting[0]);
|
array_push($groups ,$setting[0]);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// go thru the groups and prepare settings to write to file
|
// go thru the groups and prepare settings to write to file
|
||||||
foreach ($groups as $group) {
|
foreach ($groups as $group) {
|
||||||
$txt .= "\n\n# " . $group;
|
$txt .= "\n\n# " . $group;
|
||||||
@@ -353,20 +353,20 @@ function saveSettings()
|
|||||||
foreach ($decodedSettings as $setting) {
|
foreach ($decodedSettings as $setting) {
|
||||||
$settingGroup = $setting[0];
|
$settingGroup = $setting[0];
|
||||||
$setKey = $setting[1];
|
$setKey = $setting[1];
|
||||||
$dataType = $setting[2];
|
$dataType = $setting[2];
|
||||||
$settingValue = $setting[3];
|
$settingValue = $setting[3];
|
||||||
|
|
||||||
// // Parse the settingType JSON
|
// // Parse the settingType JSON
|
||||||
// $settingType = json_decode($settingTypeJson, true);
|
// $settingType = json_decode($settingTypeJson, true);
|
||||||
|
|
||||||
// Sanity check
|
// Sanity check
|
||||||
if($setKey == "UI_LANG" && $settingValue == "") {
|
if($setKey == "UI_LANG" && $settingValue == "") {
|
||||||
echo "🔴 Error: important settings missing. Refresh the page with 🔃 on the top and try again.";
|
echo "🔴 Error: important settings missing. Refresh the page with 🔃 on the top and try again.";
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
if ($group == $settingGroup) {
|
if ($group == $settingGroup) {
|
||||||
|
|
||||||
if ($dataType == 'string' ) {
|
if ($dataType == 'string' ) {
|
||||||
$val = encode_single_quotes($settingValue);
|
$val = encode_single_quotes($settingValue);
|
||||||
$txt .= $setKey . "='" . $val . "'\n";
|
$txt .= $setKey . "='" . $val . "'\n";
|
||||||
@@ -381,7 +381,7 @@ function saveSettings()
|
|||||||
$txt .= $setKey . "=" . $val . "\n";
|
$txt .= $setKey . "=" . $val . "\n";
|
||||||
} elseif ($dataType == 'array' ) {
|
} elseif ($dataType == 'array' ) {
|
||||||
$temp = '';
|
$temp = '';
|
||||||
|
|
||||||
if(is_array($settingValue) == FALSE)
|
if(is_array($settingValue) == FALSE)
|
||||||
{
|
{
|
||||||
$settingValue = json_decode($settingValue);
|
$settingValue = json_decode($settingValue);
|
||||||
@@ -397,22 +397,22 @@ function saveSettings()
|
|||||||
|
|
||||||
$temp = '['.$temp.']'; // wrap brackets
|
$temp = '['.$temp.']'; // wrap brackets
|
||||||
$txt .= $setKey . "=" . $temp . "\n";
|
$txt .= $setKey . "=" . $temp . "\n";
|
||||||
|
|
||||||
} else {
|
} else {
|
||||||
$txt .= $setKey . "='⭕Not handled⭕'\n";
|
$txt .= $setKey . "='⭕Not handled⭕'\n";
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
$txt = $txt."\n\n";
|
$txt = $txt."\n\n";
|
||||||
$txt = $txt."#-------------------IMPORTANT INFO-------------------#\n";
|
$txt = $txt."#-------------------IMPORTANT INFO-------------------#\n";
|
||||||
$txt = $txt."# This file is ingested by a python script, so if #\n";
|
$txt = $txt."# This file is ingested by a python script, so if #\n";
|
||||||
$txt = $txt."# modified it needs to use python syntax #\n";
|
$txt = $txt."# modified it needs to use python syntax #\n";
|
||||||
$txt = $txt."#-------------------IMPORTANT INFO-------------------#\n";
|
$txt = $txt."#-------------------IMPORTANT INFO-------------------#\n";
|
||||||
|
|
||||||
// open new file and write the new configuration
|
// open new file and write the new configuration
|
||||||
// Create a temporary file
|
// Create a temporary file
|
||||||
$tempConfPath = $fullConfPath . ".tmp";
|
$tempConfPath = $fullConfPath . ".tmp";
|
||||||
|
|
||||||
@@ -426,8 +426,8 @@ function saveSettings()
|
|||||||
fwrite($file, $txt);
|
fwrite($file, $txt);
|
||||||
fclose($file);
|
fclose($file);
|
||||||
|
|
||||||
// displayMessage(lang('settings_saved'),
|
// displayMessage(lang('settings_saved'),
|
||||||
// FALSE, TRUE, TRUE, TRUE);
|
// FALSE, TRUE, TRUE, TRUE);
|
||||||
|
|
||||||
echo "OK";
|
echo "OK";
|
||||||
|
|
||||||
@@ -445,7 +445,7 @@ function getString ($setKey, $default) {
|
|||||||
if ($result )
|
if ($result )
|
||||||
{
|
{
|
||||||
return $result;
|
return $result;
|
||||||
}
|
}
|
||||||
|
|
||||||
return $default;
|
return $default;
|
||||||
}
|
}
|
||||||
@@ -520,14 +520,14 @@ function getDateFromPeriod () {
|
|||||||
$days = "3650"; //10 years
|
$days = "3650"; //10 years
|
||||||
break;
|
break;
|
||||||
default:
|
default:
|
||||||
$days = "1";
|
$days = "1";
|
||||||
}
|
}
|
||||||
|
|
||||||
$periodDateSQL = "-".$days." day";
|
$periodDateSQL = "-".$days." day";
|
||||||
|
|
||||||
return " date('now', '".$periodDateSQL."') ";
|
return " date('now', '".$periodDateSQL."') ";
|
||||||
|
|
||||||
// $period = $_REQUEST['period'];
|
// $period = $_REQUEST['period'];
|
||||||
// return '"'. date ('Y-m-d', strtotime ('+2 day -'. $period) ) .'"';
|
// return '"'. date ('Y-m-d', strtotime ('+2 day -'. $period) ) .'"';
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -537,13 +537,13 @@ function getDateFromPeriod () {
|
|||||||
function quotes ($text) {
|
function quotes ($text) {
|
||||||
return str_replace ('"','""',$text);
|
return str_replace ('"','""',$text);
|
||||||
}
|
}
|
||||||
|
|
||||||
// -------------------------------------------------------------------------------------------
|
// -------------------------------------------------------------------------------------------
|
||||||
function logServerConsole ($text) {
|
function logServerConsole ($text) {
|
||||||
$x = array();
|
$x = array();
|
||||||
$y = $x['__________'. $text .'__________'];
|
$y = $x['__________'. $text .'__________'];
|
||||||
}
|
}
|
||||||
|
|
||||||
// -------------------------------------------------------------------------------------------
|
// -------------------------------------------------------------------------------------------
|
||||||
function handleNull ($text, $default = "") {
|
function handleNull ($text, $default = "") {
|
||||||
if($text == NULL || $text == 'NULL')
|
if($text == NULL || $text == 'NULL')
|
||||||
@@ -553,7 +553,7 @@ function handleNull ($text, $default = "") {
|
|||||||
{
|
{
|
||||||
return $text;
|
return $text;
|
||||||
}
|
}
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// -------------------------------------------------------------------------------------------
|
// -------------------------------------------------------------------------------------------
|
||||||
@@ -581,14 +581,14 @@ function decodeSpecialChars($str) {
|
|||||||
// used in Export CSV
|
// used in Export CSV
|
||||||
function getDevicesColumns(){
|
function getDevicesColumns(){
|
||||||
|
|
||||||
$columns = ["devMac",
|
$columns = ["devMac",
|
||||||
"devName",
|
"devName",
|
||||||
"devOwner",
|
"devOwner",
|
||||||
"devType",
|
"devType",
|
||||||
"devVendor",
|
"devVendor",
|
||||||
"devFavorite",
|
"devFavorite",
|
||||||
"devGroup",
|
"devGroup",
|
||||||
"devComments",
|
"devComments",
|
||||||
"devFirstConnection",
|
"devFirstConnection",
|
||||||
"devLastConnection",
|
"devLastConnection",
|
||||||
"devLastIP",
|
"devLastIP",
|
||||||
@@ -615,8 +615,8 @@ function getDevicesColumns(){
|
|||||||
"devFQDN",
|
"devFQDN",
|
||||||
"devParentRelType",
|
"devParentRelType",
|
||||||
"devReqNicsOnline"
|
"devReqNicsOnline"
|
||||||
];
|
];
|
||||||
|
|
||||||
return $columns;
|
return $columns;
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -646,7 +646,7 @@ function getCache($key) {
|
|||||||
}
|
}
|
||||||
// -------------------------------------------------------------------------------------------
|
// -------------------------------------------------------------------------------------------
|
||||||
function setCache($key, $value, $expireMinutes = 5) {
|
function setCache($key, $value, $expireMinutes = 5) {
|
||||||
setcookie($key, $value, time()+$expireMinutes*60, "/","", 0);
|
setcookie($key, $value, time()+$expireMinutes*60, "/","", 0);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
4
front/php/templates/language/fr_fr.json
Executable file → Normal file
4
front/php/templates/language/fr_fr.json
Executable file → Normal file
@@ -311,7 +311,7 @@
|
|||||||
"Gen_Filter": "Filtrer",
|
"Gen_Filter": "Filtrer",
|
||||||
"Gen_Generate": "Générer",
|
"Gen_Generate": "Générer",
|
||||||
"Gen_InvalidMac": "Adresse MAC invalide.",
|
"Gen_InvalidMac": "Adresse MAC invalide.",
|
||||||
"Gen_Invalid_Value": "",
|
"Gen_Invalid_Value": "Une valeur invalide a été renseignée",
|
||||||
"Gen_LockedDB": "Erreur - La base de données est peut-être verrouillée - Vérifier avec les outils de dév via F12 -> Console ou essayer plus tard.",
|
"Gen_LockedDB": "Erreur - La base de données est peut-être verrouillée - Vérifier avec les outils de dév via F12 -> Console ou essayer plus tard.",
|
||||||
"Gen_NetworkMask": "Masque réseau",
|
"Gen_NetworkMask": "Masque réseau",
|
||||||
"Gen_Offline": "Hors ligne",
|
"Gen_Offline": "Hors ligne",
|
||||||
@@ -762,4 +762,4 @@
|
|||||||
"settings_system_label": "Système",
|
"settings_system_label": "Système",
|
||||||
"settings_update_item_warning": "Mettre à jour la valeur ci-dessous. Veillez à bien suivre le même format qu'auparavant. <b>Il n'y a pas de pas de contrôle.</b>",
|
"settings_update_item_warning": "Mettre à jour la valeur ci-dessous. Veillez à bien suivre le même format qu'auparavant. <b>Il n'y a pas de pas de contrôle.</b>",
|
||||||
"test_event_tooltip": "Enregistrer d'abord vos modifications avant de tester vôtre paramétrage."
|
"test_event_tooltip": "Enregistrer d'abord vos modifications avant de tester vôtre paramétrage."
|
||||||
}
|
}
|
||||||
|
|||||||
4
front/php/templates/language/it_it.json
Executable file → Normal file
4
front/php/templates/language/it_it.json
Executable file → Normal file
@@ -311,7 +311,7 @@
|
|||||||
"Gen_Filter": "Filtro",
|
"Gen_Filter": "Filtro",
|
||||||
"Gen_Generate": "Genera",
|
"Gen_Generate": "Genera",
|
||||||
"Gen_InvalidMac": "Indirizzo Mac non valido.",
|
"Gen_InvalidMac": "Indirizzo Mac non valido.",
|
||||||
"Gen_Invalid_Value": "",
|
"Gen_Invalid_Value": "È stato inserito un valore non valido",
|
||||||
"Gen_LockedDB": "ERRORE: il DB potrebbe essere bloccato, controlla F12 Strumenti di sviluppo -> Console o riprova più tardi.",
|
"Gen_LockedDB": "ERRORE: il DB potrebbe essere bloccato, controlla F12 Strumenti di sviluppo -> Console o riprova più tardi.",
|
||||||
"Gen_NetworkMask": "Maschera di rete",
|
"Gen_NetworkMask": "Maschera di rete",
|
||||||
"Gen_Offline": "Offline",
|
"Gen_Offline": "Offline",
|
||||||
@@ -762,4 +762,4 @@
|
|||||||
"settings_system_label": "Sistema",
|
"settings_system_label": "Sistema",
|
||||||
"settings_update_item_warning": "Aggiorna il valore qui sotto. Fai attenzione a seguire il formato precedente. <b>La convalida non viene eseguita.</b>",
|
"settings_update_item_warning": "Aggiorna il valore qui sotto. Fai attenzione a seguire il formato precedente. <b>La convalida non viene eseguita.</b>",
|
||||||
"test_event_tooltip": "Salva le modifiche prima di provare le nuove impostazioni."
|
"test_event_tooltip": "Salva le modifiche prima di provare le nuove impostazioni."
|
||||||
}
|
}
|
||||||
|
|||||||
File diff suppressed because it is too large
Load Diff
@@ -311,7 +311,7 @@
|
|||||||
"Gen_Filter": "Фильтр",
|
"Gen_Filter": "Фильтр",
|
||||||
"Gen_Generate": "Генерировать",
|
"Gen_Generate": "Генерировать",
|
||||||
"Gen_InvalidMac": "Неверный Mac-адрес.",
|
"Gen_InvalidMac": "Неверный Mac-адрес.",
|
||||||
"Gen_Invalid_Value": "",
|
"Gen_Invalid_Value": "Введено некорректное значение",
|
||||||
"Gen_LockedDB": "ОШИБКА - Возможно, база данных заблокирована. Проверьте инструменты разработчика F12 -> Консоль или повторите попытку позже.",
|
"Gen_LockedDB": "ОШИБКА - Возможно, база данных заблокирована. Проверьте инструменты разработчика F12 -> Консоль или повторите попытку позже.",
|
||||||
"Gen_NetworkMask": "Маска сети",
|
"Gen_NetworkMask": "Маска сети",
|
||||||
"Gen_Offline": "Оффлайн",
|
"Gen_Offline": "Оффлайн",
|
||||||
@@ -762,4 +762,4 @@
|
|||||||
"settings_system_label": "Система",
|
"settings_system_label": "Система",
|
||||||
"settings_update_item_warning": "Обновить значение ниже. Будьте осторожны, следуя предыдущему формату. <b>Проверка не выполняется.</b>",
|
"settings_update_item_warning": "Обновить значение ниже. Будьте осторожны, следуя предыдущему формату. <b>Проверка не выполняется.</b>",
|
||||||
"test_event_tooltip": "Сначала сохраните изменения, прежде чем проверять настройки."
|
"test_event_tooltip": "Сначала сохраните изменения, прежде чем проверять настройки."
|
||||||
}
|
}
|
||||||
|
|||||||
4
front/php/templates/language/uk_ua.json
Executable file → Normal file
4
front/php/templates/language/uk_ua.json
Executable file → Normal file
@@ -311,7 +311,7 @@
|
|||||||
"Gen_Filter": "Фільтр",
|
"Gen_Filter": "Фільтр",
|
||||||
"Gen_Generate": "Генерувати",
|
"Gen_Generate": "Генерувати",
|
||||||
"Gen_InvalidMac": "Недійсна Mac-адреса.",
|
"Gen_InvalidMac": "Недійсна Mac-адреса.",
|
||||||
"Gen_Invalid_Value": "",
|
"Gen_Invalid_Value": "Введено недійсне значення",
|
||||||
"Gen_LockedDB": "ПОМИЛКА – БД може бути заблоковано – перевірте F12 Інструменти розробника -> Консоль або спробуйте пізніше.",
|
"Gen_LockedDB": "ПОМИЛКА – БД може бути заблоковано – перевірте F12 Інструменти розробника -> Консоль або спробуйте пізніше.",
|
||||||
"Gen_NetworkMask": "Маска мережі",
|
"Gen_NetworkMask": "Маска мережі",
|
||||||
"Gen_Offline": "Офлайн",
|
"Gen_Offline": "Офлайн",
|
||||||
@@ -762,4 +762,4 @@
|
|||||||
"settings_system_label": "Система",
|
"settings_system_label": "Система",
|
||||||
"settings_update_item_warning": "Оновіть значення нижче. Слідкуйте за попереднім форматом. <b>Перевірка не виконана.</b>",
|
"settings_update_item_warning": "Оновіть значення нижче. Слідкуйте за попереднім форматом. <b>Перевірка не виконана.</b>",
|
||||||
"test_event_tooltip": "Перш ніж перевіряти налаштування, збережіть зміни."
|
"test_event_tooltip": "Перш ніж перевіряти налаштування, збережіть зміни."
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -12,7 +12,6 @@ from plugin_helper import Plugin_Objects # noqa: E402 [flake8 lint suppression]
|
|||||||
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
|
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
|
||||||
from const import logPath # noqa: E402 [flake8 lint suppression]
|
from const import logPath # noqa: E402 [flake8 lint suppression]
|
||||||
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
|
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
|
||||||
from database import DB # noqa: E402 [flake8 lint suppression]
|
|
||||||
from models.device_instance import DeviceInstance # noqa: E402 [flake8 lint suppression]
|
from models.device_instance import DeviceInstance # noqa: E402 [flake8 lint suppression]
|
||||||
import conf # noqa: E402 [flake8 lint suppression]
|
import conf # noqa: E402 [flake8 lint suppression]
|
||||||
from pytz import timezone # noqa: E402 [flake8 lint suppression]
|
from pytz import timezone # noqa: E402 [flake8 lint suppression]
|
||||||
@@ -98,9 +97,7 @@ def main():
|
|||||||
{"devMac": "00:11:22:33:44:57", "devLastIP": "192.168.1.82"},
|
{"devMac": "00:11:22:33:44:57", "devLastIP": "192.168.1.82"},
|
||||||
]
|
]
|
||||||
else:
|
else:
|
||||||
db = DB()
|
device_handler = DeviceInstance()
|
||||||
db.open()
|
|
||||||
device_handler = DeviceInstance(db)
|
|
||||||
devices = (
|
devices = (
|
||||||
device_handler.getAll()
|
device_handler.getAll()
|
||||||
if get_setting_value("REFRESH_FQDN")
|
if get_setting_value("REFRESH_FQDN")
|
||||||
|
|||||||
@@ -11,7 +11,6 @@ from plugin_helper import Plugin_Objects # noqa: E402 [flake8 lint suppression]
|
|||||||
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
|
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
|
||||||
from const import logPath # noqa: E402 [flake8 lint suppression]
|
from const import logPath # noqa: E402 [flake8 lint suppression]
|
||||||
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
|
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
|
||||||
from database import DB # noqa: E402 [flake8 lint suppression]
|
|
||||||
from models.device_instance import DeviceInstance # noqa: E402 [flake8 lint suppression]
|
from models.device_instance import DeviceInstance # noqa: E402 [flake8 lint suppression]
|
||||||
import conf # noqa: E402 [flake8 lint suppression]
|
import conf # noqa: E402 [flake8 lint suppression]
|
||||||
from pytz import timezone # noqa: E402 [flake8 lint suppression]
|
from pytz import timezone # noqa: E402 [flake8 lint suppression]
|
||||||
@@ -38,15 +37,11 @@ def main():
|
|||||||
|
|
||||||
timeout = get_setting_value('DIGSCAN_RUN_TIMEOUT')
|
timeout = get_setting_value('DIGSCAN_RUN_TIMEOUT')
|
||||||
|
|
||||||
# Create a database connection
|
|
||||||
db = DB() # instance of class DB
|
|
||||||
db.open()
|
|
||||||
|
|
||||||
# Initialize the Plugin obj output file
|
# Initialize the Plugin obj output file
|
||||||
plugin_objects = Plugin_Objects(RESULT_FILE)
|
plugin_objects = Plugin_Objects(RESULT_FILE)
|
||||||
|
|
||||||
# Create a DeviceInstance instance
|
# Create a DeviceInstance instance
|
||||||
device_handler = DeviceInstance(db)
|
device_handler = DeviceInstance()
|
||||||
|
|
||||||
# Retrieve devices
|
# Retrieve devices
|
||||||
if get_setting_value("REFRESH_FQDN"):
|
if get_setting_value("REFRESH_FQDN"):
|
||||||
|
|||||||
@@ -15,7 +15,6 @@ from plugin_helper import Plugin_Objects # noqa: E402 [flake8 lint suppression]
|
|||||||
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
|
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
|
||||||
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
|
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
|
||||||
from const import logPath # noqa: E402 [flake8 lint suppression]
|
from const import logPath # noqa: E402 [flake8 lint suppression]
|
||||||
from database import DB # noqa: E402 [flake8 lint suppression]
|
|
||||||
from models.device_instance import DeviceInstance # noqa: E402 [flake8 lint suppression]
|
from models.device_instance import DeviceInstance # noqa: E402 [flake8 lint suppression]
|
||||||
import conf # noqa: E402 [flake8 lint suppression]
|
import conf # noqa: E402 [flake8 lint suppression]
|
||||||
from pytz import timezone # noqa: E402 [flake8 lint suppression]
|
from pytz import timezone # noqa: E402 [flake8 lint suppression]
|
||||||
@@ -41,15 +40,11 @@ def main():
|
|||||||
args = get_setting_value('ICMP_ARGS')
|
args = get_setting_value('ICMP_ARGS')
|
||||||
in_regex = get_setting_value('ICMP_IN_REGEX')
|
in_regex = get_setting_value('ICMP_IN_REGEX')
|
||||||
|
|
||||||
# Create a database connection
|
|
||||||
db = DB() # instance of class DB
|
|
||||||
db.open()
|
|
||||||
|
|
||||||
# Initialize the Plugin obj output file
|
# Initialize the Plugin obj output file
|
||||||
plugin_objects = Plugin_Objects(RESULT_FILE)
|
plugin_objects = Plugin_Objects(RESULT_FILE)
|
||||||
|
|
||||||
# Create a DeviceInstance instance
|
# Create a DeviceInstance instance
|
||||||
device_handler = DeviceInstance(db)
|
device_handler = DeviceInstance()
|
||||||
|
|
||||||
# Retrieve devices
|
# Retrieve devices
|
||||||
all_devices = device_handler.getAll()
|
all_devices = device_handler.getAll()
|
||||||
|
|||||||
@@ -12,7 +12,6 @@ from plugin_helper import Plugin_Objects # noqa: E402 [flake8 lint suppression]
|
|||||||
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
|
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
|
||||||
from const import logPath # noqa: E402 [flake8 lint suppression]
|
from const import logPath # noqa: E402 [flake8 lint suppression]
|
||||||
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
|
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
|
||||||
from database import DB # noqa: E402 [flake8 lint suppression]
|
|
||||||
from models.device_instance import DeviceInstance # noqa: E402 [flake8 lint suppression]
|
from models.device_instance import DeviceInstance # noqa: E402 [flake8 lint suppression]
|
||||||
import conf # noqa: E402 [flake8 lint suppression]
|
import conf # noqa: E402 [flake8 lint suppression]
|
||||||
from pytz import timezone # noqa: E402 [flake8 lint suppression]
|
from pytz import timezone # noqa: E402 [flake8 lint suppression]
|
||||||
@@ -40,15 +39,11 @@ def main():
|
|||||||
# timeout = get_setting_value('NBLOOKUP_RUN_TIMEOUT')
|
# timeout = get_setting_value('NBLOOKUP_RUN_TIMEOUT')
|
||||||
timeout = 20
|
timeout = 20
|
||||||
|
|
||||||
# Create a database connection
|
|
||||||
db = DB() # instance of class DB
|
|
||||||
db.open()
|
|
||||||
|
|
||||||
# Initialize the Plugin obj output file
|
# Initialize the Plugin obj output file
|
||||||
plugin_objects = Plugin_Objects(RESULT_FILE)
|
plugin_objects = Plugin_Objects(RESULT_FILE)
|
||||||
|
|
||||||
# Create a DeviceInstance instance
|
# Create a DeviceInstance instance
|
||||||
device_handler = DeviceInstance(db)
|
device_handler = DeviceInstance()
|
||||||
|
|
||||||
# Retrieve devices
|
# Retrieve devices
|
||||||
if get_setting_value("REFRESH_FQDN"):
|
if get_setting_value("REFRESH_FQDN"):
|
||||||
|
|||||||
@@ -5,7 +5,6 @@
|
|||||||
import os
|
import os
|
||||||
import subprocess
|
import subprocess
|
||||||
import sys
|
import sys
|
||||||
import hashlib
|
|
||||||
import re
|
import re
|
||||||
import nmap
|
import nmap
|
||||||
|
|
||||||
@@ -17,6 +16,7 @@ from plugin_helper import Plugin_Objects # noqa: E402 [flake8 lint suppression]
|
|||||||
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
|
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
|
||||||
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
|
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
|
||||||
from const import logPath # noqa: E402 [flake8 lint suppression]
|
from const import logPath # noqa: E402 [flake8 lint suppression]
|
||||||
|
from utils.crypto_utils import string_to_mac_hash # noqa: E402 [flake8 lint suppression]
|
||||||
import conf # noqa: E402 [flake8 lint suppression]
|
import conf # noqa: E402 [flake8 lint suppression]
|
||||||
from pytz import timezone # noqa: E402 [flake8 lint suppression]
|
from pytz import timezone # noqa: E402 [flake8 lint suppression]
|
||||||
|
|
||||||
@@ -177,16 +177,6 @@ def parse_nmap_xml(xml_output, interface, fakeMac):
|
|||||||
return devices_list
|
return devices_list
|
||||||
|
|
||||||
|
|
||||||
def string_to_mac_hash(input_string):
|
|
||||||
# Calculate a hash using SHA-256
|
|
||||||
sha256_hash = hashlib.sha256(input_string.encode()).hexdigest()
|
|
||||||
|
|
||||||
# Take the first 12 characters of the hash and format as a MAC address
|
|
||||||
mac_hash = ':'.join(sha256_hash[i:i + 2] for i in range(0, 12, 2))
|
|
||||||
|
|
||||||
return mac_hash
|
|
||||||
|
|
||||||
|
|
||||||
# ===============================================================================
|
# ===============================================================================
|
||||||
# BEGIN
|
# BEGIN
|
||||||
# ===============================================================================
|
# ===============================================================================
|
||||||
|
|||||||
@@ -15,7 +15,6 @@ from plugin_helper import Plugin_Objects # noqa: E402 [flake8 lint suppression]
|
|||||||
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
|
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
|
||||||
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
|
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
|
||||||
from const import logPath # noqa: E402 [flake8 lint suppression]
|
from const import logPath # noqa: E402 [flake8 lint suppression]
|
||||||
from database import DB # noqa: E402 [flake8 lint suppression]
|
|
||||||
from models.device_instance import DeviceInstance # noqa: E402 [flake8 lint suppression]
|
from models.device_instance import DeviceInstance # noqa: E402 [flake8 lint suppression]
|
||||||
import conf # noqa: E402 [flake8 lint suppression]
|
import conf # noqa: E402 [flake8 lint suppression]
|
||||||
from pytz import timezone # noqa: E402 [flake8 lint suppression]
|
from pytz import timezone # noqa: E402 [flake8 lint suppression]
|
||||||
@@ -39,15 +38,11 @@ def main():
|
|||||||
|
|
||||||
timeout = get_setting_value('NSLOOKUP_RUN_TIMEOUT')
|
timeout = get_setting_value('NSLOOKUP_RUN_TIMEOUT')
|
||||||
|
|
||||||
# Create a database connection
|
|
||||||
db = DB() # instance of class DB
|
|
||||||
db.open()
|
|
||||||
|
|
||||||
# Initialize the Plugin obj output file
|
# Initialize the Plugin obj output file
|
||||||
plugin_objects = Plugin_Objects(RESULT_FILE)
|
plugin_objects = Plugin_Objects(RESULT_FILE)
|
||||||
|
|
||||||
# Create a DeviceInstance instance
|
# Create a DeviceInstance instance
|
||||||
device_handler = DeviceInstance(db)
|
device_handler = DeviceInstance()
|
||||||
|
|
||||||
# Retrieve devices
|
# Retrieve devices
|
||||||
if get_setting_value("REFRESH_FQDN"):
|
if get_setting_value("REFRESH_FQDN"):
|
||||||
|
|||||||
@@ -256,13 +256,11 @@ def main():
|
|||||||
start_time = time.time()
|
start_time = time.time()
|
||||||
|
|
||||||
mylog("verbose", [f"[{pluginName}] starting execution"])
|
mylog("verbose", [f"[{pluginName}] starting execution"])
|
||||||
from database import DB
|
|
||||||
from models.device_instance import DeviceInstance
|
from models.device_instance import DeviceInstance
|
||||||
|
|
||||||
db = DB() # instance of class DB
|
|
||||||
db.open()
|
|
||||||
# Create a DeviceInstance instance
|
# Create a DeviceInstance instance
|
||||||
device_handler = DeviceInstance(db)
|
device_handler = DeviceInstance()
|
||||||
# Retrieve configuration settings
|
# Retrieve configuration settings
|
||||||
# these should be self-explanatory
|
# these should be self-explanatory
|
||||||
omada_sites = []
|
omada_sites = []
|
||||||
|
|||||||
@@ -13,9 +13,6 @@ The plugin connects to your Pi-hole’s API and retrieves:
|
|||||||
|
|
||||||
NetAlertX then uses this information to match or create devices in your system.
|
NetAlertX then uses this information to match or create devices in your system.
|
||||||
|
|
||||||
> [!TIP]
|
|
||||||
> Some tip.
|
|
||||||
|
|
||||||
### Quick setup guide
|
### Quick setup guide
|
||||||
|
|
||||||
* You are running **Pi-hole v6** or newer.
|
* You are running **Pi-hole v6** or newer.
|
||||||
@@ -30,21 +27,13 @@ No additional Pi-hole configuration is required.
|
|||||||
|
|
||||||
| Setting Key | Description |
|
| Setting Key | Description |
|
||||||
| ---------------------------- | -------------------------------------------------------------------------------- |
|
| ---------------------------- | -------------------------------------------------------------------------------- |
|
||||||
| **PIHOLEAPI_URL** | Your Pi-hole base URL. |
|
| **PIHOLEAPI_URL** | Your Pi-hole base URL. |
|
||||||
| **PIHOLEAPI_PASSWORD** | The Web UI base64 encoded (en-/decoding handled by the app) admin password. |
|
| **PIHOLEAPI_PASSWORD** | The Web UI base64 encoded (en-/decoding handled by the app) admin password. |
|
||||||
| **PIHOLEAPI_SSL_VERIFY** | Whether to verify HTTPS certificates. Disable only for self-signed certificates. |
|
| **PIHOLEAPI_SSL_VERIFY** | Whether to verify HTTPS certificates. Disable only for self-signed certificates. |
|
||||||
| **PIHOLEAPI_RUN_TIMEOUT** | Request timeout in seconds. |
|
| **PIHOLEAPI_RUN_TIMEOUT** | Request timeout in seconds. |
|
||||||
| **PIHOLEAPI_API_MAXCLIENTS** | Maximum number of devices to request from Pi-hole. Defaults are usually fine. |
|
| **PIHOLEAPI_API_MAXCLIENTS** | Maximum number of devices to request from Pi-hole. Defaults are usually fine. |
|
||||||
|
| **PIHOLEAPI_FAKE_MAC** | Generate FAKE MAC from IP. |
|
||||||
|
|
||||||
### Example Configuration
|
|
||||||
|
|
||||||
| Setting Key | Sample Value |
|
|
||||||
| ---------------------------- | -------------------------------------------------- |
|
|
||||||
| **PIHOLEAPI_URL** | `http://pi.hole/` |
|
|
||||||
| **PIHOLEAPI_PASSWORD** | `passw0rd` |
|
|
||||||
| **PIHOLEAPI_SSL_VERIFY** | `true` |
|
|
||||||
| **PIHOLEAPI_RUN_TIMEOUT** | `30` |
|
|
||||||
| **PIHOLEAPI_API_MAXCLIENTS** | `500` |
|
|
||||||
|
|
||||||
### ⚠️ Troubleshooting
|
### ⚠️ Troubleshooting
|
||||||
|
|
||||||
@@ -110,6 +99,32 @@ Then re-run the plugin.
|
|||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
|
#### ❌ Some devices are missing
|
||||||
|
|
||||||
|
Check:
|
||||||
|
|
||||||
|
* Pi-hole shows devices under **Settings → Network**
|
||||||
|
* NetAlertX logs contain:
|
||||||
|
|
||||||
|
```
|
||||||
|
[PIHOLEAPI] Skipping invalid MAC (see PIHOLEAPI_FAKE_MAC setting) ...
|
||||||
|
```
|
||||||
|
|
||||||
|
If devices are missing:
|
||||||
|
|
||||||
|
* The app skipps devices with invalid MACs
|
||||||
|
* Enable PIHOLEAPI_FAKE_MAC if you want to import these devices with a fake mac and you are not concerned with data inconsistencies later on
|
||||||
|
|
||||||
|
Try enabling PIHOLEAPI_FAKE_MAC:
|
||||||
|
|
||||||
|
```
|
||||||
|
PIHOLEAPI_FAKE_MAC = 1
|
||||||
|
```
|
||||||
|
|
||||||
|
Then re-run the plugin.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
#### ❌ Wrong or missing hostnames
|
#### ❌ Wrong or missing hostnames
|
||||||
|
|
||||||
Pi-hole only reports names it knows from:
|
Pi-hole only reports names it knows from:
|
||||||
@@ -122,7 +137,7 @@ If names are missing, confirm they appear in Pi-hole’s own UI first.
|
|||||||
|
|
||||||
### Notes
|
### Notes
|
||||||
|
|
||||||
- Additional notes, limitations, Author info.
|
- Additional notes, limitations, Author info.
|
||||||
|
|
||||||
- Version: 1.0.0
|
- Version: 1.0.0
|
||||||
- Author: `jokob-sk`, `leiweibau`
|
- Author: `jokob-sk`, `leiweibau`
|
||||||
|
|||||||
@@ -279,6 +279,41 @@
|
|||||||
"string": "Maximum time in seconds to wait for the script to finish. If this time is exceeded the script is aborted."
|
"string": "Maximum time in seconds to wait for the script to finish. If this time is exceeded the script is aborted."
|
||||||
}
|
}
|
||||||
]
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"function": "FAKE_MAC",
|
||||||
|
"type": {
|
||||||
|
"dataType": "boolean",
|
||||||
|
"elements": [
|
||||||
|
{
|
||||||
|
"elementType": "input",
|
||||||
|
"elementOptions": [
|
||||||
|
{
|
||||||
|
"type": "checkbox"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"transformers": []
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"default_value": false,
|
||||||
|
"options": [],
|
||||||
|
"localized": [
|
||||||
|
"name",
|
||||||
|
"description"
|
||||||
|
],
|
||||||
|
"name": [
|
||||||
|
{
|
||||||
|
"language_code": "en_us",
|
||||||
|
"string": "Fake MAC if empty"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"description": [
|
||||||
|
{
|
||||||
|
"language_code": "en_us",
|
||||||
|
"string": "Some PiHole devices don't have a MAC assigned. Enabling the FAKE_MAC setting generates a fake MAC address from the IP address to track devices, but it may cause inconsistencies if IPs change or devices are re-discovered with a different MAC. Static IPs are recommended. Device type and icon might not be detected correctly and some plugins might fail if they depend on a valid MAC address. When unchecked, devices with empty MAC addresses are skipped."
|
||||||
|
}
|
||||||
|
]
|
||||||
}
|
}
|
||||||
],
|
],
|
||||||
"database_column_definitions": [
|
"database_column_definitions": [
|
||||||
|
|||||||
@@ -23,6 +23,7 @@ from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
|
|||||||
from const import logPath # noqa: E402 [flake8 lint suppression]
|
from const import logPath # noqa: E402 [flake8 lint suppression]
|
||||||
import conf # noqa: E402 [flake8 lint suppression]
|
import conf # noqa: E402 [flake8 lint suppression]
|
||||||
from pytz import timezone # noqa: E402 [flake8 lint suppression]
|
from pytz import timezone # noqa: E402 [flake8 lint suppression]
|
||||||
|
from utils.crypto_utils import string_to_mac_hash # noqa: E402 [flake8 lint suppression]
|
||||||
|
|
||||||
# Setup timezone & logger using standard NAX helpers
|
# Setup timezone & logger using standard NAX helpers
|
||||||
conf.tz = timezone(get_setting_value('TIMEZONE'))
|
conf.tz = timezone(get_setting_value('TIMEZONE'))
|
||||||
@@ -42,6 +43,7 @@ PIHOLEAPI_SES_CSRF = None
|
|||||||
PIHOLEAPI_API_MAXCLIENTS = None
|
PIHOLEAPI_API_MAXCLIENTS = None
|
||||||
PIHOLEAPI_VERIFY_SSL = True
|
PIHOLEAPI_VERIFY_SSL = True
|
||||||
PIHOLEAPI_RUN_TIMEOUT = 10
|
PIHOLEAPI_RUN_TIMEOUT = 10
|
||||||
|
PIHOLEAPI_FAKE_MAC = get_setting_value('PIHOLEAPI_FAKE_MAC')
|
||||||
VERSION_DATE = "NAX-PIHOLEAPI-1.0"
|
VERSION_DATE = "NAX-PIHOLEAPI-1.0"
|
||||||
|
|
||||||
|
|
||||||
@@ -222,8 +224,14 @@ def gather_device_entries():
|
|||||||
if ip in iplist:
|
if ip in iplist:
|
||||||
lastQuery = str(now_ts)
|
lastQuery = str(now_ts)
|
||||||
|
|
||||||
|
tmpMac = hwaddr.lower()
|
||||||
|
|
||||||
|
# ensure fake mac if enabled
|
||||||
|
if PIHOLEAPI_FAKE_MAC and is_mac(tmpMac) is False:
|
||||||
|
tmpMac = string_to_mac_hash(ip)
|
||||||
|
|
||||||
entries.append({
|
entries.append({
|
||||||
'mac': hwaddr.lower(),
|
'mac': tmpMac,
|
||||||
'ip': ip,
|
'ip': ip,
|
||||||
'name': name,
|
'name': name,
|
||||||
'macVendor': macVendor,
|
'macVendor': macVendor,
|
||||||
@@ -281,7 +289,7 @@ def main():
|
|||||||
foreignKey=str(entry['mac'])
|
foreignKey=str(entry['mac'])
|
||||||
)
|
)
|
||||||
else:
|
else:
|
||||||
mylog('verbose', [f"[{pluginName}] Skipping invalid MAC: {entry['name']}|{entry['mac']}|{entry['ip']}"])
|
mylog('verbose', [f"[{pluginName}] Skipping invalid MAC (see PIHOLEAPI_FAKE_MAC setting): {entry['name']}|{entry['mac']}|{entry['ip']}"])
|
||||||
|
|
||||||
# Write result file for NetAlertX to ingest
|
# Write result file for NetAlertX to ingest
|
||||||
plugin_objects.write_result_file()
|
plugin_objects.write_result_file()
|
||||||
|
|||||||
@@ -6,7 +6,7 @@ A plugin for importing devices from an SNMP-enabled router or switch. Using SNMP
|
|||||||
|
|
||||||
Specify the following settings in the Settings section of NetAlertX:
|
Specify the following settings in the Settings section of NetAlertX:
|
||||||
|
|
||||||
- `SNMPDSC_routers` - A list of `snmpwalk` commands to execute against IP addresses of routers/switches with SNMP turned on. For example:
|
- `SNMPDSC_routers` - A list of `snmpwalk` commands to execute against IP addresses of routers/switches with SNMP turned on. For example:
|
||||||
|
|
||||||
- `snmpwalk -v 2c -c public -OXsq 192.168.1.1 .1.3.6.1.2.1.3.1.1.2`
|
- `snmpwalk -v 2c -c public -OXsq 192.168.1.1 .1.3.6.1.2.1.3.1.1.2`
|
||||||
- `snmpwalk -v 2c -c public -Oxsq 192.168.1.1 .1.3.6.1.2.1.3.1.1.2` (note: lower case `x`)
|
- `snmpwalk -v 2c -c public -Oxsq 192.168.1.1 .1.3.6.1.2.1.3.1.1.2` (note: lower case `x`)
|
||||||
@@ -14,6 +14,14 @@ Specify the following settings in the Settings section of NetAlertX:
|
|||||||
|
|
||||||
If unsure, please check [snmpwalk examples](https://www.comparitech.com/net-admin/snmpwalk-examples-windows-linux/).
|
If unsure, please check [snmpwalk examples](https://www.comparitech.com/net-admin/snmpwalk-examples-windows-linux/).
|
||||||
|
|
||||||
|
Supported output formats:
|
||||||
|
|
||||||
|
```
|
||||||
|
ipNetToMediaPhysAddress[3][192.168.1.9] 6C:6C:6C:6C:6C:b6C1
|
||||||
|
IP-MIB::ipNetToMediaPhysAddress.17.10.10.3.202 = STRING: f8:81:1a:ef:ef:ef
|
||||||
|
mib-2.3.1.1.2.15.1.192.168.1.14 "2C F4 32 18 61 43 "
|
||||||
|
```
|
||||||
|
|
||||||
### Setup Cisco IOS
|
### Setup Cisco IOS
|
||||||
|
|
||||||
Enable IOS SNMP service and restrict to selected (internal) IP/Subnet.
|
Enable IOS SNMP service and restrict to selected (internal) IP/Subnet.
|
||||||
|
|||||||
@@ -30,7 +30,7 @@ RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
|
|||||||
|
|
||||||
|
|
||||||
def main():
|
def main():
|
||||||
mylog('verbose', ['[SNMPDSC] In script '])
|
mylog('verbose', f"[{pluginName}] In script ")
|
||||||
|
|
||||||
# init global variables
|
# init global variables
|
||||||
global snmpWalkCmds
|
global snmpWalkCmds
|
||||||
@@ -57,7 +57,7 @@ def main():
|
|||||||
commands = [snmpWalkCmds]
|
commands = [snmpWalkCmds]
|
||||||
|
|
||||||
for cmd in commands:
|
for cmd in commands:
|
||||||
mylog('verbose', ['[SNMPDSC] Router snmpwalk command: ', cmd])
|
mylog('verbose', [f"[{pluginName}] Router snmpwalk command: ", cmd])
|
||||||
# split the string, remove white spaces around each item, and exclude any empty strings
|
# split the string, remove white spaces around each item, and exclude any empty strings
|
||||||
snmpwalkArgs = [arg.strip() for arg in cmd.split(' ') if arg.strip()]
|
snmpwalkArgs = [arg.strip() for arg in cmd.split(' ') if arg.strip()]
|
||||||
|
|
||||||
@@ -72,7 +72,7 @@ def main():
|
|||||||
timeout=(timeoutSetting)
|
timeout=(timeoutSetting)
|
||||||
)
|
)
|
||||||
|
|
||||||
mylog('verbose', ['[SNMPDSC] output: ', output])
|
mylog('verbose', [f"[{pluginName}] output: ", output])
|
||||||
|
|
||||||
lines = output.split('\n')
|
lines = output.split('\n')
|
||||||
|
|
||||||
@@ -80,6 +80,8 @@ def main():
|
|||||||
|
|
||||||
tmpSplt = line.split('"')
|
tmpSplt = line.split('"')
|
||||||
|
|
||||||
|
# Expected Format:
|
||||||
|
# mib-2.3.1.1.2.15.1.192.168.1.14 "2C F4 32 18 61 43 "
|
||||||
if len(tmpSplt) == 3:
|
if len(tmpSplt) == 3:
|
||||||
|
|
||||||
ipStr = tmpSplt[0].split('.')[-4:] # Get the last 4 elements to extract the IP
|
ipStr = tmpSplt[0].split('.')[-4:] # Get the last 4 elements to extract the IP
|
||||||
@@ -89,7 +91,7 @@ def main():
|
|||||||
macAddress = ':'.join(macStr)
|
macAddress = ':'.join(macStr)
|
||||||
ipAddress = '.'.join(ipStr)
|
ipAddress = '.'.join(ipStr)
|
||||||
|
|
||||||
mylog('verbose', [f'[SNMPDSC] IP: {ipAddress} MAC: {macAddress}'])
|
mylog('verbose', [f"[{pluginName}] IP: {ipAddress} MAC: {macAddress}"])
|
||||||
|
|
||||||
plugin_objects.add_object(
|
plugin_objects.add_object(
|
||||||
primaryId = handleEmpty(macAddress),
|
primaryId = handleEmpty(macAddress),
|
||||||
@@ -100,8 +102,40 @@ def main():
|
|||||||
foreignKey = handleEmpty(macAddress) # Use the primary ID as the foreign key
|
foreignKey = handleEmpty(macAddress) # Use the primary ID as the foreign key
|
||||||
)
|
)
|
||||||
else:
|
else:
|
||||||
mylog('verbose', ['[SNMPDSC] ipStr does not seem to contain a valid IP:', ipStr])
|
mylog('verbose', [f"[{pluginName}] ipStr does not seem to contain a valid IP:", ipStr])
|
||||||
|
|
||||||
|
# Expected Format:
|
||||||
|
# IP-MIB::ipNetToMediaPhysAddress.17.10.10.3.202 = STRING: f8:81:1a:ef:ef:ef
|
||||||
|
elif "ipNetToMediaPhysAddress" in line and "=" in line and "STRING:" in line:
|
||||||
|
|
||||||
|
# Split on "=" → ["IP-MIB::ipNetToMediaPhysAddress.xxx.xxx.xxx.xxx ", " STRING: aa:bb:cc:dd:ee:ff"]
|
||||||
|
left, right = line.split("=", 1)
|
||||||
|
|
||||||
|
# Extract the MAC (right side)
|
||||||
|
macAddress = right.split("STRING:")[-1].strip()
|
||||||
|
macAddress = normalize_mac(macAddress)
|
||||||
|
|
||||||
|
# Extract IP address from the left side
|
||||||
|
# tail of the OID: last 4 integers = IPv4 address
|
||||||
|
oid_parts = left.strip().split('.')
|
||||||
|
ip_parts = oid_parts[-4:]
|
||||||
|
ipAddress = ".".join(ip_parts)
|
||||||
|
|
||||||
|
mylog('verbose', [f"[{pluginName}] (fallback) IP: {ipAddress} MAC: {macAddress}"])
|
||||||
|
|
||||||
|
plugin_objects.add_object(
|
||||||
|
primaryId = handleEmpty(macAddress),
|
||||||
|
secondaryId = handleEmpty(ipAddress),
|
||||||
|
watched1 = '(unknown)',
|
||||||
|
watched2 = handleEmpty(snmpwalkArgs[6]),
|
||||||
|
extra = handleEmpty(line),
|
||||||
|
foreignKey = handleEmpty(macAddress)
|
||||||
|
)
|
||||||
|
|
||||||
|
continue
|
||||||
|
|
||||||
|
# Expected Format:
|
||||||
|
# ipNetToMediaPhysAddress[3][192.168.1.9] 6C:6C:6C:6C:6C:b6C1
|
||||||
elif line.startswith('ipNetToMediaPhysAddress'):
|
elif line.startswith('ipNetToMediaPhysAddress'):
|
||||||
# Format: snmpwalk -OXsq output
|
# Format: snmpwalk -OXsq output
|
||||||
parts = line.split()
|
parts = line.split()
|
||||||
@@ -110,7 +144,7 @@ def main():
|
|||||||
ipAddress = parts[0].split('[')[-1][:-1]
|
ipAddress = parts[0].split('[')[-1][:-1]
|
||||||
macAddress = normalize_mac(parts[1])
|
macAddress = normalize_mac(parts[1])
|
||||||
|
|
||||||
mylog('verbose', [f'[SNMPDSC] IP: {ipAddress} MAC: {macAddress}'])
|
mylog('verbose', [f"[{pluginName}] IP: {ipAddress} MAC: {macAddress}"])
|
||||||
|
|
||||||
plugin_objects.add_object(
|
plugin_objects.add_object(
|
||||||
primaryId = handleEmpty(macAddress),
|
primaryId = handleEmpty(macAddress),
|
||||||
@@ -121,7 +155,7 @@ def main():
|
|||||||
foreignKey = handleEmpty(macAddress)
|
foreignKey = handleEmpty(macAddress)
|
||||||
)
|
)
|
||||||
|
|
||||||
mylog('verbose', ['[SNMPDSC] Entries found: ', len(plugin_objects)])
|
mylog('verbose', [f"[{pluginName}] Entries found: ", len(plugin_objects)])
|
||||||
|
|
||||||
plugin_objects.write_result_file()
|
plugin_objects.write_result_file()
|
||||||
|
|
||||||
|
|||||||
@@ -13,7 +13,6 @@ from plugin_helper import Plugin_Objects # noqa: E402 [flake8 lint suppression]
|
|||||||
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
|
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
|
||||||
from const import logPath # noqa: E402 [flake8 lint suppression]
|
from const import logPath # noqa: E402 [flake8 lint suppression]
|
||||||
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
|
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
|
||||||
from database import DB # noqa: E402 [flake8 lint suppression]
|
|
||||||
from models.device_instance import DeviceInstance # noqa: E402 [flake8 lint suppression]
|
from models.device_instance import DeviceInstance # noqa: E402 [flake8 lint suppression]
|
||||||
import conf # noqa: E402 [flake8 lint suppression]
|
import conf # noqa: E402 [flake8 lint suppression]
|
||||||
|
|
||||||
@@ -44,12 +43,8 @@ def main():
|
|||||||
|
|
||||||
mylog('verbose', [f'[{pluginName}] broadcast_ips value {broadcast_ips}'])
|
mylog('verbose', [f'[{pluginName}] broadcast_ips value {broadcast_ips}'])
|
||||||
|
|
||||||
# Create a database connection
|
|
||||||
db = DB() # instance of class DB
|
|
||||||
db.open()
|
|
||||||
|
|
||||||
# Create a DeviceInstance instance
|
# Create a DeviceInstance instance
|
||||||
device_handler = DeviceInstance(db)
|
device_handler = DeviceInstance()
|
||||||
|
|
||||||
# Retrieve devices
|
# Retrieve devices
|
||||||
if 'offline' in devices_to_wake:
|
if 'offline' in devices_to_wake:
|
||||||
|
|||||||
@@ -14,7 +14,7 @@ if ! awk '$2 == "/" && $4 ~ /ro/ {found=1} END {exit !found}' /proc/mounts; then
|
|||||||
══════════════════════════════════════════════════════════════════════════════
|
══════════════════════════════════════════════════════════════════════════════
|
||||||
⚠️ Warning: Container is running as read-write, not in read-only mode.
|
⚠️ Warning: Container is running as read-write, not in read-only mode.
|
||||||
|
|
||||||
Please mount the root filesystem as --read-only or use read-only: true
|
Please mount the root filesystem as --read-only or use read_only: true
|
||||||
https://github.com/jokob-sk/NetAlertX/blob/main/docs/docker-troubleshooting/read-only-filesystem.md
|
https://github.com/jokob-sk/NetAlertX/blob/main/docs/docker-troubleshooting/read-only-filesystem.md
|
||||||
══════════════════════════════════════════════════════════════════════════════
|
══════════════════════════════════════════════════════════════════════════════
|
||||||
EOF
|
EOF
|
||||||
|
|||||||
@@ -98,6 +98,7 @@ nav:
|
|||||||
- Sync: API_SYNC.md
|
- Sync: API_SYNC.md
|
||||||
- GraphQL: API_GRAPHQL.md
|
- GraphQL: API_GRAPHQL.md
|
||||||
- DB query: API_DBQUERY.md
|
- DB query: API_DBQUERY.md
|
||||||
|
- MCP: API_MCP.md
|
||||||
- Tests: API_TESTS.md
|
- Tests: API_TESTS.md
|
||||||
- SUPERSEDED OLD API Overview: API_OLD.md
|
- SUPERSEDED OLD API Overview: API_OLD.md
|
||||||
- Integrations:
|
- Integrations:
|
||||||
|
|||||||
@@ -1,3 +1,4 @@
|
|||||||
|
cryptography<40
|
||||||
openwrt-luci-rpc
|
openwrt-luci-rpc
|
||||||
asusrouter
|
asusrouter
|
||||||
aiohttp
|
aiohttp
|
||||||
@@ -30,3 +31,4 @@ urllib3
|
|||||||
httplib2
|
httplib2
|
||||||
gunicorn
|
gunicorn
|
||||||
git+https://github.com/foreign-sub/aiofreepybox.git
|
git+https://github.com/foreign-sub/aiofreepybox.git
|
||||||
|
mcp
|
||||||
|
|||||||
@@ -3,11 +3,12 @@ import sys
|
|||||||
import os
|
import os
|
||||||
|
|
||||||
from flask import Flask, request, jsonify, Response
|
from flask import Flask, request, jsonify, Response
|
||||||
|
from models.device_instance import DeviceInstance # noqa: E402
|
||||||
from flask_cors import CORS
|
from flask_cors import CORS
|
||||||
|
|
||||||
# Register NetAlertX directories
|
# Register NetAlertX directories
|
||||||
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
|
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
|
||||||
sys.path.extend([f"{INSTALL_PATH}/server"])
|
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
|
||||||
|
|
||||||
from logger import mylog # noqa: E402 [flake8 lint suppression]
|
from logger import mylog # noqa: E402 [flake8 lint suppression]
|
||||||
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
|
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
|
||||||
@@ -63,6 +64,12 @@ from .dbquery_endpoint import read_query, write_query, update_query, delete_quer
|
|||||||
from .sync_endpoint import handle_sync_post, handle_sync_get # noqa: E402 [flake8 lint suppression]
|
from .sync_endpoint import handle_sync_post, handle_sync_get # noqa: E402 [flake8 lint suppression]
|
||||||
from .logs_endpoint import clean_log # noqa: E402 [flake8 lint suppression]
|
from .logs_endpoint import clean_log # noqa: E402 [flake8 lint suppression]
|
||||||
from models.user_events_queue_instance import UserEventsQueueInstance # noqa: E402 [flake8 lint suppression]
|
from models.user_events_queue_instance import UserEventsQueueInstance # noqa: E402 [flake8 lint suppression]
|
||||||
|
|
||||||
|
from models.event_instance import EventInstance # noqa: E402 [flake8 lint suppression]
|
||||||
|
# Import tool logic from the MCP/tools module to reuse behavior (no blueprints)
|
||||||
|
from plugin_helper import is_mac # noqa: E402 [flake8 lint suppression]
|
||||||
|
# is_mac is provided in mcp_endpoint and used by those handlers
|
||||||
|
# mcp_endpoint contains helper functions; routes moved into this module to keep a single place for routes
|
||||||
from messaging.in_app import ( # noqa: E402 [flake8 lint suppression]
|
from messaging.in_app import ( # noqa: E402 [flake8 lint suppression]
|
||||||
write_notification,
|
write_notification,
|
||||||
mark_all_notifications_read,
|
mark_all_notifications_read,
|
||||||
@@ -71,9 +78,17 @@ from messaging.in_app import ( # noqa: E402 [flake8 lint suppression]
|
|||||||
delete_notification,
|
delete_notification,
|
||||||
mark_notification_as_read
|
mark_notification_as_read
|
||||||
)
|
)
|
||||||
|
from .mcp_endpoint import ( # noqa: E402 [flake8 lint suppression]
|
||||||
|
mcp_sse,
|
||||||
|
mcp_messages,
|
||||||
|
openapi_spec
|
||||||
|
)
|
||||||
|
# tools and mcp routes have been moved into this module (api_server_start)
|
||||||
|
|
||||||
# Flask application
|
# Flask application
|
||||||
app = Flask(__name__)
|
app = Flask(__name__)
|
||||||
|
|
||||||
|
|
||||||
CORS(
|
CORS(
|
||||||
app,
|
app,
|
||||||
resources={
|
resources={
|
||||||
@@ -87,22 +102,62 @@ CORS(
|
|||||||
r"/dbquery/*": {"origins": "*"},
|
r"/dbquery/*": {"origins": "*"},
|
||||||
r"/messaging/*": {"origins": "*"},
|
r"/messaging/*": {"origins": "*"},
|
||||||
r"/events/*": {"origins": "*"},
|
r"/events/*": {"origins": "*"},
|
||||||
r"/logs/*": {"origins": "*"}
|
r"/logs/*": {"origins": "*"},
|
||||||
|
r"/api/tools/*": {"origins": "*"},
|
||||||
|
r"/auth/*": {"origins": "*"},
|
||||||
|
r"/mcp/*": {"origins": "*"}
|
||||||
},
|
},
|
||||||
supports_credentials=True,
|
supports_credentials=True,
|
||||||
allow_headers=["Authorization", "Content-Type"],
|
allow_headers=["Authorization", "Content-Type"],
|
||||||
)
|
)
|
||||||
|
|
||||||
|
# -------------------------------------------------------------------------------
|
||||||
|
# MCP bridge variables + helpers (moved from mcp_routes)
|
||||||
|
# -------------------------------------------------------------------------------
|
||||||
|
|
||||||
|
BACKEND_PORT = get_setting_value("GRAPHQL_PORT")
|
||||||
|
API_BASE_URL = f"http://localhost:{BACKEND_PORT}"
|
||||||
|
|
||||||
|
|
||||||
|
@app.route('/mcp/sse', methods=['GET', 'POST'])
|
||||||
|
def api_mcp_sse():
|
||||||
|
if not is_authorized():
|
||||||
|
return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403
|
||||||
|
return mcp_sse()
|
||||||
|
|
||||||
|
|
||||||
|
@app.route('/api/mcp/messages', methods=['POST'])
|
||||||
|
def api_mcp_messages():
|
||||||
|
if not is_authorized():
|
||||||
|
return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403
|
||||||
|
return mcp_messages()
|
||||||
|
|
||||||
|
|
||||||
# -------------------------------------------------------------------
|
# -------------------------------------------------------------------
|
||||||
# Custom handler for 404 - Route not found
|
# Custom handler for 404 - Route not found
|
||||||
# -------------------------------------------------------------------
|
# -------------------------------------------------------------------
|
||||||
|
@app.before_request
|
||||||
|
def log_request_info():
|
||||||
|
"""Log details of every incoming request."""
|
||||||
|
# Filter out noisy requests if needed, but user asked for drastic logging
|
||||||
|
mylog("verbose", [f"[HTTP] {request.method} {request.path} from {request.remote_addr}"])
|
||||||
|
# Filter sensitive headers before logging
|
||||||
|
safe_headers = {k: v for k, v in request.headers if k.lower() not in ('authorization', 'cookie', 'x-api-key')}
|
||||||
|
mylog("debug", [f"[HTTP] Headers: {safe_headers}"])
|
||||||
|
if request.method == "POST":
|
||||||
|
# Be careful with large bodies, but log first 1000 chars
|
||||||
|
data = request.get_data(as_text=True)
|
||||||
|
mylog("debug", [f"[HTTP] Body length: {len(data)} chars"])
|
||||||
|
|
||||||
|
|
||||||
@app.errorhandler(404)
|
@app.errorhandler(404)
|
||||||
def not_found(error):
|
def not_found(error):
|
||||||
|
# Get the requested path from the request object instead of error.description
|
||||||
|
requested_url = request.path if request else "unknown"
|
||||||
response = {
|
response = {
|
||||||
"success": False,
|
"success": False,
|
||||||
"error": "API route not found",
|
"error": "API route not found",
|
||||||
"message": f"The requested URL {error.description if hasattr(error, 'description') else ''} was not found on the server.",
|
"message": f"The requested URL {requested_url} was not found on the server.",
|
||||||
}
|
}
|
||||||
return jsonify(response), 404
|
return jsonify(response), 404
|
||||||
|
|
||||||
@@ -125,7 +180,7 @@ def graphql_endpoint():
|
|||||||
if not is_authorized():
|
if not is_authorized():
|
||||||
msg = '[graphql_server] Unauthorized access attempt - make sure your GRAPHQL_PORT and API_TOKEN settings are correct.'
|
msg = '[graphql_server] Unauthorized access attempt - make sure your GRAPHQL_PORT and API_TOKEN settings are correct.'
|
||||||
mylog('verbose', [msg])
|
mylog('verbose', [msg])
|
||||||
return jsonify({"success": False, "message": msg, "error": "Forbidden"}), 401
|
return jsonify({"success": False, "message": msg, "error": "Forbidden"}), 403
|
||||||
|
|
||||||
# Retrieve and log request data
|
# Retrieve and log request data
|
||||||
data = request.get_json()
|
data = request.get_json()
|
||||||
@@ -145,11 +200,12 @@ def graphql_endpoint():
|
|||||||
return jsonify(response)
|
return jsonify(response)
|
||||||
|
|
||||||
|
|
||||||
|
# Tools endpoints are registered via `mcp_endpoint.tools_bp` blueprint.
|
||||||
|
|
||||||
|
|
||||||
# --------------------------
|
# --------------------------
|
||||||
# Settings Endpoints
|
# Settings Endpoints
|
||||||
# --------------------------
|
# --------------------------
|
||||||
|
|
||||||
|
|
||||||
@app.route("/settings/<setKey>", methods=["GET"])
|
@app.route("/settings/<setKey>", methods=["GET"])
|
||||||
def api_get_setting(setKey):
|
def api_get_setting(setKey):
|
||||||
if not is_authorized():
|
if not is_authorized():
|
||||||
@@ -161,8 +217,7 @@ def api_get_setting(setKey):
|
|||||||
# --------------------------
|
# --------------------------
|
||||||
# Device Endpoints
|
# Device Endpoints
|
||||||
# --------------------------
|
# --------------------------
|
||||||
|
@app.route('/mcp/sse/device/<mac>', methods=['GET', 'POST'])
|
||||||
|
|
||||||
@app.route("/device/<mac>", methods=["GET"])
|
@app.route("/device/<mac>", methods=["GET"])
|
||||||
def api_get_device(mac):
|
def api_get_device(mac):
|
||||||
if not is_authorized():
|
if not is_authorized():
|
||||||
@@ -228,11 +283,45 @@ def api_update_device_column(mac):
|
|||||||
return update_device_column(mac, column_name, column_value)
|
return update_device_column(mac, column_name, column_value)
|
||||||
|
|
||||||
|
|
||||||
|
@app.route('/mcp/sse/device/<mac>/set-alias', methods=['POST'])
|
||||||
|
@app.route('/device/<mac>/set-alias', methods=['POST'])
|
||||||
|
def api_device_set_alias(mac):
|
||||||
|
"""Set the device alias - convenience wrapper around update_device_column."""
|
||||||
|
if not is_authorized():
|
||||||
|
return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403
|
||||||
|
data = request.get_json() or {}
|
||||||
|
alias = data.get('alias')
|
||||||
|
if not alias:
|
||||||
|
return jsonify({"success": False, "message": "ERROR: Missing parameters", "error": "alias is required"}), 400
|
||||||
|
return update_device_column(mac, 'devName', alias)
|
||||||
|
|
||||||
|
|
||||||
|
@app.route('/mcp/sse/device/open_ports', methods=['POST'])
|
||||||
|
@app.route('/device/open_ports', methods=['POST'])
|
||||||
|
def api_device_open_ports():
|
||||||
|
"""Get stored NMAP open ports for a target IP or MAC."""
|
||||||
|
if not is_authorized():
|
||||||
|
return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403
|
||||||
|
|
||||||
|
data = request.get_json(silent=True) or {}
|
||||||
|
target = data.get('target')
|
||||||
|
if not target:
|
||||||
|
return jsonify({"success": False, "error": "Target (IP or MAC) is required"}), 400
|
||||||
|
|
||||||
|
device_handler = DeviceInstance()
|
||||||
|
|
||||||
|
# Use DeviceInstance method to get stored open ports
|
||||||
|
open_ports = device_handler.getOpenPorts(target)
|
||||||
|
|
||||||
|
if not open_ports:
|
||||||
|
return jsonify({"success": False, "error": f"No stored open ports for {target}. Run a scan with `/nettools/trigger-scan`"}), 404
|
||||||
|
|
||||||
|
return jsonify({"success": True, "target": target, "open_ports": open_ports})
|
||||||
|
|
||||||
|
|
||||||
# --------------------------
|
# --------------------------
|
||||||
# Devices Collections
|
# Devices Collections
|
||||||
# --------------------------
|
# --------------------------
|
||||||
|
|
||||||
|
|
||||||
@app.route("/devices", methods=["GET"])
|
@app.route("/devices", methods=["GET"])
|
||||||
def api_get_devices():
|
def api_get_devices():
|
||||||
if not is_authorized():
|
if not is_authorized():
|
||||||
@@ -288,6 +377,7 @@ def api_devices_totals():
|
|||||||
return devices_totals()
|
return devices_totals()
|
||||||
|
|
||||||
|
|
||||||
|
@app.route('/mcp/sse/devices/by-status', methods=['GET', 'POST'])
|
||||||
@app.route("/devices/by-status", methods=["GET"])
|
@app.route("/devices/by-status", methods=["GET"])
|
||||||
def api_devices_by_status():
|
def api_devices_by_status():
|
||||||
if not is_authorized():
|
if not is_authorized():
|
||||||
@@ -298,15 +388,93 @@ def api_devices_by_status():
|
|||||||
return devices_by_status(status)
|
return devices_by_status(status)
|
||||||
|
|
||||||
|
|
||||||
|
@app.route('/mcp/sse/devices/search', methods=['POST'])
|
||||||
|
@app.route('/devices/search', methods=['POST'])
|
||||||
|
def api_devices_search():
|
||||||
|
"""Device search: accepts 'query' in JSON and maps to device info/search."""
|
||||||
|
if not is_authorized():
|
||||||
|
return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403
|
||||||
|
|
||||||
|
data = request.get_json(silent=True) or {}
|
||||||
|
query = data.get('query')
|
||||||
|
|
||||||
|
if not query:
|
||||||
|
return jsonify({"error": "Missing 'query' parameter"}), 400
|
||||||
|
|
||||||
|
if is_mac(query):
|
||||||
|
device_data = get_device_data(query)
|
||||||
|
if device_data.status_code == 200:
|
||||||
|
return jsonify({"success": True, "devices": [device_data.get_json()]})
|
||||||
|
else:
|
||||||
|
return jsonify({"success": False, "error": "Device not found"}), 404
|
||||||
|
|
||||||
|
# Create fresh DB instance for this thread
|
||||||
|
device_handler = DeviceInstance()
|
||||||
|
|
||||||
|
matches = device_handler.search(query)
|
||||||
|
|
||||||
|
if not matches:
|
||||||
|
return jsonify({"success": False, "error": "No devices found"}), 404
|
||||||
|
|
||||||
|
return jsonify({"success": True, "devices": matches})
|
||||||
|
|
||||||
|
|
||||||
|
@app.route('/mcp/sse/devices/latest', methods=['GET'])
|
||||||
|
@app.route('/devices/latest', methods=['GET'])
|
||||||
|
def api_devices_latest():
|
||||||
|
"""Get latest device (most recent) - maps to DeviceInstance.getLatest()."""
|
||||||
|
if not is_authorized():
|
||||||
|
return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403
|
||||||
|
|
||||||
|
device_handler = DeviceInstance()
|
||||||
|
|
||||||
|
latest = device_handler.getLatest()
|
||||||
|
|
||||||
|
if not latest:
|
||||||
|
return jsonify({"message": "No devices found"}), 404
|
||||||
|
return jsonify([latest])
|
||||||
|
|
||||||
|
|
||||||
|
@app.route('/mcp/sse/devices/network/topology', methods=['GET'])
|
||||||
|
@app.route('/devices/network/topology', methods=['GET'])
|
||||||
|
def api_devices_network_topology():
|
||||||
|
"""Network topology mapping."""
|
||||||
|
if not is_authorized():
|
||||||
|
return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403
|
||||||
|
|
||||||
|
device_handler = DeviceInstance()
|
||||||
|
|
||||||
|
result = device_handler.getNetworkTopology()
|
||||||
|
|
||||||
|
return jsonify(result)
|
||||||
|
|
||||||
|
|
||||||
# --------------------------
|
# --------------------------
|
||||||
# Net tools
|
# Net tools
|
||||||
# --------------------------
|
# --------------------------
|
||||||
|
@app.route('/mcp/sse/nettools/wakeonlan', methods=['POST'])
|
||||||
@app.route("/nettools/wakeonlan", methods=["POST"])
|
@app.route("/nettools/wakeonlan", methods=["POST"])
|
||||||
def api_wakeonlan():
|
def api_wakeonlan():
|
||||||
if not is_authorized():
|
if not is_authorized():
|
||||||
return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403
|
return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403
|
||||||
|
|
||||||
mac = request.json.get("devMac")
|
data = request.json or {}
|
||||||
|
mac = data.get("devMac")
|
||||||
|
ip = data.get("devLastIP") or data.get('ip')
|
||||||
|
if not mac and ip:
|
||||||
|
|
||||||
|
device_handler = DeviceInstance()
|
||||||
|
|
||||||
|
dev = device_handler.getByIP(ip)
|
||||||
|
|
||||||
|
if not dev or not dev.get('devMac'):
|
||||||
|
return jsonify({"success": False, "message": "ERROR: Device not found", "error": "MAC not resolved"}), 404
|
||||||
|
mac = dev.get('devMac')
|
||||||
|
|
||||||
|
# Validate that we have a valid MAC address
|
||||||
|
if not mac:
|
||||||
|
return jsonify({"success": False, "message": "ERROR: Missing device MAC or IP", "error": "Bad Request"}), 400
|
||||||
|
|
||||||
return wakeonlan(mac)
|
return wakeonlan(mac)
|
||||||
|
|
||||||
|
|
||||||
@@ -367,11 +535,42 @@ def api_internet_info():
|
|||||||
return internet_info()
|
return internet_info()
|
||||||
|
|
||||||
|
|
||||||
|
@app.route('/mcp/sse/nettools/trigger-scan', methods=['POST'])
|
||||||
|
@app.route("/nettools/trigger-scan", methods=["GET"])
|
||||||
|
def api_trigger_scan():
|
||||||
|
if not is_authorized():
|
||||||
|
return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403
|
||||||
|
|
||||||
|
data = request.get_json(silent=True) or {}
|
||||||
|
scan_type = data.get('type', 'ARPSCAN')
|
||||||
|
|
||||||
|
# Validate scan type
|
||||||
|
loaded_plugins = get_setting_value('LOADED_PLUGINS')
|
||||||
|
if scan_type not in loaded_plugins:
|
||||||
|
return jsonify({"success": False, "error": f"Invalid scan type. Must be one of: {', '.join(loaded_plugins)}"}), 400
|
||||||
|
|
||||||
|
queue = UserEventsQueueInstance()
|
||||||
|
|
||||||
|
action = f"run|{scan_type}"
|
||||||
|
|
||||||
|
queue.add_event(action)
|
||||||
|
|
||||||
|
return jsonify({"success": True, "message": f"Scan triggered for type: {scan_type}"}), 200
|
||||||
|
|
||||||
|
|
||||||
|
# --------------------------
|
||||||
|
# MCP Server
|
||||||
|
# --------------------------
|
||||||
|
@app.route('/mcp/sse/openapi.json', methods=['GET'])
|
||||||
|
def api_openapi_spec():
|
||||||
|
if not is_authorized():
|
||||||
|
return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403
|
||||||
|
return openapi_spec()
|
||||||
|
|
||||||
|
|
||||||
# --------------------------
|
# --------------------------
|
||||||
# DB query
|
# DB query
|
||||||
# --------------------------
|
# --------------------------
|
||||||
|
|
||||||
|
|
||||||
@app.route("/dbquery/read", methods=["POST"])
|
@app.route("/dbquery/read", methods=["POST"])
|
||||||
def dbquery_read():
|
def dbquery_read():
|
||||||
if not is_authorized():
|
if not is_authorized():
|
||||||
@@ -394,6 +593,7 @@ def dbquery_write():
|
|||||||
data = request.get_json() or {}
|
data = request.get_json() or {}
|
||||||
raw_sql_b64 = data.get("rawSql")
|
raw_sql_b64 = data.get("rawSql")
|
||||||
if not raw_sql_b64:
|
if not raw_sql_b64:
|
||||||
|
|
||||||
return jsonify({"success": False, "message": "ERROR: Missing parameters", "error": "rawSql is required"}), 400
|
return jsonify({"success": False, "message": "ERROR: Missing parameters", "error": "rawSql is required"}), 400
|
||||||
|
|
||||||
return write_query(raw_sql_b64)
|
return write_query(raw_sql_b64)
|
||||||
@@ -459,11 +659,13 @@ def api_delete_online_history():
|
|||||||
|
|
||||||
@app.route("/logs", methods=["DELETE"])
|
@app.route("/logs", methods=["DELETE"])
|
||||||
def api_clean_log():
|
def api_clean_log():
|
||||||
|
|
||||||
if not is_authorized():
|
if not is_authorized():
|
||||||
return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403
|
return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403
|
||||||
|
|
||||||
file = request.args.get("file")
|
file = request.args.get("file")
|
||||||
if not file:
|
if not file:
|
||||||
|
|
||||||
return jsonify({"success": False, "message": "ERROR: Missing parameters", "error": "Missing 'file' query parameter"}), 400
|
return jsonify({"success": False, "message": "ERROR: Missing parameters", "error": "Missing 'file' query parameter"}), 400
|
||||||
|
|
||||||
return clean_log(file)
|
return clean_log(file)
|
||||||
@@ -498,8 +700,6 @@ def api_add_to_execution_queue():
|
|||||||
# --------------------------
|
# --------------------------
|
||||||
# Device Events
|
# Device Events
|
||||||
# --------------------------
|
# --------------------------
|
||||||
|
|
||||||
|
|
||||||
@app.route("/events/create/<mac>", methods=["POST"])
|
@app.route("/events/create/<mac>", methods=["POST"])
|
||||||
def api_create_event(mac):
|
def api_create_event(mac):
|
||||||
if not is_authorized():
|
if not is_authorized():
|
||||||
@@ -563,6 +763,45 @@ def api_get_events_totals():
|
|||||||
return get_events_totals(period)
|
return get_events_totals(period)
|
||||||
|
|
||||||
|
|
||||||
|
@app.route('/mcp/sse/events/recent', methods=['GET', 'POST'])
|
||||||
|
@app.route('/events/recent', methods=['GET'])
|
||||||
|
def api_events_default_24h():
|
||||||
|
return api_events_recent(24) # Reuse handler
|
||||||
|
|
||||||
|
|
||||||
|
@app.route('/mcp/sse/events/last', methods=['GET', 'POST'])
|
||||||
|
@app.route('/events/last', methods=['GET'])
|
||||||
|
def get_last_events():
|
||||||
|
if not is_authorized():
|
||||||
|
return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403
|
||||||
|
# Create fresh DB instance for this thread
|
||||||
|
event_handler = EventInstance()
|
||||||
|
|
||||||
|
events = event_handler.get_last_n(10)
|
||||||
|
return jsonify({"success": True, "count": len(events), "events": events}), 200
|
||||||
|
|
||||||
|
|
||||||
|
@app.route('/events/<int:hours>', methods=['GET'])
|
||||||
|
def api_events_recent(hours):
|
||||||
|
"""Return events from the last <hours> hours using EventInstance."""
|
||||||
|
|
||||||
|
if not is_authorized():
|
||||||
|
return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403
|
||||||
|
|
||||||
|
# Validate hours input
|
||||||
|
if hours <= 0:
|
||||||
|
return jsonify({"success": False, "error": "Hours must be > 0"}), 400
|
||||||
|
try:
|
||||||
|
# Create fresh DB instance for this thread
|
||||||
|
event_handler = EventInstance()
|
||||||
|
|
||||||
|
events = event_handler.get_by_hours(hours)
|
||||||
|
|
||||||
|
return jsonify({"success": True, "hours": hours, "count": len(events), "events": events}), 200
|
||||||
|
|
||||||
|
except Exception as ex:
|
||||||
|
return jsonify({"success": False, "error": str(ex)}), 500
|
||||||
|
|
||||||
# --------------------------
|
# --------------------------
|
||||||
# Sessions
|
# Sessions
|
||||||
# --------------------------
|
# --------------------------
|
||||||
@@ -744,6 +983,23 @@ def sync_endpoint():
|
|||||||
return jsonify({"success": False, "message": "ERROR: No allowed", "error": "Method Not Allowed"}), 405
|
return jsonify({"success": False, "message": "ERROR: No allowed", "error": "Method Not Allowed"}), 405
|
||||||
|
|
||||||
|
|
||||||
|
# --------------------------
|
||||||
|
# Auth endpoint
|
||||||
|
# --------------------------
|
||||||
|
@app.route("/auth", methods=["GET"])
|
||||||
|
def check_auth():
|
||||||
|
if not is_authorized():
|
||||||
|
return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403
|
||||||
|
|
||||||
|
elif request.method == "GET":
|
||||||
|
return jsonify({"success": True, "message": "Authentication check successful"}), 200
|
||||||
|
else:
|
||||||
|
msg = "[sync endpoint] Method Not Allowed"
|
||||||
|
write_notification(msg, "alert")
|
||||||
|
mylog("verbose", [msg])
|
||||||
|
return jsonify({"success": False, "message": "ERROR: No allowed", "error": "Method Not Allowed"}), 405
|
||||||
|
|
||||||
|
|
||||||
# --------------------------
|
# --------------------------
|
||||||
# Background Server Start
|
# Background Server Start
|
||||||
# --------------------------
|
# --------------------------
|
||||||
@@ -775,3 +1031,9 @@ def start_server(graphql_port, app_state):
|
|||||||
|
|
||||||
# Update the state to indicate the server has started
|
# Update the state to indicate the server has started
|
||||||
app_state = updateState("Process: Idle", None, None, None, 1)
|
app_state = updateState("Process: Idle", None, None, None, 1)
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
# This block is for running the server directly for testing purposes
|
||||||
|
# In production, start_server is called from api.py
|
||||||
|
pass
|
||||||
|
|||||||
@@ -228,7 +228,8 @@ def devices_totals():
|
|||||||
|
|
||||||
def devices_by_status(status=None):
|
def devices_by_status(status=None):
|
||||||
"""
|
"""
|
||||||
Return devices filtered by status.
|
Return devices filtered by status. Returns all if no status provided.
|
||||||
|
Possible statuses: my, connected, favorites, new, down, archived
|
||||||
"""
|
"""
|
||||||
|
|
||||||
conn = get_temp_db_connection()
|
conn = get_temp_db_connection()
|
||||||
|
|||||||
207
server/api_server/mcp_endpoint.py
Normal file
207
server/api_server/mcp_endpoint.py
Normal file
@@ -0,0 +1,207 @@
|
|||||||
|
#!/usr/bin/env python
|
||||||
|
|
||||||
|
import threading
|
||||||
|
from flask import Blueprint, request, jsonify, Response, stream_with_context
|
||||||
|
from helper import get_setting_value
|
||||||
|
from helper import mylog
|
||||||
|
# from .events_endpoint import get_events # will import locally where needed
|
||||||
|
import requests
|
||||||
|
import json
|
||||||
|
import uuid
|
||||||
|
import queue
|
||||||
|
|
||||||
|
# Blueprints
|
||||||
|
mcp_bp = Blueprint('mcp', __name__)
|
||||||
|
tools_bp = Blueprint('tools', __name__)
|
||||||
|
|
||||||
|
mcp_sessions = {}
|
||||||
|
mcp_sessions_lock = threading.Lock()
|
||||||
|
|
||||||
|
|
||||||
|
def check_auth():
|
||||||
|
token = request.headers.get("Authorization")
|
||||||
|
expected_token = f"Bearer {get_setting_value('API_TOKEN')}"
|
||||||
|
return token == expected_token
|
||||||
|
|
||||||
|
|
||||||
|
# --------------------------
|
||||||
|
# Specs
|
||||||
|
# --------------------------
|
||||||
|
def openapi_spec():
|
||||||
|
# Spec matching actual available routes for MCP tools
|
||||||
|
mylog("verbose", ["[MCP] OpenAPI spec requested"])
|
||||||
|
spec = {
|
||||||
|
"openapi": "3.0.0",
|
||||||
|
"info": {"title": "NetAlertX Tools", "version": "1.1.0"},
|
||||||
|
"servers": [{"url": "/"}],
|
||||||
|
"paths": {
|
||||||
|
"/devices/by-status": {"post": {"operationId": "list_devices"}},
|
||||||
|
"/device/{mac}": {"post": {"operationId": "get_device_info"}},
|
||||||
|
"/devices/search": {"post": {"operationId": "search_devices"}},
|
||||||
|
"/devices/latest": {"get": {"operationId": "get_latest_device"}},
|
||||||
|
"/nettools/trigger-scan": {"post": {"operationId": "trigger_scan"}},
|
||||||
|
"/device/open_ports": {"post": {"operationId": "get_open_ports"}},
|
||||||
|
"/devices/network/topology": {"get": {"operationId": "get_network_topology"}},
|
||||||
|
"/events/recent": {"get": {"operationId": "get_recent_alerts"}, "post": {"operationId": "get_recent_alerts"}},
|
||||||
|
"/events/last": {"get": {"operationId": "get_last_events"}, "post": {"operationId": "get_last_events"}},
|
||||||
|
"/device/{mac}/set-alias": {"post": {"operationId": "set_device_alias"}},
|
||||||
|
"/nettools/wakeonlan": {"post": {"operationId": "wol_wake_device"}}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return jsonify(spec)
|
||||||
|
|
||||||
|
|
||||||
|
# --------------------------
|
||||||
|
# MCP SSE/JSON-RPC Endpoint
|
||||||
|
# --------------------------
|
||||||
|
|
||||||
|
|
||||||
|
# Sessions for SSE
|
||||||
|
_openapi_spec_cache = None
|
||||||
|
API_BASE_URL = f"http://localhost:{get_setting_value('GRAPHQL_PORT')}"
|
||||||
|
|
||||||
|
|
||||||
|
def get_openapi_spec():
|
||||||
|
global _openapi_spec_cache
|
||||||
|
|
||||||
|
if _openapi_spec_cache:
|
||||||
|
return _openapi_spec_cache
|
||||||
|
try:
|
||||||
|
r = requests.get(f"{API_BASE_URL}/mcp/openapi.json", timeout=10)
|
||||||
|
r.raise_for_status()
|
||||||
|
_openapi_spec_cache = r.json()
|
||||||
|
return _openapi_spec_cache
|
||||||
|
except Exception as e:
|
||||||
|
mylog("none", [f"[MCP] Failed to fetch OpenAPI spec: {e}"])
|
||||||
|
return None
|
||||||
|
|
||||||
|
|
||||||
|
def map_openapi_to_mcp_tools(spec):
|
||||||
|
tools = []
|
||||||
|
if not spec or 'paths' not in spec:
|
||||||
|
return tools
|
||||||
|
for path, methods in spec['paths'].items():
|
||||||
|
for method, details in methods.items():
|
||||||
|
if 'operationId' in details:
|
||||||
|
tool = {'name': details['operationId'], 'description': details.get('description', ''), 'inputSchema': {'type': 'object', 'properties': {}, 'required': []}}
|
||||||
|
if 'requestBody' in details:
|
||||||
|
content = details['requestBody'].get('content', {})
|
||||||
|
if 'application/json' in content:
|
||||||
|
schema = content['application/json'].get('schema', {})
|
||||||
|
tool['inputSchema'] = schema.copy()
|
||||||
|
if 'parameters' in details:
|
||||||
|
for param in details['parameters']:
|
||||||
|
if param.get('in') == 'query':
|
||||||
|
tool['inputSchema']['properties'][param['name']] = {'type': param.get('schema', {}).get('type', 'string'), 'description': param.get('description', '')}
|
||||||
|
if param.get('required'):
|
||||||
|
tool['inputSchema']['required'].append(param['name'])
|
||||||
|
tools.append(tool)
|
||||||
|
return tools
|
||||||
|
|
||||||
|
|
||||||
|
def process_mcp_request(data):
|
||||||
|
method = data.get('method')
|
||||||
|
msg_id = data.get('id')
|
||||||
|
if method == 'initialize':
|
||||||
|
return {'jsonrpc': '2.0', 'id': msg_id, 'result': {'protocolVersion': '2024-11-05', 'capabilities': {'tools': {}}, 'serverInfo': {'name': 'NetAlertX', 'version': '1.0.0'}}}
|
||||||
|
if method == 'notifications/initialized':
|
||||||
|
return None
|
||||||
|
if method == 'tools/list':
|
||||||
|
spec = get_openapi_spec()
|
||||||
|
tools = map_openapi_to_mcp_tools(spec)
|
||||||
|
return {'jsonrpc': '2.0', 'id': msg_id, 'result': {'tools': tools}}
|
||||||
|
if method == 'tools/call':
|
||||||
|
params = data.get('params', {})
|
||||||
|
tool_name = params.get('name')
|
||||||
|
tool_args = params.get('arguments', {})
|
||||||
|
spec = get_openapi_spec()
|
||||||
|
target_path = None
|
||||||
|
target_method = None
|
||||||
|
if spec and 'paths' in spec:
|
||||||
|
for path, methods in spec['paths'].items():
|
||||||
|
for m, details in methods.items():
|
||||||
|
if details.get('operationId') == tool_name:
|
||||||
|
target_path = path
|
||||||
|
target_method = m.upper()
|
||||||
|
break
|
||||||
|
if target_path:
|
||||||
|
break
|
||||||
|
if not target_path:
|
||||||
|
return {'jsonrpc': '2.0', 'id': msg_id, 'error': {'code': -32601, 'message': f"Tool {tool_name} not found"}}
|
||||||
|
try:
|
||||||
|
headers = {'Content-Type': 'application/json'}
|
||||||
|
if 'Authorization' in request.headers:
|
||||||
|
headers['Authorization'] = request.headers['Authorization']
|
||||||
|
url = f"{API_BASE_URL}{target_path}"
|
||||||
|
if target_method == 'POST':
|
||||||
|
api_res = requests.post(url, json=tool_args, headers=headers, timeout=30)
|
||||||
|
else:
|
||||||
|
api_res = requests.get(url, params=tool_args, headers=headers, timeout=30)
|
||||||
|
content = []
|
||||||
|
try:
|
||||||
|
json_content = api_res.json()
|
||||||
|
content.append({'type': 'text', 'text': json.dumps(json_content, indent=2)})
|
||||||
|
except Exception as e:
|
||||||
|
mylog("none", [f"[MCP] Failed to parse API response as JSON: {e}"])
|
||||||
|
content.append({'type': 'text', 'text': api_res.text})
|
||||||
|
is_error = api_res.status_code >= 400
|
||||||
|
return {'jsonrpc': '2.0', 'id': msg_id, 'result': {'content': content, 'isError': is_error}}
|
||||||
|
except Exception as e:
|
||||||
|
mylog("none", [f"[MCP] Error calling tool {tool_name}: {e}"])
|
||||||
|
return {'jsonrpc': '2.0', 'id': msg_id, 'result': {'content': [{'type': 'text', 'text': f"Error calling tool: {str(e)}"}], 'isError': True}}
|
||||||
|
if method == 'ping':
|
||||||
|
return {'jsonrpc': '2.0', 'id': msg_id, 'result': {}}
|
||||||
|
if msg_id:
|
||||||
|
return {'jsonrpc': '2.0', 'id': msg_id, 'error': {'code': -32601, 'message': 'Method not found'}}
|
||||||
|
|
||||||
|
|
||||||
|
def mcp_messages():
|
||||||
|
session_id = request.args.get('session_id')
|
||||||
|
if not session_id:
|
||||||
|
return jsonify({"error": "Missing session_id"}), 400
|
||||||
|
with mcp_sessions_lock:
|
||||||
|
if session_id not in mcp_sessions:
|
||||||
|
return jsonify({"error": "Session not found"}), 404
|
||||||
|
q = mcp_sessions[session_id]
|
||||||
|
data = request.json
|
||||||
|
if not data:
|
||||||
|
return jsonify({"error": "Invalid JSON"}), 400
|
||||||
|
response = process_mcp_request(data)
|
||||||
|
if response:
|
||||||
|
q.put(response)
|
||||||
|
return jsonify({"status": "accepted"}), 202
|
||||||
|
|
||||||
|
|
||||||
|
def mcp_sse():
|
||||||
|
if request.method == 'POST':
|
||||||
|
try:
|
||||||
|
data = request.get_json(silent=True)
|
||||||
|
if data and 'method' in data and 'jsonrpc' in data:
|
||||||
|
response = process_mcp_request(data)
|
||||||
|
if response:
|
||||||
|
return jsonify(response)
|
||||||
|
else:
|
||||||
|
return '', 202
|
||||||
|
except Exception as e:
|
||||||
|
mylog("none", [f"[MCP] SSE POST processing error: {e}"])
|
||||||
|
return jsonify({'status': 'ok', 'message': 'MCP SSE endpoint active'}), 200
|
||||||
|
|
||||||
|
session_id = uuid.uuid4().hex
|
||||||
|
q = queue.Queue()
|
||||||
|
with mcp_sessions_lock:
|
||||||
|
mcp_sessions[session_id] = q
|
||||||
|
|
||||||
|
def stream():
|
||||||
|
yield f"event: endpoint\ndata: /mcp/messages?session_id={session_id}\n\n"
|
||||||
|
try:
|
||||||
|
while True:
|
||||||
|
try:
|
||||||
|
message = q.get(timeout=20)
|
||||||
|
yield f"event: message\ndata: {json.dumps(message)}\n\n"
|
||||||
|
except queue.Empty:
|
||||||
|
yield ": keep-alive\n\n"
|
||||||
|
except GeneratorExit:
|
||||||
|
with mcp_sessions_lock:
|
||||||
|
if session_id in mcp_sessions:
|
||||||
|
del mcp_sessions[session_id]
|
||||||
|
return Response(stream_with_context(stream()), mimetype='text/event-stream')
|
||||||
304
server/api_server/mcp_routes.py
Normal file
304
server/api_server/mcp_routes.py
Normal file
@@ -0,0 +1,304 @@
|
|||||||
|
"""MCP bridge routes exposing NetAlertX tool endpoints via JSON-RPC."""
|
||||||
|
|
||||||
|
import json
|
||||||
|
import uuid
|
||||||
|
import queue
|
||||||
|
import requests
|
||||||
|
import threading
|
||||||
|
import logging
|
||||||
|
from flask import Blueprint, request, Response, stream_with_context, jsonify
|
||||||
|
from helper import get_setting_value
|
||||||
|
|
||||||
|
mcp_bp = Blueprint('mcp', __name__)
|
||||||
|
|
||||||
|
# Store active sessions: session_id -> Queue
|
||||||
|
sessions = {}
|
||||||
|
sessions_lock = threading.Lock()
|
||||||
|
|
||||||
|
# Cache for OpenAPI spec to avoid fetching on every request
|
||||||
|
openapi_spec_cache = None
|
||||||
|
|
||||||
|
BACKEND_PORT = get_setting_value("GRAPHQL_PORT")
|
||||||
|
|
||||||
|
API_BASE_URL = f"http://localhost:{BACKEND_PORT}/api/tools"
|
||||||
|
|
||||||
|
|
||||||
|
def get_openapi_spec():
|
||||||
|
"""Fetch and cache the tools OpenAPI specification from the local API server."""
|
||||||
|
global openapi_spec_cache
|
||||||
|
if openapi_spec_cache:
|
||||||
|
return openapi_spec_cache
|
||||||
|
|
||||||
|
try:
|
||||||
|
# Fetch from local server
|
||||||
|
# We use localhost because this code runs on the server
|
||||||
|
response = requests.get(f"{API_BASE_URL}/openapi.json", timeout=10)
|
||||||
|
response.raise_for_status()
|
||||||
|
openapi_spec_cache = response.json()
|
||||||
|
return openapi_spec_cache
|
||||||
|
except Exception as e:
|
||||||
|
print(f"Error fetching OpenAPI spec: {e}")
|
||||||
|
return None
|
||||||
|
|
||||||
|
|
||||||
|
def map_openapi_to_mcp_tools(spec):
|
||||||
|
"""Convert OpenAPI paths into MCP tool descriptors."""
|
||||||
|
tools = []
|
||||||
|
if not spec or "paths" not in spec:
|
||||||
|
return tools
|
||||||
|
|
||||||
|
for path, methods in spec["paths"].items():
|
||||||
|
for method, details in methods.items():
|
||||||
|
if "operationId" in details:
|
||||||
|
tool = {
|
||||||
|
"name": details["operationId"],
|
||||||
|
"description": details.get("description", details.get("summary", "")),
|
||||||
|
"inputSchema": {
|
||||||
|
"type": "object",
|
||||||
|
"properties": {},
|
||||||
|
"required": []
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
# Extract parameters from requestBody if present
|
||||||
|
if "requestBody" in details:
|
||||||
|
content = details["requestBody"].get("content", {})
|
||||||
|
if "application/json" in content:
|
||||||
|
schema = content["application/json"].get("schema", {})
|
||||||
|
tool["inputSchema"] = schema.copy()
|
||||||
|
if "properties" not in tool["inputSchema"]:
|
||||||
|
tool["inputSchema"]["properties"] = {}
|
||||||
|
if "required" not in tool["inputSchema"]:
|
||||||
|
tool["inputSchema"]["required"] = []
|
||||||
|
|
||||||
|
# Extract parameters from 'parameters' list (query/path params) - simplistic support
|
||||||
|
if "parameters" in details:
|
||||||
|
for param in details["parameters"]:
|
||||||
|
if param.get("in") == "query":
|
||||||
|
tool["inputSchema"]["properties"][param["name"]] = {
|
||||||
|
"type": param.get("schema", {}).get("type", "string"),
|
||||||
|
"description": param.get("description", "")
|
||||||
|
}
|
||||||
|
if param.get("required"):
|
||||||
|
if "required" not in tool["inputSchema"]:
|
||||||
|
tool["inputSchema"]["required"] = []
|
||||||
|
tool["inputSchema"]["required"].append(param["name"])
|
||||||
|
|
||||||
|
tools.append(tool)
|
||||||
|
return tools
|
||||||
|
|
||||||
|
|
||||||
|
def process_mcp_request(data):
|
||||||
|
"""Handle incoming MCP JSON-RPC requests and route them to tools."""
|
||||||
|
method = data.get("method")
|
||||||
|
msg_id = data.get("id")
|
||||||
|
|
||||||
|
response = None
|
||||||
|
|
||||||
|
if method == "initialize":
|
||||||
|
response = {
|
||||||
|
"jsonrpc": "2.0",
|
||||||
|
"id": msg_id,
|
||||||
|
"result": {
|
||||||
|
"protocolVersion": "2024-11-05",
|
||||||
|
"capabilities": {
|
||||||
|
"tools": {}
|
||||||
|
},
|
||||||
|
"serverInfo": {
|
||||||
|
"name": "NetAlertX",
|
||||||
|
"version": "1.0.0"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
elif method == "notifications/initialized":
|
||||||
|
# No response needed for notification
|
||||||
|
pass
|
||||||
|
|
||||||
|
elif method == "tools/list":
|
||||||
|
spec = get_openapi_spec()
|
||||||
|
tools = map_openapi_to_mcp_tools(spec)
|
||||||
|
response = {
|
||||||
|
"jsonrpc": "2.0",
|
||||||
|
"id": msg_id,
|
||||||
|
"result": {
|
||||||
|
"tools": tools
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
elif method == "tools/call":
|
||||||
|
params = data.get("params", {})
|
||||||
|
tool_name = params.get("name")
|
||||||
|
tool_args = params.get("arguments", {})
|
||||||
|
|
||||||
|
# Find the endpoint for this tool
|
||||||
|
spec = get_openapi_spec()
|
||||||
|
target_path = None
|
||||||
|
target_method = None
|
||||||
|
|
||||||
|
if spec and "paths" in spec:
|
||||||
|
for path, methods in spec["paths"].items():
|
||||||
|
for m, details in methods.items():
|
||||||
|
if details.get("operationId") == tool_name:
|
||||||
|
target_path = path
|
||||||
|
target_method = m.upper()
|
||||||
|
break
|
||||||
|
if target_path:
|
||||||
|
break
|
||||||
|
|
||||||
|
if target_path:
|
||||||
|
try:
|
||||||
|
# Make the request to the local API
|
||||||
|
# We forward the Authorization header from the incoming request if present
|
||||||
|
headers = {
|
||||||
|
"Content-Type": "application/json"
|
||||||
|
}
|
||||||
|
|
||||||
|
if "Authorization" in request.headers:
|
||||||
|
headers["Authorization"] = request.headers["Authorization"]
|
||||||
|
|
||||||
|
url = f"{API_BASE_URL}{target_path}"
|
||||||
|
|
||||||
|
if target_method == "POST":
|
||||||
|
api_res = requests.post(url, json=tool_args, headers=headers, timeout=30)
|
||||||
|
elif target_method == "GET":
|
||||||
|
api_res = requests.get(url, params=tool_args, headers=headers, timeout=30)
|
||||||
|
else:
|
||||||
|
api_res = None
|
||||||
|
|
||||||
|
if api_res:
|
||||||
|
content = []
|
||||||
|
try:
|
||||||
|
json_content = api_res.json()
|
||||||
|
content.append({
|
||||||
|
"type": "text",
|
||||||
|
"text": json.dumps(json_content, indent=2)
|
||||||
|
})
|
||||||
|
except (ValueError, json.JSONDecodeError):
|
||||||
|
content.append({
|
||||||
|
"type": "text",
|
||||||
|
"text": api_res.text
|
||||||
|
})
|
||||||
|
|
||||||
|
is_error = api_res.status_code >= 400
|
||||||
|
response = {
|
||||||
|
"jsonrpc": "2.0",
|
||||||
|
"id": msg_id,
|
||||||
|
"result": {
|
||||||
|
"content": content,
|
||||||
|
"isError": is_error
|
||||||
|
}
|
||||||
|
}
|
||||||
|
else:
|
||||||
|
response = {
|
||||||
|
"jsonrpc": "2.0",
|
||||||
|
"id": msg_id,
|
||||||
|
"error": {"code": -32601, "message": f"Method {target_method} not supported"}
|
||||||
|
}
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
response = {
|
||||||
|
"jsonrpc": "2.0",
|
||||||
|
"id": msg_id,
|
||||||
|
"result": {
|
||||||
|
"content": [{"type": "text", "text": f"Error calling tool: {str(e)}"}],
|
||||||
|
"isError": True
|
||||||
|
}
|
||||||
|
}
|
||||||
|
else:
|
||||||
|
response = {
|
||||||
|
"jsonrpc": "2.0",
|
||||||
|
"id": msg_id,
|
||||||
|
"error": {"code": -32601, "message": f"Tool {tool_name} not found"}
|
||||||
|
}
|
||||||
|
|
||||||
|
elif method == "ping":
|
||||||
|
response = {
|
||||||
|
"jsonrpc": "2.0",
|
||||||
|
"id": msg_id,
|
||||||
|
"result": {}
|
||||||
|
}
|
||||||
|
|
||||||
|
else:
|
||||||
|
# Unknown method
|
||||||
|
if msg_id: # Only respond if it's a request (has id)
|
||||||
|
response = {
|
||||||
|
"jsonrpc": "2.0",
|
||||||
|
"id": msg_id,
|
||||||
|
"error": {"code": -32601, "message": "Method not found"}
|
||||||
|
}
|
||||||
|
|
||||||
|
return response
|
||||||
|
|
||||||
|
|
||||||
|
@mcp_bp.route('/sse', methods=['GET', 'POST'])
|
||||||
|
def handle_sse():
|
||||||
|
"""Expose an SSE endpoint that streams MCP responses to connected clients."""
|
||||||
|
if request.method == 'POST':
|
||||||
|
# Handle verification or keep-alive pings
|
||||||
|
try:
|
||||||
|
data = request.get_json(silent=True)
|
||||||
|
if data and "method" in data and "jsonrpc" in data:
|
||||||
|
response = process_mcp_request(data)
|
||||||
|
if response:
|
||||||
|
return jsonify(response)
|
||||||
|
else:
|
||||||
|
# Notification or no response needed
|
||||||
|
return "", 202
|
||||||
|
except Exception as e:
|
||||||
|
# Log but don't fail - malformed requests shouldn't crash the endpoint
|
||||||
|
logging.getLogger(__name__).debug(f"SSE POST processing error: {e}")
|
||||||
|
|
||||||
|
return jsonify({"status": "ok", "message": "MCP SSE endpoint active"}), 200
|
||||||
|
|
||||||
|
session_id = uuid.uuid4().hex
|
||||||
|
q = queue.Queue()
|
||||||
|
|
||||||
|
with sessions_lock:
|
||||||
|
sessions[session_id] = q
|
||||||
|
|
||||||
|
def stream():
|
||||||
|
"""Yield SSE messages for queued MCP responses until the client disconnects."""
|
||||||
|
# Send the endpoint event
|
||||||
|
# The client should POST to /api/mcp/messages?session_id=<session_id>
|
||||||
|
yield f"event: endpoint\ndata: /api/mcp/messages?session_id={session_id}\n\n"
|
||||||
|
|
||||||
|
try:
|
||||||
|
while True:
|
||||||
|
try:
|
||||||
|
# Wait for messages
|
||||||
|
message = q.get(timeout=20) # Keep-alive timeout
|
||||||
|
yield f"event: message\ndata: {json.dumps(message)}\n\n"
|
||||||
|
except queue.Empty:
|
||||||
|
# Send keep-alive comment
|
||||||
|
yield ": keep-alive\n\n"
|
||||||
|
except GeneratorExit:
|
||||||
|
with sessions_lock:
|
||||||
|
if session_id in sessions:
|
||||||
|
del sessions[session_id]
|
||||||
|
|
||||||
|
return Response(stream_with_context(stream()), mimetype='text/event-stream')
|
||||||
|
|
||||||
|
|
||||||
|
@mcp_bp.route('/messages', methods=['POST'])
|
||||||
|
def handle_messages():
|
||||||
|
"""Receive MCP JSON-RPC messages and enqueue responses for an SSE session."""
|
||||||
|
session_id = request.args.get('session_id')
|
||||||
|
if not session_id:
|
||||||
|
return jsonify({"error": "Missing session_id"}), 400
|
||||||
|
|
||||||
|
with sessions_lock:
|
||||||
|
if session_id not in sessions:
|
||||||
|
return jsonify({"error": "Session not found"}), 404
|
||||||
|
q = sessions[session_id]
|
||||||
|
|
||||||
|
data = request.json
|
||||||
|
if not data:
|
||||||
|
return jsonify({"error": "Invalid JSON"}), 400
|
||||||
|
|
||||||
|
response = process_mcp_request(data)
|
||||||
|
|
||||||
|
if response:
|
||||||
|
q.put(response)
|
||||||
|
|
||||||
|
return jsonify({"status": "accepted"}), 202
|
||||||
@@ -1,83 +1,134 @@
|
|||||||
|
from front.plugins.plugin_helper import is_mac
|
||||||
from logger import mylog
|
from logger import mylog
|
||||||
|
from models.plugin_object_instance import PluginObjectInstance
|
||||||
|
from database import get_temp_db_connection
|
||||||
|
|
||||||
|
|
||||||
# -------------------------------------------------------------------------------
|
|
||||||
# Device object handling (WIP)
|
|
||||||
# -------------------------------------------------------------------------------
|
|
||||||
class DeviceInstance:
|
class DeviceInstance:
|
||||||
def __init__(self, db):
|
|
||||||
self.db = db
|
|
||||||
|
|
||||||
# Get all
|
# --- helpers --------------------------------------------------------------
|
||||||
|
def _fetchall(self, query, params=()):
|
||||||
|
conn = get_temp_db_connection()
|
||||||
|
rows = conn.execute(query, params).fetchall()
|
||||||
|
conn.close()
|
||||||
|
return [dict(r) for r in rows]
|
||||||
|
|
||||||
|
def _fetchone(self, query, params=()):
|
||||||
|
conn = get_temp_db_connection()
|
||||||
|
row = conn.execute(query, params).fetchone()
|
||||||
|
conn.close()
|
||||||
|
return dict(row) if row else None
|
||||||
|
|
||||||
|
def _execute(self, query, params=()):
|
||||||
|
conn = get_temp_db_connection()
|
||||||
|
cur = conn.cursor()
|
||||||
|
cur.execute(query, params)
|
||||||
|
conn.commit()
|
||||||
|
conn.close()
|
||||||
|
|
||||||
|
# --- public API -----------------------------------------------------------
|
||||||
def getAll(self):
|
def getAll(self):
|
||||||
self.db.sql.execute("""
|
return self._fetchall("SELECT * FROM Devices")
|
||||||
SELECT * FROM Devices
|
|
||||||
""")
|
|
||||||
return self.db.sql.fetchall()
|
|
||||||
|
|
||||||
# Get all with unknown names
|
|
||||||
def getUnknown(self):
|
def getUnknown(self):
|
||||||
self.db.sql.execute("""
|
return self._fetchall("""
|
||||||
SELECT * FROM Devices WHERE devName in ("(unknown)", "(name not found)", "" )
|
SELECT * FROM Devices
|
||||||
|
WHERE devName IN ("(unknown)", "(name not found)", "")
|
||||||
""")
|
""")
|
||||||
return self.db.sql.fetchall()
|
|
||||||
|
|
||||||
# Get specific column value based on devMac
|
|
||||||
def getValueWithMac(self, column_name, devMac):
|
def getValueWithMac(self, column_name, devMac):
|
||||||
query = f"SELECT {column_name} FROM Devices WHERE devMac = ?"
|
row = self._fetchone(f"""
|
||||||
self.db.sql.execute(query, (devMac,))
|
SELECT {column_name} FROM Devices WHERE devMac = ?
|
||||||
result = self.db.sql.fetchone()
|
""", (devMac,))
|
||||||
return result[column_name] if result else None
|
return row.get(column_name) if row else None
|
||||||
|
|
||||||
# Get all down
|
|
||||||
def getDown(self):
|
def getDown(self):
|
||||||
self.db.sql.execute("""
|
return self._fetchall("""
|
||||||
SELECT * FROM Devices WHERE devAlertDown = 1 and devPresentLastScan = 0
|
SELECT * FROM Devices
|
||||||
|
WHERE devAlertDown = 1 AND devPresentLastScan = 0
|
||||||
""")
|
""")
|
||||||
return self.db.sql.fetchall()
|
|
||||||
|
|
||||||
# Get all down
|
|
||||||
def getOffline(self):
|
def getOffline(self):
|
||||||
self.db.sql.execute("""
|
return self._fetchall("""
|
||||||
SELECT * FROM Devices WHERE devPresentLastScan = 0
|
SELECT * FROM Devices
|
||||||
|
WHERE devPresentLastScan = 0
|
||||||
""")
|
""")
|
||||||
return self.db.sql.fetchall()
|
|
||||||
|
|
||||||
# Get a device by devGUID
|
|
||||||
def getByGUID(self, devGUID):
|
def getByGUID(self, devGUID):
|
||||||
self.db.sql.execute("SELECT * FROM Devices WHERE devGUID = ?", (devGUID,))
|
return self._fetchone("""
|
||||||
result = self.db.sql.fetchone()
|
SELECT * FROM Devices WHERE devGUID = ?
|
||||||
return dict(result) if result else None
|
""", (devGUID,))
|
||||||
|
|
||||||
# Check if a device exists by devGUID
|
|
||||||
def exists(self, devGUID):
|
def exists(self, devGUID):
|
||||||
self.db.sql.execute(
|
row = self._fetchone("""
|
||||||
"SELECT COUNT(*) AS count FROM Devices WHERE devGUID = ?", (devGUID,)
|
SELECT COUNT(*) as count FROM Devices WHERE devGUID = ?
|
||||||
)
|
""", (devGUID,))
|
||||||
result = self.db.sql.fetchone()
|
return row['count'] > 0 if row else False
|
||||||
return result["count"] > 0
|
|
||||||
|
def getByIP(self, ip):
|
||||||
|
return self._fetchone("""
|
||||||
|
SELECT * FROM Devices WHERE devLastIP = ?
|
||||||
|
""", (ip,))
|
||||||
|
|
||||||
|
def search(self, query):
|
||||||
|
like = f"%{query}%"
|
||||||
|
return self._fetchall("""
|
||||||
|
SELECT * FROM Devices
|
||||||
|
WHERE devMac LIKE ? OR devName LIKE ? OR devLastIP LIKE ?
|
||||||
|
""", (like, like, like))
|
||||||
|
|
||||||
|
def getLatest(self):
|
||||||
|
return self._fetchone("""
|
||||||
|
SELECT * FROM Devices
|
||||||
|
ORDER BY devFirstConnection DESC LIMIT 1
|
||||||
|
""")
|
||||||
|
|
||||||
|
def getNetworkTopology(self):
|
||||||
|
rows = self._fetchall("""
|
||||||
|
SELECT devName, devMac, devParentMAC, devParentPort, devVendor FROM Devices
|
||||||
|
""")
|
||||||
|
nodes = [{"id": r["devMac"], "name": r["devName"], "vendor": r["devVendor"]} for r in rows]
|
||||||
|
links = [{"source": r["devParentMAC"], "target": r["devMac"], "port": r["devParentPort"]}
|
||||||
|
for r in rows if r["devParentMAC"]]
|
||||||
|
return {"nodes": nodes, "links": links}
|
||||||
|
|
||||||
# Update a specific field for a device
|
|
||||||
def updateField(self, devGUID, field, value):
|
def updateField(self, devGUID, field, value):
|
||||||
if not self.exists(devGUID):
|
if not self.exists(devGUID):
|
||||||
m = f"[Device] In 'updateField': GUID {devGUID} not found."
|
msg = f"[Device] updateField: GUID {devGUID} not found"
|
||||||
mylog("none", m)
|
mylog("none", msg)
|
||||||
raise ValueError(m)
|
raise ValueError(msg)
|
||||||
|
self._execute(f"UPDATE Devices SET {field}=? WHERE devGUID=?", (value, devGUID))
|
||||||
|
|
||||||
self.db.sql.execute(
|
|
||||||
f"""
|
|
||||||
UPDATE Devices SET {field} = ? WHERE devGUID = ?
|
|
||||||
""",
|
|
||||||
(value, devGUID),
|
|
||||||
)
|
|
||||||
self.db.commitDB()
|
|
||||||
|
|
||||||
# Delete a device by devGUID
|
|
||||||
def delete(self, devGUID):
|
def delete(self, devGUID):
|
||||||
if not self.exists(devGUID):
|
if not self.exists(devGUID):
|
||||||
m = f"[Device] In 'delete': GUID {devGUID} not found."
|
msg = f"[Device] delete: GUID {devGUID} not found"
|
||||||
mylog("none", m)
|
mylog("none", msg)
|
||||||
raise ValueError(m)
|
raise ValueError(msg)
|
||||||
|
self._execute("DELETE FROM Devices WHERE devGUID=?", (devGUID,))
|
||||||
|
|
||||||
self.db.sql.execute("DELETE FROM Devices WHERE devGUID = ?", (devGUID,))
|
def resolvePrimaryID(self, target):
|
||||||
self.db.commitDB()
|
if is_mac(target):
|
||||||
|
return target.lower()
|
||||||
|
dev = self.getByIP(target)
|
||||||
|
return dev['devMac'].lower() if dev else None
|
||||||
|
|
||||||
|
def getOpenPorts(self, target):
|
||||||
|
primary = self.resolvePrimaryID(target)
|
||||||
|
if not primary:
|
||||||
|
return []
|
||||||
|
|
||||||
|
objs = PluginObjectInstance().getByField(
|
||||||
|
plugPrefix='NMAP',
|
||||||
|
matchedColumn='Object_PrimaryID',
|
||||||
|
matchedKey=primary,
|
||||||
|
returnFields=['Object_SecondaryID', 'Watched_Value2']
|
||||||
|
)
|
||||||
|
|
||||||
|
ports = []
|
||||||
|
for o in objs:
|
||||||
|
|
||||||
|
port = int(o.get('Object_SecondaryID') or 0)
|
||||||
|
|
||||||
|
ports.append({"port": port, "service": o.get('Watched_Value2', '')})
|
||||||
|
|
||||||
|
return ports
|
||||||
|
|||||||
107
server/models/event_instance.py
Normal file
107
server/models/event_instance.py
Normal file
@@ -0,0 +1,107 @@
|
|||||||
|
from datetime import datetime, timedelta
|
||||||
|
from logger import mylog
|
||||||
|
from database import get_temp_db_connection
|
||||||
|
|
||||||
|
|
||||||
|
# -------------------------------------------------------------------------------
|
||||||
|
# Event handling (Matches table: Events)
|
||||||
|
# -------------------------------------------------------------------------------
|
||||||
|
class EventInstance:
|
||||||
|
|
||||||
|
def _conn(self):
|
||||||
|
"""Always return a new DB connection (thread-safe)."""
|
||||||
|
return get_temp_db_connection()
|
||||||
|
|
||||||
|
def _rows_to_list(self, rows):
|
||||||
|
return [dict(r) for r in rows]
|
||||||
|
|
||||||
|
# Get all events
|
||||||
|
def get_all(self):
|
||||||
|
conn = self._conn()
|
||||||
|
rows = conn.execute(
|
||||||
|
"SELECT * FROM Events ORDER BY eve_DateTime DESC"
|
||||||
|
).fetchall()
|
||||||
|
conn.close()
|
||||||
|
return self._rows_to_list(rows)
|
||||||
|
|
||||||
|
# --- Get last n events ---
|
||||||
|
def get_last_n(self, n=10):
|
||||||
|
conn = self._conn()
|
||||||
|
rows = conn.execute("""
|
||||||
|
SELECT * FROM Events
|
||||||
|
ORDER BY eve_DateTime DESC
|
||||||
|
LIMIT ?
|
||||||
|
""", (n,)).fetchall()
|
||||||
|
conn.close()
|
||||||
|
return self._rows_to_list(rows)
|
||||||
|
|
||||||
|
# --- Specific helper for last 10 ---
|
||||||
|
def get_last(self):
|
||||||
|
return self.get_last_n(10)
|
||||||
|
|
||||||
|
# Get events in the last 24h
|
||||||
|
def get_recent(self):
|
||||||
|
since = datetime.now() - timedelta(hours=24)
|
||||||
|
conn = self._conn()
|
||||||
|
rows = conn.execute("""
|
||||||
|
SELECT * FROM Events
|
||||||
|
WHERE eve_DateTime >= ?
|
||||||
|
ORDER BY eve_DateTime DESC
|
||||||
|
""", (since,)).fetchall()
|
||||||
|
conn.close()
|
||||||
|
return self._rows_to_list(rows)
|
||||||
|
|
||||||
|
# Get events from last N hours
|
||||||
|
def get_by_hours(self, hours: int):
|
||||||
|
if hours <= 0:
|
||||||
|
mylog("warn", f"[Events] get_by_hours({hours}) -> invalid value")
|
||||||
|
return []
|
||||||
|
|
||||||
|
since = datetime.now() - timedelta(hours=hours)
|
||||||
|
conn = self._conn()
|
||||||
|
rows = conn.execute("""
|
||||||
|
SELECT * FROM Events
|
||||||
|
WHERE eve_DateTime >= ?
|
||||||
|
ORDER BY eve_DateTime DESC
|
||||||
|
""", (since,)).fetchall()
|
||||||
|
conn.close()
|
||||||
|
return self._rows_to_list(rows)
|
||||||
|
|
||||||
|
# Get events in a date range
|
||||||
|
def get_by_range(self, start: datetime, end: datetime):
|
||||||
|
if end < start:
|
||||||
|
mylog("error", f"[Events] get_by_range invalid: {start} > {end}")
|
||||||
|
raise ValueError("Start must not be after end")
|
||||||
|
|
||||||
|
conn = self._conn()
|
||||||
|
rows = conn.execute("""
|
||||||
|
SELECT * FROM Events
|
||||||
|
WHERE eve_DateTime BETWEEN ? AND ?
|
||||||
|
ORDER BY eve_DateTime DESC
|
||||||
|
""", (start, end)).fetchall()
|
||||||
|
conn.close()
|
||||||
|
return self._rows_to_list(rows)
|
||||||
|
|
||||||
|
# Insert new event
|
||||||
|
def add(self, mac, ip, eventType, info="", pendingAlert=True, pairRow=None):
|
||||||
|
conn = self._conn()
|
||||||
|
conn.execute("""
|
||||||
|
INSERT INTO Events (
|
||||||
|
eve_MAC, eve_IP, eve_DateTime,
|
||||||
|
eve_EventType, eve_AdditionalInfo,
|
||||||
|
eve_PendingAlertEmail, eve_PairEventRowid
|
||||||
|
) VALUES (?,?,?,?,?,?,?)
|
||||||
|
""", (mac, ip, datetime.now(), eventType, info,
|
||||||
|
1 if pendingAlert else 0, pairRow))
|
||||||
|
conn.commit()
|
||||||
|
conn.close()
|
||||||
|
|
||||||
|
# Delete old events
|
||||||
|
def delete_older_than(self, days: int):
|
||||||
|
cutoff = datetime.now() - timedelta(days=days)
|
||||||
|
conn = self._conn()
|
||||||
|
result = conn.execute("DELETE FROM Events WHERE eve_DateTime < ?", (cutoff,))
|
||||||
|
conn.commit()
|
||||||
|
deleted_count = result.rowcount
|
||||||
|
conn.close()
|
||||||
|
return deleted_count
|
||||||
@@ -1,70 +1,91 @@
|
|||||||
from logger import mylog
|
from logger import mylog
|
||||||
|
from database import get_temp_db_connection
|
||||||
|
|
||||||
|
|
||||||
# -------------------------------------------------------------------------------
|
# -------------------------------------------------------------------------------
|
||||||
# Plugin object handling (WIP)
|
# Plugin object handling (THREAD-SAFE REWRITE)
|
||||||
# -------------------------------------------------------------------------------
|
# -------------------------------------------------------------------------------
|
||||||
class PluginObjectInstance:
|
class PluginObjectInstance:
|
||||||
def __init__(self, db):
|
|
||||||
self.db = db
|
|
||||||
|
|
||||||
# Get all plugin objects
|
# -------------- Internal DB helper wrappers --------------------------------
|
||||||
|
def _fetchall(self, query, params=()):
|
||||||
|
conn = get_temp_db_connection()
|
||||||
|
rows = conn.execute(query, params).fetchall()
|
||||||
|
conn.close()
|
||||||
|
return [dict(r) for r in rows]
|
||||||
|
|
||||||
|
def _fetchone(self, query, params=()):
|
||||||
|
conn = get_temp_db_connection()
|
||||||
|
row = conn.execute(query, params).fetchone()
|
||||||
|
conn.close()
|
||||||
|
return dict(row) if row else None
|
||||||
|
|
||||||
|
def _execute(self, query, params=()):
|
||||||
|
conn = get_temp_db_connection()
|
||||||
|
conn.execute(query, params)
|
||||||
|
conn.commit()
|
||||||
|
conn.close()
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# Public API — identical behaviour, now thread-safe + self-contained
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
def getAll(self):
|
def getAll(self):
|
||||||
self.db.sql.execute("""
|
return self._fetchall("SELECT * FROM Plugins_Objects")
|
||||||
SELECT * FROM Plugins_Objects
|
|
||||||
""")
|
|
||||||
return self.db.sql.fetchall()
|
|
||||||
|
|
||||||
# Get plugin object by ObjectGUID
|
|
||||||
def getByGUID(self, ObjectGUID):
|
def getByGUID(self, ObjectGUID):
|
||||||
self.db.sql.execute(
|
return self._fetchone(
|
||||||
"SELECT * FROM Plugins_Objects WHERE ObjectGUID = ?", (ObjectGUID,)
|
"SELECT * FROM Plugins_Objects WHERE ObjectGUID = ?", (ObjectGUID,)
|
||||||
)
|
)
|
||||||
result = self.db.sql.fetchone()
|
|
||||||
return dict(result) if result else None
|
|
||||||
|
|
||||||
# Check if a plugin object exists by ObjectGUID
|
|
||||||
def exists(self, ObjectGUID):
|
def exists(self, ObjectGUID):
|
||||||
self.db.sql.execute(
|
row = self._fetchone("""
|
||||||
"SELECT COUNT(*) AS count FROM Plugins_Objects WHERE ObjectGUID = ?",
|
SELECT COUNT(*) AS count FROM Plugins_Objects WHERE ObjectGUID = ?
|
||||||
(ObjectGUID,),
|
""", (ObjectGUID,))
|
||||||
)
|
return row["count"] > 0 if row else False
|
||||||
result = self.db.sql.fetchone()
|
|
||||||
return result["count"] > 0
|
|
||||||
|
|
||||||
# Get objects by plugin name
|
|
||||||
def getByPlugin(self, plugin):
|
def getByPlugin(self, plugin):
|
||||||
self.db.sql.execute("SELECT * FROM Plugins_Objects WHERE Plugin = ?", (plugin,))
|
return self._fetchall(
|
||||||
return self.db.sql.fetchall()
|
"SELECT * FROM Plugins_Objects WHERE Plugin = ?", (plugin,)
|
||||||
|
)
|
||||||
|
|
||||||
|
def getByField(self, plugPrefix, matchedColumn, matchedKey, returnFields=None):
|
||||||
|
rows = self._fetchall(
|
||||||
|
f"SELECT * FROM Plugins_Objects WHERE Plugin = ? AND {matchedColumn} = ?",
|
||||||
|
(plugPrefix, matchedKey.lower())
|
||||||
|
)
|
||||||
|
|
||||||
|
if not returnFields:
|
||||||
|
return rows
|
||||||
|
|
||||||
|
return [{f: row.get(f) for f in returnFields} for row in rows]
|
||||||
|
|
||||||
|
def getByPrimary(self, plugin, primary_id):
|
||||||
|
return self._fetchall("""
|
||||||
|
SELECT * FROM Plugins_Objects
|
||||||
|
WHERE Plugin = ? AND Object_PrimaryID = ?
|
||||||
|
""", (plugin, primary_id))
|
||||||
|
|
||||||
# Get objects by status
|
|
||||||
def getByStatus(self, status):
|
def getByStatus(self, status):
|
||||||
self.db.sql.execute("SELECT * FROM Plugins_Objects WHERE Status = ?", (status,))
|
return self._fetchall("""
|
||||||
return self.db.sql.fetchall()
|
SELECT * FROM Plugins_Objects WHERE Status = ?
|
||||||
|
""", (status,))
|
||||||
|
|
||||||
# Update a specific field for a plugin object
|
|
||||||
def updateField(self, ObjectGUID, field, value):
|
def updateField(self, ObjectGUID, field, value):
|
||||||
if not self.exists(ObjectGUID):
|
if not self.exists(ObjectGUID):
|
||||||
m = f"[PluginObject] In 'updateField': GUID {ObjectGUID} not found."
|
msg = f"[PluginObject] updateField: GUID {ObjectGUID} not found."
|
||||||
mylog("none", m)
|
mylog("none", msg)
|
||||||
raise ValueError(m)
|
raise ValueError(msg)
|
||||||
|
|
||||||
self.db.sql.execute(
|
self._execute(
|
||||||
f"""
|
f"UPDATE Plugins_Objects SET {field}=? WHERE ObjectGUID=?",
|
||||||
UPDATE Plugins_Objects SET {field} = ? WHERE ObjectGUID = ?
|
(value, ObjectGUID)
|
||||||
""",
|
|
||||||
(value, ObjectGUID),
|
|
||||||
)
|
)
|
||||||
self.db.commitDB()
|
|
||||||
|
|
||||||
# Delete a plugin object by ObjectGUID
|
|
||||||
def delete(self, ObjectGUID):
|
def delete(self, ObjectGUID):
|
||||||
if not self.exists(ObjectGUID):
|
if not self.exists(ObjectGUID):
|
||||||
m = f"[PluginObject] In 'delete': GUID {ObjectGUID} not found."
|
msg = f"[PluginObject] delete: GUID {ObjectGUID} not found."
|
||||||
mylog("none", m)
|
mylog("none", msg)
|
||||||
raise ValueError(m)
|
raise ValueError(msg)
|
||||||
|
|
||||||
self.db.sql.execute(
|
self._execute("DELETE FROM Plugins_Objects WHERE ObjectGUID=?", (ObjectGUID,))
|
||||||
"DELETE FROM Plugins_Objects WHERE ObjectGUID = ?", (ObjectGUID,)
|
|
||||||
)
|
|
||||||
self.db.commitDB()
|
|
||||||
|
|||||||
@@ -650,7 +650,7 @@ def update_devices_names(pm):
|
|||||||
|
|
||||||
sql = pm.db.sql
|
sql = pm.db.sql
|
||||||
resolver = NameResolver(pm.db)
|
resolver = NameResolver(pm.db)
|
||||||
device_handler = DeviceInstance(pm.db)
|
device_handler = DeviceInstance()
|
||||||
|
|
||||||
nameNotFound = "(name not found)"
|
nameNotFound = "(name not found)"
|
||||||
|
|
||||||
|
|||||||
@@ -70,3 +70,13 @@ def generate_deterministic_guid(plugin, primary_id, secondary_id):
|
|||||||
"""Generates a deterministic GUID based on plugin, primary ID, and secondary ID."""
|
"""Generates a deterministic GUID based on plugin, primary ID, and secondary ID."""
|
||||||
data = f"{plugin}-{primary_id}-{secondary_id}".encode("utf-8")
|
data = f"{plugin}-{primary_id}-{secondary_id}".encode("utf-8")
|
||||||
return str(uuid.UUID(hashlib.md5(data).hexdigest()))
|
return str(uuid.UUID(hashlib.md5(data).hexdigest()))
|
||||||
|
|
||||||
|
|
||||||
|
def string_to_mac_hash(input_string):
|
||||||
|
# Calculate a hash using SHA-256
|
||||||
|
sha256_hash = hashlib.sha256(input_string.encode()).hexdigest()
|
||||||
|
|
||||||
|
# Take the first 12 characters of the hash and format as a MAC address
|
||||||
|
mac_hash = ':'.join(sha256_hash[i:i + 2] for i in range(0, 12, 2))
|
||||||
|
|
||||||
|
return mac_hash
|
||||||
|
|||||||
@@ -42,13 +42,13 @@ class UpdateFieldAction(Action):
|
|||||||
# currently unused
|
# currently unused
|
||||||
if isinstance(obj, dict) and "ObjectGUID" in obj:
|
if isinstance(obj, dict) and "ObjectGUID" in obj:
|
||||||
mylog("debug", f"[WF] Updating Object '{obj}' ")
|
mylog("debug", f"[WF] Updating Object '{obj}' ")
|
||||||
plugin_instance = PluginObjectInstance(self.db)
|
plugin_instance = PluginObjectInstance()
|
||||||
plugin_instance.updateField(obj["ObjectGUID"], self.field, self.value)
|
plugin_instance.updateField(obj["ObjectGUID"], self.field, self.value)
|
||||||
processed = True
|
processed = True
|
||||||
|
|
||||||
elif isinstance(obj, dict) and "devGUID" in obj:
|
elif isinstance(obj, dict) and "devGUID" in obj:
|
||||||
mylog("debug", f"[WF] Updating Device '{obj}' ")
|
mylog("debug", f"[WF] Updating Device '{obj}' ")
|
||||||
device_instance = DeviceInstance(self.db)
|
device_instance = DeviceInstance()
|
||||||
device_instance.updateField(obj["devGUID"], self.field, self.value)
|
device_instance.updateField(obj["devGUID"], self.field, self.value)
|
||||||
processed = True
|
processed = True
|
||||||
|
|
||||||
@@ -79,13 +79,13 @@ class DeleteObjectAction(Action):
|
|||||||
# currently unused
|
# currently unused
|
||||||
if isinstance(obj, dict) and "ObjectGUID" in obj:
|
if isinstance(obj, dict) and "ObjectGUID" in obj:
|
||||||
mylog("debug", f"[WF] Updating Object '{obj}' ")
|
mylog("debug", f"[WF] Updating Object '{obj}' ")
|
||||||
plugin_instance = PluginObjectInstance(self.db)
|
plugin_instance = PluginObjectInstance()
|
||||||
plugin_instance.delete(obj["ObjectGUID"])
|
plugin_instance.delete(obj["ObjectGUID"])
|
||||||
processed = True
|
processed = True
|
||||||
|
|
||||||
elif isinstance(obj, dict) and "devGUID" in obj:
|
elif isinstance(obj, dict) and "devGUID" in obj:
|
||||||
mylog("debug", f"[WF] Updating Device '{obj}' ")
|
mylog("debug", f"[WF] Updating Device '{obj}' ")
|
||||||
device_instance = DeviceInstance(self.db)
|
device_instance = DeviceInstance()
|
||||||
device_instance.delete(obj["devGUID"])
|
device_instance.delete(obj["devGUID"])
|
||||||
processed = True
|
processed = True
|
||||||
|
|
||||||
|
|||||||
66
test/api_endpoints/test_auth_endpoints.py
Normal file
66
test/api_endpoints/test_auth_endpoints.py
Normal file
@@ -0,0 +1,66 @@
|
|||||||
|
# tests/test_auth.py
|
||||||
|
|
||||||
|
import sys
|
||||||
|
import os
|
||||||
|
import pytest
|
||||||
|
|
||||||
|
# Register NetAlertX directories
|
||||||
|
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
|
||||||
|
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
|
||||||
|
|
||||||
|
from helper import get_setting_value # noqa: E402
|
||||||
|
from api_server.api_server_start import app # noqa: E402
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture(scope="session")
|
||||||
|
def api_token():
|
||||||
|
"""Load API token from system settings (same as other tests)."""
|
||||||
|
return get_setting_value("API_TOKEN")
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture
|
||||||
|
def client():
|
||||||
|
"""Flask test client."""
|
||||||
|
with app.test_client() as client:
|
||||||
|
yield client
|
||||||
|
|
||||||
|
|
||||||
|
def auth_headers(token):
|
||||||
|
return {"Authorization": f"Bearer {token}"}
|
||||||
|
|
||||||
|
|
||||||
|
# -------------------------
|
||||||
|
# AUTH ENDPOINT TESTS
|
||||||
|
# -------------------------
|
||||||
|
|
||||||
|
def test_auth_ok(client, api_token):
|
||||||
|
"""Valid token should allow access."""
|
||||||
|
resp = client.get("/auth", headers=auth_headers(api_token))
|
||||||
|
assert resp.status_code == 200
|
||||||
|
|
||||||
|
data = resp.get_json()
|
||||||
|
assert data is not None
|
||||||
|
assert data.get("success") is True
|
||||||
|
assert "successful" in data.get("message", "").lower()
|
||||||
|
|
||||||
|
|
||||||
|
def test_auth_missing_token(client):
|
||||||
|
"""Missing token should be forbidden."""
|
||||||
|
resp = client.get("/auth")
|
||||||
|
assert resp.status_code == 403
|
||||||
|
|
||||||
|
data = resp.get_json()
|
||||||
|
assert data is not None
|
||||||
|
assert data.get("success") is False
|
||||||
|
assert "not authorized" in data.get("message", "").lower()
|
||||||
|
|
||||||
|
|
||||||
|
def test_auth_invalid_token(client):
|
||||||
|
"""Invalid bearer token should be forbidden."""
|
||||||
|
resp = client.get("/auth", headers=auth_headers("INVALID-TOKEN"))
|
||||||
|
assert resp.status_code == 403
|
||||||
|
|
||||||
|
data = resp.get_json()
|
||||||
|
assert data is not None
|
||||||
|
assert data.get("success") is False
|
||||||
|
assert "not authorized" in data.get("message", "").lower()
|
||||||
306
test/api_endpoints/test_mcp_tools_endpoints.py
Normal file
306
test/api_endpoints/test_mcp_tools_endpoints.py
Normal file
@@ -0,0 +1,306 @@
|
|||||||
|
import sys
|
||||||
|
import os
|
||||||
|
import pytest
|
||||||
|
from unittest.mock import patch, MagicMock
|
||||||
|
from datetime import datetime
|
||||||
|
|
||||||
|
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
|
||||||
|
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
|
||||||
|
|
||||||
|
from helper import get_setting_value # noqa: E402
|
||||||
|
from api_server.api_server_start import app # noqa: E402
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture(scope="session")
|
||||||
|
def api_token():
|
||||||
|
return get_setting_value("API_TOKEN")
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture
|
||||||
|
def client():
|
||||||
|
with app.test_client() as client:
|
||||||
|
yield client
|
||||||
|
|
||||||
|
|
||||||
|
def auth_headers(token):
|
||||||
|
return {"Authorization": f"Bearer {token}"}
|
||||||
|
|
||||||
|
|
||||||
|
# --- Device Search Tests ---
|
||||||
|
|
||||||
|
@patch('models.device_instance.get_temp_db_connection')
|
||||||
|
def test_get_device_info_ip_partial(mock_db_conn, client, api_token):
|
||||||
|
"""Test device search with partial IP search."""
|
||||||
|
# Mock database connection - DeviceInstance._fetchall calls conn.execute().fetchall()
|
||||||
|
mock_conn = MagicMock()
|
||||||
|
mock_execute_result = MagicMock()
|
||||||
|
mock_execute_result.fetchall.return_value = [
|
||||||
|
{"devName": "Test Device", "devMac": "AA:BB:CC:DD:EE:FF", "devLastIP": "192.168.1.50"}
|
||||||
|
]
|
||||||
|
mock_conn.execute.return_value = mock_execute_result
|
||||||
|
mock_db_conn.return_value = mock_conn
|
||||||
|
|
||||||
|
payload = {"query": ".50"}
|
||||||
|
response = client.post('/devices/search',
|
||||||
|
json=payload,
|
||||||
|
headers=auth_headers(api_token))
|
||||||
|
|
||||||
|
assert response.status_code == 200
|
||||||
|
data = response.get_json()
|
||||||
|
assert data["success"] is True
|
||||||
|
assert len(data["devices"]) == 1
|
||||||
|
assert data["devices"][0]["devLastIP"] == "192.168.1.50"
|
||||||
|
|
||||||
|
|
||||||
|
# --- Trigger Scan Tests ---
|
||||||
|
|
||||||
|
@patch('api_server.api_server_start.UserEventsQueueInstance')
|
||||||
|
def test_trigger_scan_ARPSCAN(mock_queue_class, client, api_token):
|
||||||
|
"""Test trigger_scan with ARPSCAN type."""
|
||||||
|
mock_queue = MagicMock()
|
||||||
|
mock_queue_class.return_value = mock_queue
|
||||||
|
|
||||||
|
payload = {"type": "ARPSCAN"}
|
||||||
|
response = client.post('/mcp/sse/nettools/trigger-scan',
|
||||||
|
json=payload,
|
||||||
|
headers=auth_headers(api_token))
|
||||||
|
|
||||||
|
assert response.status_code == 200
|
||||||
|
data = response.get_json()
|
||||||
|
assert data["success"] is True
|
||||||
|
mock_queue.add_event.assert_called_once()
|
||||||
|
call_args = mock_queue.add_event.call_args[0]
|
||||||
|
assert "run|ARPSCAN" in call_args[0]
|
||||||
|
|
||||||
|
|
||||||
|
@patch('api_server.api_server_start.UserEventsQueueInstance')
|
||||||
|
def test_trigger_scan_invalid_type(mock_queue_class, client, api_token):
|
||||||
|
"""Test trigger_scan with invalid scan type."""
|
||||||
|
mock_queue = MagicMock()
|
||||||
|
mock_queue_class.return_value = mock_queue
|
||||||
|
|
||||||
|
payload = {"type": "invalid_type", "target": "192.168.1.0/24"}
|
||||||
|
response = client.post('/mcp/sse/nettools/trigger-scan',
|
||||||
|
json=payload,
|
||||||
|
headers=auth_headers(api_token))
|
||||||
|
|
||||||
|
assert response.status_code == 400
|
||||||
|
data = response.get_json()
|
||||||
|
assert data["success"] is False
|
||||||
|
|
||||||
|
|
||||||
|
# --- get_open_ports Tests ---
|
||||||
|
|
||||||
|
|
||||||
|
@patch('models.plugin_object_instance.get_temp_db_connection')
|
||||||
|
@patch('models.device_instance.get_temp_db_connection')
|
||||||
|
def test_get_open_ports_ip(mock_plugin_db_conn, mock_device_db_conn, client, api_token):
|
||||||
|
"""Test get_open_ports with an IP address."""
|
||||||
|
# Mock database connections for both device lookup and plugin objects
|
||||||
|
mock_conn = MagicMock()
|
||||||
|
mock_execute_result = MagicMock()
|
||||||
|
|
||||||
|
# Mock for PluginObjectInstance.getByField (returns port data)
|
||||||
|
mock_execute_result.fetchall.return_value = [
|
||||||
|
{"Object_SecondaryID": "22", "Watched_Value2": "ssh"},
|
||||||
|
{"Object_SecondaryID": "80", "Watched_Value2": "http"}
|
||||||
|
]
|
||||||
|
# Mock for DeviceInstance.getByIP (returns device with MAC)
|
||||||
|
mock_execute_result.fetchone.return_value = {"devMac": "AA:BB:CC:DD:EE:FF"}
|
||||||
|
|
||||||
|
mock_conn.execute.return_value = mock_execute_result
|
||||||
|
mock_plugin_db_conn.return_value = mock_conn
|
||||||
|
mock_device_db_conn.return_value = mock_conn
|
||||||
|
|
||||||
|
payload = {"target": "192.168.1.1"}
|
||||||
|
response = client.post('/device/open_ports',
|
||||||
|
json=payload,
|
||||||
|
headers=auth_headers(api_token))
|
||||||
|
|
||||||
|
assert response.status_code == 200
|
||||||
|
data = response.get_json()
|
||||||
|
assert data["success"] is True
|
||||||
|
assert len(data["open_ports"]) == 2
|
||||||
|
assert data["open_ports"][0]["port"] == 22
|
||||||
|
assert data["open_ports"][1]["service"] == "http"
|
||||||
|
|
||||||
|
|
||||||
|
@patch('models.plugin_object_instance.get_temp_db_connection')
|
||||||
|
def test_get_open_ports_mac_resolve(mock_plugin_db_conn, client, api_token):
|
||||||
|
"""Test get_open_ports with a MAC address that resolves to an IP."""
|
||||||
|
# Mock database connection for MAC-based open ports query
|
||||||
|
mock_conn = MagicMock()
|
||||||
|
mock_execute_result = MagicMock()
|
||||||
|
mock_execute_result.fetchall.return_value = [
|
||||||
|
{"Object_SecondaryID": "80", "Watched_Value2": "http"}
|
||||||
|
]
|
||||||
|
mock_conn.execute.return_value = mock_execute_result
|
||||||
|
mock_plugin_db_conn.return_value = mock_conn
|
||||||
|
|
||||||
|
payload = {"target": "AA:BB:CC:DD:EE:FF"}
|
||||||
|
response = client.post('/device/open_ports',
|
||||||
|
json=payload,
|
||||||
|
headers=auth_headers(api_token))
|
||||||
|
|
||||||
|
assert response.status_code == 200
|
||||||
|
data = response.get_json()
|
||||||
|
assert data["success"] is True
|
||||||
|
assert "target" in data
|
||||||
|
assert len(data["open_ports"]) == 1
|
||||||
|
assert data["open_ports"][0]["port"] == 80
|
||||||
|
|
||||||
|
|
||||||
|
# --- get_network_topology Tests ---
|
||||||
|
@patch('models.device_instance.get_temp_db_connection')
|
||||||
|
def test_get_network_topology(mock_db_conn, client, api_token):
|
||||||
|
"""Test get_network_topology."""
|
||||||
|
# Mock database connection for topology query
|
||||||
|
mock_conn = MagicMock()
|
||||||
|
mock_execute_result = MagicMock()
|
||||||
|
mock_execute_result.fetchall.return_value = [
|
||||||
|
{"devName": "Router", "devMac": "AA:AA:AA:AA:AA:AA", "devParentMAC": None, "devParentPort": None, "devVendor": "VendorA"},
|
||||||
|
{"devName": "Device1", "devMac": "BB:BB:BB:BB:BB:BB", "devParentMAC": "AA:AA:AA:AA:AA:AA", "devParentPort": "eth1", "devVendor": "VendorB"}
|
||||||
|
]
|
||||||
|
mock_conn.execute.return_value = mock_execute_result
|
||||||
|
mock_db_conn.return_value = mock_conn
|
||||||
|
|
||||||
|
response = client.get('/devices/network/topology',
|
||||||
|
headers=auth_headers(api_token))
|
||||||
|
|
||||||
|
assert response.status_code == 200
|
||||||
|
data = response.get_json()
|
||||||
|
assert len(data["nodes"]) == 2
|
||||||
|
assert len(data["links"]) == 1
|
||||||
|
assert data["links"][0]["source"] == "AA:AA:AA:AA:AA:AA"
|
||||||
|
assert data["links"][0]["target"] == "BB:BB:BB:BB:BB:BB"
|
||||||
|
|
||||||
|
|
||||||
|
# --- get_recent_alerts Tests ---
|
||||||
|
@patch('models.event_instance.get_temp_db_connection')
|
||||||
|
def test_get_recent_alerts(mock_db_conn, client, api_token):
|
||||||
|
"""Test get_recent_alerts."""
|
||||||
|
# Mock database connection for events query
|
||||||
|
mock_conn = MagicMock()
|
||||||
|
mock_execute_result = MagicMock()
|
||||||
|
now = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
|
||||||
|
mock_execute_result.fetchall.return_value = [
|
||||||
|
{"eve_DateTime": now, "eve_EventType": "New Device", "eve_MAC": "AA:BB:CC:DD:EE:FF"}
|
||||||
|
]
|
||||||
|
mock_conn.execute.return_value = mock_execute_result
|
||||||
|
mock_db_conn.return_value = mock_conn
|
||||||
|
|
||||||
|
response = client.get('/events/recent',
|
||||||
|
headers=auth_headers(api_token))
|
||||||
|
|
||||||
|
assert response.status_code == 200
|
||||||
|
data = response.get_json()
|
||||||
|
assert data["success"] is True
|
||||||
|
assert data["hours"] == 24
|
||||||
|
|
||||||
|
|
||||||
|
# --- Device Alias Tests ---
|
||||||
|
|
||||||
|
@patch('api_server.api_server_start.update_device_column')
|
||||||
|
def test_set_device_alias(mock_update_col, client, api_token):
|
||||||
|
"""Test set_device_alias."""
|
||||||
|
mock_update_col.return_value = {"success": True, "message": "Device alias updated"}
|
||||||
|
|
||||||
|
payload = {"alias": "New Device Name"}
|
||||||
|
response = client.post('/device/AA:BB:CC:DD:EE:FF/set-alias',
|
||||||
|
json=payload,
|
||||||
|
headers=auth_headers(api_token))
|
||||||
|
|
||||||
|
assert response.status_code == 200
|
||||||
|
data = response.get_json()
|
||||||
|
assert data["success"] is True
|
||||||
|
mock_update_col.assert_called_once_with("AA:BB:CC:DD:EE:FF", "devName", "New Device Name")
|
||||||
|
|
||||||
|
|
||||||
|
@patch('api_server.api_server_start.update_device_column')
|
||||||
|
def test_set_device_alias_not_found(mock_update_col, client, api_token):
|
||||||
|
"""Test set_device_alias when device is not found."""
|
||||||
|
mock_update_col.return_value = {"success": False, "error": "Device not found"}
|
||||||
|
|
||||||
|
payload = {"alias": "New Device Name"}
|
||||||
|
response = client.post('/device/FF:FF:FF:FF:FF:FF/set-alias',
|
||||||
|
json=payload,
|
||||||
|
headers=auth_headers(api_token))
|
||||||
|
|
||||||
|
assert response.status_code == 200
|
||||||
|
data = response.get_json()
|
||||||
|
assert data["success"] is False
|
||||||
|
assert "Device not found" in data["error"]
|
||||||
|
|
||||||
|
|
||||||
|
# --- Wake-on-LAN Tests ---
|
||||||
|
|
||||||
|
@patch('api_server.api_server_start.wakeonlan')
|
||||||
|
def test_wol_wake_device(mock_wakeonlan, client, api_token):
|
||||||
|
"""Test wol_wake_device."""
|
||||||
|
mock_wakeonlan.return_value = {"success": True, "message": "WOL packet sent to AA:BB:CC:DD:EE:FF"}
|
||||||
|
|
||||||
|
payload = {"devMac": "AA:BB:CC:DD:EE:FF"}
|
||||||
|
response = client.post('/nettools/wakeonlan',
|
||||||
|
json=payload,
|
||||||
|
headers=auth_headers(api_token))
|
||||||
|
|
||||||
|
assert response.status_code == 200
|
||||||
|
data = response.get_json()
|
||||||
|
assert data["success"] is True
|
||||||
|
assert "AA:BB:CC:DD:EE:FF" in data["message"]
|
||||||
|
|
||||||
|
|
||||||
|
def test_wol_wake_device_invalid_mac(client, api_token):
|
||||||
|
"""Test wol_wake_device with invalid MAC."""
|
||||||
|
payload = {"devMac": "invalid-mac"}
|
||||||
|
response = client.post('/nettools/wakeonlan',
|
||||||
|
json=payload,
|
||||||
|
headers=auth_headers(api_token))
|
||||||
|
|
||||||
|
assert response.status_code == 400
|
||||||
|
data = response.get_json()
|
||||||
|
assert data["success"] is False
|
||||||
|
|
||||||
|
|
||||||
|
# --- OpenAPI Spec Tests ---
|
||||||
|
|
||||||
|
# --- Latest Device Tests ---
|
||||||
|
|
||||||
|
@patch('models.device_instance.get_temp_db_connection')
|
||||||
|
def test_get_latest_device(mock_db_conn, client, api_token):
|
||||||
|
"""Test get_latest_device endpoint."""
|
||||||
|
# Mock database connection for latest device query
|
||||||
|
mock_conn = MagicMock()
|
||||||
|
mock_execute_result = MagicMock()
|
||||||
|
mock_execute_result.fetchone.return_value = {
|
||||||
|
"devName": "Latest Device",
|
||||||
|
"devMac": "AA:BB:CC:DD:EE:FF",
|
||||||
|
"devLastIP": "192.168.1.100",
|
||||||
|
"devFirstConnection": "2025-12-07 10:30:00"
|
||||||
|
}
|
||||||
|
mock_conn.execute.return_value = mock_execute_result
|
||||||
|
mock_db_conn.return_value = mock_conn
|
||||||
|
|
||||||
|
response = client.get('/devices/latest',
|
||||||
|
headers=auth_headers(api_token))
|
||||||
|
|
||||||
|
assert response.status_code == 200
|
||||||
|
data = response.get_json()
|
||||||
|
assert len(data) == 1
|
||||||
|
assert data[0]["devName"] == "Latest Device"
|
||||||
|
assert data[0]["devMac"] == "AA:BB:CC:DD:EE:FF"
|
||||||
|
|
||||||
|
|
||||||
|
def test_openapi_spec(client, api_token):
|
||||||
|
"""Test openapi_spec endpoint contains MCP tool paths."""
|
||||||
|
response = client.get('/mcp/sse/openapi.json', headers=auth_headers(api_token))
|
||||||
|
assert response.status_code == 200
|
||||||
|
spec = response.get_json()
|
||||||
|
|
||||||
|
# Check for MCP tool endpoints in the spec with correct paths
|
||||||
|
assert "/nettools/trigger-scan" in spec["paths"]
|
||||||
|
assert "/device/open_ports" in spec["paths"]
|
||||||
|
assert "/devices/network/topology" in spec["paths"]
|
||||||
|
assert "/events/recent" in spec["paths"]
|
||||||
|
assert "/device/{mac}/set-alias" in spec["paths"]
|
||||||
|
assert "/nettools/wakeonlan" in spec["paths"]
|
||||||
Reference in New Issue
Block a user