mirror of
https://github.com/jokob-sk/NetAlertX.git
synced 2025-12-07 09:36:05 -08:00
Compare commits
70 Commits
pr-1279
...
linting-fi
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
4f5a40ffce | ||
|
|
f5aea55b29 | ||
|
|
e3e7e2f52e | ||
|
|
872ac1ce0f | ||
|
|
ebeb7a07af | ||
|
|
5c14b34a8b | ||
|
|
f0abd500d9 | ||
|
|
8503cb86f1 | ||
|
|
5f0b670a82 | ||
|
|
9df814e351 | ||
|
|
88509ce8c2 | ||
|
|
995c371f48 | ||
|
|
aee5e04b9f | ||
|
|
e0c96052bb | ||
|
|
fd5235dd0a | ||
|
|
f3de66a287 | ||
|
|
9a4fb35ea5 | ||
|
|
a1ad904042 | ||
|
|
81ff1da756 | ||
|
|
85c9b0b99b | ||
|
|
4ccac66a73 | ||
|
|
c7b9fdaff2 | ||
|
|
c7dcc20a1d | ||
|
|
bb365a5e81 | ||
|
|
e2633d0251 | ||
|
|
09c40e76b2 | ||
|
|
abc3e71440 | ||
|
|
d13596c35c | ||
|
|
7d5dcf061c | ||
|
|
6206e483a9 | ||
|
|
f1ecc61de3 | ||
|
|
92a6a3a916 | ||
|
|
8a89f3b340 | ||
|
|
a93e87493f | ||
|
|
c7032bceba | ||
|
|
0cd7528284 | ||
|
|
2309b8eb3f | ||
|
|
dbd1bdabc2 | ||
|
|
093d595fc5 | ||
|
|
c38758d61a | ||
|
|
6034b12af6 | ||
|
|
972654dc78 | ||
|
|
ec417b0dac | ||
|
|
2e9352dc12 | ||
|
|
566b263d0a | ||
|
|
61b42b4fea | ||
|
|
a45de018fb | ||
|
|
bfe6987867 | ||
|
|
b6567ab5fc | ||
|
|
f71c2fbe94 | ||
|
|
aeb03f50ba | ||
|
|
734db423ee | ||
|
|
4f47dbfe14 | ||
|
|
d23bf45310 | ||
|
|
9c366881f1 | ||
|
|
9dd482618b | ||
|
|
84cc01566d | ||
|
|
ac7b912b45 | ||
|
|
62852f1b2f | ||
|
|
b659a0f06d | ||
|
|
fb3620a378 | ||
|
|
9d56e13818 | ||
|
|
43c5a11271 | ||
|
|
ac957ce599 | ||
|
|
3567906fcd | ||
|
|
be6801d98f | ||
|
|
bb9b242d0a | ||
|
|
5f27d3b9aa | ||
|
|
93af0e9d19 | ||
|
|
398e2a896f |
@@ -80,8 +80,9 @@ ENV SYSTEM_SERVICES=/services
|
|||||||
ENV SYSTEM_SERVICES_SCRIPTS=${SYSTEM_SERVICES}/scripts
|
ENV SYSTEM_SERVICES_SCRIPTS=${SYSTEM_SERVICES}/scripts
|
||||||
ENV SYSTEM_SERVICES_CONFIG=${SYSTEM_SERVICES}/config
|
ENV SYSTEM_SERVICES_CONFIG=${SYSTEM_SERVICES}/config
|
||||||
ENV SYSTEM_NGINX_CONFIG=${SYSTEM_SERVICES_CONFIG}/nginx
|
ENV SYSTEM_NGINX_CONFIG=${SYSTEM_SERVICES_CONFIG}/nginx
|
||||||
ENV SYSTEM_NGINX_CONFIG_FILE=${SYSTEM_NGINX_CONFIG}/nginx.conf
|
ENV SYSTEM_NGINX_CONFIG_TEMPLATE=${SYSTEM_NGINX_CONFIG}/netalertx.conf.template
|
||||||
ENV SYSTEM_SERVICES_ACTIVE_CONFIG=/tmp/nginx/active-config
|
ENV SYSTEM_SERVICES_ACTIVE_CONFIG=/tmp/nginx/active-config
|
||||||
|
ENV SYSTEM_SERVICES_ACTIVE_CONFIG_FILE=${SYSTEM_SERVICES_ACTIVE_CONFIG}/nginx.conf
|
||||||
ENV SYSTEM_SERVICES_PHP_FOLDER=${SYSTEM_SERVICES_CONFIG}/php
|
ENV SYSTEM_SERVICES_PHP_FOLDER=${SYSTEM_SERVICES_CONFIG}/php
|
||||||
ENV SYSTEM_SERVICES_PHP_FPM_D=${SYSTEM_SERVICES_PHP_FOLDER}/php-fpm.d
|
ENV SYSTEM_SERVICES_PHP_FPM_D=${SYSTEM_SERVICES_PHP_FOLDER}/php-fpm.d
|
||||||
ENV SYSTEM_SERVICES_CROND=${SYSTEM_SERVICES_CONFIG}/crond
|
ENV SYSTEM_SERVICES_CROND=${SYSTEM_SERVICES_CONFIG}/crond
|
||||||
@@ -138,6 +139,9 @@ RUN install -d -o ${NETALERTX_USER} -g ${NETALERTX_GROUP} -m 700 ${READ_WRITE_FO
|
|||||||
sh -c "find ${NETALERTX_APP} -type f \( -name '*.sh' -o -name 'speedtest-cli' \) \
|
sh -c "find ${NETALERTX_APP} -type f \( -name '*.sh' -o -name 'speedtest-cli' \) \
|
||||||
-exec chmod 750 {} \;"
|
-exec chmod 750 {} \;"
|
||||||
|
|
||||||
|
# Copy version information into the image
|
||||||
|
COPY --chown=${NETALERTX_USER}:${NETALERTX_GROUP} .[V]ERSION ${NETALERTX_APP}/.VERSION
|
||||||
|
|
||||||
# Copy the virtualenv from the builder stage
|
# Copy the virtualenv from the builder stage
|
||||||
COPY --from=builder --chown=20212:20212 ${VIRTUAL_ENV} ${VIRTUAL_ENV}
|
COPY --from=builder --chown=20212:20212 ${VIRTUAL_ENV} ${VIRTUAL_ENV}
|
||||||
|
|
||||||
@@ -146,7 +150,13 @@ COPY --from=builder --chown=20212:20212 ${VIRTUAL_ENV} ${VIRTUAL_ENV}
|
|||||||
# This is done after the copy of the venv to ensure the venv is in place
|
# This is done after the copy of the venv to ensure the venv is in place
|
||||||
# although it may be quicker to do it before the copy, it keeps the image
|
# although it may be quicker to do it before the copy, it keeps the image
|
||||||
# layers smaller to do it after.
|
# layers smaller to do it after.
|
||||||
RUN apk add libcap && \
|
RUN if [ -f .VERSION ]; then \
|
||||||
|
cp .VERSION ${NETALERTX_APP}/.VERSION; \
|
||||||
|
else \
|
||||||
|
echo "DEVELOPMENT 00000000" > ${NETALERTX_APP}/.VERSION; \
|
||||||
|
fi && \
|
||||||
|
chown 20212:20212 ${NETALERTX_APP}/.VERSION && \
|
||||||
|
apk add libcap && \
|
||||||
setcap cap_net_raw+ep /bin/busybox && \
|
setcap cap_net_raw+ep /bin/busybox && \
|
||||||
setcap cap_net_raw,cap_net_admin+eip /usr/bin/nmap && \
|
setcap cap_net_raw,cap_net_admin+eip /usr/bin/nmap && \
|
||||||
setcap cap_net_raw,cap_net_admin+eip /usr/bin/arp-scan && \
|
setcap cap_net_raw,cap_net_admin+eip /usr/bin/arp-scan && \
|
||||||
|
|||||||
@@ -1,118 +0,0 @@
|
|||||||
# DO NOT MODIFY THIS FILE DIRECTLY. IT IS AUTO-GENERATED BY .devcontainer/scripts/generate-configs.sh
|
|
||||||
# Generated from: install/production-filesystem/services/config/nginx/netalertx.conf.template
|
|
||||||
|
|
||||||
# Set number of worker processes automatically based on number of CPU cores.
|
|
||||||
worker_processes auto;
|
|
||||||
|
|
||||||
# Enables the use of JIT for regular expressions to speed-up their processing.
|
|
||||||
pcre_jit on;
|
|
||||||
|
|
||||||
# Configures default error logger.
|
|
||||||
error_log /tmp/log/nginx-error.log warn;
|
|
||||||
|
|
||||||
pid /tmp/run/nginx.pid;
|
|
||||||
|
|
||||||
events {
|
|
||||||
# The maximum number of simultaneous connections that can be opened by
|
|
||||||
# a worker process.
|
|
||||||
worker_connections 1024;
|
|
||||||
}
|
|
||||||
|
|
||||||
http {
|
|
||||||
# Mapping of temp paths for various nginx modules.
|
|
||||||
client_body_temp_path /tmp/nginx/client_body;
|
|
||||||
proxy_temp_path /tmp/nginx/proxy;
|
|
||||||
fastcgi_temp_path /tmp/nginx/fastcgi;
|
|
||||||
uwsgi_temp_path /tmp/nginx/uwsgi;
|
|
||||||
scgi_temp_path /tmp/nginx/scgi;
|
|
||||||
|
|
||||||
# Includes mapping of file name extensions to MIME types of responses
|
|
||||||
# and defines the default type.
|
|
||||||
include /services/config/nginx/mime.types;
|
|
||||||
default_type application/octet-stream;
|
|
||||||
|
|
||||||
# Name servers used to resolve names of upstream servers into addresses.
|
|
||||||
# It's also needed when using tcpsocket and udpsocket in Lua modules.
|
|
||||||
#resolver 1.1.1.1 1.0.0.1 [2606:4700:4700::1111] [2606:4700:4700::1001];
|
|
||||||
|
|
||||||
# Don't tell nginx version to the clients. Default is 'on'.
|
|
||||||
server_tokens off;
|
|
||||||
|
|
||||||
# Specifies the maximum accepted body size of a client request, as
|
|
||||||
# indicated by the request header Content-Length. If the stated content
|
|
||||||
# length is greater than this size, then the client receives the HTTP
|
|
||||||
# error code 413. Set to 0 to disable. Default is '1m'.
|
|
||||||
client_max_body_size 1m;
|
|
||||||
|
|
||||||
# Sendfile copies data between one FD and other from within the kernel,
|
|
||||||
# which is more efficient than read() + write(). Default is off.
|
|
||||||
sendfile on;
|
|
||||||
|
|
||||||
# Causes nginx to attempt to send its HTTP response head in one packet,
|
|
||||||
# instead of using partial frames. Default is 'off'.
|
|
||||||
tcp_nopush on;
|
|
||||||
|
|
||||||
|
|
||||||
# Enables the specified protocols. Default is TLSv1 TLSv1.1 TLSv1.2.
|
|
||||||
# TIP: If you're not obligated to support ancient clients, remove TLSv1.1.
|
|
||||||
ssl_protocols TLSv1.2 TLSv1.3;
|
|
||||||
|
|
||||||
# Path of the file with Diffie-Hellman parameters for EDH ciphers.
|
|
||||||
# TIP: Generate with: `openssl dhparam -out /etc/ssl/nginx/dh2048.pem 2048`
|
|
||||||
#ssl_dhparam /etc/ssl/nginx/dh2048.pem;
|
|
||||||
|
|
||||||
# Specifies that our cipher suits should be preferred over client ciphers.
|
|
||||||
# Default is 'off'.
|
|
||||||
ssl_prefer_server_ciphers on;
|
|
||||||
|
|
||||||
# Enables a shared SSL cache with size that can hold around 8000 sessions.
|
|
||||||
# Default is 'none'.
|
|
||||||
ssl_session_cache shared:SSL:2m;
|
|
||||||
|
|
||||||
# Specifies a time during which a client may reuse the session parameters.
|
|
||||||
# Default is '5m'.
|
|
||||||
ssl_session_timeout 1h;
|
|
||||||
|
|
||||||
# Disable TLS session tickets (they are insecure). Default is 'on'.
|
|
||||||
ssl_session_tickets off;
|
|
||||||
|
|
||||||
|
|
||||||
# Enable gzipping of responses.
|
|
||||||
gzip on;
|
|
||||||
|
|
||||||
# Set the Vary HTTP header as defined in the RFC 2616. Default is 'off'.
|
|
||||||
gzip_vary on;
|
|
||||||
|
|
||||||
|
|
||||||
# Specifies the main log format.
|
|
||||||
log_format main '$remote_addr - $remote_user [$time_local] "$request" '
|
|
||||||
'$status $body_bytes_sent "$http_referer" '
|
|
||||||
'"$http_user_agent" "$http_x_forwarded_for"';
|
|
||||||
|
|
||||||
# Sets the path, format, and configuration for a buffered log write.
|
|
||||||
access_log /tmp/log/nginx-access.log main;
|
|
||||||
|
|
||||||
|
|
||||||
# Virtual host config
|
|
||||||
server {
|
|
||||||
listen 0.0.0.0:20211 default_server;
|
|
||||||
large_client_header_buffers 4 16k;
|
|
||||||
root /app/front;
|
|
||||||
index index.php;
|
|
||||||
add_header X-Forwarded-Prefix "/app" always;
|
|
||||||
|
|
||||||
location ~* \.php$ {
|
|
||||||
# Set Cache-Control header to prevent caching on the first load
|
|
||||||
add_header Cache-Control "no-store";
|
|
||||||
fastcgi_pass unix:/tmp/run/php.sock;
|
|
||||||
include /services/config/nginx/fastcgi_params;
|
|
||||||
fastcgi_param SCRIPT_FILENAME $document_root$fastcgi_script_name;
|
|
||||||
fastcgi_param SCRIPT_NAME $fastcgi_script_name;
|
|
||||||
|
|
||||||
fastcgi_param PHP_VALUE "xdebug.remote_enable=1";
|
|
||||||
fastcgi_connect_timeout 75;
|
|
||||||
fastcgi_send_timeout 600;
|
|
||||||
fastcgi_read_timeout 600;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -30,33 +30,4 @@ cat "${DEVCONTAINER_DIR}/resources/devcontainer-Dockerfile" >> "$OUT_FILE"
|
|||||||
|
|
||||||
echo "Generated $OUT_FILE using root dir $ROOT_DIR" >&2
|
echo "Generated $OUT_FILE using root dir $ROOT_DIR" >&2
|
||||||
|
|
||||||
# Generate devcontainer nginx config from production template
|
|
||||||
echo "Generating devcontainer nginx config"
|
|
||||||
NGINX_TEMPLATE="${ROOT_DIR}/install/production-filesystem/services/config/nginx/netalertx.conf.template"
|
|
||||||
NGINX_OUT="${DEVCONTAINER_DIR}/resources/devcontainer-overlay/services/config/nginx/netalertx.conf.template"
|
|
||||||
|
|
||||||
# Create output directory if it doesn't exist
|
|
||||||
mkdir -p "$(dirname "$NGINX_OUT")"
|
|
||||||
|
|
||||||
# Start with header comment
|
|
||||||
cat > "$NGINX_OUT" << 'EOF'
|
|
||||||
# DO NOT MODIFY THIS FILE DIRECTLY. IT IS AUTO-GENERATED BY .devcontainer/scripts/generate-configs.sh
|
|
||||||
# Generated from: install/production-filesystem/services/config/nginx/netalertx.conf.template
|
|
||||||
|
|
||||||
EOF
|
|
||||||
|
|
||||||
# Process the template: replace listen directive and inject Xdebug params
|
|
||||||
sed 's/${LISTEN_ADDR}:${PORT}/0.0.0.0:20211/g' "$NGINX_TEMPLATE" | \
|
|
||||||
awk '
|
|
||||||
/fastcgi_param SCRIPT_NAME \$fastcgi_script_name;/ {
|
|
||||||
print $0
|
|
||||||
print ""
|
|
||||||
print " fastcgi_param PHP_VALUE \"xdebug.remote_enable=1\";"
|
|
||||||
next
|
|
||||||
}
|
|
||||||
{ print }
|
|
||||||
' >> "$NGINX_OUT"
|
|
||||||
|
|
||||||
echo "Generated $NGINX_OUT from $NGINX_TEMPLATE" >&2
|
|
||||||
|
|
||||||
echo "Done."
|
echo "Done."
|
||||||
@@ -50,9 +50,6 @@ sudo chmod 777 /tmp/log /tmp/api /tmp/run /tmp/nginx
|
|||||||
|
|
||||||
|
|
||||||
|
|
||||||
sudo rm -rf "${SYSTEM_NGINX_CONFIG}/conf.active"
|
|
||||||
sudo ln -s "${SYSTEM_SERVICES_ACTIVE_CONFIG}" "${SYSTEM_NGINX_CONFIG}/conf.active"
|
|
||||||
|
|
||||||
sudo rm -rf /entrypoint.d
|
sudo rm -rf /entrypoint.d
|
||||||
sudo ln -s "${SOURCE_DIR}/install/production-filesystem/entrypoint.d" /entrypoint.d
|
sudo ln -s "${SOURCE_DIR}/install/production-filesystem/entrypoint.d" /entrypoint.d
|
||||||
|
|
||||||
@@ -67,6 +64,7 @@ for dir in \
|
|||||||
"${SYSTEM_SERVICES_RUN_LOG}" \
|
"${SYSTEM_SERVICES_RUN_LOG}" \
|
||||||
"${SYSTEM_SERVICES_ACTIVE_CONFIG}" \
|
"${SYSTEM_SERVICES_ACTIVE_CONFIG}" \
|
||||||
"${NETALERTX_PLUGINS_LOG}" \
|
"${NETALERTX_PLUGINS_LOG}" \
|
||||||
|
"${SYSTEM_SERVICES_RUN_TMP}" \
|
||||||
"/tmp/nginx/client_body" \
|
"/tmp/nginx/client_body" \
|
||||||
"/tmp/nginx/proxy" \
|
"/tmp/nginx/proxy" \
|
||||||
"/tmp/nginx/fastcgi" \
|
"/tmp/nginx/fastcgi" \
|
||||||
@@ -75,9 +73,6 @@ for dir in \
|
|||||||
sudo install -d -m 777 "${dir}"
|
sudo install -d -m 777 "${dir}"
|
||||||
done
|
done
|
||||||
|
|
||||||
# Create nginx temp subdirs with permissions
|
|
||||||
sudo mkdir -p "${SYSTEM_SERVICES_RUN_TMP}/client_body" "${SYSTEM_SERVICES_RUN_TMP}/proxy" "${SYSTEM_SERVICES_RUN_TMP}/fastcgi" "${SYSTEM_SERVICES_RUN_TMP}/uwsgi" "${SYSTEM_SERVICES_RUN_TMP}/scgi"
|
|
||||||
sudo chmod -R 777 "${SYSTEM_SERVICES_RUN_TMP}"
|
|
||||||
|
|
||||||
for var in "${LOG_FILES[@]}"; do
|
for var in "${LOG_FILES[@]}"; do
|
||||||
path=${!var}
|
path=${!var}
|
||||||
|
|||||||
14
.github/ISSUE_TEMPLATE/i-have-an-issue.yml
vendored
14
.github/ISSUE_TEMPLATE/i-have-an-issue.yml
vendored
@@ -44,7 +44,7 @@ body:
|
|||||||
required: false
|
required: false
|
||||||
- type: textarea
|
- type: textarea
|
||||||
attributes:
|
attributes:
|
||||||
label: app.conf
|
label: Relevant `app.conf` settings
|
||||||
description: |
|
description: |
|
||||||
Paste relevant `app.conf`settings (remove sensitive info)
|
Paste relevant `app.conf`settings (remove sensitive info)
|
||||||
render: python
|
render: python
|
||||||
@@ -55,7 +55,7 @@ body:
|
|||||||
label: docker-compose.yml
|
label: docker-compose.yml
|
||||||
description: |
|
description: |
|
||||||
Paste your `docker-compose.yml`
|
Paste your `docker-compose.yml`
|
||||||
render: python
|
render: yaml
|
||||||
validations:
|
validations:
|
||||||
required: false
|
required: false
|
||||||
- type: dropdown
|
- type: dropdown
|
||||||
@@ -79,7 +79,11 @@ body:
|
|||||||
required: true
|
required: true
|
||||||
- type: textarea
|
- type: textarea
|
||||||
attributes:
|
attributes:
|
||||||
label: app.log
|
label: Relevant `app.log` section
|
||||||
|
value: |
|
||||||
|
```
|
||||||
|
PASTE LOG HERE. Using the triple backticks preserves format.
|
||||||
|
```
|
||||||
description: |
|
description: |
|
||||||
Logs with debug enabled (https://github.com/jokob-sk/NetAlertX/blob/main/docs/DEBUG_TIPS.md) ⚠
|
Logs with debug enabled (https://github.com/jokob-sk/NetAlertX/blob/main/docs/DEBUG_TIPS.md) ⚠
|
||||||
***Generally speaking, all bug reports should have logs provided.***
|
***Generally speaking, all bug reports should have logs provided.***
|
||||||
@@ -93,6 +97,10 @@ body:
|
|||||||
label: Docker Logs
|
label: Docker Logs
|
||||||
description: |
|
description: |
|
||||||
You can retrieve the logs from Portainer -> Containers -> your NetAlertX container -> Logs or by running `sudo docker logs netalertx`.
|
You can retrieve the logs from Portainer -> Containers -> your NetAlertX container -> Logs or by running `sudo docker logs netalertx`.
|
||||||
|
value: |
|
||||||
|
```
|
||||||
|
PASTE DOCKER LOG HERE. Using the triple backticks preserves format.
|
||||||
|
```
|
||||||
validations:
|
validations:
|
||||||
required: true
|
required: true
|
||||||
|
|
||||||
|
|||||||
60
.github/workflows/code_checks.yml
vendored
60
.github/workflows/code_checks.yml
vendored
@@ -21,7 +21,8 @@ jobs:
|
|||||||
run: |
|
run: |
|
||||||
echo "🔍 Checking for incorrect absolute '/php/' URLs (should be 'php/' or './php/')..."
|
echo "🔍 Checking for incorrect absolute '/php/' URLs (should be 'php/' or './php/')..."
|
||||||
|
|
||||||
MATCHES=$(grep -rE "['\"]\/php\/" --include=\*.{js,php,html} ./front | grep -E "\.get|\.post|\.ajax|fetch|url\s*:") || true
|
MATCHES=$(grep -rE "['\"]/php/" --include=\*.{js,php,html} ./front \
|
||||||
|
| grep -E "\.get|\.post|\.ajax|fetch|url\s*:") || true
|
||||||
|
|
||||||
if [ -n "$MATCHES" ]; then
|
if [ -n "$MATCHES" ]; then
|
||||||
echo "$MATCHES"
|
echo "$MATCHES"
|
||||||
@@ -39,3 +40,60 @@ jobs:
|
|||||||
echo "🔍 Checking Python syntax..."
|
echo "🔍 Checking Python syntax..."
|
||||||
find . -name "*.py" -print0 | xargs -0 -n1 python3 -m py_compile
|
find . -name "*.py" -print0 | xargs -0 -n1 python3 -m py_compile
|
||||||
|
|
||||||
|
lint:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- name: Checkout code
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Set up Python
|
||||||
|
uses: actions/setup-python@v5
|
||||||
|
with:
|
||||||
|
python-version: '3.11'
|
||||||
|
|
||||||
|
- name: Install linting tools
|
||||||
|
run: |
|
||||||
|
# Python linting
|
||||||
|
pip install flake8
|
||||||
|
# Docker linting
|
||||||
|
wget -O /tmp/hadolint https://github.com/hadolint/hadolint/releases/latest/download/hadolint-Linux-x86_64
|
||||||
|
chmod +x /tmp/hadolint
|
||||||
|
# PHP and shellcheck for syntax checking
|
||||||
|
sudo apt-get update && sudo apt-get install -y php-cli shellcheck
|
||||||
|
|
||||||
|
- name: Shell check
|
||||||
|
continue-on-error: true
|
||||||
|
run: |
|
||||||
|
echo "🔍 Checking shell scripts..."
|
||||||
|
find . -name "*.sh" -exec shellcheck {} \;
|
||||||
|
|
||||||
|
- name: Python lint
|
||||||
|
continue-on-error: true
|
||||||
|
run: |
|
||||||
|
echo "🔍 Linting Python code..."
|
||||||
|
flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics
|
||||||
|
flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics
|
||||||
|
|
||||||
|
- name: PHP check
|
||||||
|
continue-on-error: true
|
||||||
|
run: |
|
||||||
|
echo "🔍 Checking PHP syntax..."
|
||||||
|
find . -name "*.php" -exec php -l {} \;
|
||||||
|
|
||||||
|
- name: Docker lint
|
||||||
|
continue-on-error: true
|
||||||
|
run: |
|
||||||
|
echo "🔍 Linting Dockerfiles..."
|
||||||
|
/tmp/hadolint Dockerfile* || true
|
||||||
|
|
||||||
|
docker-tests:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- name: Checkout code
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Run Docker-based tests
|
||||||
|
run: |
|
||||||
|
echo "🐳 Running Docker-based tests..."
|
||||||
|
chmod +x ./run_docker_tests.sh
|
||||||
|
./run_docker_tests.sh
|
||||||
|
|||||||
6
.github/workflows/docker_dev.yml
vendored
6
.github/workflows/docker_dev.yml
vendored
@@ -3,12 +3,12 @@ name: docker
|
|||||||
on:
|
on:
|
||||||
push:
|
push:
|
||||||
branches:
|
branches:
|
||||||
- next_release
|
- main
|
||||||
tags:
|
tags:
|
||||||
- '*.*.*'
|
- '*.*.*'
|
||||||
pull_request:
|
pull_request:
|
||||||
branches:
|
branches:
|
||||||
- next_release
|
- main
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
docker_dev:
|
docker_dev:
|
||||||
@@ -83,7 +83,7 @@ jobs:
|
|||||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||||
|
|
||||||
- name: Build and push
|
- name: Build and push
|
||||||
uses: docker/build-push-action@v3
|
uses: docker/build-push-action@v6
|
||||||
with:
|
with:
|
||||||
context: .
|
context: .
|
||||||
platforms: linux/amd64,linux/arm64,linux/arm/v7,linux/arm/v6
|
platforms: linux/amd64,linux/arm64,linux/arm/v7,linux/arm/v6
|
||||||
|
|||||||
2
.github/workflows/docker_prod.yml
vendored
2
.github/workflows/docker_prod.yml
vendored
@@ -72,7 +72,7 @@ jobs:
|
|||||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||||
|
|
||||||
- name: Build and push
|
- name: Build and push
|
||||||
uses: docker/build-push-action@v3
|
uses: docker/build-push-action@v6
|
||||||
with:
|
with:
|
||||||
context: .
|
context: .
|
||||||
platforms: linux/amd64,linux/arm64,linux/arm/v7,linux/arm/v6
|
platforms: linux/amd64,linux/arm64,linux/arm/v7,linux/arm/v6
|
||||||
|
|||||||
13
Dockerfile
13
Dockerfile
@@ -77,8 +77,9 @@ ENV SYSTEM_SERVICES=/services
|
|||||||
ENV SYSTEM_SERVICES_SCRIPTS=${SYSTEM_SERVICES}/scripts
|
ENV SYSTEM_SERVICES_SCRIPTS=${SYSTEM_SERVICES}/scripts
|
||||||
ENV SYSTEM_SERVICES_CONFIG=${SYSTEM_SERVICES}/config
|
ENV SYSTEM_SERVICES_CONFIG=${SYSTEM_SERVICES}/config
|
||||||
ENV SYSTEM_NGINX_CONFIG=${SYSTEM_SERVICES_CONFIG}/nginx
|
ENV SYSTEM_NGINX_CONFIG=${SYSTEM_SERVICES_CONFIG}/nginx
|
||||||
ENV SYSTEM_NGINX_CONFIG_FILE=${SYSTEM_NGINX_CONFIG}/nginx.conf
|
ENV SYSTEM_NGINX_CONFIG_TEMPLATE=${SYSTEM_NGINX_CONFIG}/netalertx.conf.template
|
||||||
ENV SYSTEM_SERVICES_ACTIVE_CONFIG=/tmp/nginx/active-config
|
ENV SYSTEM_SERVICES_ACTIVE_CONFIG=/tmp/nginx/active-config
|
||||||
|
ENV SYSTEM_SERVICES_ACTIVE_CONFIG_FILE=${SYSTEM_SERVICES_ACTIVE_CONFIG}/nginx.conf
|
||||||
ENV SYSTEM_SERVICES_PHP_FOLDER=${SYSTEM_SERVICES_CONFIG}/php
|
ENV SYSTEM_SERVICES_PHP_FOLDER=${SYSTEM_SERVICES_CONFIG}/php
|
||||||
ENV SYSTEM_SERVICES_PHP_FPM_D=${SYSTEM_SERVICES_PHP_FOLDER}/php-fpm.d
|
ENV SYSTEM_SERVICES_PHP_FPM_D=${SYSTEM_SERVICES_PHP_FOLDER}/php-fpm.d
|
||||||
ENV SYSTEM_SERVICES_CROND=${SYSTEM_SERVICES_CONFIG}/crond
|
ENV SYSTEM_SERVICES_CROND=${SYSTEM_SERVICES_CONFIG}/crond
|
||||||
@@ -136,7 +137,7 @@ RUN install -d -o ${NETALERTX_USER} -g ${NETALERTX_GROUP} -m 700 ${READ_WRITE_FO
|
|||||||
-exec chmod 750 {} \;"
|
-exec chmod 750 {} \;"
|
||||||
|
|
||||||
# Copy version information into the image
|
# Copy version information into the image
|
||||||
COPY --chown=${NETALERTX_USER}:${NETALERTX_GROUP} .VERSION ${NETALERTX_APP}/.VERSION
|
COPY --chown=${NETALERTX_USER}:${NETALERTX_GROUP} .[V]ERSION ${NETALERTX_APP}/.VERSION
|
||||||
|
|
||||||
# Copy the virtualenv from the builder stage
|
# Copy the virtualenv from the builder stage
|
||||||
COPY --from=builder --chown=20212:20212 ${VIRTUAL_ENV} ${VIRTUAL_ENV}
|
COPY --from=builder --chown=20212:20212 ${VIRTUAL_ENV} ${VIRTUAL_ENV}
|
||||||
@@ -146,7 +147,13 @@ COPY --from=builder --chown=20212:20212 ${VIRTUAL_ENV} ${VIRTUAL_ENV}
|
|||||||
# This is done after the copy of the venv to ensure the venv is in place
|
# This is done after the copy of the venv to ensure the venv is in place
|
||||||
# although it may be quicker to do it before the copy, it keeps the image
|
# although it may be quicker to do it before the copy, it keeps the image
|
||||||
# layers smaller to do it after.
|
# layers smaller to do it after.
|
||||||
RUN apk add libcap && \
|
RUN if [ -f .VERSION ]; then \
|
||||||
|
cp .VERSION ${NETALERTX_APP}/.VERSION; \
|
||||||
|
else \
|
||||||
|
echo "DEVELOPMENT 00000000" > ${NETALERTX_APP}/.VERSION; \
|
||||||
|
fi && \
|
||||||
|
chown 20212:20212 ${NETALERTX_APP}/.VERSION && \
|
||||||
|
apk add libcap && \
|
||||||
setcap cap_net_raw+ep /bin/busybox && \
|
setcap cap_net_raw+ep /bin/busybox && \
|
||||||
setcap cap_net_raw,cap_net_admin+eip /usr/bin/nmap && \
|
setcap cap_net_raw,cap_net_admin+eip /usr/bin/nmap && \
|
||||||
setcap cap_net_raw,cap_net_admin+eip /usr/bin/arp-scan && \
|
setcap cap_net_raw,cap_net_admin+eip /usr/bin/arp-scan && \
|
||||||
|
|||||||
13
README.md
13
README.md
@@ -33,16 +33,21 @@ Get visibility of what's going on on your WIFI/LAN network and enable presence d
|
|||||||
|
|
||||||
## 🚀 Quick Start
|
## 🚀 Quick Start
|
||||||
|
|
||||||
|
> [!WARNING]
|
||||||
|
> ⚠️ **Important:** The documentation has been recently updated and some instructions may have changed.
|
||||||
|
> If you are using the currently live production image, please follow the instructions on [Docker Hub](https://hub.docker.com/r/jokobsk/netalertx) for building and running the container.
|
||||||
|
> These docs reflect the latest development version and may differ from the production image.
|
||||||
|
|
||||||
Start NetAlertX in seconds with Docker:
|
Start NetAlertX in seconds with Docker:
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
docker run -d --rm --network=host \
|
docker run -d --rm --network=host \
|
||||||
-v local_path/config:/data/config \
|
-v /local_data_dir/config:/data/config \
|
||||||
-v local_path/db:/data/db \
|
-v /local_data_dir/db:/data/db \
|
||||||
|
-v /etc/localtime:/etc/localtime \
|
||||||
--mount type=tmpfs,target=/tmp/api \
|
--mount type=tmpfs,target=/tmp/api \
|
||||||
-e PUID=200 -e PGID=300 \
|
|
||||||
-e TZ=Europe/Berlin \
|
|
||||||
-e PORT=20211 \
|
-e PORT=20211 \
|
||||||
|
-e APP_CONF_OVERRIDE={"GRAPHQL_PORT":"20214"} \
|
||||||
ghcr.io/jokob-sk/netalertx:latest
|
ghcr.io/jokob-sk/netalertx:latest
|
||||||
```
|
```
|
||||||
|
|
||||||
|
|||||||
@@ -52,7 +52,7 @@ query GetDevices($options: PageQueryOptionsInput) {
|
|||||||
}
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
See also: [Debugging GraphQL issues](./DEBUG_GRAPHQL.md)
|
See also: [Debugging GraphQL issues](./DEBUG_API_SERVER.md)
|
||||||
|
|
||||||
### `curl` Command
|
### `curl` Command
|
||||||
|
|
||||||
|
|||||||
@@ -2,6 +2,15 @@
|
|||||||
|
|
||||||
Often if the application is misconfigured the `Loading...` dialog is continuously displayed. This is most likely caused by the backed failing to start. The **Maintenance -> Logs** section should give you more details on what's happening. If there is no exception, check the Portainer log, or start the container in the foreground (without the `-d` parameter) to observe any exceptions. It's advisable to enable `trace` or `debug`. Check the [Debug tips](./DEBUG_TIPS.md) on detailed instructions.
|
Often if the application is misconfigured the `Loading...` dialog is continuously displayed. This is most likely caused by the backed failing to start. The **Maintenance -> Logs** section should give you more details on what's happening. If there is no exception, check the Portainer log, or start the container in the foreground (without the `-d` parameter) to observe any exceptions. It's advisable to enable `trace` or `debug`. Check the [Debug tips](./DEBUG_TIPS.md) on detailed instructions.
|
||||||
|
|
||||||
|
The issue might be related to the backend server, so please check [Debugging GraphQL issues](./DEBUG_API_SERVER.md).
|
||||||
|
|
||||||
|
Please also check the browser logs (usually accessible by pressing `F12`):
|
||||||
|
|
||||||
|
1. Switch to the Console tab and refresh the page
|
||||||
|
2. Switch to teh Network tab and refresh the page
|
||||||
|
|
||||||
|
If you are not sure how to resolve the errors yourself, please post screenshots of the above into the issue, or discord discussion, where your problem is being solved.
|
||||||
|
|
||||||
### Incorrect SCAN_SUBNETS
|
### Incorrect SCAN_SUBNETS
|
||||||
|
|
||||||
One of the most common issues is not configuring `SCAN_SUBNETS` correctly. If this setting is misconfigured you will only see one or two devices in your devices list after a scan. Please read the [subnets docs](./SUBNETS.md) carefully to resolve this.
|
One of the most common issues is not configuring `SCAN_SUBNETS` correctly. If this setting is misconfigured you will only see one or two devices in your devices list after a scan. Please read the [subnets docs](./SUBNETS.md) carefully to resolve this.
|
||||||
|
|||||||
13
docs/DEBUG_GRAPHQL.md → docs/DEBUG_API_SERVER.md
Executable file → Normal file
13
docs/DEBUG_GRAPHQL.md → docs/DEBUG_API_SERVER.md
Executable file → Normal file
@@ -12,7 +12,7 @@ As a first troubleshooting step try changing the default `GRAPHQL_PORT` setting.
|
|||||||
|
|
||||||
Ideally use the Settings UI to update the setting under General -> Core -> GraphQL port:
|
Ideally use the Settings UI to update the setting under General -> Core -> GraphQL port:
|
||||||
|
|
||||||

|

|
||||||
|
|
||||||
You might need to temporarily stop other applications or NetAlertX instances causing conflicts to update the setting. The `API_TOKEN` is used to authenticate any API calls, including GraphQL requests.
|
You might need to temporarily stop other applications or NetAlertX instances causing conflicts to update the setting. The `API_TOKEN` is used to authenticate any API calls, including GraphQL requests.
|
||||||
|
|
||||||
@@ -20,7 +20,7 @@ You might need to temporarily stop other applications or NetAlertX instances cau
|
|||||||
|
|
||||||
If the UI is not accessible, you can directly edit the `app.conf` file in your `/config` folder:
|
If the UI is not accessible, you can directly edit the `app.conf` file in your `/config` folder:
|
||||||
|
|
||||||

|

|
||||||
|
|
||||||
### Using a docker variable
|
### Using a docker variable
|
||||||
|
|
||||||
@@ -29,7 +29,6 @@ All application settings can also be initialized via the `APP_CONF_OVERRIDE` doc
|
|||||||
```yaml
|
```yaml
|
||||||
...
|
...
|
||||||
environment:
|
environment:
|
||||||
- TZ=Europe/Berlin
|
|
||||||
- PORT=20213
|
- PORT=20213
|
||||||
- APP_CONF_OVERRIDE={"GRAPHQL_PORT":"20214"}
|
- APP_CONF_OVERRIDE={"GRAPHQL_PORT":"20214"}
|
||||||
...
|
...
|
||||||
@@ -43,22 +42,22 @@ There are several ways to check if the GraphQL server is running.
|
|||||||
|
|
||||||
You can navigate to Maintenance -> Init Check to see if `isGraphQLServerRunning` is ticked:
|
You can navigate to Maintenance -> Init Check to see if `isGraphQLServerRunning` is ticked:
|
||||||
|
|
||||||

|

|
||||||
|
|
||||||
### Checking the Logs
|
### Checking the Logs
|
||||||
|
|
||||||
You can navigate to Maintenance -> Logs and search for `graphql` to see if it started correctly and serving requests:
|
You can navigate to Maintenance -> Logs and search for `graphql` to see if it started correctly and serving requests:
|
||||||
|
|
||||||

|

|
||||||
|
|
||||||
### Inspecting the Browser console
|
### Inspecting the Browser console
|
||||||
|
|
||||||
In your browser open the dev console (usually F12) and navigate to the Network tab where you can filter GraphQL requests (e.g., reload the Devices page).
|
In your browser open the dev console (usually F12) and navigate to the Network tab where you can filter GraphQL requests (e.g., reload the Devices page).
|
||||||
|
|
||||||

|

|
||||||
|
|
||||||
You can then inspect any of the POST requests by opening them in a new tab.
|
You can then inspect any of the POST requests by opening them in a new tab.
|
||||||
|
|
||||||

|

|
||||||
|
|
||||||
|
|
||||||
@@ -14,9 +14,9 @@ Start the container via the **terminal** with a command similar to this one:
|
|||||||
|
|
||||||
```bash
|
```bash
|
||||||
docker run --rm --network=host \
|
docker run --rm --network=host \
|
||||||
-v local/path/netalertx/config:/data/config \
|
-v /local_data_dir/netalertx/config:/data/config \
|
||||||
-v local/path/netalertx/db:/data/db \
|
-v /local_data_dir/netalertx/db:/data/db \
|
||||||
-e TZ=Europe/Berlin \
|
-v /etc/localtime:/etc/localtime \
|
||||||
-e PORT=20211 \
|
-e PORT=20211 \
|
||||||
ghcr.io/jokob-sk/netalertx:latest
|
ghcr.io/jokob-sk/netalertx:latest
|
||||||
|
|
||||||
|
|||||||
@@ -55,7 +55,6 @@ The file content should be following, with your custom values.
|
|||||||
#--------------------------------
|
#--------------------------------
|
||||||
#NETALERTX
|
#NETALERTX
|
||||||
#--------------------------------
|
#--------------------------------
|
||||||
TZ=Europe/Berlin
|
|
||||||
PORT=22222 # make sure this port is unique on your whole network
|
PORT=22222 # make sure this port is unique on your whole network
|
||||||
DEV_LOCATION=/development/NetAlertX
|
DEV_LOCATION=/development/NetAlertX
|
||||||
APP_DATA_LOCATION=/volume/docker_appdata
|
APP_DATA_LOCATION=/volume/docker_appdata
|
||||||
|
|||||||
@@ -45,7 +45,7 @@ services:
|
|||||||
# - /home/user/netalertx_data:/data:rw
|
# - /home/user/netalertx_data:/data:rw
|
||||||
|
|
||||||
- type: bind # Bind mount for timezone consistency
|
- type: bind # Bind mount for timezone consistency
|
||||||
source: /etc/localtime # Alternatively add environment TZ: America/New York
|
source: /etc/localtime
|
||||||
target: /etc/localtime
|
target: /etc/localtime
|
||||||
read_only: true
|
read_only: true
|
||||||
|
|
||||||
@@ -125,15 +125,17 @@ docker compose up
|
|||||||
|
|
||||||
### Modification 1: Use a Local Folder (Bind Mount)
|
### Modification 1: Use a Local Folder (Bind Mount)
|
||||||
|
|
||||||
By default, the baseline compose file uses "named volumes" (`netalertx_config`, `netalertx_db`). **This is the preferred method** because NetAlertX is designed to manage all configuration and database settings directly from its web UI. Named volumes let Docker handle this data cleanly without you needing to manage local file permissions or paths.
|
By default, the baseline compose file uses a single named volume (`netalertx_data`) mounted at `/data`. This single-volume layout is preferred because NetAlertX manages both configuration and the database under `/data` (for example, `/data/config` and `/data/db`) via its web UI. Using one named volume simplifies permissions and portability: Docker manages the storage and NetAlertX manages the files inside `/data`.
|
||||||
|
|
||||||
|
A two-volume layout that mounts /data/config and /data/db separately (for example, netalertx_config and netalertx_db) is supported for backward compatibility and some advanced workflows, but it is an abnormal/legacy layout and not recommended for new deployments.
|
||||||
|
|
||||||
However, if you prefer to have direct, file-level access to your configuration for manual editing, a "bind mount" is a simple alternative. This tells Docker to use a specific folder from your computer (the "host") inside the container.
|
However, if you prefer to have direct, file-level access to your configuration for manual editing, a "bind mount" is a simple alternative. This tells Docker to use a specific folder from your computer (the "host") inside the container.
|
||||||
|
|
||||||
**How to make the change:**
|
**How to make the change:**
|
||||||
|
|
||||||
1. Choose a location on your computer. For example, `/home/adam/netalertx-files`.
|
1. Choose a location on your computer. For example, `/local_data_dir`.
|
||||||
|
|
||||||
2. Create the subfolders: `mkdir -p /home/adam/netalertx-files/config` and `mkdir -p /home/adam/netalertx-files/db`.
|
2. Create the subfolders: `mkdir -p /local_data_dir/config` and `mkdir -p /local_data_dir/db`.
|
||||||
|
|
||||||
3. Edit your `docker-compose.yml` and find the `volumes:` section (the one *inside* the `netalertx:` service).
|
3. Edit your `docker-compose.yml` and find the `volumes:` section (the one *inside* the `netalertx:` service).
|
||||||
|
|
||||||
@@ -152,19 +154,19 @@ However, if you prefer to have direct, file-level access to your configuration f
|
|||||||
```
|
```
|
||||||
|
|
||||||
**After (Using a Local Folder / Bind Mount):**
|
**After (Using a Local Folder / Bind Mount):**
|
||||||
Make sure to replace `/home/adam/netalertx-files` with your actual path. The format is `<path_on_your_computer>:<path_inside_container>:<options>`.
|
Make sure to replace `/local_data_dir` with your actual path. The format is `<path_on_your_computer>:<path_inside_container>:<options>`.
|
||||||
|
|
||||||
```yaml
|
```yaml
|
||||||
...
|
...
|
||||||
volumes:
|
volumes:
|
||||||
# - netalertx_config:/data/config:rw
|
# - netalertx_config:/data/config:rw
|
||||||
# - netalertx_db:/data/db:rw
|
# - netalertx_db:/data/db:rw
|
||||||
- /home/adam/netalertx-files/config:/data/config:rw
|
- /local_data_dir/config:/data/config:rw
|
||||||
- /home/adam/netalertx-files/db:/data/db:rw
|
- /local_data_dir/db:/data/db:rw
|
||||||
...
|
...
|
||||||
```
|
```
|
||||||
|
|
||||||
Now, any files created by NetAlertX in `/data/config` will appear in your `/home/adam/netalertx-files/config` folder.
|
Now, any files created by NetAlertX in `/data/config` will appear in your `/local_data_dir/config` folder.
|
||||||
|
|
||||||
This same method works for mounting other things, like custom plugins or enterprise NGINX files, as shown in the commented-out examples in the baseline file.
|
This same method works for mounting other things, like custom plugins or enterprise NGINX files, as shown in the commented-out examples in the baseline file.
|
||||||
|
|
||||||
@@ -183,8 +185,8 @@ This method is useful for keeping your paths and other settings separate from yo
|
|||||||
services:
|
services:
|
||||||
netalertx:
|
netalertx:
|
||||||
environment:
|
environment:
|
||||||
- TZ=${TZ}
|
|
||||||
- PORT=${PORT}
|
- PORT=${PORT}
|
||||||
|
- GRAPHQL_PORT=${GRAPHQL_PORT}
|
||||||
|
|
||||||
...
|
...
|
||||||
```
|
```
|
||||||
@@ -192,11 +194,9 @@ services:
|
|||||||
**`.env` file contents:**
|
**`.env` file contents:**
|
||||||
|
|
||||||
```sh
|
```sh
|
||||||
TZ=Europe/Paris
|
|
||||||
PORT=20211
|
PORT=20211
|
||||||
NETALERTX_NETWORK_MODE=host
|
NETALERTX_NETWORK_MODE=host
|
||||||
LISTEN_ADDR=0.0.0.0
|
LISTEN_ADDR=0.0.0.0
|
||||||
PORT=20211
|
|
||||||
GRAPHQL_PORT=20212
|
GRAPHQL_PORT=20212
|
||||||
```
|
```
|
||||||
|
|
||||||
|
|||||||
@@ -23,28 +23,32 @@ Head to [https://netalertx.com/](https://netalertx.com/) for more gifs and scree
|
|||||||
> [!WARNING]
|
> [!WARNING]
|
||||||
> You will have to run the container on the `host` network and specify `SCAN_SUBNETS` unless you use other [plugin scanners](https://github.com/jokob-sk/NetAlertX/blob/main/docs/PLUGINS.md). The initial scan can take a few minutes, so please wait 5-10 minutes for the initial discovery to finish.
|
> You will have to run the container on the `host` network and specify `SCAN_SUBNETS` unless you use other [plugin scanners](https://github.com/jokob-sk/NetAlertX/blob/main/docs/PLUGINS.md). The initial scan can take a few minutes, so please wait 5-10 minutes for the initial discovery to finish.
|
||||||
|
|
||||||
```yaml
|
```bash
|
||||||
docker run -d --rm --network=host \
|
docker run -d --rm --network=host \
|
||||||
-v local_path/config:/data/config \
|
-v /local_data_dir/config:/data/config \
|
||||||
-v local_path/db:/data/db \
|
-v /local_data_dir/db:/data/db \
|
||||||
|
-v /etc/localtime:/etc/localtime \
|
||||||
--mount type=tmpfs,target=/tmp/api \
|
--mount type=tmpfs,target=/tmp/api \
|
||||||
-e PUID=200 -e PGID=300 \
|
|
||||||
-e TZ=Europe/Berlin \
|
|
||||||
-e PORT=20211 \
|
-e PORT=20211 \
|
||||||
|
-e APP_CONF_OVERRIDE={"GRAPHQL_PORT":"20214"} \
|
||||||
ghcr.io/jokob-sk/netalertx:latest
|
ghcr.io/jokob-sk/netalertx:latest
|
||||||
```
|
```
|
||||||
|
|
||||||
See alternative [docker-compose examples](https://github.com/jokob-sk/NetAlertX/blob/main/docs/DOCKER_COMPOSE.md).
|
See alternative [docker-compose examples](https://github.com/jokob-sk/NetAlertX/blob/main/docs/DOCKER_COMPOSE.md).
|
||||||
|
|
||||||
|
### Default ports
|
||||||
|
|
||||||
|
| Default | Description | How to override |
|
||||||
|
| :------------- |:-------------------------------| ----------------------------------------------------------------------------------:|
|
||||||
|
| `20211` |Port of the web interface | `-e PORT=20222` |
|
||||||
|
| `20212` |Port of the backend API server | `-e APP_CONF_OVERRIDE={"GRAPHQL_PORT":"20214"}` or via the `GRAPHQL_PORT` Setting |
|
||||||
|
|
||||||
### Docker environment variables
|
### Docker environment variables
|
||||||
|
|
||||||
| Variable | Description | Example Value |
|
| Variable | Description | Example Value |
|
||||||
| :------------- |:------------------------| -----:|
|
| :------------- |:------------------------| -----:|
|
||||||
| `PORT` |Port of the web interface | `20211` |
|
| `PORT` |Port of the web interface | `20211` |
|
||||||
| `PUID` |Application User UID | `102` |
|
|
||||||
| `PGID` |Application User GID | `82` |
|
|
||||||
| `LISTEN_ADDR` |Set the specific IP Address for the listener address for the nginx webserver (web interface). This could be useful when using multiple subnets to hide the web interface from all untrusted networks. | `0.0.0.0` |
|
| `LISTEN_ADDR` |Set the specific IP Address for the listener address for the nginx webserver (web interface). This could be useful when using multiple subnets to hide the web interface from all untrusted networks. | `0.0.0.0` |
|
||||||
|`TZ` |Time zone to display stats correctly. Find your time zone [here](https://en.wikipedia.org/wiki/List_of_tz_database_time_zones) | `Europe/Berlin` |
|
|
||||||
|`LOADED_PLUGINS` | Default [plugins](https://github.com/jokob-sk/NetAlertX/blob/main/docs/PLUGINS.md) to load. Plugins cannot be loaded with `APP_CONF_OVERRIDE`, you need to use this variable instead and then specify the plugins settings with `APP_CONF_OVERRIDE`. | `["PIHOLE","ASUSWRT"]` |
|
|`LOADED_PLUGINS` | Default [plugins](https://github.com/jokob-sk/NetAlertX/blob/main/docs/PLUGINS.md) to load. Plugins cannot be loaded with `APP_CONF_OVERRIDE`, you need to use this variable instead and then specify the plugins settings with `APP_CONF_OVERRIDE`. | `["PIHOLE","ASUSWRT"]` |
|
||||||
|`APP_CONF_OVERRIDE` | JSON override for settings (except `LOADED_PLUGINS`). | `{"SCAN_SUBNETS":"['192.168.1.0/24 --interface=eth1']","GRAPHQL_PORT":"20212"}` |
|
|`APP_CONF_OVERRIDE` | JSON override for settings (except `LOADED_PLUGINS`). | `{"SCAN_SUBNETS":"['192.168.1.0/24 --interface=eth1']","GRAPHQL_PORT":"20212"}` |
|
||||||
|`ALWAYS_FRESH_INSTALL` | ⚠ If `true` will delete the content of the `/db` & `/config` folders. For testing purposes. Can be coupled with [watchtower](https://github.com/containrrr/watchtower) to have an always freshly installed `netalertx`/`netalertx-dev` image. | `true` |
|
|`ALWAYS_FRESH_INSTALL` | ⚠ If `true` will delete the content of the `/db` & `/config` folders. For testing purposes. Can be coupled with [watchtower](https://github.com/containrrr/watchtower) to have an always freshly installed `netalertx`/`netalertx-dev` image. | `true` |
|
||||||
@@ -60,8 +64,9 @@ See alternative [docked-compose examples](https://github.com/jokob-sk/NetAlertX/
|
|||||||
| :------------- | :------------- | :-------------|
|
| :------------- | :------------- | :-------------|
|
||||||
| ✅ | `:/data/config` | Folder which will contain the `app.conf` & `devices.csv` ([read about devices.csv](https://github.com/jokob-sk/NetAlertX/blob/main/docs/DEVICES_BULK_EDITING.md)) files |
|
| ✅ | `:/data/config` | Folder which will contain the `app.conf` & `devices.csv` ([read about devices.csv](https://github.com/jokob-sk/NetAlertX/blob/main/docs/DEVICES_BULK_EDITING.md)) files |
|
||||||
| ✅ | `:/data/db` | Folder which will contain the `app.db` database file |
|
| ✅ | `:/data/db` | Folder which will contain the `app.db` database file |
|
||||||
|
| ✅ | `/etc/localtime:/etc/localtime:ro` | Ensuring the timezone is the same as on the server. |
|
||||||
| | `:/tmp/log` | Logs folder useful for debugging if you have issues setting up the container |
|
| | `:/tmp/log` | Logs folder useful for debugging if you have issues setting up the container |
|
||||||
| | `:/tmp/api` | A simple [API endpoint](https://github.com/jokob-sk/NetAlertX/blob/main/docs/API.md) containing static (but regularly updated) json and other files. Path configurable via `NETALERTX_API` environment variable. |
|
| | `:/tmp/api` | The [API endpoint](https://github.com/jokob-sk/NetAlertX/blob/main/docs/API.md) containing static (but regularly updated) json and other files. Path configurable via `NETALERTX_API` environment variable. |
|
||||||
| | `:/app/front/plugins/<plugin>/ignore_plugin` | Map a file `ignore_plugin` to ignore a plugin. Plugins can be soft-disabled via settings. More in the [Plugin docs](https://github.com/jokob-sk/NetAlertX/blob/main/docs/PLUGINS.md). |
|
| | `:/app/front/plugins/<plugin>/ignore_plugin` | Map a file `ignore_plugin` to ignore a plugin. Plugins can be soft-disabled via settings. More in the [Plugin docs](https://github.com/jokob-sk/NetAlertX/blob/main/docs/PLUGINS.md). |
|
||||||
| | `:/etc/resolv.conf` | Use a custom `resolv.conf` file for [better name resolution](https://github.com/jokob-sk/NetAlertX/blob/main/docs/REVERSE_DNS.md). |
|
| | `:/etc/resolv.conf` | Use a custom `resolv.conf` file for [better name resolution](https://github.com/jokob-sk/NetAlertX/blob/main/docs/REVERSE_DNS.md). |
|
||||||
|
|
||||||
|
|||||||
@@ -8,12 +8,12 @@ This guide shows you how to set up **NetAlertX** using Portainer’s **Stacks**
|
|||||||
|
|
||||||
## 1. Prepare Your Host
|
## 1. Prepare Your Host
|
||||||
|
|
||||||
Before deploying, make sure you have a folder on your Docker host for NetAlertX data. Replace `APP_FOLDER` with your preferred location, for example `/opt` here:
|
Before deploying, make sure you have a folder on your Docker host for NetAlertX data. Replace `APP_FOLDER` with your preferred location, for example `/local_data_dir` here:
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
mkdir -p /opt/netalertx/config
|
mkdir -p /local_data_dir/netalertx/config
|
||||||
mkdir -p /opt/netalertx/db
|
mkdir -p /local_data_dir/netalertx/db
|
||||||
mkdir -p /opt/netalertx/log
|
mkdir -p /local_data_dir/netalertx/log
|
||||||
```
|
```
|
||||||
|
|
||||||
---
|
---
|
||||||
@@ -59,7 +59,6 @@ services:
|
|||||||
# - ${APP_FOLDER}/netalertx/api:/tmp/api
|
# - ${APP_FOLDER}/netalertx/api:/tmp/api
|
||||||
|
|
||||||
environment:
|
environment:
|
||||||
- TZ=${TZ}
|
|
||||||
- PORT=${PORT}
|
- PORT=${PORT}
|
||||||
- APP_CONF_OVERRIDE=${APP_CONF_OVERRIDE}
|
- APP_CONF_OVERRIDE=${APP_CONF_OVERRIDE}
|
||||||
```
|
```
|
||||||
@@ -70,14 +69,25 @@ services:
|
|||||||
|
|
||||||
In the **Environment variables** section of Portainer, add the following:
|
In the **Environment variables** section of Portainer, add the following:
|
||||||
|
|
||||||
* `APP_FOLDER=/opt` (or wherever you created the directories in step 1)
|
* `APP_FOLDER=/local_data_dir` (or wherever you created the directories in step 1)
|
||||||
* `TZ=Europe/Berlin` (replace with your timezone)
|
|
||||||
* `PORT=22022` (or another port if needed)
|
* `PORT=22022` (or another port if needed)
|
||||||
* `APP_CONF_OVERRIDE={"GRAPHQL_PORT":"22023"}` (optional advanced settings)
|
* `APP_CONF_OVERRIDE={"GRAPHQL_PORT":"22023"}` (optional advanced settings, otherwise the backend API server PORT defaults to `20212`)
|
||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
## 5. Deploy the Stack
|
## 5. Ensure permissions
|
||||||
|
|
||||||
|
> [!TIP]
|
||||||
|
> If you are facing permissions issues run the following commands on your server. This will change the owner and assure sufficient access to the database and config files that are stored in the `/local_data_dir/db` and `/local_data_dir/config` folders (replace `local_data_dir` with the location where your `/db` and `/config` folders are located).
|
||||||
|
> ```bash
|
||||||
|
> sudo chown -R 20211:20211 /local_data_dir
|
||||||
|
> sudo chmod -R a+rwx /local_data_dir
|
||||||
|
> ```
|
||||||
|
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 6. Deploy the Stack
|
||||||
|
|
||||||
1. Scroll down and click **Deploy the stack**.
|
1. Scroll down and click **Deploy the stack**.
|
||||||
2. Portainer will pull the image and start NetAlertX.
|
2. Portainer will pull the image and start NetAlertX.
|
||||||
@@ -89,7 +99,7 @@ http://<your-docker-host-ip>:22022
|
|||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
## 6. Verify and Troubleshoot
|
## 7. Verify and Troubleshoot
|
||||||
|
|
||||||
* Check logs via Portainer → **Containers** → `netalertx` → **Logs**.
|
* Check logs via Portainer → **Containers** → `netalertx` → **Logs**.
|
||||||
* Logs are stored under `${APP_FOLDER}/netalertx/log` if you enabled that volume.
|
* Logs are stored under `${APP_FOLDER}/netalertx/log` if you enabled that volume.
|
||||||
|
|||||||
@@ -47,8 +47,8 @@ services:
|
|||||||
- /mnt/YOUR_SERVER/netalertx/config:/data/config:rw
|
- /mnt/YOUR_SERVER/netalertx/config:/data/config:rw
|
||||||
- /mnt/YOUR_SERVER/netalertx/db:/netalertx/data/db:rw
|
- /mnt/YOUR_SERVER/netalertx/db:/netalertx/data/db:rw
|
||||||
- /mnt/YOUR_SERVER/netalertx/logs:/netalertx/tmp/log:rw
|
- /mnt/YOUR_SERVER/netalertx/logs:/netalertx/tmp/log:rw
|
||||||
|
- /etc/localtime:/etc/localtime:ro
|
||||||
environment:
|
environment:
|
||||||
- TZ=Europe/London
|
|
||||||
- PORT=20211
|
- PORT=20211
|
||||||
networks:
|
networks:
|
||||||
swarm-ipvlan:
|
swarm-ipvlan:
|
||||||
|
|||||||
@@ -35,8 +35,8 @@ Sometimes, permission issues arise if your existing host directories were create
|
|||||||
|
|
||||||
```bash
|
```bash
|
||||||
docker run -it --rm --name netalertx --user "0" \
|
docker run -it --rm --name netalertx --user "0" \
|
||||||
-v local/path/config:/data/config \
|
-v /local_data_dir/config:/data/config \
|
||||||
-v local/path/db:/data/db \
|
-v /local_data_dir/db:/data/db \
|
||||||
ghcr.io/jokob-sk/netalertx:latest
|
ghcr.io/jokob-sk/netalertx:latest
|
||||||
```
|
```
|
||||||
|
|
||||||
@@ -46,6 +46,13 @@ docker run -it --rm --name netalertx --user "0" \
|
|||||||
|
|
||||||
> The container startup script detects `root` and runs `chown -R 20211:20211` on all volumes, fixing ownership for the secure `netalertx` user.
|
> The container startup script detects `root` and runs `chown -R 20211:20211` on all volumes, fixing ownership for the secure `netalertx` user.
|
||||||
|
|
||||||
|
> [!TIP]
|
||||||
|
> If you are facing permission issues, run the following commands on your server. This will change the owner and ensure sufficient access to the database and config files that are stored in the `/local_data_dir/db` and `/local_data_dir/config` folders (replace `local_data_dir` with the location where your `/db` and `/config` folders are located).
|
||||||
|
> ```bash
|
||||||
|
> sudo chown -R 20211:20211 /local_data_dir
|
||||||
|
> sudo chmod -R a+rwx /local_data_dir
|
||||||
|
> ```
|
||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
## Example: docker-compose.yml with `tmpfs`
|
## Example: docker-compose.yml with `tmpfs`
|
||||||
@@ -55,17 +62,19 @@ services:
|
|||||||
netalertx:
|
netalertx:
|
||||||
container_name: netalertx
|
container_name: netalertx
|
||||||
image: "ghcr.io/jokob-sk/netalertx"
|
image: "ghcr.io/jokob-sk/netalertx"
|
||||||
network_mode: "host"
|
network_mode: "host"
|
||||||
cap_add:
|
cap_drop: # Drop all capabilities for enhanced security
|
||||||
- NET_RAW
|
- ALL
|
||||||
- NET_ADMIN
|
cap_add: # Add only the necessary capabilities
|
||||||
- NET_BIND_SERVICE
|
- NET_ADMIN # Required for ARP scanning
|
||||||
|
- NET_RAW # Required for raw socket operations
|
||||||
|
- NET_BIND_SERVICE # Required to bind to privileged ports (nbtscan)
|
||||||
restart: unless-stopped
|
restart: unless-stopped
|
||||||
volumes:
|
volumes:
|
||||||
- local/path/config:/data/config
|
- /local_data_dir/config:/data/config
|
||||||
- local/path/db:/data/db
|
- /local_data_dir/db:/data/db
|
||||||
environment:
|
- /etc/localtime:/etc/localtime
|
||||||
- TZ=Europe/Berlin
|
environment:
|
||||||
- PORT=20211
|
- PORT=20211
|
||||||
tmpfs:
|
tmpfs:
|
||||||
- "/tmp:uid=20211,gid=20211,mode=1700,rw,noexec,nosuid,nodev,async,noatime,nodiratime"
|
- "/tmp:uid=20211,gid=20211,mode=1700,rw,noexec,nosuid,nodev,async,noatime,nodiratime"
|
||||||
|
|||||||
@@ -85,10 +85,10 @@ services:
|
|||||||
network_mode: "host"
|
network_mode: "host"
|
||||||
restart: unless-stopped
|
restart: unless-stopped
|
||||||
volumes:
|
volumes:
|
||||||
- local/path/config:/home/pi/pialert/config
|
- /local_data_dir/config:/home/pi/pialert/config
|
||||||
- local/path/db:/home/pi/pialert/db
|
- /local_data_dir/db:/home/pi/pialert/db
|
||||||
# (optional) useful for debugging if you have issues setting up the container
|
# (optional) useful for debugging if you have issues setting up the container
|
||||||
- local/path/logs:/home/pi/pialert/front/log
|
- /local_data_dir/logs:/home/pi/pialert/front/log
|
||||||
environment:
|
environment:
|
||||||
- TZ=Europe/Berlin
|
- TZ=Europe/Berlin
|
||||||
- PORT=20211
|
- PORT=20211
|
||||||
@@ -104,10 +104,10 @@ services:
|
|||||||
network_mode: "host"
|
network_mode: "host"
|
||||||
restart: unless-stopped
|
restart: unless-stopped
|
||||||
volumes:
|
volumes:
|
||||||
- local/path/config:/data/config # 🆕 This has changed
|
- /local_data_dir/config:/data/config # 🆕 This has changed
|
||||||
- local/path/db:/data/db # 🆕 This has changed
|
- /local_data_dir/db:/data/db # 🆕 This has changed
|
||||||
# (optional) useful for debugging if you have issues setting up the container
|
# (optional) useful for debugging if you have issues setting up the container
|
||||||
- local/path/logs:/tmp/log # 🆕 This has changed
|
- /local_data_dir/logs:/tmp/log # 🆕 This has changed
|
||||||
environment:
|
environment:
|
||||||
- TZ=Europe/Berlin
|
- TZ=Europe/Berlin
|
||||||
- PORT=20211
|
- PORT=20211
|
||||||
@@ -131,10 +131,10 @@ services:
|
|||||||
network_mode: "host"
|
network_mode: "host"
|
||||||
restart: unless-stopped
|
restart: unless-stopped
|
||||||
volumes:
|
volumes:
|
||||||
- local/path/config/pialert.conf:/home/pi/pialert/config/pialert.conf
|
- /local_data_dir/config/pialert.conf:/home/pi/pialert/config/pialert.conf
|
||||||
- local/path/db/pialert.db:/home/pi/pialert/db/pialert.db
|
- /local_data_dir/db/pialert.db:/home/pi/pialert/db/pialert.db
|
||||||
# (optional) useful for debugging if you have issues setting up the container
|
# (optional) useful for debugging if you have issues setting up the container
|
||||||
- local/path/logs:/home/pi/pialert/front/log
|
- /local_data_dir/logs:/home/pi/pialert/front/log
|
||||||
environment:
|
environment:
|
||||||
- TZ=Europe/Berlin
|
- TZ=Europe/Berlin
|
||||||
- PORT=20211
|
- PORT=20211
|
||||||
@@ -150,10 +150,10 @@ services:
|
|||||||
network_mode: "host"
|
network_mode: "host"
|
||||||
restart: unless-stopped
|
restart: unless-stopped
|
||||||
volumes:
|
volumes:
|
||||||
- local/path/config/app.conf:/data/config/app.conf # 🆕 This has changed
|
- /local_data_dir/config/app.conf:/data/config/app.conf # 🆕 This has changed
|
||||||
- local/path/db/app.db:/data/db/app.db # 🆕 This has changed
|
- /local_data_dir/db/app.db:/data/db/app.db # 🆕 This has changed
|
||||||
# (optional) useful for debugging if you have issues setting up the container
|
# (optional) useful for debugging if you have issues setting up the container
|
||||||
- local/path/logs:/tmp/log # 🆕 This has changed
|
- /local_data_dir/logs:/tmp/log # 🆕 This has changed
|
||||||
environment:
|
environment:
|
||||||
- TZ=Europe/Berlin
|
- TZ=Europe/Berlin
|
||||||
- PORT=20211
|
- PORT=20211
|
||||||
@@ -190,10 +190,10 @@ services:
|
|||||||
network_mode: "host"
|
network_mode: "host"
|
||||||
restart: unless-stopped
|
restart: unless-stopped
|
||||||
volumes:
|
volumes:
|
||||||
- local/path/config:/data/config
|
- /local_data_dir/config:/data/config
|
||||||
- local/path/db:/data/db
|
- /local_data_dir/db:/data/db
|
||||||
# (optional) useful for debugging if you have issues setting up the container
|
# (optional) useful for debugging if you have issues setting up the container
|
||||||
- local/path/logs:/tmp/log
|
- /local_data_dir/logs:/tmp/log
|
||||||
environment:
|
environment:
|
||||||
- TZ=Europe/Berlin
|
- TZ=Europe/Berlin
|
||||||
- PORT=20211
|
- PORT=20211
|
||||||
@@ -207,10 +207,10 @@ services:
|
|||||||
network_mode: "host"
|
network_mode: "host"
|
||||||
restart: unless-stopped
|
restart: unless-stopped
|
||||||
volumes:
|
volumes:
|
||||||
- local/path/config:/data/config
|
- /local_data_dir/config:/data/config
|
||||||
- local/path/db:/data/db
|
- /local_data_dir/db:/data/db
|
||||||
# (optional) useful for debugging if you have issues setting up the container
|
# (optional) useful for debugging if you have issues setting up the container
|
||||||
- local/path/logs:/tmp/log
|
- /local_data_dir/logs:/tmp/log
|
||||||
environment:
|
environment:
|
||||||
- TZ=Europe/Berlin
|
- TZ=Europe/Berlin
|
||||||
- PORT=20211
|
- PORT=20211
|
||||||
@@ -234,10 +234,10 @@ services:
|
|||||||
network_mode: "host"
|
network_mode: "host"
|
||||||
restart: unless-stopped
|
restart: unless-stopped
|
||||||
volumes:
|
volumes:
|
||||||
- local/path/config:/data/config
|
- /local_data_dir/config:/data/config
|
||||||
- local/path/db:/data/db
|
- /local_data_dir/db:/data/db
|
||||||
# (optional) useful for debugging if you have issues setting up the container
|
# (optional) useful for debugging if you have issues setting up the container
|
||||||
- local/path/logs:/tmp/log
|
- /local_data_dir/logs:/tmp/log
|
||||||
environment:
|
environment:
|
||||||
- TZ=Europe/Berlin
|
- TZ=Europe/Berlin
|
||||||
- PORT=20211
|
- PORT=20211
|
||||||
@@ -248,16 +248,24 @@ services:
|
|||||||
6. Perform a one-off migration to the latest `netalertx` image and `20211` user:
|
6. Perform a one-off migration to the latest `netalertx` image and `20211` user:
|
||||||
|
|
||||||
> [!NOTE]
|
> [!NOTE]
|
||||||
> The example below assumes your `/config` and `/db` folders are stored in `local/path`.
|
> The example below assumes your `/config` and `/db` folders are stored in `local_data_dir`.
|
||||||
> Replace this path with your actual configuration directory. `netalertx` is the container name, which might differ from your setup.
|
> Replace this path with your actual configuration directory. `netalertx` is the container name, which might differ from your setup.
|
||||||
|
|
||||||
```sh
|
```sh
|
||||||
docker run -it --rm --name netalertx --user "0" \
|
docker run -it --rm --name netalertx --user "0" \
|
||||||
-v local/path/config:/data/config \
|
-v /local_data_dir/config:/data/config \
|
||||||
-v local/path/db:/data/db \
|
-v /local_data_dir/db:/data/db \
|
||||||
ghcr.io/jokob-sk/netalertx:latest
|
ghcr.io/jokob-sk/netalertx:latest
|
||||||
```
|
```
|
||||||
|
|
||||||
|
...or alternatively execute:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
sudo chown -R 20211:20211 /local_data_dir/config
|
||||||
|
sudo chown -R 20211:20211 /local_data_dir/db
|
||||||
|
sudo chmod -R a+rwx /local_data_dir/
|
||||||
|
```
|
||||||
|
|
||||||
7. Stop the container
|
7. Stop the container
|
||||||
8. Update the `docker-compose.yml` as per example below.
|
8. Update the `docker-compose.yml` as per example below.
|
||||||
|
|
||||||
@@ -265,20 +273,23 @@ docker run -it --rm --name netalertx --user "0" \
|
|||||||
services:
|
services:
|
||||||
netalertx:
|
netalertx:
|
||||||
container_name: netalertx
|
container_name: netalertx
|
||||||
image: "ghcr.io/jokob-sk/netalertx" # 🆕 This is important
|
image: "ghcr.io/jokob-sk/netalertx" # 🆕 This is important
|
||||||
network_mode: "host"
|
network_mode: "host"
|
||||||
cap_add: # 🆕 New line
|
cap_drop: # 🆕 New line
|
||||||
- NET_RAW # 🆕 New line
|
- ALL # 🆕 New line
|
||||||
- NET_ADMIN # 🆕 New line
|
cap_add: # 🆕 New line
|
||||||
- NET_BIND_SERVICE # 🆕 New line
|
- NET_RAW # 🆕 New line
|
||||||
|
- NET_ADMIN # 🆕 New line
|
||||||
|
- NET_BIND_SERVICE # 🆕 New line
|
||||||
restart: unless-stopped
|
restart: unless-stopped
|
||||||
volumes:
|
volumes:
|
||||||
- local/path/config:/data/config
|
- /local_data_dir/config:/data/config
|
||||||
- local/path/db:/data/db
|
- /local_data_dir/db:/data/db
|
||||||
# (optional) useful for debugging if you have issues setting up the container
|
# (optional) useful for debugging if you have issues setting up the container
|
||||||
#- local/path/logs:/tmp/log
|
#- /local_data_dir/logs:/tmp/log
|
||||||
|
# Ensuring the timezone is the same as on the server - make sure also the TIMEZONE setting is configured
|
||||||
|
- /etc/localtime:/etc/localtime:ro # 🆕 New line
|
||||||
environment:
|
environment:
|
||||||
- TZ=Europe/Berlin
|
|
||||||
- PORT=20211
|
- PORT=20211
|
||||||
# 🆕 New "tmpfs" section START 🔽
|
# 🆕 New "tmpfs" section START 🔽
|
||||||
tmpfs:
|
tmpfs:
|
||||||
|
|||||||
@@ -80,17 +80,18 @@ services:
|
|||||||
network_mode: "host"
|
network_mode: "host"
|
||||||
restart: unless-stopped
|
restart: unless-stopped
|
||||||
volumes:
|
volumes:
|
||||||
- local/path/config:/data/config
|
- /local_data_dir/config:/data/config
|
||||||
- local/path/db:/data/db
|
- /local_data_dir/db:/data/db
|
||||||
# (Optional) Useful for debugging setup issues
|
# (Optional) Useful for debugging setup issues
|
||||||
- local/path/logs:/tmp/log
|
- /local_data_dir/logs:/tmp/log
|
||||||
# (API: OPTION 1) Store temporary files in memory (recommended for performance)
|
# (API: OPTION 1) Store temporary files in memory (recommended for performance)
|
||||||
- type: tmpfs # ◀ 🔺
|
- type: tmpfs # ◀ 🔺
|
||||||
target: /tmp/api # ◀ 🔺
|
target: /tmp/api # ◀ 🔺
|
||||||
# (API: OPTION 2) Store API data on disk (useful for debugging)
|
# (API: OPTION 2) Store API data on disk (useful for debugging)
|
||||||
# - local/path/api:/tmp/api
|
# - /local_data_dir/api:/tmp/api
|
||||||
environment:
|
# Ensuring the timezone is the same as on the server - make sure also the TIMEZONE setting is configured
|
||||||
- TZ=Europe/Berlin
|
- /etc/localtime:/etc/localtime:ro
|
||||||
|
environment:
|
||||||
- PORT=20211
|
- PORT=20211
|
||||||
|
|
||||||
```
|
```
|
||||||
|
|||||||
@@ -64,6 +64,7 @@ Device-detecting plugins insert values into the `CurrentScan` database table. T
|
|||||||
| `LUCIRPC` | [luci_import](https://github.com/jokob-sk/NetAlertX/tree/main/front/plugins/luci_import/) | 🔍 | Import connected devices from OpenWRT | | |
|
| `LUCIRPC` | [luci_import](https://github.com/jokob-sk/NetAlertX/tree/main/front/plugins/luci_import/) | 🔍 | Import connected devices from OpenWRT | | |
|
||||||
| `MAINT` | [maintenance](https://github.com/jokob-sk/NetAlertX/tree/main/front/plugins/maintenance/) | ⚙ | Maintenance of logs, etc. | | |
|
| `MAINT` | [maintenance](https://github.com/jokob-sk/NetAlertX/tree/main/front/plugins/maintenance/) | ⚙ | Maintenance of logs, etc. | | |
|
||||||
| `MQTT` | [_publisher_mqtt](https://github.com/jokob-sk/NetAlertX/tree/main/front/plugins/_publisher_mqtt/) | ▶️ | MQTT for synching to Home Assistant | | |
|
| `MQTT` | [_publisher_mqtt](https://github.com/jokob-sk/NetAlertX/tree/main/front/plugins/_publisher_mqtt/) | ▶️ | MQTT for synching to Home Assistant | | |
|
||||||
|
| `MTSCAN` | [mikrotik_scan](https://github.com/jokob-sk/NetAlertX/tree/main/front/plugins/mikrotik_scan/) | 🔍 | Mikrotik device import & sync | | |
|
||||||
| `NBTSCAN` | [nbtscan_scan](https://github.com/jokob-sk/NetAlertX/tree/main/front/plugins/nbtscan_scan/) | 🆎 | Nbtscan (NetBIOS-based) name resolution | | |
|
| `NBTSCAN` | [nbtscan_scan](https://github.com/jokob-sk/NetAlertX/tree/main/front/plugins/nbtscan_scan/) | 🆎 | Nbtscan (NetBIOS-based) name resolution | | |
|
||||||
| `NEWDEV` | [newdev_template](https://github.com/jokob-sk/NetAlertX/tree/main/front/plugins/newdev_template/) | ⚙ | New device template | | Yes |
|
| `NEWDEV` | [newdev_template](https://github.com/jokob-sk/NetAlertX/tree/main/front/plugins/newdev_template/) | ⚙ | New device template | | Yes |
|
||||||
| `NMAP` | [nmap_scan](https://github.com/jokob-sk/NetAlertX/tree/main/front/plugins/nmap_scan/) | ♻ | Nmap port scanning & discovery | | |
|
| `NMAP` | [nmap_scan](https://github.com/jokob-sk/NetAlertX/tree/main/front/plugins/nmap_scan/) | ♻ | Nmap port scanning & discovery | | |
|
||||||
@@ -74,6 +75,7 @@ Device-detecting plugins insert values into the `CurrentScan` database table. T
|
|||||||
| `OMDSDN` | [omada_sdn_imp](https://github.com/jokob-sk/NetAlertX/tree/main/front/plugins/omada_sdn_imp/) | 📥/🆎 ❌ | UNMAINTAINED use `OMDSDNOPENAPI` | 🖧 🔄 | |
|
| `OMDSDN` | [omada_sdn_imp](https://github.com/jokob-sk/NetAlertX/tree/main/front/plugins/omada_sdn_imp/) | 📥/🆎 ❌ | UNMAINTAINED use `OMDSDNOPENAPI` | 🖧 🔄 | |
|
||||||
| `OMDSDNOPENAPI` | [omada_sdn_openapi](https://github.com/jokob-sk/NetAlertX/tree/main/front/plugins/omada_sdn_openapi/) | 📥/🆎 | OMADA TP-Link import via OpenAPI | 🖧 | |
|
| `OMDSDNOPENAPI` | [omada_sdn_openapi](https://github.com/jokob-sk/NetAlertX/tree/main/front/plugins/omada_sdn_openapi/) | 📥/🆎 | OMADA TP-Link import via OpenAPI | 🖧 | |
|
||||||
| `PIHOLE` | [pihole_scan](https://github.com/jokob-sk/NetAlertX/tree/main/front/plugins/pihole_scan/) | 🔍/🆎/📥 | Pi-hole device import & sync | | |
|
| `PIHOLE` | [pihole_scan](https://github.com/jokob-sk/NetAlertX/tree/main/front/plugins/pihole_scan/) | 🔍/🆎/📥 | Pi-hole device import & sync | | |
|
||||||
|
| `PIHOLEAPI` | [pihole_api_scan](https://github.com/jokob-sk/NetAlertX/tree/main/front/plugins/pihole_api_scan/) | 🔍/🆎/📥 | Pi-hole device import & sync via API v6+ | | |
|
||||||
| `PUSHSAFER` | [_publisher_pushsafer](https://github.com/jokob-sk/NetAlertX/tree/main/front/plugins/_publisher_pushsafer/) | ▶️ | Pushsafer notifications | | |
|
| `PUSHSAFER` | [_publisher_pushsafer](https://github.com/jokob-sk/NetAlertX/tree/main/front/plugins/_publisher_pushsafer/) | ▶️ | Pushsafer notifications | | |
|
||||||
| `PUSHOVER` | [_publisher_pushover](https://github.com/jokob-sk/NetAlertX/tree/main/front/plugins/_publisher_pushover/) | ▶️ | Pushover notifications | | |
|
| `PUSHOVER` | [_publisher_pushover](https://github.com/jokob-sk/NetAlertX/tree/main/front/plugins/_publisher_pushover/) | ▶️ | Pushover notifications | | |
|
||||||
| `SETPWD` | [set_password](https://github.com/jokob-sk/NetAlertX/tree/main/front/plugins/set_password/) | ⚙ | Set password | | Yes |
|
| `SETPWD` | [set_password](https://github.com/jokob-sk/NetAlertX/tree/main/front/plugins/set_password/) | ⚙ | Set password | | Yes |
|
||||||
|
|||||||
@@ -3,7 +3,7 @@
|
|||||||
If you are running a DNS server, such as **AdGuard**, set up **Private reverse DNS servers** for a better name resolution on your network. Enabling this setting will enable NetAlertX to execute dig and nslookup commands to automatically resolve device names based on their IP addresses.
|
If you are running a DNS server, such as **AdGuard**, set up **Private reverse DNS servers** for a better name resolution on your network. Enabling this setting will enable NetAlertX to execute dig and nslookup commands to automatically resolve device names based on their IP addresses.
|
||||||
|
|
||||||
> [!TIP]
|
> [!TIP]
|
||||||
> Before proceeding, ensure that [name resolution plugins](./NAME_RESOLUTION.md) are enabled.
|
> Before proceeding, ensure that [name resolution plugins](/local_data_dir/NAME_RESOLUTION.md) are enabled.
|
||||||
> You can customize how names are cleaned using the `NEWDEV_NAME_CLEANUP_REGEX` setting.
|
> You can customize how names are cleaned using the `NEWDEV_NAME_CLEANUP_REGEX` setting.
|
||||||
> To auto-update Fully Qualified Domain Names (FQDN), enable the `REFRESH_FQDN` setting.
|
> To auto-update Fully Qualified Domain Names (FQDN), enable the `REFRESH_FQDN` setting.
|
||||||
|
|
||||||
@@ -42,11 +42,12 @@ services:
|
|||||||
image: "ghcr.io/jokob-sk/netalertx:latest"
|
image: "ghcr.io/jokob-sk/netalertx:latest"
|
||||||
restart: unless-stopped
|
restart: unless-stopped
|
||||||
volumes:
|
volumes:
|
||||||
- /home/netalertx/config:/data/config
|
- /local_data_dir/config:/data/config
|
||||||
- /home/netalertx/db:/data/db
|
- /local_data_dir/db:/data/db
|
||||||
- /home/netalertx/log:/tmp/log
|
# - /local_data_dir/log:/tmp/log
|
||||||
|
# Ensuring the timezone is the same as on the server - make sure also the TIMEZONE setting is configured
|
||||||
|
- /etc/localtime:/etc/localtime:ro
|
||||||
environment:
|
environment:
|
||||||
- TZ=Europe/Berlin
|
|
||||||
- PORT=20211
|
- PORT=20211
|
||||||
network_mode: host
|
network_mode: host
|
||||||
dns: # specifying the DNS servers used for the container
|
dns: # specifying the DNS servers used for the container
|
||||||
@@ -68,19 +69,18 @@ services:
|
|||||||
image: "ghcr.io/jokob-sk/netalertx:latest"
|
image: "ghcr.io/jokob-sk/netalertx:latest"
|
||||||
restart: unless-stopped
|
restart: unless-stopped
|
||||||
volumes:
|
volumes:
|
||||||
- ./config/app.conf:/data/config/app.conf
|
- /local_data_dir/config/app.conf:/data/config/app.conf
|
||||||
- ./db:/data/db
|
- /local_data_dir/db:/data/db
|
||||||
- ./log:/tmp/log
|
- /local_data_dir/log:/tmp/log
|
||||||
- ./config/resolv.conf:/etc/resolv.conf # Mapping the /resolv.conf file for better name resolution
|
- /local_data_dir/config/resolv.conf:/etc/resolv.conf # ⚠ Mapping the /resolv.conf file for better name resolution
|
||||||
|
# Ensuring the timezone is the same as on the server - make sure also the TIMEZONE setting is configured
|
||||||
|
- /etc/localtime:/etc/localtime:ro
|
||||||
environment:
|
environment:
|
||||||
- TZ=Europe/Berlin
|
|
||||||
- PORT=20211
|
- PORT=20211
|
||||||
ports:
|
|
||||||
- "20211:20211"
|
|
||||||
network_mode: host
|
network_mode: host
|
||||||
```
|
```
|
||||||
|
|
||||||
#### ./config/resolv.conf:
|
#### /local_data_dir/config/resolv.conf:
|
||||||
|
|
||||||
The most important below is the `nameserver` entry (you can add multiple):
|
The most important below is the `nameserver` entry (you can add multiple):
|
||||||
|
|
||||||
|
|||||||
@@ -501,8 +501,8 @@ docker run -d --rm --network=host \
|
|||||||
--name=netalertx \
|
--name=netalertx \
|
||||||
-v /appl/docker/netalertx/config:/data/config \
|
-v /appl/docker/netalertx/config:/data/config \
|
||||||
-v /appl/docker/netalertx/db:/data/db \
|
-v /appl/docker/netalertx/db:/data/db \
|
||||||
|
-v /etc/localtime:/etc/localtime \
|
||||||
-v /appl/docker/netalertx/default:/etc/nginx/sites-available/default \
|
-v /appl/docker/netalertx/default:/etc/nginx/sites-available/default \
|
||||||
-e TZ=Europe/Amsterdam \
|
|
||||||
-e PORT=20211 \
|
-e PORT=20211 \
|
||||||
ghcr.io/jokob-sk/netalertx:latest
|
ghcr.io/jokob-sk/netalertx:latest
|
||||||
|
|
||||||
|
|||||||
@@ -44,8 +44,9 @@ services:
|
|||||||
- local/path/db:/data/db
|
- local/path/db:/data/db
|
||||||
# (optional) useful for debugging if you have issues setting up the container
|
# (optional) useful for debugging if you have issues setting up the container
|
||||||
- local/path/logs:/tmp/log
|
- local/path/logs:/tmp/log
|
||||||
|
# Ensuring the timezone is the same as on the server - make sure also the TIMEZONE setting is configured
|
||||||
|
- /etc/localtime:/etc/localtime:ro
|
||||||
environment:
|
environment:
|
||||||
- TZ=Europe/Berlin
|
|
||||||
- PORT=20211
|
- PORT=20211
|
||||||
```
|
```
|
||||||
|
|
||||||
|
|||||||
@@ -12,7 +12,7 @@ var timerRefreshData = ''
|
|||||||
|
|
||||||
var emptyArr = ['undefined', "", undefined, null, 'null'];
|
var emptyArr = ['undefined', "", undefined, null, 'null'];
|
||||||
var UI_LANG = "English (en_us)";
|
var UI_LANG = "English (en_us)";
|
||||||
const allLanguages = ["ar_ar","ca_ca","cs_cz","de_de","en_us","es_es","fa_fa","fr_fr","it_it","nb_no","pl_pl","pt_br","pt_pt","ru_ru","sv_sv","tr_tr","uk_ua","zh_cn"]; // needs to be same as in lang.php
|
const allLanguages = ["ar_ar","ca_ca","cs_cz","de_de","en_us","es_es","fa_fa","fr_fr","it_it","ja_jp","nb_no","pl_pl","pt_br","pt_pt","ru_ru","sv_sv","tr_tr","uk_ua","zh_cn"]; // needs to be same as in lang.php
|
||||||
var settingsJSON = {}
|
var settingsJSON = {}
|
||||||
|
|
||||||
|
|
||||||
@@ -343,6 +343,9 @@ function getLangCode() {
|
|||||||
case 'Italian (it_it)':
|
case 'Italian (it_it)':
|
||||||
lang_code = 'it_it';
|
lang_code = 'it_it';
|
||||||
break;
|
break;
|
||||||
|
case 'Japanese (ja_jp)':
|
||||||
|
lang_code = 'ja_jp';
|
||||||
|
break;
|
||||||
case 'Russian (ru_ru)':
|
case 'Russian (ru_ru)':
|
||||||
lang_code = 'ru_ru';
|
lang_code = 'ru_ru';
|
||||||
break;
|
break;
|
||||||
@@ -497,11 +500,39 @@ function isValidBase64(str) {
|
|||||||
// -------------------------------------------------------------------
|
// -------------------------------------------------------------------
|
||||||
// Utility function to check if the value is already Base64
|
// Utility function to check if the value is already Base64
|
||||||
function isBase64(value) {
|
function isBase64(value) {
|
||||||
const base64Regex =
|
if (typeof value !== "string" || value.trim() === "") return false;
|
||||||
/^(?:[A-Za-z0-9+\/]{4})*?(?:[A-Za-z0-9+\/]{2}==|[A-Za-z0-9+\/]{3}=)?$/;
|
|
||||||
return base64Regex.test(value);
|
// Must have valid length
|
||||||
|
if (value.length % 4 !== 0) return false;
|
||||||
|
|
||||||
|
// Valid Base64 characters
|
||||||
|
const base64Regex = /^[A-Za-z0-9+/]+={0,2}$/;
|
||||||
|
if (!base64Regex.test(value)) return false;
|
||||||
|
|
||||||
|
|
||||||
|
try {
|
||||||
|
const decoded = atob(value);
|
||||||
|
|
||||||
|
// Re-encode
|
||||||
|
const reencoded = btoa(decoded);
|
||||||
|
|
||||||
|
if (reencoded !== value) return false;
|
||||||
|
|
||||||
|
// Extra verification:
|
||||||
|
// Ensure decoding didn't silently drop bytes (atob bug)
|
||||||
|
// Encode raw bytes: check if large char codes exist (invalid UTF-16)
|
||||||
|
for (let i = 0; i < decoded.length; i++) {
|
||||||
|
const code = decoded.charCodeAt(i);
|
||||||
|
if (code > 255) return false; // invalid binary byte
|
||||||
|
}
|
||||||
|
|
||||||
|
return true;
|
||||||
|
} catch (e) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
// ----------------------------------------------------
|
// ----------------------------------------------------
|
||||||
function isValidJSON(jsonString) {
|
function isValidJSON(jsonString) {
|
||||||
try {
|
try {
|
||||||
|
|||||||
@@ -462,10 +462,17 @@
|
|||||||
|
|
||||||
switch (orderTopologyBy[0]) {
|
switch (orderTopologyBy[0]) {
|
||||||
case "Name":
|
case "Name":
|
||||||
const nameCompare = a.devName.localeCompare(b.devName);
|
// ensuring string
|
||||||
return nameCompare !== 0 ? nameCompare : parsePort(a.devParentPort) - parsePort(b.devParentPort);
|
const nameA = (a.devName ?? "").toString();
|
||||||
|
const nameB = (b.devName ?? "").toString();
|
||||||
|
const nameCompare = nameA.localeCompare(nameB);
|
||||||
|
return nameCompare !== 0
|
||||||
|
? nameCompare
|
||||||
|
: parsePort(a.devParentPort) - parsePort(b.devParentPort);
|
||||||
|
|
||||||
case "Port":
|
case "Port":
|
||||||
return parsePort(a.devParentPort) - parsePort(b.devParentPort);
|
return parsePort(a.devParentPort) - parsePort(b.devParentPort);
|
||||||
|
|
||||||
default:
|
default:
|
||||||
return a.rowid - b.rowid;
|
return a.rowid - b.rowid;
|
||||||
}
|
}
|
||||||
|
|||||||
0
front/php/templates/language/ar_ar.json
Executable file → Normal file
0
front/php/templates/language/ar_ar.json
Executable file → Normal file
@@ -761,4 +761,4 @@
|
|||||||
"settings_system_label": "",
|
"settings_system_label": "",
|
||||||
"settings_update_item_warning": "",
|
"settings_update_item_warning": "",
|
||||||
"test_event_tooltip": ""
|
"test_event_tooltip": ""
|
||||||
}
|
}
|
||||||
@@ -834,4 +834,4 @@
|
|||||||
"settings_system_label": "System",
|
"settings_system_label": "System",
|
||||||
"settings_update_item_warning": "",
|
"settings_update_item_warning": "",
|
||||||
"test_event_tooltip": "Speichere die Änderungen, bevor Sie die Einstellungen testen."
|
"test_event_tooltip": "Speichere die Änderungen, bevor Sie die Einstellungen testen."
|
||||||
}
|
}
|
||||||
@@ -761,4 +761,4 @@
|
|||||||
"settings_system_label": "",
|
"settings_system_label": "",
|
||||||
"settings_update_item_warning": "",
|
"settings_update_item_warning": "",
|
||||||
"test_event_tooltip": ""
|
"test_event_tooltip": ""
|
||||||
}
|
}
|
||||||
764
front/php/templates/language/ja_jp.json
Normal file
764
front/php/templates/language/ja_jp.json
Normal file
@@ -0,0 +1,764 @@
|
|||||||
|
{
|
||||||
|
"API_CUSTOM_SQL_description": "",
|
||||||
|
"API_CUSTOM_SQL_name": "",
|
||||||
|
"API_TOKEN_description": "",
|
||||||
|
"API_TOKEN_name": "",
|
||||||
|
"API_display_name": "",
|
||||||
|
"API_icon": "",
|
||||||
|
"About_Design": "",
|
||||||
|
"About_Exit": "",
|
||||||
|
"About_Title": "",
|
||||||
|
"AppEvents_AppEventProcessed": "",
|
||||||
|
"AppEvents_DateTimeCreated": "",
|
||||||
|
"AppEvents_Extra": "",
|
||||||
|
"AppEvents_GUID": "",
|
||||||
|
"AppEvents_Helper1": "",
|
||||||
|
"AppEvents_Helper2": "",
|
||||||
|
"AppEvents_Helper3": "",
|
||||||
|
"AppEvents_ObjectForeignKey": "",
|
||||||
|
"AppEvents_ObjectIndex": "",
|
||||||
|
"AppEvents_ObjectIsArchived": "",
|
||||||
|
"AppEvents_ObjectIsNew": "",
|
||||||
|
"AppEvents_ObjectPlugin": "",
|
||||||
|
"AppEvents_ObjectPrimaryID": "",
|
||||||
|
"AppEvents_ObjectSecondaryID": "",
|
||||||
|
"AppEvents_ObjectStatus": "",
|
||||||
|
"AppEvents_ObjectStatusColumn": "",
|
||||||
|
"AppEvents_ObjectType": "",
|
||||||
|
"AppEvents_Plugin": "",
|
||||||
|
"AppEvents_Type": "",
|
||||||
|
"BackDevDetail_Actions_Ask_Run": "",
|
||||||
|
"BackDevDetail_Actions_Not_Registered": "",
|
||||||
|
"BackDevDetail_Actions_Title_Run": "",
|
||||||
|
"BackDevDetail_Copy_Ask": "",
|
||||||
|
"BackDevDetail_Copy_Title": "",
|
||||||
|
"BackDevDetail_Tools_WOL_error": "",
|
||||||
|
"BackDevDetail_Tools_WOL_okay": "",
|
||||||
|
"BackDevices_Arpscan_disabled": "",
|
||||||
|
"BackDevices_Arpscan_enabled": "",
|
||||||
|
"BackDevices_Backup_CopError": "",
|
||||||
|
"BackDevices_Backup_Failed": "",
|
||||||
|
"BackDevices_Backup_okay": "",
|
||||||
|
"BackDevices_DBTools_DelDevError_a": "",
|
||||||
|
"BackDevices_DBTools_DelDevError_b": "",
|
||||||
|
"BackDevices_DBTools_DelDev_a": "",
|
||||||
|
"BackDevices_DBTools_DelDev_b": "",
|
||||||
|
"BackDevices_DBTools_DelEvents": "",
|
||||||
|
"BackDevices_DBTools_DelEventsError": "",
|
||||||
|
"BackDevices_DBTools_ImportCSV": "",
|
||||||
|
"BackDevices_DBTools_ImportCSVError": "",
|
||||||
|
"BackDevices_DBTools_ImportCSVMissing": "",
|
||||||
|
"BackDevices_DBTools_Purge": "",
|
||||||
|
"BackDevices_DBTools_UpdDev": "",
|
||||||
|
"BackDevices_DBTools_UpdDevError": "",
|
||||||
|
"BackDevices_DBTools_Upgrade": "",
|
||||||
|
"BackDevices_DBTools_UpgradeError": "",
|
||||||
|
"BackDevices_Device_UpdDevError": "",
|
||||||
|
"BackDevices_Restore_CopError": "",
|
||||||
|
"BackDevices_Restore_Failed": "",
|
||||||
|
"BackDevices_Restore_okay": "",
|
||||||
|
"BackDevices_darkmode_disabled": "",
|
||||||
|
"BackDevices_darkmode_enabled": "",
|
||||||
|
"CLEAR_NEW_FLAG_description": "",
|
||||||
|
"CLEAR_NEW_FLAG_name": "",
|
||||||
|
"CustProps_cant_remove": "",
|
||||||
|
"DAYS_TO_KEEP_EVENTS_description": "",
|
||||||
|
"DAYS_TO_KEEP_EVENTS_name": "",
|
||||||
|
"DISCOVER_PLUGINS_description": "",
|
||||||
|
"DISCOVER_PLUGINS_name": "",
|
||||||
|
"DevDetail_Children_Title": "",
|
||||||
|
"DevDetail_Copy_Device_Title": "",
|
||||||
|
"DevDetail_Copy_Device_Tooltip": "",
|
||||||
|
"DevDetail_CustomProperties_Title": "",
|
||||||
|
"DevDetail_CustomProps_reset_info": "",
|
||||||
|
"DevDetail_DisplayFields_Title": "",
|
||||||
|
"DevDetail_EveandAl_AlertAllEvents": "",
|
||||||
|
"DevDetail_EveandAl_AlertDown": "",
|
||||||
|
"DevDetail_EveandAl_Archived": "",
|
||||||
|
"DevDetail_EveandAl_NewDevice": "",
|
||||||
|
"DevDetail_EveandAl_NewDevice_Tooltip": "",
|
||||||
|
"DevDetail_EveandAl_RandomMAC": "",
|
||||||
|
"DevDetail_EveandAl_ScanCycle": "",
|
||||||
|
"DevDetail_EveandAl_ScanCycle_a": "",
|
||||||
|
"DevDetail_EveandAl_ScanCycle_z": "",
|
||||||
|
"DevDetail_EveandAl_Skip": "",
|
||||||
|
"DevDetail_EveandAl_Title": "",
|
||||||
|
"DevDetail_Events_CheckBox": "",
|
||||||
|
"DevDetail_GoToNetworkNode": "",
|
||||||
|
"DevDetail_Icon": "",
|
||||||
|
"DevDetail_Icon_Descr": "",
|
||||||
|
"DevDetail_Loading": "",
|
||||||
|
"DevDetail_MainInfo_Comments": "",
|
||||||
|
"DevDetail_MainInfo_Favorite": "",
|
||||||
|
"DevDetail_MainInfo_Group": "",
|
||||||
|
"DevDetail_MainInfo_Location": "",
|
||||||
|
"DevDetail_MainInfo_Name": "",
|
||||||
|
"DevDetail_MainInfo_Network": "",
|
||||||
|
"DevDetail_MainInfo_Network_Port": "",
|
||||||
|
"DevDetail_MainInfo_Network_Site": "",
|
||||||
|
"DevDetail_MainInfo_Network_Title": "",
|
||||||
|
"DevDetail_MainInfo_Owner": "",
|
||||||
|
"DevDetail_MainInfo_SSID": "",
|
||||||
|
"DevDetail_MainInfo_Title": "",
|
||||||
|
"DevDetail_MainInfo_Type": "",
|
||||||
|
"DevDetail_MainInfo_Vendor": "",
|
||||||
|
"DevDetail_MainInfo_mac": "",
|
||||||
|
"DevDetail_NavToChildNode": "",
|
||||||
|
"DevDetail_Network_Node_hover": "",
|
||||||
|
"DevDetail_Network_Port_hover": "",
|
||||||
|
"DevDetail_Nmap_Scans": "",
|
||||||
|
"DevDetail_Nmap_Scans_desc": "",
|
||||||
|
"DevDetail_Nmap_buttonDefault": "",
|
||||||
|
"DevDetail_Nmap_buttonDefault_text": "",
|
||||||
|
"DevDetail_Nmap_buttonDetail": "",
|
||||||
|
"DevDetail_Nmap_buttonDetail_text": "",
|
||||||
|
"DevDetail_Nmap_buttonFast": "",
|
||||||
|
"DevDetail_Nmap_buttonFast_text": "",
|
||||||
|
"DevDetail_Nmap_buttonSkipDiscovery": "",
|
||||||
|
"DevDetail_Nmap_buttonSkipDiscovery_text": "",
|
||||||
|
"DevDetail_Nmap_resultsLink": "",
|
||||||
|
"DevDetail_Owner_hover": "",
|
||||||
|
"DevDetail_Periodselect_All": "",
|
||||||
|
"DevDetail_Periodselect_LastMonth": "",
|
||||||
|
"DevDetail_Periodselect_LastWeek": "",
|
||||||
|
"DevDetail_Periodselect_LastYear": "",
|
||||||
|
"DevDetail_Periodselect_today": "",
|
||||||
|
"DevDetail_Run_Actions_Title": "",
|
||||||
|
"DevDetail_Run_Actions_Tooltip": "",
|
||||||
|
"DevDetail_SessionInfo_FirstSession": "",
|
||||||
|
"DevDetail_SessionInfo_LastIP": "",
|
||||||
|
"DevDetail_SessionInfo_LastSession": "",
|
||||||
|
"DevDetail_SessionInfo_StaticIP": "",
|
||||||
|
"DevDetail_SessionInfo_Status": "",
|
||||||
|
"DevDetail_SessionInfo_Title": "",
|
||||||
|
"DevDetail_SessionTable_Additionalinfo": "",
|
||||||
|
"DevDetail_SessionTable_Connection": "",
|
||||||
|
"DevDetail_SessionTable_Disconnection": "",
|
||||||
|
"DevDetail_SessionTable_Duration": "",
|
||||||
|
"DevDetail_SessionTable_IP": "",
|
||||||
|
"DevDetail_SessionTable_Order": "",
|
||||||
|
"DevDetail_Shortcut_CurrentStatus": "",
|
||||||
|
"DevDetail_Shortcut_DownAlerts": "",
|
||||||
|
"DevDetail_Shortcut_Presence": "",
|
||||||
|
"DevDetail_Shortcut_Sessions": "",
|
||||||
|
"DevDetail_Tab_Details": "",
|
||||||
|
"DevDetail_Tab_Events": "",
|
||||||
|
"DevDetail_Tab_EventsTableDate": "",
|
||||||
|
"DevDetail_Tab_EventsTableEvent": "",
|
||||||
|
"DevDetail_Tab_EventsTableIP": "",
|
||||||
|
"DevDetail_Tab_EventsTableInfo": "",
|
||||||
|
"DevDetail_Tab_Nmap": "",
|
||||||
|
"DevDetail_Tab_NmapEmpty": "",
|
||||||
|
"DevDetail_Tab_NmapTableExtra": "",
|
||||||
|
"DevDetail_Tab_NmapTableHeader": "",
|
||||||
|
"DevDetail_Tab_NmapTableIndex": "",
|
||||||
|
"DevDetail_Tab_NmapTablePort": "",
|
||||||
|
"DevDetail_Tab_NmapTableService": "",
|
||||||
|
"DevDetail_Tab_NmapTableState": "",
|
||||||
|
"DevDetail_Tab_NmapTableText": "",
|
||||||
|
"DevDetail_Tab_NmapTableTime": "",
|
||||||
|
"DevDetail_Tab_Plugins": "",
|
||||||
|
"DevDetail_Tab_Presence": "",
|
||||||
|
"DevDetail_Tab_Sessions": "",
|
||||||
|
"DevDetail_Tab_Tools": "",
|
||||||
|
"DevDetail_Tab_Tools_Internet_Info_Description": "",
|
||||||
|
"DevDetail_Tab_Tools_Internet_Info_Error": "",
|
||||||
|
"DevDetail_Tab_Tools_Internet_Info_Start": "",
|
||||||
|
"DevDetail_Tab_Tools_Internet_Info_Title": "",
|
||||||
|
"DevDetail_Tab_Tools_Nslookup_Description": "",
|
||||||
|
"DevDetail_Tab_Tools_Nslookup_Error": "",
|
||||||
|
"DevDetail_Tab_Tools_Nslookup_Start": "",
|
||||||
|
"DevDetail_Tab_Tools_Nslookup_Title": "",
|
||||||
|
"DevDetail_Tab_Tools_Speedtest_Description": "",
|
||||||
|
"DevDetail_Tab_Tools_Speedtest_Start": "",
|
||||||
|
"DevDetail_Tab_Tools_Speedtest_Title": "",
|
||||||
|
"DevDetail_Tab_Tools_Traceroute_Description": "",
|
||||||
|
"DevDetail_Tab_Tools_Traceroute_Error": "",
|
||||||
|
"DevDetail_Tab_Tools_Traceroute_Start": "",
|
||||||
|
"DevDetail_Tab_Tools_Traceroute_Title": "",
|
||||||
|
"DevDetail_Tools_WOL": "",
|
||||||
|
"DevDetail_Tools_WOL_noti": "",
|
||||||
|
"DevDetail_Tools_WOL_noti_text": "",
|
||||||
|
"DevDetail_Type_hover": "",
|
||||||
|
"DevDetail_Vendor_hover": "",
|
||||||
|
"DevDetail_WOL_Title": "",
|
||||||
|
"DevDetail_button_AddIcon": "",
|
||||||
|
"DevDetail_button_AddIcon_Help": "",
|
||||||
|
"DevDetail_button_AddIcon_Tooltip": "",
|
||||||
|
"DevDetail_button_Delete": "",
|
||||||
|
"DevDetail_button_DeleteEvents": "",
|
||||||
|
"DevDetail_button_DeleteEvents_Warning": "",
|
||||||
|
"DevDetail_button_Delete_ask": "",
|
||||||
|
"DevDetail_button_OverwriteIcons": "",
|
||||||
|
"DevDetail_button_OverwriteIcons_Tooltip": "",
|
||||||
|
"DevDetail_button_OverwriteIcons_Warning": "",
|
||||||
|
"DevDetail_button_Reset": "",
|
||||||
|
"DevDetail_button_Save": "",
|
||||||
|
"DeviceEdit_ValidMacIp": "",
|
||||||
|
"Device_MultiEdit": "",
|
||||||
|
"Device_MultiEdit_Backup": "",
|
||||||
|
"Device_MultiEdit_Fields": "",
|
||||||
|
"Device_MultiEdit_MassActions": "",
|
||||||
|
"Device_MultiEdit_No_Devices": "",
|
||||||
|
"Device_MultiEdit_Tooltip": "",
|
||||||
|
"Device_Searchbox": "",
|
||||||
|
"Device_Shortcut_AllDevices": "",
|
||||||
|
"Device_Shortcut_AllNodes": "",
|
||||||
|
"Device_Shortcut_Archived": "",
|
||||||
|
"Device_Shortcut_Connected": "",
|
||||||
|
"Device_Shortcut_Devices": "",
|
||||||
|
"Device_Shortcut_DownAlerts": "",
|
||||||
|
"Device_Shortcut_DownOnly": "",
|
||||||
|
"Device_Shortcut_Favorites": "",
|
||||||
|
"Device_Shortcut_NewDevices": "",
|
||||||
|
"Device_Shortcut_OnlineChart": "",
|
||||||
|
"Device_TableHead_AlertDown": "",
|
||||||
|
"Device_TableHead_Connected_Devices": "",
|
||||||
|
"Device_TableHead_CustomProps": "",
|
||||||
|
"Device_TableHead_FQDN": "",
|
||||||
|
"Device_TableHead_Favorite": "",
|
||||||
|
"Device_TableHead_FirstSession": "",
|
||||||
|
"Device_TableHead_GUID": "",
|
||||||
|
"Device_TableHead_Group": "",
|
||||||
|
"Device_TableHead_Icon": "",
|
||||||
|
"Device_TableHead_LastIP": "",
|
||||||
|
"Device_TableHead_LastIPOrder": "",
|
||||||
|
"Device_TableHead_LastSession": "",
|
||||||
|
"Device_TableHead_Location": "",
|
||||||
|
"Device_TableHead_MAC": "",
|
||||||
|
"Device_TableHead_MAC_full": "",
|
||||||
|
"Device_TableHead_Name": "",
|
||||||
|
"Device_TableHead_NetworkSite": "",
|
||||||
|
"Device_TableHead_Owner": "",
|
||||||
|
"Device_TableHead_ParentRelType": "",
|
||||||
|
"Device_TableHead_Parent_MAC": "",
|
||||||
|
"Device_TableHead_Port": "",
|
||||||
|
"Device_TableHead_PresentLastScan": "",
|
||||||
|
"Device_TableHead_ReqNicsOnline": "",
|
||||||
|
"Device_TableHead_RowID": "",
|
||||||
|
"Device_TableHead_Rowid": "",
|
||||||
|
"Device_TableHead_SSID": "",
|
||||||
|
"Device_TableHead_SourcePlugin": "",
|
||||||
|
"Device_TableHead_Status": "",
|
||||||
|
"Device_TableHead_SyncHubNodeName": "",
|
||||||
|
"Device_TableHead_Type": "",
|
||||||
|
"Device_TableHead_Vendor": "",
|
||||||
|
"Device_Table_Not_Network_Device": "",
|
||||||
|
"Device_Table_info": "",
|
||||||
|
"Device_Table_nav_next": "",
|
||||||
|
"Device_Table_nav_prev": "",
|
||||||
|
"Device_Tablelenght": "",
|
||||||
|
"Device_Tablelenght_all": "",
|
||||||
|
"Device_Title": "",
|
||||||
|
"Devices_Filters": "",
|
||||||
|
"ENABLE_PLUGINS_description": "",
|
||||||
|
"ENABLE_PLUGINS_name": "",
|
||||||
|
"ENCRYPTION_KEY_description": "",
|
||||||
|
"ENCRYPTION_KEY_name": "",
|
||||||
|
"Email_display_name": "",
|
||||||
|
"Email_icon": "",
|
||||||
|
"Events_Loading": "",
|
||||||
|
"Events_Periodselect_All": "",
|
||||||
|
"Events_Periodselect_LastMonth": "",
|
||||||
|
"Events_Periodselect_LastWeek": "",
|
||||||
|
"Events_Periodselect_LastYear": "",
|
||||||
|
"Events_Periodselect_today": "",
|
||||||
|
"Events_Searchbox": "",
|
||||||
|
"Events_Shortcut_AllEvents": "",
|
||||||
|
"Events_Shortcut_DownAlerts": "",
|
||||||
|
"Events_Shortcut_Events": "",
|
||||||
|
"Events_Shortcut_MissSessions": "",
|
||||||
|
"Events_Shortcut_NewDevices": "",
|
||||||
|
"Events_Shortcut_Sessions": "",
|
||||||
|
"Events_Shortcut_VoidSessions": "",
|
||||||
|
"Events_TableHead_AdditionalInfo": "",
|
||||||
|
"Events_TableHead_Connection": "",
|
||||||
|
"Events_TableHead_Date": "",
|
||||||
|
"Events_TableHead_Device": "",
|
||||||
|
"Events_TableHead_Disconnection": "",
|
||||||
|
"Events_TableHead_Duration": "",
|
||||||
|
"Events_TableHead_DurationOrder": "",
|
||||||
|
"Events_TableHead_EventType": "",
|
||||||
|
"Events_TableHead_IP": "",
|
||||||
|
"Events_TableHead_IPOrder": "",
|
||||||
|
"Events_TableHead_Order": "",
|
||||||
|
"Events_TableHead_Owner": "",
|
||||||
|
"Events_TableHead_PendingAlert": "",
|
||||||
|
"Events_Table_info": "",
|
||||||
|
"Events_Table_nav_next": "",
|
||||||
|
"Events_Table_nav_prev": "",
|
||||||
|
"Events_Tablelenght": "",
|
||||||
|
"Events_Tablelenght_all": "",
|
||||||
|
"Events_Title": "",
|
||||||
|
"GRAPHQL_PORT_description": "",
|
||||||
|
"GRAPHQL_PORT_name": "",
|
||||||
|
"Gen_Action": "",
|
||||||
|
"Gen_Add": "",
|
||||||
|
"Gen_AddDevice": "",
|
||||||
|
"Gen_Add_All": "",
|
||||||
|
"Gen_All_Devices": "",
|
||||||
|
"Gen_AreYouSure": "",
|
||||||
|
"Gen_Backup": "",
|
||||||
|
"Gen_Cancel": "",
|
||||||
|
"Gen_Change": "",
|
||||||
|
"Gen_Copy": "",
|
||||||
|
"Gen_CopyToClipboard": "",
|
||||||
|
"Gen_DataUpdatedUITakesTime": "",
|
||||||
|
"Gen_Delete": "",
|
||||||
|
"Gen_DeleteAll": "",
|
||||||
|
"Gen_Description": "",
|
||||||
|
"Gen_Error": "",
|
||||||
|
"Gen_Filter": "",
|
||||||
|
"Gen_Generate": "",
|
||||||
|
"Gen_InvalidMac": "",
|
||||||
|
"Gen_LockedDB": "",
|
||||||
|
"Gen_NetworkMask": "",
|
||||||
|
"Gen_Offline": "",
|
||||||
|
"Gen_Okay": "",
|
||||||
|
"Gen_Online": "",
|
||||||
|
"Gen_Purge": "",
|
||||||
|
"Gen_ReadDocs": "",
|
||||||
|
"Gen_Remove_All": "",
|
||||||
|
"Gen_Remove_Last": "",
|
||||||
|
"Gen_Reset": "",
|
||||||
|
"Gen_Restore": "",
|
||||||
|
"Gen_Run": "",
|
||||||
|
"Gen_Save": "",
|
||||||
|
"Gen_Saved": "",
|
||||||
|
"Gen_Search": "",
|
||||||
|
"Gen_Select": "",
|
||||||
|
"Gen_SelectIcon": "",
|
||||||
|
"Gen_SelectToPreview": "",
|
||||||
|
"Gen_Selected_Devices": "",
|
||||||
|
"Gen_Subnet": "",
|
||||||
|
"Gen_Switch": "",
|
||||||
|
"Gen_Upd": "",
|
||||||
|
"Gen_Upd_Fail": "",
|
||||||
|
"Gen_Update": "",
|
||||||
|
"Gen_Update_Value": "",
|
||||||
|
"Gen_ValidIcon": "",
|
||||||
|
"Gen_Warning": "",
|
||||||
|
"Gen_Work_In_Progress": "",
|
||||||
|
"Gen_create_new_device": "",
|
||||||
|
"Gen_create_new_device_info": "",
|
||||||
|
"General_display_name": "",
|
||||||
|
"General_icon": "",
|
||||||
|
"HRS_TO_KEEP_NEWDEV_description": "",
|
||||||
|
"HRS_TO_KEEP_NEWDEV_name": "",
|
||||||
|
"HRS_TO_KEEP_OFFDEV_description": "",
|
||||||
|
"HRS_TO_KEEP_OFFDEV_name": "",
|
||||||
|
"LOADED_PLUGINS_description": "",
|
||||||
|
"LOADED_PLUGINS_name": "",
|
||||||
|
"LOG_LEVEL_description": "",
|
||||||
|
"LOG_LEVEL_name": "",
|
||||||
|
"Loading": "",
|
||||||
|
"Login_Box": "",
|
||||||
|
"Login_Default_PWD": "",
|
||||||
|
"Login_Info": "",
|
||||||
|
"Login_Psw-box": "",
|
||||||
|
"Login_Psw_alert": "",
|
||||||
|
"Login_Psw_folder": "",
|
||||||
|
"Login_Psw_new": "",
|
||||||
|
"Login_Psw_run": "",
|
||||||
|
"Login_Remember": "",
|
||||||
|
"Login_Remember_small": "",
|
||||||
|
"Login_Submit": "",
|
||||||
|
"Login_Toggle_Alert_headline": "",
|
||||||
|
"Login_Toggle_Info": "",
|
||||||
|
"Login_Toggle_Info_headline": "",
|
||||||
|
"Maint_PurgeLog": "",
|
||||||
|
"Maint_RestartServer": "",
|
||||||
|
"Maint_Restart_Server_noti_text": "",
|
||||||
|
"Maintenance_InitCheck": "",
|
||||||
|
"Maintenance_InitCheck_Checking": "",
|
||||||
|
"Maintenance_InitCheck_QuickSetupGuide": "",
|
||||||
|
"Maintenance_InitCheck_Success": "",
|
||||||
|
"Maintenance_ReCheck": "",
|
||||||
|
"Maintenance_Running_Version": "",
|
||||||
|
"Maintenance_Status": "",
|
||||||
|
"Maintenance_Title": "",
|
||||||
|
"Maintenance_Tool_DownloadConfig": "",
|
||||||
|
"Maintenance_Tool_DownloadConfig_text": "",
|
||||||
|
"Maintenance_Tool_DownloadWorkflows": "",
|
||||||
|
"Maintenance_Tool_DownloadWorkflows_text": "",
|
||||||
|
"Maintenance_Tool_ExportCSV": "",
|
||||||
|
"Maintenance_Tool_ExportCSV_noti": "",
|
||||||
|
"Maintenance_Tool_ExportCSV_noti_text": "",
|
||||||
|
"Maintenance_Tool_ExportCSV_text": "",
|
||||||
|
"Maintenance_Tool_ImportCSV": "",
|
||||||
|
"Maintenance_Tool_ImportCSV_noti": "",
|
||||||
|
"Maintenance_Tool_ImportCSV_noti_text": "",
|
||||||
|
"Maintenance_Tool_ImportCSV_text": "",
|
||||||
|
"Maintenance_Tool_ImportConfig_noti": "",
|
||||||
|
"Maintenance_Tool_ImportPastedCSV": "",
|
||||||
|
"Maintenance_Tool_ImportPastedCSV_noti_text": "",
|
||||||
|
"Maintenance_Tool_ImportPastedCSV_text": "",
|
||||||
|
"Maintenance_Tool_ImportPastedConfig": "",
|
||||||
|
"Maintenance_Tool_ImportPastedConfig_noti_text": "",
|
||||||
|
"Maintenance_Tool_ImportPastedConfig_text": "",
|
||||||
|
"Maintenance_Tool_arpscansw": "",
|
||||||
|
"Maintenance_Tool_arpscansw_noti": "",
|
||||||
|
"Maintenance_Tool_arpscansw_noti_text": "",
|
||||||
|
"Maintenance_Tool_arpscansw_text": "",
|
||||||
|
"Maintenance_Tool_backup": "",
|
||||||
|
"Maintenance_Tool_backup_noti": "",
|
||||||
|
"Maintenance_Tool_backup_noti_text": "",
|
||||||
|
"Maintenance_Tool_backup_text": "",
|
||||||
|
"Maintenance_Tool_check_visible": "",
|
||||||
|
"Maintenance_Tool_darkmode": "",
|
||||||
|
"Maintenance_Tool_darkmode_noti": "",
|
||||||
|
"Maintenance_Tool_darkmode_noti_text": "",
|
||||||
|
"Maintenance_Tool_darkmode_text": "",
|
||||||
|
"Maintenance_Tool_del_ActHistory": "",
|
||||||
|
"Maintenance_Tool_del_ActHistory_noti": "",
|
||||||
|
"Maintenance_Tool_del_ActHistory_noti_text": "",
|
||||||
|
"Maintenance_Tool_del_ActHistory_text": "",
|
||||||
|
"Maintenance_Tool_del_alldev": "",
|
||||||
|
"Maintenance_Tool_del_alldev_noti": "",
|
||||||
|
"Maintenance_Tool_del_alldev_noti_text": "",
|
||||||
|
"Maintenance_Tool_del_alldev_text": "",
|
||||||
|
"Maintenance_Tool_del_allevents": "",
|
||||||
|
"Maintenance_Tool_del_allevents30": "",
|
||||||
|
"Maintenance_Tool_del_allevents30_noti": "",
|
||||||
|
"Maintenance_Tool_del_allevents30_noti_text": "",
|
||||||
|
"Maintenance_Tool_del_allevents30_text": "",
|
||||||
|
"Maintenance_Tool_del_allevents_noti": "",
|
||||||
|
"Maintenance_Tool_del_allevents_noti_text": "",
|
||||||
|
"Maintenance_Tool_del_allevents_text": "",
|
||||||
|
"Maintenance_Tool_del_empty_macs": "",
|
||||||
|
"Maintenance_Tool_del_empty_macs_noti": "",
|
||||||
|
"Maintenance_Tool_del_empty_macs_noti_text": "",
|
||||||
|
"Maintenance_Tool_del_empty_macs_text": "",
|
||||||
|
"Maintenance_Tool_del_selecteddev": "",
|
||||||
|
"Maintenance_Tool_del_selecteddev_text": "",
|
||||||
|
"Maintenance_Tool_del_unknowndev": "",
|
||||||
|
"Maintenance_Tool_del_unknowndev_noti": "",
|
||||||
|
"Maintenance_Tool_del_unknowndev_noti_text": "",
|
||||||
|
"Maintenance_Tool_del_unknowndev_text": "",
|
||||||
|
"Maintenance_Tool_displayed_columns_text": "",
|
||||||
|
"Maintenance_Tool_drag_me": "",
|
||||||
|
"Maintenance_Tool_order_columns_text": "",
|
||||||
|
"Maintenance_Tool_purgebackup": "",
|
||||||
|
"Maintenance_Tool_purgebackup_noti": "",
|
||||||
|
"Maintenance_Tool_purgebackup_noti_text": "",
|
||||||
|
"Maintenance_Tool_purgebackup_text": "",
|
||||||
|
"Maintenance_Tool_restore": "",
|
||||||
|
"Maintenance_Tool_restore_noti": "",
|
||||||
|
"Maintenance_Tool_restore_noti_text": "",
|
||||||
|
"Maintenance_Tool_restore_text": "",
|
||||||
|
"Maintenance_Tool_upgrade_database_noti": "",
|
||||||
|
"Maintenance_Tool_upgrade_database_noti_text": "",
|
||||||
|
"Maintenance_Tool_upgrade_database_text": "",
|
||||||
|
"Maintenance_Tools_Tab_BackupRestore": "",
|
||||||
|
"Maintenance_Tools_Tab_Logging": "",
|
||||||
|
"Maintenance_Tools_Tab_Settings": "",
|
||||||
|
"Maintenance_Tools_Tab_Tools": "",
|
||||||
|
"Maintenance_Tools_Tab_UISettings": "",
|
||||||
|
"Maintenance_arp_status": "",
|
||||||
|
"Maintenance_arp_status_off": "",
|
||||||
|
"Maintenance_arp_status_on": "",
|
||||||
|
"Maintenance_built_on": "",
|
||||||
|
"Maintenance_current_version": "",
|
||||||
|
"Maintenance_database_backup": "",
|
||||||
|
"Maintenance_database_backup_found": "",
|
||||||
|
"Maintenance_database_backup_total": "",
|
||||||
|
"Maintenance_database_lastmod": "",
|
||||||
|
"Maintenance_database_path": "",
|
||||||
|
"Maintenance_database_rows": "",
|
||||||
|
"Maintenance_database_size": "",
|
||||||
|
"Maintenance_lang_selector_apply": "",
|
||||||
|
"Maintenance_lang_selector_empty": "",
|
||||||
|
"Maintenance_lang_selector_lable": "",
|
||||||
|
"Maintenance_lang_selector_text": "",
|
||||||
|
"Maintenance_new_version": "",
|
||||||
|
"Maintenance_themeselector_apply": "",
|
||||||
|
"Maintenance_themeselector_empty": "",
|
||||||
|
"Maintenance_themeselector_lable": "",
|
||||||
|
"Maintenance_themeselector_text": "",
|
||||||
|
"Maintenance_version": "",
|
||||||
|
"NETWORK_DEVICE_TYPES_description": "",
|
||||||
|
"NETWORK_DEVICE_TYPES_name": "",
|
||||||
|
"Navigation_About": "",
|
||||||
|
"Navigation_AppEvents": "",
|
||||||
|
"Navigation_Devices": "",
|
||||||
|
"Navigation_Donations": "",
|
||||||
|
"Navigation_Events": "",
|
||||||
|
"Navigation_Integrations": "",
|
||||||
|
"Navigation_Maintenance": "",
|
||||||
|
"Navigation_Monitoring": "",
|
||||||
|
"Navigation_Network": "",
|
||||||
|
"Navigation_Notifications": "",
|
||||||
|
"Navigation_Plugins": "",
|
||||||
|
"Navigation_Presence": "",
|
||||||
|
"Navigation_Report": "",
|
||||||
|
"Navigation_Settings": "",
|
||||||
|
"Navigation_SystemInfo": "",
|
||||||
|
"Navigation_Workflows": "",
|
||||||
|
"Network_Assign": "",
|
||||||
|
"Network_Cant_Assign": "",
|
||||||
|
"Network_Cant_Assign_No_Node_Selected": "",
|
||||||
|
"Network_Configuration_Error": "",
|
||||||
|
"Network_Connected": "",
|
||||||
|
"Network_Devices": "",
|
||||||
|
"Network_ManageAdd": "",
|
||||||
|
"Network_ManageAdd_Name": "",
|
||||||
|
"Network_ManageAdd_Name_text": "",
|
||||||
|
"Network_ManageAdd_Port": "",
|
||||||
|
"Network_ManageAdd_Port_text": "",
|
||||||
|
"Network_ManageAdd_Submit": "",
|
||||||
|
"Network_ManageAdd_Type": "",
|
||||||
|
"Network_ManageAdd_Type_text": "",
|
||||||
|
"Network_ManageAssign": "",
|
||||||
|
"Network_ManageDel": "",
|
||||||
|
"Network_ManageDel_Name": "",
|
||||||
|
"Network_ManageDel_Name_text": "",
|
||||||
|
"Network_ManageDel_Submit": "",
|
||||||
|
"Network_ManageDevices": "",
|
||||||
|
"Network_ManageEdit": "",
|
||||||
|
"Network_ManageEdit_ID": "",
|
||||||
|
"Network_ManageEdit_ID_text": "",
|
||||||
|
"Network_ManageEdit_Name": "",
|
||||||
|
"Network_ManageEdit_Name_text": "",
|
||||||
|
"Network_ManageEdit_Port": "",
|
||||||
|
"Network_ManageEdit_Port_text": "",
|
||||||
|
"Network_ManageEdit_Submit": "",
|
||||||
|
"Network_ManageEdit_Type": "",
|
||||||
|
"Network_ManageEdit_Type_text": "",
|
||||||
|
"Network_ManageLeaf": "",
|
||||||
|
"Network_ManageUnassign": "",
|
||||||
|
"Network_NoAssignedDevices": "",
|
||||||
|
"Network_NoDevices": "",
|
||||||
|
"Network_Node": "",
|
||||||
|
"Network_Node_Name": "",
|
||||||
|
"Network_Parent": "",
|
||||||
|
"Network_Root": "",
|
||||||
|
"Network_Root_Not_Configured": "",
|
||||||
|
"Network_Root_Unconfigurable": "",
|
||||||
|
"Network_ShowArchived": "",
|
||||||
|
"Network_ShowOffline": "",
|
||||||
|
"Network_Table_Hostname": "",
|
||||||
|
"Network_Table_IP": "",
|
||||||
|
"Network_Table_State": "",
|
||||||
|
"Network_Title": "",
|
||||||
|
"Network_UnassignedDevices": "",
|
||||||
|
"Notifications_All": "",
|
||||||
|
"Notifications_Mark_All_Read": "",
|
||||||
|
"PIALERT_WEB_PASSWORD_description": "",
|
||||||
|
"PIALERT_WEB_PASSWORD_name": "",
|
||||||
|
"PIALERT_WEB_PROTECTION_description": "",
|
||||||
|
"PIALERT_WEB_PROTECTION_name": "",
|
||||||
|
"PLUGINS_KEEP_HIST_description": "",
|
||||||
|
"PLUGINS_KEEP_HIST_name": "",
|
||||||
|
"Plugins_DeleteAll": "",
|
||||||
|
"Plugins_Filters_Mac": "",
|
||||||
|
"Plugins_History": "",
|
||||||
|
"Plugins_Obj_DeleteListed": "",
|
||||||
|
"Plugins_Objects": "",
|
||||||
|
"Plugins_Out_of": "",
|
||||||
|
"Plugins_Unprocessed_Events": "",
|
||||||
|
"Plugins_no_control": "",
|
||||||
|
"Presence_CalHead_day": "",
|
||||||
|
"Presence_CalHead_lang": "",
|
||||||
|
"Presence_CalHead_month": "",
|
||||||
|
"Presence_CalHead_quarter": "",
|
||||||
|
"Presence_CalHead_week": "",
|
||||||
|
"Presence_CalHead_year": "",
|
||||||
|
"Presence_CallHead_Devices": "",
|
||||||
|
"Presence_Key_OnlineNow": "",
|
||||||
|
"Presence_Key_OnlineNow_desc": "",
|
||||||
|
"Presence_Key_OnlinePast": "",
|
||||||
|
"Presence_Key_OnlinePastMiss": "",
|
||||||
|
"Presence_Key_OnlinePastMiss_desc": "",
|
||||||
|
"Presence_Key_OnlinePast_desc": "",
|
||||||
|
"Presence_Loading": "",
|
||||||
|
"Presence_Shortcut_AllDevices": "",
|
||||||
|
"Presence_Shortcut_Archived": "",
|
||||||
|
"Presence_Shortcut_Connected": "",
|
||||||
|
"Presence_Shortcut_Devices": "",
|
||||||
|
"Presence_Shortcut_DownAlerts": "",
|
||||||
|
"Presence_Shortcut_Favorites": "",
|
||||||
|
"Presence_Shortcut_NewDevices": "",
|
||||||
|
"Presence_Title": "",
|
||||||
|
"REFRESH_FQDN_description": "",
|
||||||
|
"REFRESH_FQDN_name": "",
|
||||||
|
"REPORT_DASHBOARD_URL_description": "",
|
||||||
|
"REPORT_DASHBOARD_URL_name": "",
|
||||||
|
"REPORT_ERROR": "",
|
||||||
|
"REPORT_MAIL_description": "",
|
||||||
|
"REPORT_MAIL_name": "",
|
||||||
|
"REPORT_TITLE": "",
|
||||||
|
"RandomMAC_hover": "",
|
||||||
|
"Reports_Sent_Log": "",
|
||||||
|
"SCAN_SUBNETS_description": "",
|
||||||
|
"SCAN_SUBNETS_name": "",
|
||||||
|
"SYSTEM_TITLE": "",
|
||||||
|
"Setting_Override": "",
|
||||||
|
"Setting_Override_Description": "",
|
||||||
|
"Settings_Metadata_Toggle": "",
|
||||||
|
"Settings_Show_Description": "",
|
||||||
|
"Settings_device_Scanners_desync": "",
|
||||||
|
"Settings_device_Scanners_desync_popup": "",
|
||||||
|
"Speedtest_Results": "",
|
||||||
|
"Systeminfo_AvailableIps": "",
|
||||||
|
"Systeminfo_CPU": "",
|
||||||
|
"Systeminfo_CPU_Cores": "",
|
||||||
|
"Systeminfo_CPU_Name": "",
|
||||||
|
"Systeminfo_CPU_Speed": "",
|
||||||
|
"Systeminfo_CPU_Temp": "",
|
||||||
|
"Systeminfo_CPU_Vendor": "",
|
||||||
|
"Systeminfo_Client_Resolution": "",
|
||||||
|
"Systeminfo_Client_User_Agent": "",
|
||||||
|
"Systeminfo_General": "",
|
||||||
|
"Systeminfo_General_Date": "",
|
||||||
|
"Systeminfo_General_Date2": "",
|
||||||
|
"Systeminfo_General_Full_Date": "",
|
||||||
|
"Systeminfo_General_TimeZone": "",
|
||||||
|
"Systeminfo_Memory": "",
|
||||||
|
"Systeminfo_Memory_Total_Memory": "",
|
||||||
|
"Systeminfo_Memory_Usage": "",
|
||||||
|
"Systeminfo_Memory_Usage_Percent": "",
|
||||||
|
"Systeminfo_Motherboard": "",
|
||||||
|
"Systeminfo_Motherboard_BIOS": "",
|
||||||
|
"Systeminfo_Motherboard_BIOS_Date": "",
|
||||||
|
"Systeminfo_Motherboard_BIOS_Vendor": "",
|
||||||
|
"Systeminfo_Motherboard_Manufactured": "",
|
||||||
|
"Systeminfo_Motherboard_Name": "",
|
||||||
|
"Systeminfo_Motherboard_Revision": "",
|
||||||
|
"Systeminfo_Network": "",
|
||||||
|
"Systeminfo_Network_Accept_Encoding": "",
|
||||||
|
"Systeminfo_Network_Accept_Language": "",
|
||||||
|
"Systeminfo_Network_Connection_Port": "",
|
||||||
|
"Systeminfo_Network_HTTP_Host": "",
|
||||||
|
"Systeminfo_Network_HTTP_Referer": "",
|
||||||
|
"Systeminfo_Network_HTTP_Referer_String": "",
|
||||||
|
"Systeminfo_Network_Hardware": "",
|
||||||
|
"Systeminfo_Network_Hardware_Interface_Mask": "",
|
||||||
|
"Systeminfo_Network_Hardware_Interface_Name": "",
|
||||||
|
"Systeminfo_Network_Hardware_Interface_RX": "",
|
||||||
|
"Systeminfo_Network_Hardware_Interface_TX": "",
|
||||||
|
"Systeminfo_Network_IP": "",
|
||||||
|
"Systeminfo_Network_IP_Connection": "",
|
||||||
|
"Systeminfo_Network_IP_Server": "",
|
||||||
|
"Systeminfo_Network_MIME": "",
|
||||||
|
"Systeminfo_Network_Request_Method": "",
|
||||||
|
"Systeminfo_Network_Request_Time": "",
|
||||||
|
"Systeminfo_Network_Request_URI": "",
|
||||||
|
"Systeminfo_Network_Secure_Connection": "",
|
||||||
|
"Systeminfo_Network_Secure_Connection_String": "",
|
||||||
|
"Systeminfo_Network_Server_Name": "",
|
||||||
|
"Systeminfo_Network_Server_Name_String": "",
|
||||||
|
"Systeminfo_Network_Server_Query": "",
|
||||||
|
"Systeminfo_Network_Server_Query_String": "",
|
||||||
|
"Systeminfo_Network_Server_Version": "",
|
||||||
|
"Systeminfo_Services": "",
|
||||||
|
"Systeminfo_Services_Description": "",
|
||||||
|
"Systeminfo_Services_Name": "",
|
||||||
|
"Systeminfo_Storage": "",
|
||||||
|
"Systeminfo_Storage_Device": "",
|
||||||
|
"Systeminfo_Storage_Mount": "",
|
||||||
|
"Systeminfo_Storage_Size": "",
|
||||||
|
"Systeminfo_Storage_Type": "",
|
||||||
|
"Systeminfo_Storage_Usage": "",
|
||||||
|
"Systeminfo_Storage_Usage_Free": "",
|
||||||
|
"Systeminfo_Storage_Usage_Mount": "",
|
||||||
|
"Systeminfo_Storage_Usage_Total": "",
|
||||||
|
"Systeminfo_Storage_Usage_Used": "",
|
||||||
|
"Systeminfo_System": "",
|
||||||
|
"Systeminfo_System_AVG": "",
|
||||||
|
"Systeminfo_System_Architecture": "",
|
||||||
|
"Systeminfo_System_Kernel": "",
|
||||||
|
"Systeminfo_System_OSVersion": "",
|
||||||
|
"Systeminfo_System_Running_Processes": "",
|
||||||
|
"Systeminfo_System_System": "",
|
||||||
|
"Systeminfo_System_Uname": "",
|
||||||
|
"Systeminfo_System_Uptime": "",
|
||||||
|
"Systeminfo_This_Client": "",
|
||||||
|
"Systeminfo_USB_Devices": "",
|
||||||
|
"TICKER_MIGRATE_TO_NETALERTX": "",
|
||||||
|
"TIMEZONE_description": "",
|
||||||
|
"TIMEZONE_name": "",
|
||||||
|
"UI_DEV_SECTIONS_description": "",
|
||||||
|
"UI_DEV_SECTIONS_name": "",
|
||||||
|
"UI_ICONS_description": "",
|
||||||
|
"UI_ICONS_name": "",
|
||||||
|
"UI_LANG_description": "",
|
||||||
|
"UI_LANG_name": "",
|
||||||
|
"UI_MY_DEVICES_description": "",
|
||||||
|
"UI_MY_DEVICES_name": "",
|
||||||
|
"UI_NOT_RANDOM_MAC_description": "",
|
||||||
|
"UI_NOT_RANDOM_MAC_name": "",
|
||||||
|
"UI_PRESENCE_description": "",
|
||||||
|
"UI_PRESENCE_name": "",
|
||||||
|
"UI_REFRESH_description": "",
|
||||||
|
"UI_REFRESH_name": "",
|
||||||
|
"VERSION_description": "",
|
||||||
|
"VERSION_name": "",
|
||||||
|
"WF_Action_Add": "",
|
||||||
|
"WF_Action_field": "",
|
||||||
|
"WF_Action_type": "",
|
||||||
|
"WF_Action_value": "",
|
||||||
|
"WF_Actions": "",
|
||||||
|
"WF_Add": "",
|
||||||
|
"WF_Add_Condition": "",
|
||||||
|
"WF_Add_Group": "",
|
||||||
|
"WF_Condition_field": "",
|
||||||
|
"WF_Condition_operator": "",
|
||||||
|
"WF_Condition_value": "",
|
||||||
|
"WF_Conditions": "",
|
||||||
|
"WF_Conditions_logic_rules": "",
|
||||||
|
"WF_Duplicate": "",
|
||||||
|
"WF_Enabled": "",
|
||||||
|
"WF_Export": "",
|
||||||
|
"WF_Export_Copy": "",
|
||||||
|
"WF_Import": "",
|
||||||
|
"WF_Import_Copy": "",
|
||||||
|
"WF_Name": "",
|
||||||
|
"WF_Remove": "",
|
||||||
|
"WF_Remove_Copy": "",
|
||||||
|
"WF_Save": "",
|
||||||
|
"WF_Trigger": "",
|
||||||
|
"WF_Trigger_event_type": "",
|
||||||
|
"WF_Trigger_type": "",
|
||||||
|
"add_icon_event_tooltip": "",
|
||||||
|
"add_option_event_tooltip": "",
|
||||||
|
"copy_icons_event_tooltip": "",
|
||||||
|
"devices_old": "",
|
||||||
|
"general_event_description": "",
|
||||||
|
"general_event_title": "",
|
||||||
|
"go_to_device_event_tooltip": "",
|
||||||
|
"go_to_node_event_tooltip": "",
|
||||||
|
"new_version_available": "",
|
||||||
|
"report_guid": "",
|
||||||
|
"report_guid_missing": "",
|
||||||
|
"report_select_format": "",
|
||||||
|
"report_time": "",
|
||||||
|
"run_event_tooltip": "",
|
||||||
|
"select_icon_event_tooltip": "",
|
||||||
|
"settings_core_icon": "",
|
||||||
|
"settings_core_label": "",
|
||||||
|
"settings_device_scanners": "",
|
||||||
|
"settings_device_scanners_icon": "",
|
||||||
|
"settings_device_scanners_info": "",
|
||||||
|
"settings_device_scanners_label": "",
|
||||||
|
"settings_enabled": "",
|
||||||
|
"settings_enabled_icon": "",
|
||||||
|
"settings_expand_all": "",
|
||||||
|
"settings_imported": "",
|
||||||
|
"settings_imported_label": "",
|
||||||
|
"settings_missing": "",
|
||||||
|
"settings_missing_block": "",
|
||||||
|
"settings_old": "",
|
||||||
|
"settings_other_scanners": "",
|
||||||
|
"settings_other_scanners_icon": "",
|
||||||
|
"settings_other_scanners_label": "",
|
||||||
|
"settings_publishers": "",
|
||||||
|
"settings_publishers_icon": "",
|
||||||
|
"settings_publishers_info": "",
|
||||||
|
"settings_publishers_label": "",
|
||||||
|
"settings_readonly": "",
|
||||||
|
"settings_saved": "",
|
||||||
|
"settings_system_icon": "",
|
||||||
|
"settings_system_label": "",
|
||||||
|
"settings_update_item_warning": "",
|
||||||
|
"test_event_tooltip": ""
|
||||||
|
}
|
||||||
@@ -5,7 +5,7 @@
|
|||||||
// ###################################
|
// ###################################
|
||||||
|
|
||||||
$defaultLang = "en_us";
|
$defaultLang = "en_us";
|
||||||
$allLanguages = [ "ar_ar", "ca_ca", "cs_cz", "de_de", "en_us", "es_es", "fa_fa", "fr_fr", "it_it", "nb_no", "pl_pl", "pt_br", "pt_pt", "ru_ru", "sv_sv", "tr_tr", "uk_ua", "zh_cn"];
|
$allLanguages = [ "ar_ar", "ca_ca", "cs_cz", "de_de", "en_us", "es_es", "fa_fa", "fr_fr", "it_it", "ja_jp", "nb_no", "pl_pl", "pt_br", "pt_pt", "ru_ru", "sv_sv", "tr_tr", "uk_ua", "zh_cn"];
|
||||||
|
|
||||||
|
|
||||||
global $db;
|
global $db;
|
||||||
@@ -23,6 +23,7 @@ switch($result){
|
|||||||
case 'Farsi (fa_fa)': $pia_lang_selected = 'fa_fa'; break;
|
case 'Farsi (fa_fa)': $pia_lang_selected = 'fa_fa'; break;
|
||||||
case 'French (fr_fr)': $pia_lang_selected = 'fr_fr'; break;
|
case 'French (fr_fr)': $pia_lang_selected = 'fr_fr'; break;
|
||||||
case 'Italian (it_it)': $pia_lang_selected = 'it_it'; break;
|
case 'Italian (it_it)': $pia_lang_selected = 'it_it'; break;
|
||||||
|
case 'Japanese (ja_jp)': $pia_lang_selected = 'ja_jp'; break;
|
||||||
case 'Norwegian (nb_no)': $pia_lang_selected = 'nb_no'; break;
|
case 'Norwegian (nb_no)': $pia_lang_selected = 'nb_no'; break;
|
||||||
case 'Polish (pl_pl)': $pia_lang_selected = 'pl_pl'; break;
|
case 'Polish (pl_pl)': $pia_lang_selected = 'pl_pl'; break;
|
||||||
case 'Portuguese (pt_br)': $pia_lang_selected = 'pt_br'; break;
|
case 'Portuguese (pt_br)': $pia_lang_selected = 'pt_br'; break;
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
import json
|
import json
|
||||||
import os
|
import os
|
||||||
import sys
|
|
||||||
|
|
||||||
def merge_translations(main_file, other_files):
|
def merge_translations(main_file, other_files):
|
||||||
# Load main file
|
# Load main file
|
||||||
@@ -30,10 +30,14 @@ def merge_translations(main_file, other_files):
|
|||||||
json.dump(data, f, indent=4, ensure_ascii=False)
|
json.dump(data, f, indent=4, ensure_ascii=False)
|
||||||
f.truncate()
|
f.truncate()
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
if __name__ == "__main__":
|
||||||
current_path = os.path.dirname(os.path.abspath(__file__))
|
current_path = os.path.dirname(os.path.abspath(__file__))
|
||||||
# language codes can be found here: http://www.lingoes.net/en/translator/langcode.htm
|
# language codes can be found here: http://www.lingoes.net/en/translator/langcode.htm
|
||||||
# "en_us.json" has to be first!
|
# ⚠ "en_us.json" has to be first!
|
||||||
json_files = [ "en_us.json", "ar_ar.json", "ca_ca.json", "cs_cz.json", "de_de.json", "es_es.json", "fa_fa.json", "fr_fr.json", "it_it.json", "nb_no.json", "pl_pl.json", "pt_br.json", "pt_pt.json", "ru_ru.json", "sv_sv.json", "tr_tr.json", "uk_ua.json", "zh_cn.json"]
|
json_files = ["en_us.json", "ar_ar.json", "ca_ca.json", "cs_cz.json", "de_de.json",
|
||||||
|
"es_es.json", "fa_fa.json", "fr_fr.json", "it_it.json", "ja_jp.json",
|
||||||
|
"nb_no.json", "pl_pl.json", "pt_br.json", "pt_pt.json", "ru_ru.json",
|
||||||
|
"sv_sv.json", "tr_tr.json", "uk_ua.json", "zh_cn.json"]
|
||||||
file_paths = [os.path.join(current_path, file) for file in json_files]
|
file_paths = [os.path.join(current_path, file) for file in json_files]
|
||||||
merge_translations(file_paths[0], file_paths[1:])
|
merge_translations(file_paths[0], file_paths[1:])
|
||||||
|
|||||||
@@ -761,4 +761,4 @@
|
|||||||
"settings_system_label": "Система",
|
"settings_system_label": "Система",
|
||||||
"settings_update_item_warning": "Обновить значение ниже. Будьте осторожны, следуя предыдущему формату. <b>Проверка не выполняется.</b>",
|
"settings_update_item_warning": "Обновить значение ниже. Будьте осторожны, следуя предыдущему формату. <b>Проверка не выполняется.</b>",
|
||||||
"test_event_tooltip": "Сначала сохраните изменения, прежде чем проверять настройки."
|
"test_event_tooltip": "Сначала сохраните изменения, прежде чем проверять настройки."
|
||||||
}
|
}
|
||||||
@@ -8,12 +8,12 @@ from pytz import timezone
|
|||||||
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
|
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
|
||||||
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
|
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
|
||||||
|
|
||||||
from const import logPath
|
from const import logPath # noqa: E402, E261 [flake8 lint suppression]
|
||||||
from plugin_helper import Plugin_Objects
|
from plugin_helper import Plugin_Objects # noqa: E402, E261 [flake8 lint suppression]
|
||||||
from logger import mylog, Logger
|
from logger import mylog, Logger # noqa: E402, E261 [flake8 lint suppression]
|
||||||
from helper import get_setting_value
|
from helper import get_setting_value # noqa: E402, E261 [flake8 lint suppression]
|
||||||
|
|
||||||
import conf
|
import conf # noqa: E402, E261 [flake8 lint suppression]
|
||||||
|
|
||||||
# Make sure the TIMEZONE for logging is correct
|
# Make sure the TIMEZONE for logging is correct
|
||||||
conf.tz = timezone(get_setting_value('TIMEZONE'))
|
conf.tz = timezone(get_setting_value('TIMEZONE'))
|
||||||
@@ -32,9 +32,8 @@ RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
|
|||||||
plugin_objects = Plugin_Objects(RESULT_FILE)
|
plugin_objects = Plugin_Objects(RESULT_FILE)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
def main():
|
def main():
|
||||||
mylog('verbose', [f'[{pluginName}] In script'])
|
mylog('verbose', [f'[{pluginName}] In script'])
|
||||||
|
|
||||||
# Retrieve configuration settings
|
# Retrieve configuration settings
|
||||||
some_setting = get_setting_value('SYNC_plugins')
|
some_setting = get_setting_value('SYNC_plugins')
|
||||||
@@ -47,14 +46,14 @@ def main():
|
|||||||
# Process the data into native application tables
|
# Process the data into native application tables
|
||||||
if len(device_data) > 0:
|
if len(device_data) > 0:
|
||||||
|
|
||||||
# insert devices into the lats_result.log
|
# insert devices into the lats_result.log
|
||||||
# make sure the below mapping is mapped in config.json, for example:
|
# make sure the below mapping is mapped in config.json, for example:
|
||||||
# "database_column_definitions": [
|
# "database_column_definitions": [
|
||||||
# {
|
# {
|
||||||
# "column": "Object_PrimaryID", <--------- the value I save into primaryId
|
# "column": "Object_PrimaryID", <--------- the value I save into primaryId
|
||||||
# "mapped_to_column": "cur_MAC", <--------- gets inserted into the CurrentScan DB
|
# "mapped_to_column": "cur_MAC", <--------- gets inserted into the CurrentScan DB
|
||||||
# table column cur_MAC
|
# table column cur_MAC
|
||||||
#
|
#
|
||||||
for device in device_data:
|
for device in device_data:
|
||||||
plugin_objects.add_object(
|
plugin_objects.add_object(
|
||||||
primaryId = device['mac_address'],
|
primaryId = device['mac_address'],
|
||||||
@@ -65,11 +64,11 @@ def main():
|
|||||||
watched4 = device['last_seen'],
|
watched4 = device['last_seen'],
|
||||||
extra = '',
|
extra = '',
|
||||||
foreignKey = device['mac_address']
|
foreignKey = device['mac_address']
|
||||||
# helpVal1 = "Something1", # Optional Helper values to be passed for mapping into the app
|
# helpVal1 = "Something1", # Optional Helper values to be passed for mapping into the app
|
||||||
# helpVal2 = "Something1", # If you need to use even only 1, add the remaining ones too
|
# helpVal2 = "Something1", # If you need to use even only 1, add the remaining ones too
|
||||||
# helpVal3 = "Something1", # and set them to 'null'. Check the the docs for details:
|
# helpVal3 = "Something1", # and set them to 'null'. Check the the docs for details:
|
||||||
# helpVal4 = "Something1", # https://github.com/jokob-sk/NetAlertX/blob/main/docs/PLUGINS_DEV.md
|
# helpVal4 = "Something1", # https://github.com/jokob-sk/NetAlertX/blob/main/docs/PLUGINS_DEV.md
|
||||||
)
|
)
|
||||||
|
|
||||||
mylog('verbose', [f'[{pluginName}] New entries: "{len(device_data)}"'])
|
mylog('verbose', [f'[{pluginName}] New entries: "{len(device_data)}"'])
|
||||||
|
|
||||||
@@ -78,14 +77,15 @@ def main():
|
|||||||
|
|
||||||
return 0
|
return 0
|
||||||
|
|
||||||
|
|
||||||
# retrieve data
|
# retrieve data
|
||||||
def get_device_data(some_setting):
|
def get_device_data(some_setting):
|
||||||
|
|
||||||
device_data = []
|
device_data = []
|
||||||
|
|
||||||
# do some processing, call exteranl APIs, and return a device_data list
|
# do some processing, call exteranl APIs, and return a device_data list
|
||||||
# ...
|
# ...
|
||||||
#
|
#
|
||||||
# Sample data for testing purposes, you can adjust the processing in main() as needed
|
# Sample data for testing purposes, you can adjust the processing in main() as needed
|
||||||
# ... before adding it to the plugin_objects.add_object(...)
|
# ... before adding it to the plugin_objects.add_object(...)
|
||||||
device_data = [
|
device_data = [
|
||||||
@@ -113,8 +113,9 @@ def get_device_data(some_setting):
|
|||||||
}
|
}
|
||||||
]
|
]
|
||||||
|
|
||||||
# Return the data to be detected by the main application
|
# Return the data to be detected by the main application
|
||||||
return device_data
|
return device_data
|
||||||
|
|
||||||
|
|
||||||
if __name__ == '__main__':
|
if __name__ == '__main__':
|
||||||
main()
|
main()
|
||||||
|
|||||||
@@ -11,10 +11,10 @@ INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
|
|||||||
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
|
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
|
||||||
|
|
||||||
# NetAlertX modules
|
# NetAlertX modules
|
||||||
from const import logPath
|
from const import logPath # noqa: E402 [flake8 lint suppression]
|
||||||
from plugin_helper import Plugin_Objects
|
from plugin_helper import Plugin_Objects # noqa: E402 [flake8 lint suppression]
|
||||||
from logger import mylog
|
from logger import mylog # noqa: E402 [flake8 lint suppression]
|
||||||
from helper import get_setting_value
|
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
|
||||||
|
|
||||||
pluginName = 'TESTONLY'
|
pluginName = 'TESTONLY'
|
||||||
|
|
||||||
@@ -28,14 +28,11 @@ plugin_objects = Plugin_Objects(RESULT_FILE)
|
|||||||
md5_hash = hashlib.md5()
|
md5_hash = hashlib.md5()
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
# globals
|
# globals
|
||||||
|
|
||||||
|
|
||||||
def main():
|
def main():
|
||||||
# START
|
# START
|
||||||
mylog('verbose', [f'[{pluginName}] In script'])
|
mylog('verbose', [f'[{pluginName}] In script'])
|
||||||
|
|
||||||
# SPACE FOR TESTING 🔽
|
# SPACE FOR TESTING 🔽
|
||||||
|
|
||||||
str = "ABC-MBP._another.localdomain."
|
str = "ABC-MBP._another.localdomain."
|
||||||
@@ -43,28 +40,23 @@ def main():
|
|||||||
# result = cleanDeviceName(str, True)
|
# result = cleanDeviceName(str, True)
|
||||||
|
|
||||||
regexes = get_setting_value('NEWDEV_NAME_CLEANUP_REGEX')
|
regexes = get_setting_value('NEWDEV_NAME_CLEANUP_REGEX')
|
||||||
|
|
||||||
print(regexes)
|
print(regexes)
|
||||||
subnets = get_setting_value('SCAN_SUBNETS')
|
subnets = get_setting_value('SCAN_SUBNETS')
|
||||||
|
|
||||||
print(subnets)
|
print(subnets)
|
||||||
|
|
||||||
for rgx in regexes:
|
for rgx in regexes:
|
||||||
mylog('trace', ["[cleanDeviceName] applying regex : " + rgx])
|
mylog('trace', ["[cleanDeviceName] applying regex : " + rgx])
|
||||||
mylog('trace', ["[cleanDeviceName] name before regex : " + str])
|
mylog('trace', ["[cleanDeviceName] name before regex : " + str])
|
||||||
|
|
||||||
str = re.sub(rgx, "", str)
|
str = re.sub(rgx, "", str)
|
||||||
mylog('trace', ["[cleanDeviceName] name after regex : " + str])
|
mylog('trace', ["[cleanDeviceName] name after regex : " + str])
|
||||||
|
|
||||||
mylog('debug', ["[cleanDeviceName] output: " + str])
|
mylog('debug', ["[cleanDeviceName] output: " + str])
|
||||||
|
|
||||||
|
|
||||||
# SPACE FOR TESTING 🔼
|
# SPACE FOR TESTING 🔼
|
||||||
|
|
||||||
# END
|
# END
|
||||||
mylog('verbose', [f'[{pluginName}] result "{str}"'])
|
mylog('verbose', [f'[{pluginName}] result "{str}"'])
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
# -------------INIT---------------------
|
# -------------INIT---------------------
|
||||||
|
|||||||
@@ -9,15 +9,15 @@ import sys
|
|||||||
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
|
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
|
||||||
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
|
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
|
||||||
|
|
||||||
import conf
|
import conf # noqa: E402 [flake8 lint suppression]
|
||||||
from const import confFileName, logPath
|
from const import confFileName, logPath # noqa: E402 [flake8 lint suppression]
|
||||||
from utils.datetime_utils import timeNowDB
|
from utils.datetime_utils import timeNowDB # noqa: E402 [flake8 lint suppression]
|
||||||
from plugin_helper import Plugin_Objects
|
from plugin_helper import Plugin_Objects # noqa: E402 [flake8 lint suppression]
|
||||||
from logger import mylog, Logger
|
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
|
||||||
from helper import timeNowTZ, get_setting_value
|
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
|
||||||
from models.notification_instance import NotificationInstance
|
from models.notification_instance import NotificationInstance # noqa: E402 [flake8 lint suppression]
|
||||||
from database import DB
|
from database import DB # noqa: E402 [flake8 lint suppression]
|
||||||
from pytz import timezone
|
from pytz import timezone # noqa: E402 [flake8 lint suppression]
|
||||||
|
|
||||||
# Make sure the TIMEZONE for logging is correct
|
# Make sure the TIMEZONE for logging is correct
|
||||||
conf.tz = timezone(get_setting_value("TIMEZONE"))
|
conf.tz = timezone(get_setting_value("TIMEZONE"))
|
||||||
@@ -35,7 +35,7 @@ def main():
|
|||||||
mylog("verbose", [f"[{pluginName}](publisher) In script"])
|
mylog("verbose", [f"[{pluginName}](publisher) In script"])
|
||||||
|
|
||||||
# Check if basic config settings supplied
|
# Check if basic config settings supplied
|
||||||
if check_config() == False:
|
if check_config() is False:
|
||||||
mylog(
|
mylog(
|
||||||
"none",
|
"none",
|
||||||
[
|
[
|
||||||
@@ -65,9 +65,9 @@ def main():
|
|||||||
# Log result
|
# Log result
|
||||||
plugin_objects.add_object(
|
plugin_objects.add_object(
|
||||||
primaryId = pluginName,
|
primaryId = pluginName,
|
||||||
secondaryId = timeNowDB(),
|
secondaryId = timeNowDB(),
|
||||||
watched1 = notification["GUID"],
|
watched1 = notification["GUID"],
|
||||||
watched2 = result,
|
watched2 = result,
|
||||||
watched3 = 'null',
|
watched3 = 'null',
|
||||||
watched4 = 'null',
|
watched4 = 'null',
|
||||||
extra = 'null',
|
extra = 'null',
|
||||||
@@ -80,8 +80,7 @@ def main():
|
|||||||
# -------------------------------------------------------------------------------
|
# -------------------------------------------------------------------------------
|
||||||
def check_config():
|
def check_config():
|
||||||
if get_setting_value("APPRISE_HOST") == "" or (
|
if get_setting_value("APPRISE_HOST") == "" or (
|
||||||
get_setting_value("APPRISE_URL") == ""
|
get_setting_value("APPRISE_URL") == "" and get_setting_value("APPRISE_TAG") == ""
|
||||||
and get_setting_value("APPRISE_TAG") == ""
|
|
||||||
):
|
):
|
||||||
return False
|
return False
|
||||||
else:
|
else:
|
||||||
|
|||||||
@@ -16,15 +16,15 @@ INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
|
|||||||
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
|
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
|
||||||
|
|
||||||
# NetAlertX modules
|
# NetAlertX modules
|
||||||
import conf
|
import conf # noqa: E402 [flake8 lint suppression]
|
||||||
from const import confFileName, logPath
|
from const import confFileName, logPath # noqa: E402 [flake8 lint suppression]
|
||||||
from plugin_helper import Plugin_Objects
|
from plugin_helper import Plugin_Objects # noqa: E402 [flake8 lint suppression]
|
||||||
from utils.datetime_utils import timeNowDB
|
from utils.datetime_utils import timeNowDB # noqa: E402 [flake8 lint suppression]
|
||||||
from logger import mylog, Logger
|
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
|
||||||
from helper import timeNowTZ, get_setting_value, hide_email
|
from helper import get_setting_value, hide_email # noqa: E402 [flake8 lint suppression]
|
||||||
from models.notification_instance import NotificationInstance
|
from models.notification_instance import NotificationInstance # noqa: E402 [flake8 lint suppression]
|
||||||
from database import DB
|
from database import DB # noqa: E402 [flake8 lint suppression]
|
||||||
from pytz import timezone
|
from pytz import timezone # noqa: E402 [flake8 lint suppression]
|
||||||
|
|
||||||
# Make sure the TIMEZONE for logging is correct
|
# Make sure the TIMEZONE for logging is correct
|
||||||
conf.tz = timezone(get_setting_value('TIMEZONE'))
|
conf.tz = timezone(get_setting_value('TIMEZONE'))
|
||||||
@@ -38,13 +38,12 @@ LOG_PATH = logPath + '/plugins'
|
|||||||
RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
|
RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
def main():
|
def main():
|
||||||
|
|
||||||
mylog('verbose', [f'[{pluginName}](publisher) In script'])
|
mylog('verbose', [f'[{pluginName}](publisher) In script'])
|
||||||
|
|
||||||
# Check if basic config settings supplied
|
# Check if basic config settings supplied
|
||||||
if check_config() == False:
|
if check_config() is False:
|
||||||
mylog('none', [f'[{pluginName}] ⚠ ERROR: Publisher notification gateway not set up correctly. Check your {confFileName} {pluginName}_* variables.'])
|
mylog('none', [f'[{pluginName}] ⚠ ERROR: Publisher notification gateway not set up correctly. Check your {confFileName} {pluginName}_* variables.'])
|
||||||
return
|
return
|
||||||
|
|
||||||
@@ -61,7 +60,7 @@ def main():
|
|||||||
# Retrieve new notifications
|
# Retrieve new notifications
|
||||||
new_notifications = notifications.getNew()
|
new_notifications = notifications.getNew()
|
||||||
|
|
||||||
# mylog('verbose', [f'[{pluginName}] new_notifications: ', new_notifications])
|
# mylog('verbose', [f'[{pluginName}] new_notifications: ', new_notifications])
|
||||||
mylog('verbose', [f'[{pluginName}] SMTP_SERVER: ', get_setting_value("SMTP_SERVER")])
|
mylog('verbose', [f'[{pluginName}] SMTP_SERVER: ', get_setting_value("SMTP_SERVER")])
|
||||||
mylog('verbose', [f'[{pluginName}] SMTP_PORT: ', get_setting_value("SMTP_PORT")])
|
mylog('verbose', [f'[{pluginName}] SMTP_PORT: ', get_setting_value("SMTP_PORT")])
|
||||||
mylog('verbose', [f'[{pluginName}] SMTP_SKIP_LOGIN: ', get_setting_value("SMTP_SKIP_LOGIN")])
|
mylog('verbose', [f'[{pluginName}] SMTP_SKIP_LOGIN: ', get_setting_value("SMTP_SKIP_LOGIN")])
|
||||||
@@ -72,19 +71,18 @@ def main():
|
|||||||
# mylog('verbose', [f'[{pluginName}] SMTP_REPORT_TO: ', get_setting_value("SMTP_REPORT_TO")])
|
# mylog('verbose', [f'[{pluginName}] SMTP_REPORT_TO: ', get_setting_value("SMTP_REPORT_TO")])
|
||||||
# mylog('verbose', [f'[{pluginName}] SMTP_REPORT_FROM: ', get_setting_value("SMTP_REPORT_FROM")])
|
# mylog('verbose', [f'[{pluginName}] SMTP_REPORT_FROM: ', get_setting_value("SMTP_REPORT_FROM")])
|
||||||
|
|
||||||
|
|
||||||
# Process the new notifications (see the Notifications DB table for structure or check the /php/server/query_json.php?file=table_notifications.json endpoint)
|
# Process the new notifications (see the Notifications DB table for structure or check the /php/server/query_json.php?file=table_notifications.json endpoint)
|
||||||
for notification in new_notifications:
|
for notification in new_notifications:
|
||||||
|
|
||||||
# Send notification
|
# Send notification
|
||||||
result = send(notification["HTML"], notification["Text"])
|
result = send(notification["HTML"], notification["Text"])
|
||||||
|
|
||||||
# Log result
|
# Log result
|
||||||
plugin_objects.add_object(
|
plugin_objects.add_object(
|
||||||
primaryId = pluginName,
|
primaryId = pluginName,
|
||||||
secondaryId = timeNowDB(),
|
secondaryId = timeNowDB(),
|
||||||
watched1 = notification["GUID"],
|
watched1 = notification["GUID"],
|
||||||
watched2 = result,
|
watched2 = result,
|
||||||
watched3 = 'null',
|
watched3 = 'null',
|
||||||
watched4 = 'null',
|
watched4 = 'null',
|
||||||
extra = 'null',
|
extra = 'null',
|
||||||
@@ -93,25 +91,33 @@ def main():
|
|||||||
|
|
||||||
plugin_objects.write_result_file()
|
plugin_objects.write_result_file()
|
||||||
|
|
||||||
#-------------------------------------------------------------------------------
|
|
||||||
def check_config ():
|
# -------------------------------------------------------------------------------
|
||||||
|
def check_config():
|
||||||
|
|
||||||
server = get_setting_value('SMTP_SERVER')
|
server = get_setting_value('SMTP_SERVER')
|
||||||
report_to = get_setting_value("SMTP_REPORT_TO")
|
report_to = get_setting_value("SMTP_REPORT_TO")
|
||||||
report_from = get_setting_value("SMTP_REPORT_FROM")
|
report_from = get_setting_value("SMTP_REPORT_FROM")
|
||||||
|
|
||||||
if server == '' or report_from == '' or report_to == '':
|
if server == '' or report_from == '' or report_to == '':
|
||||||
mylog('none', [f'[Email Check Config] ⚠ ERROR: Email service not set up correctly. Check your {confFileName} SMTP_*, SMTP_REPORT_FROM and SMTP_REPORT_TO variables.'])
|
mylog('none', [f'[Email Check Config] ⚠ ERROR: Email service not set up correctly. Check your {confFileName} SMTP_*, SMTP_REPORT_FROM and SMTP_REPORT_TO variables.'])
|
||||||
return False
|
return False
|
||||||
else:
|
else:
|
||||||
return True
|
return True
|
||||||
|
|
||||||
#-------------------------------------------------------------------------------
|
|
||||||
|
# -------------------------------------------------------------------------------
|
||||||
def send(pHTML, pText):
|
def send(pHTML, pText):
|
||||||
|
|
||||||
mylog('debug', [f'[{pluginName}] SMTP_REPORT_TO: {hide_email(str(get_setting_value("SMTP_REPORT_TO")))} SMTP_USER: {hide_email(str(get_setting_value("SMTP_USER")))}'])
|
mylog('debug', [f'[{pluginName}] SMTP_REPORT_TO: {hide_email(str(get_setting_value("SMTP_REPORT_TO")))} SMTP_USER: {hide_email(str(get_setting_value("SMTP_USER")))}'])
|
||||||
|
|
||||||
subject, from_email, to_email, message_html, message_text = sanitize_email_content(str(get_setting_value("SMTP_SUBJECT")), get_setting_value("SMTP_REPORT_FROM"), get_setting_value("SMTP_REPORT_TO"), pHTML, pText)
|
subject, from_email, to_email, message_html, message_text = sanitize_email_content(
|
||||||
|
str(get_setting_value("SMTP_SUBJECT")),
|
||||||
|
get_setting_value("SMTP_REPORT_FROM"),
|
||||||
|
get_setting_value("SMTP_REPORT_TO"),
|
||||||
|
pHTML,
|
||||||
|
pText
|
||||||
|
)
|
||||||
|
|
||||||
emails = []
|
emails = []
|
||||||
|
|
||||||
@@ -132,10 +138,10 @@ def send(pHTML, pText):
|
|||||||
msg['Subject'] = subject
|
msg['Subject'] = subject
|
||||||
msg['From'] = from_email
|
msg['From'] = from_email
|
||||||
msg['To'] = mail_addr
|
msg['To'] = mail_addr
|
||||||
msg['Date'] = formatdate(localtime=True)
|
msg['Date'] = formatdate(localtime=True)
|
||||||
|
|
||||||
msg.attach (MIMEText (message_text, 'plain'))
|
msg.attach(MIMEText(message_text, 'plain'))
|
||||||
msg.attach (MIMEText (message_html, 'html'))
|
msg.attach(MIMEText(message_html, 'html'))
|
||||||
|
|
||||||
# Set a timeout for the SMTP connection (in seconds)
|
# Set a timeout for the SMTP connection (in seconds)
|
||||||
smtp_timeout = 30
|
smtp_timeout = 30
|
||||||
@@ -144,30 +150,31 @@ def send(pHTML, pText):
|
|||||||
|
|
||||||
if get_setting_value("LOG_LEVEL") == 'debug':
|
if get_setting_value("LOG_LEVEL") == 'debug':
|
||||||
|
|
||||||
send_email(msg,smtp_timeout)
|
send_email(msg, smtp_timeout)
|
||||||
|
|
||||||
else:
|
else:
|
||||||
|
|
||||||
try:
|
try:
|
||||||
send_email(msg,smtp_timeout)
|
send_email(msg, smtp_timeout)
|
||||||
|
|
||||||
except smtplib.SMTPAuthenticationError as e:
|
except smtplib.SMTPAuthenticationError as e:
|
||||||
mylog('none', [' ERROR: Couldn\'t connect to the SMTP server (SMTPAuthenticationError)'])
|
mylog('none', [' ERROR: Couldn\'t connect to the SMTP server (SMTPAuthenticationError)'])
|
||||||
mylog('none', [' ERROR: Double-check your SMTP_USER and SMTP_PASS settings.)'])
|
mylog('none', [' ERROR: Double-check your SMTP_USER and SMTP_PASS settings.)'])
|
||||||
mylog('none', [' ERROR: ', str(e)])
|
mylog('none', [' ERROR: ', str(e)])
|
||||||
except smtplib.SMTPServerDisconnected as e:
|
except smtplib.SMTPServerDisconnected as e:
|
||||||
mylog('none', [' ERROR: Couldn\'t connect to the SMTP server (SMTPServerDisconnected)'])
|
mylog('none', [' ERROR: Couldn\'t connect to the SMTP server (SMTPServerDisconnected)'])
|
||||||
mylog('none', [' ERROR: ', str(e)])
|
mylog('none', [' ERROR: ', str(e)])
|
||||||
except socket.gaierror as e:
|
except socket.gaierror as e:
|
||||||
mylog('none', [' ERROR: Could not resolve hostname (socket.gaierror)'])
|
mylog('none', [' ERROR: Could not resolve hostname (socket.gaierror)'])
|
||||||
mylog('none', [' ERROR: ', str(e)])
|
mylog('none', [' ERROR: ', str(e)])
|
||||||
except ssl.SSLError as e:
|
except ssl.SSLError as e:
|
||||||
mylog('none', [' ERROR: Could not establish SSL connection (ssl.SSLError)'])
|
mylog('none', [' ERROR: Could not establish SSL connection (ssl.SSLError)'])
|
||||||
mylog('none', [' ERROR: Are you sure you need SMTP_FORCE_SSL enabled? Check your SMTP provider docs.'])
|
mylog('none', [' ERROR: Are you sure you need SMTP_FORCE_SSL enabled? Check your SMTP provider docs.'])
|
||||||
mylog('none', [' ERROR: ', str(e)])
|
mylog('none', [' ERROR: ', str(e)])
|
||||||
|
|
||||||
|
|
||||||
# ----------------------------------------------------------------------------------
|
# ----------------------------------------------------------------------------------
|
||||||
def send_email(msg,smtp_timeout):
|
def send_email(msg, smtp_timeout):
|
||||||
# Send mail
|
# Send mail
|
||||||
if get_setting_value('SMTP_FORCE_SSL'):
|
if get_setting_value('SMTP_FORCE_SSL'):
|
||||||
mylog('debug', ['SMTP_FORCE_SSL == True so using .SMTP_SSL()'])
|
mylog('debug', ['SMTP_FORCE_SSL == True so using .SMTP_SSL()'])
|
||||||
@@ -182,10 +189,10 @@ def send_email(msg,smtp_timeout):
|
|||||||
mylog('debug', ['SMTP_FORCE_SSL == False so using .SMTP()'])
|
mylog('debug', ['SMTP_FORCE_SSL == False so using .SMTP()'])
|
||||||
if get_setting_value("SMTP_PORT") == 0:
|
if get_setting_value("SMTP_PORT") == 0:
|
||||||
mylog('debug', ['SMTP_PORT == 0 so sending .SMTP(SMTP_SERVER)'])
|
mylog('debug', ['SMTP_PORT == 0 so sending .SMTP(SMTP_SERVER)'])
|
||||||
smtp_connection = smtplib.SMTP (get_setting_value('SMTP_SERVER'))
|
smtp_connection = smtplib.SMTP(get_setting_value('SMTP_SERVER'))
|
||||||
else:
|
else:
|
||||||
mylog('debug', ['SMTP_PORT == 0 so sending .SMTP(SMTP_SERVER, SMTP_PORT)'])
|
mylog('debug', ['SMTP_PORT == 0 so sending .SMTP(SMTP_SERVER, SMTP_PORT)'])
|
||||||
smtp_connection = smtplib.SMTP (get_setting_value('SMTP_SERVER'), get_setting_value('SMTP_PORT'))
|
smtp_connection = smtplib.SMTP(get_setting_value('SMTP_SERVER'), get_setting_value('SMTP_PORT'))
|
||||||
|
|
||||||
mylog('debug', ['Setting SMTP debug level'])
|
mylog('debug', ['Setting SMTP debug level'])
|
||||||
|
|
||||||
@@ -193,7 +200,7 @@ def send_email(msg,smtp_timeout):
|
|||||||
if get_setting_value('LOG_LEVEL') == 'debug':
|
if get_setting_value('LOG_LEVEL') == 'debug':
|
||||||
smtp_connection.set_debuglevel(1)
|
smtp_connection.set_debuglevel(1)
|
||||||
|
|
||||||
mylog('debug', [ 'Sending .ehlo()'])
|
mylog('debug', ['Sending .ehlo()'])
|
||||||
smtp_connection.ehlo()
|
smtp_connection.ehlo()
|
||||||
|
|
||||||
if not get_setting_value('SMTP_SKIP_TLS'):
|
if not get_setting_value('SMTP_SKIP_TLS'):
|
||||||
@@ -203,12 +210,13 @@ def send_email(msg,smtp_timeout):
|
|||||||
smtp_connection.ehlo()
|
smtp_connection.ehlo()
|
||||||
if not get_setting_value('SMTP_SKIP_LOGIN'):
|
if not get_setting_value('SMTP_SKIP_LOGIN'):
|
||||||
mylog('debug', ['SMTP_SKIP_LOGIN == False so sending .login()'])
|
mylog('debug', ['SMTP_SKIP_LOGIN == False so sending .login()'])
|
||||||
smtp_connection.login (get_setting_value('SMTP_USER'), get_setting_value('SMTP_PASS'))
|
smtp_connection.login(get_setting_value('SMTP_USER'), get_setting_value('SMTP_PASS'))
|
||||||
|
|
||||||
mylog('debug', ['Sending .sendmail()'])
|
mylog('debug', ['Sending .sendmail()'])
|
||||||
smtp_connection.sendmail (get_setting_value("SMTP_REPORT_FROM"), get_setting_value("SMTP_REPORT_TO"), msg.as_string())
|
smtp_connection.sendmail(get_setting_value("SMTP_REPORT_FROM"), get_setting_value("SMTP_REPORT_TO"), msg.as_string())
|
||||||
smtp_connection.quit()
|
smtp_connection.quit()
|
||||||
|
|
||||||
|
|
||||||
# ----------------------------------------------------------------------------------
|
# ----------------------------------------------------------------------------------
|
||||||
def sanitize_email_content(subject, from_email, to_email, message_html, message_text):
|
def sanitize_email_content(subject, from_email, to_email, message_html, message_text):
|
||||||
# Validate and sanitize subject
|
# Validate and sanitize subject
|
||||||
@@ -229,6 +237,7 @@ def sanitize_email_content(subject, from_email, to_email, message_html, message_
|
|||||||
|
|
||||||
return subject, from_email, to_email, message_html, message_text
|
return subject, from_email, to_email, message_html, message_text
|
||||||
|
|
||||||
|
|
||||||
# ----------------------------------------------------------------------------------
|
# ----------------------------------------------------------------------------------
|
||||||
if __name__ == '__main__':
|
if __name__ == '__main__':
|
||||||
sys.exit(main())
|
sys.exit(main())
|
||||||
|
|||||||
@@ -18,15 +18,14 @@ INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
|
|||||||
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
|
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
|
||||||
|
|
||||||
# NetAlertX modules
|
# NetAlertX modules
|
||||||
import conf
|
import conf # noqa: E402 [flake8 lint suppression]
|
||||||
from const import confFileName, logPath
|
from const import confFileName, logPath # noqa: E402 [flake8 lint suppression]
|
||||||
from utils.plugin_utils import getPluginObject
|
from utils.plugin_utils import getPluginObject # noqa: E402 [flake8 lint suppression]
|
||||||
from plugin_helper import Plugin_Objects
|
from plugin_helper import Plugin_Objects # noqa: E402 [flake8 lint suppression]
|
||||||
from logger import mylog, Logger
|
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
|
||||||
from helper import get_setting_value, bytes_to_string, \
|
from helper import get_setting_value, bytes_to_string, \
|
||||||
sanitize_string, normalize_string
|
sanitize_string, normalize_string # noqa: E402 [flake8 lint suppression]
|
||||||
from utils.datetime_utils import timeNowDB
|
from database import DB, get_device_stats # noqa: E402 [flake8 lint suppression]
|
||||||
from database import DB, get_device_stats
|
|
||||||
|
|
||||||
|
|
||||||
# Make sure the TIMEZONE for logging is correct
|
# Make sure the TIMEZONE for logging is correct
|
||||||
@@ -234,7 +233,6 @@ class sensor_config:
|
|||||||
Store the sensor configuration in the global plugin_objects, which tracks sensors based on a unique combination
|
Store the sensor configuration in the global plugin_objects, which tracks sensors based on a unique combination
|
||||||
of attributes including deviceId, sensorName, hash, and MAC.
|
of attributes including deviceId, sensorName, hash, and MAC.
|
||||||
"""
|
"""
|
||||||
global plugin_objects
|
|
||||||
|
|
||||||
# Add the sensor to the global plugin_objects
|
# Add the sensor to the global plugin_objects
|
||||||
plugin_objects.add_object(
|
plugin_objects.add_object(
|
||||||
@@ -287,11 +285,11 @@ def publish_mqtt(mqtt_client, topic, message):
|
|||||||
# mylog('verbose', [f"[{pluginName}] mqtt_client.is_connected(): {mqtt_client.is_connected()} "])
|
# mylog('verbose', [f"[{pluginName}] mqtt_client.is_connected(): {mqtt_client.is_connected()} "])
|
||||||
|
|
||||||
result = mqtt_client.publish(
|
result = mqtt_client.publish(
|
||||||
topic=topic,
|
topic=topic,
|
||||||
payload=message,
|
payload=message,
|
||||||
qos=qos,
|
qos=qos,
|
||||||
retain=True,
|
retain=True,
|
||||||
)
|
)
|
||||||
|
|
||||||
status = result[0]
|
status = result[0]
|
||||||
|
|
||||||
@@ -303,6 +301,7 @@ def publish_mqtt(mqtt_client, topic, message):
|
|||||||
time.sleep(0.1)
|
time.sleep(0.1)
|
||||||
return True
|
return True
|
||||||
|
|
||||||
|
|
||||||
# ------------------------------------------------------------------------------
|
# ------------------------------------------------------------------------------
|
||||||
# Create a generic device for overal stats
|
# Create a generic device for overal stats
|
||||||
def create_generic_device(mqtt_client, deviceId, deviceName):
|
def create_generic_device(mqtt_client, deviceId, deviceName):
|
||||||
@@ -318,7 +317,6 @@ def create_generic_device(mqtt_client, deviceId, deviceName):
|
|||||||
# ------------------------------------------------------------------------------
|
# ------------------------------------------------------------------------------
|
||||||
# Register sensor config on the broker
|
# Register sensor config on the broker
|
||||||
def create_sensor(mqtt_client, deviceId, deviceName, sensorType, sensorName, icon, mac=""):
|
def create_sensor(mqtt_client, deviceId, deviceName, sensorType, sensorName, icon, mac=""):
|
||||||
global mqtt_sensors
|
|
||||||
|
|
||||||
# check previous configs
|
# check previous configs
|
||||||
sensorConfig = sensor_config(deviceId, deviceName, sensorType, sensorName, icon, mac)
|
sensorConfig = sensor_config(deviceId, deviceName, sensorType, sensorName, icon, mac)
|
||||||
@@ -429,12 +427,11 @@ def mqtt_create_client():
|
|||||||
# -----------------------------------------------------------------------------
|
# -----------------------------------------------------------------------------
|
||||||
def mqtt_start(db):
|
def mqtt_start(db):
|
||||||
|
|
||||||
global mqtt_client, mqtt_connected_to_broker
|
global mqtt_client
|
||||||
|
|
||||||
if not mqtt_connected_to_broker:
|
if not mqtt_connected_to_broker:
|
||||||
mqtt_client = mqtt_create_client()
|
mqtt_client = mqtt_create_client()
|
||||||
|
|
||||||
|
|
||||||
deviceName = get_setting_value('MQTT_DEVICE_NAME')
|
deviceName = get_setting_value('MQTT_DEVICE_NAME')
|
||||||
deviceId = get_setting_value('MQTT_DEVICE_ID')
|
deviceId = get_setting_value('MQTT_DEVICE_ID')
|
||||||
|
|
||||||
@@ -449,16 +446,18 @@ def mqtt_start(db):
|
|||||||
row = get_device_stats(db)
|
row = get_device_stats(db)
|
||||||
|
|
||||||
# Publish (wrap into {} and remove last ',' from above)
|
# Publish (wrap into {} and remove last ',' from above)
|
||||||
publish_mqtt(mqtt_client, f"{topic_root}/sensor/{deviceId}/state",
|
publish_mqtt(
|
||||||
{
|
mqtt_client,
|
||||||
"online": row[0],
|
f"{topic_root}/sensor/{deviceId}/state",
|
||||||
"down": row[1],
|
{
|
||||||
"all": row[2],
|
"online": row[0],
|
||||||
"archived": row[3],
|
"down": row[1],
|
||||||
"new": row[4],
|
"all": row[2],
|
||||||
"unknown": row[5]
|
"archived": row[3],
|
||||||
}
|
"new": row[4],
|
||||||
)
|
"unknown": row[5]
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
# Generate device-specific MQTT messages if enabled
|
# Generate device-specific MQTT messages if enabled
|
||||||
if get_setting_value('MQTT_SEND_DEVICES'):
|
if get_setting_value('MQTT_SEND_DEVICES'):
|
||||||
@@ -466,11 +465,11 @@ def mqtt_start(db):
|
|||||||
# Specific devices processing
|
# Specific devices processing
|
||||||
|
|
||||||
# Get all devices
|
# Get all devices
|
||||||
devices = db.read(get_setting_value('MQTT_DEVICES_SQL').replace('{s-quote}',"'"))
|
devices = db.read(get_setting_value('MQTT_DEVICES_SQL').replace('{s-quote}', "'"))
|
||||||
|
|
||||||
sec_delay = len(devices) * int(get_setting_value('MQTT_DELAY_SEC'))*5
|
sec_delay = len(devices) * int(get_setting_value('MQTT_DELAY_SEC')) * 5
|
||||||
|
|
||||||
mylog('verbose', [f"[{pluginName}] Estimated delay: ", (sec_delay), 's ', '(', round(sec_delay/60, 1), 'min)'])
|
mylog('verbose', [f"[{pluginName}] Estimated delay: ", (sec_delay), 's ', '(', round(sec_delay / 60, 1), 'min)'])
|
||||||
|
|
||||||
for device in devices:
|
for device in devices:
|
||||||
|
|
||||||
@@ -495,27 +494,29 @@ def mqtt_start(db):
|
|||||||
# handle device_tracker
|
# handle device_tracker
|
||||||
# IMPORTANT: shared payload - device_tracker attributes and individual sensors
|
# IMPORTANT: shared payload - device_tracker attributes and individual sensors
|
||||||
devJson = {
|
devJson = {
|
||||||
"last_ip": device["devLastIP"],
|
"last_ip": device["devLastIP"],
|
||||||
"is_new": str(device["devIsNew"]),
|
"is_new": str(device["devIsNew"]),
|
||||||
"alert_down": str(device["devAlertDown"]),
|
"alert_down": str(device["devAlertDown"]),
|
||||||
"vendor": sanitize_string(device["devVendor"]),
|
"vendor": sanitize_string(device["devVendor"]),
|
||||||
"mac_address": str(device["devMac"]),
|
"mac_address": str(device["devMac"]),
|
||||||
"model": devDisplayName,
|
"model": devDisplayName,
|
||||||
"last_connection": prepTimeStamp(str(device["devLastConnection"])),
|
"last_connection": prepTimeStamp(str(device["devLastConnection"])),
|
||||||
"first_connection": prepTimeStamp(str(device["devFirstConnection"])),
|
"first_connection": prepTimeStamp(str(device["devFirstConnection"])),
|
||||||
"sync_node": device["devSyncHubNode"],
|
"sync_node": device["devSyncHubNode"],
|
||||||
"group": device["devGroup"],
|
"group": device["devGroup"],
|
||||||
"location": device["devLocation"],
|
"location": device["devLocation"],
|
||||||
"network_parent_mac": device["devParentMAC"],
|
"network_parent_mac": device["devParentMAC"],
|
||||||
"network_parent_name": next((dev["devName"] for dev in devices if dev["devMAC"] == device["devParentMAC"]), "")
|
"network_parent_name": next((dev["devName"] for dev in devices if dev["devMAC"] == device["devParentMAC"]), "")
|
||||||
}
|
}
|
||||||
|
|
||||||
# bulk update device sensors in home assistant
|
# bulk update device sensors in home assistant
|
||||||
publish_mqtt(mqtt_client, sensorConfig.state_topic, devJson) # REQUIRED, DON'T DELETE
|
publish_mqtt(mqtt_client, sensorConfig.state_topic, devJson) # REQUIRED, DON'T DELETE
|
||||||
|
|
||||||
# create and update is_present sensor
|
# create and update is_present sensor
|
||||||
sensorConfig = create_sensor(mqtt_client, deviceId, devDisplayName, 'binary_sensor', 'is_present', 'wifi', device["devMac"])
|
sensorConfig = create_sensor(mqtt_client, deviceId, devDisplayName, 'binary_sensor', 'is_present', 'wifi', device["devMac"])
|
||||||
publish_mqtt(mqtt_client, sensorConfig.state_topic,
|
publish_mqtt(
|
||||||
|
mqtt_client,
|
||||||
|
sensorConfig.state_topic,
|
||||||
{
|
{
|
||||||
"is_present": to_binary_sensor(str(device["devPresentLastScan"]))
|
"is_present": to_binary_sensor(str(device["devPresentLastScan"]))
|
||||||
}
|
}
|
||||||
@@ -547,7 +548,7 @@ def to_binary_sensor(input):
|
|||||||
elif isinstance(input, bool) and input:
|
elif isinstance(input, bool) and input:
|
||||||
return "ON"
|
return "ON"
|
||||||
elif isinstance(input, str) and input == "1":
|
elif isinstance(input, str) and input == "1":
|
||||||
return "ON"
|
return "ON"
|
||||||
elif isinstance(input, bytes) and bytes_to_string(input) == "1":
|
elif isinstance(input, bytes) and bytes_to_string(input) == "1":
|
||||||
return "ON"
|
return "ON"
|
||||||
return "OFF"
|
return "OFF"
|
||||||
|
|||||||
@@ -1,4 +1,3 @@
|
|||||||
|
|
||||||
#!/usr/bin/env python
|
#!/usr/bin/env python
|
||||||
|
|
||||||
import json
|
import json
|
||||||
@@ -11,15 +10,15 @@ from base64 import b64encode
|
|||||||
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
|
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
|
||||||
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
|
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
|
||||||
|
|
||||||
import conf
|
import conf # noqa: E402 [flake8 lint suppression]
|
||||||
from const import confFileName, logPath
|
from const import confFileName, logPath # noqa: E402 [flake8 lint suppression]
|
||||||
from plugin_helper import Plugin_Objects, handleEmpty
|
from plugin_helper import Plugin_Objects, handleEmpty # noqa: E402 [flake8 lint suppression]
|
||||||
from utils.datetime_utils import timeNowDB
|
from utils.datetime_utils import timeNowDB # noqa: E402 [flake8 lint suppression]
|
||||||
from logger import mylog, Logger
|
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
|
||||||
from helper import get_setting_value
|
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
|
||||||
from models.notification_instance import NotificationInstance
|
from models.notification_instance import NotificationInstance # noqa: E402 [flake8 lint suppression]
|
||||||
from database import DB
|
from database import DB # noqa: E402 [flake8 lint suppression]
|
||||||
from pytz import timezone
|
from pytz import timezone # noqa: E402 [flake8 lint suppression]
|
||||||
|
|
||||||
# Make sure the TIMEZONE for logging is correct
|
# Make sure the TIMEZONE for logging is correct
|
||||||
conf.tz = timezone(get_setting_value('TIMEZONE'))
|
conf.tz = timezone(get_setting_value('TIMEZONE'))
|
||||||
@@ -33,13 +32,12 @@ LOG_PATH = logPath + '/plugins'
|
|||||||
RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
|
RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
def main():
|
def main():
|
||||||
|
|
||||||
mylog('verbose', [f'[{pluginName}](publisher) In script'])
|
mylog('verbose', [f'[{pluginName}](publisher) In script'])
|
||||||
|
|
||||||
# Check if basic config settings supplied
|
# Check if basic config settings supplied
|
||||||
if check_config() == False:
|
if check_config() is False:
|
||||||
mylog('none', [f'[{pluginName}] ⚠ ERROR: Publisher notification gateway not set up correctly. Check your {confFileName} {pluginName}_* variables.'])
|
mylog('none', [f'[{pluginName}] ⚠ ERROR: Publisher notification gateway not set up correctly. Check your {confFileName} {pluginName}_* variables.'])
|
||||||
return
|
return
|
||||||
|
|
||||||
@@ -65,9 +63,9 @@ def main():
|
|||||||
# Log result
|
# Log result
|
||||||
plugin_objects.add_object(
|
plugin_objects.add_object(
|
||||||
primaryId = pluginName,
|
primaryId = pluginName,
|
||||||
secondaryId = timeNowDB(),
|
secondaryId = timeNowDB(),
|
||||||
watched1 = notification["GUID"],
|
watched1 = notification["GUID"],
|
||||||
watched2 = handleEmpty(response_text),
|
watched2 = handleEmpty(response_text),
|
||||||
watched3 = response_status_code,
|
watched3 = response_status_code,
|
||||||
watched4 = 'null',
|
watched4 = 'null',
|
||||||
extra = 'null',
|
extra = 'null',
|
||||||
@@ -77,15 +75,15 @@ def main():
|
|||||||
plugin_objects.write_result_file()
|
plugin_objects.write_result_file()
|
||||||
|
|
||||||
|
|
||||||
|
# -------------------------------------------------------------------------------
|
||||||
#-------------------------------------------------------------------------------
|
|
||||||
def check_config():
|
def check_config():
|
||||||
if get_setting_value('NTFY_HOST') == '' or get_setting_value('NTFY_TOPIC') == '':
|
if get_setting_value('NTFY_HOST') == '' or get_setting_value('NTFY_TOPIC') == '':
|
||||||
return False
|
return False
|
||||||
else:
|
else:
|
||||||
return True
|
return True
|
||||||
|
|
||||||
#-------------------------------------------------------------------------------
|
|
||||||
|
# -------------------------------------------------------------------------------
|
||||||
def send(html, text):
|
def send(html, text):
|
||||||
|
|
||||||
response_text = ''
|
response_text = ''
|
||||||
@@ -100,7 +98,7 @@ def send(html, text):
|
|||||||
# prepare request headers
|
# prepare request headers
|
||||||
headers = {
|
headers = {
|
||||||
"Title": "NetAlertX Notification",
|
"Title": "NetAlertX Notification",
|
||||||
"Actions": "view, Open Dashboard, "+ get_setting_value('REPORT_DASHBOARD_URL'),
|
"Actions": "view, Open Dashboard, " + get_setting_value('REPORT_DASHBOARD_URL'),
|
||||||
"Priority": get_setting_value('NTFY_PRIORITY'),
|
"Priority": get_setting_value('NTFY_PRIORITY'),
|
||||||
"Tags": "warning"
|
"Tags": "warning"
|
||||||
}
|
}
|
||||||
@@ -109,37 +107,39 @@ def send(html, text):
|
|||||||
if token != '':
|
if token != '':
|
||||||
headers["Authorization"] = "Bearer {}".format(token)
|
headers["Authorization"] = "Bearer {}".format(token)
|
||||||
elif user != "" and pwd != "":
|
elif user != "" and pwd != "":
|
||||||
# Generate hash for basic auth
|
# Generate hash for basic auth
|
||||||
basichash = b64encode(bytes(user + ':' + pwd, "utf-8")).decode("ascii")
|
basichash = b64encode(bytes(user + ':' + pwd, "utf-8")).decode("ascii")
|
||||||
# add authorization header with hash
|
# add authorization header with hash
|
||||||
headers["Authorization"] = "Basic {}".format(basichash)
|
headers["Authorization"] = "Basic {}".format(basichash)
|
||||||
|
|
||||||
# call NTFY service
|
# call NTFY service
|
||||||
try:
|
try:
|
||||||
response = requests.post("{}/{}".format( get_setting_value('NTFY_HOST'),
|
response = requests.post("{}/{}".format(
|
||||||
get_setting_value('NTFY_TOPIC')),
|
get_setting_value('NTFY_HOST'),
|
||||||
data = text,
|
get_setting_value('NTFY_TOPIC')),
|
||||||
headers = headers,
|
data = text,
|
||||||
verify = verify_ssl)
|
headers = headers,
|
||||||
|
verify = verify_ssl,
|
||||||
|
timeout = get_setting_value('NTFY_RUN_TIMEOUT')
|
||||||
|
)
|
||||||
|
|
||||||
response_status_code = response.status_code
|
response_status_code = response.status_code
|
||||||
|
|
||||||
# Check if the request was successful (status code 200)
|
# Check if the request was successful (status code 200)
|
||||||
if response_status_code == 200:
|
if response_status_code == 200:
|
||||||
response_text = response.text # This captures the response body/message
|
response_text = response.text # This captures the response body/message
|
||||||
else:
|
else:
|
||||||
response_text = json.dumps(response.text)
|
response_text = json.dumps(response.text)
|
||||||
|
|
||||||
except requests.exceptions.RequestException as e:
|
except requests.exceptions.RequestException as e:
|
||||||
mylog('none', [f'[{pluginName}] ⚠ ERROR: ', e])
|
mylog('none', [f'[{pluginName}] ⚠ ERROR: ', e])
|
||||||
|
|
||||||
response_text = e
|
response_text = e
|
||||||
|
|
||||||
return response_text, response_status_code
|
return response_text, response_status_code
|
||||||
|
|
||||||
return response_text, response_status_code
|
return response_text, response_status_code
|
||||||
|
|
||||||
|
|
||||||
if __name__ == '__main__':
|
if __name__ == '__main__':
|
||||||
sys.exit(main())
|
sys.exit(main())
|
||||||
|
|
||||||
|
|||||||
@@ -12,12 +12,12 @@ import requests
|
|||||||
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
|
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
|
||||||
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
|
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
|
||||||
|
|
||||||
from plugin_helper import Plugin_Objects, handleEmpty # noqa: E402
|
from plugin_helper import Plugin_Objects, handleEmpty # noqa: E402 [flake8 lint suppression]
|
||||||
from logger import mylog, Logger # noqa: E402
|
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
|
||||||
from helper import get_setting_value, hide_string # noqa: E402
|
from helper import get_setting_value, hide_string # noqa: E402 [flake8 lint suppression]
|
||||||
from utils.datetime_utils import timeNowDB
|
from utils.datetime_utils import timeNowDB # noqa: E402 [flake8 lint suppression]
|
||||||
from models.notification_instance import NotificationInstance # noqa: E402
|
from models.notification_instance import NotificationInstance # noqa: E402 [flake8 lint suppression]
|
||||||
from database import DB # noqa: E402
|
from database import DB # noqa: E402 [flake8 lint suppression]
|
||||||
|
|
||||||
# Make sure the TIMEZONE for logging is correct
|
# Make sure the TIMEZONE for logging is correct
|
||||||
conf.tz = timezone(get_setting_value("TIMEZONE"))
|
conf.tz = timezone(get_setting_value("TIMEZONE"))
|
||||||
|
|||||||
@@ -1,6 +1,4 @@
|
|||||||
|
|
||||||
#!/usr/bin/env python
|
#!/usr/bin/env python
|
||||||
|
|
||||||
import json
|
import json
|
||||||
import os
|
import os
|
||||||
import sys
|
import sys
|
||||||
@@ -10,15 +8,15 @@ import requests
|
|||||||
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
|
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
|
||||||
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
|
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
|
||||||
|
|
||||||
import conf
|
import conf # noqa: E402 [flake8 lint suppression]
|
||||||
from const import confFileName, logPath
|
from const import confFileName, logPath # noqa: E402 [flake8 lint suppression]
|
||||||
from plugin_helper import Plugin_Objects, handleEmpty
|
from plugin_helper import Plugin_Objects, handleEmpty # noqa: E402 [flake8 lint suppression]
|
||||||
from logger import mylog, Logger
|
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
|
||||||
from helper import get_setting_value, hide_string
|
from helper import get_setting_value, hide_string # noqa: E402 [flake8 lint suppression]
|
||||||
from utils.datetime_utils import timeNowDB
|
from utils.datetime_utils import timeNowDB # noqa: E402 [flake8 lint suppression]
|
||||||
from models.notification_instance import NotificationInstance
|
from models.notification_instance import NotificationInstance # noqa: E402 [flake8 lint suppression]
|
||||||
from database import DB
|
from database import DB # noqa: E402 [flake8 lint suppression]
|
||||||
from pytz import timezone
|
from pytz import timezone # noqa: E402 [flake8 lint suppression]
|
||||||
|
|
||||||
# Make sure the TIMEZONE for logging is correct
|
# Make sure the TIMEZONE for logging is correct
|
||||||
conf.tz = timezone(get_setting_value('TIMEZONE'))
|
conf.tz = timezone(get_setting_value('TIMEZONE'))
|
||||||
@@ -32,13 +30,12 @@ LOG_PATH = logPath + '/plugins'
|
|||||||
RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
|
RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
def main():
|
def main():
|
||||||
|
|
||||||
mylog('verbose', [f'[{pluginName}](publisher) In script'])
|
mylog('verbose', [f'[{pluginName}](publisher) In script'])
|
||||||
|
|
||||||
# Check if basic config settings supplied
|
# Check if basic config settings supplied
|
||||||
if check_config() == False:
|
if check_config() is False:
|
||||||
mylog('none', [f'[{pluginName}] ⚠ ERROR: Publisher notification gateway not set up correctly. Check your {confFileName} {pluginName}_* variables.'])
|
mylog('none', [f'[{pluginName}] ⚠ ERROR: Publisher notification gateway not set up correctly. Check your {confFileName} {pluginName}_* variables.'])
|
||||||
return
|
return
|
||||||
|
|
||||||
@@ -59,14 +56,14 @@ def main():
|
|||||||
for notification in new_notifications:
|
for notification in new_notifications:
|
||||||
|
|
||||||
# Send notification
|
# Send notification
|
||||||
response_text, response_status_code = send(notification["Text"])
|
response_text, response_status_code = send(notification["Text"])
|
||||||
|
|
||||||
# Log result
|
# Log result
|
||||||
plugin_objects.add_object(
|
plugin_objects.add_object(
|
||||||
primaryId = pluginName,
|
primaryId = pluginName,
|
||||||
secondaryId = timeNowDB(),
|
secondaryId = timeNowDB(),
|
||||||
watched1 = notification["GUID"],
|
watched1 = notification["GUID"],
|
||||||
watched2 = handleEmpty(response_text),
|
watched2 = handleEmpty(response_text),
|
||||||
watched3 = response_status_code,
|
watched3 = response_status_code,
|
||||||
watched4 = 'null',
|
watched4 = 'null',
|
||||||
extra = 'null',
|
extra = 'null',
|
||||||
@@ -76,8 +73,7 @@ def main():
|
|||||||
plugin_objects.write_result_file()
|
plugin_objects.write_result_file()
|
||||||
|
|
||||||
|
|
||||||
|
# -------------------------------------------------------------------------------
|
||||||
#-------------------------------------------------------------------------------
|
|
||||||
def send(text):
|
def send(text):
|
||||||
|
|
||||||
response_text = ''
|
response_text = ''
|
||||||
@@ -85,8 +81,7 @@ def send(text):
|
|||||||
|
|
||||||
token = get_setting_value('PUSHSAFER_TOKEN')
|
token = get_setting_value('PUSHSAFER_TOKEN')
|
||||||
|
|
||||||
mylog('verbose', [f'[{pluginName}] PUSHSAFER_TOKEN: "{hide_string(token)}"'])
|
mylog('verbose', [f'[{pluginName}] PUSHSAFER_TOKEN: "{hide_string(token)}"'])
|
||||||
|
|
||||||
|
|
||||||
try:
|
try:
|
||||||
url = 'https://www.pushsafer.com/api'
|
url = 'https://www.pushsafer.com/api'
|
||||||
@@ -101,40 +96,34 @@ def send(text):
|
|||||||
"u" : get_setting_value('REPORT_DASHBOARD_URL'),
|
"u" : get_setting_value('REPORT_DASHBOARD_URL'),
|
||||||
"ut" : 'Open NetAlertX',
|
"ut" : 'Open NetAlertX',
|
||||||
"k" : token,
|
"k" : token,
|
||||||
}
|
}
|
||||||
response = requests.post(url, data=post_fields)
|
response = requests.post(url, data=post_fields, timeout=get_setting_value("PUSHSAFER_RUN_TIMEOUT"))
|
||||||
|
|
||||||
response_status_code = response.status_code
|
response_status_code = response.status_code
|
||||||
|
|
||||||
|
|
||||||
# Check if the request was successful (status code 200)
|
# Check if the request was successful (status code 200)
|
||||||
if response_status_code == 200:
|
if response_status_code == 200:
|
||||||
response_text = response.text # This captures the response body/message
|
response_text = response.text # This captures the response body/message
|
||||||
else:
|
else:
|
||||||
response_text = json.dumps(response.text)
|
response_text = json.dumps(response.text)
|
||||||
|
|
||||||
except requests.exceptions.RequestException as e:
|
except requests.exceptions.RequestException as e:
|
||||||
mylog('none', [f'[{pluginName}] ⚠ ERROR: ', e])
|
mylog('none', [f'[{pluginName}] ⚠ ERROR: ', e])
|
||||||
|
|
||||||
response_text = e
|
response_text = e
|
||||||
|
|
||||||
return response_text, response_status_code
|
return response_text, response_status_code
|
||||||
|
|
||||||
|
|
||||||
return response_text, response_status_code
|
return response_text, response_status_code
|
||||||
|
|
||||||
|
|
||||||
|
# -------------------------------------------------------------------------------
|
||||||
|
|
||||||
|
|
||||||
#-------------------------------------------------------------------------------
|
|
||||||
def check_config():
|
def check_config():
|
||||||
if get_setting_value('PUSHSAFER_TOKEN') == 'ApiKey':
|
if get_setting_value('PUSHSAFER_TOKEN') == 'ApiKey':
|
||||||
return False
|
return False
|
||||||
else:
|
else:
|
||||||
return True
|
return True
|
||||||
|
|
||||||
|
|
||||||
# -------------------------------------------------------
|
# -------------------------------------------------------
|
||||||
if __name__ == '__main__':
|
if __name__ == '__main__':
|
||||||
sys.exit(main())
|
sys.exit(main())
|
||||||
|
|
||||||
|
|||||||
@@ -8,15 +8,15 @@ import sys
|
|||||||
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
|
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
|
||||||
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
|
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
|
||||||
|
|
||||||
import conf
|
import conf # noqa: E402 [flake8 lint suppression]
|
||||||
from const import confFileName, logPath
|
from const import confFileName, logPath # noqa: E402 [flake8 lint suppression]
|
||||||
from plugin_helper import Plugin_Objects
|
from plugin_helper import Plugin_Objects # noqa: E402 [flake8 lint suppression]
|
||||||
from utils.datetime_utils import timeNowDB
|
from utils.datetime_utils import timeNowDB # noqa: E402 [flake8 lint suppression]
|
||||||
from logger import mylog, Logger
|
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
|
||||||
from helper import get_setting_value
|
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
|
||||||
from models.notification_instance import NotificationInstance
|
from models.notification_instance import NotificationInstance # noqa: E402 [flake8 lint suppression]
|
||||||
from database import DB
|
from database import DB # noqa: E402 [flake8 lint suppression]
|
||||||
from pytz import timezone
|
from pytz import timezone # noqa: E402 [flake8 lint suppression]
|
||||||
|
|
||||||
# Make sure the TIMEZONE for logging is correct
|
# Make sure the TIMEZONE for logging is correct
|
||||||
conf.tz = timezone(get_setting_value('TIMEZONE'))
|
conf.tz = timezone(get_setting_value('TIMEZONE'))
|
||||||
@@ -30,13 +30,11 @@ LOG_PATH = logPath + '/plugins'
|
|||||||
RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
|
RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
def main():
|
def main():
|
||||||
mylog('verbose', [f'[{pluginName}](publisher) In script'])
|
mylog('verbose', [f'[{pluginName}](publisher) In script'])
|
||||||
|
|
||||||
# Check if basic config settings supplied
|
# Check if basic config settings supplied
|
||||||
if check_config() == False:
|
if check_config() is False:
|
||||||
mylog('none', [
|
mylog('none', [
|
||||||
f'[{pluginName}] ⚠ ERROR: Publisher notification gateway not set up correctly. Check your {confFileName} {pluginName}_* variables.'])
|
f'[{pluginName}] ⚠ ERROR: Publisher notification gateway not set up correctly. Check your {confFileName} {pluginName}_* variables.'])
|
||||||
return
|
return
|
||||||
|
|||||||
@@ -1,4 +1,3 @@
|
|||||||
|
|
||||||
#!/usr/bin/env python
|
#!/usr/bin/env python
|
||||||
|
|
||||||
import json
|
import json
|
||||||
@@ -13,15 +12,15 @@ INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
|
|||||||
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
|
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
|
||||||
|
|
||||||
|
|
||||||
import conf
|
import conf # noqa: E402 [flake8 lint suppression]
|
||||||
from const import logPath, confFileName
|
from const import logPath, confFileName # noqa: E402 [flake8 lint suppression]
|
||||||
from plugin_helper import Plugin_Objects, handleEmpty
|
from plugin_helper import Plugin_Objects, handleEmpty # noqa: E402 [flake8 lint suppression]
|
||||||
from utils.datetime_utils import timeNowDB
|
from utils.datetime_utils import timeNowDB # noqa: E402 [flake8 lint suppression]
|
||||||
from logger import mylog, Logger
|
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
|
||||||
from helper import get_setting_value, write_file
|
from helper import get_setting_value, write_file # noqa: E402 [flake8 lint suppression]
|
||||||
from models.notification_instance import NotificationInstance
|
from models.notification_instance import NotificationInstance # noqa: E402 [flake8 lint suppression]
|
||||||
from database import DB
|
from database import DB # noqa: E402 [flake8 lint suppression]
|
||||||
from pytz import timezone
|
from pytz import timezone # noqa: E402 [flake8 lint suppression]
|
||||||
|
|
||||||
# Make sure the TIMEZONE for logging is correct
|
# Make sure the TIMEZONE for logging is correct
|
||||||
conf.tz = timezone(get_setting_value('TIMEZONE'))
|
conf.tz = timezone(get_setting_value('TIMEZONE'))
|
||||||
@@ -35,13 +34,12 @@ LOG_PATH = logPath + '/plugins'
|
|||||||
RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
|
RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
def main():
|
def main():
|
||||||
|
|
||||||
mylog('verbose', [f'[{pluginName}](publisher) In script'])
|
mylog('verbose', [f'[{pluginName}](publisher) In script'])
|
||||||
|
|
||||||
# Check if basic config settings supplied
|
# Check if basic config settings supplied
|
||||||
if check_config() == False:
|
if check_config() is False:
|
||||||
mylog('none', [f'[{pluginName}] ⚠ ERROR: Publisher notification gateway not set up correctly. Check your {confFileName} {pluginName}_* variables.'])
|
mylog('none', [f'[{pluginName}] ⚠ ERROR: Publisher notification gateway not set up correctly. Check your {confFileName} {pluginName}_* variables.'])
|
||||||
return
|
return
|
||||||
|
|
||||||
@@ -62,15 +60,19 @@ def main():
|
|||||||
for notification in new_notifications:
|
for notification in new_notifications:
|
||||||
|
|
||||||
# Send notification
|
# Send notification
|
||||||
response_stdout, response_stderr = send(notification["Text"], notification["HTML"], notification["JSON"])
|
response_stdout, response_stderr = send(
|
||||||
|
notification["Text"],
|
||||||
|
notification["HTML"],
|
||||||
|
notification["JSON"]
|
||||||
|
)
|
||||||
|
|
||||||
# Log result
|
# Log result
|
||||||
plugin_objects.add_object(
|
plugin_objects.add_object(
|
||||||
primaryId = pluginName,
|
primaryId = pluginName,
|
||||||
secondaryId = timeNowDB(),
|
secondaryId = timeNowDB(),
|
||||||
watched1 = notification["GUID"],
|
watched1 = notification["GUID"],
|
||||||
watched2 = handleEmpty(response_stdout),
|
watched2 = handleEmpty(response_stdout),
|
||||||
watched3 = handleEmpty(response_stderr),
|
watched3 = handleEmpty(response_stderr),
|
||||||
watched4 = 'null',
|
watched4 = 'null',
|
||||||
extra = 'null',
|
extra = 'null',
|
||||||
foreignKey = notification["GUID"]
|
foreignKey = notification["GUID"]
|
||||||
@@ -79,16 +81,16 @@ def main():
|
|||||||
plugin_objects.write_result_file()
|
plugin_objects.write_result_file()
|
||||||
|
|
||||||
|
|
||||||
#-------------------------------------------------------------------------------
|
# -------------------------------------------------------------------------------
|
||||||
def check_config():
|
def check_config():
|
||||||
if get_setting_value('WEBHOOK_URL') == '':
|
if get_setting_value('WEBHOOK_URL') == '':
|
||||||
return False
|
return False
|
||||||
else:
|
else:
|
||||||
return True
|
return True
|
||||||
|
|
||||||
#-------------------------------------------------------------------------------
|
|
||||||
|
|
||||||
def send (text_data, html_data, json_data):
|
|
||||||
|
# -------------------------------------------------------------------------------
|
||||||
|
def send(text_data, html_data, json_data):
|
||||||
|
|
||||||
response_stderr = ''
|
response_stderr = ''
|
||||||
response_stdout = ''
|
response_stdout = ''
|
||||||
@@ -102,9 +104,9 @@ def send (text_data, html_data, json_data):
|
|||||||
|
|
||||||
# use data type based on specified payload type
|
# use data type based on specified payload type
|
||||||
if payloadType == 'json':
|
if payloadType == 'json':
|
||||||
# In this code, the truncate_json function is used to recursively traverse the JSON object
|
# In this code, the truncate_json function is used to recursively traverse the JSON object
|
||||||
# and remove nodes that exceed the size limit. It checks the size of each node's JSON representation
|
# and remove nodes that exceed the size limit. It checks the size of each node's JSON representation
|
||||||
# using json.dumps and includes only the nodes that are within the limit.
|
# using json.dumps and includes only the nodes that are within the limit.
|
||||||
json_str = json.dumps(json_data)
|
json_str = json.dumps(json_data)
|
||||||
|
|
||||||
if len(json_str) <= limit:
|
if len(json_str) <= limit:
|
||||||
@@ -127,45 +129,48 @@ def send (text_data, html_data, json_data):
|
|||||||
return obj
|
return obj
|
||||||
|
|
||||||
payloadData = truncate_json(json_data)
|
payloadData = truncate_json(json_data)
|
||||||
if payloadType == 'html':
|
if payloadType == 'html':
|
||||||
if len(html_data) > limit:
|
if len(html_data) > limit:
|
||||||
payloadData = html_data[:limit] + " <h1>(text was truncated)</h1>"
|
payloadData = html_data[:limit] + " <h1>(text was truncated)</h1>"
|
||||||
else:
|
else:
|
||||||
payloadData = html_data
|
payloadData = html_data
|
||||||
if payloadType == 'text':
|
if payloadType == 'text':
|
||||||
if len(text_data) > limit:
|
if len(text_data) > limit:
|
||||||
payloadData = text_data[:limit] + " (text was truncated)"
|
payloadData = text_data[:limit] + " (text was truncated)"
|
||||||
else:
|
else:
|
||||||
payloadData = text_data
|
payloadData = text_data
|
||||||
|
|
||||||
# Define slack-compatible payload
|
# Define slack-compatible payload
|
||||||
_json_payload = { "text": payloadData } if payloadType == 'text' else {
|
if payloadType == 'text':
|
||||||
"username": "NetAlertX",
|
_json_payload = {"text": payloadData}
|
||||||
"text": "There are new notifications",
|
else:
|
||||||
"attachments": [{
|
_json_payload = {
|
||||||
"title": "NetAlertX Notifications",
|
"username": "NetAlertX",
|
||||||
"title_link": get_setting_value('REPORT_DASHBOARD_URL'),
|
"text": "There are new notifications",
|
||||||
"text": payloadData
|
"attachments": [{
|
||||||
}]
|
"title": "NetAlertX Notifications",
|
||||||
}
|
"title_link": get_setting_value('REPORT_DASHBOARD_URL'),
|
||||||
|
"text": payloadData
|
||||||
|
}]
|
||||||
|
}
|
||||||
|
|
||||||
# DEBUG - Write the json payload into a log file for debugging
|
# DEBUG - Write the json payload into a log file for debugging
|
||||||
write_file (logPath + '/webhook_payload.json', json.dumps(_json_payload))
|
write_file(logPath + '/webhook_payload.json', json.dumps(_json_payload))
|
||||||
|
|
||||||
# Using the Slack-Compatible Webhook endpoint for Discord so that the same payload can be used for both
|
# Using the Slack-Compatible Webhook endpoint for Discord so that the same payload can be used for both
|
||||||
# Consider: curl has the ability to load in data to POST from a file + piping
|
# Consider: curl has the ability to load in data to POST from a file + piping
|
||||||
if(endpointUrl.startswith('https://discord.com/api/webhooks/') and not endpointUrl.endswith("/slack")):
|
if (endpointUrl.startswith('https://discord.com/api/webhooks/') and not endpointUrl.endswith("/slack")):
|
||||||
_WEBHOOK_URL = f"{endpointUrl}/slack"
|
_WEBHOOK_URL = f"{endpointUrl}/slack"
|
||||||
curlParams = ["curl","-i","-H", "Content-Type:application/json" ,"-d", json.dumps(_json_payload), _WEBHOOK_URL]
|
curlParams = ["curl", "-i", "-H", "Content-Type:application/json", "-d", json.dumps(_json_payload), _WEBHOOK_URL]
|
||||||
else:
|
else:
|
||||||
_WEBHOOK_URL = endpointUrl
|
_WEBHOOK_URL = endpointUrl
|
||||||
curlParams = ["curl","-i","-X", requestMethod , "-H", "Content-Type:application/json", "-d", json.dumps(_json_payload), _WEBHOOK_URL]
|
curlParams = ["curl", "-i", "-X", requestMethod , "-H", "Content-Type:application/json", "-d", json.dumps(_json_payload), _WEBHOOK_URL]
|
||||||
|
|
||||||
# Add HMAC signature if configured
|
# Add HMAC signature if configured
|
||||||
if(secret != ''):
|
if (secret != ''):
|
||||||
h = hmac.new(secret.encode("UTF-8"), json.dumps(_json_payload, separators=(',', ':')).encode(), hashlib.sha256).hexdigest()
|
h = hmac.new(secret.encode("UTF-8"), json.dumps(_json_payload, separators=(',', ':')).encode(), hashlib.sha256).hexdigest()
|
||||||
curlParams.insert(4,"-H")
|
curlParams.insert(4, "-H")
|
||||||
curlParams.insert(5,f"X-Webhook-Signature: sha256={h}")
|
curlParams.insert(5, f"X-Webhook-Signature: sha256={h}")
|
||||||
|
|
||||||
try:
|
try:
|
||||||
# Execute CURL call
|
# Execute CURL call
|
||||||
@@ -173,13 +178,11 @@ def send (text_data, html_data, json_data):
|
|||||||
result = subprocess.run(curlParams, capture_output=True, text=True)
|
result = subprocess.run(curlParams, capture_output=True, text=True)
|
||||||
|
|
||||||
response_stderr = result.stderr
|
response_stderr = result.stderr
|
||||||
response_stdout = result.stdout
|
response_stdout = result.stdout
|
||||||
|
|
||||||
# Write stdout and stderr into .log files for debugging if needed
|
# Write stdout and stderr into .log files for debugging if needed
|
||||||
mylog('debug', [f'[{pluginName}] stdout: ', response_stdout])
|
mylog('debug', [f'[{pluginName}] stdout: ', response_stdout])
|
||||||
mylog('debug', [f'[{pluginName}] stderr: ', response_stderr])
|
mylog('debug', [f'[{pluginName}] stderr: ', response_stderr])
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
except subprocess.CalledProcessError as e:
|
except subprocess.CalledProcessError as e:
|
||||||
# An error occurred, handle it
|
# An error occurred, handle it
|
||||||
@@ -187,10 +190,9 @@ def send (text_data, html_data, json_data):
|
|||||||
|
|
||||||
response_stderr = e.output
|
response_stderr = e.output
|
||||||
|
|
||||||
|
return response_stdout, response_stderr
|
||||||
|
|
||||||
return response_stdout, response_stderr
|
|
||||||
|
|
||||||
# -------------------------------------------------------
|
# -------------------------------------------------------
|
||||||
if __name__ == '__main__':
|
if __name__ == '__main__':
|
||||||
sys.exit(main())
|
sys.exit(main())
|
||||||
|
|
||||||
|
|||||||
@@ -1,7 +1,6 @@
|
|||||||
#!/usr/bin/env python
|
#!/usr/bin/env python
|
||||||
import os
|
import os
|
||||||
import time
|
import time
|
||||||
import pathlib
|
|
||||||
import argparse
|
import argparse
|
||||||
import sys
|
import sys
|
||||||
import re
|
import re
|
||||||
@@ -9,16 +8,16 @@ import base64
|
|||||||
import subprocess
|
import subprocess
|
||||||
|
|
||||||
# Register NetAlertX directories
|
# Register NetAlertX directories
|
||||||
INSTALL_PATH="/app"
|
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
|
||||||
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
|
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
|
||||||
|
|
||||||
from database import DB
|
from database import DB # noqa: E402 [flake8 lint suppression]
|
||||||
from plugin_helper import Plugin_Objects, handleEmpty
|
from plugin_helper import Plugin_Objects, handleEmpty # noqa: E402 [flake8 lint suppression]
|
||||||
from logger import mylog, Logger, append_line_to_file
|
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
|
||||||
from helper import get_setting_value
|
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
|
||||||
from const import logPath, applicationPath
|
from const import logPath # noqa: E402 [flake8 lint suppression]
|
||||||
import conf
|
import conf # noqa: E402 [flake8 lint suppression]
|
||||||
from pytz import timezone
|
from pytz import timezone # noqa: E402 [flake8 lint suppression]
|
||||||
|
|
||||||
# Make sure the TIMEZONE for logging is correct
|
# Make sure the TIMEZONE for logging is correct
|
||||||
conf.tz = timezone(get_setting_value("TIMEZONE"))
|
conf.tz = timezone(get_setting_value("TIMEZONE"))
|
||||||
|
|||||||
@@ -6,17 +6,16 @@ INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
|
|||||||
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
|
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
|
||||||
pluginName = "ASUSWRT"
|
pluginName = "ASUSWRT"
|
||||||
|
|
||||||
import asyncio
|
import asyncio # noqa: E402 [flake8 lint suppression]
|
||||||
|
import aiohttp # noqa: E402 [flake8 lint suppression]
|
||||||
import aiohttp
|
import conf # noqa: E402 [flake8 lint suppression]
|
||||||
import conf
|
from asusrouter import AsusData, AsusRouter # noqa: E402 [flake8 lint suppression]
|
||||||
from asusrouter import AsusData, AsusRouter
|
from asusrouter.modules.connection import ConnectionState # noqa: E402 [flake8 lint suppression]
|
||||||
from asusrouter.modules.connection import ConnectionState
|
from const import logPath # noqa: E402 [flake8 lint suppression]
|
||||||
from const import logPath
|
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
|
||||||
from helper import get_setting_value
|
from logger import Logger, mylog # noqa: E402 [flake8 lint suppression]
|
||||||
from logger import Logger, mylog
|
from plugin_helper import (Plugin_Objects, handleEmpty) # noqa: E402 [flake8 lint suppression]
|
||||||
from plugin_helper import (Plugin_Objects, handleEmpty)
|
from pytz import timezone # noqa: E402 [flake8 lint suppression]
|
||||||
from pytz import timezone
|
|
||||||
|
|
||||||
conf.tz = timezone(get_setting_value("TIMEZONE"))
|
conf.tz = timezone(get_setting_value("TIMEZONE"))
|
||||||
|
|
||||||
|
|||||||
@@ -8,14 +8,14 @@ from zeroconf import Zeroconf
|
|||||||
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
|
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
|
||||||
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
|
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
|
||||||
|
|
||||||
from plugin_helper import Plugin_Objects
|
from plugin_helper import Plugin_Objects # noqa: E402 [flake8 lint suppression]
|
||||||
from logger import mylog, Logger
|
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
|
||||||
from const import logPath
|
from const import logPath # noqa: E402 [flake8 lint suppression]
|
||||||
from helper import get_setting_value
|
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
|
||||||
from database import DB
|
from database import DB # noqa: E402 [flake8 lint suppression]
|
||||||
from models.device_instance import DeviceInstance
|
from models.device_instance import DeviceInstance # noqa: E402 [flake8 lint suppression]
|
||||||
import conf
|
import conf # noqa: E402 [flake8 lint suppression]
|
||||||
from pytz import timezone
|
from pytz import timezone # noqa: E402 [flake8 lint suppression]
|
||||||
|
|
||||||
# Configure timezone and logging
|
# Configure timezone and logging
|
||||||
conf.tz = timezone(get_setting_value("TIMEZONE"))
|
conf.tz = timezone(get_setting_value("TIMEZONE"))
|
||||||
@@ -67,7 +67,7 @@ def resolve_mdns_name(ip: str, timeout: int = 5) -> str:
|
|||||||
hostname = socket.getnameinfo((ip, 0), socket.NI_NAMEREQD)[0]
|
hostname = socket.getnameinfo((ip, 0), socket.NI_NAMEREQD)[0]
|
||||||
zeroconf.close()
|
zeroconf.close()
|
||||||
if hostname and hostname != ip:
|
if hostname and hostname != ip:
|
||||||
mylog("debug", [f"[{pluginName}] Found mDNS name: {hostname}"])
|
mylog("debug", [f"[{pluginName}] Found mDNS name (rev_name): {hostname} ({rev_name})"])
|
||||||
return hostname
|
return hostname
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
mylog("debug", [f"[{pluginName}] Zeroconf lookup failed for {ip}: {e}"])
|
mylog("debug", [f"[{pluginName}] Zeroconf lookup failed for {ip}: {e}"])
|
||||||
@@ -89,7 +89,7 @@ def main():
|
|||||||
|
|
||||||
timeout = get_setting_value("AVAHISCAN_RUN_TIMEOUT")
|
timeout = get_setting_value("AVAHISCAN_RUN_TIMEOUT")
|
||||||
use_mock = "--mockdata" in sys.argv
|
use_mock = "--mockdata" in sys.argv
|
||||||
|
|
||||||
if use_mock:
|
if use_mock:
|
||||||
mylog("verbose", [f"[{pluginName}] Running in MOCK mode"])
|
mylog("verbose", [f"[{pluginName}] Running in MOCK mode"])
|
||||||
devices = [
|
devices = [
|
||||||
@@ -137,4 +137,4 @@ def main():
|
|||||||
# Entrypoint
|
# Entrypoint
|
||||||
# =============================================================================
|
# =============================================================================
|
||||||
if __name__ == "__main__":
|
if __name__ == "__main__":
|
||||||
main()
|
main()
|
||||||
|
|||||||
@@ -11,11 +11,11 @@ from datetime import datetime
|
|||||||
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
|
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
|
||||||
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
|
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
|
||||||
|
|
||||||
from logger import mylog, Logger
|
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
|
||||||
from helper import get_setting_value
|
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
|
||||||
from const import logPath, fullDbPath
|
from const import logPath, fullDbPath # noqa: E402 [flake8 lint suppression]
|
||||||
import conf
|
import conf # noqa: E402 [flake8 lint suppression]
|
||||||
from pytz import timezone
|
from pytz import timezone # noqa: E402 [flake8 lint suppression]
|
||||||
|
|
||||||
# Make sure the TIMEZONE for logging is correct
|
# Make sure the TIMEZONE for logging is correct
|
||||||
conf.tz = timezone(get_setting_value('TIMEZONE'))
|
conf.tz = timezone(get_setting_value('TIMEZONE'))
|
||||||
@@ -29,6 +29,7 @@ LOG_PATH = logPath + '/plugins'
|
|||||||
LOG_FILE = os.path.join(LOG_PATH, f'script.{pluginName}.log')
|
LOG_FILE = os.path.join(LOG_PATH, f'script.{pluginName}.log')
|
||||||
RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
|
RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
|
||||||
|
|
||||||
|
|
||||||
def main():
|
def main():
|
||||||
|
|
||||||
# the script expects a parameter in the format of devices=device1,device2,...
|
# the script expects a parameter in the format of devices=device1,device2,...
|
||||||
@@ -44,7 +45,7 @@ def main():
|
|||||||
else:
|
else:
|
||||||
overwrite = False
|
overwrite = False
|
||||||
|
|
||||||
mylog('verbose', ['[CSVBCKP] In script'])
|
mylog('verbose', ['[CSVBCKP] In script'])
|
||||||
|
|
||||||
# Connect to the App database
|
# Connect to the App database
|
||||||
conn = sqlite3.connect(fullDbPath)
|
conn = sqlite3.connect(fullDbPath)
|
||||||
@@ -64,7 +65,7 @@ def main():
|
|||||||
|
|
||||||
fullPath = os.path.join(values.location.split('=')[1], filename)
|
fullPath = os.path.join(values.location.split('=')[1], filename)
|
||||||
|
|
||||||
mylog('verbose', ['[CSVBCKP] Writing file ', fullPath])
|
mylog('verbose', ['[CSVBCKP] Writing file ', fullPath])
|
||||||
|
|
||||||
# Create a CSV file in the specified location
|
# Create a CSV file in the specified location
|
||||||
with open(fullPath, 'w', newline='') as csvfile:
|
with open(fullPath, 'w', newline='') as csvfile:
|
||||||
@@ -72,7 +73,7 @@ def main():
|
|||||||
csv_writer = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_MINIMAL)
|
csv_writer = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_MINIMAL)
|
||||||
|
|
||||||
# Wrap the header values in double quotes and write the header row
|
# Wrap the header values in double quotes and write the header row
|
||||||
csv_writer.writerow([ '"' + col + '"' for col in columns])
|
csv_writer.writerow(['"' + col + '"' for col in columns])
|
||||||
|
|
||||||
# Fetch and write data rows
|
# Fetch and write data rows
|
||||||
for row in cursor.fetchall():
|
for row in cursor.fetchall():
|
||||||
@@ -96,8 +97,8 @@ def main():
|
|||||||
return 0
|
return 0
|
||||||
|
|
||||||
|
|
||||||
#===============================================================================
|
# ===============================================================================
|
||||||
# BEGIN
|
# BEGIN
|
||||||
#===============================================================================
|
# ===============================================================================
|
||||||
if __name__ == '__main__':
|
if __name__ == '__main__':
|
||||||
main()
|
main()
|
||||||
|
|||||||
@@ -8,11 +8,11 @@ import sqlite3
|
|||||||
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
|
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
|
||||||
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
|
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
|
||||||
|
|
||||||
from logger import mylog, Logger
|
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
|
||||||
from helper import get_setting_value
|
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
|
||||||
from const import logPath, fullDbPath
|
from const import logPath, fullDbPath # noqa: E402 [flake8 lint suppression]
|
||||||
import conf
|
import conf # noqa: E402 [flake8 lint suppression]
|
||||||
from pytz import timezone
|
from pytz import timezone # noqa: E402 [flake8 lint suppression]
|
||||||
|
|
||||||
# Make sure the TIMEZONE for logging is correct
|
# Make sure the TIMEZONE for logging is correct
|
||||||
conf.tz = timezone(get_setting_value("TIMEZONE"))
|
conf.tz = timezone(get_setting_value("TIMEZONE"))
|
||||||
@@ -81,7 +81,7 @@ def cleanup_database(
|
|||||||
)
|
)
|
||||||
cursor.execute(
|
cursor.execute(
|
||||||
"""DELETE from Online_History where "Index" not in (
|
"""DELETE from Online_History where "Index" not in (
|
||||||
SELECT "Index" from Online_History
|
SELECT "Index" from Online_History
|
||||||
order by Scan_Date desc limit 150)"""
|
order by Scan_Date desc limit 150)"""
|
||||||
)
|
)
|
||||||
|
|
||||||
@@ -94,7 +94,7 @@ def cleanup_database(
|
|||||||
],
|
],
|
||||||
)
|
)
|
||||||
cursor.execute(
|
cursor.execute(
|
||||||
f"""DELETE FROM Events
|
f"""DELETE FROM Events
|
||||||
WHERE eve_DateTime <= date('now', '-{str(DAYS_TO_KEEP_EVENTS)} day')"""
|
WHERE eve_DateTime <= date('now', '-{str(DAYS_TO_KEEP_EVENTS)} day')"""
|
||||||
)
|
)
|
||||||
# -----------------------------------------------------
|
# -----------------------------------------------------
|
||||||
@@ -107,11 +107,11 @@ def cleanup_database(
|
|||||||
)
|
)
|
||||||
|
|
||||||
# Build the SQL query to delete entries that exceed the limit per unique "Plugin" column entry
|
# Build the SQL query to delete entries that exceed the limit per unique "Plugin" column entry
|
||||||
delete_query = f"""DELETE FROM Plugins_History
|
delete_query = f"""DELETE FROM Plugins_History
|
||||||
WHERE "Index" NOT IN (
|
WHERE "Index" NOT IN (
|
||||||
SELECT "Index"
|
SELECT "Index"
|
||||||
FROM (
|
FROM (
|
||||||
SELECT "Index",
|
SELECT "Index",
|
||||||
ROW_NUMBER() OVER(PARTITION BY "Plugin" ORDER BY DateTimeChanged DESC) AS row_num
|
ROW_NUMBER() OVER(PARTITION BY "Plugin" ORDER BY DateTimeChanged DESC) AS row_num
|
||||||
FROM Plugins_History
|
FROM Plugins_History
|
||||||
) AS ranked_objects
|
) AS ranked_objects
|
||||||
@@ -133,11 +133,11 @@ def cleanup_database(
|
|||||||
)
|
)
|
||||||
|
|
||||||
# Build the SQL query to delete entries
|
# Build the SQL query to delete entries
|
||||||
delete_query = f"""DELETE FROM Notifications
|
delete_query = f"""DELETE FROM Notifications
|
||||||
WHERE "Index" NOT IN (
|
WHERE "Index" NOT IN (
|
||||||
SELECT "Index"
|
SELECT "Index"
|
||||||
FROM (
|
FROM (
|
||||||
SELECT "Index",
|
SELECT "Index",
|
||||||
ROW_NUMBER() OVER(PARTITION BY "Notifications" ORDER BY DateTimeCreated DESC) AS row_num
|
ROW_NUMBER() OVER(PARTITION BY "Notifications" ORDER BY DateTimeCreated DESC) AS row_num
|
||||||
FROM Notifications
|
FROM Notifications
|
||||||
) AS ranked_objects
|
) AS ranked_objects
|
||||||
@@ -153,11 +153,11 @@ def cleanup_database(
|
|||||||
mylog("verbose", [f"[{pluginName}] Trim AppEvents to less than {histCount}"])
|
mylog("verbose", [f"[{pluginName}] Trim AppEvents to less than {histCount}"])
|
||||||
|
|
||||||
# Build the SQL query to delete entries
|
# Build the SQL query to delete entries
|
||||||
delete_query = f"""DELETE FROM AppEvents
|
delete_query = f"""DELETE FROM AppEvents
|
||||||
WHERE "Index" NOT IN (
|
WHERE "Index" NOT IN (
|
||||||
SELECT "Index"
|
SELECT "Index"
|
||||||
FROM (
|
FROM (
|
||||||
SELECT "Index",
|
SELECT "Index",
|
||||||
ROW_NUMBER() OVER(PARTITION BY "AppEvents" ORDER BY DateTimeCreated DESC) AS row_num
|
ROW_NUMBER() OVER(PARTITION BY "AppEvents" ORDER BY DateTimeCreated DESC) AS row_num
|
||||||
FROM AppEvents
|
FROM AppEvents
|
||||||
) AS ranked_objects
|
) AS ranked_objects
|
||||||
|
|||||||
@@ -9,11 +9,11 @@ import subprocess
|
|||||||
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
|
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
|
||||||
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
|
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
|
||||||
|
|
||||||
from logger import mylog, Logger
|
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
|
||||||
from helper import get_setting_value, check_IP_format
|
from helper import get_setting_value, check_IP_format # noqa: E402 [flake8 lint suppression]
|
||||||
from const import logPath
|
from const import logPath # noqa: E402 [flake8 lint suppression]
|
||||||
import conf
|
import conf # noqa: E402 [flake8 lint suppression]
|
||||||
from pytz import timezone
|
from pytz import timezone # noqa: E402 [flake8 lint suppression]
|
||||||
|
|
||||||
# Make sure the TIMEZONE for logging is correct
|
# Make sure the TIMEZONE for logging is correct
|
||||||
conf.tz = timezone(get_setting_value('TIMEZONE'))
|
conf.tz = timezone(get_setting_value('TIMEZONE'))
|
||||||
@@ -28,91 +28,88 @@ LOG_FILE = os.path.join(LOG_PATH, f'script.{pluginName}.log')
|
|||||||
RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
|
RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
def main():
|
def main():
|
||||||
|
|
||||||
mylog('verbose', [f'[{pluginName}] In script'])
|
mylog('verbose', [f'[{pluginName}] In script'])
|
||||||
|
|
||||||
parser = argparse.ArgumentParser(description='Check internet connectivity and IP')
|
parser = argparse.ArgumentParser(description='Check internet connectivity and IP')
|
||||||
|
|
||||||
parser.add_argument('prev_ip', action="store", help="Previous IP address to compare against the current IP")
|
parser.add_argument('prev_ip', action="store", help="Previous IP address to compare against the current IP")
|
||||||
parser.add_argument('DDNS_UPDATE_URL', action="store", help="URL for updating Dynamic DNS (DDNS)")
|
parser.add_argument('DDNS_UPDATE_URL', action="store", help="URL for updating Dynamic DNS (DDNS)")
|
||||||
parser.add_argument('DDNS_USER', action="store", help="Username for Dynamic DNS (DDNS) authentication")
|
parser.add_argument('DDNS_USER', action="store", help="Username for Dynamic DNS (DDNS) authentication")
|
||||||
parser.add_argument('DDNS_PASSWORD', action="store", help="Password for Dynamic DNS (DDNS) authentication")
|
parser.add_argument('DDNS_PASSWORD', action="store", help="Password for Dynamic DNS (DDNS) authentication")
|
||||||
parser.add_argument('DDNS_DOMAIN', action="store", help="Dynamic DNS (DDNS) domain name")
|
parser.add_argument('DDNS_DOMAIN', action="store", help="Dynamic DNS (DDNS) domain name")
|
||||||
|
|
||||||
|
|
||||||
values = parser.parse_args()
|
values = parser.parse_args()
|
||||||
|
|
||||||
PREV_IP = values.prev_ip.split('=')[1]
|
PREV_IP = values.prev_ip.split('=')[1]
|
||||||
DDNS_UPDATE_URL = values.DDNS_UPDATE_URL.split('=')[1]
|
DDNS_UPDATE_URL = values.DDNS_UPDATE_URL.split('=')[1]
|
||||||
DDNS_USER = values.DDNS_USER.split('=')[1]
|
DDNS_USER = values.DDNS_USER.split('=')[1]
|
||||||
DDNS_PASSWORD = values.DDNS_PASSWORD.split('=')[1]
|
DDNS_PASSWORD = values.DDNS_PASSWORD.split('=')[1]
|
||||||
DDNS_DOMAIN = values.DDNS_DOMAIN.split('=')[1]
|
DDNS_DOMAIN = values.DDNS_DOMAIN.split('=')[1]
|
||||||
|
|
||||||
# perform the new IP lookup and DDNS tasks if enabled
|
# perform the new IP lookup and DDNS tasks if enabled
|
||||||
ddns_update( DDNS_UPDATE_URL, DDNS_USER, DDNS_PASSWORD, DDNS_DOMAIN, PREV_IP)
|
ddns_update(DDNS_UPDATE_URL, DDNS_USER, DDNS_PASSWORD, DDNS_DOMAIN, PREV_IP)
|
||||||
|
|
||||||
|
mylog('verbose', [f'[{pluginName}] Finished '])
|
||||||
|
|
||||||
mylog('verbose', [f'[{pluginName}] Finished '])
|
|
||||||
|
|
||||||
return 0
|
return 0
|
||||||
|
|
||||||
|
|
||||||
#===============================================================================
|
# ===============================================================================
|
||||||
# INTERNET IP CHANGE
|
# INTERNET IP CHANGE
|
||||||
#===============================================================================
|
# ===============================================================================
|
||||||
def ddns_update ( DDNS_UPDATE_URL, DDNS_USER, DDNS_PASSWORD, DDNS_DOMAIN, PREV_IP ):
|
def ddns_update(DDNS_UPDATE_URL, DDNS_USER, DDNS_PASSWORD, DDNS_DOMAIN, PREV_IP):
|
||||||
|
|
||||||
# Update DDNS record if enabled and IP is different
|
# Update DDNS record if enabled and IP is different
|
||||||
# Get Dynamic DNS IP
|
# Get Dynamic DNS IP
|
||||||
|
|
||||||
mylog('verbose', [f'[{pluginName}] Retrieving Dynamic DNS IP'])
|
mylog('verbose', [f'[{pluginName}] Retrieving Dynamic DNS IP'])
|
||||||
dns_IP = get_dynamic_DNS_IP(DDNS_DOMAIN)
|
dns_IP = get_dynamic_DNS_IP(DDNS_DOMAIN)
|
||||||
|
|
||||||
# Check Dynamic DNS IP
|
# Check Dynamic DNS IP
|
||||||
if dns_IP == "" or dns_IP == "0.0.0.0" :
|
if dns_IP == "" or dns_IP == "0.0.0.0" :
|
||||||
mylog('none', [f'[{pluginName}] Error retrieving Dynamic DNS IP'])
|
mylog('none', [f'[{pluginName}] Error retrieving Dynamic DNS IP'])
|
||||||
|
|
||||||
mylog('none', [f'[{pluginName}] ', dns_IP])
|
mylog('none', [f'[{pluginName}] ', dns_IP])
|
||||||
|
|
||||||
# Check DNS Change
|
# Check DNS Change
|
||||||
if dns_IP != PREV_IP :
|
if dns_IP != PREV_IP :
|
||||||
mylog('none', [f'[{pluginName}] Updating Dynamic DNS IP'])
|
mylog('none', [f'[{pluginName}] Updating Dynamic DNS IP'])
|
||||||
message = set_dynamic_DNS_IP (DDNS_UPDATE_URL, DDNS_USER, DDNS_PASSWORD, DDNS_DOMAIN)
|
message = set_dynamic_DNS_IP(DDNS_UPDATE_URL, DDNS_USER, DDNS_PASSWORD, DDNS_DOMAIN)
|
||||||
mylog('none', [f'[{pluginName}] ', message])
|
mylog('none', [f'[{pluginName}] ', message])
|
||||||
|
|
||||||
# plugin_objects = Plugin_Objects(RESULT_FILE)
|
# plugin_objects = Plugin_Objects(RESULT_FILE)
|
||||||
|
|
||||||
# plugin_objects.add_object(
|
# plugin_objects.add_object(
|
||||||
# primaryId = 'Internet', # MAC (Device Name)
|
# primaryId = 'Internet', # MAC (Device Name)
|
||||||
# secondaryId = new_internet_IP, # IP Address
|
# secondaryId = new_internet_IP, # IP Address
|
||||||
# watched1 = f'Previous IP: {PREV_IP}',
|
# watched1 = f'Previous IP: {PREV_IP}',
|
||||||
# watched2 = '',
|
# watched2 = '',
|
||||||
# watched3 = '',
|
# watched3 = '',
|
||||||
# watched4 = '',
|
# watched4 = '',
|
||||||
# extra = f'Previous IP: {PREV_IP}',
|
# extra = f'Previous IP: {PREV_IP}',
|
||||||
# foreignKey = 'Internet')
|
# foreignKey = 'Internet')
|
||||||
|
|
||||||
# plugin_objects.write_result_file()
|
# plugin_objects.write_result_file()
|
||||||
|
|
||||||
|
|
||||||
#-------------------------------------------------------------------------------
|
|
||||||
def get_dynamic_DNS_IP (DDNS_DOMAIN):
|
# -------------------------------------------------------------------------------
|
||||||
|
def get_dynamic_DNS_IP(DDNS_DOMAIN):
|
||||||
|
|
||||||
# Using supplied DNS server
|
# Using supplied DNS server
|
||||||
dig_args = ['dig', '+short', DDNS_DOMAIN]
|
dig_args = ['dig', '+short', DDNS_DOMAIN]
|
||||||
|
|
||||||
try:
|
try:
|
||||||
# try runnning a subprocess
|
# try runnning a subprocess
|
||||||
dig_output = subprocess.check_output (dig_args, universal_newlines=True)
|
dig_output = subprocess.check_output(dig_args, universal_newlines=True)
|
||||||
mylog('none', [f'[{pluginName}] DIG output :', dig_output])
|
mylog('none', [f'[{pluginName}] DIG output :', dig_output])
|
||||||
except subprocess.CalledProcessError as e:
|
except subprocess.CalledProcessError as e:
|
||||||
# An error occured, handle it
|
# An error occured, handle it
|
||||||
mylog('none', [f'[{pluginName}] ⚠ ERROR - ', e.output])
|
mylog('none', [f'[{pluginName}] ⚠ ERROR - ', e.output])
|
||||||
dig_output = '' # probably no internet
|
dig_output = '' # probably no internet
|
||||||
|
|
||||||
# Check result is an IP
|
# Check result is an IP
|
||||||
IP = check_IP_format (dig_output)
|
IP = check_IP_format(dig_output)
|
||||||
|
|
||||||
# Handle invalid response
|
# Handle invalid response
|
||||||
if IP == '':
|
if IP == '':
|
||||||
@@ -120,28 +117,27 @@ def get_dynamic_DNS_IP (DDNS_DOMAIN):
|
|||||||
|
|
||||||
return IP
|
return IP
|
||||||
|
|
||||||
#-------------------------------------------------------------------------------
|
|
||||||
def set_dynamic_DNS_IP (DDNS_UPDATE_URL, DDNS_USER, DDNS_PASSWORD, DDNS_DOMAIN):
|
# -------------------------------------------------------------------------------
|
||||||
|
def set_dynamic_DNS_IP(DDNS_UPDATE_URL, DDNS_USER, DDNS_PASSWORD, DDNS_DOMAIN):
|
||||||
try:
|
try:
|
||||||
# try runnning a subprocess
|
# try runnning a subprocess
|
||||||
# Update Dynamic IP
|
# Update Dynamic IP
|
||||||
curl_output = subprocess.check_output (['curl',
|
curl_output = subprocess.check_output([
|
||||||
'-s',
|
'curl',
|
||||||
DDNS_UPDATE_URL +
|
'-s',
|
||||||
'username=' + DDNS_USER +
|
DDNS_UPDATE_URL + 'username=' + DDNS_USER + '&password=' + DDNS_PASSWORD + '&hostname=' + DDNS_DOMAIN],
|
||||||
'&password=' + DDNS_PASSWORD +
|
universal_newlines=True)
|
||||||
'&hostname=' + DDNS_DOMAIN],
|
|
||||||
universal_newlines=True)
|
|
||||||
except subprocess.CalledProcessError as e:
|
except subprocess.CalledProcessError as e:
|
||||||
# An error occured, handle it
|
# An error occured, handle it
|
||||||
mylog('none', [f'[{pluginName}] ⚠ ERROR - ',e.output])
|
mylog('none', [f'[{pluginName}] ⚠ ERROR - ', e.output])
|
||||||
curl_output = ""
|
curl_output = ""
|
||||||
|
|
||||||
return curl_output
|
return curl_output
|
||||||
|
|
||||||
|
|
||||||
#===============================================================================
|
# ===============================================================================
|
||||||
# BEGIN
|
# BEGIN
|
||||||
#===============================================================================
|
# ===============================================================================
|
||||||
if __name__ == '__main__':
|
if __name__ == '__main__':
|
||||||
main()
|
main()
|
||||||
|
|||||||
@@ -4,19 +4,19 @@ from __future__ import unicode_literals
|
|||||||
import argparse
|
import argparse
|
||||||
import os
|
import os
|
||||||
import sys
|
import sys
|
||||||
import chardet
|
import chardet
|
||||||
|
|
||||||
# Register NetAlertX directories
|
# Register NetAlertX directories
|
||||||
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
|
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
|
||||||
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
|
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
|
||||||
|
|
||||||
from plugin_helper import Plugin_Objects, handleEmpty, is_mac
|
from plugin_helper import Plugin_Objects, handleEmpty, is_mac # noqa: E402 [flake8 lint suppression]
|
||||||
from logger import mylog, Logger
|
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
|
||||||
from dhcp_leases import DhcpLeases
|
from dhcp_leases import DhcpLeases # noqa: E402 [flake8 lint suppression]
|
||||||
from helper import get_setting_value
|
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
|
||||||
import conf
|
import conf # noqa: E402 [flake8 lint suppression]
|
||||||
from const import logPath
|
from const import logPath # noqa: E402 [flake8 lint suppression]
|
||||||
from pytz import timezone
|
from pytz import timezone # noqa: E402 [flake8 lint suppression]
|
||||||
|
|
||||||
# Make sure the TIMEZONE for logging is correct
|
# Make sure the TIMEZONE for logging is correct
|
||||||
conf.tz = timezone(get_setting_value('TIMEZONE'))
|
conf.tz = timezone(get_setting_value('TIMEZONE'))
|
||||||
@@ -24,34 +24,38 @@ conf.tz = timezone(get_setting_value('TIMEZONE'))
|
|||||||
# Make sure log level is initialized correctly
|
# Make sure log level is initialized correctly
|
||||||
Logger(get_setting_value('LOG_LEVEL'))
|
Logger(get_setting_value('LOG_LEVEL'))
|
||||||
|
|
||||||
pluginName= 'DHCPLSS'
|
pluginName = 'DHCPLSS'
|
||||||
|
|
||||||
LOG_PATH = logPath + '/plugins'
|
LOG_PATH = logPath + '/plugins'
|
||||||
LOG_FILE = os.path.join(LOG_PATH, f'script.{pluginName}.log')
|
LOG_FILE = os.path.join(LOG_PATH, f'script.{pluginName}.log')
|
||||||
RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
|
RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
# -------------------------------------------------------------
|
# -------------------------------------------------------------
|
||||||
def main():
|
def main():
|
||||||
mylog('verbose', [f'[{pluginName}] In script'])
|
mylog('verbose', [f'[{pluginName}] In script'])
|
||||||
last_run_logfile = open(RESULT_FILE, 'a')
|
last_run_logfile = open(RESULT_FILE, 'a')
|
||||||
last_run_logfile.write("")
|
last_run_logfile.write("")
|
||||||
|
|
||||||
parser = argparse.ArgumentParser(description='Import devices from dhcp.leases files')
|
parser = argparse.ArgumentParser(description='Import devices from dhcp.leases files')
|
||||||
parser.add_argument('paths', action="store", help="absolute dhcp.leases file paths to check separated by ','")
|
parser.add_argument(
|
||||||
|
'paths',
|
||||||
|
action="store",
|
||||||
|
help="absolute dhcp.leases file paths to check separated by ','"
|
||||||
|
)
|
||||||
|
|
||||||
values = parser.parse_args()
|
values = parser.parse_args()
|
||||||
|
|
||||||
plugin_objects = Plugin_Objects(RESULT_FILE)
|
plugin_objects = Plugin_Objects(RESULT_FILE)
|
||||||
|
|
||||||
if values.paths:
|
if values.paths:
|
||||||
for path in values.paths.split('=')[1].split(','):
|
for path in values.paths.split('=')[1].split(','):
|
||||||
plugin_objects = get_entries(path, plugin_objects)
|
plugin_objects = get_entries(path, plugin_objects)
|
||||||
mylog('verbose', [f'[{pluginName}] {len(plugin_objects)} Entries found in "{path}"'])
|
mylog('verbose', [f'[{pluginName}] {len(plugin_objects)} Entries found in "{path}"'])
|
||||||
|
|
||||||
plugin_objects.write_result_file()
|
plugin_objects.write_result_file()
|
||||||
|
|
||||||
|
|
||||||
# -------------------------------------------------------------
|
# -------------------------------------------------------------
|
||||||
def get_entries(path, plugin_objects):
|
def get_entries(path, plugin_objects):
|
||||||
|
|
||||||
@@ -66,7 +70,7 @@ def get_entries(path, plugin_objects):
|
|||||||
# Use the detected encoding
|
# Use the detected encoding
|
||||||
encoding = result['encoding']
|
encoding = result['encoding']
|
||||||
|
|
||||||
# Order: MAC, IP, IsActive, NAME, Hardware
|
# Order: MAC, IP, IsActive, NAME, Hardware
|
||||||
# Handle pihole-specific dhcp.leases files
|
# Handle pihole-specific dhcp.leases files
|
||||||
if 'pihole' in path:
|
if 'pihole' in path:
|
||||||
with open(path, 'r', encoding=encoding, errors='replace') as f:
|
with open(path, 'r', encoding=encoding, errors='replace') as f:
|
||||||
@@ -111,9 +115,9 @@ def get_entries(path, plugin_objects):
|
|||||||
if is_mac(lease.ethernet):
|
if is_mac(lease.ethernet):
|
||||||
|
|
||||||
plugin_objects.add_object(
|
plugin_objects.add_object(
|
||||||
primaryId = handleEmpty(lease.ethernet),
|
primaryId = handleEmpty(lease.ethernet),
|
||||||
secondaryId = handleEmpty(lease.ip),
|
secondaryId = handleEmpty(lease.ip),
|
||||||
watched1 = handleEmpty(lease.active),
|
watched1 = handleEmpty(lease.active),
|
||||||
watched2 = handleEmpty(lease.hostname),
|
watched2 = handleEmpty(lease.hostname),
|
||||||
watched3 = handleEmpty(lease.hardware),
|
watched3 = handleEmpty(lease.hardware),
|
||||||
watched4 = handleEmpty(lease.binding_state),
|
watched4 = handleEmpty(lease.binding_state),
|
||||||
@@ -122,5 +126,6 @@ def get_entries(path, plugin_objects):
|
|||||||
)
|
)
|
||||||
return plugin_objects
|
return plugin_objects
|
||||||
|
|
||||||
if __name__ == '__main__':
|
|
||||||
main()
|
if __name__ == '__main__':
|
||||||
|
main()
|
||||||
|
|||||||
@@ -3,7 +3,6 @@
|
|||||||
|
|
||||||
import subprocess
|
import subprocess
|
||||||
import os
|
import os
|
||||||
from datetime import datetime
|
|
||||||
|
|
||||||
import sys
|
import sys
|
||||||
|
|
||||||
@@ -11,12 +10,12 @@ import sys
|
|||||||
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
|
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
|
||||||
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
|
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
|
||||||
|
|
||||||
from plugin_helper import Plugin_Objects, Plugin_Object
|
from plugin_helper import Plugin_Objects, Plugin_Object # noqa: E402 [flake8 lint suppression]
|
||||||
from logger import mylog, Logger
|
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
|
||||||
from helper import get_setting_value
|
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
|
||||||
import conf
|
import conf # noqa: E402 [flake8 lint suppression]
|
||||||
from pytz import timezone
|
from pytz import timezone # noqa: E402 [flake8 lint suppression]
|
||||||
from const import logPath
|
from const import logPath # noqa: E402 [flake8 lint suppression]
|
||||||
|
|
||||||
|
|
||||||
# Make sure the TIMEZONE for logging is correct
|
# Make sure the TIMEZONE for logging is correct
|
||||||
@@ -31,13 +30,14 @@ LOG_PATH = logPath + '/plugins'
|
|||||||
LOG_FILE = os.path.join(LOG_PATH, f'script.{pluginName}.log')
|
LOG_FILE = os.path.join(LOG_PATH, f'script.{pluginName}.log')
|
||||||
RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
|
RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
|
||||||
|
|
||||||
|
|
||||||
def main():
|
def main():
|
||||||
|
|
||||||
mylog('verbose', ['[DHCPSRVS] In script'])
|
mylog('verbose', ['[DHCPSRVS] In script'])
|
||||||
|
|
||||||
last_run_logfile = open(RESULT_FILE, 'a')
|
last_run_logfile = open(RESULT_FILE, 'a')
|
||||||
last_run_logfile.write("")
|
last_run_logfile.write("")
|
||||||
|
|
||||||
plugin_objects = Plugin_Objects(RESULT_FILE)
|
plugin_objects = Plugin_Objects(RESULT_FILE)
|
||||||
timeoutSec = get_setting_value('DHCPSRVS_RUN_TIMEOUT')
|
timeoutSec = get_setting_value('DHCPSRVS_RUN_TIMEOUT')
|
||||||
|
|
||||||
@@ -46,10 +46,10 @@ def main():
|
|||||||
try:
|
try:
|
||||||
# Number of DHCP discovery probes to send
|
# Number of DHCP discovery probes to send
|
||||||
dhcp_probes = 1
|
dhcp_probes = 1
|
||||||
|
|
||||||
# Initialize a list to store output lines from the scan
|
# Initialize a list to store output lines from the scan
|
||||||
newLines = []
|
newLines = []
|
||||||
|
|
||||||
for _ in range(dhcp_probes):
|
for _ in range(dhcp_probes):
|
||||||
output = subprocess.check_output(nmapArgs, universal_newlines=True, stderr=subprocess.STDOUT, timeout=timeoutSec)
|
output = subprocess.check_output(nmapArgs, universal_newlines=True, stderr=subprocess.STDOUT, timeout=timeoutSec)
|
||||||
newLines += output.split("\n")
|
newLines += output.split("\n")
|
||||||
@@ -57,9 +57,9 @@ def main():
|
|||||||
newEntries = []
|
newEntries = []
|
||||||
|
|
||||||
for line in newLines:
|
for line in newLines:
|
||||||
|
|
||||||
mylog('verbose', [f'[DHCPSRVS] Processing line: {line} '])
|
mylog('verbose', [f'[DHCPSRVS] Processing line: {line} '])
|
||||||
|
|
||||||
if 'Response ' in line and ' of ' in line:
|
if 'Response ' in line and ' of ' in line:
|
||||||
newEntries.append(Plugin_Object())
|
newEntries.append(Plugin_Object())
|
||||||
elif 'Server Identifier' in line:
|
elif 'Server Identifier' in line:
|
||||||
@@ -85,7 +85,7 @@ def main():
|
|||||||
newEntries[-1].extra += ',' + newVal
|
newEntries[-1].extra += ',' + newVal
|
||||||
|
|
||||||
for e in newEntries:
|
for e in newEntries:
|
||||||
|
|
||||||
plugin_objects.add_object(
|
plugin_objects.add_object(
|
||||||
primaryId=e.primaryId,
|
primaryId=e.primaryId,
|
||||||
secondaryId=e.secondaryId,
|
secondaryId=e.secondaryId,
|
||||||
@@ -101,5 +101,6 @@ def main():
|
|||||||
except Exception as e:
|
except Exception as e:
|
||||||
mylog('verbose', ['[DHCPSRVS] Error in main:', str(e)])
|
mylog('verbose', ['[DHCPSRVS] Error in main:', str(e)])
|
||||||
|
|
||||||
|
|
||||||
if __name__ == '__main__':
|
if __name__ == '__main__':
|
||||||
main()
|
main()
|
||||||
|
|||||||
@@ -1,5 +1,4 @@
|
|||||||
#!/usr/bin/env python
|
#!/usr/bin/env python
|
||||||
|
|
||||||
import os
|
import os
|
||||||
import sys
|
import sys
|
||||||
import subprocess
|
import subprocess
|
||||||
@@ -8,14 +7,14 @@ import subprocess
|
|||||||
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
|
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
|
||||||
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
|
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
|
||||||
|
|
||||||
from plugin_helper import Plugin_Objects
|
from plugin_helper import Plugin_Objects # noqa: E402 [flake8 lint suppression]
|
||||||
from logger import mylog, Logger
|
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
|
||||||
from const import logPath
|
from const import logPath # noqa: E402 [flake8 lint suppression]
|
||||||
from helper import get_setting_value
|
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
|
||||||
from database import DB
|
from database import DB # noqa: E402 [flake8 lint suppression]
|
||||||
from models.device_instance import DeviceInstance
|
from models.device_instance import DeviceInstance # noqa: E402 [flake8 lint suppression]
|
||||||
import conf
|
import conf # noqa: E402 [flake8 lint suppression]
|
||||||
from pytz import timezone
|
from pytz import timezone # noqa: E402 [flake8 lint suppression]
|
||||||
|
|
||||||
# Make sure the TIMEZONE for logging is correct
|
# Make sure the TIMEZONE for logging is correct
|
||||||
conf.tz = timezone(get_setting_value('TIMEZONE'))
|
conf.tz = timezone(get_setting_value('TIMEZONE'))
|
||||||
@@ -35,7 +34,7 @@ plugin_objects = Plugin_Objects(RESULT_FILE)
|
|||||||
|
|
||||||
|
|
||||||
def main():
|
def main():
|
||||||
mylog('verbose', [f'[{pluginName}] In script'])
|
mylog('verbose', [f'[{pluginName}] In script'])
|
||||||
|
|
||||||
timeout = get_setting_value('DIGSCAN_RUN_TIMEOUT')
|
timeout = get_setting_value('DIGSCAN_RUN_TIMEOUT')
|
||||||
|
|
||||||
@@ -50,13 +49,13 @@ def main():
|
|||||||
device_handler = DeviceInstance(db)
|
device_handler = DeviceInstance(db)
|
||||||
|
|
||||||
# Retrieve devices
|
# Retrieve devices
|
||||||
if get_setting_value("REFRESH_FQDN"):
|
if get_setting_value("REFRESH_FQDN"):
|
||||||
devices = device_handler.getAll()
|
devices = device_handler.getAll()
|
||||||
else:
|
else:
|
||||||
devices = device_handler.getUnknown()
|
devices = device_handler.getUnknown()
|
||||||
|
|
||||||
mylog('verbose', [f'[{pluginName}] Devices count: {len(devices)}'])
|
mylog('verbose', [f'[{pluginName}] Devices count: {len(devices)}'])
|
||||||
|
|
||||||
# TEST - below is a WINDOWS host IP
|
# TEST - below is a WINDOWS host IP
|
||||||
# execute_name_lookup('192.168.1.121', timeout)
|
# execute_name_lookup('192.168.1.121', timeout)
|
||||||
|
|
||||||
@@ -65,27 +64,27 @@ def main():
|
|||||||
|
|
||||||
if domain_name != '':
|
if domain_name != '':
|
||||||
plugin_objects.add_object(
|
plugin_objects.add_object(
|
||||||
# "MAC", "IP", "Server", "Name"
|
primaryId = device['devMac'],
|
||||||
primaryId = device['devMac'],
|
secondaryId = device['devLastIP'],
|
||||||
secondaryId = device['devLastIP'],
|
watched1 = dns_server,
|
||||||
watched1 = dns_server,
|
watched2 = domain_name,
|
||||||
watched2 = domain_name,
|
watched3 = '',
|
||||||
watched3 = '',
|
watched4 = '',
|
||||||
watched4 = '',
|
extra = '',
|
||||||
extra = '',
|
foreignKey = device['devMac']
|
||||||
foreignKey = device['devMac'])
|
)
|
||||||
|
|
||||||
plugin_objects.write_result_file()
|
plugin_objects.write_result_file()
|
||||||
|
|
||||||
|
mylog('verbose', [f'[{pluginName}] Script finished'])
|
||||||
mylog('verbose', [f'[{pluginName}] Script finished'])
|
|
||||||
|
|
||||||
return 0
|
return 0
|
||||||
|
|
||||||
#===============================================================================
|
|
||||||
|
# ===============================================================================
|
||||||
# Execute scan
|
# Execute scan
|
||||||
#===============================================================================
|
# ===============================================================================
|
||||||
def execute_name_lookup (ip, timeout):
|
def execute_name_lookup(ip, timeout):
|
||||||
"""
|
"""
|
||||||
Execute the DIG command on IP.
|
Execute the DIG command on IP.
|
||||||
"""
|
"""
|
||||||
@@ -97,32 +96,38 @@ def execute_name_lookup (ip, timeout):
|
|||||||
|
|
||||||
try:
|
try:
|
||||||
mylog('verbose', [f'[{pluginName}] DEBUG CMD :', args])
|
mylog('verbose', [f'[{pluginName}] DEBUG CMD :', args])
|
||||||
|
|
||||||
# try runnning a subprocess with a forced (timeout) in case the subprocess hangs
|
# try runnning a subprocess with a forced (timeout) in case the subprocess hangs
|
||||||
output = subprocess.check_output (args, universal_newlines=True, stderr=subprocess.STDOUT, timeout=(timeout), text=True).strip()
|
output = subprocess.check_output(
|
||||||
|
args,
|
||||||
|
universal_newlines=True,
|
||||||
|
stderr=subprocess.STDOUT,
|
||||||
|
timeout=(timeout),
|
||||||
|
text=True
|
||||||
|
).strip()
|
||||||
|
|
||||||
mylog('verbose', [f'[{pluginName}] DEBUG OUTPUT : {output}'])
|
mylog('verbose', [f'[{pluginName}] DEBUG OUTPUT : {output}'])
|
||||||
|
|
||||||
domain_name = output
|
domain_name = output
|
||||||
dns_server = ''
|
dns_server = ''
|
||||||
|
|
||||||
mylog('verbose', [f'[{pluginName}] Domain Name: {domain_name}'])
|
mylog('verbose', [f'[{pluginName}] Domain Name: {domain_name}'])
|
||||||
|
|
||||||
return domain_name, dns_server
|
return domain_name, dns_server
|
||||||
|
|
||||||
except subprocess.CalledProcessError as e:
|
except subprocess.CalledProcessError as e:
|
||||||
mylog('verbose', [f'[{pluginName}] ⚠ ERROR - {e.output}'])
|
mylog('verbose', [f'[{pluginName}] ⚠ ERROR - {e.output}'])
|
||||||
|
|
||||||
except subprocess.TimeoutExpired:
|
|
||||||
mylog('verbose', [f'[{pluginName}] TIMEOUT - the process forcefully terminated as timeout reached'])
|
|
||||||
|
|
||||||
if output == "": # check if the subprocess failed
|
except subprocess.TimeoutExpired:
|
||||||
mylog('verbose', [f'[{pluginName}] Scan: FAIL - check logs'])
|
mylog('verbose', [f'[{pluginName}] TIMEOUT - the process forcefully terminated as timeout reached'])
|
||||||
else:
|
|
||||||
|
if output == "": # check if the subprocess failed
|
||||||
|
mylog('verbose', [f'[{pluginName}] Scan: FAIL - check logs'])
|
||||||
|
else:
|
||||||
mylog('verbose', [f'[{pluginName}] Scan: SUCCESS'])
|
mylog('verbose', [f'[{pluginName}] Scan: SUCCESS'])
|
||||||
|
|
||||||
return '', ''
|
return '', ''
|
||||||
|
|
||||||
|
|
||||||
if __name__ == '__main__':
|
if __name__ == '__main__':
|
||||||
main()
|
main()
|
||||||
|
|
||||||
|
|||||||
@@ -17,11 +17,12 @@ from aiofreepybox.exceptions import NotOpenError, AuthorizationError
|
|||||||
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
|
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
|
||||||
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
|
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
|
||||||
|
|
||||||
from plugin_helper import Plugin_Objects
|
from plugin_helper import Plugin_Objects # noqa: E402 [flake8 lint suppression]
|
||||||
from logger import mylog, Logger
|
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
|
||||||
from const import logPath
|
from const import logPath # noqa: E402 [flake8 lint suppression]
|
||||||
from helper import get_setting_value
|
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
|
||||||
import conf
|
import conf # noqa: E402 [flake8 lint suppression]
|
||||||
|
from utils.datetime_utils import timeNowDB, DATETIME_PATTERN # noqa: E402 [flake8 lint suppression]
|
||||||
|
|
||||||
# Make sure the TIMEZONE for logging is correct
|
# Make sure the TIMEZONE for logging is correct
|
||||||
conf.tz = timezone(get_setting_value("TIMEZONE"))
|
conf.tz = timezone(get_setting_value("TIMEZONE"))
|
||||||
@@ -79,6 +80,7 @@ def map_device_type(type: str):
|
|||||||
mylog("minimal", [f"[{pluginName}] Unknown device type: {type}"])
|
mylog("minimal", [f"[{pluginName}] Unknown device type: {type}"])
|
||||||
return device_type_map["other"]
|
return device_type_map["other"]
|
||||||
|
|
||||||
|
|
||||||
async def get_device_data(api_version: int, api_address: str, api_port: int):
|
async def get_device_data(api_version: int, api_address: str, api_port: int):
|
||||||
# ensure existence of db path
|
# ensure existence of db path
|
||||||
config_base = Path(os.getenv("NETALERTX_CONFIG", "/data/config"))
|
config_base = Path(os.getenv("NETALERTX_CONFIG", "/data/config"))
|
||||||
@@ -149,7 +151,7 @@ def main():
|
|||||||
watched1=freebox["name"],
|
watched1=freebox["name"],
|
||||||
watched2=freebox["operator"],
|
watched2=freebox["operator"],
|
||||||
watched3="Gateway",
|
watched3="Gateway",
|
||||||
watched4=datetime.now,
|
watched4=timeNowDB(),
|
||||||
extra="",
|
extra="",
|
||||||
foreignKey=freebox["mac"],
|
foreignKey=freebox["mac"],
|
||||||
)
|
)
|
||||||
@@ -165,7 +167,7 @@ def main():
|
|||||||
watched1=host.get("primary_name", "(unknown)"),
|
watched1=host.get("primary_name", "(unknown)"),
|
||||||
watched2=host.get("vendor_name", "(unknown)"),
|
watched2=host.get("vendor_name", "(unknown)"),
|
||||||
watched3=map_device_type(host.get("host_type", "")),
|
watched3=map_device_type(host.get("host_type", "")),
|
||||||
watched4=datetime.fromtimestamp(ip.get("last_time_reachable", 0)),
|
watched4=datetime.fromtimestamp(ip.get("last_time_reachable", 0)).strftime(DATETIME_PATTERN),
|
||||||
extra="",
|
extra="",
|
||||||
foreignKey=mac,
|
foreignKey=mac,
|
||||||
)
|
)
|
||||||
|
|||||||
@@ -11,14 +11,14 @@ import re
|
|||||||
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
|
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
|
||||||
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
|
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
|
||||||
|
|
||||||
from plugin_helper import Plugin_Objects
|
from plugin_helper import Plugin_Objects # noqa: E402 [flake8 lint suppression]
|
||||||
from logger import mylog, Logger
|
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
|
||||||
from helper import get_setting_value
|
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
|
||||||
from const import logPath
|
from const import logPath # noqa: E402 [flake8 lint suppression]
|
||||||
from database import DB
|
from database import DB # noqa: E402 [flake8 lint suppression]
|
||||||
from models.device_instance import DeviceInstance
|
from models.device_instance import DeviceInstance # noqa: E402 [flake8 lint suppression]
|
||||||
import conf
|
import conf # noqa: E402 [flake8 lint suppression]
|
||||||
from pytz import timezone
|
from pytz import timezone # noqa: E402 [flake8 lint suppression]
|
||||||
|
|
||||||
# Make sure the TIMEZONE for logging is correct
|
# Make sure the TIMEZONE for logging is correct
|
||||||
conf.tz = timezone(get_setting_value('TIMEZONE'))
|
conf.tz = timezone(get_setting_value('TIMEZONE'))
|
||||||
@@ -33,16 +33,14 @@ LOG_FILE = os.path.join(LOG_PATH, f'script.{pluginName}.log')
|
|||||||
RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
|
RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
def main():
|
def main():
|
||||||
|
|
||||||
mylog('verbose', [f'[{pluginName}] In script'])
|
mylog('verbose', [f'[{pluginName}] In script'])
|
||||||
|
|
||||||
|
|
||||||
timeout = get_setting_value('ICMP_RUN_TIMEOUT')
|
timeout = get_setting_value('ICMP_RUN_TIMEOUT')
|
||||||
args = get_setting_value('ICMP_ARGS')
|
args = get_setting_value('ICMP_ARGS')
|
||||||
in_regex = get_setting_value('ICMP_IN_REGEX')
|
in_regex = get_setting_value('ICMP_IN_REGEX')
|
||||||
|
|
||||||
# Create a database connection
|
# Create a database connection
|
||||||
db = DB() # instance of class DB
|
db = DB() # instance of class DB
|
||||||
db.open()
|
db.open()
|
||||||
@@ -61,46 +59,45 @@ def main():
|
|||||||
|
|
||||||
# Filter devices based on the regex match
|
# Filter devices based on the regex match
|
||||||
filtered_devices = [
|
filtered_devices = [
|
||||||
device for device in all_devices
|
device for device in all_devices
|
||||||
if regex_pattern.match(device['devLastIP'])
|
if regex_pattern.match(device['devLastIP'])
|
||||||
]
|
]
|
||||||
|
|
||||||
|
mylog('verbose', [f'[{pluginName}] Devices to PING: {len(filtered_devices)}'])
|
||||||
mylog('verbose', [f'[{pluginName}] Devices to PING: {len(filtered_devices)}'])
|
|
||||||
|
|
||||||
for device in filtered_devices:
|
for device in filtered_devices:
|
||||||
is_online, output = execute_scan(device['devLastIP'], timeout, args)
|
is_online, output = execute_scan(device['devLastIP'], timeout, args)
|
||||||
|
|
||||||
mylog('verbose', [f"[{pluginName}] ip: {device['devLastIP']} is_online: {is_online}"])
|
mylog('verbose', [f"[{pluginName}] ip: {device['devLastIP']} is_online: {is_online}"])
|
||||||
|
|
||||||
|
|
||||||
if is_online:
|
if is_online:
|
||||||
plugin_objects.add_object(
|
plugin_objects.add_object(
|
||||||
# "MAC", "IP", "Name", "Output"
|
# "MAC", "IP", "Name", "Output"
|
||||||
primaryId = device['devMac'],
|
primaryId = device['devMac'],
|
||||||
secondaryId = device['devLastIP'],
|
secondaryId = device['devLastIP'],
|
||||||
watched1 = device['devName'],
|
watched1 = device['devName'],
|
||||||
watched2 = output.replace('\n',''),
|
watched2 = output.replace('\n', ''),
|
||||||
watched3 = '',
|
watched3 = '',
|
||||||
watched4 = '',
|
watched4 = '',
|
||||||
extra = '',
|
extra = '',
|
||||||
foreignKey = device['devMac'])
|
foreignKey = device['devMac']
|
||||||
|
)
|
||||||
|
|
||||||
plugin_objects.write_result_file()
|
plugin_objects.write_result_file()
|
||||||
|
|
||||||
|
mylog('verbose', [f'[{pluginName}] Script finished'])
|
||||||
mylog('verbose', [f'[{pluginName}] Script finished'])
|
|
||||||
|
|
||||||
return 0
|
return 0
|
||||||
|
|
||||||
#===============================================================================
|
|
||||||
|
# ===============================================================================
|
||||||
# Execute scan
|
# Execute scan
|
||||||
#===============================================================================
|
# ===============================================================================
|
||||||
def execute_scan (ip, timeout, args):
|
def execute_scan(ip, timeout, args):
|
||||||
"""
|
"""
|
||||||
Execute the ICMP command on IP.
|
Execute the ICMP command on IP.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
icmp_args = ['ping'] + args.split() + [ip]
|
icmp_args = ['ping'] + args.split() + [ip]
|
||||||
|
|
||||||
# Execute command
|
# Execute command
|
||||||
@@ -108,12 +105,18 @@ def execute_scan (ip, timeout, args):
|
|||||||
|
|
||||||
try:
|
try:
|
||||||
# try runnning a subprocess with a forced (timeout) in case the subprocess hangs
|
# try runnning a subprocess with a forced (timeout) in case the subprocess hangs
|
||||||
output = subprocess.check_output (icmp_args, universal_newlines=True, stderr=subprocess.STDOUT, timeout=(timeout), text=True)
|
output = subprocess.check_output(
|
||||||
|
icmp_args,
|
||||||
|
universal_newlines=True,
|
||||||
|
stderr=subprocess.STDOUT,
|
||||||
|
timeout=(timeout),
|
||||||
|
text=True
|
||||||
|
)
|
||||||
|
|
||||||
mylog('verbose', [f'[{pluginName}] DEBUG OUTPUT : {output}'])
|
mylog('verbose', [f'[{pluginName}] DEBUG OUTPUT : {output}'])
|
||||||
|
|
||||||
# Parse output using case-insensitive regular expressions
|
# Parse output using case-insensitive regular expressions
|
||||||
#Synology-NAS:/# ping -i 0.5 -c 3 -W 8 -w 9 192.168.1.82
|
# Synology-NAS:/# ping -i 0.5 -c 3 -W 8 -w 9 192.168.1.82
|
||||||
# PING 192.168.1.82 (192.168.1.82): 56 data bytes
|
# PING 192.168.1.82 (192.168.1.82): 56 data bytes
|
||||||
# 64 bytes from 192.168.1.82: seq=0 ttl=64 time=0.080 ms
|
# 64 bytes from 192.168.1.82: seq=0 ttl=64 time=0.080 ms
|
||||||
# 64 bytes from 192.168.1.82: seq=1 ttl=64 time=0.081 ms
|
# 64 bytes from 192.168.1.82: seq=1 ttl=64 time=0.081 ms
|
||||||
@@ -130,7 +133,7 @@ def execute_scan (ip, timeout, args):
|
|||||||
# --- 192.168.1.92 ping statistics ---
|
# --- 192.168.1.92 ping statistics ---
|
||||||
# 3 packets transmitted, 0 packets received, 100% packet loss
|
# 3 packets transmitted, 0 packets received, 100% packet loss
|
||||||
|
|
||||||
# TODO: parse output and return True if online, False if Offline (100% packet loss, bad address)
|
# TODO: parse output and return True if online, False if Offline (100% packet loss, bad address)
|
||||||
is_online = True
|
is_online = True
|
||||||
|
|
||||||
# Check for 0% packet loss in the output
|
# Check for 0% packet loss in the output
|
||||||
@@ -145,22 +148,20 @@ def execute_scan (ip, timeout, args):
|
|||||||
|
|
||||||
except subprocess.CalledProcessError as e:
|
except subprocess.CalledProcessError as e:
|
||||||
# An error occurred, handle it
|
# An error occurred, handle it
|
||||||
mylog('verbose', [f'[{pluginName}] ⚠ ERROR - check logs'])
|
mylog('verbose', [f'[{pluginName}] ⚠ ERROR - check logs'])
|
||||||
mylog('verbose', [f'[{pluginName}]', e.output])
|
mylog('verbose', [f'[{pluginName}]', e.output])
|
||||||
|
|
||||||
return False, output
|
return False, output
|
||||||
|
|
||||||
except subprocess.TimeoutExpired:
|
except subprocess.TimeoutExpired:
|
||||||
mylog('verbose', [f'[{pluginName}] TIMEOUT - the process forcefully terminated as timeout reached'])
|
mylog('verbose', [f'[{pluginName}] TIMEOUT - the process forcefully terminated as timeout reached'])
|
||||||
return False, output
|
return False, output
|
||||||
|
|
||||||
return False, output
|
return False, output
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
#===============================================================================
|
|
||||||
|
# ===============================================================================
|
||||||
# BEGIN
|
# BEGIN
|
||||||
#===============================================================================
|
# ===============================================================================
|
||||||
if __name__ == '__main__':
|
if __name__ == '__main__':
|
||||||
main()
|
main()
|
||||||
|
|||||||
@@ -11,13 +11,13 @@ import re
|
|||||||
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
|
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
|
||||||
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
|
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
|
||||||
|
|
||||||
from plugin_helper import Plugin_Objects
|
from plugin_helper import Plugin_Objects # noqa: E402 [flake8 lint suppression]
|
||||||
from utils.datetime_utils import timeNowDB
|
from utils.datetime_utils import timeNowDB # noqa: E402 [flake8 lint suppression]
|
||||||
from logger import mylog, Logger, append_line_to_file
|
from logger import mylog, Logger, append_line_to_file # noqa: E402 [flake8 lint suppression]
|
||||||
from helper import check_IP_format, get_setting_value
|
from helper import check_IP_format, get_setting_value # noqa: E402 [flake8 lint suppression]
|
||||||
from const import logPath
|
from const import logPath # noqa: E402 [flake8 lint suppression]
|
||||||
import conf
|
import conf # noqa: E402 [flake8 lint suppression]
|
||||||
from pytz import timezone
|
from pytz import timezone # noqa: E402 [flake8 lint suppression]
|
||||||
|
|
||||||
# Make sure the TIMEZONE for logging is correct
|
# Make sure the TIMEZONE for logging is correct
|
||||||
conf.tz = timezone(get_setting_value('TIMEZONE'))
|
conf.tz = timezone(get_setting_value('TIMEZONE'))
|
||||||
@@ -31,39 +31,39 @@ LOG_PATH = logPath + '/plugins'
|
|||||||
LOG_FILE = os.path.join(LOG_PATH, f'script.{pluginName}.log')
|
LOG_FILE = os.path.join(LOG_PATH, f'script.{pluginName}.log')
|
||||||
RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
|
RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
|
||||||
|
|
||||||
|
|
||||||
no_internet_ip = '0.0.0.0'
|
no_internet_ip = '0.0.0.0'
|
||||||
|
|
||||||
|
|
||||||
def main():
|
def main():
|
||||||
|
|
||||||
mylog('verbose', [f'[{pluginName}] In script'])
|
mylog('verbose', [f'[{pluginName}] In script'])
|
||||||
|
|
||||||
parser = argparse.ArgumentParser(description='Check internet connectivity and IP')
|
parser = argparse.ArgumentParser(description='Check internet connectivity and IP')
|
||||||
|
|
||||||
parser.add_argument('prev_ip', action="store", help="Previous IP address to compare against the current IP")
|
parser.add_argument('prev_ip', action="store", help="Previous IP address to compare against the current IP")
|
||||||
parser.add_argument('DIG_GET_IP_ARG', action="store", help="Arguments for the 'dig' command to retrieve the IP address") # unused
|
parser.add_argument('DIG_GET_IP_ARG', action="store", help="Arguments for the 'dig' command to retrieve the IP address") # unused
|
||||||
|
|
||||||
values = parser.parse_args()
|
values = parser.parse_args()
|
||||||
|
|
||||||
PREV_IP = values.prev_ip.split('=')[1]
|
PREV_IP = values.prev_ip.split('=')[1]
|
||||||
DIG_GET_IP_ARG = get_setting_value("INTRNT_DIG_GET_IP_ARG")
|
DIG_GET_IP_ARG = get_setting_value("INTRNT_DIG_GET_IP_ARG")
|
||||||
|
|
||||||
new_internet_IP = no_internet_ip
|
new_internet_IP = no_internet_ip
|
||||||
|
|
||||||
mylog('verbose', [f'[{pluginName}] INTRNT_DIG_GET_IP_ARG: ', DIG_GET_IP_ARG])
|
mylog('verbose', [f'[{pluginName}] INTRNT_DIG_GET_IP_ARG: ', DIG_GET_IP_ARG])
|
||||||
|
|
||||||
# METHOD 1: dig
|
# METHOD 1: dig
|
||||||
# perform the new IP lookup N times specified by the INTRNT_TRIES setting
|
# perform the new IP lookup N times specified by the INTRNT_TRIES setting
|
||||||
|
|
||||||
INTRNT_RETRIES = get_setting_value("INTRNT_RETRIES")
|
INTRNT_RETRIES = get_setting_value("INTRNT_RETRIES")
|
||||||
retries_needed = 0
|
retries_needed = 0
|
||||||
|
|
||||||
for i in range(INTRNT_RETRIES + 1):
|
for i in range(INTRNT_RETRIES + 1):
|
||||||
|
|
||||||
new_internet_IP, cmd_output = check_internet_IP( PREV_IP, DIG_GET_IP_ARG)
|
new_internet_IP, cmd_output = check_internet_IP(PREV_IP, DIG_GET_IP_ARG)
|
||||||
|
|
||||||
if new_internet_IP == no_internet_ip:
|
if new_internet_IP == no_internet_ip:
|
||||||
time.sleep(1*i) # Exponential backoff strategy
|
time.sleep(1 * i) # Exponential backoff strategy
|
||||||
else:
|
else:
|
||||||
retries_needed = i
|
retries_needed = i
|
||||||
break
|
break
|
||||||
@@ -71,68 +71,69 @@ def main():
|
|||||||
# METHOD 2: curl
|
# METHOD 2: curl
|
||||||
if new_internet_IP == no_internet_ip:
|
if new_internet_IP == no_internet_ip:
|
||||||
new_internet_IP, cmd_output = fallback_check_ip()
|
new_internet_IP, cmd_output = fallback_check_ip()
|
||||||
mylog('verbose', [f'[{pluginName}] Curl Fallback (new_internet_IP|cmd_output): {new_internet_IP} | {cmd_output}'])
|
mylog('verbose', [f'[{pluginName}] Curl Fallback (new_internet_IP|cmd_output): {new_internet_IP} | {cmd_output}'])
|
||||||
|
|
||||||
# logging
|
# logging
|
||||||
append_line_to_file (logPath + '/IP_changes.log', '['+str(timeNowDB()) +']\t'+ new_internet_IP +'\n')
|
append_line_to_file(logPath + '/IP_changes.log', '[' + str(timeNowDB()) + ']\t' + new_internet_IP + '\n')
|
||||||
|
|
||||||
|
plugin_objects = Plugin_Objects(RESULT_FILE)
|
||||||
|
|
||||||
plugin_objects = Plugin_Objects(RESULT_FILE)
|
|
||||||
|
|
||||||
plugin_objects.add_object(
|
plugin_objects.add_object(
|
||||||
primaryId = 'Internet', # MAC (Device Name)
|
primaryId = 'Internet', # MAC (Device Name)
|
||||||
secondaryId = new_internet_IP, # IP Address
|
secondaryId = new_internet_IP, # IP Address
|
||||||
watched1 = f'Previous IP: {PREV_IP}',
|
watched1 = f'Previous IP: {PREV_IP}',
|
||||||
watched2 = cmd_output.replace('\n',''),
|
watched2 = cmd_output.replace('\n', ''),
|
||||||
watched3 = retries_needed,
|
watched3 = retries_needed,
|
||||||
watched4 = 'Gateway',
|
watched4 = 'Gateway',
|
||||||
extra = f'Previous IP: {PREV_IP}',
|
extra = f'Previous IP: {PREV_IP}',
|
||||||
foreignKey = 'Internet')
|
foreignKey = 'Internet'
|
||||||
|
)
|
||||||
|
|
||||||
plugin_objects.write_result_file()
|
plugin_objects.write_result_file()
|
||||||
|
|
||||||
|
mylog('verbose', [f'[{pluginName}] Finished '])
|
||||||
|
|
||||||
mylog('verbose', [f'[{pluginName}] Finished '])
|
|
||||||
|
|
||||||
return 0
|
return 0
|
||||||
|
|
||||||
|
|
||||||
#===============================================================================
|
# ===============================================================================
|
||||||
# INTERNET IP CHANGE
|
# INTERNET IP CHANGE
|
||||||
#===============================================================================
|
# ===============================================================================
|
||||||
def check_internet_IP ( PREV_IP, DIG_GET_IP_ARG ):
|
def check_internet_IP(PREV_IP, DIG_GET_IP_ARG):
|
||||||
|
|
||||||
# Get Internet IP
|
# Get Internet IP
|
||||||
mylog('verbose', [f'[{pluginName}] - Retrieving Internet IP'])
|
mylog('verbose', [f'[{pluginName}] - Retrieving Internet IP'])
|
||||||
internet_IP, cmd_output = get_internet_IP(DIG_GET_IP_ARG)
|
internet_IP, cmd_output = get_internet_IP(DIG_GET_IP_ARG)
|
||||||
|
|
||||||
mylog('verbose', [f'[{pluginName}] Current internet_IP : {internet_IP}'])
|
mylog('verbose', [f'[{pluginName}] Current internet_IP : {internet_IP}'])
|
||||||
|
|
||||||
# Check previously stored IP
|
# Check previously stored IP
|
||||||
previous_IP = no_internet_ip
|
previous_IP = no_internet_ip
|
||||||
|
|
||||||
if PREV_IP is not None and len(PREV_IP) > 0 :
|
if PREV_IP is not None and len(PREV_IP) > 0 :
|
||||||
previous_IP = PREV_IP
|
previous_IP = PREV_IP
|
||||||
|
|
||||||
mylog('verbose', [f'[{pluginName}] previous_IP : {previous_IP}'])
|
mylog('verbose', [f'[{pluginName}] previous_IP : {previous_IP}'])
|
||||||
|
|
||||||
return internet_IP, cmd_output
|
return internet_IP, cmd_output
|
||||||
|
|
||||||
|
|
||||||
#-------------------------------------------------------------------------------
|
|
||||||
def get_internet_IP (DIG_GET_IP_ARG):
|
# -------------------------------------------------------------------------------
|
||||||
|
def get_internet_IP(DIG_GET_IP_ARG):
|
||||||
|
|
||||||
cmd_output = ''
|
cmd_output = ''
|
||||||
|
|
||||||
# Using 'dig'
|
# Using 'dig'
|
||||||
dig_args = ['dig', '+short'] + DIG_GET_IP_ARG.strip().split()
|
dig_args = ['dig', '+short'] + DIG_GET_IP_ARG.strip().split()
|
||||||
try:
|
try:
|
||||||
cmd_output = subprocess.check_output (dig_args, universal_newlines=True)
|
cmd_output = subprocess.check_output(dig_args, universal_newlines=True)
|
||||||
mylog('verbose', [f'[{pluginName}] DIG result : {cmd_output}'])
|
mylog('verbose', [f'[{pluginName}] DIG result : {cmd_output}'])
|
||||||
except subprocess.CalledProcessError as e:
|
except subprocess.CalledProcessError as e:
|
||||||
mylog('verbose', [e.output])
|
mylog('verbose', [e.output])
|
||||||
cmd_output = '' # no internet
|
cmd_output = '' # no internet
|
||||||
|
|
||||||
# Check result is an IP
|
# Check result is an IP
|
||||||
IP = check_IP_format (cmd_output)
|
IP = check_IP_format(cmd_output)
|
||||||
|
|
||||||
# Handle invalid response
|
# Handle invalid response
|
||||||
if IP == '':
|
if IP == '':
|
||||||
@@ -140,7 +141,8 @@ def get_internet_IP (DIG_GET_IP_ARG):
|
|||||||
|
|
||||||
return IP, cmd_output
|
return IP, cmd_output
|
||||||
|
|
||||||
#-------------------------------------------------------------------------------
|
|
||||||
|
# -------------------------------------------------------------------------------
|
||||||
def fallback_check_ip():
|
def fallback_check_ip():
|
||||||
"""Fallback mechanism using `curl ifconfig.me/ip`."""
|
"""Fallback mechanism using `curl ifconfig.me/ip`."""
|
||||||
try:
|
try:
|
||||||
@@ -155,8 +157,9 @@ def fallback_check_ip():
|
|||||||
mylog('none', [f'[{pluginName}] Fallback curl exception: {e}'])
|
mylog('none', [f'[{pluginName}] Fallback curl exception: {e}'])
|
||||||
return no_internet_ip, f'Fallback via curl exception: "{e}"'
|
return no_internet_ip, f'Fallback via curl exception: "{e}"'
|
||||||
|
|
||||||
#===============================================================================
|
|
||||||
|
# ===============================================================================
|
||||||
# BEGIN
|
# BEGIN
|
||||||
#===============================================================================
|
# ===============================================================================
|
||||||
if __name__ == '__main__':
|
if __name__ == '__main__':
|
||||||
main()
|
main()
|
||||||
|
|||||||
@@ -1,6 +1,5 @@
|
|||||||
#!/usr/bin/env python
|
#!/usr/bin/env python
|
||||||
|
|
||||||
import argparse
|
|
||||||
import os
|
import os
|
||||||
import sys
|
import sys
|
||||||
import speedtest
|
import speedtest
|
||||||
@@ -9,13 +8,13 @@ import speedtest
|
|||||||
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
|
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
|
||||||
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
|
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
|
||||||
|
|
||||||
from plugin_helper import Plugin_Objects
|
from plugin_helper import Plugin_Objects # noqa: E402 [flake8 lint suppression]
|
||||||
from utils.datetime_utils import timeNowDB
|
from utils.datetime_utils import timeNowDB # noqa: E402 [flake8 lint suppression]
|
||||||
from logger import mylog, Logger
|
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
|
||||||
from helper import get_setting_value
|
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
|
||||||
import conf
|
import conf # noqa: E402 [flake8 lint suppression]
|
||||||
from pytz import timezone
|
from pytz import timezone # noqa: E402 [flake8 lint suppression]
|
||||||
from const import logPath
|
from const import logPath # noqa: E402 [flake8 lint suppression]
|
||||||
|
|
||||||
# Make sure the TIMEZONE for logging is correct
|
# Make sure the TIMEZONE for logging is correct
|
||||||
conf.tz = timezone(get_setting_value('TIMEZONE'))
|
conf.tz = timezone(get_setting_value('TIMEZONE'))
|
||||||
@@ -28,18 +27,16 @@ pluginName = 'INTRSPD'
|
|||||||
LOG_PATH = logPath + '/plugins'
|
LOG_PATH = logPath + '/plugins'
|
||||||
RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
|
RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
|
||||||
|
|
||||||
def main():
|
|
||||||
|
|
||||||
mylog('verbose', ['[INTRSPD] In script'])
|
|
||||||
|
|
||||||
parser = argparse.ArgumentParser(description='Speedtest Plugin for NetAlertX')
|
def main():
|
||||||
values = parser.parse_args()
|
|
||||||
|
mylog('verbose', ['[INTRSPD] In script'])
|
||||||
|
|
||||||
plugin_objects = Plugin_Objects(RESULT_FILE)
|
plugin_objects = Plugin_Objects(RESULT_FILE)
|
||||||
speedtest_result = run_speedtest()
|
speedtest_result = run_speedtest()
|
||||||
plugin_objects.add_object(
|
plugin_objects.add_object(
|
||||||
primaryId = 'Speedtest',
|
primaryId = 'Speedtest',
|
||||||
secondaryId = timeNowDB(),
|
secondaryId = timeNowDB(),
|
||||||
watched1 = speedtest_result['download_speed'],
|
watched1 = speedtest_result['download_speed'],
|
||||||
watched2 = speedtest_result['upload_speed'],
|
watched2 = speedtest_result['upload_speed'],
|
||||||
watched3 = 'null',
|
watched3 = 'null',
|
||||||
@@ -49,25 +46,27 @@ def main():
|
|||||||
)
|
)
|
||||||
plugin_objects.write_result_file()
|
plugin_objects.write_result_file()
|
||||||
|
|
||||||
|
|
||||||
def run_speedtest():
|
def run_speedtest():
|
||||||
try:
|
try:
|
||||||
st = speedtest.Speedtest(secure=True)
|
st = speedtest.Speedtest(secure=True)
|
||||||
st.get_best_server()
|
st.get_best_server()
|
||||||
download_speed = round(st.download() / 10**6, 2) # Convert to Mbps
|
download_speed = round(st.download() / 10**6, 2) # Convert to Mbps
|
||||||
upload_speed = round(st.upload() / 10**6, 2) # Convert to Mbps
|
upload_speed = round(st.upload() / 10**6, 2) # Convert to Mbps
|
||||||
|
|
||||||
mylog('verbose', [f"[INTRSPD] Result (down|up): {str(download_speed)} Mbps|{upload_speed} Mbps"])
|
mylog('verbose', [f"[INTRSPD] Result (down|up): {str(download_speed)} Mbps|{upload_speed} Mbps"])
|
||||||
|
|
||||||
return {
|
return {
|
||||||
'download_speed': download_speed,
|
'download_speed': download_speed,
|
||||||
'upload_speed': upload_speed,
|
'upload_speed': upload_speed,
|
||||||
}
|
}
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
mylog('verbose', [f"[INTRSPD] Error running speedtest: {str(e)}"])
|
mylog('verbose', [f"[INTRSPD] Error running speedtest: {str(e)}"])
|
||||||
return {
|
return {
|
||||||
'download_speed': -1,
|
'download_speed': -1,
|
||||||
'upload_speed': -1,
|
'upload_speed': -1,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
if __name__ == '__main__':
|
if __name__ == '__main__':
|
||||||
sys.exit(main())
|
sys.exit(main())
|
||||||
|
|||||||
@@ -11,11 +11,11 @@ from functools import reduce
|
|||||||
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
|
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
|
||||||
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
|
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
|
||||||
|
|
||||||
from plugin_helper import Plugin_Objects
|
from plugin_helper import Plugin_Objects # noqa: E402 [flake8 lint suppression]
|
||||||
from logger import mylog, Logger
|
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
|
||||||
from const import logPath
|
from const import logPath # noqa: E402 [flake8 lint suppression]
|
||||||
from helper import get_setting_value
|
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
|
||||||
import conf
|
import conf # noqa: E402 [flake8 lint suppression]
|
||||||
|
|
||||||
# Make sure the TIMEZONE for logging is correct
|
# Make sure the TIMEZONE for logging is correct
|
||||||
conf.tz = timezone(get_setting_value('TIMEZONE'))
|
conf.tz = timezone(get_setting_value('TIMEZONE'))
|
||||||
@@ -34,9 +34,8 @@ RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
|
|||||||
plugin_objects = Plugin_Objects(RESULT_FILE)
|
plugin_objects = Plugin_Objects(RESULT_FILE)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
def main():
|
def main():
|
||||||
mylog('verbose', [f'[{pluginName}] In script'])
|
mylog('verbose', [f'[{pluginName}] In script'])
|
||||||
|
|
||||||
# Retrieve configuration settings
|
# Retrieve configuration settings
|
||||||
SCAN_SUBNETS = get_setting_value('SCAN_SUBNETS')
|
SCAN_SUBNETS = get_setting_value('SCAN_SUBNETS')
|
||||||
@@ -48,33 +47,33 @@ def main():
|
|||||||
entry.split('--interface=')[-1].strip() for entry in SCAN_SUBNETS if '--interface=' in entry
|
entry.split('--interface=')[-1].strip() for entry in SCAN_SUBNETS if '--interface=' in entry
|
||||||
)
|
)
|
||||||
|
|
||||||
mylog('verbose', [f'[{pluginName}] Interfaces value: "{interfaces}"'])
|
mylog('verbose', [f'[{pluginName}] Interfaces value: "{interfaces}"'])
|
||||||
|
|
||||||
# retrieve data
|
# retrieve data
|
||||||
raw_neighbors = get_neighbors(interfaces)
|
raw_neighbors = get_neighbors(interfaces)
|
||||||
|
|
||||||
neighbors = parse_neighbors(raw_neighbors)
|
neighbors = parse_neighbors(raw_neighbors)
|
||||||
|
|
||||||
# Process the data into native application tables
|
# Process the data into native application tables
|
||||||
if len(neighbors) > 0:
|
if len(neighbors) > 0:
|
||||||
|
|
||||||
for device in neighbors:
|
for device in neighbors:
|
||||||
plugin_objects.add_object(
|
plugin_objects.add_object(
|
||||||
primaryId = device['mac'],
|
primaryId = device['mac'],
|
||||||
secondaryId = device['ip'],
|
secondaryId = device['ip'],
|
||||||
watched4 = device['last_seen'],
|
watched4 = device['last_seen'],
|
||||||
|
|
||||||
# The following are always unknown
|
# The following are always unknown
|
||||||
watched1 = device['hostname'], # don't use these --> handleEmpty(device['hostname']),
|
watched1 = device['hostname'], # don't use these --> handleEmpty(device['hostname']),
|
||||||
watched2 = device['vendor'], # handleEmpty(device['vendor']),
|
watched2 = device['vendor'], # don't use these --> handleEmpty(device['vendor']),
|
||||||
watched3 = device['device_type'], # handleEmpty(device['device_type']),
|
watched3 = device['device_type'], # don't use these --> handleEmpty(device['device_type']),
|
||||||
extra = '',
|
extra = '',
|
||||||
foreignKey = "" #device['mac']
|
foreignKey = "" # device['mac']
|
||||||
# helpVal1 = "Something1", # Optional Helper values to be passed for mapping into the app
|
# helpVal1 = "Something1", # Optional Helper values to be passed for mapping into the app
|
||||||
# helpVal2 = "Something1", # If you need to use even only 1, add the remaining ones too
|
# helpVal2 = "Something1", # If you need to use even only 1, add the remaining ones too
|
||||||
# helpVal3 = "Something1", # and set them to 'null'. Check the the docs for details:
|
# helpVal3 = "Something1", # and set them to 'null'. Check the the docs for details:
|
||||||
# helpVal4 = "Something1", # https://github.com/jokob-sk/NetAlertX/blob/main/docs/PLUGINS_DEV.md
|
# helpVal4 = "Something1", # https://github.com/jokob-sk/NetAlertX/blob/main/docs/PLUGINS_DEV.md
|
||||||
)
|
)
|
||||||
|
|
||||||
mylog('verbose', [f'[{pluginName}] New entries: "{len(neighbors)}"'])
|
mylog('verbose', [f'[{pluginName}] New entries: "{len(neighbors)}"'])
|
||||||
|
|
||||||
@@ -83,13 +82,14 @@ def main():
|
|||||||
|
|
||||||
return 0
|
return 0
|
||||||
|
|
||||||
|
|
||||||
def parse_neighbors(raw_neighbors: list[str]):
|
def parse_neighbors(raw_neighbors: list[str]):
|
||||||
neighbors = []
|
neighbors = []
|
||||||
for line in raw_neighbors:
|
for line in raw_neighbors:
|
||||||
if "lladdr" in line and "REACHABLE" in line:
|
if "lladdr" in line and "REACHABLE" in line:
|
||||||
# Known data
|
# Known data
|
||||||
fields = line.split()
|
fields = line.split()
|
||||||
|
|
||||||
if not is_multicast(fields[0]):
|
if not is_multicast(fields[0]):
|
||||||
# mylog('verbose', [f'[{pluginName}] adding ip {fields[0]}"'])
|
# mylog('verbose', [f'[{pluginName}] adding ip {fields[0]}"'])
|
||||||
neighbor = {}
|
neighbor = {}
|
||||||
@@ -101,9 +101,9 @@ def parse_neighbors(raw_neighbors: list[str]):
|
|||||||
neighbor['hostname'] = '(unknown)'
|
neighbor['hostname'] = '(unknown)'
|
||||||
neighbor['vendor'] = '(unknown)'
|
neighbor['vendor'] = '(unknown)'
|
||||||
neighbor['device_type'] = '(unknown)'
|
neighbor['device_type'] = '(unknown)'
|
||||||
|
|
||||||
neighbors.append(neighbor)
|
neighbors.append(neighbor)
|
||||||
|
|
||||||
return neighbors
|
return neighbors
|
||||||
|
|
||||||
|
|
||||||
@@ -111,6 +111,7 @@ def is_multicast(ip):
|
|||||||
prefixes = ['ff', '224', '231', '232', '233', '234', '238', '239']
|
prefixes = ['ff', '224', '231', '232', '233', '234', '238', '239']
|
||||||
return reduce(lambda acc, prefix: acc or ip.startswith(prefix), prefixes, False)
|
return reduce(lambda acc, prefix: acc or ip.startswith(prefix), prefixes, False)
|
||||||
|
|
||||||
|
|
||||||
# retrieve data
|
# retrieve data
|
||||||
def get_neighbors(interfaces):
|
def get_neighbors(interfaces):
|
||||||
|
|
||||||
@@ -119,7 +120,7 @@ def get_neighbors(interfaces):
|
|||||||
for interface in interfaces.split(","):
|
for interface in interfaces.split(","):
|
||||||
try:
|
try:
|
||||||
|
|
||||||
# Ping all IPv6 devices in multicast to trigger NDP
|
# Ping all IPv6 devices in multicast to trigger NDP
|
||||||
|
|
||||||
mylog('verbose', [f'[{pluginName}] Pinging on interface: "{interface}"'])
|
mylog('verbose', [f'[{pluginName}] Pinging on interface: "{interface}"'])
|
||||||
command = f"ping ff02::1%{interface} -c 2".split()
|
command = f"ping ff02::1%{interface} -c 2".split()
|
||||||
@@ -136,11 +137,11 @@ def get_neighbors(interfaces):
|
|||||||
mylog('verbose', [f'[{pluginName}] Scanning interface succeded: "{interface}"'])
|
mylog('verbose', [f'[{pluginName}] Scanning interface succeded: "{interface}"'])
|
||||||
except subprocess.CalledProcessError as e:
|
except subprocess.CalledProcessError as e:
|
||||||
# An error occurred, handle it
|
# An error occurred, handle it
|
||||||
|
|
||||||
mylog('verbose', [f'[{pluginName}] Scanning interface failed: "{interface}"'])
|
|
||||||
error_type = type(e).__name__ # Capture the error type
|
error_type = type(e).__name__ # Capture the error type
|
||||||
|
mylog('verbose', [f'[{pluginName}] Scanning interface failed: "{interface}" ({error_type})'])
|
||||||
|
|
||||||
return results
|
return results
|
||||||
|
|
||||||
|
|
||||||
if __name__ == '__main__':
|
if __name__ == '__main__':
|
||||||
main()
|
main()
|
||||||
|
|||||||
@@ -7,18 +7,18 @@ INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
|
|||||||
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
|
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
|
||||||
pluginName = 'LUCIRPC'
|
pluginName = 'LUCIRPC'
|
||||||
|
|
||||||
from plugin_helper import Plugin_Objects
|
from plugin_helper import Plugin_Objects # noqa: E402 [flake8 lint suppression]
|
||||||
from logger import mylog, Logger
|
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
|
||||||
from helper import get_setting_value
|
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
|
||||||
from const import logPath
|
from const import logPath # noqa: E402 [flake8 lint suppression]
|
||||||
import conf
|
import conf # noqa: E402 [flake8 lint suppression]
|
||||||
from pytz import timezone
|
from pytz import timezone # noqa: E402 [flake8 lint suppression]
|
||||||
|
|
||||||
try:
|
try:
|
||||||
from openwrt_luci_rpc import OpenWrtRpc
|
from openwrt_luci_rpc import OpenWrtRpc
|
||||||
except:
|
except ImportError as e:
|
||||||
mylog('error', [f'[{pluginName}] Failed import openwrt_luci_rpc'])
|
mylog('error', [f'[{pluginName}] Failed import openwrt_luci_rpc: {e}'])
|
||||||
exit()
|
exit(1)
|
||||||
|
|
||||||
conf.tz = timezone(get_setting_value('TIMEZONE'))
|
conf.tz = timezone(get_setting_value('TIMEZONE'))
|
||||||
|
|
||||||
@@ -30,13 +30,14 @@ RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
|
|||||||
|
|
||||||
plugin_objects = Plugin_Objects(RESULT_FILE)
|
plugin_objects = Plugin_Objects(RESULT_FILE)
|
||||||
|
|
||||||
def main():
|
|
||||||
mylog('verbose', [f'[{pluginName}] start script.'])
|
def main():
|
||||||
|
mylog('verbose', [f'[{pluginName}] start script.'])
|
||||||
|
|
||||||
device_data = get_device_data()
|
device_data = get_device_data()
|
||||||
|
|
||||||
for entry in device_data:
|
for entry in device_data:
|
||||||
mylog('verbose', [f'[{pluginName}] found: ', str(entry.mac).lower()])
|
mylog('verbose', [f'[{pluginName}] found: ', str(entry.mac).lower()])
|
||||||
|
|
||||||
name = str(entry.hostname)
|
name = str(entry.hostname)
|
||||||
|
|
||||||
@@ -45,36 +46,38 @@ def main():
|
|||||||
|
|
||||||
plugin_objects.add_object(
|
plugin_objects.add_object(
|
||||||
primaryId = str(entry.mac).lower(),
|
primaryId = str(entry.mac).lower(),
|
||||||
secondaryId = entry.ip,
|
secondaryId = entry.ip,
|
||||||
watched1 = entry.host,
|
watched1 = entry.host,
|
||||||
watched2 = name,
|
watched2 = name,
|
||||||
watched3 = "",
|
watched3 = "",
|
||||||
watched4 = "",
|
watched4 = "",
|
||||||
extra = pluginName,
|
extra = pluginName,
|
||||||
foreignKey = str(entry.mac).lower())
|
foreignKey = str(entry.mac).lower())
|
||||||
|
|
||||||
plugin_objects.write_result_file()
|
plugin_objects.write_result_file()
|
||||||
|
|
||||||
mylog('verbose', [f'[{pluginName}] Script finished'])
|
mylog('verbose', [f'[{pluginName}] Script finished'])
|
||||||
|
|
||||||
return 0
|
return 0
|
||||||
|
|
||||||
|
|
||||||
def get_device_data():
|
def get_device_data():
|
||||||
router = OpenWrtRpc(
|
router = OpenWrtRpc(
|
||||||
get_setting_value("LUCIRPC_host"),
|
get_setting_value("LUCIRPC_host"),
|
||||||
get_setting_value("LUCIRPC_user"),
|
get_setting_value("LUCIRPC_user"),
|
||||||
get_setting_value("LUCIRPC_password"),
|
get_setting_value("LUCIRPC_password"),
|
||||||
get_setting_value("LUCIRPC_ssl"),
|
get_setting_value("LUCIRPC_ssl"),
|
||||||
get_setting_value("LUCIRPC_verify_ssl")
|
get_setting_value("LUCIRPC_verify_ssl")
|
||||||
)
|
)
|
||||||
|
|
||||||
if router.is_logged_in():
|
if router.is_logged_in():
|
||||||
mylog('verbose', [f'[{pluginName}] login successfully.'])
|
mylog('verbose', [f'[{pluginName}] login successfully.'])
|
||||||
else:
|
else:
|
||||||
mylog('error', [f'[{pluginName}] login fail.'])
|
mylog('error', [f'[{pluginName}] login fail.'])
|
||||||
|
|
||||||
device_data = router.get_all_connected_devices(only_reachable=get_setting_value("LUCIRPC_only_reachable"))
|
device_data = router.get_all_connected_devices(only_reachable=get_setting_value("LUCIRPC_only_reachable"))
|
||||||
return device_data
|
return device_data
|
||||||
|
|
||||||
|
|
||||||
if __name__ == '__main__':
|
if __name__ == '__main__':
|
||||||
main()
|
main()
|
||||||
|
|||||||
@@ -8,12 +8,12 @@ from collections import deque
|
|||||||
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
|
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
|
||||||
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
|
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
|
||||||
|
|
||||||
from logger import mylog, Logger
|
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
|
||||||
from helper import get_setting_value
|
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
|
||||||
from const import logPath
|
from const import logPath # noqa: E402 [flake8 lint suppression]
|
||||||
from messaging.in_app import remove_old
|
from messaging.in_app import remove_old # noqa: E402 [flake8 lint suppression]
|
||||||
import conf
|
import conf # noqa: E402 [flake8 lint suppression]
|
||||||
from pytz import timezone
|
from pytz import timezone # noqa: E402 [flake8 lint suppression]
|
||||||
|
|
||||||
# Make sure the TIMEZONE for logging is correct
|
# Make sure the TIMEZONE for logging is correct
|
||||||
conf.tz = timezone(get_setting_value('TIMEZONE'))
|
conf.tz = timezone(get_setting_value('TIMEZONE'))
|
||||||
@@ -28,10 +28,9 @@ LOG_FILE = os.path.join(LOG_PATH, f'script.{pluginName}.log')
|
|||||||
RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
|
RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
def main():
|
def main():
|
||||||
|
|
||||||
mylog('verbose', [f'[{pluginName}] In script'])
|
mylog('verbose', [f'[{pluginName}] In script'])
|
||||||
|
|
||||||
MAINT_LOG_LENGTH = int(get_setting_value('MAINT_LOG_LENGTH'))
|
MAINT_LOG_LENGTH = int(get_setting_value('MAINT_LOG_LENGTH'))
|
||||||
MAINT_NOTI_LENGTH = int(get_setting_value('MAINT_NOTI_LENGTH'))
|
MAINT_NOTI_LENGTH = int(get_setting_value('MAINT_NOTI_LENGTH'))
|
||||||
@@ -39,7 +38,7 @@ def main():
|
|||||||
# Check if set
|
# Check if set
|
||||||
if MAINT_LOG_LENGTH != 0:
|
if MAINT_LOG_LENGTH != 0:
|
||||||
|
|
||||||
mylog('verbose', [f'[{pluginName}] Cleaning file'])
|
mylog('verbose', [f'[{pluginName}] Cleaning file'])
|
||||||
|
|
||||||
logFile = logPath + "/app.log"
|
logFile = logPath + "/app.log"
|
||||||
|
|
||||||
@@ -54,19 +53,19 @@ def main():
|
|||||||
with open(logFile, 'w') as file:
|
with open(logFile, 'w') as file:
|
||||||
# Write the last N lines back to the file
|
# Write the last N lines back to the file
|
||||||
file.writelines(lines_to_keep)
|
file.writelines(lines_to_keep)
|
||||||
|
|
||||||
mylog('verbose', [f'[{pluginName}] Cleanup finished'])
|
mylog('verbose', [f'[{pluginName}] Cleanup finished'])
|
||||||
|
|
||||||
# Check if set
|
# Check if set
|
||||||
if MAINT_NOTI_LENGTH != 0:
|
if MAINT_NOTI_LENGTH != 0:
|
||||||
mylog('verbose', [f'[{pluginName}] Cleaning in-app notification history'])
|
mylog('verbose', [f'[{pluginName}] Cleaning in-app notification history'])
|
||||||
remove_old(MAINT_NOTI_LENGTH)
|
remove_old(MAINT_NOTI_LENGTH)
|
||||||
|
|
||||||
return 0
|
return 0
|
||||||
|
|
||||||
|
|
||||||
#===============================================================================
|
# ===============================================================================
|
||||||
# BEGIN
|
# BEGIN
|
||||||
#===============================================================================
|
# ===============================================================================
|
||||||
if __name__ == '__main__':
|
if __name__ == '__main__':
|
||||||
main()
|
main()
|
||||||
|
|||||||
@@ -7,14 +7,14 @@ import sys
|
|||||||
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
|
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
|
||||||
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
|
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
|
||||||
|
|
||||||
from plugin_helper import Plugin_Objects
|
from plugin_helper import Plugin_Objects # noqa: E402 [flake8 lint suppression]
|
||||||
from logger import mylog, Logger
|
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
|
||||||
from helper import get_setting_value
|
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
|
||||||
from const import logPath
|
from const import logPath # noqa: E402 [flake8 lint suppression]
|
||||||
import conf
|
import conf # noqa: E402 [flake8 lint suppression]
|
||||||
from pytz import timezone
|
from pytz import timezone # noqa: E402 [flake8 lint suppression]
|
||||||
from librouteros import connect
|
from librouteros import connect # noqa: E402 [flake8 lint suppression]
|
||||||
from librouteros.exceptions import TrapError
|
from librouteros.exceptions import TrapError # noqa: E402 [flake8 lint suppression]
|
||||||
|
|
||||||
# Make sure the TIMEZONE for logging is correct
|
# Make sure the TIMEZONE for logging is correct
|
||||||
conf.tz = timezone(get_setting_value('TIMEZONE'))
|
conf.tz = timezone(get_setting_value('TIMEZONE'))
|
||||||
@@ -29,7 +29,6 @@ LOG_FILE = os.path.join(LOG_PATH, f'script.{pluginName}.log')
|
|||||||
RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
|
RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
def main():
|
def main():
|
||||||
|
|
||||||
mylog('verbose', [f'[{pluginName}] In script'])
|
mylog('verbose', [f'[{pluginName}] In script'])
|
||||||
@@ -49,7 +48,7 @@ def main():
|
|||||||
plugin_objects = get_entries(plugin_objects)
|
plugin_objects = get_entries(plugin_objects)
|
||||||
|
|
||||||
plugin_objects.write_result_file()
|
plugin_objects.write_result_file()
|
||||||
|
|
||||||
mylog('verbose', [f'[{pluginName}] Scan finished, found {len(plugin_objects)} devices'])
|
mylog('verbose', [f'[{pluginName}] Scan finished, found {len(plugin_objects)} devices'])
|
||||||
|
|
||||||
|
|
||||||
@@ -58,10 +57,10 @@ def get_entries(plugin_objects: Plugin_Objects) -> Plugin_Objects:
|
|||||||
try:
|
try:
|
||||||
# connect router
|
# connect router
|
||||||
api = connect(username=MT_USER, password=MT_PASS, host=MT_HOST, port=MT_PORT)
|
api = connect(username=MT_USER, password=MT_PASS, host=MT_HOST, port=MT_PORT)
|
||||||
|
|
||||||
# get dhcp leases
|
# get dhcp leases
|
||||||
leases = api('/ip/dhcp-server/lease/print')
|
leases = api('/ip/dhcp-server/lease/print')
|
||||||
|
|
||||||
for lease in leases:
|
for lease in leases:
|
||||||
lease_id = lease.get('.id')
|
lease_id = lease.get('.id')
|
||||||
address = lease.get('address')
|
address = lease.get('address')
|
||||||
@@ -71,8 +70,11 @@ def get_entries(plugin_objects: Plugin_Objects) -> Plugin_Objects:
|
|||||||
last_seen = lease.get('last-seen')
|
last_seen = lease.get('last-seen')
|
||||||
status = lease.get('status')
|
status = lease.get('status')
|
||||||
device_name = comment or host_name or "(unknown)"
|
device_name = comment or host_name or "(unknown)"
|
||||||
|
|
||||||
mylog('verbose', [f"ID: {lease_id}, Address: {address}, MAC Address: {mac_address}, Host Name: {host_name}, Comment: {comment}, Last Seen: {last_seen}, Status: {status}"])
|
mylog(
|
||||||
|
'verbose',
|
||||||
|
[f"ID: {lease_id}, Address: {address}, MAC Address: {mac_address}, Host Name: {host_name}, Comment: {comment}, Last Seen: {last_seen}, Status: {status}"]
|
||||||
|
)
|
||||||
|
|
||||||
if (status == "bound"):
|
if (status == "bound"):
|
||||||
plugin_objects.add_object(
|
plugin_objects.add_object(
|
||||||
@@ -83,7 +85,7 @@ def get_entries(plugin_objects: Plugin_Objects) -> Plugin_Objects:
|
|||||||
watched3 = host_name,
|
watched3 = host_name,
|
||||||
watched4 = last_seen,
|
watched4 = last_seen,
|
||||||
extra = '',
|
extra = '',
|
||||||
helpVal1 = comment,
|
helpVal1 = comment,
|
||||||
foreignKey = mac_address)
|
foreignKey = mac_address)
|
||||||
|
|
||||||
except TrapError as e:
|
except TrapError as e:
|
||||||
@@ -91,13 +93,13 @@ def get_entries(plugin_objects: Plugin_Objects) -> Plugin_Objects:
|
|||||||
except Exception as e:
|
except Exception as e:
|
||||||
mylog('error', [f"Failed to connect to MikroTik API: {e}"])
|
mylog('error', [f"Failed to connect to MikroTik API: {e}"])
|
||||||
|
|
||||||
mylog('verbose', [f'[{pluginName}] Script finished'])
|
mylog('verbose', [f'[{pluginName}] Script finished'])
|
||||||
|
|
||||||
return plugin_objects
|
return plugin_objects
|
||||||
|
|
||||||
|
|
||||||
#===============================================================================
|
# ===============================================================================
|
||||||
# BEGIN
|
# BEGIN
|
||||||
#===============================================================================
|
# ===============================================================================
|
||||||
if __name__ == '__main__':
|
if __name__ == '__main__':
|
||||||
main()
|
main()
|
||||||
|
|||||||
@@ -8,14 +8,14 @@ import subprocess
|
|||||||
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
|
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
|
||||||
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
|
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
|
||||||
|
|
||||||
from plugin_helper import Plugin_Objects
|
from plugin_helper import Plugin_Objects # noqa: E402 [flake8 lint suppression]
|
||||||
from logger import mylog, Logger
|
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
|
||||||
from const import logPath
|
from const import logPath # noqa: E402 [flake8 lint suppression]
|
||||||
from helper import get_setting_value
|
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
|
||||||
from database import DB
|
from database import DB # noqa: E402 [flake8 lint suppression]
|
||||||
from models.device_instance import DeviceInstance
|
from models.device_instance import DeviceInstance # noqa: E402 [flake8 lint suppression]
|
||||||
import conf
|
import conf # noqa: E402 [flake8 lint suppression]
|
||||||
from pytz import timezone
|
from pytz import timezone # noqa: E402 [flake8 lint suppression]
|
||||||
|
|
||||||
# Make sure the TIMEZONE for logging is correct
|
# Make sure the TIMEZONE for logging is correct
|
||||||
conf.tz = timezone(get_setting_value('TIMEZONE'))
|
conf.tz = timezone(get_setting_value('TIMEZONE'))
|
||||||
@@ -34,9 +34,8 @@ RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
|
|||||||
plugin_objects = Plugin_Objects(RESULT_FILE)
|
plugin_objects = Plugin_Objects(RESULT_FILE)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
def main():
|
def main():
|
||||||
mylog('verbose', [f'[{pluginName}] In script'])
|
mylog('verbose', [f'[{pluginName}] In script'])
|
||||||
|
|
||||||
# timeout = get_setting_value('NBLOOKUP_RUN_TIMEOUT')
|
# timeout = get_setting_value('NBLOOKUP_RUN_TIMEOUT')
|
||||||
timeout = 20
|
timeout = 20
|
||||||
@@ -52,13 +51,13 @@ def main():
|
|||||||
device_handler = DeviceInstance(db)
|
device_handler = DeviceInstance(db)
|
||||||
|
|
||||||
# Retrieve devices
|
# Retrieve devices
|
||||||
if get_setting_value("REFRESH_FQDN"):
|
if get_setting_value("REFRESH_FQDN"):
|
||||||
devices = device_handler.getAll()
|
devices = device_handler.getAll()
|
||||||
else:
|
else:
|
||||||
devices = device_handler.getUnknown()
|
devices = device_handler.getUnknown()
|
||||||
|
|
||||||
mylog('verbose', [f'[{pluginName}] Devices count: {len(devices)}'])
|
mylog('verbose', [f'[{pluginName}] Devices count: {len(devices)}'])
|
||||||
|
|
||||||
# TEST - below is a WINDOWS host IP
|
# TEST - below is a WINDOWS host IP
|
||||||
# execute_name_lookup('192.168.1.121', timeout)
|
# execute_name_lookup('192.168.1.121', timeout)
|
||||||
|
|
||||||
@@ -67,31 +66,32 @@ def main():
|
|||||||
|
|
||||||
if domain_name != '':
|
if domain_name != '':
|
||||||
plugin_objects.add_object(
|
plugin_objects.add_object(
|
||||||
# "MAC", "IP", "Server", "Name"
|
# "MAC", "IP", "Server", "Name"
|
||||||
primaryId = device['devMac'],
|
primaryId = device['devMac'],
|
||||||
secondaryId = device['devLastIP'],
|
secondaryId = device['devLastIP'],
|
||||||
watched1 = dns_server,
|
watched1 = dns_server,
|
||||||
watched2 = domain_name,
|
watched2 = domain_name,
|
||||||
watched3 = '',
|
watched3 = '',
|
||||||
watched4 = '',
|
watched4 = '',
|
||||||
extra = '',
|
extra = '',
|
||||||
foreignKey = device['devMac'])
|
foreignKey = device['devMac']
|
||||||
|
)
|
||||||
|
|
||||||
plugin_objects.write_result_file()
|
plugin_objects.write_result_file()
|
||||||
|
|
||||||
|
mylog('verbose', [f'[{pluginName}] Script finished'])
|
||||||
mylog('verbose', [f'[{pluginName}] Script finished'])
|
|
||||||
|
|
||||||
return 0
|
return 0
|
||||||
|
|
||||||
#===============================================================================
|
|
||||||
|
# ===============================================================================
|
||||||
# Execute scan
|
# Execute scan
|
||||||
#===============================================================================
|
# ===============================================================================
|
||||||
def execute_name_lookup (ip, timeout):
|
def execute_name_lookup(ip, timeout):
|
||||||
"""
|
"""
|
||||||
Execute the NBTSCAN command on IP.
|
Execute the NBTSCAN command on IP.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
args = ['nbtscan', ip]
|
args = ['nbtscan', ip]
|
||||||
|
|
||||||
# Execute command
|
# Execute command
|
||||||
@@ -99,20 +99,25 @@ def execute_name_lookup (ip, timeout):
|
|||||||
|
|
||||||
try:
|
try:
|
||||||
mylog('verbose', [f'[{pluginName}] DEBUG CMD :', args])
|
mylog('verbose', [f'[{pluginName}] DEBUG CMD :', args])
|
||||||
|
|
||||||
# try runnning a subprocess with a forced (timeout) in case the subprocess hangs
|
# try runnning a subprocess with a forced (timeout) in case the subprocess hangs
|
||||||
output = subprocess.check_output (args, universal_newlines=True, stderr=subprocess.STDOUT, timeout=(timeout), text=True)
|
output = subprocess.check_output(
|
||||||
|
args,
|
||||||
|
universal_newlines=True,
|
||||||
|
stderr=subprocess.STDOUT,
|
||||||
|
timeout=(timeout),
|
||||||
|
text=True
|
||||||
|
)
|
||||||
|
|
||||||
mylog('verbose', [f'[{pluginName}] DEBUG OUTPUT : {output}'])
|
mylog('verbose', [f'[{pluginName}] DEBUG OUTPUT : {output}'])
|
||||||
|
|
||||||
domain_name = ''
|
domain_name = ''
|
||||||
dns_server = ''
|
dns_server = ''
|
||||||
|
|
||||||
# Split the output into lines
|
# Split the output into lines
|
||||||
lines = output.splitlines()
|
lines = output.splitlines()
|
||||||
|
|
||||||
# Look for the first line containing a valid NetBIOS name entry
|
# Look for the first line containing a valid NetBIOS name entry
|
||||||
index = 0
|
|
||||||
for line in lines:
|
for line in lines:
|
||||||
if 'Doing NBT name scan' not in line and ip in line:
|
if 'Doing NBT name scan' not in line and ip in line:
|
||||||
# Split the line and extract the primary NetBIOS name
|
# Split the line and extract the primary NetBIOS name
|
||||||
@@ -121,7 +126,6 @@ def execute_name_lookup (ip, timeout):
|
|||||||
domain_name = parts[1]
|
domain_name = parts[1]
|
||||||
else:
|
else:
|
||||||
mylog('verbose', [f'[{pluginName}] ⚠ ERROR - Unexpected output format: {line}'])
|
mylog('verbose', [f'[{pluginName}] ⚠ ERROR - Unexpected output format: {line}'])
|
||||||
|
|
||||||
|
|
||||||
mylog('verbose', [f'[{pluginName}] Domain Name: {domain_name}'])
|
mylog('verbose', [f'[{pluginName}] Domain Name: {domain_name}'])
|
||||||
|
|
||||||
@@ -132,18 +136,21 @@ def execute_name_lookup (ip, timeout):
|
|||||||
# if "NXDOMAIN" in e.output:
|
# if "NXDOMAIN" in e.output:
|
||||||
# mylog('verbose', [f'[{pluginName}]', f"No PTR record found for IP: {ip}"])
|
# mylog('verbose', [f'[{pluginName}]', f"No PTR record found for IP: {ip}"])
|
||||||
# else:
|
# else:
|
||||||
mylog('verbose', [f'[{pluginName}] ⚠ ERROR - {e.output}'])
|
mylog('verbose', [f'[{pluginName}] ⚠ ERROR - {e.output}'])
|
||||||
|
|
||||||
except subprocess.TimeoutExpired:
|
|
||||||
mylog('verbose', [f'[{pluginName}] TIMEOUT - the process forcefully terminated as timeout reached'])
|
|
||||||
|
|
||||||
if output == "": # check if the subprocess failed
|
except subprocess.TimeoutExpired:
|
||||||
mylog('verbose', [f'[{pluginName}] Scan: FAIL - check logs'])
|
mylog('verbose', [f'[{pluginName}] TIMEOUT - the process forcefully terminated as timeout reached'])
|
||||||
else:
|
|
||||||
|
if output == "": # check if the subprocess failed
|
||||||
|
mylog('verbose', [f'[{pluginName}] Scan: FAIL - check logs'])
|
||||||
|
else:
|
||||||
mylog('verbose', [f'[{pluginName}] Scan: SUCCESS'])
|
mylog('verbose', [f'[{pluginName}] Scan: SUCCESS'])
|
||||||
|
|
||||||
return '', ''
|
return '', ''
|
||||||
|
|
||||||
|
|
||||||
|
# ===============================================================================
|
||||||
|
# BEGIN
|
||||||
|
# ===============================================================================
|
||||||
if __name__ == '__main__':
|
if __name__ == '__main__':
|
||||||
main()
|
main()
|
||||||
|
|
||||||
|
|||||||
@@ -419,6 +419,41 @@
|
|||||||
}
|
}
|
||||||
]
|
]
|
||||||
},
|
},
|
||||||
|
{
|
||||||
|
"function": "IP_MATCH_NAME",
|
||||||
|
"type": {
|
||||||
|
"dataType": "boolean",
|
||||||
|
"elements": [
|
||||||
|
{
|
||||||
|
"elementType": "input",
|
||||||
|
"elementOptions": [
|
||||||
|
{
|
||||||
|
"type": "checkbox"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"transformers": []
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"default_value": true,
|
||||||
|
"options": [],
|
||||||
|
"localized": [
|
||||||
|
"name",
|
||||||
|
"description"
|
||||||
|
],
|
||||||
|
"name": [
|
||||||
|
{
|
||||||
|
"language_code": "en_us",
|
||||||
|
"string": "Name IP match"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"description": [
|
||||||
|
{
|
||||||
|
"language_code": "en_us",
|
||||||
|
"string": "If checked, the application will guess the name also by IPs, not only MACs. This approach works if your IPs are mostly static."
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
{
|
{
|
||||||
"function": "replace_preset_icon",
|
"function": "replace_preset_icon",
|
||||||
"type": {
|
"type": {
|
||||||
|
|||||||
@@ -448,7 +448,7 @@
|
|||||||
"description": [
|
"description": [
|
||||||
{
|
{
|
||||||
"language_code": "en_us",
|
"language_code": "en_us",
|
||||||
"string": "When scanning remote networks, NMAP can only retrieve the IP address, not the MAC address. Enabling this setting generates a fake MAC address from the IP address to track devices, but it may cause inconsistencies if IPs change or devices are rediscovered. Static IPs are recommended. Device type and icon will not be detected correctly. When unchecked, devices with empty MAC addresses are skipped."
|
"string": "When scanning remote networks, NMAP can only retrieve the IP address, not the MAC address. Enabling the FAKE_MAC setting generates a fake MAC address from the IP address to track devices, but it may cause inconsistencies if IPs change or devices are re-discovered with a different MAC. Static IPs are recommended. Device type and icon might not be detected correctly and some plugins might fail if they depend on a valid MAC address. When unchecked, devices with empty MAC addresses are skipped."
|
||||||
}
|
}
|
||||||
]
|
]
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -7,19 +7,18 @@ import subprocess
|
|||||||
import sys
|
import sys
|
||||||
import hashlib
|
import hashlib
|
||||||
import re
|
import re
|
||||||
import nmap
|
import nmap
|
||||||
|
|
||||||
# Register NetAlertX directories
|
# Register NetAlertX directories
|
||||||
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
|
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
|
||||||
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
|
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
|
||||||
|
|
||||||
from plugin_helper import Plugin_Objects
|
from plugin_helper import Plugin_Objects # noqa: E402 [flake8 lint suppression]
|
||||||
from logger import mylog, Logger
|
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
|
||||||
from helper import get_setting_value
|
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
|
||||||
from const import logPath
|
from const import logPath # noqa: E402 [flake8 lint suppression]
|
||||||
from database import DB
|
import conf # noqa: E402 [flake8 lint suppression]
|
||||||
import conf
|
from pytz import timezone # noqa: E402 [flake8 lint suppression]
|
||||||
from pytz import timezone
|
|
||||||
|
|
||||||
# Make sure the TIMEZONE for logging is correct
|
# Make sure the TIMEZONE for logging is correct
|
||||||
conf.tz = timezone(get_setting_value('TIMEZONE'))
|
conf.tz = timezone(get_setting_value('TIMEZONE'))
|
||||||
@@ -37,46 +36,46 @@ RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
|
|||||||
|
|
||||||
def main():
|
def main():
|
||||||
|
|
||||||
mylog('verbose', [f'[{pluginName}] In script'])
|
mylog('verbose', [f'[{pluginName}] In script'])
|
||||||
|
|
||||||
timeout = get_setting_value('NMAPDEV_RUN_TIMEOUT')
|
timeout = get_setting_value('NMAPDEV_RUN_TIMEOUT')
|
||||||
fakeMac = get_setting_value('NMAPDEV_FAKE_MAC')
|
fakeMac = get_setting_value('NMAPDEV_FAKE_MAC')
|
||||||
subnets = get_setting_value('SCAN_SUBNETS')
|
subnets = get_setting_value('SCAN_SUBNETS')
|
||||||
args = get_setting_value('NMAPDEV_ARGS')
|
args = get_setting_value('NMAPDEV_ARGS')
|
||||||
|
|
||||||
mylog('verbose', [f'[{pluginName}] subnets: ', subnets])
|
mylog('verbose', [f'[{pluginName}] subnets: ', subnets])
|
||||||
|
|
||||||
|
|
||||||
# Initialize the Plugin obj output file
|
# Initialize the Plugin obj output file
|
||||||
plugin_objects = Plugin_Objects(RESULT_FILE)
|
plugin_objects = Plugin_Objects(RESULT_FILE)
|
||||||
|
|
||||||
unique_devices = execute_scan(subnets, timeout, fakeMac, args)
|
unique_devices = execute_scan(subnets, timeout, fakeMac, args)
|
||||||
|
|
||||||
mylog('verbose', [f'[{pluginName}] Devices found: {len(unique_devices)}'])
|
mylog('verbose', [f'[{pluginName}] Devices found: {len(unique_devices)}'])
|
||||||
|
|
||||||
for device in unique_devices:
|
for device in unique_devices:
|
||||||
|
|
||||||
plugin_objects.add_object(
|
plugin_objects.add_object(
|
||||||
# "MAC", "IP", "Name", "Vendor", "Interface"
|
# "MAC", "IP", "Name", "Vendor", "Interface"
|
||||||
primaryId = device['mac'].lower(),
|
primaryId = device['mac'].lower(),
|
||||||
secondaryId = device['ip'],
|
secondaryId = device['ip'],
|
||||||
watched1 = device['name'],
|
watched1 = device['name'],
|
||||||
watched2 = device['vendor'],
|
watched2 = device['vendor'],
|
||||||
watched3 = device['interface'],
|
watched3 = device['interface'],
|
||||||
watched4 = '',
|
watched4 = '',
|
||||||
extra = '',
|
extra = '',
|
||||||
foreignKey = device['mac'])
|
foreignKey = device['mac']
|
||||||
|
)
|
||||||
|
|
||||||
plugin_objects.write_result_file()
|
plugin_objects.write_result_file()
|
||||||
|
|
||||||
|
mylog('verbose', [f'[{pluginName}] Script finished'])
|
||||||
mylog('verbose', [f'[{pluginName}] Script finished'])
|
|
||||||
|
|
||||||
return 0
|
return 0
|
||||||
|
|
||||||
#===============================================================================
|
|
||||||
|
# ===============================================================================
|
||||||
# Execute scan
|
# Execute scan
|
||||||
#===============================================================================
|
# ===============================================================================
|
||||||
def execute_scan(subnets_list, timeout, fakeMac, args):
|
def execute_scan(subnets_list, timeout, fakeMac, args):
|
||||||
devices_list = []
|
devices_list = []
|
||||||
|
|
||||||
@@ -103,22 +102,25 @@ def execute_scan(subnets_list, timeout, fakeMac, args):
|
|||||||
return devices_list
|
return devices_list
|
||||||
|
|
||||||
|
|
||||||
|
def execute_scan_on_interface(interface, timeout, args):
|
||||||
def execute_scan_on_interface (interface, timeout, args):
|
# Remove unsupported VLAN flags
|
||||||
# Remove unsupported VLAN flags
|
|
||||||
interface = re.sub(r'--vlan=\S+', '', interface).strip()
|
interface = re.sub(r'--vlan=\S+', '', interface).strip()
|
||||||
|
|
||||||
# Prepare command arguments
|
# Prepare command arguments
|
||||||
scan_args = args.split() + interface.replace('--interface=','-e ').split()
|
scan_args = args.split() + interface.replace('--interface=', '-e ').split()
|
||||||
|
|
||||||
|
mylog('verbose', [f'[{pluginName}] scan_args: ', scan_args])
|
||||||
|
|
||||||
mylog('verbose', [f'[{pluginName}] scan_args: ', scan_args])
|
|
||||||
|
|
||||||
try:
|
try:
|
||||||
result = subprocess.check_output(scan_args, universal_newlines=True)
|
result = subprocess.check_output(
|
||||||
|
scan_args,
|
||||||
|
universal_newlines=True,
|
||||||
|
timeout=timeout
|
||||||
|
)
|
||||||
except subprocess.CalledProcessError as e:
|
except subprocess.CalledProcessError as e:
|
||||||
error_type = type(e).__name__
|
error_type = type(e).__name__
|
||||||
result = ""
|
result = ""
|
||||||
mylog('verbose', [f'[{pluginName}] ERROR: ', error_type])
|
mylog('verbose', [f'[{pluginName}] ERROR: ', error_type])
|
||||||
|
|
||||||
return result
|
return result
|
||||||
|
|
||||||
@@ -130,28 +132,25 @@ def parse_nmap_xml(xml_output, interface, fakeMac):
|
|||||||
nm = nmap.PortScanner()
|
nm = nmap.PortScanner()
|
||||||
nm.analyse_nmap_xml_scan(xml_output)
|
nm.analyse_nmap_xml_scan(xml_output)
|
||||||
|
|
||||||
mylog('verbose', [f'[{pluginName}] Number of hosts: ', len(nm.all_hosts())])
|
mylog('verbose', [f'[{pluginName}] Number of hosts: ', len(nm.all_hosts())])
|
||||||
|
|
||||||
for host in nm.all_hosts():
|
for host in nm.all_hosts():
|
||||||
hostname = nm[host].hostname() or '(unknown)'
|
hostname = nm[host].hostname() or '(unknown)'
|
||||||
|
|
||||||
ip = nm[host]['addresses']['ipv4'] if 'ipv4' in nm[host]['addresses'] else ''
|
ip = nm[host]['addresses']['ipv4'] if 'ipv4' in nm[host]['addresses'] else ''
|
||||||
mac = nm[host]['addresses']['mac'] if 'mac' in nm[host]['addresses'] else ''
|
mac = nm[host]['addresses']['mac'] if 'mac' in nm[host]['addresses'] else ''
|
||||||
|
|
||||||
|
mylog('verbose', [f'[{pluginName}] nm[host]: ', nm[host]])
|
||||||
mylog('verbose', [f'[{pluginName}] nm[host]: ', nm[host]])
|
|
||||||
|
|
||||||
vendor = ''
|
vendor = ''
|
||||||
|
|
||||||
if nm[host]['vendor']:
|
if nm[host]['vendor']:
|
||||||
mylog('verbose', [f'[{pluginName}] entry: ', nm[host]['vendor']])
|
mylog('verbose', [f'[{pluginName}] entry: ', nm[host]['vendor']])
|
||||||
|
|
||||||
for key, value in nm[host]['vendor'].items():
|
for key, value in nm[host]['vendor'].items():
|
||||||
vendor = value
|
vendor = value
|
||||||
|
|
||||||
break
|
break
|
||||||
|
|
||||||
|
|
||||||
# Log debug information
|
# Log debug information
|
||||||
mylog('verbose', [f"[{pluginName}] Hostname: {hostname}, IP: {ip}, MAC: {mac}, Vendor: {vendor}"])
|
mylog('verbose', [f"[{pluginName}] Hostname: {hostname}, IP: {ip}, MAC: {mac}, Vendor: {vendor}"])
|
||||||
|
|
||||||
@@ -172,24 +171,24 @@ def parse_nmap_xml(xml_output, interface, fakeMac):
|
|||||||
# MAC or IP missing
|
# MAC or IP missing
|
||||||
mylog('verbose', [f"[{pluginName}] Skipping: {hostname}, IP or MAC missing, or NMAPDEV_GENERATE_MAC setting not enabled"])
|
mylog('verbose', [f"[{pluginName}] Skipping: {hostname}, IP or MAC missing, or NMAPDEV_GENERATE_MAC setting not enabled"])
|
||||||
|
|
||||||
|
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
mylog('verbose', [f"[{pluginName}] Error parsing nmap XML: ", str(e)])
|
mylog('verbose', [f"[{pluginName}] Error parsing nmap XML: ", str(e)])
|
||||||
|
|
||||||
return devices_list
|
return devices_list
|
||||||
|
|
||||||
|
|
||||||
def string_to_mac_hash(input_string):
|
def string_to_mac_hash(input_string):
|
||||||
# Calculate a hash using SHA-256
|
# Calculate a hash using SHA-256
|
||||||
sha256_hash = hashlib.sha256(input_string.encode()).hexdigest()
|
sha256_hash = hashlib.sha256(input_string.encode()).hexdigest()
|
||||||
|
|
||||||
# Take the first 12 characters of the hash and format as a MAC address
|
# Take the first 12 characters of the hash and format as a MAC address
|
||||||
mac_hash = ':'.join(sha256_hash[i:i+2] for i in range(0, 12, 2))
|
mac_hash = ':'.join(sha256_hash[i:i + 2] for i in range(0, 12, 2))
|
||||||
|
|
||||||
return mac_hash
|
return mac_hash
|
||||||
|
|
||||||
#===============================================================================
|
|
||||||
|
# ===============================================================================
|
||||||
# BEGIN
|
# BEGIN
|
||||||
#===============================================================================
|
# ===============================================================================
|
||||||
if __name__ == '__main__':
|
if __name__ == '__main__':
|
||||||
main()
|
main()
|
||||||
|
|||||||
@@ -9,13 +9,13 @@ import subprocess
|
|||||||
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
|
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
|
||||||
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
|
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
|
||||||
|
|
||||||
from plugin_helper import Plugin_Objects, decodeBase64
|
from plugin_helper import Plugin_Objects # noqa: E402 [flake8 lint suppression]
|
||||||
from logger import mylog, Logger, append_line_to_file
|
from logger import mylog, Logger, append_line_to_file # noqa: E402 [flake8 lint suppression]
|
||||||
from utils.datetime_utils import timeNowDB
|
from utils.datetime_utils import timeNowDB # noqa: E402 [flake8 lint suppression]
|
||||||
from helper import get_setting_value
|
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
|
||||||
from const import logPath
|
from const import logPath # noqa: E402 [flake8 lint suppression]
|
||||||
import conf
|
import conf # noqa: E402 [flake8 lint suppression]
|
||||||
from pytz import timezone
|
from pytz import timezone # noqa: E402 [flake8 lint suppression]
|
||||||
|
|
||||||
# Make sure the TIMEZONE for logging is correct
|
# Make sure the TIMEZONE for logging is correct
|
||||||
conf.tz = timezone(get_setting_value('TIMEZONE'))
|
conf.tz = timezone(get_setting_value('TIMEZONE'))
|
||||||
@@ -29,37 +29,64 @@ LOG_PATH = logPath + '/plugins'
|
|||||||
LOG_FILE = os.path.join(LOG_PATH, f'script.{pluginName}.log')
|
LOG_FILE = os.path.join(LOG_PATH, f'script.{pluginName}.log')
|
||||||
RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
|
RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
|
||||||
|
|
||||||
#-------------------------------------------------------------------------------
|
# Initialize the Plugin obj output file
|
||||||
|
plugin_objects = Plugin_Objects(RESULT_FILE)
|
||||||
|
|
||||||
|
|
||||||
|
# -------------------------------------------------------------------------------
|
||||||
def main():
|
def main():
|
||||||
parser = argparse.ArgumentParser(description='Scan ports of devices specified by IP addresses')
|
parser = argparse.ArgumentParser(
|
||||||
parser.add_argument('ips', nargs='+', help="list of IPs to scan")
|
description='Scan ports of devices specified by IP addresses'
|
||||||
parser.add_argument('macs', nargs='+', help="list of MACs related to the supplied IPs in the same order")
|
)
|
||||||
parser.add_argument('timeout', nargs='+', help="timeout")
|
|
||||||
parser.add_argument('args', nargs='+', help="args")
|
|
||||||
values = parser.parse_args()
|
|
||||||
|
|
||||||
# Plugin_Objects is a class that reads data from the RESULT_FILE
|
# Accept ANY key=value pairs
|
||||||
# and returns a list of results.
|
parser.add_argument('params', nargs='+', help="key=value style params")
|
||||||
plugin_objects = Plugin_Objects(RESULT_FILE)
|
|
||||||
|
|
||||||
# Print a message to indicate that the script is starting.
|
raw = parser.parse_args()
|
||||||
mylog('debug', [f'[{pluginName}] In script '])
|
|
||||||
|
|
||||||
# Printing the params list to check its content.
|
try:
|
||||||
mylog('debug', [f'[{pluginName}] values.ips: ', values.ips])
|
args = parse_kv_args(raw.params)
|
||||||
mylog('debug', [f'[{pluginName}] values.macs: ', values.macs])
|
except ValueError as e:
|
||||||
mylog('debug', [f'[{pluginName}] values.timeout: ', values.timeout])
|
mylog('error', [f"[{pluginName}] Argument error: {e}"])
|
||||||
mylog('debug', [f'[{pluginName}] values.args: ', values.args])
|
sys.exit(1)
|
||||||
|
|
||||||
argsDecoded = decodeBase64(values.args[0].split('=b')[1])
|
# Required keys
|
||||||
|
required = ['ips', 'macs']
|
||||||
|
for key in required:
|
||||||
|
if key not in args:
|
||||||
|
mylog('error', [f"[{pluginName}] Missing required parameter: {key}"])
|
||||||
|
sys.exit(1)
|
||||||
|
|
||||||
mylog('debug', [f'[{pluginName}] argsDecoded: ', argsDecoded])
|
# Parse lists
|
||||||
|
ip_list = safe_split_list(args['ips'], "ips")
|
||||||
|
mac_list = safe_split_list(args['macs'], "macs")
|
||||||
|
|
||||||
entries = performNmapScan(values.ips[0].split('=')[1].split(','), values.macs[0].split('=')[1].split(',') , values.timeout[0].split('=')[1], argsDecoded)
|
if len(ip_list) != len(mac_list):
|
||||||
|
mylog('error', [
|
||||||
|
f"[{pluginName}] Mismatch: {len(ip_list)} IPs but {len(mac_list)} MACs"
|
||||||
|
])
|
||||||
|
sys.exit(1)
|
||||||
|
|
||||||
|
# Optional
|
||||||
|
timeout = int(args.get("timeout", get_setting_value("NMAP_RUN_TIMEOUT")))
|
||||||
|
|
||||||
|
NMAP_ARGS = get_setting_value("NMAP_ARGS")
|
||||||
|
|
||||||
|
mylog('debug', [f'[{pluginName}] Parsed IPs: {ip_list}'])
|
||||||
|
mylog('debug', [f'[{pluginName}] Parsed MACs: {mac_list}'])
|
||||||
|
mylog('debug', [f'[{pluginName}] Timeout: {timeout}'])
|
||||||
|
mylog('debug', [f'[{pluginName}] NMAP_ARGS: {NMAP_ARGS}'])
|
||||||
|
|
||||||
|
entries = performNmapScan(
|
||||||
|
ip_list,
|
||||||
|
mac_list,
|
||||||
|
timeout,
|
||||||
|
NMAP_ARGS
|
||||||
|
)
|
||||||
|
|
||||||
mylog('verbose', [f'[{pluginName}] Total number of ports found by NMAP: ', len(entries)])
|
mylog('verbose', [f'[{pluginName}] Total number of ports found by NMAP: ', len(entries)])
|
||||||
|
|
||||||
for entry in entries:
|
for entry in entries:
|
||||||
|
|
||||||
plugin_objects.add_object(
|
plugin_objects.add_object(
|
||||||
primaryId = entry.mac, # MAC (Device Name)
|
primaryId = entry.mac, # MAC (Device Name)
|
||||||
@@ -68,14 +95,14 @@ def main():
|
|||||||
watched2 = entry.service,
|
watched2 = entry.service,
|
||||||
watched3 = entry.ip + ":" + entry.port,
|
watched3 = entry.ip + ":" + entry.port,
|
||||||
watched4 = "",
|
watched4 = "",
|
||||||
extra = entry.extra,
|
extra = entry.extra,
|
||||||
foreignKey = entry.mac
|
foreignKey = entry.mac
|
||||||
)
|
)
|
||||||
|
|
||||||
plugin_objects.write_result_file()
|
plugin_objects.write_result_file()
|
||||||
|
|
||||||
#-------------------------------------------------------------------------------
|
|
||||||
|
|
||||||
|
|
||||||
|
# -------------------------------------------------------------------------------
|
||||||
class nmap_entry:
|
class nmap_entry:
|
||||||
def __init__(self, ip, mac, time, port, state, service, name = '', extra = '', index = 0):
|
def __init__(self, ip, mac, time, port, state, service, name = '', extra = '', index = 0):
|
||||||
self.ip = ip
|
self.ip = ip
|
||||||
@@ -83,13 +110,44 @@ class nmap_entry:
|
|||||||
self.time = time
|
self.time = time
|
||||||
self.port = port
|
self.port = port
|
||||||
self.state = state
|
self.state = state
|
||||||
self.service = service
|
self.service = service
|
||||||
self.extra = extra
|
self.extra = extra
|
||||||
self.index = index
|
self.index = index
|
||||||
self.hash = str(mac) + str(port)+ str(state)+ str(service)
|
self.hash = str(mac) + str(port) + str(state) + str(service)
|
||||||
|
|
||||||
|
|
||||||
#-------------------------------------------------------------------------------
|
# -------------------------------------------------------------------------------
|
||||||
|
def parse_kv_args(raw_args):
|
||||||
|
"""
|
||||||
|
Converts ['ips=a,b,c', 'macs=x,y,z', 'timeout=5'] to a dict.
|
||||||
|
Ignores unknown keys.
|
||||||
|
"""
|
||||||
|
parsed = {}
|
||||||
|
|
||||||
|
for item in raw_args:
|
||||||
|
if '=' not in item:
|
||||||
|
mylog('none', [f"[{pluginName}] Scan: Invalid parameter (missing '='): {item}"])
|
||||||
|
|
||||||
|
key, value = item.split('=', 1)
|
||||||
|
|
||||||
|
if key in parsed:
|
||||||
|
mylog('none', [f"[{pluginName}] Scan: Duplicate parameter supplied: {key}"])
|
||||||
|
|
||||||
|
parsed[key] = value
|
||||||
|
|
||||||
|
return parsed
|
||||||
|
|
||||||
|
|
||||||
|
# -------------------------------------------------------------------------------
|
||||||
|
def safe_split_list(value, keyname):
|
||||||
|
"""Split comma list safely and ensure no empty items."""
|
||||||
|
items = [x.strip() for x in value.split(',') if x.strip()]
|
||||||
|
if not items:
|
||||||
|
mylog('none', [f"[{pluginName}] Scan: {keyname} list is empty or invalid"])
|
||||||
|
return items
|
||||||
|
|
||||||
|
|
||||||
|
# -------------------------------------------------------------------------------
|
||||||
def performNmapScan(deviceIPs, deviceMACs, timeoutSec, args):
|
def performNmapScan(deviceIPs, deviceMACs, timeoutSec, args):
|
||||||
"""
|
"""
|
||||||
run nmap scan on a list of devices
|
run nmap scan on a list of devices
|
||||||
@@ -99,15 +157,12 @@ def performNmapScan(deviceIPs, deviceMACs, timeoutSec, args):
|
|||||||
# collect ports / new Nmap Entries
|
# collect ports / new Nmap Entries
|
||||||
newEntriesTmp = []
|
newEntriesTmp = []
|
||||||
|
|
||||||
|
if len(deviceIPs) > 0:
|
||||||
if len(deviceIPs) > 0:
|
|
||||||
|
|
||||||
devTotal = len(deviceIPs)
|
devTotal = len(deviceIPs)
|
||||||
|
|
||||||
|
|
||||||
mylog('verbose', [f'[{pluginName}] Scan: Nmap for max ', str(timeoutSec), 's ('+ str(round(int(timeoutSec) / 60, 1)) +'min) per device'])
|
|
||||||
mylog('verbose', ["[NMAP Scan] Estimated max delay: ", (devTotal * int(timeoutSec)), 's ', '(', round((devTotal * int(timeoutSec))/60,1) , 'min)' ])
|
|
||||||
|
|
||||||
|
mylog('verbose', [f'[{pluginName}] Scan: Nmap for max ', str(timeoutSec), 's (' + str(round(int(timeoutSec) / 60, 1)) + 'min) per device'])
|
||||||
|
mylog('verbose', ["[NMAP Scan] Estimated max delay: ", (devTotal * int(timeoutSec)), 's ', '(', round((devTotal * int(timeoutSec)) / 60, 1) , 'min)'])
|
||||||
|
|
||||||
devIndex = 0
|
devIndex = 0
|
||||||
for ip in deviceIPs:
|
for ip in deviceIPs:
|
||||||
@@ -116,67 +171,63 @@ def performNmapScan(deviceIPs, deviceMACs, timeoutSec, args):
|
|||||||
# prepare arguments from user supplied ones
|
# prepare arguments from user supplied ones
|
||||||
nmapArgs = ['nmap'] + args.split() + [ip]
|
nmapArgs = ['nmap'] + args.split() + [ip]
|
||||||
|
|
||||||
progress = ' (' + str(devIndex+1) + '/' + str(devTotal) + ')'
|
progress = ' (' + str(devIndex + 1) + '/' + str(devTotal) + ')'
|
||||||
|
|
||||||
try:
|
try:
|
||||||
# try runnning a subprocess with a forced (timeout) in case the subprocess hangs
|
# try runnning a subprocess with a forced (timeout) in case the subprocess hangs
|
||||||
output = subprocess.check_output (nmapArgs, universal_newlines=True, stderr=subprocess.STDOUT, timeout=(float(timeoutSec)))
|
output = subprocess.check_output(
|
||||||
|
nmapArgs,
|
||||||
|
universal_newlines=True,
|
||||||
|
stderr=subprocess.STDOUT,
|
||||||
|
timeout=(float(timeoutSec))
|
||||||
|
)
|
||||||
except subprocess.CalledProcessError as e:
|
except subprocess.CalledProcessError as e:
|
||||||
# An error occured, handle it
|
# An error occured, handle it
|
||||||
mylog('none', ["[NMAP Scan] " ,e.output])
|
mylog('none', ["[NMAP Scan] ", e.output])
|
||||||
mylog('none', ["[NMAP Scan] ⚠ ERROR - Nmap Scan - check logs", progress])
|
mylog('none', ["[NMAP Scan] ⚠ ERROR - Nmap Scan - check logs", progress])
|
||||||
except subprocess.TimeoutExpired:
|
except subprocess.TimeoutExpired:
|
||||||
mylog('verbose', [f'[{pluginName}] Nmap TIMEOUT - the process forcefully terminated as timeout reached for ', ip, progress])
|
mylog('verbose', [f'[{pluginName}] Nmap TIMEOUT - the process forcefully terminated as timeout reached for ', ip, progress])
|
||||||
|
|
||||||
if output == "": # check if the subprocess failed
|
if output == "": # check if the subprocess failed
|
||||||
mylog('minimal', [f'[{pluginName}] Nmap FAIL for ', ip, progress ,' check logs for details'])
|
mylog('minimal', [f'[{pluginName}] Nmap FAIL for ', ip, progress, ' check logs for details'])
|
||||||
else:
|
else:
|
||||||
mylog('verbose', [f'[{pluginName}] Nmap SUCCESS for ', ip, progress])
|
mylog('verbose', [f'[{pluginName}] Nmap SUCCESS for ', ip, progress])
|
||||||
|
|
||||||
|
# check the last run output
|
||||||
|
|
||||||
# check the last run output
|
|
||||||
newLines = output.split('\n')
|
newLines = output.split('\n')
|
||||||
|
|
||||||
# regular logging
|
# regular logging
|
||||||
for line in newLines:
|
for line in newLines:
|
||||||
append_line_to_file (logPath + '/app_nmap.log', line +'\n')
|
append_line_to_file(logPath + '/app_nmap.log', line + '\n')
|
||||||
|
|
||||||
|
|
||||||
index = 0
|
index = 0
|
||||||
startCollecting = False
|
startCollecting = False
|
||||||
duration = ""
|
duration = ""
|
||||||
newPortsPerDevice = 0
|
newPortsPerDevice = 0
|
||||||
for line in newLines:
|
for line in newLines:
|
||||||
if 'Starting Nmap' in line:
|
if 'Starting Nmap' in line:
|
||||||
if len(newLines) > index+1 and 'Note: Host seems down' in newLines[index+1]:
|
if len(newLines) > index + 1 and 'Note: Host seems down' in newLines[index + 1]:
|
||||||
break # this entry is empty
|
break # this entry is empty
|
||||||
elif 'PORT' in line and 'STATE' in line and 'SERVICE' in line:
|
elif 'PORT' in line and 'STATE' in line and 'SERVICE' in line:
|
||||||
startCollecting = True
|
startCollecting = True
|
||||||
elif 'PORT' in line and 'STATE' in line and 'SERVICE' in line:
|
elif 'PORT' in line and 'STATE' in line and 'SERVICE' in line:
|
||||||
startCollecting = False # end reached
|
startCollecting = False # end reached
|
||||||
elif startCollecting and len(line.split()) == 3:
|
elif startCollecting and len(line.split()) == 3:
|
||||||
newEntriesTmp.append(nmap_entry(ip, deviceMACs[devIndex], timeNowDB(), line.split()[0], line.split()[1], line.split()[2]))
|
newEntriesTmp.append(nmap_entry(ip, deviceMACs[devIndex], timeNowDB(), line.split()[0], line.split()[1], line.split()[2]))
|
||||||
newPortsPerDevice += 1
|
newPortsPerDevice += 1
|
||||||
elif 'Nmap done' in line:
|
elif 'Nmap done' in line:
|
||||||
duration = line.split('scanned in ')[1]
|
duration = line.split('scanned in ')[1]
|
||||||
|
|
||||||
mylog('verbose', [f'[{pluginName}] {newPortsPerDevice} ports found on {deviceMACs[devIndex]}'])
|
mylog('verbose', [f'[{pluginName}] {newPortsPerDevice} ports found on {deviceMACs[devIndex]} after {duration}'])
|
||||||
|
|
||||||
index += 1
|
index += 1
|
||||||
devIndex += 1
|
devIndex += 1
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
#end for loop
|
|
||||||
|
|
||||||
return newEntriesTmp
|
return newEntriesTmp
|
||||||
|
|
||||||
#===============================================================================
|
|
||||||
|
# ===============================================================================
|
||||||
# BEGIN
|
# BEGIN
|
||||||
#===============================================================================
|
# ===============================================================================
|
||||||
if __name__ == '__main__':
|
if __name__ == '__main__':
|
||||||
main()
|
main()
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@@ -11,14 +11,14 @@ import re
|
|||||||
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
|
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
|
||||||
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
|
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
|
||||||
|
|
||||||
from plugin_helper import Plugin_Objects
|
from plugin_helper import Plugin_Objects # noqa: E402 [flake8 lint suppression]
|
||||||
from logger import mylog, Logger
|
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
|
||||||
from helper import get_setting_value
|
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
|
||||||
from const import logPath
|
from const import logPath # noqa: E402 [flake8 lint suppression]
|
||||||
from database import DB
|
from database import DB # noqa: E402 [flake8 lint suppression]
|
||||||
from models.device_instance import DeviceInstance
|
from models.device_instance import DeviceInstance # noqa: E402 [flake8 lint suppression]
|
||||||
import conf
|
import conf # noqa: E402 [flake8 lint suppression]
|
||||||
from pytz import timezone
|
from pytz import timezone # noqa: E402 [flake8 lint suppression]
|
||||||
|
|
||||||
# Make sure the TIMEZONE for logging is correct
|
# Make sure the TIMEZONE for logging is correct
|
||||||
conf.tz = timezone(get_setting_value('TIMEZONE'))
|
conf.tz = timezone(get_setting_value('TIMEZONE'))
|
||||||
@@ -33,11 +33,9 @@ LOG_FILE = os.path.join(LOG_PATH, f'script.{pluginName}.log')
|
|||||||
RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
|
RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
def main():
|
def main():
|
||||||
|
|
||||||
mylog('verbose', [f'[{pluginName}] In script'])
|
mylog('verbose', [f'[{pluginName}] In script'])
|
||||||
|
|
||||||
|
|
||||||
timeout = get_setting_value('NSLOOKUP_RUN_TIMEOUT')
|
timeout = get_setting_value('NSLOOKUP_RUN_TIMEOUT')
|
||||||
|
|
||||||
@@ -52,13 +50,13 @@ def main():
|
|||||||
device_handler = DeviceInstance(db)
|
device_handler = DeviceInstance(db)
|
||||||
|
|
||||||
# Retrieve devices
|
# Retrieve devices
|
||||||
if get_setting_value("REFRESH_FQDN"):
|
if get_setting_value("REFRESH_FQDN"):
|
||||||
devices = device_handler.getAll()
|
devices = device_handler.getAll()
|
||||||
else:
|
else:
|
||||||
devices = device_handler.getUnknown()
|
devices = device_handler.getUnknown()
|
||||||
|
|
||||||
mylog('verbose', [f'[{pluginName}] Devices count: {len(devices)}'])
|
mylog('verbose', [f'[{pluginName}] Devices count: {len(devices)}'])
|
||||||
|
|
||||||
# TEST - below is a WINDOWS host IP
|
# TEST - below is a WINDOWS host IP
|
||||||
# execute_name_lookup('192.168.1.121', timeout)
|
# execute_name_lookup('192.168.1.121', timeout)
|
||||||
|
|
||||||
@@ -67,31 +65,32 @@ def main():
|
|||||||
|
|
||||||
if domain_name != '':
|
if domain_name != '':
|
||||||
plugin_objects.add_object(
|
plugin_objects.add_object(
|
||||||
# "MAC", "IP", "Server", "Name"
|
# "MAC", "IP", "Server", "Name"
|
||||||
primaryId = device['devMac'],
|
primaryId = device['devMac'],
|
||||||
secondaryId = device['devLastIP'],
|
secondaryId = device['devLastIP'],
|
||||||
watched1 = dns_server,
|
watched1 = dns_server,
|
||||||
watched2 = domain_name,
|
watched2 = domain_name,
|
||||||
watched3 = '',
|
watched3 = '',
|
||||||
watched4 = '',
|
watched4 = '',
|
||||||
extra = '',
|
extra = '',
|
||||||
foreignKey = device['devMac'])
|
foreignKey = device['devMac']
|
||||||
|
)
|
||||||
|
|
||||||
plugin_objects.write_result_file()
|
plugin_objects.write_result_file()
|
||||||
|
|
||||||
|
mylog('verbose', [f'[{pluginName}] Script finished'])
|
||||||
mylog('verbose', [f'[{pluginName}] Script finished'])
|
|
||||||
|
|
||||||
return 0
|
return 0
|
||||||
|
|
||||||
#===============================================================================
|
|
||||||
|
# ===============================================================================
|
||||||
# Execute scan
|
# Execute scan
|
||||||
#===============================================================================
|
# ===============================================================================
|
||||||
def execute_nslookup (ip, timeout):
|
def execute_nslookup(ip, timeout):
|
||||||
"""
|
"""
|
||||||
Execute the NSLOOKUP command on IP.
|
Execute the NSLOOKUP command on IP.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
nslookup_args = ['nslookup', ip]
|
nslookup_args = ['nslookup', ip]
|
||||||
|
|
||||||
# Execute command
|
# Execute command
|
||||||
@@ -99,7 +98,13 @@ def execute_nslookup (ip, timeout):
|
|||||||
|
|
||||||
try:
|
try:
|
||||||
# try runnning a subprocess with a forced (timeout) in case the subprocess hangs
|
# try runnning a subprocess with a forced (timeout) in case the subprocess hangs
|
||||||
output = subprocess.check_output (nslookup_args, universal_newlines=True, stderr=subprocess.STDOUT, timeout=(timeout), text=True)
|
output = subprocess.check_output(
|
||||||
|
nslookup_args,
|
||||||
|
universal_newlines=True,
|
||||||
|
stderr=subprocess.STDOUT,
|
||||||
|
timeout=(timeout),
|
||||||
|
text=True
|
||||||
|
)
|
||||||
|
|
||||||
domain_name = ''
|
domain_name = ''
|
||||||
dns_server = ''
|
dns_server = ''
|
||||||
@@ -110,8 +115,7 @@ def execute_nslookup (ip, timeout):
|
|||||||
domain_pattern = re.compile(r'name\s*=\s*([^\s]+)', re.IGNORECASE)
|
domain_pattern = re.compile(r'name\s*=\s*([^\s]+)', re.IGNORECASE)
|
||||||
server_pattern = re.compile(r'Server:\s+(.+)', re.IGNORECASE)
|
server_pattern = re.compile(r'Server:\s+(.+)', re.IGNORECASE)
|
||||||
|
|
||||||
|
domain_match = domain_pattern.search(output)
|
||||||
domain_match = domain_pattern.search(output)
|
|
||||||
server_match = server_pattern.search(output)
|
server_match = server_pattern.search(output)
|
||||||
|
|
||||||
if domain_match:
|
if domain_match:
|
||||||
@@ -131,24 +135,20 @@ def execute_nslookup (ip, timeout):
|
|||||||
else:
|
else:
|
||||||
mylog('verbose', [f'[{pluginName}]', e.output])
|
mylog('verbose', [f'[{pluginName}]', e.output])
|
||||||
# Handle other errors here
|
# Handle other errors here
|
||||||
# mylog('verbose', [f'[{pluginName}] ⚠ ERROR - check logs'])
|
# mylog('verbose', [f'[{pluginName}] ⚠ ERROR - check logs'])
|
||||||
|
|
||||||
except subprocess.TimeoutExpired:
|
except subprocess.TimeoutExpired:
|
||||||
mylog('verbose', [f'[{pluginName}] TIMEOUT - the process forcefully terminated as timeout reached'])
|
mylog('verbose', [f'[{pluginName}] TIMEOUT - the process forcefully terminated as timeout reached'])
|
||||||
|
|
||||||
|
if output != "": # check if the subprocess failed
|
||||||
|
|
||||||
if output == "": # check if the subprocess failed
|
|
||||||
tmp = 1 # can't have empty
|
|
||||||
# mylog('verbose', [f'[{pluginName}] Scan: FAIL - check logs'])
|
|
||||||
else:
|
|
||||||
mylog('verbose', [f'[{pluginName}] Scan: SUCCESS'])
|
mylog('verbose', [f'[{pluginName}] Scan: SUCCESS'])
|
||||||
|
|
||||||
return '', ''
|
return '', ''
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
#===============================================================================
|
|
||||||
|
# ===============================================================================
|
||||||
# BEGIN
|
# BEGIN
|
||||||
#===============================================================================
|
# ===============================================================================
|
||||||
if __name__ == '__main__':
|
if __name__ == '__main__':
|
||||||
main()
|
main()
|
||||||
|
|||||||
@@ -15,10 +15,9 @@ __version__ = "1.3" # fix detection of the default gateway IP address that woul
|
|||||||
# try to identify and populate their connections by switch/accesspoints and ports/SSID
|
# try to identify and populate their connections by switch/accesspoints and ports/SSID
|
||||||
# try to differentiate root bridges from accessory
|
# try to differentiate root bridges from accessory
|
||||||
|
|
||||||
|
|
||||||
#
|
|
||||||
# sample code to update unbound on opnsense - for reference...
|
# sample code to update unbound on opnsense - for reference...
|
||||||
# curl -X POST -d '{"host":{"enabled":"1","hostname":"test","domain":"testdomain.com","rr":"A","mxprio":"","mx":"","server":"10.0.1.1","description":""}}' -H "Content-Type: application/json" -k -u $OPNS_KEY:$OPNS_SECRET https://$IPFW/api/unbound/settings/AddHostOverride
|
# curl -X POST -d '{"host":{"enabled":"1","hostname":"test","domain":"testdomain.com","rr":"A","mxprio":"","mx":"","server":"10.0.1.1","description":""}}'\
|
||||||
|
# -H "Content-Type: application/json" -k -u $OPNS_KEY:$OPNS_SECRET https://$IPFW/api/unbound/settings/AddHostOverride
|
||||||
#
|
#
|
||||||
import os
|
import os
|
||||||
import sys
|
import sys
|
||||||
@@ -35,12 +34,12 @@ import multiprocessing
|
|||||||
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
|
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
|
||||||
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
|
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
|
||||||
|
|
||||||
from plugin_helper import Plugin_Objects
|
from plugin_helper import Plugin_Objects # noqa: E402 [flake8 lint suppression]
|
||||||
from logger import mylog, Logger
|
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
|
||||||
from const import logPath
|
from const import logPath # noqa: E402 [flake8 lint suppression]
|
||||||
from helper import get_setting_value
|
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
|
||||||
from pytz import timezone
|
from pytz import timezone # noqa: E402 [flake8 lint suppression]
|
||||||
import conf
|
import conf # noqa: E402 [flake8 lint suppression]
|
||||||
|
|
||||||
# Make sure the TIMEZONE for logging is correct
|
# Make sure the TIMEZONE for logging is correct
|
||||||
conf.tz = timezone(get_setting_value('TIMEZONE'))
|
conf.tz = timezone(get_setting_value('TIMEZONE'))
|
||||||
@@ -87,8 +86,6 @@ cMAC, cIP, cNAME, cSWITCH_AP, cPORT_SSID = range(5)
|
|||||||
OMDLOGLEVEL = "debug"
|
OMDLOGLEVEL = "debug"
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
#
|
|
||||||
# translate MAC address from standard ieee model to ietf draft
|
# translate MAC address from standard ieee model to ietf draft
|
||||||
# AA-BB-CC-DD-EE-FF to aa:bb:cc:dd:ee:ff
|
# AA-BB-CC-DD-EE-FF to aa:bb:cc:dd:ee:ff
|
||||||
# tplink adheres to ieee, Nax adheres to ietf
|
# tplink adheres to ieee, Nax adheres to ietf
|
||||||
@@ -137,12 +134,12 @@ def callomada(myargs):
|
|||||||
|
|
||||||
omada_output = ""
|
omada_output = ""
|
||||||
retries = 2
|
retries = 2
|
||||||
while omada_output == "" and retries > 1:
|
while omada_output == "" and retries > 0:
|
||||||
retries = retries - 1
|
retries = retries - 1
|
||||||
try:
|
try:
|
||||||
mf = io.StringIO()
|
mf = io.StringIO()
|
||||||
with redirect_stdout(mf):
|
with redirect_stdout(mf):
|
||||||
bar = omada(myargs)
|
omada(myargs)
|
||||||
omada_output = mf.getvalue()
|
omada_output = mf.getvalue()
|
||||||
except Exception:
|
except Exception:
|
||||||
mylog(
|
mylog(
|
||||||
@@ -186,55 +183,71 @@ def add_uplink(
|
|||||||
sadevices_linksbymac,
|
sadevices_linksbymac,
|
||||||
port_byswitchmac_byclientmac,
|
port_byswitchmac_byclientmac,
|
||||||
):
|
):
|
||||||
# Ensure switch_mac exists in device_data_bymac
|
# Ensure switch exists
|
||||||
if switch_mac not in device_data_bymac:
|
if switch_mac not in device_data_bymac:
|
||||||
mylog("none", [f"[{pluginName}] switch_mac '{switch_mac}' not found in device_data_bymac"])
|
mylog("none", [f"[{pluginName}] switch_mac '{switch_mac}' not found in device_data_bymac"])
|
||||||
return
|
return
|
||||||
|
|
||||||
# Ensure SWITCH_AP key exists in the dictionary
|
|
||||||
if SWITCH_AP not in device_data_bymac[switch_mac]:
|
|
||||||
mylog("none", [f"[{pluginName}] Missing key '{SWITCH_AP}' in device_data_bymac[{switch_mac}]"])
|
|
||||||
return
|
|
||||||
|
|
||||||
# Check if uplink should be added
|
|
||||||
if device_data_bymac[switch_mac][SWITCH_AP] in [None, "null"]:
|
|
||||||
device_data_bymac[switch_mac][SWITCH_AP] = uplink_mac
|
|
||||||
|
|
||||||
# Ensure uplink_mac exists in device_data_bymac
|
dev_switch = device_data_bymac[switch_mac]
|
||||||
|
|
||||||
|
# Ensure list is long enough to contain SWITCH_AP index
|
||||||
|
if len(dev_switch) <= SWITCH_AP:
|
||||||
|
mylog("none", [f"[{pluginName}] SWITCH_AP index {SWITCH_AP} missing in record for {switch_mac}"])
|
||||||
|
return
|
||||||
|
|
||||||
|
# Add uplink only if empty
|
||||||
|
if dev_switch[SWITCH_AP] in (None, "null"):
|
||||||
|
dev_switch[SWITCH_AP] = uplink_mac
|
||||||
|
|
||||||
|
# Validate uplink_mac exists
|
||||||
if uplink_mac not in device_data_bymac:
|
if uplink_mac not in device_data_bymac:
|
||||||
mylog("none", [f"[{pluginName}] uplink_mac '{uplink_mac}' not found in device_data_bymac"])
|
mylog("none", [f"[{pluginName}] uplink_mac '{uplink_mac}' not found in device_data_bymac"])
|
||||||
return
|
return
|
||||||
|
|
||||||
# Determine port to uplink
|
dev_uplink = device_data_bymac[uplink_mac]
|
||||||
if (
|
|
||||||
device_data_bymac[switch_mac].get(TYPE) == "Switch"
|
# Get TYPE safely
|
||||||
and device_data_bymac[uplink_mac].get(TYPE) == "Switch"
|
switch_type = dev_switch[TYPE] if len(dev_switch) > TYPE else None
|
||||||
):
|
uplink_type = dev_uplink[TYPE] if len(dev_uplink) > TYPE else None
|
||||||
|
|
||||||
|
# Switch-to-switch link → use port mapping
|
||||||
|
if switch_type == "Switch" and uplink_type == "Switch":
|
||||||
port_to_uplink = port_byswitchmac_byclientmac.get(switch_mac, {}).get(uplink_mac)
|
port_to_uplink = port_byswitchmac_byclientmac.get(switch_mac, {}).get(uplink_mac)
|
||||||
if port_to_uplink is None:
|
if port_to_uplink is None:
|
||||||
mylog("none", [f"[{pluginName}] Missing port info for switch_mac '{switch_mac}' and uplink_mac '{uplink_mac}'"])
|
mylog("none", [
|
||||||
|
f"[{pluginName}] Missing port info for {switch_mac} → {uplink_mac}"
|
||||||
|
])
|
||||||
return
|
return
|
||||||
else:
|
else:
|
||||||
port_to_uplink = device_data_bymac[uplink_mac].get(PORT_SSID)
|
# Other device types → read PORT_SSID index
|
||||||
|
if len(dev_uplink) <= PORT_SSID:
|
||||||
# Assign port to switch_mac
|
mylog("none", [
|
||||||
device_data_bymac[switch_mac][PORT_SSID] = port_to_uplink
|
f"[{pluginName}] PORT_SSID index missing for uplink {uplink_mac}"
|
||||||
|
])
|
||||||
# Recursively add uplinks for linked devices
|
return
|
||||||
|
port_to_uplink = dev_uplink[PORT_SSID]
|
||||||
|
|
||||||
|
# Assign port to switch
|
||||||
|
if len(dev_switch) > PORT_SSID:
|
||||||
|
dev_switch[PORT_SSID] = port_to_uplink
|
||||||
|
else:
|
||||||
|
mylog("none", [
|
||||||
|
f"[{pluginName}] PORT_SSID index missing in switch {switch_mac}"
|
||||||
|
])
|
||||||
|
|
||||||
|
# Process children recursively
|
||||||
for link in sadevices_linksbymac.get(switch_mac, []):
|
for link in sadevices_linksbymac.get(switch_mac, []):
|
||||||
if (
|
if (
|
||||||
link in device_data_bymac
|
link in device_data_bymac and len(device_data_bymac[link]) > SWITCH_AP and device_data_bymac[link][SWITCH_AP] in (None, "null") and len(dev_switch) > TYPE
|
||||||
and device_data_bymac[link].get(SWITCH_AP) in [None, "null"]
|
|
||||||
and device_data_bymac[switch_mac].get(TYPE) == "Switch"
|
|
||||||
):
|
):
|
||||||
add_uplink(
|
if dev_switch[TYPE] == "Switch":
|
||||||
switch_mac,
|
add_uplink(
|
||||||
link,
|
switch_mac,
|
||||||
device_data_bymac,
|
link,
|
||||||
sadevices_linksbymac,
|
device_data_bymac,
|
||||||
port_byswitchmac_byclientmac,
|
sadevices_linksbymac,
|
||||||
)
|
port_byswitchmac_byclientmac,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
# ----------------------------------------------
|
# ----------------------------------------------
|
||||||
@@ -324,16 +337,16 @@ def main():
|
|||||||
)
|
)
|
||||||
mymac = ieee2ietf_mac_formater(device[MAC])
|
mymac = ieee2ietf_mac_formater(device[MAC])
|
||||||
plugin_objects.add_object(
|
plugin_objects.add_object(
|
||||||
primaryId=mymac, # MAC
|
primaryId=mymac, # MAC
|
||||||
secondaryId=device[IP], # IP
|
secondaryId=device[IP], # IP
|
||||||
watched1=device[NAME], # NAME/HOSTNAME
|
watched1=device[NAME], # NAME/HOSTNAME
|
||||||
watched2=ParentNetworkNode, # PARENT NETWORK NODE MAC
|
watched2=ParentNetworkNode, # PARENT NETWORK NODE MAC
|
||||||
watched3=myport, # PORT
|
watched3=myport, # PORT
|
||||||
watched4=myssid, # SSID
|
watched4=myssid, # SSID
|
||||||
extra=device[TYPE],
|
extra=device[TYPE],
|
||||||
# omada_site, # SITENAME (cur_NetworkSite) or VENDOR (cur_Vendor) (PICK one and adjust config.json -> "column": "Extra")
|
# omada_site, # SITENAME (cur_NetworkSite) or VENDOR (cur_Vendor) (PICK one and adjust config.json -> "column": "Extra")
|
||||||
foreignKey=device[MAC].lower().replace("-", ":"),
|
foreignKey=device[MAC].lower().replace("-", ":"),
|
||||||
) # usually MAC
|
) # usually MAC
|
||||||
|
|
||||||
mylog(
|
mylog(
|
||||||
"verbose",
|
"verbose",
|
||||||
@@ -369,7 +382,6 @@ def get_omada_devices_details(msadevice_data):
|
|||||||
mswitch_dump = callomada(["-t", "myomada", "switch", "-d", mthisswitch])
|
mswitch_dump = callomada(["-t", "myomada", "switch", "-d", mthisswitch])
|
||||||
else:
|
else:
|
||||||
mswitch_detail = ""
|
mswitch_detail = ""
|
||||||
nswitch_dump = ""
|
|
||||||
return mswitch_detail, mswitch_dump
|
return mswitch_detail, mswitch_dump
|
||||||
|
|
||||||
|
|
||||||
@@ -414,7 +426,6 @@ def get_device_data(omada_clients_output, switches_and_aps, device_handler):
|
|||||||
# 17:27:10 [<unique_prefix>] token: "['1A-2B-3C-4D-5E-6F', '192.168.0.217', '1A-2B-3C-4D-5E-6F', '17', '40-AE-30-A5-A7-50, 'Switch']"
|
# 17:27:10 [<unique_prefix>] token: "['1A-2B-3C-4D-5E-6F', '192.168.0.217', '1A-2B-3C-4D-5E-6F', '17', '40-AE-30-A5-A7-50, 'Switch']"
|
||||||
# constants
|
# constants
|
||||||
sadevices_macbyname = {}
|
sadevices_macbyname = {}
|
||||||
sadevices_macbymac = {}
|
|
||||||
sadevices_linksbymac = {}
|
sadevices_linksbymac = {}
|
||||||
port_byswitchmac_byclientmac = {}
|
port_byswitchmac_byclientmac = {}
|
||||||
device_data_bymac = {}
|
device_data_bymac = {}
|
||||||
@@ -427,7 +438,7 @@ def get_device_data(omada_clients_output, switches_and_aps, device_handler):
|
|||||||
def run_command(command, index):
|
def run_command(command, index):
|
||||||
result = subprocess.run(command, capture_output=True, text=True, shell=True)
|
result = subprocess.run(command, capture_output=True, text=True, shell=True)
|
||||||
return str(index), result.stdout.strip()
|
return str(index), result.stdout.strip()
|
||||||
|
|
||||||
myindex, command_output= run_command(command, 2)
|
myindex, command_output= run_command(command, 2)
|
||||||
mylog('verbose', [f'[{pluginName}] command={command} index={myindex} results={command_output}'])
|
mylog('verbose', [f'[{pluginName}] command={command} index={myindex} results={command_output}'])
|
||||||
"""
|
"""
|
||||||
@@ -556,11 +567,11 @@ def get_device_data(omada_clients_output, switches_and_aps, device_handler):
|
|||||||
#
|
#
|
||||||
|
|
||||||
naxname = real_naxname
|
naxname = real_naxname
|
||||||
if real_naxname != None:
|
if real_naxname is not None:
|
||||||
if "(" in real_naxname:
|
if "(" in real_naxname:
|
||||||
# removing parenthesis and domains from the name
|
# removing parenthesis and domains from the name
|
||||||
naxname = real_naxname.split("(")[0]
|
naxname = real_naxname.split("(")[0]
|
||||||
if naxname != None and "." in naxname:
|
if naxname is not None and "." in naxname:
|
||||||
naxname = naxname.split(".")[0]
|
naxname = naxname.split(".")[0]
|
||||||
if naxname in (None, "null", ""):
|
if naxname in (None, "null", ""):
|
||||||
naxname = (
|
naxname = (
|
||||||
|
|||||||
@@ -25,7 +25,6 @@ import sys
|
|||||||
import urllib3
|
import urllib3
|
||||||
import requests
|
import requests
|
||||||
import time
|
import time
|
||||||
import datetime
|
|
||||||
import pytz
|
import pytz
|
||||||
|
|
||||||
from datetime import datetime
|
from datetime import datetime
|
||||||
@@ -35,11 +34,11 @@ from typing import Literal, Any, Dict
|
|||||||
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
|
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
|
||||||
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
|
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
|
||||||
|
|
||||||
from plugin_helper import Plugin_Objects, is_typical_router_ip, is_mac
|
from plugin_helper import Plugin_Objects, is_typical_router_ip, is_mac # noqa: E402 [flake8 lint suppression]
|
||||||
from logger import mylog, Logger
|
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
|
||||||
from const import logPath
|
from const import logPath # noqa: E402 [flake8 lint suppression]
|
||||||
from helper import get_setting_value
|
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
|
||||||
import conf
|
import conf # noqa: E402 [flake8 lint suppression]
|
||||||
|
|
||||||
# Make sure the TIMEZONE for logging is correct
|
# Make sure the TIMEZONE for logging is correct
|
||||||
conf.tz = pytz.timezone(get_setting_value('TIMEZONE'))
|
conf.tz = pytz.timezone(get_setting_value('TIMEZONE'))
|
||||||
@@ -176,7 +175,10 @@ class OmadaHelper:
|
|||||||
# If it's not a gateway try to assign parent node MAC
|
# If it's not a gateway try to assign parent node MAC
|
||||||
if data.get("type", "") != "gateway":
|
if data.get("type", "") != "gateway":
|
||||||
parent_mac = OmadaHelper.normalize_mac(data.get("uplinkDeviceMac"))
|
parent_mac = OmadaHelper.normalize_mac(data.get("uplinkDeviceMac"))
|
||||||
entry["parent_node_mac_address"] = parent_mac.get("response_result") if isinstance(parent_mac, dict) and parent_mac.get("response_type") == "success" else ""
|
|
||||||
|
resp_type = parent_mac.get("response_type")
|
||||||
|
|
||||||
|
entry["parent_node_mac_address"] = parent_mac.get("response_result") if isinstance(parent_mac, dict) and resp_type == "success" else ""
|
||||||
|
|
||||||
# Applicable only for CLIENT
|
# Applicable only for CLIENT
|
||||||
if input_type == "client":
|
if input_type == "client":
|
||||||
@@ -185,15 +187,26 @@ class OmadaHelper:
|
|||||||
# Try to assign parent node MAC and PORT/SSID to the CLIENT
|
# Try to assign parent node MAC and PORT/SSID to the CLIENT
|
||||||
if data.get("connectDevType", "") == "gateway":
|
if data.get("connectDevType", "") == "gateway":
|
||||||
parent_mac = OmadaHelper.normalize_mac(data.get("gatewayMac"))
|
parent_mac = OmadaHelper.normalize_mac(data.get("gatewayMac"))
|
||||||
entry["parent_node_mac_address"] = parent_mac.get("response_result") if isinstance(parent_mac, dict) and parent_mac.get("response_type") == "success" else ""
|
|
||||||
|
resp_type = parent_mac.get("response_type")
|
||||||
|
|
||||||
|
entry["parent_node_mac_address"] = parent_mac.get("response_result") if isinstance(parent_mac, dict) and resp_type == "success" else ""
|
||||||
entry["parent_node_port"] = data.get("port", "")
|
entry["parent_node_port"] = data.get("port", "")
|
||||||
|
|
||||||
elif data.get("connectDevType", "") == "switch":
|
elif data.get("connectDevType", "") == "switch":
|
||||||
parent_mac = OmadaHelper.normalize_mac(data.get("switchMac"))
|
parent_mac = OmadaHelper.normalize_mac(data.get("switchMac"))
|
||||||
entry["parent_node_mac_address"] = parent_mac.get("response_result") if isinstance(parent_mac, dict) and parent_mac.get("response_type") == "success" else ""
|
|
||||||
|
resp_type = parent_mac.get("response_type")
|
||||||
|
|
||||||
|
entry["parent_node_mac_address"] = parent_mac.get("response_result") if isinstance(parent_mac, dict) and resp_type == "success" else ""
|
||||||
entry["parent_node_port"] = data.get("port", "")
|
entry["parent_node_port"] = data.get("port", "")
|
||||||
|
|
||||||
elif data.get("connectDevType", "") == "ap":
|
elif data.get("connectDevType", "") == "ap":
|
||||||
parent_mac = OmadaHelper.normalize_mac(data.get("apMac"))
|
parent_mac = OmadaHelper.normalize_mac(data.get("apMac"))
|
||||||
entry["parent_node_mac_address"] = parent_mac.get("response_result") if isinstance(parent_mac, dict) and parent_mac.get("response_type") == "success" else ""
|
|
||||||
|
resp_type = parent_mac.get("response_type")
|
||||||
|
|
||||||
|
entry["parent_node_mac_address"] = parent_mac.get("response_result") if isinstance(parent_mac, dict) and resp_type == "success" else ""
|
||||||
entry["parent_node_ssid"] = data.get("ssid", "")
|
entry["parent_node_ssid"] = data.get("ssid", "")
|
||||||
|
|
||||||
# Add the entry to the result
|
# Add the entry to the result
|
||||||
@@ -253,7 +266,7 @@ class OmadaAPI:
|
|||||||
"""Return request headers."""
|
"""Return request headers."""
|
||||||
headers = {"Content-type": "application/json"}
|
headers = {"Content-type": "application/json"}
|
||||||
# Add access token to header if requested and available
|
# Add access token to header if requested and available
|
||||||
if include_auth == True:
|
if include_auth is True:
|
||||||
if not self.access_token:
|
if not self.access_token:
|
||||||
OmadaHelper.debug("No access token available for headers")
|
OmadaHelper.debug("No access token available for headers")
|
||||||
else:
|
else:
|
||||||
@@ -283,7 +296,7 @@ class OmadaAPI:
|
|||||||
OmadaHelper.verbose(f"{method} request error: {str(ex)}")
|
OmadaHelper.verbose(f"{method} request error: {str(ex)}")
|
||||||
return OmadaHelper.response("error", f"{method} request failed to endpoint '{endpoint}' with error: {str(ex)}")
|
return OmadaHelper.response("error", f"{method} request failed to endpoint '{endpoint}' with error: {str(ex)}")
|
||||||
|
|
||||||
def authenticate(self) -> Dict[str, any]:
|
def authenticate(self) -> Dict[str, Any]:
|
||||||
"""Make an endpoint request to get access token."""
|
"""Make an endpoint request to get access token."""
|
||||||
OmadaHelper.verbose("Starting authentication process")
|
OmadaHelper.verbose("Starting authentication process")
|
||||||
|
|
||||||
@@ -368,7 +381,7 @@ class OmadaAPI:
|
|||||||
|
|
||||||
# Failed site population
|
# Failed site population
|
||||||
OmadaHelper.debug(f"Site population response: {response}")
|
OmadaHelper.debug(f"Site population response: {response}")
|
||||||
return OmadaHelper.response("error", f"Site population failed - error: {response.get('response_message', 'Not provided')}")
|
return OmadaHelper.response("error", f"Site population failed - error: {response.get('response_message', 'Not provided')}")
|
||||||
|
|
||||||
def requested_sites(self) -> list:
|
def requested_sites(self) -> list:
|
||||||
"""Returns sites requested by user."""
|
"""Returns sites requested by user."""
|
||||||
|
|||||||
133
front/plugins/pihole_api_scan/README.md
Normal file
133
front/plugins/pihole_api_scan/README.md
Normal file
@@ -0,0 +1,133 @@
|
|||||||
|
## Overview - PIHOLEAPI Plugin — Pi-hole v6 Device Import
|
||||||
|
|
||||||
|
The **PIHOLEAPI** plugin lets NetAlertX import network devices directly from a **Pi-hole v6** instance.
|
||||||
|
This turns Pi-hole into an additional discovery source, helping NetAlertX stay aware of devices seen by your DNS server.
|
||||||
|
|
||||||
|
The plugin connects to your Pi-hole’s API and retrieves:
|
||||||
|
|
||||||
|
* MAC addresses
|
||||||
|
* IP addresses
|
||||||
|
* Hostnames (if available)
|
||||||
|
* Vendor info
|
||||||
|
* Last-seen timestamps
|
||||||
|
|
||||||
|
NetAlertX then uses this information to match or create devices in your system.
|
||||||
|
|
||||||
|
> [!TIP]
|
||||||
|
> Some tip.
|
||||||
|
|
||||||
|
### Quick setup guide
|
||||||
|
|
||||||
|
* You are running **Pi-hole v6** or newer.
|
||||||
|
* The Web UI password in **Pi-hole** is set.
|
||||||
|
* Local network devices appear under **Settings → Network** in Pi-hole.
|
||||||
|
|
||||||
|
No additional Pi-hole configuration is required.
|
||||||
|
|
||||||
|
### Usage
|
||||||
|
|
||||||
|
- Head to **Settings** > **Plugin name** to adjust the default values.
|
||||||
|
|
||||||
|
| Setting Key | Description |
|
||||||
|
| ---------------------------- | -------------------------------------------------------------------------------- |
|
||||||
|
| **PIHOLEAPI_URL** | Your Pi-hole base URL. |
|
||||||
|
| **PIHOLEAPI_PASSWORD** | The Web UI base64 encoded (en-/decoding handled by the app) admin password. |
|
||||||
|
| **PIHOLEAPI_SSL_VERIFY** | Whether to verify HTTPS certificates. Disable only for self-signed certificates. |
|
||||||
|
| **PIHOLEAPI_RUN_TIMEOUT** | Request timeout in seconds. |
|
||||||
|
| **PIHOLEAPI_API_MAXCLIENTS** | Maximum number of devices to request from Pi-hole. Defaults are usually fine. |
|
||||||
|
|
||||||
|
### Example Configuration
|
||||||
|
|
||||||
|
| Setting Key | Sample Value |
|
||||||
|
| ---------------------------- | -------------------------------------------------- |
|
||||||
|
| **PIHOLEAPI_URL** | `http://pi.hole/` |
|
||||||
|
| **PIHOLEAPI_PASSWORD** | `passw0rd` |
|
||||||
|
| **PIHOLEAPI_SSL_VERIFY** | `true` |
|
||||||
|
| **PIHOLEAPI_RUN_TIMEOUT** | `30` |
|
||||||
|
| **PIHOLEAPI_API_MAXCLIENTS** | `500` |
|
||||||
|
|
||||||
|
### ⚠️ Troubleshooting
|
||||||
|
|
||||||
|
Below are the most common issues and how to resolve them.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
#### ❌ Authentication failed
|
||||||
|
|
||||||
|
Check the following:
|
||||||
|
|
||||||
|
* The Pi-hole URL is correct and includes a trailing slash
|
||||||
|
|
||||||
|
* `http://192.168.1.10/` ✔
|
||||||
|
* `http://192.168.1.10/admin` ❌
|
||||||
|
* Your Pi-hole password is correct
|
||||||
|
* You are using **Pi-hole v6**, not v5
|
||||||
|
* SSL verification matches your setup (disable for self-signed certificates)
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
#### ❌ Connection error
|
||||||
|
|
||||||
|
Usually caused by:
|
||||||
|
|
||||||
|
* Wrong URL
|
||||||
|
* Wrong HTTP/HTTPS selection
|
||||||
|
* Timeout too low
|
||||||
|
|
||||||
|
Try:
|
||||||
|
|
||||||
|
```
|
||||||
|
PIHOLEAPI_URL = http://<pi-hole-ip>/
|
||||||
|
PIHOLEAPI_RUN_TIMEOUT = 60
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
#### ❌ No devices imported
|
||||||
|
|
||||||
|
Check:
|
||||||
|
|
||||||
|
* Pi-hole shows devices under **Settings → Network**
|
||||||
|
* NetAlertX logs contain:
|
||||||
|
|
||||||
|
```
|
||||||
|
[PIHOLEAPI] Pi-hole API returned data
|
||||||
|
```
|
||||||
|
|
||||||
|
If nothing appears:
|
||||||
|
|
||||||
|
* Pi-hole might be returning empty results
|
||||||
|
* Your network interface list may be empty
|
||||||
|
* A firewall or reverse proxy is blocking access
|
||||||
|
|
||||||
|
Try enabling debug logging:
|
||||||
|
|
||||||
|
```
|
||||||
|
LOG_LEVEL = debug
|
||||||
|
```
|
||||||
|
|
||||||
|
Then re-run the plugin.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
#### ❌ Wrong or missing hostnames
|
||||||
|
|
||||||
|
Pi-hole only reports names it knows from:
|
||||||
|
|
||||||
|
* Local DNS
|
||||||
|
* DHCP leases
|
||||||
|
* Previously seen queries
|
||||||
|
|
||||||
|
If names are missing, confirm they appear in Pi-hole’s own UI first.
|
||||||
|
|
||||||
|
### Notes
|
||||||
|
|
||||||
|
- Additional notes, limitations, Author info.
|
||||||
|
|
||||||
|
- Version: 1.0.0
|
||||||
|
- Author: `jokob-sk`, `leiweibau`
|
||||||
|
- Release Date: `11-2025`
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
|
||||||
476
front/plugins/pihole_api_scan/config.json
Normal file
476
front/plugins/pihole_api_scan/config.json
Normal file
@@ -0,0 +1,476 @@
|
|||||||
|
{
|
||||||
|
"code_name": "pihole_api_scan",
|
||||||
|
"unique_prefix": "PIHOLEAPI",
|
||||||
|
"plugin_type": "device_scanner",
|
||||||
|
"execution_order" : "Layer_0",
|
||||||
|
"enabled": true,
|
||||||
|
"data_source": "script",
|
||||||
|
"mapped_to_table": "CurrentScan",
|
||||||
|
"data_filters": [
|
||||||
|
{
|
||||||
|
"compare_column": "Object_PrimaryID",
|
||||||
|
"compare_operator": "==",
|
||||||
|
"compare_field_id": "txtMacFilter",
|
||||||
|
"compare_js_template": "'{value}'.toString()",
|
||||||
|
"compare_use_quotes": true
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"show_ui": true,
|
||||||
|
"localized": ["display_name", "description", "icon"],
|
||||||
|
"display_name": [
|
||||||
|
{
|
||||||
|
"language_code": "en_us",
|
||||||
|
"string": "PiHole API scan"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"description": [
|
||||||
|
{
|
||||||
|
"language_code": "en_us",
|
||||||
|
"string": "Imports devices from PiHole via APIv6"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"icon": [
|
||||||
|
{
|
||||||
|
"language_code": "en_us",
|
||||||
|
"string": "<i class=\"fa fa-search\"></i>"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"params": [],
|
||||||
|
"settings": [
|
||||||
|
{
|
||||||
|
"function": "RUN",
|
||||||
|
"events": ["run"],
|
||||||
|
"type": {
|
||||||
|
"dataType": "string",
|
||||||
|
"elements": [
|
||||||
|
{ "elementType": "select", "elementOptions": [], "transformers": [] }
|
||||||
|
]
|
||||||
|
},
|
||||||
|
|
||||||
|
"default_value": "disabled",
|
||||||
|
"options": [
|
||||||
|
"disabled",
|
||||||
|
"once",
|
||||||
|
"schedule",
|
||||||
|
"always_after_scan"
|
||||||
|
],
|
||||||
|
"localized": ["name", "description"],
|
||||||
|
"name": [
|
||||||
|
{
|
||||||
|
"language_code": "en_us",
|
||||||
|
"string": "When to run"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"description": [
|
||||||
|
{
|
||||||
|
"language_code": "en_us",
|
||||||
|
"string": "When the plugin should run. Good options are <code>always_after_scan</code>, <code>schedule</code>."
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"function": "RUN_SCHD",
|
||||||
|
"type": {
|
||||||
|
"dataType": "string",
|
||||||
|
"elements": [
|
||||||
|
{
|
||||||
|
"elementType": "span",
|
||||||
|
"elementOptions": [
|
||||||
|
{
|
||||||
|
"cssClasses": "input-group-addon validityCheck"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"getStringKey": "Gen_ValidIcon"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"transformers": []
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"elementType": "input",
|
||||||
|
"elementOptions": [
|
||||||
|
{
|
||||||
|
"onChange": "validateRegex(this)"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"base64Regex": "Xig/OlwqfCg/OlswLTldfFsxLTVdWzAtOV18WzAtOV0rLVswLTldK3xcKi9bMC05XSspKVxzKyg/OlwqfCg/OlswLTldfDFbMC05XXwyWzAtM118WzAtOV0rLVswLTldK3xcKi9bMC05XSspKVxzKyg/OlwqfCg/OlsxLTldfFsxMl1bMC05XXwzWzAxXXxbMC05XSstWzAtOV0rfFwqL1swLTldKykpXHMrKD86XCp8KD86WzEtOV18MVswLTJdfFswLTldKy1bMC05XSt8XCovWzAtOV0rKSlccysoPzpcKnwoPzpbMC02XXxbMC02XS1bMC02XXxcKi9bMC05XSspKSQ="
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"transformers": []
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"default_value": "*/5 * * * *",
|
||||||
|
"options": [],
|
||||||
|
"localized": ["name", "description"],
|
||||||
|
"name": [
|
||||||
|
{
|
||||||
|
"language_code": "en_us",
|
||||||
|
"string": "Schedule"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"description": [
|
||||||
|
{
|
||||||
|
"language_code": "en_us",
|
||||||
|
"string": "Only enabled if you select <code>schedule</code> in the <a href=\"#SYNC_RUN\"><code>SYNC_RUN</code> setting</a>. Make sure you enter the schedule in the correct cron-like format (e.g. validate at <a href=\"https://crontab.guru/\" target=\"_blank\">crontab.guru</a>). For example entering <code>0 4 * * *</code> will run the scan after 4 am in the <a onclick=\"toggleAllSettings()\" href=\"#TIMEZONE\"><code>TIMEZONE</code> you set above</a>. Will be run NEXT time the time passes."
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"function": "URL",
|
||||||
|
"type": {
|
||||||
|
"dataType": "string",
|
||||||
|
"elements": [
|
||||||
|
{ "elementType": "input", "elementOptions": [], "transformers": [] }
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"maxLength": 50,
|
||||||
|
"default_value": "",
|
||||||
|
"options": [],
|
||||||
|
"localized": ["name", "description"],
|
||||||
|
"name": [
|
||||||
|
{
|
||||||
|
"language_code": "en_us",
|
||||||
|
"string": "Setting name"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"description": [
|
||||||
|
{
|
||||||
|
"language_code": "en_us",
|
||||||
|
"string": "URL to your PiHole instance, for example <code>http://pi.hole:8080/</code>"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"function": "PASSWORD",
|
||||||
|
"type": {
|
||||||
|
"dataType": "string",
|
||||||
|
"elements": [
|
||||||
|
{
|
||||||
|
"elementType": "input",
|
||||||
|
"elementOptions": [{ "type": "password" }],
|
||||||
|
"transformers": []
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"default_value": "",
|
||||||
|
"options": [],
|
||||||
|
"localized": ["name", "description"],
|
||||||
|
"name": [
|
||||||
|
{
|
||||||
|
"language_code": "en_us",
|
||||||
|
"string": "Password"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"description": [
|
||||||
|
{
|
||||||
|
"language_code": "en_us",
|
||||||
|
"string": "PiHole WEB UI password."
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"function": "VERIFY_SSL",
|
||||||
|
"type": {
|
||||||
|
"dataType": "boolean",
|
||||||
|
"elements": [
|
||||||
|
{
|
||||||
|
"elementType": "input",
|
||||||
|
"elementOptions": [{ "type": "checkbox" }],
|
||||||
|
"transformers": []
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"default_value": false,
|
||||||
|
"options": [],
|
||||||
|
"localized": ["name", "description"],
|
||||||
|
"name": [
|
||||||
|
{
|
||||||
|
"language_code": "en_us",
|
||||||
|
"string": "Verify SSL"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"description": [
|
||||||
|
{
|
||||||
|
"language_code": "en_us",
|
||||||
|
"string": "Enable TLS support. Disable if you are using a self-signed certificate."
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"function": "API_MAXCLIENTS",
|
||||||
|
"type": {
|
||||||
|
"dataType": "integer",
|
||||||
|
"elements": [
|
||||||
|
{
|
||||||
|
"elementType": "input",
|
||||||
|
"elementOptions": [{ "type": "number" }],
|
||||||
|
"transformers": []
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"default_value": 500,
|
||||||
|
"options": [],
|
||||||
|
"localized": ["name", "description"],
|
||||||
|
"name": [
|
||||||
|
{
|
||||||
|
"language_code": "en_us",
|
||||||
|
"string": "Max Clients"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"description": [
|
||||||
|
{
|
||||||
|
"language_code": "en_us",
|
||||||
|
"string": "Maximum number of devices to import."
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"function": "CMD",
|
||||||
|
"type": {
|
||||||
|
"dataType": "string",
|
||||||
|
"elements": [
|
||||||
|
{
|
||||||
|
"elementType": "input",
|
||||||
|
"elementOptions": [{ "readonly": "true" }],
|
||||||
|
"transformers": []
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"default_value": "python3 /app/front/plugins/pihole_api_scan/pihole_api_scan.py",
|
||||||
|
"options": [],
|
||||||
|
"localized": ["name", "description"],
|
||||||
|
"name": [
|
||||||
|
{
|
||||||
|
"language_code": "en_us",
|
||||||
|
"string": "Command"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"description": [
|
||||||
|
{
|
||||||
|
"language_code": "en_us",
|
||||||
|
"string": "Command to run. This can not be changed"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"function": "RUN_TIMEOUT",
|
||||||
|
"type": {
|
||||||
|
"dataType": "integer",
|
||||||
|
"elements": [
|
||||||
|
{
|
||||||
|
"elementType": "input",
|
||||||
|
"elementOptions": [{ "type": "number" }],
|
||||||
|
"transformers": []
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"default_value": 30,
|
||||||
|
"options": [],
|
||||||
|
"localized": ["name", "description"],
|
||||||
|
"name": [
|
||||||
|
{
|
||||||
|
"language_code": "en_us",
|
||||||
|
"string": "Run timeout"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"description": [
|
||||||
|
{
|
||||||
|
"language_code": "en_us",
|
||||||
|
"string": "Maximum time in seconds to wait for the script to finish. If this time is exceeded the script is aborted."
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"database_column_definitions": [
|
||||||
|
{
|
||||||
|
"column": "Index",
|
||||||
|
"css_classes": "col-sm-2",
|
||||||
|
"show": true,
|
||||||
|
"type": "none",
|
||||||
|
"default_value": "",
|
||||||
|
"options": [],
|
||||||
|
"localized": ["name"],
|
||||||
|
"name": [
|
||||||
|
{
|
||||||
|
"language_code": "en_us",
|
||||||
|
"string": "Index"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"column": "Object_PrimaryID",
|
||||||
|
"mapped_to_column": "cur_MAC",
|
||||||
|
"css_classes": "col-sm-3",
|
||||||
|
"show": true,
|
||||||
|
"type": "device_name_mac",
|
||||||
|
"default_value": "",
|
||||||
|
"options": [],
|
||||||
|
"localized": ["name"],
|
||||||
|
"name": [
|
||||||
|
{
|
||||||
|
"language_code": "en_us",
|
||||||
|
"string": "MAC (name)"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"column": "Object_SecondaryID",
|
||||||
|
"mapped_to_column": "cur_IP",
|
||||||
|
"css_classes": "col-sm-2",
|
||||||
|
"show": true,
|
||||||
|
"type": "device_ip",
|
||||||
|
"default_value": "",
|
||||||
|
"options": [],
|
||||||
|
"localized": ["name"],
|
||||||
|
"name": [
|
||||||
|
{
|
||||||
|
"language_code": "en_us",
|
||||||
|
"string": "IP"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"column": "Watched_Value1",
|
||||||
|
"mapped_to_column": "cur_Name",
|
||||||
|
"css_classes": "col-sm-2",
|
||||||
|
"show": true,
|
||||||
|
"type": "label",
|
||||||
|
"default_value": "",
|
||||||
|
"options": [],
|
||||||
|
"localized": ["name"],
|
||||||
|
"name": [
|
||||||
|
{
|
||||||
|
"language_code": "en_us",
|
||||||
|
"string": "Name"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"column": "Watched_Value2",
|
||||||
|
"mapped_to_column": "cur_Vendor",
|
||||||
|
"css_classes": "col-sm-2",
|
||||||
|
"show": true,
|
||||||
|
"type": "label",
|
||||||
|
"default_value": "",
|
||||||
|
"options": [],
|
||||||
|
"localized": ["name"],
|
||||||
|
"name": [
|
||||||
|
{
|
||||||
|
"language_code": "en_us",
|
||||||
|
"string": "Vendor"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"column": "Watched_Value3",
|
||||||
|
"css_classes": "col-sm-2",
|
||||||
|
"show": true,
|
||||||
|
"type": "label",
|
||||||
|
"default_value": "",
|
||||||
|
"options": [],
|
||||||
|
"localized": ["name"],
|
||||||
|
"name": [
|
||||||
|
{
|
||||||
|
"language_code": "en_us",
|
||||||
|
"string": "Last Query"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"column": "Watched_Value4",
|
||||||
|
"css_classes": "col-sm-2",
|
||||||
|
"show": false,
|
||||||
|
"type": "label",
|
||||||
|
"default_value": "",
|
||||||
|
"options": [],
|
||||||
|
"localized": ["name"],
|
||||||
|
"name": [
|
||||||
|
{
|
||||||
|
"language_code": "en_us",
|
||||||
|
"string": "N/A"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"column": "Dummy",
|
||||||
|
"mapped_to_column": "cur_ScanMethod",
|
||||||
|
"mapped_to_column_data": {
|
||||||
|
"value": "PIHOLEAPI"
|
||||||
|
},
|
||||||
|
"css_classes": "col-sm-2",
|
||||||
|
"show": false,
|
||||||
|
"type": "label",
|
||||||
|
"default_value": "",
|
||||||
|
"options": [],
|
||||||
|
"localized": ["name"],
|
||||||
|
"name": [
|
||||||
|
{
|
||||||
|
"language_code": "en_us",
|
||||||
|
"string": "Scan method"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"column": "DateTimeCreated",
|
||||||
|
"css_classes": "col-sm-2",
|
||||||
|
"show": true,
|
||||||
|
"type": "label",
|
||||||
|
"default_value": "",
|
||||||
|
"options": [],
|
||||||
|
"localized": ["name"],
|
||||||
|
"name": [
|
||||||
|
{
|
||||||
|
"language_code": "en_us",
|
||||||
|
"string": "Created"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"column": "DateTimeChanged",
|
||||||
|
"css_classes": "col-sm-2",
|
||||||
|
"show": true,
|
||||||
|
"type": "label",
|
||||||
|
"default_value": "",
|
||||||
|
"options": [],
|
||||||
|
"localized": ["name"],
|
||||||
|
"name": [
|
||||||
|
{
|
||||||
|
"language_code": "en_us",
|
||||||
|
"string": "Changed"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"column": "Status",
|
||||||
|
"css_classes": "col-sm-1",
|
||||||
|
"show": true,
|
||||||
|
"type": "replace",
|
||||||
|
"default_value": "",
|
||||||
|
"options": [
|
||||||
|
{
|
||||||
|
"equals": "watched-not-changed",
|
||||||
|
"replacement": "<div style='text-align:center'><i class='fa-solid fa-square-check'></i><div></div>"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"equals": "watched-changed",
|
||||||
|
"replacement": "<div style='text-align:center'><i class='fa-solid fa-triangle-exclamation'></i></div>"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"equals": "new",
|
||||||
|
"replacement": "<div style='text-align:center'><i class='fa-solid fa-circle-plus'></i></div>"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"equals": "missing-in-last-scan",
|
||||||
|
"replacement": "<div style='text-align:center'><i class='fa-solid fa-question'></i></div>"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"localized": ["name"],
|
||||||
|
"name": [
|
||||||
|
{
|
||||||
|
"language_code": "en_us",
|
||||||
|
"string": "Status"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
298
front/plugins/pihole_api_scan/pihole_api_scan.py
Normal file
298
front/plugins/pihole_api_scan/pihole_api_scan.py
Normal file
@@ -0,0 +1,298 @@
|
|||||||
|
#!/usr/bin/env python
|
||||||
|
"""
|
||||||
|
NetAlertX plugin: PIHOLEAPI
|
||||||
|
Imports devices from Pi-hole v6 API (Network endpoints) into NetAlertX plugin results.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import os
|
||||||
|
import sys
|
||||||
|
import datetime
|
||||||
|
import requests
|
||||||
|
import json
|
||||||
|
from requests.packages.urllib3.exceptions import InsecureRequestWarning
|
||||||
|
|
||||||
|
# --- NetAlertX plugin bootstrap (match example) ---
|
||||||
|
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
|
||||||
|
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
|
||||||
|
|
||||||
|
pluginName = 'PIHOLEAPI'
|
||||||
|
|
||||||
|
from plugin_helper import Plugin_Objects, is_mac # noqa: E402 [flake8 lint suppression]
|
||||||
|
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
|
||||||
|
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
|
||||||
|
from const import logPath # noqa: E402 [flake8 lint suppression]
|
||||||
|
import conf # noqa: E402 [flake8 lint suppression]
|
||||||
|
from pytz import timezone # noqa: E402 [flake8 lint suppression]
|
||||||
|
|
||||||
|
# Setup timezone & logger using standard NAX helpers
|
||||||
|
conf.tz = timezone(get_setting_value('TIMEZONE'))
|
||||||
|
Logger(get_setting_value('LOG_LEVEL'))
|
||||||
|
|
||||||
|
LOG_PATH = logPath + '/plugins'
|
||||||
|
RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
|
||||||
|
|
||||||
|
plugin_objects = Plugin_Objects(RESULT_FILE)
|
||||||
|
|
||||||
|
# --- Global state for session ---
|
||||||
|
PIHOLEAPI_URL = None
|
||||||
|
PIHOLEAPI_PASSWORD = None
|
||||||
|
PIHOLEAPI_SES_VALID = False
|
||||||
|
PIHOLEAPI_SES_SID = None
|
||||||
|
PIHOLEAPI_SES_CSRF = None
|
||||||
|
PIHOLEAPI_API_MAXCLIENTS = None
|
||||||
|
PIHOLEAPI_VERIFY_SSL = True
|
||||||
|
PIHOLEAPI_RUN_TIMEOUT = 10
|
||||||
|
VERSION_DATE = "NAX-PIHOLEAPI-1.0"
|
||||||
|
|
||||||
|
|
||||||
|
# ------------------------------------------------------------------
|
||||||
|
def pihole_api_auth():
    """
    Authenticate against the Pi-hole v6 API and cache the session in globals.

    On success sets PIHOLEAPI_SES_VALID / PIHOLEAPI_SES_SID /
    PIHOLEAPI_SES_CSRF from the 'session' object of the auth response.

    Returns:
        bool: True when a valid session was obtained, False otherwise.
    """
    global PIHOLEAPI_SES_VALID, PIHOLEAPI_SES_SID, PIHOLEAPI_SES_CSRF

    if not PIHOLEAPI_URL:
        mylog('none', [f'[{pluginName}] PIHOLEAPI_URL not configured — skipping.'])
        return False

    # Suppress urllib3 insecure-request warnings only when verification is off.
    if not PIHOLEAPI_VERIFY_SSL:
        requests.packages.urllib3.disable_warnings(InsecureRequestWarning)

    request_headers = {
        "accept": "application/json",
        "content-type": "application/json",
        "User-Agent": "NetAlertX/" + VERSION_DATE,
    }
    payload = {"password": PIHOLEAPI_PASSWORD}

    try:
        response = requests.post(
            PIHOLEAPI_URL + 'api/auth',
            headers=request_headers,
            json=payload,
            verify=PIHOLEAPI_VERIFY_SSL,
            timeout=PIHOLEAPI_RUN_TIMEOUT,
        )
        response.raise_for_status()
    except requests.exceptions.Timeout:
        mylog('none', [f'[{pluginName}] Pi-hole auth request timed out. Try increasing PIHOLEAPI_RUN_TIMEOUT.'])
        return False
    except requests.exceptions.ConnectionError:
        mylog('none', [f'[{pluginName}] Connection error during Pi-hole auth. Check PIHOLEAPI_URL and PIHOLEAPI_PASSWORD'])
        return False
    except Exception as e:
        mylog('none', [f'[{pluginName}] Unexpected auth error: {e}'])
        return False

    try:
        body = response.json()
    except Exception:
        mylog('none', [f'[{pluginName}] Unable to parse Pi-hole auth response JSON.'])
        return False

    session = body.get('session', {})

    if not session.get('valid', False):
        mylog('none', [f'[{pluginName}] Pi-hole auth required or failed.'])
        return False

    PIHOLEAPI_SES_VALID = True
    PIHOLEAPI_SES_SID = session.get('sid')
    # csrf might not be present if no password set
    PIHOLEAPI_SES_CSRF = session.get('csrf')
    mylog('verbose', [f'[{pluginName}] Authenticated to Pi-hole (sid present).'])
    return True
|
||||||
|
|
||||||
|
|
||||||
|
# ------------------------------------------------------------------
|
||||||
|
def pihole_api_deauth():
    """
    Best-effort logout from the Pi-hole v6 API.

    Attempts to delete the server-side session, ignoring any HTTP error,
    then clears the cached session globals. Does nothing (and leaves the
    globals untouched) when no URL or session id is available.
    """
    global PIHOLEAPI_SES_VALID, PIHOLEAPI_SES_SID, PIHOLEAPI_SES_CSRF

    # Nothing to do without a configured URL or an active session id.
    if not PIHOLEAPI_URL or not PIHOLEAPI_SES_SID:
        return

    try:
        requests.delete(
            PIHOLEAPI_URL + 'api/auth',
            headers={"X-FTL-SID": PIHOLEAPI_SES_SID},
            verify=PIHOLEAPI_VERIFY_SSL,
            timeout=PIHOLEAPI_RUN_TIMEOUT,
        )
    except Exception:
        # Logout is best-effort; ignore errors.
        pass

    PIHOLEAPI_SES_VALID = False
    PIHOLEAPI_SES_SID = None
    PIHOLEAPI_SES_CSRF = None
|
||||||
|
|
||||||
|
|
||||||
|
# ------------------------------------------------------------------
|
||||||
|
def get_pihole_interface_data():
    """
    Map local Pi-hole interface MACs to their IPv4 addresses.

    Returns:
        dict: {mac: [ipv4, ...]} built from the 'network/interfaces'
        endpoint, or {} when no session exists or the request fails.
    """
    mac_to_ips = {}
    if not PIHOLEAPI_SES_VALID:
        return mac_to_ips

    headers = {"X-FTL-SID": PIHOLEAPI_SES_SID}
    if PIHOLEAPI_SES_CSRF:
        headers["X-FTL-CSRF"] = PIHOLEAPI_SES_CSRF

    try:
        response = requests.get(
            PIHOLEAPI_URL + 'api/network/interfaces',
            headers=headers,
            verify=PIHOLEAPI_VERIFY_SSL,
            timeout=PIHOLEAPI_RUN_TIMEOUT,
        )
        response.raise_for_status()
        payload = response.json()
    except Exception as e:
        mylog('none', [f'[{pluginName}] Failed to fetch Pi-hole interfaces: {e}'])
        return mac_to_ips

    for iface in payload.get('interfaces', []):
        mac = iface.get('address')
        # Skip interfaces without a usable hardware address.
        if not mac or mac == "00:00:00:00:00:00":
            continue
        # Keep IPv4 ('inet' family) addresses only.
        ipv4_addrs = [
            entry.get('address')
            for entry in iface.get('addresses', [])
            if entry.get('family') == 'inet' and entry.get('address')
        ]
        if ipv4_addrs:
            mac_to_ips[mac] = ipv4_addrs

    return mac_to_ips
|
||||||
|
|
||||||
|
|
||||||
|
# ------------------------------------------------------------------
|
||||||
|
def get_pihole_network_devices():
    """
    Fetch the device list from the Pi-hole v6 'network/devices' endpoint.

    Returns:
        list: raw device dicts as reported by Pi-hole, or [] when no
        session exists or the request fails.
    """
    devices = []

    # Without an authenticated session there is nothing to fetch.
    if not PIHOLEAPI_SES_VALID:
        return devices

    # Session headers; the CSRF token is only present when issued.
    headers = {"X-FTL-SID": PIHOLEAPI_SES_SID}
    if PIHOLEAPI_SES_CSRF:
        headers["X-FTL-CSRF"] = PIHOLEAPI_SES_CSRF

    params = {
        'max_devices': str(PIHOLEAPI_API_MAXCLIENTS),
        'max_addresses': '2'
    }

    try:
        response = requests.get(
            PIHOLEAPI_URL + 'api/network/devices',
            headers=headers,
            params=params,
            verify=PIHOLEAPI_VERIFY_SSL,
            timeout=PIHOLEAPI_RUN_TIMEOUT,
        )
        response.raise_for_status()
        data = response.json()

        mylog('debug', [f'[{pluginName}] Pi-hole API returned data: {json.dumps(data)}'])

    except Exception as e:
        mylog('none', [f'[{pluginName}] Failed to fetch Pi-hole devices: {e}'])
        return devices

    # The API returns the list under the 'devices' key.
    return data.get('devices', [])
|
||||||
|
|
||||||
|
|
||||||
|
# ------------------------------------------------------------------
|
||||||
|
def gather_device_entries():
    """
    Build a list of device entries suitable for Plugin_Objects.add_object.

    Combines the Pi-hole device list with the local interface map so that
    IPs bound to a Pi-hole interface are marked as just-seen.

    Returns:
        list[dict]: one entry per (device, ip) pair with keys:
            mac, ip, name, macVendor, lastQuery (strings; lastQuery may be '').
    """
    entries = []

    iface_map = get_pihole_interface_data()
    devices = get_pihole_network_devices()
    now_ts = int(datetime.datetime.now().timestamp())

    # Flatten all local interface IPs once — O(1) membership tests instead
    # of rescanning every interface list for every device IP.
    local_ips = {ip for iplist in iface_map.values() for ip in iplist}

    for device in devices:
        hwaddr = device.get('hwaddr')
        if not hwaddr or hwaddr == "00:00:00:00:00:00":
            continue

        macVendor = device.get('macVendor', '')
        device_last_query = device.get('lastQuery')

        # 'ips' is a list of dicts: {ip, name}
        for ip_info in device.get('ips', []):
            ip = ip_info.get('ip')
            if not ip:
                continue

            name = ip_info.get('name') or '(unknown)'

            # Mark active if this IP is present on a local interface.
            # Bug fix: computed per IP — previously the device-level
            # lastQuery was overwritten in place, so once one IP matched,
            # every later IP of the same device wrongly inherited "now".
            last_query = str(now_ts) if ip in local_ips else device_last_query

            entries.append({
                'mac': hwaddr.lower(),
                'ip': ip,
                'name': name,
                'macVendor': macVendor,
                'lastQuery': str(last_query) if last_query is not None else ''
            })

    return entries
|
||||||
|
|
||||||
|
|
||||||
|
# ------------------------------------------------------------------
|
||||||
|
def main():
    """
    Main plugin entrypoint.

    Loads settings from the NetAlertX config, authenticates to Pi-hole,
    imports discovered devices into the plugin result file, and logs out
    (best-effort) even when the import step fails.

    Returns:
        int: 0 on success, 1 when configuration or authentication failed.
    """
    global PIHOLEAPI_URL, PIHOLEAPI_PASSWORD, PIHOLEAPI_API_MAXCLIENTS, PIHOLEAPI_VERIFY_SSL, PIHOLEAPI_RUN_TIMEOUT

    mylog('verbose', [f'[{pluginName}] start script.'])

    # Load settings from NAX config
    PIHOLEAPI_URL = get_setting_value('PIHOLEAPI_URL')

    # Bug fix: guard the missing/empty setting — the original called
    # .endswith() unconditionally and crashed with AttributeError when
    # PIHOLEAPI_URL was not configured.
    if not PIHOLEAPI_URL:
        mylog('none', [f'[{pluginName}] PIHOLEAPI_URL not configured — skipping.'])
        return 1

    # ensure trailing slash
    if not PIHOLEAPI_URL.endswith('/'):
        PIHOLEAPI_URL += '/'

    PIHOLEAPI_PASSWORD = get_setting_value('PIHOLEAPI_PASSWORD')
    PIHOLEAPI_API_MAXCLIENTS = get_setting_value('PIHOLEAPI_API_MAXCLIENTS')
    # Accept boolean or string "True"/"False"
    PIHOLEAPI_VERIFY_SSL = get_setting_value('PIHOLEAPI_SSL_VERIFY')
    PIHOLEAPI_RUN_TIMEOUT = get_setting_value('PIHOLEAPI_RUN_TIMEOUT')

    # Authenticate
    if not pihole_api_auth():
        mylog('none', [f'[{pluginName}] Authentication failed — no devices imported.'])
        return 1

    try:
        device_entries = gather_device_entries()

        if not device_entries:
            mylog('verbose', [f'[{pluginName}] No devices found on Pi-hole.'])
        else:
            for entry in device_entries:

                if is_mac(entry['mac']):
                    # Map to Plugin_Objects fields
                    mylog('verbose', [f"[{pluginName}] found: {entry['name']}|{entry['mac']}|{entry['ip']}"])

                    plugin_objects.add_object(
                        primaryId=str(entry['mac']),
                        secondaryId=str(entry['ip']),
                        watched1=str(entry['name']),
                        watched2=str(entry['macVendor']),
                        watched3=str(entry['lastQuery']),
                        watched4="",
                        extra=pluginName,
                        foreignKey=str(entry['mac'])
                    )
                else:
                    mylog('verbose', [f"[{pluginName}] Skipping invalid MAC: {entry['name']}|{entry['mac']}|{entry['ip']}"])

        # Write result file for NetAlertX to ingest
        plugin_objects.write_result_file()
        mylog('verbose', [f'[{pluginName}] Script finished. Imported {len(device_entries)} entries.'])

    finally:
        # Deauth best-effort
        pihole_api_deauth()

    return 0
|
||||||
|
|
||||||
|
|
||||||
|
# Script entry point: run the import when executed directly.
if __name__ == '__main__':
    main()
|
||||||
@@ -5,18 +5,18 @@ import os
|
|||||||
import re
|
import re
|
||||||
import base64
|
import base64
|
||||||
import json
|
import json
|
||||||
from datetime import datetime
|
|
||||||
|
|
||||||
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
|
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
|
||||||
|
|
||||||
sys.path.append(f"{INSTALL_PATH}/front/plugins")
|
sys.path.append(f"{INSTALL_PATH}/front/plugins")
|
||||||
sys.path.append(f'{INSTALL_PATH}/server')
|
sys.path.append(f'{INSTALL_PATH}/server')
|
||||||
|
|
||||||
from logger import mylog, Logger
|
from logger import mylog # noqa: E402 [flake8 lint suppression]
|
||||||
from utils.datetime_utils import timeNowDB
|
from utils.datetime_utils import timeNowDB # noqa: E402 [flake8 lint suppression]
|
||||||
from const import default_tz, fullConfPath
|
from const import default_tz, fullConfPath # noqa: E402 [flake8 lint suppression]
|
||||||
|
|
||||||
#-------------------------------------------------------------------------------
|
|
||||||
|
# -------------------------------------------------------------------------------
|
||||||
def read_config_file():
|
def read_config_file():
|
||||||
"""
|
"""
|
||||||
returns dict of the config file key:value pairs
|
returns dict of the config file key:value pairs
|
||||||
@@ -25,15 +25,15 @@ def read_config_file():
|
|||||||
|
|
||||||
filename = fullConfPath
|
filename = fullConfPath
|
||||||
|
|
||||||
|
|
||||||
print('[plugin_helper] reading config file')
|
print('[plugin_helper] reading config file')
|
||||||
|
|
||||||
# load the variables from .conf
|
# load the variables from .conf
|
||||||
with open(filename, "r") as file:
|
with open(filename, "r") as file:
|
||||||
code = compile(file.read(), filename, "exec")
|
code = compile(file.read(), filename, "exec")
|
||||||
|
|
||||||
confDict = {} # config dictionary
|
confDict = {} # config dictionary
|
||||||
exec(code, {"__builtins__": {}}, confDict)
|
exec(code, {"__builtins__": {}}, confDict)
|
||||||
return confDict
|
return confDict
|
||||||
|
|
||||||
|
|
||||||
configFile = read_config_file()
|
configFile = read_config_file()
|
||||||
@@ -42,6 +42,7 @@ if timeZoneSetting not in all_timezones:
|
|||||||
timeZoneSetting = default_tz
|
timeZoneSetting = default_tz
|
||||||
timeZone = pytz.timezone(timeZoneSetting)
|
timeZone = pytz.timezone(timeZoneSetting)
|
||||||
|
|
||||||
|
|
||||||
# -------------------------------------------------------------------
|
# -------------------------------------------------------------------
|
||||||
# Sanitizes plugin output
|
# Sanitizes plugin output
|
||||||
def handleEmpty(input):
|
def handleEmpty(input):
|
||||||
@@ -54,70 +55,72 @@ def handleEmpty(input):
|
|||||||
input = re.sub(r'[^\x00-\x7F]+', ' ', input)
|
input = re.sub(r'[^\x00-\x7F]+', ' ', input)
|
||||||
input = input.replace('\n', '') # Removing new lines
|
input = input.replace('\n', '') # Removing new lines
|
||||||
return input
|
return input
|
||||||
|
|
||||||
|
|
||||||
# -------------------------------------------------------------------
|
# -------------------------------------------------------------------
|
||||||
# Sanitizes string
|
# Sanitizes string
|
||||||
def rmBadChars(input):
|
def rmBadChars(input):
|
||||||
|
|
||||||
input = handleEmpty(input)
|
input = handleEmpty(input)
|
||||||
input = input.replace("'", '_') # Removing ' (single quotes)
|
input = input.replace("'", '_') # Removing ' (single quotes)
|
||||||
|
|
||||||
return input
|
return input
|
||||||
|
|
||||||
|
|
||||||
# -------------------------------------------------------------------
|
# -------------------------------------------------------------------
|
||||||
# check if this is a router IP
|
# check if this is a router IP
|
||||||
def is_typical_router_ip(ip_address):
|
def is_typical_router_ip(ip_address):
|
||||||
# List of common default gateway IP addresses
|
# List of common default gateway IP addresses
|
||||||
common_router_ips = [
|
common_router_ips = [
|
||||||
"192.168.0.1", "192.168.1.1", "192.168.1.254", "192.168.0.254",
|
"192.168.0.1", "192.168.1.1", "192.168.1.254", "192.168.0.254",
|
||||||
"10.0.0.1", "10.1.1.1", "192.168.2.1", "192.168.10.1", "192.168.11.1",
|
"10.0.0.1", "10.1.1.1", "192.168.2.1", "192.168.10.1", "192.168.11.1",
|
||||||
"192.168.100.1", "192.168.101.1", "192.168.123.254", "192.168.223.1",
|
"192.168.100.1", "192.168.101.1", "192.168.123.254", "192.168.223.1",
|
||||||
"192.168.31.1", "192.168.8.1", "192.168.254.254", "192.168.50.1",
|
"192.168.31.1", "192.168.8.1", "192.168.254.254", "192.168.50.1",
|
||||||
"192.168.3.1", "192.168.4.1", "192.168.5.1", "192.168.9.1",
|
"192.168.3.1", "192.168.4.1", "192.168.5.1", "192.168.9.1",
|
||||||
"192.168.15.1", "192.168.16.1", "192.168.20.1", "192.168.30.1",
|
"192.168.15.1", "192.168.16.1", "192.168.20.1", "192.168.30.1",
|
||||||
"192.168.42.1", "192.168.62.1", "192.168.178.1", "192.168.1.1",
|
"192.168.42.1", "192.168.62.1", "192.168.178.1", "192.168.1.1",
|
||||||
"192.168.1.254", "192.168.0.1", "192.168.0.10", "192.168.0.100",
|
"192.168.1.254", "192.168.0.1", "192.168.0.10", "192.168.0.100",
|
||||||
"192.168.0.254"
|
"192.168.0.254"
|
||||||
]
|
]
|
||||||
|
|
||||||
return ip_address in common_router_ips
|
return ip_address in common_router_ips
|
||||||
|
|
||||||
|
|
||||||
# -------------------------------------------------------------------
|
# -------------------------------------------------------------------
|
||||||
# Check if a valid MAC address
|
# Check if a valid MAC address
|
||||||
def is_mac(input):
|
def is_mac(input):
|
||||||
input_str = str(input).lower() # Convert to string and lowercase so non-string values won't raise errors
|
input_str = str(input).lower() # Convert to string and lowercase so non-string values won't raise errors
|
||||||
|
|
||||||
isMac = bool(re.match("[0-9a-f]{2}([-:]?)[0-9a-f]{2}(\\1[0-9a-f]{2}){4}$", input_str))
|
isMac = bool(re.match("[0-9a-f]{2}([-:]?)[0-9a-f]{2}(\\1[0-9a-f]{2}){4}$", input_str))
|
||||||
|
|
||||||
if not isMac: # If it's not a MAC address, log the input
|
if not isMac: # If it's not a MAC address, log the input
|
||||||
mylog('verbose', [f'[is_mac] not a MAC: {input_str}'])
|
mylog('verbose', [f'[is_mac] not a MAC: {input_str}'])
|
||||||
|
|
||||||
return isMac
|
return isMac
|
||||||
|
|
||||||
|
|
||||||
# -------------------------------------------------------------------
|
# -------------------------------------------------------------------
|
||||||
def decodeBase64(inputParamBase64):
|
def decodeBase64(inputParamBase64):
|
||||||
|
|
||||||
# Printing the input list to check its content.
|
# Printing the input list to check its content.
|
||||||
mylog('debug', ['[Plugins] Helper base64 input: ', input])
|
mylog('debug', ['[Plugins] Helper base64 input: ', input])
|
||||||
print('[Plugins] Helper base64 input: ')
|
print('[Plugins] Helper base64 input: ')
|
||||||
print(input)
|
print(input)
|
||||||
|
|
||||||
|
|
||||||
# Extract the base64-encoded subnet information from the first element
|
|
||||||
# The format of the element is assumed to be like 'param=b<base64-encoded-data>'.
|
|
||||||
|
|
||||||
|
# Extract the base64-encoded subnet information from the first element
|
||||||
|
# The format of the element is assumed to be like 'param=b<base64-encoded-data>'.
|
||||||
# Printing the extracted base64-encoded information.
|
# Printing the extracted base64-encoded information.
|
||||||
mylog('debug', ['[Plugins] Helper base64 inputParamBase64: ', inputParamBase64])
|
mylog('debug', ['[Plugins] Helper base64 inputParamBase64: ', inputParamBase64])
|
||||||
|
|
||||||
|
|
||||||
# Decode the base64-encoded subnet information to get the actual subnet information in ASCII format.
|
# Decode the base64-encoded subnet information to get the actual subnet information in ASCII format.
|
||||||
result = base64.b64decode(inputParamBase64).decode('ascii')
|
result = base64.b64decode(inputParamBase64).decode('ascii')
|
||||||
|
|
||||||
# Print the decoded subnet information.
|
# Print the decoded subnet information.
|
||||||
mylog('debug', ['[Plugins] Helper base64 result: ', result])
|
mylog('debug', ['[Plugins] Helper base64 result: ', result])
|
||||||
|
|
||||||
return result
|
return result
|
||||||
|
|
||||||
|
|
||||||
# -------------------------------------------------------------------
|
# -------------------------------------------------------------------
|
||||||
def decode_settings_base64(encoded_str, convert_types=True):
|
def decode_settings_base64(encoded_str, convert_types=True):
|
||||||
"""
|
"""
|
||||||
@@ -167,7 +170,7 @@ def decode_settings_base64(encoded_str, convert_types=True):
|
|||||||
def normalize_mac(mac):
|
def normalize_mac(mac):
|
||||||
# Split the MAC address by colon (:) or hyphen (-) and convert each part to uppercase
|
# Split the MAC address by colon (:) or hyphen (-) and convert each part to uppercase
|
||||||
parts = mac.upper().split(':')
|
parts = mac.upper().split(':')
|
||||||
|
|
||||||
# If the MAC address is split by hyphen instead of colon
|
# If the MAC address is split by hyphen instead of colon
|
||||||
if len(parts) == 1:
|
if len(parts) == 1:
|
||||||
parts = mac.upper().split('-')
|
parts = mac.upper().split('-')
|
||||||
@@ -177,14 +180,15 @@ def normalize_mac(mac):
|
|||||||
|
|
||||||
# Join the parts with colon (:)
|
# Join the parts with colon (:)
|
||||||
normalized_mac = ':'.join(normalized_parts)
|
normalized_mac = ':'.join(normalized_parts)
|
||||||
|
|
||||||
return normalized_mac
|
return normalized_mac
|
||||||
|
|
||||||
|
|
||||||
# -------------------------------------------------------------------
|
# -------------------------------------------------------------------
|
||||||
class Plugin_Object:
|
class Plugin_Object:
|
||||||
"""
|
"""
|
||||||
Plugin_Object class to manage one object introduced by the plugin.
|
Plugin_Object class to manage one object introduced by the plugin.
|
||||||
An object typically is a device but could also be a website or something
|
An object typically is a device but could also be a website or something
|
||||||
else that is monitored by the plugin.
|
else that is monitored by the plugin.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
@@ -222,8 +226,8 @@ class Plugin_Object:
|
|||||||
self.helpVal4 = helpVal4 or ""
|
self.helpVal4 = helpVal4 or ""
|
||||||
|
|
||||||
def write(self):
|
def write(self):
|
||||||
"""
|
"""
|
||||||
Write the object details as a string in the
|
Write the object details as a string in the
|
||||||
format required to write the result file.
|
format required to write the result file.
|
||||||
"""
|
"""
|
||||||
line = "{}|{}|{}|{}|{}|{}|{}|{}|{}|{}|{}|{}|{}\n".format(
|
line = "{}|{}|{}|{}|{}|{}|{}|{}|{}|{}|{}|{}|{}\n".format(
|
||||||
@@ -243,6 +247,7 @@ class Plugin_Object:
|
|||||||
)
|
)
|
||||||
return line
|
return line
|
||||||
|
|
||||||
|
|
||||||
class Plugin_Objects:
|
class Plugin_Objects:
|
||||||
"""
|
"""
|
||||||
Plugin_Objects is the class that manages and holds all the objects created by the plugin.
|
Plugin_Objects is the class that manages and holds all the objects created by the plugin.
|
||||||
@@ -303,7 +308,3 @@ class Plugin_Objects:
|
|||||||
|
|
||||||
def __len__(self):
|
def __len__(self):
|
||||||
return len(self.objects)
|
return len(self.objects)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@@ -10,12 +10,12 @@ import sys
|
|||||||
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
|
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
|
||||||
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
|
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
|
||||||
|
|
||||||
from plugin_helper import Plugin_Objects, handleEmpty, normalize_mac
|
from plugin_helper import Plugin_Objects, handleEmpty, normalize_mac # noqa: E402 [flake8 lint suppression]
|
||||||
from logger import mylog, Logger
|
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
|
||||||
from helper import get_setting_value
|
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
|
||||||
from const import logPath
|
from const import logPath # noqa: E402 [flake8 lint suppression]
|
||||||
import conf
|
import conf # noqa: E402 [flake8 lint suppression]
|
||||||
from pytz import timezone
|
from pytz import timezone # noqa: E402 [flake8 lint suppression]
|
||||||
|
|
||||||
# Make sure the TIMEZONE for logging is correct
|
# Make sure the TIMEZONE for logging is correct
|
||||||
conf.tz = timezone(get_setting_value('TIMEZONE'))
|
conf.tz = timezone(get_setting_value('TIMEZONE'))
|
||||||
@@ -28,53 +28,60 @@ pluginName = "SNMPDSC"
|
|||||||
LOG_PATH = logPath + '/plugins'
|
LOG_PATH = logPath + '/plugins'
|
||||||
RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
|
RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
|
||||||
|
|
||||||
# Workflow
|
|
||||||
|
|
||||||
def main():
|
def main():
|
||||||
mylog('verbose', ['[SNMPDSC] In script '])
|
mylog('verbose', ['[SNMPDSC] In script '])
|
||||||
|
|
||||||
# init global variables
|
# init global variables
|
||||||
global snmpWalkCmds
|
global snmpWalkCmds
|
||||||
|
|
||||||
|
parser = argparse.ArgumentParser(description='This plugin is used to discover devices via the arp table(s) of a RFC1213 compliant router or switch.')
|
||||||
parser = argparse.ArgumentParser(description='This plugin is used to discover devices via the arp table(s) of a RFC1213 compliant router or switch.')
|
parser.add_argument(
|
||||||
parser.add_argument('routers', action="store", help="IP(s) of routers, separated by comma (,) if passing multiple")
|
'routers',
|
||||||
|
action="store",
|
||||||
|
help="IP(s) of routers, separated by comma (,) if passing multiple"
|
||||||
|
)
|
||||||
|
|
||||||
values = parser.parse_args()
|
values = parser.parse_args()
|
||||||
|
|
||||||
timeoutSetting = get_setting_value("SNMPDSC_RUN_TIMEOUT")
|
timeoutSetting = get_setting_value("SNMPDSC_RUN_TIMEOUT")
|
||||||
|
|
||||||
plugin_objects = Plugin_Objects(RESULT_FILE)
|
plugin_objects = Plugin_Objects(RESULT_FILE)
|
||||||
|
|
||||||
if values.routers:
|
if values.routers:
|
||||||
snmpWalkCmds = values.routers.split('=')[1].replace('\'','')
|
snmpWalkCmds = values.routers.split('=')[1].replace('\'', '')
|
||||||
|
|
||||||
|
|
||||||
if ',' in snmpWalkCmds:
|
if ',' in snmpWalkCmds:
|
||||||
commands = snmpWalkCmds.split(',')
|
commands = snmpWalkCmds.split(',')
|
||||||
else:
|
else:
|
||||||
commands = [snmpWalkCmds]
|
commands = [snmpWalkCmds]
|
||||||
|
|
||||||
for cmd in commands:
|
for cmd in commands:
|
||||||
mylog('verbose', ['[SNMPDSC] Router snmpwalk command: ', cmd])
|
mylog('verbose', ['[SNMPDSC] Router snmpwalk command: ', cmd])
|
||||||
# split the string, remove white spaces around each item, and exclude any empty strings
|
# split the string, remove white spaces around each item, and exclude any empty strings
|
||||||
snmpwalkArgs = [arg.strip() for arg in cmd.split(' ') if arg.strip()]
|
snmpwalkArgs = [arg.strip() for arg in cmd.split(' ') if arg.strip()]
|
||||||
|
|
||||||
# Execute N probes and insert in list
|
# Execute N probes and insert in list
|
||||||
probes = 1 # N probes
|
probes = 1 # N probes
|
||||||
|
|
||||||
for _ in range(probes):
|
|
||||||
output = subprocess.check_output (snmpwalkArgs, universal_newlines=True, stderr=subprocess.STDOUT, timeout=(timeoutSetting))
|
|
||||||
|
|
||||||
mylog('verbose', ['[SNMPDSC] output: ', output])
|
for _ in range(probes):
|
||||||
|
output = subprocess.check_output(
|
||||||
|
snmpwalkArgs,
|
||||||
|
universal_newlines=True,
|
||||||
|
stderr=subprocess.STDOUT,
|
||||||
|
timeout=(timeoutSetting)
|
||||||
|
)
|
||||||
|
|
||||||
|
mylog('verbose', ['[SNMPDSC] output: ', output])
|
||||||
|
|
||||||
lines = output.split('\n')
|
lines = output.split('\n')
|
||||||
|
|
||||||
for line in lines:
|
for line in lines:
|
||||||
|
|
||||||
tmpSplt = line.split('"')
|
tmpSplt = line.split('"')
|
||||||
|
|
||||||
if len(tmpSplt) == 3:
|
if len(tmpSplt) == 3:
|
||||||
|
|
||||||
ipStr = tmpSplt[0].split('.')[-4:] # Get the last 4 elements to extract the IP
|
ipStr = tmpSplt[0].split('.')[-4:] # Get the last 4 elements to extract the IP
|
||||||
macStr = tmpSplt[1].strip().split(' ') # Remove leading/trailing spaces from MAC
|
macStr = tmpSplt[1].strip().split(' ') # Remove leading/trailing spaces from MAC
|
||||||
|
|
||||||
@@ -82,19 +89,18 @@ def main():
|
|||||||
macAddress = ':'.join(macStr)
|
macAddress = ':'.join(macStr)
|
||||||
ipAddress = '.'.join(ipStr)
|
ipAddress = '.'.join(ipStr)
|
||||||
|
|
||||||
mylog('verbose', [f'[SNMPDSC] IP: {ipAddress} MAC: {macAddress}'])
|
mylog('verbose', [f'[SNMPDSC] IP: {ipAddress} MAC: {macAddress}'])
|
||||||
|
|
||||||
plugin_objects.add_object(
|
plugin_objects.add_object(
|
||||||
primaryId = handleEmpty(macAddress),
|
primaryId = handleEmpty(macAddress),
|
||||||
secondaryId = handleEmpty(ipAddress.strip()), # Remove leading/trailing spaces from IP
|
secondaryId = handleEmpty(ipAddress.strip()), # Remove leading/trailing spaces from IP
|
||||||
watched1 = '(unknown)',
|
watched1 = '(unknown)',
|
||||||
watched2 = handleEmpty(snmpwalkArgs[6]), # router IP
|
watched2 = handleEmpty(snmpwalkArgs[6]), # router IP
|
||||||
extra = handleEmpty(line),
|
extra = handleEmpty(line),
|
||||||
foreignKey = handleEmpty(macAddress) # Use the primary ID as the foreign key
|
foreignKey = handleEmpty(macAddress) # Use the primary ID as the foreign key
|
||||||
)
|
)
|
||||||
else:
|
else:
|
||||||
mylog('verbose', ['[SNMPDSC] ipStr does not seem to contain a valid IP:', ipStr])
|
mylog('verbose', ['[SNMPDSC] ipStr does not seem to contain a valid IP:', ipStr])
|
||||||
|
|
||||||
|
|
||||||
elif line.startswith('ipNetToMediaPhysAddress'):
|
elif line.startswith('ipNetToMediaPhysAddress'):
|
||||||
# Format: snmpwalk -OXsq output
|
# Format: snmpwalk -OXsq output
|
||||||
@@ -115,12 +121,11 @@ def main():
|
|||||||
foreignKey = handleEmpty(macAddress)
|
foreignKey = handleEmpty(macAddress)
|
||||||
)
|
)
|
||||||
|
|
||||||
mylog('verbose', ['[SNMPDSC] Entries found: ', len(plugin_objects)])
|
mylog('verbose', ['[SNMPDSC] Entries found: ', len(plugin_objects)])
|
||||||
|
|
||||||
plugin_objects.write_result_file()
|
plugin_objects.write_result_file()
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
# BEGIN
|
# BEGIN
|
||||||
if __name__ == '__main__':
|
if __name__ == '__main__':
|
||||||
main()
|
main()
|
||||||
|
|||||||
@@ -12,16 +12,16 @@ import base64
|
|||||||
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
|
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
|
||||||
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
|
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
|
||||||
|
|
||||||
from plugin_helper import Plugin_Objects
|
from plugin_helper import Plugin_Objects # noqa: E402 [flake8 lint suppression]
|
||||||
from utils.plugin_utils import get_plugins_configs, decode_and_rename_files
|
from utils.plugin_utils import get_plugins_configs, decode_and_rename_files # noqa: E402 [flake8 lint suppression]
|
||||||
from logger import mylog, Logger
|
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
|
||||||
from const import fullDbPath, logPath
|
from const import fullDbPath, logPath # noqa: E402 [flake8 lint suppression]
|
||||||
from helper import get_setting_value
|
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
|
||||||
from utils.datetime_utils import timeNowDB
|
from utils.datetime_utils import timeNowDB # noqa: E402 [flake8 lint suppression]
|
||||||
from utils.crypto_utils import encrypt_data
|
from utils.crypto_utils import encrypt_data # noqa: E402 [flake8 lint suppression]
|
||||||
from messaging.in_app import write_notification
|
from messaging.in_app import write_notification # noqa: E402 [flake8 lint suppression]
|
||||||
import conf
|
import conf # noqa: E402 [flake8 lint suppression]
|
||||||
from pytz import timezone
|
from pytz import timezone # noqa: E402 [flake8 lint suppression]
|
||||||
|
|
||||||
# Make sure the TIMEZONE for logging is correct
|
# Make sure the TIMEZONE for logging is correct
|
||||||
conf.tz = timezone(get_setting_value('TIMEZONE'))
|
conf.tz = timezone(get_setting_value('TIMEZONE'))
|
||||||
@@ -41,21 +41,21 @@ plugin_objects = Plugin_Objects(RESULT_FILE)
|
|||||||
|
|
||||||
|
|
||||||
def main():
|
def main():
|
||||||
mylog('verbose', [f'[{pluginName}] In script'])
|
mylog('verbose', [f'[{pluginName}] In script'])
|
||||||
|
|
||||||
# Retrieve configuration settings
|
# Retrieve configuration settings
|
||||||
plugins_to_sync = get_setting_value('SYNC_plugins')
|
plugins_to_sync = get_setting_value('SYNC_plugins')
|
||||||
api_token = get_setting_value('API_TOKEN')
|
api_token = get_setting_value('API_TOKEN')
|
||||||
encryption_key = get_setting_value('SYNC_encryption_key')
|
encryption_key = get_setting_value('SYNC_encryption_key')
|
||||||
hub_url = get_setting_value('SYNC_hub_url')
|
hub_url = get_setting_value('SYNC_hub_url')
|
||||||
node_name = get_setting_value('SYNC_node_name')
|
node_name = get_setting_value('SYNC_node_name')
|
||||||
send_devices = get_setting_value('SYNC_devices')
|
send_devices = get_setting_value('SYNC_devices')
|
||||||
pull_nodes = get_setting_value('SYNC_nodes')
|
pull_nodes = get_setting_value('SYNC_nodes')
|
||||||
|
|
||||||
# variables to determine operation mode
|
# variables to determine operation mode
|
||||||
is_hub = False
|
is_hub = False
|
||||||
is_node = False
|
is_node = False
|
||||||
|
|
||||||
# Check if api_token set
|
# Check if api_token set
|
||||||
if not api_token:
|
if not api_token:
|
||||||
mylog('verbose', [f'[{pluginName}] ⚠ ERROR api_token not defined - quitting.'])
|
mylog('verbose', [f'[{pluginName}] ⚠ ERROR api_token not defined - quitting.'])
|
||||||
@@ -63,23 +63,23 @@ def main():
|
|||||||
|
|
||||||
# check if this is a hub or a node
|
# check if this is a hub or a node
|
||||||
if len(hub_url) > 0 and (send_devices or plugins_to_sync):
|
if len(hub_url) > 0 and (send_devices or plugins_to_sync):
|
||||||
is_node = True
|
is_node = True
|
||||||
mylog('verbose', [f'[{pluginName}] Mode 1: PUSH (NODE) - This is a NODE as SYNC_hub_url, SYNC_devices or SYNC_plugins are set'])
|
mylog('verbose', [f'[{pluginName}] Mode 1: PUSH (NODE) - This is a NODE as SYNC_hub_url, SYNC_devices or SYNC_plugins are set'])
|
||||||
if len(pull_nodes) > 0:
|
if len(pull_nodes) > 0:
|
||||||
is_hub = True
|
is_hub = True
|
||||||
mylog('verbose', [f'[{pluginName}] Mode 2: PULL (HUB) - This is a HUB as SYNC_nodes is set'])
|
mylog('verbose', [f'[{pluginName}] Mode 2: PULL (HUB) - This is a HUB as SYNC_nodes is set'])
|
||||||
|
|
||||||
# Mode 1: PUSH/SEND (NODE)
|
# Mode 1: PUSH/SEND (NODE)
|
||||||
if is_node:
|
if is_node:
|
||||||
# PUSHING/SENDING Plugins
|
# PUSHING/SENDING Plugins
|
||||||
|
|
||||||
# Get all plugin configurations
|
# Get all plugin configurations
|
||||||
all_plugins = get_plugins_configs(False)
|
all_plugins = get_plugins_configs(False)
|
||||||
|
|
||||||
mylog('verbose', [f'[{pluginName}] plugins_to_sync {plugins_to_sync}'])
|
mylog('verbose', [f'[{pluginName}] plugins_to_sync {plugins_to_sync}'])
|
||||||
|
|
||||||
for plugin in all_plugins:
|
for plugin in all_plugins:
|
||||||
pref = plugin["unique_prefix"]
|
pref = plugin["unique_prefix"]
|
||||||
|
|
||||||
index = 0
|
index = 0
|
||||||
if pref in plugins_to_sync:
|
if pref in plugins_to_sync:
|
||||||
@@ -100,9 +100,8 @@ def main():
|
|||||||
send_data(api_token, file_content, encryption_key, file_path, node_name, pref, hub_url)
|
send_data(api_token, file_content, encryption_key, file_path, node_name, pref, hub_url)
|
||||||
|
|
||||||
else:
|
else:
|
||||||
mylog('verbose', [f'[{pluginName}] {file_path} not found'])
|
mylog('verbose', [f'[{pluginName}] {file_path} not found'])
|
||||||
|
|
||||||
|
|
||||||
# PUSHING/SENDING devices
|
# PUSHING/SENDING devices
|
||||||
if send_devices:
|
if send_devices:
|
||||||
|
|
||||||
@@ -117,27 +116,27 @@ def main():
|
|||||||
mylog('verbose', [f'[{pluginName}] Sending file_content: "{file_content}"'])
|
mylog('verbose', [f'[{pluginName}] Sending file_content: "{file_content}"'])
|
||||||
send_data(api_token, file_content, encryption_key, file_path, node_name, pref, hub_url)
|
send_data(api_token, file_content, encryption_key, file_path, node_name, pref, hub_url)
|
||||||
else:
|
else:
|
||||||
mylog('verbose', [f'[{pluginName}] SYNC_hub_url not defined, skipping posting "Devices" data'])
|
mylog('verbose', [f'[{pluginName}] SYNC_hub_url not defined, skipping posting "Devices" data'])
|
||||||
else:
|
else:
|
||||||
mylog('verbose', [f'[{pluginName}] SYNC_hub_url not defined, skipping posting "Plugins" and "Devices" data'])
|
mylog('verbose', [f'[{pluginName}] SYNC_hub_url not defined, skipping posting "Plugins" and "Devices" data'])
|
||||||
|
|
||||||
# Mode 2: PULL/GET (HUB)
|
# Mode 2: PULL/GET (HUB)
|
||||||
|
|
||||||
# PULLING DEVICES
|
# PULLING DEVICES
|
||||||
file_prefix = 'last_result'
|
file_prefix = 'last_result'
|
||||||
|
|
||||||
# pull data from nodes if specified
|
# pull data from nodes if specified
|
||||||
if is_hub:
|
if is_hub:
|
||||||
for node_url in pull_nodes:
|
for node_url in pull_nodes:
|
||||||
response_json = get_data(api_token, node_url)
|
response_json = get_data(api_token, node_url)
|
||||||
|
|
||||||
# Extract node_name and base64 data
|
# Extract node_name and base64 data
|
||||||
node_name = response_json.get('node_name', 'unknown_node')
|
node_name = response_json.get('node_name', 'unknown_node')
|
||||||
data_base64 = response_json.get('data_base64', '')
|
data_base64 = response_json.get('data_base64', '')
|
||||||
|
|
||||||
# Decode base64 data
|
# Decode base64 data
|
||||||
decoded_data = base64.b64decode(data_base64)
|
decoded_data = base64.b64decode(data_base64)
|
||||||
|
|
||||||
# Create log file name using node name
|
# Create log file name using node name
|
||||||
log_file_name = f'{file_prefix}.{node_name}.log'
|
log_file_name = f'{file_prefix}.{node_name}.log'
|
||||||
|
|
||||||
@@ -148,18 +147,17 @@ def main():
|
|||||||
message = f'[{pluginName}] Device data from node "{node_name}" written to {log_file_name}'
|
message = f'[{pluginName}] Device data from node "{node_name}" written to {log_file_name}'
|
||||||
mylog('verbose', [message])
|
mylog('verbose', [message])
|
||||||
if lggr.isAbove('verbose'):
|
if lggr.isAbove('verbose'):
|
||||||
write_notification(message, 'info', timeNowDB())
|
write_notification(message, 'info', timeNowDB())
|
||||||
|
|
||||||
|
|
||||||
# Process any received data for the Device DB table (ONLY JSON)
|
# Process any received data for the Device DB table (ONLY JSON)
|
||||||
# Create the file path
|
# Create the file path
|
||||||
|
|
||||||
# Get all "last_result" files from the sync folder, decode, rename them, and get the list of files
|
# Get all "last_result" files from the sync folder, decode, rename them, and get the list of files
|
||||||
files_to_process = decode_and_rename_files(LOG_PATH, file_prefix)
|
files_to_process = decode_and_rename_files(LOG_PATH, file_prefix)
|
||||||
|
|
||||||
if len(files_to_process) > 0:
|
if len(files_to_process) > 0:
|
||||||
|
|
||||||
mylog('verbose', [f'[{pluginName}] Mode 3: RECEIVE (HUB) - This is a HUB as received data found'])
|
mylog('verbose', [f'[{pluginName}] Mode 3: RECEIVE (HUB) - This is a HUB as received data found'])
|
||||||
|
|
||||||
# Connect to the App database
|
# Connect to the App database
|
||||||
conn = sqlite3.connect(fullDbPath)
|
conn = sqlite3.connect(fullDbPath)
|
||||||
@@ -176,24 +174,24 @@ def main():
|
|||||||
# only process received .log files, skipping the one logging the progress of this plugin
|
# only process received .log files, skipping the one logging the progress of this plugin
|
||||||
if file_name != 'last_result.log':
|
if file_name != 'last_result.log':
|
||||||
mylog('verbose', [f'[{pluginName}] Processing: "{file_name}"'])
|
mylog('verbose', [f'[{pluginName}] Processing: "{file_name}"'])
|
||||||
|
|
||||||
# make sure the file has the correct name (e.g last_result.encoded.Node_1.1.log) to skip any otehr plugin files
|
# make sure the file has the correct name (e.g last_result.encoded.Node_1.1.log) to skip any otehr plugin files
|
||||||
if len(file_name.split('.')) > 2:
|
if len(file_name.split('.')) > 2:
|
||||||
# Extract node name from either last_result.decoded.Node_1.1.log or last_result.Node_1.log
|
# Extract node name from either last_result.decoded.Node_1.1.log or last_result.Node_1.log
|
||||||
parts = file_name.split('.')
|
parts = file_name.split('.')
|
||||||
# If decoded/encoded file, node name is at index 2; otherwise at index 1
|
# If decoded/encoded file, node name is at index 2; otherwise at index 1
|
||||||
syncHubNodeName = parts[2] if 'decoded' in file_name or 'encoded' in file_name else parts[1]
|
syncHubNodeName = parts[2] if 'decoded' in file_name or 'encoded' in file_name else parts[1]
|
||||||
|
|
||||||
file_path = f"{LOG_PATH}/{file_name}"
|
file_path = f"{LOG_PATH}/{file_name}"
|
||||||
|
|
||||||
with open(file_path, 'r') as f:
|
with open(file_path, 'r') as f:
|
||||||
data = json.load(f)
|
data = json.load(f)
|
||||||
for device in data['data']:
|
for device in data['data']:
|
||||||
if device['devMac'] not in unique_mac_addresses:
|
if device['devMac'] not in unique_mac_addresses:
|
||||||
device['devSyncHubNode'] = syncHubNodeName
|
device['devSyncHubNode'] = syncHubNodeName
|
||||||
unique_mac_addresses.add(device['devMac'])
|
unique_mac_addresses.add(device['devMac'])
|
||||||
device_data.append(device)
|
device_data.append(device)
|
||||||
|
|
||||||
# Rename the file to "processed_" + current name
|
# Rename the file to "processed_" + current name
|
||||||
new_file_name = f"processed_{file_name}"
|
new_file_name = f"processed_{file_name}"
|
||||||
new_file_path = os.path.join(LOG_PATH, new_file_name)
|
new_file_path = os.path.join(LOG_PATH, new_file_name)
|
||||||
@@ -209,7 +207,6 @@ def main():
|
|||||||
placeholders = ', '.join('?' for _ in unique_mac_addresses)
|
placeholders = ', '.join('?' for _ in unique_mac_addresses)
|
||||||
cursor.execute(f'SELECT devMac FROM Devices WHERE devMac IN ({placeholders})', tuple(unique_mac_addresses))
|
cursor.execute(f'SELECT devMac FROM Devices WHERE devMac IN ({placeholders})', tuple(unique_mac_addresses))
|
||||||
existing_mac_addresses = set(row[0] for row in cursor.fetchall())
|
existing_mac_addresses = set(row[0] for row in cursor.fetchall())
|
||||||
|
|
||||||
|
|
||||||
# insert devices into the last_result.log and thus CurrentScan table to manage state
|
# insert devices into the last_result.log and thus CurrentScan table to manage state
|
||||||
for device in device_data:
|
for device in device_data:
|
||||||
@@ -228,7 +225,7 @@ def main():
|
|||||||
# Filter out existing devices
|
# Filter out existing devices
|
||||||
new_devices = [device for device in device_data if device['devMac'] not in existing_mac_addresses]
|
new_devices = [device for device in device_data if device['devMac'] not in existing_mac_addresses]
|
||||||
|
|
||||||
# Remove 'rowid' key if it exists
|
# Remove 'rowid' key if it exists
|
||||||
for device in new_devices:
|
for device in new_devices:
|
||||||
device.pop('rowid', None)
|
device.pop('rowid', None)
|
||||||
device.pop('devStatus', None)
|
device.pop('devStatus', None)
|
||||||
@@ -257,7 +254,6 @@ def main():
|
|||||||
|
|
||||||
mylog('verbose', [message])
|
mylog('verbose', [message])
|
||||||
write_notification(message, 'info', timeNowDB())
|
write_notification(message, 'info', timeNowDB())
|
||||||
|
|
||||||
|
|
||||||
# Commit and close the connection
|
# Commit and close the connection
|
||||||
conn.commit()
|
conn.commit()
|
||||||
@@ -268,6 +264,7 @@ def main():
|
|||||||
|
|
||||||
return 0
|
return 0
|
||||||
|
|
||||||
|
|
||||||
# ------------------------------------------------------------------
|
# ------------------------------------------------------------------
|
||||||
# Data retrieval methods
|
# Data retrieval methods
|
||||||
api_endpoints = [
|
api_endpoints = [
|
||||||
@@ -275,6 +272,7 @@ api_endpoints = [
|
|||||||
"/plugins/sync/hub.php" # Legacy PHP endpoint
|
"/plugins/sync/hub.php" # Legacy PHP endpoint
|
||||||
]
|
]
|
||||||
|
|
||||||
|
|
||||||
# send data to the HUB
|
# send data to the HUB
|
||||||
def send_data(api_token, file_content, encryption_key, file_path, node_name, pref, hub_url):
|
def send_data(api_token, file_content, encryption_key, file_path, node_name, pref, hub_url):
|
||||||
"""Send encrypted data to HUB, preferring /sync endpoint and falling back to PHP version."""
|
"""Send encrypted data to HUB, preferring /sync endpoint and falling back to PHP version."""
|
||||||
@@ -345,6 +343,5 @@ def get_data(api_token, node_url):
|
|||||||
return ""
|
return ""
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
if __name__ == '__main__':
|
if __name__ == '__main__':
|
||||||
main()
|
main()
|
||||||
|
|||||||
@@ -10,12 +10,11 @@ from unifi_sm_api.api import SiteManagerAPI
|
|||||||
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
|
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
|
||||||
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
|
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
|
||||||
|
|
||||||
from plugin_helper import Plugin_Objects, decode_settings_base64
|
from plugin_helper import Plugin_Objects, decode_settings_base64 # noqa: E402 [flake8 lint suppression]
|
||||||
from logger import mylog, Logger
|
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
|
||||||
from const import logPath
|
from const import logPath # noqa: E402 [flake8 lint suppression]
|
||||||
from helper import get_setting_value
|
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
|
||||||
|
import conf # noqa: E402 [flake8 lint suppression]
|
||||||
import conf
|
|
||||||
|
|
||||||
# Make sure the TIMEZONE for logging is correct
|
# Make sure the TIMEZONE for logging is correct
|
||||||
conf.tz = timezone(get_setting_value('TIMEZONE'))
|
conf.tz = timezone(get_setting_value('TIMEZONE'))
|
||||||
@@ -35,13 +34,13 @@ plugin_objects = Plugin_Objects(RESULT_FILE)
|
|||||||
|
|
||||||
|
|
||||||
def main():
|
def main():
|
||||||
mylog('verbose', [f'[{pluginName}] In script'])
|
mylog('verbose', [f'[{pluginName}] In script'])
|
||||||
|
|
||||||
# Retrieve configuration settings
|
# Retrieve configuration settings
|
||||||
unifi_sites_configs = get_setting_value('UNIFIAPI_sites')
|
unifi_sites_configs = get_setting_value('UNIFIAPI_sites')
|
||||||
|
|
||||||
mylog('verbose', [f'[{pluginName}] number of unifi_sites_configs: {len(unifi_sites_configs)}'])
|
mylog('verbose', [f'[{pluginName}] number of unifi_sites_configs: {len(unifi_sites_configs)}'])
|
||||||
|
|
||||||
for site_config in unifi_sites_configs:
|
for site_config in unifi_sites_configs:
|
||||||
|
|
||||||
siteDict = decode_settings_base64(site_config)
|
siteDict = decode_settings_base64(site_config)
|
||||||
@@ -50,11 +49,11 @@ def main():
|
|||||||
mylog('none', [f'[{pluginName}] Connecting to: {siteDict["UNIFIAPI_site_name"]}'])
|
mylog('none', [f'[{pluginName}] Connecting to: {siteDict["UNIFIAPI_site_name"]}'])
|
||||||
|
|
||||||
api = SiteManagerAPI(
|
api = SiteManagerAPI(
|
||||||
api_key=siteDict["UNIFIAPI_api_key"],
|
api_key=siteDict["UNIFIAPI_api_key"],
|
||||||
version=siteDict["UNIFIAPI_api_version"],
|
version=siteDict["UNIFIAPI_api_version"],
|
||||||
base_url=siteDict["UNIFIAPI_base_url"],
|
base_url=siteDict["UNIFIAPI_base_url"],
|
||||||
verify_ssl=siteDict["UNIFIAPI_verify_ssl"]
|
verify_ssl=siteDict["UNIFIAPI_verify_ssl"]
|
||||||
)
|
)
|
||||||
|
|
||||||
sites_resp = api.get_sites()
|
sites_resp = api.get_sites()
|
||||||
sites = sites_resp.get("data", [])
|
sites = sites_resp.get("data", [])
|
||||||
@@ -67,18 +66,18 @@ def main():
|
|||||||
# Process the data into native application tables
|
# Process the data into native application tables
|
||||||
if len(device_data) > 0:
|
if len(device_data) > 0:
|
||||||
|
|
||||||
# insert devices into the lats_result.log
|
# insert devices into the lats_result.log
|
||||||
for device in device_data:
|
for device in device_data:
|
||||||
plugin_objects.add_object(
|
plugin_objects.add_object(
|
||||||
primaryId = device['dev_mac'], # mac
|
primaryId = device['dev_mac'], # mac
|
||||||
secondaryId = device['dev_ip'], # IP
|
secondaryId = device['dev_ip'], # IP
|
||||||
watched1 = device['dev_name'], # name
|
watched1 = device['dev_name'], # name
|
||||||
watched2 = device['dev_type'], # device_type (AP/Switch etc)
|
watched2 = device['dev_type'], # device_type (AP/Switch etc)
|
||||||
watched3 = device['dev_connected'], # connectedAt or empty
|
watched3 = device['dev_connected'], # connectedAt or empty
|
||||||
watched4 = device['dev_parent_mac'],# parent_mac or "Internet"
|
watched4 = device['dev_parent_mac'], # parent_mac or "Internet"
|
||||||
extra = '',
|
extra = '',
|
||||||
foreignKey = device['dev_mac']
|
foreignKey = device['dev_mac']
|
||||||
)
|
)
|
||||||
|
|
||||||
mylog('verbose', [f'[{pluginName}] New entries: "{len(device_data)}"'])
|
mylog('verbose', [f'[{pluginName}] New entries: "{len(device_data)}"'])
|
||||||
|
|
||||||
@@ -87,6 +86,7 @@ def main():
|
|||||||
|
|
||||||
return 0
|
return 0
|
||||||
|
|
||||||
|
|
||||||
# retrieve data
|
# retrieve data
|
||||||
def get_device_data(site, api):
|
def get_device_data(site, api):
|
||||||
device_data = []
|
device_data = []
|
||||||
@@ -146,8 +146,8 @@ def get_device_data(site, api):
|
|||||||
dev_parent_mac = resolve_parent_mac(uplinkDeviceId)
|
dev_parent_mac = resolve_parent_mac(uplinkDeviceId)
|
||||||
|
|
||||||
device_data.append({
|
device_data.append({
|
||||||
"dev_mac": dev_mac,
|
"dev_mac": dev_mac,
|
||||||
"dev_ip": dev_ip,
|
"dev_ip": dev_ip,
|
||||||
"dev_name": dev_name,
|
"dev_name": dev_name,
|
||||||
"dev_type": dev_type,
|
"dev_type": dev_type,
|
||||||
"dev_connected": dev_connected,
|
"dev_connected": dev_connected,
|
||||||
|
|||||||
@@ -14,12 +14,12 @@ from pyunifi.controller import Controller
|
|||||||
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
|
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
|
||||||
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
|
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
|
||||||
|
|
||||||
from plugin_helper import Plugin_Objects, rmBadChars, is_typical_router_ip, is_mac
|
from plugin_helper import Plugin_Objects, rmBadChars, is_typical_router_ip, is_mac # noqa: E402 [flake8 lint suppression]
|
||||||
from logger import mylog, Logger
|
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
|
||||||
from helper import get_setting_value, normalize_string
|
from helper import get_setting_value, normalize_string # noqa: E402 [flake8 lint suppression]
|
||||||
import conf
|
import conf # noqa: E402 [flake8 lint suppression]
|
||||||
from pytz import timezone
|
from pytz import timezone # noqa: E402 [flake8 lint suppression]
|
||||||
from const import logPath
|
from const import logPath # noqa: E402 [flake8 lint suppression]
|
||||||
|
|
||||||
# Make sure the TIMEZONE for logging is correct
|
# Make sure the TIMEZONE for logging is correct
|
||||||
conf.tz = timezone(get_setting_value('TIMEZONE'))
|
conf.tz = timezone(get_setting_value('TIMEZONE'))
|
||||||
@@ -37,21 +37,16 @@ LOCK_FILE = os.path.join(LOG_PATH, f'full_run.{pluginName}.lock')
|
|||||||
urllib3.disable_warnings(InsecureRequestWarning)
|
urllib3.disable_warnings(InsecureRequestWarning)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
# Workflow
|
|
||||||
|
|
||||||
def main():
|
def main():
|
||||||
|
|
||||||
mylog('verbose', [f'[{pluginName}] In script'])
|
mylog('verbose', [f'[{pluginName}] In script'])
|
||||||
|
|
||||||
|
|
||||||
# init global variables
|
# init global variables
|
||||||
global UNIFI_USERNAME, UNIFI_PASSWORD, UNIFI_HOST, UNIFI_SITES, PORT, VERIFYSSL, VERSION, FULL_IMPORT
|
global UNIFI_USERNAME, UNIFI_PASSWORD, UNIFI_HOST, UNIFI_SITES, PORT, VERIFYSSL, VERSION, FULL_IMPORT
|
||||||
|
|
||||||
# parse output
|
# parse output
|
||||||
plugin_objects = Plugin_Objects(RESULT_FILE)
|
plugin_objects = Plugin_Objects(RESULT_FILE)
|
||||||
|
|
||||||
UNIFI_USERNAME = get_setting_value("UNFIMP_username")
|
UNIFI_USERNAME = get_setting_value("UNFIMP_username")
|
||||||
UNIFI_PASSWORD = get_setting_value("UNFIMP_password")
|
UNIFI_PASSWORD = get_setting_value("UNFIMP_password")
|
||||||
UNIFI_HOST = get_setting_value("UNFIMP_host")
|
UNIFI_HOST = get_setting_value("UNFIMP_host")
|
||||||
@@ -64,12 +59,11 @@ def main():
|
|||||||
plugin_objects = get_entries(plugin_objects)
|
plugin_objects = get_entries(plugin_objects)
|
||||||
|
|
||||||
plugin_objects.write_result_file()
|
plugin_objects.write_result_file()
|
||||||
|
|
||||||
|
|
||||||
mylog('verbose', [f'[{pluginName}] Scan finished, found {len(plugin_objects)} devices'])
|
mylog('verbose', [f'[{pluginName}] Scan finished, found {len(plugin_objects)} devices'])
|
||||||
|
|
||||||
# .............................................
|
|
||||||
|
|
||||||
|
# .............................................
|
||||||
def get_entries(plugin_objects: Plugin_Objects) -> Plugin_Objects:
|
def get_entries(plugin_objects: Plugin_Objects) -> Plugin_Objects:
|
||||||
global VERIFYSSL
|
global VERIFYSSL
|
||||||
|
|
||||||
@@ -79,27 +73,26 @@ def get_entries(plugin_objects: Plugin_Objects) -> Plugin_Objects:
|
|||||||
|
|
||||||
mylog('verbose', [f'[{pluginName}] sites: {UNIFI_SITES}'])
|
mylog('verbose', [f'[{pluginName}] sites: {UNIFI_SITES}'])
|
||||||
|
|
||||||
|
|
||||||
if (VERIFYSSL.upper() == "TRUE"):
|
if (VERIFYSSL.upper() == "TRUE"):
|
||||||
VERIFYSSL = True
|
VERIFYSSL = True
|
||||||
else:
|
else:
|
||||||
VERIFYSSL = False
|
VERIFYSSL = False
|
||||||
|
|
||||||
# mylog('verbose', [f'[{pluginName}] sites: {sites}'])
|
# mylog('verbose', [f'[{pluginName}] sites: {sites}'])
|
||||||
|
|
||||||
for site in UNIFI_SITES:
|
for site in UNIFI_SITES:
|
||||||
|
|
||||||
mylog('verbose', [f'[{pluginName}] site: {site}'])
|
mylog('verbose', [f'[{pluginName}] site: {site}'])
|
||||||
|
|
||||||
c = Controller(
|
c = Controller(
|
||||||
UNIFI_HOST,
|
UNIFI_HOST,
|
||||||
UNIFI_USERNAME,
|
UNIFI_USERNAME,
|
||||||
UNIFI_PASSWORD,
|
UNIFI_PASSWORD,
|
||||||
port=PORT,
|
port=PORT,
|
||||||
version=VERSION,
|
version=VERSION,
|
||||||
ssl_verify=VERIFYSSL,
|
ssl_verify=VERIFYSSL,
|
||||||
site_id=site)
|
site_id=site)
|
||||||
|
|
||||||
online_macs = set()
|
online_macs = set()
|
||||||
processed_macs = []
|
processed_macs = []
|
||||||
|
|
||||||
@@ -114,7 +107,7 @@ def get_entries(plugin_objects: Plugin_Objects) -> Plugin_Objects:
|
|||||||
plugin_objects=plugin_objects,
|
plugin_objects=plugin_objects,
|
||||||
device_label='client',
|
device_label='client',
|
||||||
device_vendor="",
|
device_vendor="",
|
||||||
force_import=True # These are online clients, force import
|
force_import=True # These are online clients, force import
|
||||||
)
|
)
|
||||||
|
|
||||||
mylog('verbose', [f'[{pluginName}] Found {len(plugin_objects)} Online Devices'])
|
mylog('verbose', [f'[{pluginName}] Found {len(plugin_objects)} Online Devices'])
|
||||||
@@ -154,11 +147,9 @@ def get_entries(plugin_objects: Plugin_Objects) -> Plugin_Objects:
|
|||||||
|
|
||||||
mylog('verbose', [f'[{pluginName}] Found {len(plugin_objects)} Users'])
|
mylog('verbose', [f'[{pluginName}] Found {len(plugin_objects)} Users'])
|
||||||
|
|
||||||
|
|
||||||
mylog('verbose', [f'[{pluginName}] check if Lock file needs to be modified'])
|
mylog('verbose', [f'[{pluginName}] check if Lock file needs to be modified'])
|
||||||
set_lock_file_value(FULL_IMPORT, lock_file_value)
|
set_lock_file_value(FULL_IMPORT, lock_file_value)
|
||||||
|
|
||||||
|
|
||||||
mylog('verbose', [f'[{pluginName}] Found {len(plugin_objects)} Clients overall'])
|
mylog('verbose', [f'[{pluginName}] Found {len(plugin_objects)} Clients overall'])
|
||||||
|
|
||||||
return plugin_objects
|
return plugin_objects
|
||||||
@@ -173,19 +164,19 @@ def collect_details(device_type, devices, online_macs, processed_macs, plugin_ob
|
|||||||
name = get_name(get_unifi_val(device, 'name'), get_unifi_val(device, 'hostname'))
|
name = get_name(get_unifi_val(device, 'name'), get_unifi_val(device, 'hostname'))
|
||||||
ipTmp = get_ip(get_unifi_val(device, 'lan_ip'), get_unifi_val(device, 'last_ip'), get_unifi_val(device, 'fixed_ip'), get_unifi_val(device, 'ip'))
|
ipTmp = get_ip(get_unifi_val(device, 'lan_ip'), get_unifi_val(device, 'last_ip'), get_unifi_val(device, 'fixed_ip'), get_unifi_val(device, 'ip'))
|
||||||
macTmp = device['mac']
|
macTmp = device['mac']
|
||||||
|
|
||||||
# continue only if valid MAC address
|
# continue only if valid MAC address
|
||||||
if is_mac(macTmp):
|
if is_mac(macTmp):
|
||||||
status = 1 if macTmp in online_macs else device.get('state', 0)
|
status = 1 if macTmp in online_macs else device.get('state', 0)
|
||||||
deviceType = device_type.get(device.get('type'), '')
|
deviceType = device_type.get(device.get('type'), '')
|
||||||
parentMac = get_parent_mac(get_unifi_val(device, 'uplink_mac'), get_unifi_val(device, 'ap_mac'), get_unifi_val(device, 'sw_mac'))
|
parentMac = get_parent_mac(get_unifi_val(device, 'uplink_mac'), get_unifi_val(device, 'ap_mac'), get_unifi_val(device, 'sw_mac'))
|
||||||
|
|
||||||
# override parent MAC if this is a router
|
# override parent MAC if this is a router
|
||||||
if parentMac == 'null' and is_typical_router_ip(ipTmp):
|
if parentMac == 'null' and is_typical_router_ip(ipTmp):
|
||||||
parentMac = 'Internet'
|
parentMac = 'Internet'
|
||||||
|
|
||||||
# Add object only if not processed
|
# Add object only if not processed
|
||||||
if macTmp not in processed_macs and ( status == 1 or force_import is True ):
|
if macTmp not in processed_macs and (status == 1 or force_import is True):
|
||||||
plugin_objects.add_object(
|
plugin_objects.add_object(
|
||||||
primaryId=macTmp,
|
primaryId=macTmp,
|
||||||
secondaryId=ipTmp,
|
secondaryId=ipTmp,
|
||||||
@@ -203,7 +194,8 @@ def collect_details(device_type, devices, online_macs, processed_macs, plugin_ob
|
|||||||
processed_macs.append(macTmp)
|
processed_macs.append(macTmp)
|
||||||
else:
|
else:
|
||||||
mylog('verbose', [f'[{pluginName}] Skipping, not a valid MAC address: {macTmp}'])
|
mylog('verbose', [f'[{pluginName}] Skipping, not a valid MAC address: {macTmp}'])
|
||||||
|
|
||||||
|
|
||||||
# -----------------------------------------------------------------------------
|
# -----------------------------------------------------------------------------
|
||||||
def get_unifi_val(obj, key, default='null'):
|
def get_unifi_val(obj, key, default='null'):
|
||||||
if isinstance(obj, dict):
|
if isinstance(obj, dict):
|
||||||
@@ -212,9 +204,9 @@ def get_unifi_val(obj, key, default='null'):
|
|||||||
for k, v in obj.items():
|
for k, v in obj.items():
|
||||||
if isinstance(v, dict):
|
if isinstance(v, dict):
|
||||||
result = get_unifi_val(v, key, default)
|
result = get_unifi_val(v, key, default)
|
||||||
if result not in ['','None', None, 'null']:
|
if result not in ['', 'None', None, 'null']:
|
||||||
return result
|
return result
|
||||||
|
|
||||||
mylog('trace', [f'[{pluginName}] Value not found for key "{key}" in obj "{json.dumps(obj)}"'])
|
mylog('trace', [f'[{pluginName}] Value not found for key "{key}" in obj "{json.dumps(obj)}"'])
|
||||||
return default
|
return default
|
||||||
|
|
||||||
@@ -226,6 +218,7 @@ def get_name(*names: str) -> str:
|
|||||||
return rmBadChars(name)
|
return rmBadChars(name)
|
||||||
return 'null'
|
return 'null'
|
||||||
|
|
||||||
|
|
||||||
# -----------------------------------------------------------------------------
|
# -----------------------------------------------------------------------------
|
||||||
def get_parent_mac(*macs: str) -> str:
|
def get_parent_mac(*macs: str) -> str:
|
||||||
for mac in macs:
|
for mac in macs:
|
||||||
@@ -233,6 +226,7 @@ def get_parent_mac(*macs: str) -> str:
|
|||||||
return mac
|
return mac
|
||||||
return 'null'
|
return 'null'
|
||||||
|
|
||||||
|
|
||||||
# -----------------------------------------------------------------------------
|
# -----------------------------------------------------------------------------
|
||||||
def get_port(*ports: str) -> str:
|
def get_port(*ports: str) -> str:
|
||||||
for port in ports:
|
for port in ports:
|
||||||
@@ -240,12 +234,6 @@ def get_port(*ports: str) -> str:
|
|||||||
return port
|
return port
|
||||||
return 'null'
|
return 'null'
|
||||||
|
|
||||||
# -----------------------------------------------------------------------------
|
|
||||||
def get_port(*macs: str) -> str:
|
|
||||||
for mac in macs:
|
|
||||||
if mac and mac != 'null':
|
|
||||||
return mac
|
|
||||||
return 'null'
|
|
||||||
|
|
||||||
# -----------------------------------------------------------------------------
|
# -----------------------------------------------------------------------------
|
||||||
def get_ip(*ips: str) -> str:
|
def get_ip(*ips: str) -> str:
|
||||||
@@ -271,7 +259,7 @@ def set_lock_file_value(config_value: str, lock_file_value: bool) -> None:
|
|||||||
|
|
||||||
mylog('verbose', [f'[{pluginName}] Setting lock value for "full import" to {out}'])
|
mylog('verbose', [f'[{pluginName}] Setting lock value for "full import" to {out}'])
|
||||||
with open(LOCK_FILE, 'w') as lock_file:
|
with open(LOCK_FILE, 'w') as lock_file:
|
||||||
lock_file.write(str(out))
|
lock_file.write(str(out))
|
||||||
|
|
||||||
|
|
||||||
# -----------------------------------------------------------------------------
|
# -----------------------------------------------------------------------------
|
||||||
@@ -286,15 +274,16 @@ def read_lock_file() -> bool:
|
|||||||
|
|
||||||
# -----------------------------------------------------------------------------
|
# -----------------------------------------------------------------------------
|
||||||
def check_full_run_state(config_value: str, lock_file_value: bool) -> bool:
|
def check_full_run_state(config_value: str, lock_file_value: bool) -> bool:
|
||||||
if config_value == 'always' or (config_value == 'once' and lock_file_value == False):
|
if config_value == 'always' or (config_value == 'once' and lock_file_value is False):
|
||||||
mylog('verbose', [f'[{pluginName}] Full import needs to be done: config_value: {config_value} and lock_file_value: {lock_file_value}'])
|
mylog('verbose', [f'[{pluginName}] Full import needs to be done: config_value: {config_value} and lock_file_value: {lock_file_value}'])
|
||||||
return True
|
return True
|
||||||
else:
|
else:
|
||||||
mylog('verbose', [f'[{pluginName}] Full import NOT needed: config_value: {config_value} and lock_file_value: {lock_file_value}'])
|
mylog('verbose', [f'[{pluginName}] Full import NOT needed: config_value: {config_value} and lock_file_value: {lock_file_value}'])
|
||||||
return False
|
return False
|
||||||
|
|
||||||
#===============================================================================
|
|
||||||
|
# ===============================================================================
|
||||||
# BEGIN
|
# BEGIN
|
||||||
#===============================================================================
|
# ===============================================================================
|
||||||
if __name__ == '__main__':
|
if __name__ == '__main__':
|
||||||
main()
|
main()
|
||||||
|
|||||||
@@ -9,13 +9,13 @@ import sqlite3
|
|||||||
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
|
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
|
||||||
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
|
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
|
||||||
|
|
||||||
from plugin_helper import Plugin_Objects, handleEmpty
|
from plugin_helper import Plugin_Objects, handleEmpty # noqa: E402 [flake8 lint suppression]
|
||||||
from logger import mylog, Logger
|
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
|
||||||
from helper import get_setting_value
|
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
|
||||||
from const import logPath, applicationPath, fullDbPath
|
from const import logPath, applicationPath, fullDbPath # noqa: E402 [flake8 lint suppression]
|
||||||
from scan.device_handling import query_MAC_vendor
|
from scan.device_handling import query_MAC_vendor # noqa: E402 [flake8 lint suppression]
|
||||||
import conf
|
import conf # noqa: E402 [flake8 lint suppression]
|
||||||
from pytz import timezone
|
from pytz import timezone # noqa: E402 [flake8 lint suppression]
|
||||||
|
|
||||||
# Make sure the TIMEZONE for logging is correct
|
# Make sure the TIMEZONE for logging is correct
|
||||||
conf.tz = timezone(get_setting_value('TIMEZONE'))
|
conf.tz = timezone(get_setting_value('TIMEZONE'))
|
||||||
@@ -25,17 +25,17 @@ Logger(get_setting_value('LOG_LEVEL'))
|
|||||||
|
|
||||||
pluginName = 'VNDRPDT'
|
pluginName = 'VNDRPDT'
|
||||||
|
|
||||||
|
|
||||||
LOG_PATH = logPath + '/plugins'
|
LOG_PATH = logPath + '/plugins'
|
||||||
LOG_FILE = os.path.join(LOG_PATH, f'script.{pluginName}.log')
|
LOG_FILE = os.path.join(LOG_PATH, f'script.{pluginName}.log')
|
||||||
RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
|
RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
|
||||||
|
|
||||||
def main():
|
|
||||||
|
|
||||||
mylog('verbose', ['[VNDRPDT] In script'])
|
def main():
|
||||||
|
|
||||||
# Get newest DB
|
mylog('verbose', ['[VNDRPDT] In script'])
|
||||||
update_vendor_database()
|
|
||||||
|
# Get newest DB
|
||||||
|
update_vendor_database()
|
||||||
|
|
||||||
# Resolve missing vendors
|
# Resolve missing vendors
|
||||||
plugin_objects = Plugin_Objects(RESULT_FILE)
|
plugin_objects = Plugin_Objects(RESULT_FILE)
|
||||||
@@ -43,67 +43,67 @@ def main():
|
|||||||
plugin_objects = update_vendors(fullDbPath, plugin_objects)
|
plugin_objects = update_vendors(fullDbPath, plugin_objects)
|
||||||
|
|
||||||
plugin_objects.write_result_file()
|
plugin_objects.write_result_file()
|
||||||
|
|
||||||
mylog('verbose', ['[VNDRPDT] Update complete'])
|
mylog('verbose', ['[VNDRPDT] Update complete'])
|
||||||
|
|
||||||
return 0
|
return 0
|
||||||
|
|
||||||
#===============================================================================
|
|
||||||
|
# ===============================================================================
|
||||||
# Update device vendors database
|
# Update device vendors database
|
||||||
#===============================================================================
|
# ===============================================================================
|
||||||
def update_vendor_database():
|
def update_vendor_database():
|
||||||
|
|
||||||
# Update vendors DB (iab oui)
|
# Update vendors DB (iab oui)
|
||||||
mylog('verbose', [' Updating vendors DB (iab & oui)'])
|
mylog('verbose', [' Updating vendors DB (iab & oui)'])
|
||||||
update_args = ['sh', applicationPath + '/services/update_vendors.sh']
|
update_args = ['sh', applicationPath + '/services/update_vendors.sh']
|
||||||
|
|
||||||
# Execute command
|
# Execute command
|
||||||
try:
|
try:
|
||||||
# try runnning a subprocess safely
|
# try runnning a subprocess safely
|
||||||
update_output = subprocess.check_output (update_args)
|
subprocess.check_output(update_args)
|
||||||
except subprocess.CalledProcessError as e:
|
except subprocess.CalledProcessError as e:
|
||||||
# An error occured, handle it
|
# An error occured, handle it
|
||||||
mylog('verbose', [' FAILED: Updating vendors DB, set LOG_LEVEL=debug for more info'])
|
mylog('verbose', [' FAILED: Updating vendors DB, set LOG_LEVEL=debug for more info'])
|
||||||
mylog('verbose', [e.output])
|
mylog('verbose', [e.output])
|
||||||
|
|
||||||
|
|
||||||
# ------------------------------------------------------------------------------
|
# ------------------------------------------------------------------------------
|
||||||
# resolve missing vendors
|
# resolve missing vendors
|
||||||
def update_vendors (dbPath, plugin_objects):
|
def update_vendors(dbPath, plugin_objects):
|
||||||
|
|
||||||
# Connect to the App SQLite database
|
# Connect to the App SQLite database
|
||||||
conn = sqlite3.connect(dbPath)
|
conn = sqlite3.connect(dbPath)
|
||||||
sql = conn.cursor()
|
sql = conn.cursor()
|
||||||
|
|
||||||
# Initialize variables
|
# Initialize variables
|
||||||
recordsToUpdate = []
|
|
||||||
ignored = 0
|
ignored = 0
|
||||||
notFound = 0
|
notFound = 0
|
||||||
|
|
||||||
|
mylog('verbose', [' Searching devices vendor'])
|
||||||
mylog('verbose', [' Searching devices vendor'])
|
|
||||||
|
|
||||||
# Get devices without a vendor
|
# Get devices without a vendor
|
||||||
sql.execute ("""SELECT
|
sql.execute("""SELECT
|
||||||
devMac,
|
devMac,
|
||||||
devLastIP,
|
devLastIP,
|
||||||
devName,
|
devName,
|
||||||
devVendor
|
devVendor
|
||||||
FROM Devices
|
FROM Devices
|
||||||
WHERE devVendor = '(unknown)'
|
WHERE devVendor = '(unknown)'
|
||||||
OR devVendor = '(Unknown)'
|
OR devVendor = '(Unknown)'
|
||||||
OR devVendor = ''
|
OR devVendor = ''
|
||||||
OR devVendor IS NULL
|
OR devVendor IS NULL
|
||||||
""")
|
""")
|
||||||
devices = sql.fetchall()
|
devices = sql.fetchall()
|
||||||
conn.commit()
|
conn.commit()
|
||||||
|
|
||||||
# Close the database connection
|
# Close the database connection
|
||||||
conn.close()
|
conn.close()
|
||||||
|
|
||||||
# All devices loop
|
# All devices loop
|
||||||
for device in devices:
|
for device in devices:
|
||||||
# Search vendor in HW Vendors DB
|
# Search vendor in HW Vendors DB
|
||||||
vendor = query_MAC_vendor (device[0])
|
vendor = query_MAC_vendor(device[0])
|
||||||
if vendor == -1 :
|
if vendor == -1 :
|
||||||
notFound += 1
|
notFound += 1
|
||||||
elif vendor == -2 :
|
elif vendor == -2 :
|
||||||
@@ -112,27 +112,25 @@ def update_vendors (dbPath, plugin_objects):
|
|||||||
plugin_objects.add_object(
|
plugin_objects.add_object(
|
||||||
primaryId = handleEmpty(device[0]), # MAC (Device Name)
|
primaryId = handleEmpty(device[0]), # MAC (Device Name)
|
||||||
secondaryId = handleEmpty(device[1]), # IP Address (always 0.0.0.0)
|
secondaryId = handleEmpty(device[1]), # IP Address (always 0.0.0.0)
|
||||||
watched1 = handleEmpty(vendor),
|
watched1 = handleEmpty(vendor),
|
||||||
watched2 = handleEmpty(device[2]), # Device name
|
watched2 = handleEmpty(device[2]), # Device name
|
||||||
watched3 = "",
|
watched3 = "",
|
||||||
watched4 = "",
|
watched4 = "",
|
||||||
extra = "",
|
extra = "",
|
||||||
foreignKey = handleEmpty(device[0])
|
foreignKey = handleEmpty(device[0])
|
||||||
)
|
)
|
||||||
|
|
||||||
# Print log
|
# Print log
|
||||||
mylog('verbose', [" Devices Ignored : ", ignored])
|
mylog('verbose', [" Devices Ignored : ", ignored])
|
||||||
mylog('verbose', [" Devices with missing vendor : ", len(devices)])
|
mylog('verbose', [" Devices with missing vendor : ", len(devices)])
|
||||||
mylog('verbose', [" Vendors Not Found : ", notFound])
|
mylog('verbose', [" Vendors Not Found : ", notFound])
|
||||||
mylog('verbose', [" Vendors updated : ", len(plugin_objects) ])
|
mylog('verbose', [" Vendors updated : ", len(plugin_objects)])
|
||||||
|
|
||||||
|
|
||||||
return plugin_objects
|
return plugin_objects
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
#===============================================================================
|
# ===============================================================================
|
||||||
# BEGIN
|
# BEGIN
|
||||||
#===============================================================================
|
# ===============================================================================
|
||||||
if __name__ == '__main__':
|
if __name__ == '__main__':
|
||||||
main()
|
main()
|
||||||
|
|||||||
@@ -9,13 +9,13 @@ from wakeonlan import send_magic_packet
|
|||||||
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
|
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
|
||||||
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
|
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
|
||||||
|
|
||||||
from plugin_helper import Plugin_Objects
|
from plugin_helper import Plugin_Objects # noqa: E402 [flake8 lint suppression]
|
||||||
from logger import mylog, Logger
|
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
|
||||||
from const import logPath
|
from const import logPath # noqa: E402 [flake8 lint suppression]
|
||||||
from helper import get_setting_value
|
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
|
||||||
from database import DB
|
from database import DB # noqa: E402 [flake8 lint suppression]
|
||||||
from models.device_instance import DeviceInstance
|
from models.device_instance import DeviceInstance # noqa: E402 [flake8 lint suppression]
|
||||||
import conf
|
import conf # noqa: E402 [flake8 lint suppression]
|
||||||
|
|
||||||
# Make sure the TIMEZONE for logging is correct
|
# Make sure the TIMEZONE for logging is correct
|
||||||
conf.tz = timezone(get_setting_value('TIMEZONE'))
|
conf.tz = timezone(get_setting_value('TIMEZONE'))
|
||||||
@@ -34,9 +34,8 @@ RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
|
|||||||
plugin_objects = Plugin_Objects(RESULT_FILE)
|
plugin_objects = Plugin_Objects(RESULT_FILE)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
def main():
|
def main():
|
||||||
mylog('none', [f'[{pluginName}] In script'])
|
mylog('none', [f'[{pluginName}] In script'])
|
||||||
|
|
||||||
# Retrieve configuration settings
|
# Retrieve configuration settings
|
||||||
broadcast_ips = get_setting_value('WOL_broadcast_ips')
|
broadcast_ips = get_setting_value('WOL_broadcast_ips')
|
||||||
@@ -58,7 +57,7 @@ def main():
|
|||||||
devices_to_wake = device_handler.getOffline()
|
devices_to_wake = device_handler.getOffline()
|
||||||
|
|
||||||
elif 'down' in devices_to_wake:
|
elif 'down' in devices_to_wake:
|
||||||
|
|
||||||
devices_to_wake = device_handler.getDown()
|
devices_to_wake = device_handler.getDown()
|
||||||
|
|
||||||
else:
|
else:
|
||||||
@@ -89,15 +88,16 @@ def main():
|
|||||||
# log result
|
# log result
|
||||||
plugin_objects.write_result_file()
|
plugin_objects.write_result_file()
|
||||||
else:
|
else:
|
||||||
mylog('none', [f'[{pluginName}] No devices to wake'])
|
mylog('none', [f'[{pluginName}] No devices to wake'])
|
||||||
|
|
||||||
mylog('none', [f'[{pluginName}] Script finished'])
|
mylog('none', [f'[{pluginName}] Script finished'])
|
||||||
|
|
||||||
return 0
|
return 0
|
||||||
|
|
||||||
|
|
||||||
# wake
|
# wake
|
||||||
def execute(port, ip, mac, name):
|
def execute(port, ip, mac, name):
|
||||||
|
|
||||||
result = 'null'
|
result = 'null'
|
||||||
try:
|
try:
|
||||||
# Send the magic packet to wake up the device
|
# Send the magic packet to wake up the device
|
||||||
@@ -105,7 +105,7 @@ def execute(port, ip, mac, name):
|
|||||||
mylog('verbose', [f'[{pluginName}] Magic packet sent to {mac} ({name})'])
|
mylog('verbose', [f'[{pluginName}] Magic packet sent to {mac} ({name})'])
|
||||||
|
|
||||||
result = 'success'
|
result = 'success'
|
||||||
|
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
result = str(e)
|
result = str(e)
|
||||||
mylog('verbose', [f'[{pluginName}] Failed to send magic packet to {mac} ({name}): {e}'])
|
mylog('verbose', [f'[{pluginName}] Failed to send magic packet to {mac} ({name}): {e}'])
|
||||||
@@ -113,5 +113,6 @@ def execute(port, ip, mac, name):
|
|||||||
# Return the data result
|
# Return the data result
|
||||||
return result
|
return result
|
||||||
|
|
||||||
|
|
||||||
if __name__ == '__main__':
|
if __name__ == '__main__':
|
||||||
main()
|
main()
|
||||||
|
|||||||
@@ -12,12 +12,12 @@ from urllib3.exceptions import InsecureRequestWarning
|
|||||||
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
|
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
|
||||||
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
|
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
|
||||||
|
|
||||||
from plugin_helper import Plugin_Objects
|
from plugin_helper import Plugin_Objects # noqa: E402 [flake8 lint suppression]
|
||||||
from const import logPath
|
from const import logPath # noqa: E402 [flake8 lint suppression]
|
||||||
from helper import get_setting_value
|
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
|
||||||
import conf
|
import conf # noqa: E402 [flake8 lint suppression]
|
||||||
from pytz import timezone
|
from pytz import timezone # noqa: E402 [flake8 lint suppression]
|
||||||
from logger import mylog, Logger
|
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
|
||||||
|
|
||||||
# Make sure the TIMEZONE for logging is correct
|
# Make sure the TIMEZONE for logging is correct
|
||||||
conf.tz = timezone(get_setting_value('TIMEZONE'))
|
conf.tz = timezone(get_setting_value('TIMEZONE'))
|
||||||
@@ -30,15 +30,14 @@ pluginName = 'WEBMON'
|
|||||||
LOG_PATH = logPath + '/plugins'
|
LOG_PATH = logPath + '/plugins'
|
||||||
RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
|
RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
|
||||||
|
|
||||||
|
|
||||||
mylog('verbose', [f'[{pluginName}] In script'])
|
mylog('verbose', [f'[{pluginName}] In script'])
|
||||||
|
|
||||||
|
|
||||||
def main():
|
def main():
|
||||||
|
|
||||||
values = get_setting_value('WEBMON_urls_to_check')
|
values = get_setting_value('WEBMON_urls_to_check')
|
||||||
|
|
||||||
mylog('verbose', [f'[{pluginName}] Checking URLs: {values}'])
|
mylog('verbose', [f'[{pluginName}] Checking URLs: {values}'])
|
||||||
|
|
||||||
|
|
||||||
if len(values) > 0:
|
if len(values) > 0:
|
||||||
plugin_objects = Plugin_Objects(RESULT_FILE)
|
plugin_objects = Plugin_Objects(RESULT_FILE)
|
||||||
@@ -48,12 +47,13 @@ def main():
|
|||||||
else:
|
else:
|
||||||
return
|
return
|
||||||
|
|
||||||
|
|
||||||
def check_services_health(site):
|
def check_services_health(site):
|
||||||
|
|
||||||
mylog('verbose', [f'[{pluginName}] Checking {site}'])
|
mylog('verbose', [f'[{pluginName}] Checking {site}'])
|
||||||
|
|
||||||
urllib3.disable_warnings(InsecureRequestWarning)
|
urllib3.disable_warnings(InsecureRequestWarning)
|
||||||
|
|
||||||
try:
|
try:
|
||||||
resp = requests.get(site, verify=False, timeout=get_setting_value('WEBMON_RUN_TIMEOUT'), headers={"User-Agent": "NetAlertX"})
|
resp = requests.get(site, verify=False, timeout=get_setting_value('WEBMON_RUN_TIMEOUT'), headers={"User-Agent": "NetAlertX"})
|
||||||
latency = resp.elapsed.total_seconds()
|
latency = resp.elapsed.total_seconds()
|
||||||
@@ -79,12 +79,13 @@ def check_services_health(site):
|
|||||||
|
|
||||||
return status, latency
|
return status, latency
|
||||||
|
|
||||||
|
|
||||||
def service_monitoring(urls, plugin_objects):
|
def service_monitoring(urls, plugin_objects):
|
||||||
for site in urls:
|
for site in urls:
|
||||||
status, latency = check_services_health(site)
|
status, latency = check_services_health(site)
|
||||||
plugin_objects.add_object(
|
plugin_objects.add_object(
|
||||||
primaryId=site,
|
primaryId=site,
|
||||||
secondaryId='null',
|
secondaryId='null',
|
||||||
watched1=status,
|
watched1=status,
|
||||||
watched2=latency,
|
watched2=latency,
|
||||||
watched3='null',
|
watched3='null',
|
||||||
@@ -94,7 +95,6 @@ def service_monitoring(urls, plugin_objects):
|
|||||||
)
|
)
|
||||||
return plugin_objects
|
return plugin_objects
|
||||||
|
|
||||||
|
|
||||||
if __name__ == '__main__':
|
if __name__ == '__main__':
|
||||||
sys.exit(main())
|
sys.exit(main())
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@@ -44,3 +44,4 @@ More Info:
|
|||||||
|
|
||||||
Report Date: 2021-12-08 12:30
|
Report Date: 2021-12-08 12:30
|
||||||
Server: Synology-NAS
|
Server: Synology-NAS
|
||||||
|
Link: netalertx.com
|
||||||
|
|||||||
@@ -1,12 +1,3 @@
|
|||||||
<!--
|
|
||||||
#---------------------------------------------------------------------------------#
|
|
||||||
# NetAlertX #
|
|
||||||
# Open Source Network Guard / WIFI & LAN intrusion detector #
|
|
||||||
# #
|
|
||||||
# report_template.html - Back module. Template to email reporting in HTML format #
|
|
||||||
#---------------------------------------------------------------------------------#
|
|
||||||
-->
|
|
||||||
|
|
||||||
<html>
|
<html>
|
||||||
<head></head>
|
<head></head>
|
||||||
<body>
|
<body>
|
||||||
@@ -20,11 +11,11 @@
|
|||||||
</tr>
|
</tr>
|
||||||
<tr>
|
<tr>
|
||||||
<td height=200 valign=top style="padding: 10px">
|
<td height=200 valign=top style="padding: 10px">
|
||||||
<NEW_DEVICES_TABLE>
|
NEW_DEVICES_TABLE
|
||||||
<DOWN_DEVICES_TABLE>
|
DOWN_DEVICES_TABLE
|
||||||
<DOWN_RECONNECTED_TABLE>
|
DOWN_RECONNECTED_TABLE
|
||||||
<EVENTS_TABLE>
|
EVENTS_TABLE
|
||||||
<PLUGINS_TABLE>
|
PLUGINS_TABLE
|
||||||
|
|
||||||
</td>
|
</td>
|
||||||
</tr>
|
</tr>
|
||||||
@@ -34,11 +25,11 @@
|
|||||||
<table width=100% bgcolor=#3c8dbc cellpadding=5px cellspacing=0 style="font-size: 10px; border-bottom-left-radius: 5px; border-bottom-right-radius: 5px;">
|
<table width=100% bgcolor=#3c8dbc cellpadding=5px cellspacing=0 style="font-size: 10px; border-bottom-left-radius: 5px; border-bottom-right-radius: 5px;">
|
||||||
<tr>
|
<tr>
|
||||||
<td width=50% style="text-align:center;color: white;" bgcolor="#3c8dbc">
|
<td width=50% style="text-align:center;color: white;" bgcolor="#3c8dbc">
|
||||||
<NEW_VERSION>
|
NEW_VERSION
|
||||||
| Sent: <REPORT_DATE>
|
| Sent: REPORT_DATE
|
||||||
| Server: <SERVER_NAME>
|
| Server: <a href="REPORT_DASHBOARD_URL" target="_blank" style="color:#ffffff;">SERVER_NAME</a>
|
||||||
| Built: <BUILD_DATE>
|
| Built: BUILD_DATE
|
||||||
| Version: <BUILD_VERSION>
|
| Version: BUILD_VERSION
|
||||||
</td>
|
</td>
|
||||||
</tr>
|
</tr>
|
||||||
</table>
|
</table>
|
||||||
|
|||||||
@@ -1,9 +1,10 @@
|
|||||||
<NEW_DEVICES_TABLE>
|
NEW_DEVICES_TABLE
|
||||||
<DOWN_DEVICES_TABLE>
|
DOWN_DEVICES_TABLE
|
||||||
<DOWN_RECONNECTED_TABLE>
|
DOWN_RECONNECTED_TABLE
|
||||||
<EVENTS_TABLE>
|
EVENTS_TABLE
|
||||||
<PLUGINS_TABLE>
|
PLUGINS_TABLE
|
||||||
|
|
||||||
Report Date: <REPORT_DATE>
|
Report Date: REPORT_DATE
|
||||||
Server: <SERVER_NAME>
|
Server: SERVER_NAME
|
||||||
<NEW_VERSION>
|
Link: REPORT_DASHBOARD_URL
|
||||||
|
NEW_VERSION
|
||||||
@@ -1,7 +1,4 @@
|
|||||||
#!/bin/bash
|
#!/bin/bash
|
||||||
echo "Initializing php-fpm..."
|
echo "Initializing php-fpm..."
|
||||||
# Set up PHP-FPM directories and socket configuration
|
# Set up PHP-FPM directories and socket configuration
|
||||||
install -d -o netalertx -g netalertx /services/config/run
|
|
||||||
|
|
||||||
|
|
||||||
echo "php-fpm initialized."
|
echo "php-fpm initialized."
|
||||||
|
|||||||
@@ -66,6 +66,7 @@ CREATE TABLE Devices (
|
|||||||
devIsArchived BOOLEAN NOT NULL DEFAULT (0) CHECK (devIsArchived IN (0, 1)),
|
devIsArchived BOOLEAN NOT NULL DEFAULT (0) CHECK (devIsArchived IN (0, 1)),
|
||||||
devParentMAC TEXT,
|
devParentMAC TEXT,
|
||||||
devParentPort INTEGER,
|
devParentPort INTEGER,
|
||||||
|
devParentRelType TEXT,
|
||||||
devIcon TEXT,
|
devIcon TEXT,
|
||||||
devGUID TEXT,
|
devGUID TEXT,
|
||||||
devSite TEXT,
|
devSite TEXT,
|
||||||
|
|||||||
@@ -1 +0,0 @@
|
|||||||
/tmp/nginx/active-config
|
|
||||||
@@ -5,8 +5,6 @@ set -euo pipefail
|
|||||||
LOG_DIR=${NETALERTX_LOG}
|
LOG_DIR=${NETALERTX_LOG}
|
||||||
RUN_DIR=${SYSTEM_SERVICES_RUN}
|
RUN_DIR=${SYSTEM_SERVICES_RUN}
|
||||||
TMP_DIR=/tmp/nginx
|
TMP_DIR=/tmp/nginx
|
||||||
SYSTEM_NGINX_CONFIG_TEMPLATE="/services/config/nginx/netalertx.conf.template"
|
|
||||||
SYSTEM_NGINX_CONFIG_FILE="/services/config/nginx/conf.active/netalertx.conf"
|
|
||||||
|
|
||||||
# Create directories if they don't exist
|
# Create directories if they don't exist
|
||||||
mkdir -p "${LOG_DIR}" "${RUN_DIR}" "${TMP_DIR}"
|
mkdir -p "${LOG_DIR}" "${RUN_DIR}" "${TMP_DIR}"
|
||||||
@@ -33,9 +31,9 @@ done
|
|||||||
|
|
||||||
TEMP_CONFIG_FILE=$(mktemp "${TMP_DIR}/netalertx.conf.XXXXXX")
|
TEMP_CONFIG_FILE=$(mktemp "${TMP_DIR}/netalertx.conf.XXXXXX")
|
||||||
if envsubst '${LISTEN_ADDR} ${PORT}' < "${SYSTEM_NGINX_CONFIG_TEMPLATE}" > "${TEMP_CONFIG_FILE}" 2>/dev/null; then
|
if envsubst '${LISTEN_ADDR} ${PORT}' < "${SYSTEM_NGINX_CONFIG_TEMPLATE}" > "${TEMP_CONFIG_FILE}" 2>/dev/null; then
|
||||||
mv "${TEMP_CONFIG_FILE}" "${SYSTEM_NGINX_CONFIG_FILE}"
|
mv "${TEMP_CONFIG_FILE}" "${SYSTEM_SERVICES_ACTIVE_CONFIG_FILE}"
|
||||||
else
|
else
|
||||||
echo "Note: Unable to write to ${SYSTEM_NGINX_CONFIG_FILE}. Using default configuration."
|
echo "Note: Unable to write to ${SYSTEM_SERVICES_ACTIVE_CONFIG_FILE}. Using default configuration."
|
||||||
rm -f "${TEMP_CONFIG_FILE}"
|
rm -f "${TEMP_CONFIG_FILE}"
|
||||||
fi
|
fi
|
||||||
|
|
||||||
@@ -49,10 +47,10 @@ chmod -R 777 "/tmp/nginx" 2>/dev/null || true
|
|||||||
|
|
||||||
# Execute nginx with overrides
|
# Execute nginx with overrides
|
||||||
# echo the full nginx command then run it
|
# echo the full nginx command then run it
|
||||||
echo "Starting /usr/sbin/nginx -p \"${RUN_DIR}/\" -c \"${SYSTEM_NGINX_CONFIG_FILE}\" -g \"error_log /dev/stderr; error_log ${NETALERTX_LOG}/nginx-error.log; daemon off;\" &"
|
echo "Starting /usr/sbin/nginx -p \"${RUN_DIR}/\" -c \"${SYSTEM_SERVICES_ACTIVE_CONFIG_FILE}\" -g \"error_log /dev/stderr; error_log ${NETALERTX_LOG}/nginx-error.log; daemon off;\" &"
|
||||||
/usr/sbin/nginx \
|
/usr/sbin/nginx \
|
||||||
-p "${RUN_DIR}/" \
|
-p "${RUN_DIR}/" \
|
||||||
-c "${SYSTEM_NGINX_CONFIG_FILE}" \
|
-c "${SYSTEM_SERVICES_ACTIVE_CONFIG_FILE}" \
|
||||||
-g "error_log /dev/stderr; error_log ${NETALERTX_LOG}/nginx-error.log; daemon off;" &
|
-g "error_log /dev/stderr; error_log ${NETALERTX_LOG}/nginx-error.log; daemon off;" &
|
||||||
nginx_pid=$!
|
nginx_pid=$!
|
||||||
|
|
||||||
|
|||||||
@@ -8,4 +8,11 @@ markers = [
|
|||||||
"docker: requires docker socket and elevated container permissions",
|
"docker: requires docker socket and elevated container permissions",
|
||||||
"compose: Tests docker compose files. Slow.",
|
"compose: Tests docker compose files. Slow.",
|
||||||
"feature_complete: extended coverage suite not run by default",
|
"feature_complete: extended coverage suite not run by default",
|
||||||
]
|
]
|
||||||
|
[tool.ruff]
|
||||||
|
line-length = 180
|
||||||
|
ignore = ["E203", "C901"] # global ignores
|
||||||
|
|
||||||
|
[tool.ruff.lint]
|
||||||
|
select = ["E", "F"]
|
||||||
|
extend-select = ["E402"]
|
||||||
87
run_docker_tests.sh
Executable file
87
run_docker_tests.sh
Executable file
@@ -0,0 +1,87 @@
|
|||||||
|
#!/bin/bash
|
||||||
|
#
|
||||||
|
# run_docker_tests.sh
|
||||||
|
#
|
||||||
|
# This script automates the entire process of testing the application
|
||||||
|
# within its intended, privileged devcontainer environment. It is
|
||||||
|
# idempotent and can be run repeatedly.
|
||||||
|
#
|
||||||
|
|
||||||
|
set -e
|
||||||
|
|
||||||
|
# --- 1. Regenerate Devcontainer Dockerfile ---
|
||||||
|
echo "--- Regenerating .devcontainer/Dockerfile from source ---"
|
||||||
|
if [ -f ".devcontainer/scripts/generate-configs.sh" ]; then
|
||||||
|
/bin/bash .devcontainer/scripts/generate-configs.sh
|
||||||
|
else
|
||||||
|
echo "ERROR: generate-configs.sh not found. Aborting."
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
# --- 2. Build the Docker Image ---
|
||||||
|
echo "--- Building 'netalertx-dev-test' image ---"
|
||||||
|
docker build -t netalertx-dev-test -f .devcontainer/Dockerfile . --target netalertx-devcontainer
|
||||||
|
|
||||||
|
# --- 3. Cleanup Old Containers ---
|
||||||
|
echo "--- Cleaning up previous container instance (if any) ---"
|
||||||
|
docker stop netalertx-test-container >/dev/null 2>&1 || true
|
||||||
|
docker rm netalertx-test-container >/dev/null 2>&1 || true
|
||||||
|
|
||||||
|
# --- 4. Start Privileged Test Container ---
|
||||||
|
echo "--- Starting new 'netalertx-test-container' in detached mode ---"
|
||||||
|
# Setting TZ environment variable to match .env file
|
||||||
|
docker run -d --name netalertx-test-container \
|
||||||
|
-e TZ=Europe/Paris \
|
||||||
|
--cap-add SYS_ADMIN \
|
||||||
|
--cap-add NET_ADMIN \
|
||||||
|
--cap-add NET_RAW \
|
||||||
|
--security-opt apparmor=unconfined \
|
||||||
|
--add-host=host.docker.internal:host-gateway \
|
||||||
|
-v /var/run/docker.sock:/var/run/docker.sock \
|
||||||
|
-v "$(pwd)":/workspaces/NetAlertX \
|
||||||
|
netalertx-dev-test
|
||||||
|
|
||||||
|
# --- 5. Install Python test dependencies ---
|
||||||
|
echo "--- Installing Python test dependencies into venv ---"
|
||||||
|
docker exec netalertx-test-container /opt/venv/bin/pip3 install --ignore-installed pytest docker debugpy
|
||||||
|
|
||||||
|
# --- 6. Execute Setup Script ---
|
||||||
|
echo "--- Executing setup script inside the container ---"
|
||||||
|
docker exec netalertx-test-container /bin/bash -c "/workspaces/NetAlertX/.devcontainer/scripts/setup.sh"
|
||||||
|
|
||||||
|
# --- 7. Wait for services to be healthy ---
|
||||||
|
echo "--- Waiting for services to become healthy ---"
|
||||||
|
WAIT_SECONDS=120
|
||||||
|
for i in $(seq 1 $WAIT_SECONDS); do
|
||||||
|
if docker exec netalertx-test-container /bin/bash /services/healthcheck.sh; then
|
||||||
|
echo "--- Services are healthy! ---"
|
||||||
|
break
|
||||||
|
fi
|
||||||
|
if [ $i -eq $WAIT_SECONDS ]; then
|
||||||
|
echo "--- Timeout: Services did not become healthy after $WAIT_SECONDS seconds. ---"
|
||||||
|
docker logs netalertx-test-container
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
echo " ... waiting ($i/$WAIT_SECONDS)"
|
||||||
|
sleep 1
|
||||||
|
done
|
||||||
|
|
||||||
|
|
||||||
|
# --- 8. Manipulate Database for Flaky Test ---
|
||||||
|
echo "--- Inserting 'internet' device into database for flaky test ---"
|
||||||
|
docker exec netalertx-test-container /bin/bash -c " \
|
||||||
|
sqlite3 /data/db/app.db \"INSERT OR IGNORE INTO Devices (devMac, devFirstConnection, devLastConnection, devLastIP, devName) VALUES ('internet', DATETIME('now'), DATETIME('now'), '0.0.0.0', 'Internet Gateway');\" \
|
||||||
|
"
|
||||||
|
|
||||||
|
# --- 9. Execute Tests ---
|
||||||
|
echo "--- Executing tests inside the container ---"
|
||||||
|
docker exec netalertx-test-container /bin/bash -c " \
|
||||||
|
cd /workspaces/NetAlertX && /opt/venv/bin/pytest -m 'not (docker or compose or feature_complete)' --cache-clear -o cache_dir=/tmp/.pytest_cache; \
|
||||||
|
"
|
||||||
|
|
||||||
|
# --- 10. Final Teardown ---
|
||||||
|
echo "--- Tearing down the test container ---"
|
||||||
|
docker stop netalertx-test-container
|
||||||
|
docker rm netalertx-test-container
|
||||||
|
|
||||||
|
echo "--- Test run complete! ---"
|
||||||
@@ -1,4 +1,3 @@
|
|||||||
|
|
||||||
#!/usr/bin/env python3
|
#!/usr/bin/env python3
|
||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
"""
|
"""
|
||||||
@@ -19,11 +18,12 @@ import subprocess
|
|||||||
import json
|
import json
|
||||||
import os
|
import os
|
||||||
|
|
||||||
|
|
||||||
def check_new_devices():
|
def check_new_devices():
|
||||||
# Get API path from environment variable, fallback to /tmp/api
|
# Get API path from environment variable, fallback to /tmp/api
|
||||||
api_path = os.environ.get('NETALERTX_API', '/tmp/api')
|
api_path = os.environ.get('NETALERTX_API', '/tmp/api')
|
||||||
table_devices_path = f'{api_path}/table_devices.json'
|
table_devices_path = f'{api_path}/table_devices.json'
|
||||||
|
|
||||||
try:
|
try:
|
||||||
# Rufe die JSON-Datei aus dem Docker-Container ab
|
# Rufe die JSON-Datei aus dem Docker-Container ab
|
||||||
result = subprocess.run(
|
result = subprocess.run(
|
||||||
@@ -73,6 +73,6 @@ def check_new_devices():
|
|||||||
)
|
)
|
||||||
print(f"1 NetAlertX_New_Devices - WARNING - Found {len(new_devices)} new device(s): {device_list_str}")
|
print(f"1 NetAlertX_New_Devices - WARNING - Found {len(new_devices)} new device(s): {device_list_str}")
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
if __name__ == "__main__":
|
||||||
check_new_devices()
|
check_new_devices()
|
||||||
|
|
||||||
|
|||||||
@@ -1,8 +1,8 @@
|
|||||||
#!/usr/bin/env python3
|
#!/usr/bin/env python3
|
||||||
import subprocess
|
import subprocess
|
||||||
import sys
|
|
||||||
import os
|
import os
|
||||||
|
|
||||||
|
|
||||||
def run_sqlite_command(command):
|
def run_sqlite_command(command):
|
||||||
# Use environment variable with fallback
|
# Use environment variable with fallback
|
||||||
db_path = os.path.join(
|
db_path = os.path.join(
|
||||||
@@ -19,18 +19,19 @@ def run_sqlite_command(command):
|
|||||||
print(f"Error executing command: {e}")
|
print(f"Error executing command: {e}")
|
||||||
return None
|
return None
|
||||||
|
|
||||||
|
|
||||||
def check_and_clean_device():
|
def check_and_clean_device():
|
||||||
while True:
|
while True:
|
||||||
print("\nDevice Cleanup Tool")
|
print("\nDevice Cleanup Tool")
|
||||||
print("1. Check/Clean by MAC address")
|
print("1. Check/Clean by MAC address")
|
||||||
print("2. Check/Clean by IP address")
|
print("2. Check/Clean by IP address")
|
||||||
print("3. Exit")
|
print("3. Exit")
|
||||||
|
|
||||||
choice = input("\nSelect option (1-3): ")
|
choice = input("\nSelect option (1-3): ")
|
||||||
|
|
||||||
if choice == "1":
|
if choice == "1":
|
||||||
mac = input("Enter MAC address (format: xx:xx:xx:xx:xx:xx): ").lower()
|
mac = input("Enter MAC address (format: xx:xx:xx:xx:xx:xx): ").lower()
|
||||||
|
|
||||||
# Check all tables for MAC
|
# Check all tables for MAC
|
||||||
tables_checks = [
|
tables_checks = [
|
||||||
f"SELECT 'Events' as source, * FROM Events WHERE eve_MAC='{mac}'",
|
f"SELECT 'Events' as source, * FROM Events WHERE eve_MAC='{mac}'",
|
||||||
@@ -40,14 +41,14 @@ def check_and_clean_device():
|
|||||||
f"SELECT 'AppEvents' as source, * FROM AppEvents WHERE ObjectPrimaryID LIKE '%{mac}%' OR ObjectSecondaryID LIKE '%{mac}%'",
|
f"SELECT 'AppEvents' as source, * FROM AppEvents WHERE ObjectPrimaryID LIKE '%{mac}%' OR ObjectSecondaryID LIKE '%{mac}%'",
|
||||||
f"SELECT 'Plugins_Objects' as source, * FROM Plugins_Objects WHERE Object_PrimaryID LIKE '%{mac}%'"
|
f"SELECT 'Plugins_Objects' as source, * FROM Plugins_Objects WHERE Object_PrimaryID LIKE '%{mac}%'"
|
||||||
]
|
]
|
||||||
|
|
||||||
found = False
|
found = False
|
||||||
for check in tables_checks:
|
for check in tables_checks:
|
||||||
result = run_sqlite_command(check)
|
result = run_sqlite_command(check)
|
||||||
if result and result.strip():
|
if result and result.strip():
|
||||||
found = True
|
found = True
|
||||||
print(f"\nFound entries:\n{result}")
|
print(f"\nFound entries:\n{result}")
|
||||||
|
|
||||||
if found:
|
if found:
|
||||||
confirm = input("\nWould you like to clean these entries? (y/n): ")
|
confirm = input("\nWould you like to clean these entries? (y/n): ")
|
||||||
if confirm.lower() == 'y':
|
if confirm.lower() == 'y':
|
||||||
@@ -60,16 +61,16 @@ def check_and_clean_device():
|
|||||||
f"DELETE FROM AppEvents WHERE ObjectPrimaryID LIKE '%{mac}%' OR ObjectSecondaryID LIKE '%{mac}%'",
|
f"DELETE FROM AppEvents WHERE ObjectPrimaryID LIKE '%{mac}%' OR ObjectSecondaryID LIKE '%{mac}%'",
|
||||||
f"DELETE FROM Plugins_Objects WHERE Object_PrimaryID LIKE '%{mac}%'"
|
f"DELETE FROM Plugins_Objects WHERE Object_PrimaryID LIKE '%{mac}%'"
|
||||||
]
|
]
|
||||||
|
|
||||||
for delete in deletes:
|
for delete in deletes:
|
||||||
run_sqlite_command(delete)
|
run_sqlite_command(delete)
|
||||||
print("Cleanup completed!")
|
print("Cleanup completed!")
|
||||||
else:
|
else:
|
||||||
print("\nNo entries found for this MAC address")
|
print("\nNo entries found for this MAC address")
|
||||||
|
|
||||||
elif choice == "2":
|
elif choice == "2":
|
||||||
ip = input("Enter IP address (format: xxx.xxx.xxx.xxx): ")
|
ip = input("Enter IP address (format: xxx.xxx.xxx.xxx): ")
|
||||||
|
|
||||||
# Check all tables for IP
|
# Check all tables for IP
|
||||||
tables_checks = [
|
tables_checks = [
|
||||||
f"SELECT 'Events' as source, * FROM Events WHERE eve_IP='{ip}'",
|
f"SELECT 'Events' as source, * FROM Events WHERE eve_IP='{ip}'",
|
||||||
@@ -79,14 +80,14 @@ def check_and_clean_device():
|
|||||||
f"SELECT 'AppEvents' as source, * FROM AppEvents WHERE ObjectSecondaryID LIKE '%{ip}%'",
|
f"SELECT 'AppEvents' as source, * FROM AppEvents WHERE ObjectSecondaryID LIKE '%{ip}%'",
|
||||||
f"SELECT 'Plugins_Objects' as source, * FROM Plugins_Objects WHERE Object_SecondaryID LIKE '%{ip}%'"
|
f"SELECT 'Plugins_Objects' as source, * FROM Plugins_Objects WHERE Object_SecondaryID LIKE '%{ip}%'"
|
||||||
]
|
]
|
||||||
|
|
||||||
found = False
|
found = False
|
||||||
for check in tables_checks:
|
for check in tables_checks:
|
||||||
result = run_sqlite_command(check)
|
result = run_sqlite_command(check)
|
||||||
if result and result.strip():
|
if result and result.strip():
|
||||||
found = True
|
found = True
|
||||||
print(f"\nFound entries:\n{result}")
|
print(f"\nFound entries:\n{result}")
|
||||||
|
|
||||||
if found:
|
if found:
|
||||||
confirm = input("\nWould you like to clean these entries? (y/n): ")
|
confirm = input("\nWould you like to clean these entries? (y/n): ")
|
||||||
if confirm.lower() == 'y':
|
if confirm.lower() == 'y':
|
||||||
@@ -99,19 +100,20 @@ def check_and_clean_device():
|
|||||||
f"DELETE FROM AppEvents WHERE ObjectSecondaryID LIKE '%{ip}%'",
|
f"DELETE FROM AppEvents WHERE ObjectSecondaryID LIKE '%{ip}%'",
|
||||||
f"DELETE FROM Plugins_Objects WHERE Object_SecondaryID LIKE '%{ip}%'"
|
f"DELETE FROM Plugins_Objects WHERE Object_SecondaryID LIKE '%{ip}%'"
|
||||||
]
|
]
|
||||||
|
|
||||||
for delete in deletes:
|
for delete in deletes:
|
||||||
run_sqlite_command(delete)
|
run_sqlite_command(delete)
|
||||||
print("Cleanup completed!")
|
print("Cleanup completed!")
|
||||||
else:
|
else:
|
||||||
print("\nNo entries found for this IP address")
|
print("\nNo entries found for this IP address")
|
||||||
|
|
||||||
elif choice == "3":
|
elif choice == "3":
|
||||||
print("\nExiting...")
|
print("\nExiting...")
|
||||||
break
|
break
|
||||||
|
|
||||||
else:
|
else:
|
||||||
print("\nInvalid option, please try again")
|
print("\nInvalid option, please try again")
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
if __name__ == "__main__":
|
||||||
check_and_clean_device()
|
check_and_clean_device()
|
||||||
|
|||||||
@@ -1,6 +1,5 @@
|
|||||||
#!/usr/bin/env python3
|
#!/usr/bin/env python3
|
||||||
import paramiko
|
import paramiko
|
||||||
import re
|
|
||||||
from datetime import datetime
|
from datetime import datetime
|
||||||
import argparse
|
import argparse
|
||||||
import sys
|
import sys
|
||||||
@@ -8,6 +7,9 @@ from pathlib import Path
|
|||||||
import time
|
import time
|
||||||
import logging
|
import logging
|
||||||
|
|
||||||
|
logger = None
|
||||||
|
|
||||||
|
|
||||||
def setup_logging(debug=False):
|
def setup_logging(debug=False):
|
||||||
"""Configure logging based on debug flag."""
|
"""Configure logging based on debug flag."""
|
||||||
level = logging.DEBUG if debug else logging.INFO
|
level = logging.DEBUG if debug else logging.INFO
|
||||||
@@ -18,6 +20,7 @@ def setup_logging(debug=False):
|
|||||||
)
|
)
|
||||||
return logging.getLogger(__name__)
|
return logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
def parse_timestamp(date_str):
|
def parse_timestamp(date_str):
|
||||||
"""Convert OPNsense timestamp to Unix epoch time."""
|
"""Convert OPNsense timestamp to Unix epoch time."""
|
||||||
try:
|
try:
|
||||||
@@ -27,7 +30,8 @@ def parse_timestamp(date_str):
|
|||||||
dt = datetime.strptime(clean_date, '%Y/%m/%d %H:%M:%S')
|
dt = datetime.strptime(clean_date, '%Y/%m/%d %H:%M:%S')
|
||||||
return int(dt.timestamp())
|
return int(dt.timestamp())
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
logger.error(f"Failed to parse timestamp: {date_str}")
|
if logger:
|
||||||
|
logger.error(f"Failed to parse timestamp: {date_str} ({e})")
|
||||||
return None
|
return None
|
||||||
|
|
||||||
|
|
||||||
@@ -39,8 +43,14 @@ def get_lease_file(hostname, username, password=None, key_filename=None, port=22
|
|||||||
|
|
||||||
try:
|
try:
|
||||||
logger.debug(f"Attempting to connect to {hostname}:{port} as {username}")
|
logger.debug(f"Attempting to connect to {hostname}:{port} as {username}")
|
||||||
ssh.connect(hostname, port=port, username=username,
|
|
||||||
password=password, key_filename=key_filename)
|
ssh.connect(
|
||||||
|
hostname,
|
||||||
|
port=port,
|
||||||
|
username=username,
|
||||||
|
password=password,
|
||||||
|
key_filename=key_filename
|
||||||
|
)
|
||||||
|
|
||||||
# Get an interactive shell session
|
# Get an interactive shell session
|
||||||
logger.debug("Opening interactive SSH channel")
|
logger.debug("Opening interactive SSH channel")
|
||||||
@@ -74,11 +84,23 @@ def get_lease_file(hostname, username, password=None, key_filename=None, port=22
|
|||||||
|
|
||||||
# Clean up the output by removing the command echo and shell prompts
|
# Clean up the output by removing the command echo and shell prompts
|
||||||
lines = output.split('\n')
|
lines = output.split('\n')
|
||||||
|
|
||||||
# Remove first line (command echo) and any lines containing shell prompts
|
# Remove first line (command echo) and any lines containing shell prompts
|
||||||
cleaned_lines = [line for line in lines
|
cmd = command.strip()
|
||||||
if not line.strip().startswith(command.strip())
|
|
||||||
and not line.strip().endswith('> ')
|
cleaned_lines = []
|
||||||
and not line.strip().endswith('# ')]
|
for line in lines:
|
||||||
|
stripped = line.strip()
|
||||||
|
|
||||||
|
if stripped.startswith(cmd):
|
||||||
|
continue
|
||||||
|
if stripped.endswith('> '):
|
||||||
|
continue
|
||||||
|
if stripped.endswith('# '):
|
||||||
|
continue
|
||||||
|
|
||||||
|
cleaned_lines.append(line)
|
||||||
|
|
||||||
cleaned_output = '\n'.join(cleaned_lines)
|
cleaned_output = '\n'.join(cleaned_lines)
|
||||||
|
|
||||||
logger.debug(f"Final cleaned output length: {len(cleaned_output)} characters")
|
logger.debug(f"Final cleaned output length: {len(cleaned_output)} characters")
|
||||||
@@ -156,9 +178,7 @@ def parse_lease_file(lease_content):
|
|||||||
|
|
||||||
# Filter only active leases
|
# Filter only active leases
|
||||||
active_leases = [lease for lease in leases
|
active_leases = [lease for lease in leases
|
||||||
if lease.get('state') == 'active'
|
if lease.get('state') == 'active' and 'mac' in lease and 'ip' in lease]
|
||||||
and 'mac' in lease
|
|
||||||
and 'ip' in lease]
|
|
||||||
|
|
||||||
logger.debug(f"Found {len(active_leases)} active leases out of {len(leases)} total leases")
|
logger.debug(f"Found {len(active_leases)} active leases out of {len(leases)} total leases")
|
||||||
logger.debug("Active leases:")
|
logger.debug("Active leases:")
|
||||||
@@ -206,6 +226,7 @@ def convert_to_dnsmasq(leases):
|
|||||||
|
|
||||||
return dnsmasq_lines
|
return dnsmasq_lines
|
||||||
|
|
||||||
|
|
||||||
def main():
|
def main():
|
||||||
parser = argparse.ArgumentParser(description='Convert OPNsense DHCP leases to dnsmasq format')
|
parser = argparse.ArgumentParser(description='Convert OPNsense DHCP leases to dnsmasq format')
|
||||||
parser.add_argument('--host', required=True, help='OPNsense hostname or IP')
|
parser.add_argument('--host', required=True, help='OPNsense hostname or IP')
|
||||||
@@ -219,6 +240,7 @@ def main():
|
|||||||
args = parser.parse_args()
|
args = parser.parse_args()
|
||||||
|
|
||||||
# Setup logging
|
# Setup logging
|
||||||
|
global logger
|
||||||
logger = setup_logging(args.debug)
|
logger = setup_logging(args.debug)
|
||||||
|
|
||||||
try:
|
try:
|
||||||
@@ -255,5 +277,6 @@ def main():
|
|||||||
logger.error(f"Error: {str(e)}")
|
logger.error(f"Error: {str(e)}")
|
||||||
sys.exit(1)
|
sys.exit(1)
|
||||||
|
|
||||||
|
|
||||||
if __name__ == '__main__':
|
if __name__ == '__main__':
|
||||||
main()
|
main()
|
||||||
|
|||||||
@@ -22,9 +22,9 @@ from pathlib import Path
|
|||||||
|
|
||||||
# Register NetAlertX modules
|
# Register NetAlertX modules
|
||||||
import conf
|
import conf
|
||||||
from const import *
|
from const import fullConfPath, sql_new_devices
|
||||||
from logger import mylog
|
from logger import mylog
|
||||||
from helper import filePermissions
|
from helper import filePermissions
|
||||||
from utils.datetime_utils import timeNowTZ
|
from utils.datetime_utils import timeNowTZ
|
||||||
from app_state import updateState
|
from app_state import updateState
|
||||||
from api import update_api
|
from api import update_api
|
||||||
@@ -48,12 +48,12 @@ main structure of NetAlertX
|
|||||||
Initialise All
|
Initialise All
|
||||||
Rename old settings
|
Rename old settings
|
||||||
start Loop forever
|
start Loop forever
|
||||||
initialise loop
|
initialise loop
|
||||||
(re)import config
|
(re)import config
|
||||||
(re)import plugin config
|
(re)import plugin config
|
||||||
run plugins (once)
|
run plugins (once)
|
||||||
run frontend events
|
run frontend events
|
||||||
update API
|
update API
|
||||||
run plugins (scheduled)
|
run plugins (scheduled)
|
||||||
processing scan results
|
processing scan results
|
||||||
run plugins (after Scan)
|
run plugins (after Scan)
|
||||||
@@ -111,7 +111,7 @@ def main():
|
|||||||
loop_start_time = conf.loop_start_time # TODO fix
|
loop_start_time = conf.loop_start_time # TODO fix
|
||||||
|
|
||||||
# Handle plugins executed ONCE
|
# Handle plugins executed ONCE
|
||||||
if conf.plugins_once_run == False:
|
if conf.plugins_once_run is False:
|
||||||
pm.run_plugin_scripts("once")
|
pm.run_plugin_scripts("once")
|
||||||
conf.plugins_once_run = True
|
conf.plugins_once_run = True
|
||||||
|
|
||||||
@@ -146,7 +146,7 @@ def main():
|
|||||||
processScan = updateState("Check scan").processScan
|
processScan = updateState("Check scan").processScan
|
||||||
mylog("debug", [f"[MAIN] processScan: {processScan}"])
|
mylog("debug", [f"[MAIN] processScan: {processScan}"])
|
||||||
|
|
||||||
if processScan == True:
|
if processScan is True:
|
||||||
mylog("debug", "[MAIN] start processing scan results")
|
mylog("debug", "[MAIN] start processing scan results")
|
||||||
process_scan(db)
|
process_scan(db)
|
||||||
updateState("Scan processed", None, None, None, None, False)
|
updateState("Scan processed", None, None, None, None, False)
|
||||||
@@ -154,26 +154,24 @@ def main():
|
|||||||
# Name resolution
|
# Name resolution
|
||||||
# --------------------------------------------
|
# --------------------------------------------
|
||||||
|
|
||||||
# run plugins before notification processing (e.g. Plugins to discover device names)
|
# Check if new devices found (created by process_scan)
|
||||||
pm.run_plugin_scripts("before_name_updates")
|
|
||||||
|
|
||||||
# Resolve devices names
|
|
||||||
mylog("debug", "[Main] Resolve devices names")
|
|
||||||
update_devices_names(pm)
|
|
||||||
|
|
||||||
# --------
|
|
||||||
# Reporting
|
|
||||||
|
|
||||||
# Check if new devices found
|
|
||||||
sql.execute(sql_new_devices)
|
sql.execute(sql_new_devices)
|
||||||
newDevices = sql.fetchall()
|
newDevices = sql.fetchall()
|
||||||
db.commitDB()
|
db.commitDB()
|
||||||
|
|
||||||
# new devices were found
|
# If new devices were found, run all plugins registered to be run when new devices are found
|
||||||
|
# Run these before name resolution so plugins like NSLOOKUP that are configured
|
||||||
|
# for `on_new_device` can populate names used in the notifications below.
|
||||||
if len(newDevices) > 0:
|
if len(newDevices) > 0:
|
||||||
# run all plugins registered to be run when new devices are found
|
|
||||||
pm.run_plugin_scripts("on_new_device")
|
pm.run_plugin_scripts("on_new_device")
|
||||||
|
|
||||||
|
# run plugins before notification processing (e.g. Plugins to discover device names)
|
||||||
|
pm.run_plugin_scripts("before_name_updates")
|
||||||
|
|
||||||
|
# Resolve devices names (will pick up results from on_new_device plugins above)
|
||||||
|
mylog("debug", "[Main] Resolve devices names")
|
||||||
|
update_devices_names(pm)
|
||||||
|
|
||||||
# Notification handling
|
# Notification handling
|
||||||
# ----------------------------------------
|
# ----------------------------------------
|
||||||
|
|
||||||
|
|||||||
@@ -1,3 +1,4 @@
|
|||||||
|
#!/usr/bin/env python
|
||||||
import json
|
import json
|
||||||
import time
|
import time
|
||||||
import threading
|
import threading
|
||||||
@@ -110,7 +111,6 @@ def update_api(
|
|||||||
# -------------------------------------------------------------------------------
|
# -------------------------------------------------------------------------------
|
||||||
class api_endpoint_class:
|
class api_endpoint_class:
|
||||||
def __init__(self, db, forceUpdate, query, path, is_ad_hoc_user_event=False):
|
def __init__(self, db, forceUpdate, query, path, is_ad_hoc_user_event=False):
|
||||||
global apiEndpoints
|
|
||||||
|
|
||||||
current_time = timeNowTZ()
|
current_time = timeNowTZ()
|
||||||
|
|
||||||
@@ -145,8 +145,7 @@ class api_endpoint_class:
|
|||||||
self.needsUpdate = True
|
self.needsUpdate = True
|
||||||
# Only update changeDetectedWhen if it hasn't been set recently
|
# Only update changeDetectedWhen if it hasn't been set recently
|
||||||
if not self.changeDetectedWhen or current_time > (
|
if not self.changeDetectedWhen or current_time > (
|
||||||
self.changeDetectedWhen
|
self.changeDetectedWhen + datetime.timedelta(seconds=self.debounce_interval)
|
||||||
+ datetime.timedelta(seconds=self.debounce_interval)
|
|
||||||
):
|
):
|
||||||
self.changeDetectedWhen = (
|
self.changeDetectedWhen = (
|
||||||
current_time # Set timestamp for change detection
|
current_time # Set timestamp for change detection
|
||||||
@@ -164,8 +163,7 @@ class api_endpoint_class:
|
|||||||
self.needsUpdate = True
|
self.needsUpdate = True
|
||||||
# Only update changeDetectedWhen if it hasn't been set recently
|
# Only update changeDetectedWhen if it hasn't been set recently
|
||||||
if not self.changeDetectedWhen or current_time > (
|
if not self.changeDetectedWhen or current_time > (
|
||||||
self.changeDetectedWhen
|
self.changeDetectedWhen + datetime.timedelta(seconds=self.debounce_interval)
|
||||||
+ datetime.timedelta(seconds=self.debounce_interval)
|
|
||||||
):
|
):
|
||||||
self.changeDetectedWhen = (
|
self.changeDetectedWhen = (
|
||||||
current_time # Initialize timestamp for new endpoint
|
current_time # Initialize timestamp for new endpoint
|
||||||
@@ -180,17 +178,15 @@ class api_endpoint_class:
|
|||||||
current_time = timeNowTZ()
|
current_time = timeNowTZ()
|
||||||
|
|
||||||
# Debugging info to understand the issue
|
# Debugging info to understand the issue
|
||||||
# mylog('debug', [f'[API] api_endpoint_class: {self.fileName} is_ad_hoc_user_event {self.is_ad_hoc_user_event} last_update_time={self.last_update_time}, debounce time={self.last_update_time + datetime.timedelta(seconds=self.debounce_interval)}.'])
|
# mylog('debug', [f'[API] api_endpoint_class: {self.fileName} is_ad_hoc_user_event
|
||||||
|
# {self.is_ad_hoc_user_event} last_update_time={self.last_update_time},
|
||||||
|
# debounce time={self.last_update_time + datetime.timedelta(seconds=self.debounce_interval)}.'])
|
||||||
|
|
||||||
# Only attempt to write if the debounce time has passed
|
# Only attempt to write if the debounce time has passed
|
||||||
if forceUpdate == True or (
|
if forceUpdate is True or (
|
||||||
self.needsUpdate
|
self.needsUpdate and (
|
||||||
and (
|
self.changeDetectedWhen is None or current_time > (
|
||||||
self.changeDetectedWhen is None
|
self.changeDetectedWhen + datetime.timedelta(seconds=self.debounce_interval)
|
||||||
or current_time
|
|
||||||
> (
|
|
||||||
self.changeDetectedWhen
|
|
||||||
+ datetime.timedelta(seconds=self.debounce_interval)
|
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
):
|
):
|
||||||
@@ -225,7 +221,7 @@ periodic_write_thread = None
|
|||||||
|
|
||||||
def periodic_write(interval=1):
|
def periodic_write(interval=1):
|
||||||
"""Periodically checks all endpoints for pending writes."""
|
"""Periodically checks all endpoints for pending writes."""
|
||||||
global apiEndpoints
|
|
||||||
while not stop_event.is_set():
|
while not stop_event.is_set():
|
||||||
with api_lock:
|
with api_lock:
|
||||||
for endpoint in apiEndpoints:
|
for endpoint in apiEndpoints:
|
||||||
|
|||||||
@@ -9,25 +9,68 @@ from flask_cors import CORS
|
|||||||
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
|
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
|
||||||
sys.path.extend([f"{INSTALL_PATH}/server"])
|
sys.path.extend([f"{INSTALL_PATH}/server"])
|
||||||
|
|
||||||
from logger import mylog
|
from logger import mylog # noqa: E402 [flake8 lint suppression]
|
||||||
from helper import get_setting_value
|
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
|
||||||
from db.db_helper import get_date_from_period
|
from db.db_helper import get_date_from_period # noqa: E402 [flake8 lint suppression]
|
||||||
from app_state import updateState
|
from app_state import updateState # noqa: E402 [flake8 lint suppression]
|
||||||
|
|
||||||
|
from .graphql_endpoint import devicesSchema # noqa: E402 [flake8 lint suppression]
|
||||||
from .graphql_endpoint import devicesSchema
|
from .device_endpoint import ( # noqa: E402 [flake8 lint suppression]
|
||||||
from .device_endpoint import get_device_data, set_device_data, delete_device, delete_device_events, reset_device_props, copy_device, update_device_column
|
get_device_data,
|
||||||
from .devices_endpoint import get_all_devices, delete_unknown_devices, delete_all_with_empty_macs, delete_devices, export_devices, import_csv, devices_totals, devices_by_status
|
set_device_data,
|
||||||
from .events_endpoint import delete_events, delete_events_older_than, get_events, create_event, get_events_totals
|
delete_device,
|
||||||
from .history_endpoint import delete_online_history
|
delete_device_events,
|
||||||
from .prometheus_endpoint import get_metric_stats
|
reset_device_props,
|
||||||
from .sessions_endpoint import get_sessions, delete_session, create_session, get_sessions_calendar, get_device_sessions, get_session_events
|
copy_device,
|
||||||
from .nettools_endpoint import wakeonlan, traceroute, speedtest, nslookup, nmap_scan, internet_info
|
update_device_column
|
||||||
from .dbquery_endpoint import read_query, write_query, update_query, delete_query
|
)
|
||||||
from .sync_endpoint import handle_sync_post, handle_sync_get
|
from .devices_endpoint import ( # noqa: E402 [flake8 lint suppression]
|
||||||
from .logs_endpoint import clean_log
|
get_all_devices,
|
||||||
from models.user_events_queue_instance import UserEventsQueueInstance
|
delete_unknown_devices,
|
||||||
from messaging.in_app import write_notification, mark_all_notifications_read, delete_notifications, get_unread_notifications, delete_notification, mark_notification_as_read
|
delete_all_with_empty_macs,
|
||||||
|
delete_devices,
|
||||||
|
export_devices,
|
||||||
|
import_csv,
|
||||||
|
devices_totals,
|
||||||
|
devices_by_status
|
||||||
|
)
|
||||||
|
from .events_endpoint import ( # noqa: E402 [flake8 lint suppression]
|
||||||
|
delete_events,
|
||||||
|
delete_events_older_than,
|
||||||
|
get_events,
|
||||||
|
create_event,
|
||||||
|
get_events_totals
|
||||||
|
)
|
||||||
|
from .history_endpoint import delete_online_history # noqa: E402 [flake8 lint suppression]
|
||||||
|
from .prometheus_endpoint import get_metric_stats # noqa: E402 [flake8 lint suppression]
|
||||||
|
from .sessions_endpoint import ( # noqa: E402 [flake8 lint suppression]
|
||||||
|
get_sessions,
|
||||||
|
delete_session,
|
||||||
|
create_session,
|
||||||
|
get_sessions_calendar,
|
||||||
|
get_device_sessions,
|
||||||
|
get_session_events
|
||||||
|
)
|
||||||
|
from .nettools_endpoint import ( # noqa: E402 [flake8 lint suppression]
|
||||||
|
wakeonlan,
|
||||||
|
traceroute,
|
||||||
|
speedtest,
|
||||||
|
nslookup,
|
||||||
|
nmap_scan,
|
||||||
|
internet_info
|
||||||
|
)
|
||||||
|
from .dbquery_endpoint import read_query, write_query, update_query, delete_query # noqa: E402 [flake8 lint suppression]
|
||||||
|
from .sync_endpoint import handle_sync_post, handle_sync_get # noqa: E402 [flake8 lint suppression]
|
||||||
|
from .logs_endpoint import clean_log # noqa: E402 [flake8 lint suppression]
|
||||||
|
from models.user_events_queue_instance import UserEventsQueueInstance # noqa: E402 [flake8 lint suppression]
|
||||||
|
from messaging.in_app import ( # noqa: E402 [flake8 lint suppression]
|
||||||
|
write_notification,
|
||||||
|
mark_all_notifications_read,
|
||||||
|
delete_notifications,
|
||||||
|
get_unread_notifications,
|
||||||
|
delete_notification,
|
||||||
|
mark_notification_as_read
|
||||||
|
)
|
||||||
|
|
||||||
# Flask application
|
# Flask application
|
||||||
app = Flask(__name__)
|
app = Flask(__name__)
|
||||||
@@ -50,13 +93,14 @@ CORS(
|
|||||||
allow_headers=["Authorization", "Content-Type"],
|
allow_headers=["Authorization", "Content-Type"],
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
# -------------------------------------------------------------------
|
# -------------------------------------------------------------------
|
||||||
# Custom handler for 404 - Route not found
|
# Custom handler for 404 - Route not found
|
||||||
# -------------------------------------------------------------------
|
# -------------------------------------------------------------------
|
||||||
@app.errorhandler(404)
|
@app.errorhandler(404)
|
||||||
def not_found(error):
|
def not_found(error):
|
||||||
response = {
|
response = {
|
||||||
"success": False,
|
"success": False,
|
||||||
"error": "API route not found",
|
"error": "API route not found",
|
||||||
"message": f"The requested URL {error.description if hasattr(error, 'description') else ''} was not found on the server.",
|
"message": f"The requested URL {error.description if hasattr(error, 'description') else ''} was not found on the server.",
|
||||||
}
|
}
|
||||||
@@ -81,7 +125,7 @@ def graphql_endpoint():
|
|||||||
if not is_authorized():
|
if not is_authorized():
|
||||||
msg = '[graphql_server] Unauthorized access attempt - make sure your GRAPHQL_PORT and API_TOKEN settings are correct.'
|
msg = '[graphql_server] Unauthorized access attempt - make sure your GRAPHQL_PORT and API_TOKEN settings are correct.'
|
||||||
mylog('verbose', [msg])
|
mylog('verbose', [msg])
|
||||||
return jsonify({"success": False, "message": msg}), 401
|
return jsonify({"success": False, "message": msg, "error": "Forbidden"}), 401
|
||||||
|
|
||||||
# Retrieve and log request data
|
# Retrieve and log request data
|
||||||
data = request.get_json()
|
data = request.get_json()
|
||||||
@@ -200,7 +244,7 @@ def api_get_devices():
|
|||||||
def api_delete_devices():
|
def api_delete_devices():
|
||||||
if not is_authorized():
|
if not is_authorized():
|
||||||
return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403
|
return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403
|
||||||
|
|
||||||
macs = request.json.get("macs") if request.is_json else None
|
macs = request.json.get("macs") if request.is_json else None
|
||||||
|
|
||||||
return delete_devices(macs)
|
return delete_devices(macs)
|
||||||
@@ -338,7 +382,7 @@ def dbquery_read():
|
|||||||
|
|
||||||
if not raw_sql_b64:
|
if not raw_sql_b64:
|
||||||
return jsonify({"success": False, "message": "ERROR: Missing parameters", "error": "rawSql is required"}), 400
|
return jsonify({"success": False, "message": "ERROR: Missing parameters", "error": "rawSql is required"}), 400
|
||||||
|
|
||||||
return read_query(raw_sql_b64)
|
return read_query(raw_sql_b64)
|
||||||
|
|
||||||
|
|
||||||
@@ -350,7 +394,7 @@ def dbquery_write():
|
|||||||
data = request.get_json() or {}
|
data = request.get_json() or {}
|
||||||
raw_sql_b64 = data.get("rawSql")
|
raw_sql_b64 = data.get("rawSql")
|
||||||
if not raw_sql_b64:
|
if not raw_sql_b64:
|
||||||
return jsonify({"success": False, "message": "ERROR: Missing parameters", "error": "rawSql is required"}), 400
|
return jsonify({"success": False, "message": "ERROR: Missing parameters", "error": "rawSql is required"}), 400
|
||||||
|
|
||||||
return write_query(raw_sql_b64)
|
return write_query(raw_sql_b64)
|
||||||
|
|
||||||
@@ -363,7 +407,13 @@ def dbquery_update():
|
|||||||
data = request.get_json() or {}
|
data = request.get_json() or {}
|
||||||
required = ["columnName", "id", "dbtable", "columns", "values"]
|
required = ["columnName", "id", "dbtable", "columns", "values"]
|
||||||
if not all(data.get(k) for k in required):
|
if not all(data.get(k) for k in required):
|
||||||
return jsonify({"success": False, "message": "ERROR: Missing parameters", "error": "Missing required 'columnName', 'id', 'dbtable', 'columns', or 'values' query parameter"}), 400
|
return jsonify(
|
||||||
|
{
|
||||||
|
"success": False,
|
||||||
|
"message": "ERROR: Missing parameters",
|
||||||
|
"error": "Missing required 'columnName', 'id', 'dbtable', 'columns', or 'values' query parameter"
|
||||||
|
}
|
||||||
|
), 400
|
||||||
|
|
||||||
return update_query(
|
return update_query(
|
||||||
column_name=data["columnName"],
|
column_name=data["columnName"],
|
||||||
@@ -418,12 +468,13 @@ def api_clean_log():
|
|||||||
|
|
||||||
return clean_log(file)
|
return clean_log(file)
|
||||||
|
|
||||||
|
|
||||||
@app.route("/logs/add-to-execution-queue", methods=["POST"])
|
@app.route("/logs/add-to-execution-queue", methods=["POST"])
|
||||||
def api_add_to_execution_queue():
|
def api_add_to_execution_queue():
|
||||||
|
|
||||||
if not is_authorized():
|
if not is_authorized():
|
||||||
return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403
|
return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403
|
||||||
|
|
||||||
queue = UserEventsQueueInstance()
|
queue = UserEventsQueueInstance()
|
||||||
|
|
||||||
# Get JSON payload safely
|
# Get JSON payload safely
|
||||||
@@ -499,7 +550,7 @@ def api_delete_old_events(days: int):
|
|||||||
"""
|
"""
|
||||||
if not is_authorized():
|
if not is_authorized():
|
||||||
return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403
|
return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403
|
||||||
|
|
||||||
return delete_events_older_than(days)
|
return delete_events_older_than(days)
|
||||||
|
|
||||||
|
|
||||||
@@ -619,7 +670,7 @@ def api_write_notification():
|
|||||||
|
|
||||||
if not content:
|
if not content:
|
||||||
return jsonify({"success": False, "message": "ERROR: Missing parameters", "error": "Missing content"}), 400
|
return jsonify({"success": False, "message": "ERROR: Missing parameters", "error": "Missing content"}), 400
|
||||||
|
|
||||||
write_notification(content, level)
|
write_notification(content, level)
|
||||||
return jsonify({"success": True})
|
return jsonify({"success": True})
|
||||||
|
|
||||||
@@ -672,7 +723,8 @@ def api_mark_notification_read(guid):
|
|||||||
return jsonify({"success": True})
|
return jsonify({"success": True})
|
||||||
else:
|
else:
|
||||||
return jsonify({"success": False, "message": "ERROR", "error": result.get("error")}), 500
|
return jsonify({"success": False, "message": "ERROR", "error": result.get("error")}), 500
|
||||||
|
|
||||||
|
|
||||||
# --------------------------
|
# --------------------------
|
||||||
# SYNC endpoint
|
# SYNC endpoint
|
||||||
# --------------------------
|
# --------------------------
|
||||||
|
|||||||
@@ -9,7 +9,7 @@ from flask import jsonify
|
|||||||
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
|
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
|
||||||
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
|
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
|
||||||
|
|
||||||
from database import get_temp_db_connection
|
from database import get_temp_db_connection # noqa: E402 [flake8 lint suppression]
|
||||||
|
|
||||||
|
|
||||||
def read_query(raw_sql_b64):
|
def read_query(raw_sql_b64):
|
||||||
|
|||||||
@@ -2,17 +2,16 @@
|
|||||||
|
|
||||||
import os
|
import os
|
||||||
import sys
|
import sys
|
||||||
from datetime import datetime
|
|
||||||
from flask import jsonify, request
|
from flask import jsonify, request
|
||||||
|
|
||||||
# Register NetAlertX directories
|
# Register NetAlertX directories
|
||||||
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
|
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
|
||||||
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
|
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
|
||||||
|
|
||||||
from database import get_temp_db_connection
|
from database import get_temp_db_connection # noqa: E402 [flake8 lint suppression]
|
||||||
from helper import is_random_mac, get_setting_value
|
from helper import is_random_mac, get_setting_value # noqa: E402 [flake8 lint suppression]
|
||||||
from utils.datetime_utils import timeNowDB, format_date
|
from utils.datetime_utils import timeNowDB, format_date # noqa: E402 [flake8 lint suppression]
|
||||||
from db.db_helper import row_to_json, get_date_from_period
|
from db.db_helper import row_to_json, get_date_from_period # noqa: E402 [flake8 lint suppression]
|
||||||
|
|
||||||
# --------------------------
|
# --------------------------
|
||||||
# Device Endpoints Functions
|
# Device Endpoints Functions
|
||||||
@@ -27,10 +26,10 @@ def get_device_data(mac):
|
|||||||
cur = conn.cursor()
|
cur = conn.cursor()
|
||||||
|
|
||||||
now = timeNowDB()
|
now = timeNowDB()
|
||||||
|
|
||||||
# Special case for new device
|
# Special case for new device
|
||||||
if mac.lower() == "new":
|
if mac.lower() == "new":
|
||||||
|
|
||||||
device_data = {
|
device_data = {
|
||||||
"devMac": "",
|
"devMac": "",
|
||||||
"devName": "",
|
"devName": "",
|
||||||
@@ -89,10 +88,10 @@ def get_device_data(mac):
|
|||||||
ELSE 'Off-line'
|
ELSE 'Off-line'
|
||||||
END AS devStatus,
|
END AS devStatus,
|
||||||
|
|
||||||
(SELECT COUNT(*) FROM Sessions
|
(SELECT COUNT(*) FROM Sessions
|
||||||
WHERE ses_MAC = d.devMac AND (
|
WHERE ses_MAC = d.devMac AND (
|
||||||
ses_DateTimeConnection >= {period_date_sql} OR
|
ses_DateTimeConnection >= {period_date_sql} OR
|
||||||
ses_DateTimeDisconnection >= {period_date_sql} OR
|
ses_DateTimeDisconnection >= {period_date_sql} OR
|
||||||
ses_StillConnected = 1
|
ses_StillConnected = 1
|
||||||
)) AS devSessions,
|
)) AS devSessions,
|
||||||
|
|
||||||
|
|||||||
@@ -14,16 +14,13 @@ from logger import mylog
|
|||||||
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
|
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
|
||||||
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
|
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
|
||||||
|
|
||||||
from database import get_temp_db_connection
|
from database import get_temp_db_connection # noqa: E402 [flake8 lint suppression]
|
||||||
from db.db_helper import get_table_json, get_device_condition_by_status
|
from db.db_helper import get_table_json, get_device_condition_by_status # noqa: E402 [flake8 lint suppression]
|
||||||
from utils.datetime_utils import format_date
|
|
||||||
|
|
||||||
|
|
||||||
# --------------------------
|
# --------------------------
|
||||||
# Device Endpoints Functions
|
# Device Endpoints Functions
|
||||||
# --------------------------
|
# --------------------------
|
||||||
|
|
||||||
|
|
||||||
def get_all_devices():
|
def get_all_devices():
|
||||||
"""Retrieve all devices from the database."""
|
"""Retrieve all devices from the database."""
|
||||||
conn = get_temp_db_connection()
|
conn = get_temp_db_connection()
|
||||||
@@ -99,7 +96,7 @@ def delete_unknown_devices():
|
|||||||
|
|
||||||
def export_devices(export_format):
|
def export_devices(export_format):
|
||||||
"""
|
"""
|
||||||
Export devices from the Devices table in teh desired format.
|
Export devices from the Devices table in the desired format.
|
||||||
- If `macs` is None → delete ALL devices.
|
- If `macs` is None → delete ALL devices.
|
||||||
- If `macs` is a list → delete only matching MACs (supports wildcard '*').
|
- If `macs` is a list → delete only matching MACs (supports wildcard '*').
|
||||||
"""
|
"""
|
||||||
@@ -139,7 +136,6 @@ def export_devices(export_format):
|
|||||||
def import_csv(file_storage=None):
|
def import_csv(file_storage=None):
|
||||||
data = ""
|
data = ""
|
||||||
skipped = []
|
skipped = []
|
||||||
error = None
|
|
||||||
|
|
||||||
# 1. Try JSON `content` (base64-encoded CSV)
|
# 1. Try JSON `content` (base64-encoded CSV)
|
||||||
if request.is_json and request.json.get("content"):
|
if request.is_json and request.json.get("content"):
|
||||||
|
|||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user