Mirror of https://github.com/jokob-sk/NetAlertX.git (synced 2025-12-07 09:36:05 -08:00)

Compare commits
37 Commits
dbd1bdabc2 ... linting-fi
Commit SHA1s:

4f5a40ffce, f5aea55b29, e3e7e2f52e, 872ac1ce0f, ebeb7a07af, 5c14b34a8b,
f0abd500d9, 8503cb86f1, 5f0b670a82, 9df814e351, 88509ce8c2, 995c371f48,
aee5e04b9f, e0c96052bb, fd5235dd0a, f3de66a287, 9a4fb35ea5, a1ad904042,
81ff1da756, 85c9b0b99b, 4ccac66a73, c7b9fdaff2, c7dcc20a1d, bb365a5e81,
e2633d0251, 09c40e76b2, abc3e71440, d13596c35c, 7d5dcf061c, 6206e483a9,
f1ecc61de3, 92a6a3a916, 8a89f3b340, a93e87493f, c7032bceba, 0cd7528284,
2309b8eb3f
14  .github/ISSUE_TEMPLATE/i-have-an-issue.yml  (vendored)
@@ -44,7 +44,7 @@ body:
required: false
- type: textarea
attributes:
label: app.conf
label: Relevant `app.conf` settings
description: |
Paste relevant `app.conf`settings (remove sensitive info)
render: python
@@ -55,7 +55,7 @@ body:
label: docker-compose.yml
description: |
Paste your `docker-compose.yml`
render: python
render: yaml
validations:
required: false
- type: dropdown
@@ -79,7 +79,11 @@ body:
required: true
- type: textarea
attributes:
label: app.log
label: Relevant `app.log` section
value: |
```
PASTE LOG HERE. Using the triple backticks preserves format.
```
description: |
Logs with debug enabled (https://github.com/jokob-sk/NetAlertX/blob/main/docs/DEBUG_TIPS.md) ⚠
***Generally speaking, all bug reports should have logs provided.***
@@ -93,6 +97,10 @@ body:
label: Docker Logs
description: |
You can retrieve the logs from Portainer -> Containers -> your NetAlertX container -> Logs or by running `sudo docker logs netalertx`.
value: |
```
PASTE DOCKER LOG HERE. Using the triple backticks preserves format.
```
validations:
required: true
61  .github/workflows/code_checks.yml  (vendored)
@@ -21,7 +21,8 @@ jobs:
run: |
echo "🔍 Checking for incorrect absolute '/php/' URLs (should be 'php/' or './php/')..."

MATCHES=$(grep -rE "['\"]\/php\/" --include=\*.{js,php,html} ./front | grep -E "\.get|\.post|\.ajax|fetch|url\s*:") || true
MATCHES=$(grep -rE "['\"]/php/" --include=\*.{js,php,html} ./front \
| grep -E "\.get|\.post|\.ajax|fetch|url\s*:") || true

if [ -n "$MATCHES" ]; then
echo "$MATCHES"
@@ -38,3 +39,61 @@ jobs:
set -e
echo "🔍 Checking Python syntax..."
find . -name "*.py" -print0 | xargs -0 -n1 python3 -m py_compile

lint:
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v4

- name: Set up Python
uses: actions/setup-python@v5
with:
python-version: '3.11'

- name: Install linting tools
run: |
# Python linting
pip install flake8
# Docker linting
wget -O /tmp/hadolint https://github.com/hadolint/hadolint/releases/latest/download/hadolint-Linux-x86_64
chmod +x /tmp/hadolint
# PHP and shellcheck for syntax checking
sudo apt-get update && sudo apt-get install -y php-cli shellcheck

- name: Shell check
continue-on-error: true
run: |
echo "🔍 Checking shell scripts..."
find . -name "*.sh" -exec shellcheck {} \;

- name: Python lint
continue-on-error: true
run: |
echo "🔍 Linting Python code..."
flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics
flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics

- name: PHP check
continue-on-error: true
run: |
echo "🔍 Checking PHP syntax..."
find . -name "*.php" -exec php -l {} \;

- name: Docker lint
continue-on-error: true
run: |
echo "🔍 Linting Dockerfiles..."
/tmp/hadolint Dockerfile* || true

docker-tests:
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v4

- name: Run Docker-based tests
run: |
echo "🐳 Running Docker-based tests..."
chmod +x ./run_docker_tests.sh
./run_docker_tests.sh
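The new lint steps run with `continue-on-error: true`, so they report problems without failing the build. A rough way to run the same checks locally before pushing, assuming flake8, shellcheck, php-cli and hadolint are already installed on the host:

```sh
# Mirror of the workflow's lint steps for a local pre-push check
pip install flake8
flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics     # syntax errors and undefined names only
flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics
find . -name "*.sh" -exec shellcheck {} \;
find . -name "*.php" -exec php -l {} \;
hadolint Dockerfile* || true
```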
2  .github/workflows/docker_dev.yml  (vendored)
@@ -83,7 +83,7 @@ jobs:
password: ${{ secrets.DOCKERHUB_TOKEN }}

- name: Build and push
uses: docker/build-push-action@v3
uses: docker/build-push-action@v6
with:
context: .
platforms: linux/amd64,linux/arm64,linux/arm/v7,linux/arm/v6
2  .github/workflows/docker_prod.yml  (vendored)
@@ -72,7 +72,7 @@ jobs:
password: ${{ secrets.DOCKERHUB_TOKEN }}

- name: Build and push
uses: docker/build-push-action@v3
uses: docker/build-push-action@v6
with:
context: .
platforms: linux/amd64,linux/arm64,linux/arm/v7,linux/arm/v6
10  Dockerfile
@@ -137,7 +137,7 @@ RUN install -d -o ${NETALERTX_USER} -g ${NETALERTX_GROUP} -m 700 ${READ_WRITE_FO
-exec chmod 750 {} \;"

# Copy version information into the image
COPY --chown=${NETALERTX_USER}:${NETALERTX_GROUP} .VERSION ${NETALERTX_APP}/.VERSION
COPY --chown=${NETALERTX_USER}:${NETALERTX_GROUP} .[V]ERSION ${NETALERTX_APP}/.VERSION

# Copy the virtualenv from the builder stage
COPY --from=builder --chown=20212:20212 ${VIRTUAL_ENV} ${VIRTUAL_ENV}
@@ -147,7 +147,13 @@ COPY --from=builder --chown=20212:20212 ${VIRTUAL_ENV} ${VIRTUAL_ENV}
# This is done after the copy of the venv to ensure the venv is in place
# although it may be quicker to do it before the copy, it keeps the image
# layers smaller to do it after.
RUN apk add libcap && \
RUN if [ -f .VERSION ]; then \
cp .VERSION ${NETALERTX_APP}/.VERSION; \
else \
echo "DEVELOPMENT 00000000" > ${NETALERTX_APP}/.VERSION; \
fi && \
chown 20212:20212 ${NETALERTX_APP}/.VERSION && \
apk add libcap && \
setcap cap_net_raw+ep /bin/busybox && \
setcap cap_net_raw,cap_net_admin+eip /usr/bin/nmap && \
setcap cap_net_raw,cap_net_admin+eip /usr/bin/arp-scan && \
@@ -125,7 +125,9 @@ docker compose up

### Modification 1: Use a Local Folder (Bind Mount)

By default, the baseline compose file uses "named volumes" (`netalertx_config`, `netalertx_db`). **This is the preferred method** because NetAlertX is designed to manage all configuration and database settings directly from its web UI. Named volumes let Docker handle this data cleanly without you needing to manage local file permissions or paths.
By default, the baseline compose file uses a single named volume (`netalertx_data`) mounted at `/data`. This single-volume layout is preferred because NetAlertX manages both the configuration and the database under `/data` (for example, `/data/config` and `/data/db`) via its web UI. Using one named volume simplifies permissions and portability: Docker manages the storage and NetAlertX manages the files inside `/data`.

A two-volume layout that mounts `/data/config` and `/data/db` separately (for example, `netalertx_config` and `netalertx_db`) is supported for backward compatibility and some advanced workflows, but it is a legacy layout and is not recommended for new deployments.

However, if you prefer direct, file-level access to your configuration for manual editing, a "bind mount" is a simple alternative. This tells Docker to use a specific folder from your computer (the "host") inside the container.
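For illustration, a minimal bind-mount variant might look like the sketch below; the service name, image tag, and host path are assumptions for the example, not values taken from the baseline compose file:

```yaml
services:
  netalertx:
    image: jokob/netalertx:latest      # image name assumed for illustration
    volumes:
      # Bind-mount a host folder in place of the netalertx_data named volume;
      # NetAlertX keeps managing config and db under /data inside the container.
      - ./netalertx/data:/data
```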
@@ -12,7 +12,7 @@ var timerRefreshData = ''

var emptyArr = ['undefined', "", undefined, null, 'null'];
var UI_LANG = "English (en_us)";
const allLanguages = ["ar_ar","ca_ca","cs_cz","de_de","en_us","es_es","fa_fa","fr_fr","it_it","nb_no","pl_pl","pt_br","pt_pt","ru_ru","sv_sv","tr_tr","uk_ua","zh_cn"]; // needs to be same as in lang.php
const allLanguages = ["ar_ar","ca_ca","cs_cz","de_de","en_us","es_es","fa_fa","fr_fr","it_it","ja_jp","nb_no","pl_pl","pt_br","pt_pt","ru_ru","sv_sv","tr_tr","uk_ua","zh_cn"]; // needs to be same as in lang.php
var settingsJSON = {}
@@ -343,6 +343,9 @@ function getLangCode() {
case 'Italian (it_it)':
lang_code = 'it_it';
break;
case 'Japanese (ja_jp)':
lang_code = 'ja_jp';
break;
case 'Russian (ru_ru)':
lang_code = 'ru_ru';
break;
@@ -761,4 +761,4 @@
"settings_system_label": "تسمية النظام",
"settings_update_item_warning": "تحذير تحديث العنصر",
"test_event_tooltip": "تلميح اختبار الحدث"
}
}
@@ -761,4 +761,4 @@
"settings_system_label": "",
"settings_update_item_warning": "",
"test_event_tooltip": ""
}
}
@@ -834,4 +834,4 @@
"settings_system_label": "System",
"settings_update_item_warning": "",
"test_event_tooltip": "Speichere die Änderungen, bevor Sie die Einstellungen testen."
}
}
@@ -761,4 +761,4 @@
"settings_system_label": "",
"settings_update_item_warning": "",
"test_event_tooltip": ""
}
}
764  front/php/templates/language/ja_jp.json  (new file)
@@ -0,0 +1,764 @@
{
    "API_CUSTOM_SQL_description": "",
    "API_CUSTOM_SQL_name": "",
    "API_TOKEN_description": "",
    "API_TOKEN_name": "",
    [... 764 keys in total: every UI translation key, each added with an empty-string value for translators to fill in ...]
    "settings_system_label": "",
    "settings_update_item_warning": "",
    "test_event_tooltip": ""
}
@@ -5,7 +5,7 @@
// ###################################

$defaultLang = "en_us";
$allLanguages = [ "ar_ar", "ca_ca", "cs_cz", "de_de", "en_us", "es_es", "fa_fa", "fr_fr", "it_it", "nb_no", "pl_pl", "pt_br", "pt_pt", "ru_ru", "sv_sv", "tr_tr", "uk_ua", "zh_cn"];
$allLanguages = [ "ar_ar", "ca_ca", "cs_cz", "de_de", "en_us", "es_es", "fa_fa", "fr_fr", "it_it", "ja_jp", "nb_no", "pl_pl", "pt_br", "pt_pt", "ru_ru", "sv_sv", "tr_tr", "uk_ua", "zh_cn"];

global $db;
@@ -23,6 +23,7 @@ switch($result){
case 'Farsi (fa_fa)': $pia_lang_selected = 'fa_fa'; break;
case 'French (fr_fr)': $pia_lang_selected = 'fr_fr'; break;
case 'Italian (it_it)': $pia_lang_selected = 'it_it'; break;
case 'Japanese (ja_jp)': $pia_lang_selected = 'ja_jp'; break;
case 'Norwegian (nb_no)': $pia_lang_selected = 'nb_no'; break;
case 'Polish (pl_pl)': $pia_lang_selected = 'pl_pl'; break;
case 'Portuguese (pt_br)': $pia_lang_selected = 'pt_br'; break;
@@ -1,6 +1,6 @@
import json
import os
import sys

def merge_translations(main_file, other_files):
# Load main file
@@ -30,10 +30,14 @@ def merge_translations(main_file, other_files):
json.dump(data, f, indent=4, ensure_ascii=False)
f.truncate()

if __name__ == "__main__":
current_path = os.path.dirname(os.path.abspath(__file__))
# language codes can be found here: http://www.lingoes.net/en/translator/langcode.htm
# "en_us.json" has to be first!
json_files = [ "en_us.json", "ar_ar.json", "ca_ca.json", "cs_cz.json", "de_de.json", "es_es.json", "fa_fa.json", "fr_fr.json", "it_it.json", "nb_no.json", "pl_pl.json", "pt_br.json", "pt_pt.json", "ru_ru.json", "sv_sv.json", "tr_tr.json", "uk_ua.json", "zh_cn.json"]
# ⚠ "en_us.json" has to be first!
json_files = ["en_us.json", "ar_ar.json", "ca_ca.json", "cs_cz.json", "de_de.json",
"es_es.json", "fa_fa.json", "fr_fr.json", "it_it.json", "ja_jp.json",
"nb_no.json", "pl_pl.json", "pt_br.json", "pt_pt.json", "ru_ru.json",
"sv_sv.json", "tr_tr.json", "uk_ua.json", "zh_cn.json"]
file_paths = [os.path.join(current_path, file) for file in json_files]
merge_translations(file_paths[0], file_paths[1:])
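The body of `merge_translations()` sits mostly outside the hunks above; a minimal sketch of what such a merge typically does, assuming it simply copies keys missing from each language file out of `en_us.json` with empty values (the repository's actual implementation may differ):

```python
import json

def merge_translations(main_file, other_files):
    # Load the reference key set (en_us.json must be first in json_files)
    with open(main_file, encoding="utf-8") as f:
        reference = json.load(f)

    for path in other_files:
        with open(path, "r+", encoding="utf-8") as f:
            data = json.load(f)
            # Add any key missing from this language file with an empty value
            # so translators can fill it in later (e.g. the new ja_jp.json).
            for key in reference:
                data.setdefault(key, "")
            f.seek(0)
            json.dump(data, f, indent=4, ensure_ascii=False)
            f.truncate()
```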
@@ -761,4 +761,4 @@
"settings_system_label": "Система",
"settings_update_item_warning": "Обновить значение ниже. Будьте осторожны, следуя предыдущему формату. <b>Проверка не выполняется.</b>",
"test_event_tooltip": "Сначала сохраните изменения, прежде чем проверять настройки."
}
}
@@ -8,12 +8,12 @@ from pytz import timezone
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])

from const import logPath
from plugin_helper import Plugin_Objects
from logger import mylog, Logger
from helper import get_setting_value
from const import logPath  # noqa: E402, E261 [flake8 lint suppression]
from plugin_helper import Plugin_Objects  # noqa: E402, E261 [flake8 lint suppression]
from logger import mylog, Logger  # noqa: E402, E261 [flake8 lint suppression]
from helper import get_setting_value  # noqa: E402, E261 [flake8 lint suppression]

import conf
import conf  # noqa: E402, E261 [flake8 lint suppression]

# Make sure the TIMEZONE for logging is correct
conf.tz = timezone(get_setting_value('TIMEZONE'))
@@ -32,9 +32,8 @@ RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
plugin_objects = Plugin_Objects(RESULT_FILE)

def main():
mylog('verbose', [f'[{pluginName}] In script'])
mylog('verbose', [f'[{pluginName}] In script'])

# Retrieve configuration settings
some_setting = get_setting_value('SYNC_plugins')
@@ -47,14 +46,14 @@ def main():
# Process the data into native application tables
if len(device_data) > 0:

# insert devices into the lats_result.log
# make sure the below mapping is mapped in config.json, for example:
# insert devices into the lats_result.log
# make sure the below mapping is mapped in config.json, for example:
# "database_column_definitions": [
# {
# "column": "Object_PrimaryID", <--------- the value I save into primaryId
# "mapped_to_column": "cur_MAC", <--------- gets inserted into the CurrentScan DB
# table column cur_MAC
#
#
for device in device_data:
plugin_objects.add_object(
primaryId = device['mac_address'],
@@ -65,11 +64,11 @@ def main():
watched4 = device['last_seen'],
extra = '',
foreignKey = device['mac_address']
# helpVal1 = "Something1", # Optional Helper values to be passed for mapping into the app
# helpVal2 = "Something1", # If you need to use even only 1, add the remaining ones too
# helpVal1 = "Something1", # Optional Helper values to be passed for mapping into the app
# helpVal2 = "Something1", # If you need to use even only 1, add the remaining ones too
# helpVal3 = "Something1", # and set them to 'null'. Check the the docs for details:
# helpVal4 = "Something1", # https://github.com/jokob-sk/NetAlertX/blob/main/docs/PLUGINS_DEV.md
)
)

mylog('verbose', [f'[{pluginName}] New entries: "{len(device_data)}"'])

@@ -78,14 +77,15 @@ def main():

return 0

# retrieve data
def get_device_data(some_setting):

device_data = []

# do some processing, call exteranl APIs, and return a device_data list
# ...
#
#
# Sample data for testing purposes, you can adjust the processing in main() as needed
# ... before adding it to the plugin_objects.add_object(...)
device_data = [
@@ -113,8 +113,9 @@ def get_device_data(some_setting):
}
]

# Return the data to be detected by the main application
# Return the data to be detected by the main application
return device_data

if __name__ == '__main__':
main()
@@ -11,10 +11,10 @@ INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])

# NetAlertX modules
from const import logPath
from plugin_helper import Plugin_Objects
from logger import mylog
from helper import get_setting_value
from const import logPath  # noqa: E402 [flake8 lint suppression]
from plugin_helper import Plugin_Objects  # noqa: E402 [flake8 lint suppression]
from logger import mylog  # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value  # noqa: E402 [flake8 lint suppression]

pluginName = 'TESTONLY'

@@ -28,14 +28,11 @@ plugin_objects = Plugin_Objects(RESULT_FILE)
md5_hash = hashlib.md5()

# globals

def main():
# START
mylog('verbose', [f'[{pluginName}] In script'])

mylog('verbose', [f'[{pluginName}] In script'])

# SPACE FOR TESTING 🔽

str = "ABC-MBP._another.localdomain."
@@ -43,28 +40,23 @@ def main():
# result = cleanDeviceName(str, True)

regexes = get_setting_value('NEWDEV_NAME_CLEANUP_REGEX')

print(regexes)
subnets = get_setting_value('SCAN_SUBNETS')

print(subnets)

for rgx in regexes:
for rgx in regexes:
mylog('trace', ["[cleanDeviceName] applying regex : " + rgx])
mylog('trace', ["[cleanDeviceName] name before regex : " + str])

str = re.sub(rgx, "", str)
mylog('trace', ["[cleanDeviceName] name after regex : " + str])

mylog('debug', ["[cleanDeviceName] output: " + str])

# SPACE FOR TESTING 🔼

# END
mylog('verbose', [f'[{pluginName}] result "{str}"'])

mylog('verbose', [f'[{pluginName}] result "{str}"'])

# -------------INIT---------------------
@@ -9,15 +9,15 @@ import sys
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])

import conf
from const import confFileName, logPath
from utils.datetime_utils import timeNowDB
from plugin_helper import Plugin_Objects
from logger import mylog, Logger
from helper import get_setting_value
from models.notification_instance import NotificationInstance
from database import DB
from pytz import timezone
import conf  # noqa: E402 [flake8 lint suppression]
from const import confFileName, logPath  # noqa: E402 [flake8 lint suppression]
from utils.datetime_utils import timeNowDB  # noqa: E402 [flake8 lint suppression]
from plugin_helper import Plugin_Objects  # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger  # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value  # noqa: E402 [flake8 lint suppression]
from models.notification_instance import NotificationInstance  # noqa: E402 [flake8 lint suppression]
from database import DB  # noqa: E402 [flake8 lint suppression]
from pytz import timezone  # noqa: E402 [flake8 lint suppression]

# Make sure the TIMEZONE for logging is correct
conf.tz = timezone(get_setting_value("TIMEZONE"))
@@ -35,7 +35,7 @@ def main():
mylog("verbose", [f"[{pluginName}](publisher) In script"])

# Check if basic config settings supplied
if check_config() == False:
if check_config() is False:
mylog(
"none",
[
@@ -65,9 +65,9 @@ def main():
# Log result
plugin_objects.add_object(
primaryId = pluginName,
secondaryId = timeNowDB(),
secondaryId = timeNowDB(),
watched1 = notification["GUID"],
watched2 = result,
watched2 = result,
watched3 = 'null',
watched4 = 'null',
extra = 'null',
@@ -80,8 +80,7 @@ def main():
# -------------------------------------------------------------------------------
def check_config():
if get_setting_value("APPRISE_HOST") == "" or (
get_setting_value("APPRISE_URL") == ""
and get_setting_value("APPRISE_TAG") == ""
get_setting_value("APPRISE_URL") == "" and get_setting_value("APPRISE_TAG") == ""
):
return False
else:
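Several hunks in these publisher plugins replace `== False` with `is False`, the spelling flake8 asks for (rule E712, comparison to False, assuming the default rule set). A small self-contained illustration of the difference:

```python
def check_config() -> bool:
    # stand-in for the plugin's real check_config()
    return False

if check_config() == False:   # flagged by flake8 (E712)
    print("not configured (equality comparison)")
if check_config() is False:   # what the patch uses: identity check against False
    print("not configured (identity comparison)")
if not check_config():        # most idiomatic when any falsy result should bail out
    print("not configured (truthiness)")
```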
@@ -16,15 +16,15 @@ INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])

# NetAlertX modules
import conf
from const import confFileName, logPath
from plugin_helper import Plugin_Objects
from utils.datetime_utils import timeNowDB
from logger import mylog, Logger
from helper import get_setting_value, hide_email
from models.notification_instance import NotificationInstance
from database import DB
from pytz import timezone
import conf  # noqa: E402 [flake8 lint suppression]
from const import confFileName, logPath  # noqa: E402 [flake8 lint suppression]
from plugin_helper import Plugin_Objects  # noqa: E402 [flake8 lint suppression]
from utils.datetime_utils import timeNowDB  # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger  # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value, hide_email  # noqa: E402 [flake8 lint suppression]
from models.notification_instance import NotificationInstance  # noqa: E402 [flake8 lint suppression]
from database import DB  # noqa: E402 [flake8 lint suppression]
from pytz import timezone  # noqa: E402 [flake8 lint suppression]

# Make sure the TIMEZONE for logging is correct
conf.tz = timezone(get_setting_value('TIMEZONE'))
@@ -38,13 +38,12 @@ LOG_PATH = logPath + '/plugins'
RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')

def main():

mylog('verbose', [f'[{pluginName}](publisher) In script'])

mylog('verbose', [f'[{pluginName}](publisher) In script'])

# Check if basic config settings supplied
if check_config() == False:
if check_config() is False:
mylog('none', [f'[{pluginName}] ⚠ ERROR: Publisher notification gateway not set up correctly. Check your {confFileName} {pluginName}_* variables.'])
return

@@ -61,7 +60,7 @@ def main():
# Retrieve new notifications
new_notifications = notifications.getNew()

# mylog('verbose', [f'[{pluginName}] new_notifications: ', new_notifications])
# mylog('verbose', [f'[{pluginName}] new_notifications: ', new_notifications])
mylog('verbose', [f'[{pluginName}] SMTP_SERVER: ', get_setting_value("SMTP_SERVER")])
mylog('verbose', [f'[{pluginName}] SMTP_PORT: ', get_setting_value("SMTP_PORT")])
mylog('verbose', [f'[{pluginName}] SMTP_SKIP_LOGIN: ', get_setting_value("SMTP_SKIP_LOGIN")])
@@ -72,19 +71,18 @@ def main():
# mylog('verbose', [f'[{pluginName}] SMTP_REPORT_TO: ', get_setting_value("SMTP_REPORT_TO")])
# mylog('verbose', [f'[{pluginName}] SMTP_REPORT_FROM: ', get_setting_value("SMTP_REPORT_FROM")])

# Process the new notifications (see the Notifications DB table for structure or check the /php/server/query_json.php?file=table_notifications.json endpoint)
for notification in new_notifications:

# Send notification
result = send(notification["HTML"], notification["Text"])
result = send(notification["HTML"], notification["Text"])

# Log result
plugin_objects.add_object(
primaryId = pluginName,
secondaryId = timeNowDB(),
secondaryId = timeNowDB(),
watched1 = notification["GUID"],
watched2 = result,
watched2 = result,
watched3 = 'null',
watched4 = 'null',
extra = 'null',
@@ -93,25 +91,33 @@ def main():

plugin_objects.write_result_file()

#-------------------------------------------------------------------------------
def check_config ():

# -------------------------------------------------------------------------------
def check_config():

server = get_setting_value('SMTP_SERVER')
report_to = get_setting_value("SMTP_REPORT_TO")
report_from = get_setting_value("SMTP_REPORT_FROM")

if server == '' or report_from == '' or report_to == '':
mylog('none', [f'[Email Check Config] ⚠ ERROR: Email service not set up correctly. Check your {confFileName} SMTP_*, SMTP_REPORT_FROM and SMTP_REPORT_TO variables.'])
return False
else:
return True

#-------------------------------------------------------------------------------

# -------------------------------------------------------------------------------
def send(pHTML, pText):

mylog('debug', [f'[{pluginName}] SMTP_REPORT_TO: {hide_email(str(get_setting_value("SMTP_REPORT_TO")))} SMTP_USER: {hide_email(str(get_setting_value("SMTP_USER")))}'])

subject, from_email, to_email, message_html, message_text = sanitize_email_content(str(get_setting_value("SMTP_SUBJECT")), get_setting_value("SMTP_REPORT_FROM"), get_setting_value("SMTP_REPORT_TO"), pHTML, pText)
subject, from_email, to_email, message_html, message_text = sanitize_email_content(
str(get_setting_value("SMTP_SUBJECT")),
get_setting_value("SMTP_REPORT_FROM"),
get_setting_value("SMTP_REPORT_TO"),
pHTML,
pText
)

emails = []

@@ -132,10 +138,10 @@ def send(pHTML, pText):
msg['Subject'] = subject
msg['From'] = from_email
msg['To'] = mail_addr
msg['Date'] = formatdate(localtime=True)
msg['Date'] = formatdate(localtime=True)

msg.attach (MIMEText (message_text, 'plain'))
msg.attach (MIMEText (message_html, 'html'))
msg.attach(MIMEText(message_text, 'plain'))
msg.attach(MIMEText(message_html, 'html'))

# Set a timeout for the SMTP connection (in seconds)
smtp_timeout = 30
@@ -144,30 +150,31 @@ def send(pHTML, pText):

if get_setting_value("LOG_LEVEL") == 'debug':

send_email(msg,smtp_timeout)
send_email(msg, smtp_timeout)

else:

try:
send_email(msg,smtp_timeout)

except smtplib.SMTPAuthenticationError as e:
send_email(msg, smtp_timeout)

except smtplib.SMTPAuthenticationError as e:
mylog('none', [' ERROR: Couldn\'t connect to the SMTP server (SMTPAuthenticationError)'])
mylog('none', [' ERROR: Double-check your SMTP_USER and SMTP_PASS settings.)'])
mylog('none', [' ERROR: ', str(e)])
except smtplib.SMTPServerDisconnected as e:
except smtplib.SMTPServerDisconnected as e:
mylog('none', [' ERROR: Couldn\'t connect to the SMTP server (SMTPServerDisconnected)'])
mylog('none', [' ERROR: ', str(e)])
except socket.gaierror as e:
except socket.gaierror as e:
mylog('none', [' ERROR: Could not resolve hostname (socket.gaierror)'])
mylog('none', [' ERROR: ', str(e)])
except ssl.SSLError as e:
mylog('none', [' ERROR: ', str(e)])
except ssl.SSLError as e:
mylog('none', [' ERROR: Could not establish SSL connection (ssl.SSLError)'])
mylog('none', [' ERROR: Are you sure you need SMTP_FORCE_SSL enabled? Check your SMTP provider docs.'])
mylog('none', [' ERROR: ', str(e)])
mylog('none', [' ERROR: ', str(e)])

# ----------------------------------------------------------------------------------
def send_email(msg,smtp_timeout):
def send_email(msg, smtp_timeout):
# Send mail
if get_setting_value('SMTP_FORCE_SSL'):
mylog('debug', ['SMTP_FORCE_SSL == True so using .SMTP_SSL()'])
@@ -182,10 +189,10 @@ def send_email(msg,smtp_timeout):
mylog('debug', ['SMTP_FORCE_SSL == False so using .SMTP()'])
if get_setting_value("SMTP_PORT") == 0:
mylog('debug', ['SMTP_PORT == 0 so sending .SMTP(SMTP_SERVER)'])
smtp_connection = smtplib.SMTP (get_setting_value('SMTP_SERVER'))
smtp_connection = smtplib.SMTP(get_setting_value('SMTP_SERVER'))
else:
mylog('debug', ['SMTP_PORT == 0 so sending .SMTP(SMTP_SERVER, SMTP_PORT)'])
|
||||
smtp_connection = smtplib.SMTP (get_setting_value('SMTP_SERVER'), get_setting_value('SMTP_PORT'))
|
||||
smtp_connection = smtplib.SMTP(get_setting_value('SMTP_SERVER'), get_setting_value('SMTP_PORT'))
|
||||
|
||||
mylog('debug', ['Setting SMTP debug level'])
|
||||
|
||||
@@ -193,7 +200,7 @@ def send_email(msg,smtp_timeout):
|
||||
if get_setting_value('LOG_LEVEL') == 'debug':
|
||||
smtp_connection.set_debuglevel(1)
|
||||
|
||||
mylog('debug', [ 'Sending .ehlo()'])
|
||||
mylog('debug', ['Sending .ehlo()'])
|
||||
smtp_connection.ehlo()
|
||||
|
||||
if not get_setting_value('SMTP_SKIP_TLS'):
|
||||
@@ -203,12 +210,13 @@ def send_email(msg,smtp_timeout):
|
||||
smtp_connection.ehlo()
|
||||
if not get_setting_value('SMTP_SKIP_LOGIN'):
|
||||
mylog('debug', ['SMTP_SKIP_LOGIN == False so sending .login()'])
|
||||
smtp_connection.login (get_setting_value('SMTP_USER'), get_setting_value('SMTP_PASS'))
|
||||
smtp_connection.login(get_setting_value('SMTP_USER'), get_setting_value('SMTP_PASS'))
|
||||
|
||||
mylog('debug', ['Sending .sendmail()'])
|
||||
smtp_connection.sendmail (get_setting_value("SMTP_REPORT_FROM"), get_setting_value("SMTP_REPORT_TO"), msg.as_string())
|
||||
smtp_connection.sendmail(get_setting_value("SMTP_REPORT_FROM"), get_setting_value("SMTP_REPORT_TO"), msg.as_string())
|
||||
smtp_connection.quit()
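For context, the SMTP flow these hunks reformat can be reduced to a minimal standalone sketch. Host, credentials and the force-SSL/skip-TLS/skip-login flags below are placeholders, not the plugin's real `SMTP_*` settings.

```python
# Minimal sketch of the SMTP send flow shown above (placeholder values;
# the SMTP_FORCE_SSL / SMTP_SKIP_TLS / SMTP_SKIP_LOGIN settings are
# represented as plain booleans for illustration).
import smtplib
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
from email.utils import formatdate

def send_plain_report(server, port, user, password, sender, recipient,
                      force_ssl=False, skip_tls=False, skip_login=False, timeout=30):
    msg = MIMEMultipart('alternative')
    msg['Subject'] = 'NetAlertX report'
    msg['From'] = sender
    msg['To'] = recipient
    msg['Date'] = formatdate(localtime=True)
    msg.attach(MIMEText('plain text body', 'plain'))
    msg.attach(MIMEText('<b>HTML body</b>', 'html'))

    if force_ssl:
        conn = smtplib.SMTP_SSL(server, port, timeout=timeout)
    else:
        conn = smtplib.SMTP(server, port, timeout=timeout)
        conn.ehlo()
        if not skip_tls:
            conn.starttls()   # upgrade to TLS, then re-identify
            conn.ehlo()
    if not skip_login:
        conn.login(user, password)
    conn.sendmail(sender, [recipient], msg.as_string())
    conn.quit()
```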
# ----------------------------------------------------------------------------------
|
||||
def sanitize_email_content(subject, from_email, to_email, message_html, message_text):
|
||||
# Validate and sanitize subject
|
||||
@@ -229,6 +237,7 @@ def sanitize_email_content(subject, from_email, to_email, message_html, message_
|
||||
|
||||
return subject, from_email, to_email, message_html, message_text
|
||||
|
||||
|
||||
# ----------------------------------------------------------------------------------
|
||||
if __name__ == '__main__':
|
||||
sys.exit(main())
|
||||
|
||||
@@ -18,15 +18,14 @@ INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
|
||||
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
|
||||
|
||||
# NetAlertX modules
|
||||
import conf
|
||||
from const import confFileName, logPath
|
||||
from utils.plugin_utils import getPluginObject
|
||||
from plugin_helper import Plugin_Objects
|
||||
from logger import mylog, Logger
|
||||
import conf # noqa: E402 [flake8 lint suppression]
|
||||
from const import confFileName, logPath # noqa: E402 [flake8 lint suppression]
|
||||
from utils.plugin_utils import getPluginObject # noqa: E402 [flake8 lint suppression]
|
||||
from plugin_helper import Plugin_Objects # noqa: E402 [flake8 lint suppression]
|
||||
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
|
||||
from helper import get_setting_value, bytes_to_string, \
|
||||
sanitize_string, normalize_string
|
||||
from utils.datetime_utils import timeNowDB
|
||||
from database import DB, get_device_stats
|
||||
sanitize_string, normalize_string # noqa: E402 [flake8 lint suppression]
|
||||
from database import DB, get_device_stats # noqa: E402 [flake8 lint suppression]
|
||||
|
||||
|
||||
# Make sure the TIMEZONE for logging is correct
|
||||
@@ -234,7 +233,6 @@ class sensor_config:
|
||||
Store the sensor configuration in the global plugin_objects, which tracks sensors based on a unique combination
|
||||
of attributes including deviceId, sensorName, hash, and MAC.
|
||||
"""
|
||||
global plugin_objects
|
||||
|
||||
# Add the sensor to the global plugin_objects
|
||||
plugin_objects.add_object(
|
||||
@@ -287,11 +285,11 @@ def publish_mqtt(mqtt_client, topic, message):
|
||||
# mylog('verbose', [f"[{pluginName}] mqtt_client.is_connected(): {mqtt_client.is_connected()} "])
|
||||
|
||||
result = mqtt_client.publish(
|
||||
topic=topic,
|
||||
payload=message,
|
||||
qos=qos,
|
||||
retain=True,
|
||||
)
|
||||
topic=topic,
|
||||
payload=message,
|
||||
qos=qos,
|
||||
retain=True,
|
||||
)
|
||||
|
||||
status = result[0]
|
||||
|
||||
@@ -303,6 +301,7 @@ def publish_mqtt(mqtt_client, topic, message):
|
||||
time.sleep(0.1)
|
||||
return True
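As a reference for the retained-publish pattern being linted above, here is a hedged standalone sketch using paho-mqtt; broker address, topic and payload are example values, not NetAlertX settings.

```python
# Sketch of a retained MQTT state publish (paho-mqtt 1.x style constructor;
# paho-mqtt 2.x additionally takes a CallbackAPIVersion argument).
import json
import paho.mqtt.client as mqtt

client = mqtt.Client()
client.connect("broker.example.local", 1883)
client.loop_start()

payload = {"online": 5, "down": 1, "all": 6}
info = client.publish(
    topic="netalertx/sensor/example/state",
    payload=json.dumps(payload),   # dict payloads must be serialized first
    qos=1,
    retain=True,                   # broker keeps the last state for new subscribers
)
info.wait_for_publish()            # optional: block until the broker acknowledges
print("publish rc:", info.rc)      # 0 (MQTT_ERR_SUCCESS) on success

client.loop_stop()
client.disconnect()
```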
# ------------------------------------------------------------------------------
|
||||
# Create a generic device for overal stats
|
||||
def create_generic_device(mqtt_client, deviceId, deviceName):
|
||||
@@ -318,7 +317,6 @@ def create_generic_device(mqtt_client, deviceId, deviceName):
|
||||
# ------------------------------------------------------------------------------
|
||||
# Register sensor config on the broker
|
||||
def create_sensor(mqtt_client, deviceId, deviceName, sensorType, sensorName, icon, mac=""):
|
||||
global mqtt_sensors
|
||||
|
||||
# check previous configs
|
||||
sensorConfig = sensor_config(deviceId, deviceName, sensorType, sensorName, icon, mac)
|
||||
@@ -429,12 +427,11 @@ def mqtt_create_client():
|
||||
# -----------------------------------------------------------------------------
|
||||
def mqtt_start(db):
|
||||
|
||||
global mqtt_client, mqtt_connected_to_broker
|
||||
global mqtt_client
|
||||
|
||||
if not mqtt_connected_to_broker:
|
||||
mqtt_client = mqtt_create_client()
|
||||
|
||||
|
||||
deviceName = get_setting_value('MQTT_DEVICE_NAME')
|
||||
deviceId = get_setting_value('MQTT_DEVICE_ID')
|
||||
|
||||
@@ -449,16 +446,18 @@ def mqtt_start(db):
|
||||
row = get_device_stats(db)
|
||||
|
||||
# Publish (wrap into {} and remove last ',' from above)
|
||||
publish_mqtt(mqtt_client, f"{topic_root}/sensor/{deviceId}/state",
|
||||
{
|
||||
"online": row[0],
|
||||
"down": row[1],
|
||||
"all": row[2],
|
||||
"archived": row[3],
|
||||
"new": row[4],
|
||||
"unknown": row[5]
|
||||
}
|
||||
)
|
||||
publish_mqtt(
|
||||
mqtt_client,
|
||||
f"{topic_root}/sensor/{deviceId}/state",
|
||||
{
|
||||
"online": row[0],
|
||||
"down": row[1],
|
||||
"all": row[2],
|
||||
"archived": row[3],
|
||||
"new": row[4],
|
||||
"unknown": row[5]
|
||||
}
|
||||
)
|
||||
|
||||
# Generate device-specific MQTT messages if enabled
|
||||
if get_setting_value('MQTT_SEND_DEVICES'):
|
||||
@@ -466,11 +465,11 @@ def mqtt_start(db):
|
||||
# Specific devices processing
|
||||
|
||||
# Get all devices
|
||||
devices = db.read(get_setting_value('MQTT_DEVICES_SQL').replace('{s-quote}',"'"))
|
||||
devices = db.read(get_setting_value('MQTT_DEVICES_SQL').replace('{s-quote}', "'"))
|
||||
|
||||
sec_delay = len(devices) * int(get_setting_value('MQTT_DELAY_SEC'))*5
|
||||
sec_delay = len(devices) * int(get_setting_value('MQTT_DELAY_SEC')) * 5
|
||||
|
||||
mylog('verbose', [f"[{pluginName}] Estimated delay: ", (sec_delay), 's ', '(', round(sec_delay/60, 1), 'min)'])
|
||||
mylog('verbose', [f"[{pluginName}] Estimated delay: ", (sec_delay), 's ', '(', round(sec_delay / 60, 1), 'min)'])
|
||||
|
||||
for device in devices:
|
||||
|
||||
@@ -495,27 +494,29 @@ def mqtt_start(db):
|
||||
# handle device_tracker
|
||||
# IMPORTANT: shared payload - device_tracker attributes and individual sensors
|
||||
devJson = {
|
||||
"last_ip": device["devLastIP"],
|
||||
"is_new": str(device["devIsNew"]),
|
||||
"alert_down": str(device["devAlertDown"]),
|
||||
"vendor": sanitize_string(device["devVendor"]),
|
||||
"mac_address": str(device["devMac"]),
|
||||
"model": devDisplayName,
|
||||
"last_connection": prepTimeStamp(str(device["devLastConnection"])),
|
||||
"first_connection": prepTimeStamp(str(device["devFirstConnection"])),
|
||||
"sync_node": device["devSyncHubNode"],
|
||||
"group": device["devGroup"],
|
||||
"location": device["devLocation"],
|
||||
"network_parent_mac": device["devParentMAC"],
|
||||
"network_parent_name": next((dev["devName"] for dev in devices if dev["devMAC"] == device["devParentMAC"]), "")
|
||||
}
|
||||
"last_ip": device["devLastIP"],
|
||||
"is_new": str(device["devIsNew"]),
|
||||
"alert_down": str(device["devAlertDown"]),
|
||||
"vendor": sanitize_string(device["devVendor"]),
|
||||
"mac_address": str(device["devMac"]),
|
||||
"model": devDisplayName,
|
||||
"last_connection": prepTimeStamp(str(device["devLastConnection"])),
|
||||
"first_connection": prepTimeStamp(str(device["devFirstConnection"])),
|
||||
"sync_node": device["devSyncHubNode"],
|
||||
"group": device["devGroup"],
|
||||
"location": device["devLocation"],
|
||||
"network_parent_mac": device["devParentMAC"],
|
||||
"network_parent_name": next((dev["devName"] for dev in devices if dev["devMAC"] == device["devParentMAC"]), "")
|
||||
}
|
||||
|
||||
# bulk update device sensors in home assistant
|
||||
publish_mqtt(mqtt_client, sensorConfig.state_topic, devJson) # REQUIRED, DON'T DELETE
|
||||
|
||||
# create and update is_present sensor
|
||||
sensorConfig = create_sensor(mqtt_client, deviceId, devDisplayName, 'binary_sensor', 'is_present', 'wifi', device["devMac"])
|
||||
publish_mqtt(mqtt_client, sensorConfig.state_topic,
|
||||
publish_mqtt(
|
||||
mqtt_client,
|
||||
sensorConfig.state_topic,
|
||||
{
|
||||
"is_present": to_binary_sensor(str(device["devPresentLastScan"]))
|
||||
}
|
||||
@@ -547,7 +548,7 @@ def to_binary_sensor(input):
|
||||
elif isinstance(input, bool) and input:
|
||||
return "ON"
|
||||
elif isinstance(input, str) and input == "1":
|
||||
return "ON"
|
||||
return "ON"
|
||||
elif isinstance(input, bytes) and bytes_to_string(input) == "1":
|
||||
return "ON"
|
||||
return "OFF"
@@ -1,4 +1,3 @@
|
||||
|
||||
#!/usr/bin/env python
|
||||
|
||||
import json
|
||||
@@ -11,15 +10,15 @@ from base64 import b64encode
|
||||
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
|
||||
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
|
||||
|
||||
import conf
|
||||
from const import confFileName, logPath
|
||||
from plugin_helper import Plugin_Objects, handleEmpty
|
||||
from utils.datetime_utils import timeNowDB
|
||||
from logger import mylog, Logger
|
||||
from helper import get_setting_value
|
||||
from models.notification_instance import NotificationInstance
|
||||
from database import DB
|
||||
from pytz import timezone
|
||||
import conf # noqa: E402 [flake8 lint suppression]
|
||||
from const import confFileName, logPath # noqa: E402 [flake8 lint suppression]
|
||||
from plugin_helper import Plugin_Objects, handleEmpty # noqa: E402 [flake8 lint suppression]
|
||||
from utils.datetime_utils import timeNowDB # noqa: E402 [flake8 lint suppression]
|
||||
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
|
||||
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
|
||||
from models.notification_instance import NotificationInstance # noqa: E402 [flake8 lint suppression]
|
||||
from database import DB # noqa: E402 [flake8 lint suppression]
|
||||
from pytz import timezone # noqa: E402 [flake8 lint suppression]
|
||||
|
||||
# Make sure the TIMEZONE for logging is correct
|
||||
conf.tz = timezone(get_setting_value('TIMEZONE'))
|
||||
@@ -33,13 +32,12 @@ LOG_PATH = logPath + '/plugins'
|
||||
RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
|
||||
|
||||
|
||||
|
||||
def main():
|
||||
|
||||
mylog('verbose', [f'[{pluginName}](publisher) In script'])
|
||||
|
||||
|
||||
mylog('verbose', [f'[{pluginName}](publisher) In script'])
|
||||
|
||||
# Check if basic config settings supplied
|
||||
if check_config() == False:
|
||||
if check_config() is False:
|
||||
mylog('none', [f'[{pluginName}] ⚠ ERROR: Publisher notification gateway not set up correctly. Check your {confFileName} {pluginName}_* variables.'])
|
||||
return
|
||||
|
||||
@@ -65,9 +63,9 @@ def main():
|
||||
# Log result
|
||||
plugin_objects.add_object(
|
||||
primaryId = pluginName,
|
||||
secondaryId = timeNowDB(),
|
||||
secondaryId = timeNowDB(),
|
||||
watched1 = notification["GUID"],
|
||||
watched2 = handleEmpty(response_text),
|
||||
watched2 = handleEmpty(response_text),
|
||||
watched3 = response_status_code,
|
||||
watched4 = 'null',
|
||||
extra = 'null',
|
||||
@@ -77,15 +75,15 @@ def main():
|
||||
plugin_objects.write_result_file()
|
||||
|
||||
|
||||
|
||||
#-------------------------------------------------------------------------------
|
||||
# -------------------------------------------------------------------------------
|
||||
def check_config():
|
||||
if get_setting_value('NTFY_HOST') == '' or get_setting_value('NTFY_TOPIC') == '':
|
||||
if get_setting_value('NTFY_HOST') == '' or get_setting_value('NTFY_TOPIC') == '':
|
||||
return False
|
||||
else:
|
||||
return True
|
||||
|
||||
#-------------------------------------------------------------------------------
|
||||
|
||||
|
||||
# -------------------------------------------------------------------------------
|
||||
def send(html, text):
|
||||
|
||||
response_text = ''
|
||||
@@ -100,7 +98,7 @@ def send(html, text):
|
||||
# prepare request headers
|
||||
headers = {
|
||||
"Title": "NetAlertX Notification",
|
||||
"Actions": "view, Open Dashboard, "+ get_setting_value('REPORT_DASHBOARD_URL'),
|
||||
"Actions": "view, Open Dashboard, " + get_setting_value('REPORT_DASHBOARD_URL'),
|
||||
"Priority": get_setting_value('NTFY_PRIORITY'),
|
||||
"Tags": "warning"
|
||||
}
|
||||
@@ -109,37 +107,39 @@ def send(html, text):
|
||||
if token != '':
|
||||
headers["Authorization"] = "Bearer {}".format(token)
|
||||
elif user != "" and pwd != "":
|
||||
# Generate hash for basic auth
|
||||
# Generate hash for basic auth
|
||||
basichash = b64encode(bytes(user + ':' + pwd, "utf-8")).decode("ascii")
|
||||
# add authorization header with hash
|
||||
# add authorization header with hash
|
||||
headers["Authorization"] = "Basic {}".format(basichash)
# call NTFY service
|
||||
try:
|
||||
response = requests.post("{}/{}".format( get_setting_value('NTFY_HOST'),
|
||||
get_setting_value('NTFY_TOPIC')),
|
||||
data = text,
|
||||
headers = headers,
|
||||
verify = verify_ssl)
|
||||
response = requests.post("{}/{}".format(
|
||||
get_setting_value('NTFY_HOST'),
|
||||
get_setting_value('NTFY_TOPIC')),
|
||||
data = text,
|
||||
headers = headers,
|
||||
verify = verify_ssl,
|
||||
timeout = get_setting_value('NTFY_RUN_TIMEOUT')
|
||||
)
|
||||
|
||||
response_status_code = response.status_code
|
||||
|
||||
# Check if the request was successful (status code 200)
|
||||
if response_status_code == 200:
|
||||
response_text = response.text # This captures the response body/message
|
||||
response_text = response.text # This captures the response body/message
|
||||
else:
|
||||
response_text = json.dumps(response.text)
|
||||
response_text = json.dumps(response.text)
|
||||
|
||||
except requests.exceptions.RequestException as e:
|
||||
except requests.exceptions.RequestException as e:
|
||||
mylog('none', [f'[{pluginName}] ⚠ ERROR: ', e])
|
||||
|
||||
response_text = e
|
||||
|
||||
return response_text, response_status_code
|
||||
|
||||
return response_text, response_status_code
|
||||
return response_text, response_status_code
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
sys.exit(main())
|
||||
|
||||
|
||||
@@ -12,12 +12,12 @@ import requests
|
||||
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
|
||||
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
|
||||
|
||||
from plugin_helper import Plugin_Objects, handleEmpty # noqa: E402
|
||||
from logger import mylog, Logger # noqa: E402
|
||||
from helper import get_setting_value, hide_string # noqa: E402
|
||||
from utils.datetime_utils import timeNowDB
|
||||
from models.notification_instance import NotificationInstance # noqa: E402
|
||||
from database import DB # noqa: E402
|
||||
from plugin_helper import Plugin_Objects, handleEmpty # noqa: E402 [flake8 lint suppression]
|
||||
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
|
||||
from helper import get_setting_value, hide_string # noqa: E402 [flake8 lint suppression]
|
||||
from utils.datetime_utils import timeNowDB # noqa: E402 [flake8 lint suppression]
|
||||
from models.notification_instance import NotificationInstance # noqa: E402 [flake8 lint suppression]
|
||||
from database import DB # noqa: E402 [flake8 lint suppression]
|
||||
|
||||
# Make sure the TIMEZONE for logging is correct
|
||||
conf.tz = timezone(get_setting_value("TIMEZONE"))
|
||||
|
||||
@@ -1,6 +1,4 @@
|
||||
|
||||
#!/usr/bin/env python
|
||||
|
||||
import json
|
||||
import os
|
||||
import sys
|
||||
@@ -10,15 +8,15 @@ import requests
|
||||
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
|
||||
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
|
||||
|
||||
import conf
|
||||
from const import confFileName, logPath
|
||||
from plugin_helper import Plugin_Objects, handleEmpty
|
||||
from logger import mylog, Logger
|
||||
from helper import get_setting_value, hide_string
|
||||
from utils.datetime_utils import timeNowDB
|
||||
from models.notification_instance import NotificationInstance
|
||||
from database import DB
|
||||
from pytz import timezone
|
||||
import conf # noqa: E402 [flake8 lint suppression]
|
||||
from const import confFileName, logPath # noqa: E402 [flake8 lint suppression]
|
||||
from plugin_helper import Plugin_Objects, handleEmpty # noqa: E402 [flake8 lint suppression]
|
||||
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
|
||||
from helper import get_setting_value, hide_string # noqa: E402 [flake8 lint suppression]
|
||||
from utils.datetime_utils import timeNowDB # noqa: E402 [flake8 lint suppression]
|
||||
from models.notification_instance import NotificationInstance # noqa: E402 [flake8 lint suppression]
|
||||
from database import DB # noqa: E402 [flake8 lint suppression]
|
||||
from pytz import timezone # noqa: E402 [flake8 lint suppression]
|
||||
|
||||
# Make sure the TIMEZONE for logging is correct
|
||||
conf.tz = timezone(get_setting_value('TIMEZONE'))
|
||||
@@ -32,13 +30,12 @@ LOG_PATH = logPath + '/plugins'
|
||||
RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
|
||||
|
||||
|
||||
|
||||
def main():
|
||||
|
||||
mylog('verbose', [f'[{pluginName}](publisher) In script'])
|
||||
|
||||
|
||||
mylog('verbose', [f'[{pluginName}](publisher) In script'])
|
||||
|
||||
# Check if basic config settings supplied
|
||||
if check_config() == False:
|
||||
if check_config() is False:
|
||||
mylog('none', [f'[{pluginName}] ⚠ ERROR: Publisher notification gateway not set up correctly. Check your {confFileName} {pluginName}_* variables.'])
|
||||
return
|
||||
|
||||
@@ -59,14 +56,14 @@ def main():
|
||||
for notification in new_notifications:
|
||||
|
||||
# Send notification
|
||||
response_text, response_status_code = send(notification["Text"])
|
||||
response_text, response_status_code = send(notification["Text"])
|
||||
|
||||
# Log result
|
||||
plugin_objects.add_object(
|
||||
primaryId = pluginName,
|
||||
secondaryId = timeNowDB(),
|
||||
secondaryId = timeNowDB(),
|
||||
watched1 = notification["GUID"],
|
||||
watched2 = handleEmpty(response_text),
|
||||
watched2 = handleEmpty(response_text),
|
||||
watched3 = response_status_code,
|
||||
watched4 = 'null',
|
||||
extra = 'null',
|
||||
@@ -76,8 +73,7 @@ def main():
|
||||
plugin_objects.write_result_file()
|
||||
|
||||
|
||||
|
||||
#-------------------------------------------------------------------------------
|
||||
# -------------------------------------------------------------------------------
|
||||
def send(text):
|
||||
|
||||
response_text = ''
|
||||
@@ -85,8 +81,7 @@ def send(text):
|
||||
|
||||
token = get_setting_value('PUSHSAFER_TOKEN')
|
||||
|
||||
mylog('verbose', [f'[{pluginName}] PUSHSAFER_TOKEN: "{hide_string(token)}"'])
|
||||
|
||||
mylog('verbose', [f'[{pluginName}] PUSHSAFER_TOKEN: "{hide_string(token)}"'])
|
||||
|
||||
try:
|
||||
url = 'https://www.pushsafer.com/api'
|
||||
@@ -101,40 +96,34 @@ def send(text):
|
||||
"u" : get_setting_value('REPORT_DASHBOARD_URL'),
|
||||
"ut" : 'Open NetAlertX',
|
||||
"k" : token,
|
||||
}
|
||||
response = requests.post(url, data=post_fields)
|
||||
|
||||
}
|
||||
response = requests.post(url, data=post_fields, timeout=get_setting_value("PUSHSAFER_RUN_TIMEOUT"))
|
||||
response_status_code = response.status_code
|
||||
|
||||
|
||||
# Check if the request was successful (status code 200)
|
||||
if response_status_code == 200:
|
||||
response_text = response.text # This captures the response body/message
|
||||
response_text = response.text # This captures the response body/message
|
||||
else:
|
||||
response_text = json.dumps(response.text)
|
||||
response_text = json.dumps(response.text)
|
||||
|
||||
except requests.exceptions.RequestException as e:
|
||||
except requests.exceptions.RequestException as e:
|
||||
mylog('none', [f'[{pluginName}] ⚠ ERROR: ', e])
|
||||
|
||||
response_text = e
|
||||
|
||||
return response_text, response_status_code
|
||||
|
||||
|
||||
return response_text, response_status_code
|
||||
return response_text, response_status_code
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
#-------------------------------------------------------------------------------
|
||||
# -------------------------------------------------------------------------------
|
||||
def check_config():
|
||||
if get_setting_value('PUSHSAFER_TOKEN') == 'ApiKey':
|
||||
return False
|
||||
else:
|
||||
return True
|
||||
if get_setting_value('PUSHSAFER_TOKEN') == 'ApiKey':
|
||||
return False
|
||||
else:
|
||||
return True
|
||||
|
||||
|
||||
# -------------------------------------------------------
|
||||
if __name__ == '__main__':
|
||||
sys.exit(main())
|
||||
|
||||
|
||||
@@ -8,15 +8,15 @@ import sys
|
||||
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
|
||||
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
|
||||
|
||||
import conf
|
||||
from const import confFileName, logPath
|
||||
from plugin_helper import Plugin_Objects
|
||||
from utils.datetime_utils import timeNowDB
|
||||
from logger import mylog, Logger
|
||||
from helper import get_setting_value
|
||||
from models.notification_instance import NotificationInstance
|
||||
from database import DB
|
||||
from pytz import timezone
|
||||
import conf # noqa: E402 [flake8 lint suppression]
|
||||
from const import confFileName, logPath # noqa: E402 [flake8 lint suppression]
|
||||
from plugin_helper import Plugin_Objects # noqa: E402 [flake8 lint suppression]
|
||||
from utils.datetime_utils import timeNowDB # noqa: E402 [flake8 lint suppression]
|
||||
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
|
||||
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
|
||||
from models.notification_instance import NotificationInstance # noqa: E402 [flake8 lint suppression]
|
||||
from database import DB # noqa: E402 [flake8 lint suppression]
|
||||
from pytz import timezone # noqa: E402 [flake8 lint suppression]
|
||||
|
||||
# Make sure the TIMEZONE for logging is correct
|
||||
conf.tz = timezone(get_setting_value('TIMEZONE'))
|
||||
@@ -30,13 +30,11 @@ LOG_PATH = logPath + '/plugins'
|
||||
RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
|
||||
|
||||
|
||||
|
||||
|
||||
def main():
|
||||
mylog('verbose', [f'[{pluginName}](publisher) In script'])
|
||||
|
||||
# Check if basic config settings supplied
|
||||
if check_config() == False:
|
||||
if check_config() is False:
|
||||
mylog('none', [
|
||||
f'[{pluginName}] ⚠ ERROR: Publisher notification gateway not set up correctly. Check your {confFileName} {pluginName}_* variables.'])
|
||||
return
|
||||
|
||||
@@ -1,4 +1,3 @@
|
||||
|
||||
#!/usr/bin/env python
|
||||
|
||||
import json
|
||||
@@ -13,15 +12,15 @@ INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
|
||||
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
|
||||
|
||||
|
||||
import conf
|
||||
from const import logPath, confFileName
|
||||
from plugin_helper import Plugin_Objects, handleEmpty
|
||||
from utils.datetime_utils import timeNowDB
|
||||
from logger import mylog, Logger
|
||||
from helper import get_setting_value, write_file
|
||||
from models.notification_instance import NotificationInstance
|
||||
from database import DB
|
||||
from pytz import timezone
|
||||
import conf # noqa: E402 [flake8 lint suppression]
|
||||
from const import logPath, confFileName # noqa: E402 [flake8 lint suppression]
|
||||
from plugin_helper import Plugin_Objects, handleEmpty # noqa: E402 [flake8 lint suppression]
|
||||
from utils.datetime_utils import timeNowDB # noqa: E402 [flake8 lint suppression]
|
||||
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
|
||||
from helper import get_setting_value, write_file # noqa: E402 [flake8 lint suppression]
|
||||
from models.notification_instance import NotificationInstance # noqa: E402 [flake8 lint suppression]
|
||||
from database import DB # noqa: E402 [flake8 lint suppression]
|
||||
from pytz import timezone # noqa: E402 [flake8 lint suppression]
|
||||
|
||||
# Make sure the TIMEZONE for logging is correct
|
||||
conf.tz = timezone(get_setting_value('TIMEZONE'))
|
||||
@@ -35,13 +34,12 @@ LOG_PATH = logPath + '/plugins'
|
||||
RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
|
||||
|
||||
|
||||
|
||||
def main():
|
||||
|
||||
mylog('verbose', [f'[{pluginName}](publisher) In script'])
|
||||
|
||||
|
||||
mylog('verbose', [f'[{pluginName}](publisher) In script'])
|
||||
|
||||
# Check if basic config settings supplied
|
||||
if check_config() == False:
|
||||
if check_config() is False:
|
||||
mylog('none', [f'[{pluginName}] ⚠ ERROR: Publisher notification gateway not set up correctly. Check your {confFileName} {pluginName}_* variables.'])
|
||||
return
|
||||
|
||||
@@ -62,15 +60,19 @@ def main():
|
||||
for notification in new_notifications:
|
||||
|
||||
# Send notification
|
||||
response_stdout, response_stderr = send(notification["Text"], notification["HTML"], notification["JSON"])
|
||||
response_stdout, response_stderr = send(
|
||||
notification["Text"],
|
||||
notification["HTML"],
|
||||
notification["JSON"]
|
||||
)
|
||||
|
||||
# Log result
|
||||
plugin_objects.add_object(
|
||||
primaryId = pluginName,
|
||||
secondaryId = timeNowDB(),
|
||||
secondaryId = timeNowDB(),
|
||||
watched1 = notification["GUID"],
|
||||
watched2 = handleEmpty(response_stdout),
|
||||
watched3 = handleEmpty(response_stderr),
|
||||
watched2 = handleEmpty(response_stdout),
|
||||
watched3 = handleEmpty(response_stderr),
|
||||
watched4 = 'null',
|
||||
extra = 'null',
|
||||
foreignKey = notification["GUID"]
|
||||
@@ -79,16 +81,16 @@ def main():
|
||||
plugin_objects.write_result_file()
|
||||
|
||||
|
||||
#-------------------------------------------------------------------------------
|
||||
# -------------------------------------------------------------------------------
|
||||
def check_config():
|
||||
if get_setting_value('WEBHOOK_URL') == '':
|
||||
return False
|
||||
else:
|
||||
return True
|
||||
|
||||
#-------------------------------------------------------------------------------
|
||||
if get_setting_value('WEBHOOK_URL') == '':
|
||||
return False
|
||||
else:
|
||||
return True
|
||||
|
||||
def send (text_data, html_data, json_data):
|
||||
|
||||
# -------------------------------------------------------------------------------
|
||||
def send(text_data, html_data, json_data):
|
||||
|
||||
response_stderr = ''
|
||||
response_stdout = ''
|
||||
@@ -102,9 +104,9 @@ def send (text_data, html_data, json_data):
|
||||
|
||||
# use data type based on specified payload type
|
||||
if payloadType == 'json':
|
||||
# In this code, the truncate_json function is used to recursively traverse the JSON object
|
||||
# and remove nodes that exceed the size limit. It checks the size of each node's JSON representation
|
||||
# using json.dumps and includes only the nodes that are within the limit.
|
||||
# In this code, the truncate_json function is used to recursively traverse the JSON object
|
||||
# and remove nodes that exceed the size limit. It checks the size of each node's JSON representation
|
||||
# using json.dumps and includes only the nodes that are within the limit.
|
||||
json_str = json.dumps(json_data)
|
||||
|
||||
if len(json_str) <= limit:
|
||||
@@ -127,45 +129,48 @@ def send (text_data, html_data, json_data):
|
||||
return obj
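The comment in this hunk describes `truncate_json` as a recursive walk that drops any node whose serialized form exceeds the size limit. A hedged sketch of that idea (not the plugin's exact implementation):

```python
# Recursive truncation sketch: keep only child nodes whose JSON
# representation stays within the limit, as described in the comment above.
import json

def truncate_json(obj, limit=1000):
    if isinstance(obj, dict):
        return {k: truncate_json(v, limit) for k, v in obj.items()
                if len(json.dumps(v)) <= limit}
    if isinstance(obj, list):
        return [truncate_json(v, limit) for v in obj
                if len(json.dumps(v)) <= limit]
    return obj
```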
payloadData = truncate_json(json_data)
|
||||
if payloadType == 'html':
|
||||
if payloadType == 'html':
|
||||
if len(html_data) > limit:
|
||||
payloadData = html_data[:limit] + " <h1>(text was truncated)</h1>"
|
||||
else:
|
||||
payloadData = html_data
|
||||
if payloadType == 'text':
|
||||
if payloadType == 'text':
|
||||
if len(text_data) > limit:
|
||||
payloadData = text_data[:limit] + " (text was truncated)"
|
||||
else:
|
||||
payloadData = text_data
|
||||
|
||||
# Define slack-compatible payload
|
||||
_json_payload = { "text": payloadData } if payloadType == 'text' else {
|
||||
"username": "NetAlertX",
|
||||
"text": "There are new notifications",
|
||||
"attachments": [{
|
||||
"title": "NetAlertX Notifications",
|
||||
"title_link": get_setting_value('REPORT_DASHBOARD_URL'),
|
||||
"text": payloadData
|
||||
}]
|
||||
}
|
||||
if payloadType == 'text':
|
||||
_json_payload = {"text": payloadData}
|
||||
else:
|
||||
_json_payload = {
|
||||
"username": "NetAlertX",
|
||||
"text": "There are new notifications",
|
||||
"attachments": [{
|
||||
"title": "NetAlertX Notifications",
|
||||
"title_link": get_setting_value('REPORT_DASHBOARD_URL'),
|
||||
"text": payloadData
|
||||
}]
|
||||
}
|
||||
|
||||
# DEBUG - Write the json payload into a log file for debugging
|
||||
write_file (logPath + '/webhook_payload.json', json.dumps(_json_payload))
|
||||
write_file(logPath + '/webhook_payload.json', json.dumps(_json_payload))
|
||||
|
||||
# Using the Slack-Compatible Webhook endpoint for Discord so that the same payload can be used for both
|
||||
# Consider: curl has the ability to load in data to POST from a file + piping
|
||||
if(endpointUrl.startswith('https://discord.com/api/webhooks/') and not endpointUrl.endswith("/slack")):
|
||||
if (endpointUrl.startswith('https://discord.com/api/webhooks/') and not endpointUrl.endswith("/slack")):
|
||||
_WEBHOOK_URL = f"{endpointUrl}/slack"
|
||||
curlParams = ["curl","-i","-H", "Content-Type:application/json" ,"-d", json.dumps(_json_payload), _WEBHOOK_URL]
|
||||
curlParams = ["curl", "-i", "-H", "Content-Type:application/json", "-d", json.dumps(_json_payload), _WEBHOOK_URL]
|
||||
else:
|
||||
_WEBHOOK_URL = endpointUrl
|
||||
curlParams = ["curl","-i","-X", requestMethod , "-H", "Content-Type:application/json", "-d", json.dumps(_json_payload), _WEBHOOK_URL]
|
||||
curlParams = ["curl", "-i", "-X", requestMethod , "-H", "Content-Type:application/json", "-d", json.dumps(_json_payload), _WEBHOOK_URL]
|
||||
|
||||
# Add HMAC signature if configured
|
||||
if(secret != ''):
|
||||
if (secret != ''):
|
||||
h = hmac.new(secret.encode("UTF-8"), json.dumps(_json_payload, separators=(',', ':')).encode(), hashlib.sha256).hexdigest()
|
||||
curlParams.insert(4,"-H")
|
||||
curlParams.insert(5,f"X-Webhook-Signature: sha256={h}")
|
||||
curlParams.insert(4, "-H")
|
||||
curlParams.insert(5, f"X-Webhook-Signature: sha256={h}")
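The signature scheme touched above is a plain HMAC-SHA256 over the compact JSON body, delivered in an `X-Webhook-Signature` header. A standalone sketch with an example secret and payload:

```python
# Compute the webhook signature over the exact compact JSON that will be POSTed.
import hashlib
import hmac
import json

secret = "example-shared-secret"                      # placeholder, not a real setting value
payload = {"text": "There are new notifications"}
body = json.dumps(payload, separators=(',', ':'))     # compact form must match the request body

digest = hmac.new(secret.encode("utf-8"), body.encode(), hashlib.sha256).hexdigest()
headers = {"Content-Type": "application/json",
           "X-Webhook-Signature": f"sha256={digest}"}
# The receiver recomputes the HMAC over the raw body and compares with hmac.compare_digest().
```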
try:
|
||||
# Execute CURL call
|
||||
@@ -173,13 +178,11 @@ def send (text_data, html_data, json_data):
|
||||
result = subprocess.run(curlParams, capture_output=True, text=True)
|
||||
|
||||
response_stderr = result.stderr
|
||||
response_stdout = result.stdout
|
||||
response_stdout = result.stdout
|
||||
|
||||
# Write stdout and stderr into .log files for debugging if needed
|
||||
mylog('debug', [f'[{pluginName}] stdout: ', response_stdout])
|
||||
mylog('debug', [f'[{pluginName}] stderr: ', response_stderr])
|
||||
|
||||
|
||||
mylog('debug', [f'[{pluginName}] stderr: ', response_stderr])
|
||||
|
||||
except subprocess.CalledProcessError as e:
|
||||
# An error occurred, handle it
|
||||
@@ -187,10 +190,9 @@ def send (text_data, html_data, json_data):
|
||||
|
||||
response_stderr = e.output
|
||||
|
||||
return response_stdout, response_stderr
|
||||
|
||||
return response_stdout, response_stderr
|
||||
|
||||
# -------------------------------------------------------
|
||||
# -------------------------------------------------------
|
||||
if __name__ == '__main__':
|
||||
sys.exit(main())
|
||||
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
#!/usr/bin/env python
|
||||
import os
|
||||
import time
|
||||
import pathlib
|
||||
import argparse
|
||||
import sys
|
||||
import re
|
||||
@@ -9,16 +8,16 @@ import base64
|
||||
import subprocess
|
||||
|
||||
# Register NetAlertX directories
|
||||
INSTALL_PATH="/app"
|
||||
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
|
||||
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
|
||||
|
||||
from database import DB
|
||||
from plugin_helper import Plugin_Objects, handleEmpty
|
||||
from logger import mylog, Logger, append_line_to_file
|
||||
from helper import get_setting_value
|
||||
from const import logPath, applicationPath
|
||||
import conf
|
||||
from pytz import timezone
|
||||
from database import DB # noqa: E402 [flake8 lint suppression]
|
||||
from plugin_helper import Plugin_Objects, handleEmpty # noqa: E402 [flake8 lint suppression]
|
||||
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
|
||||
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
|
||||
from const import logPath # noqa: E402 [flake8 lint suppression]
|
||||
import conf # noqa: E402 [flake8 lint suppression]
|
||||
from pytz import timezone # noqa: E402 [flake8 lint suppression]
|
||||
|
||||
# Make sure the TIMEZONE for logging is correct
|
||||
conf.tz = timezone(get_setting_value("TIMEZONE"))
|
||||
|
||||
@@ -6,17 +6,16 @@ INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
|
||||
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
|
||||
pluginName = "ASUSWRT"
|
||||
|
||||
import asyncio
|
||||
|
||||
import aiohttp
|
||||
import conf
|
||||
from asusrouter import AsusData, AsusRouter
|
||||
from asusrouter.modules.connection import ConnectionState
|
||||
from const import logPath
|
||||
from helper import get_setting_value
|
||||
from logger import Logger, mylog
|
||||
from plugin_helper import (Plugin_Objects, handleEmpty)
|
||||
from pytz import timezone
|
||||
import asyncio # noqa: E402 [flake8 lint suppression]
|
||||
import aiohttp # noqa: E402 [flake8 lint suppression]
|
||||
import conf # noqa: E402 [flake8 lint suppression]
|
||||
from asusrouter import AsusData, AsusRouter # noqa: E402 [flake8 lint suppression]
|
||||
from asusrouter.modules.connection import ConnectionState # noqa: E402 [flake8 lint suppression]
|
||||
from const import logPath # noqa: E402 [flake8 lint suppression]
|
||||
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
|
||||
from logger import Logger, mylog # noqa: E402 [flake8 lint suppression]
|
||||
from plugin_helper import (Plugin_Objects, handleEmpty) # noqa: E402 [flake8 lint suppression]
|
||||
from pytz import timezone # noqa: E402 [flake8 lint suppression]
|
||||
|
||||
conf.tz = timezone(get_setting_value("TIMEZONE"))
|
||||
|
||||
|
||||
@@ -8,14 +8,14 @@ from zeroconf import Zeroconf
|
||||
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
|
||||
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
|
||||
|
||||
from plugin_helper import Plugin_Objects
|
||||
from logger import mylog, Logger
|
||||
from const import logPath
|
||||
from helper import get_setting_value
|
||||
from database import DB
|
||||
from models.device_instance import DeviceInstance
|
||||
import conf
|
||||
from pytz import timezone
|
||||
from plugin_helper import Plugin_Objects # noqa: E402 [flake8 lint suppression]
|
||||
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
|
||||
from const import logPath # noqa: E402 [flake8 lint suppression]
|
||||
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
|
||||
from database import DB # noqa: E402 [flake8 lint suppression]
|
||||
from models.device_instance import DeviceInstance # noqa: E402 [flake8 lint suppression]
|
||||
import conf # noqa: E402 [flake8 lint suppression]
|
||||
from pytz import timezone # noqa: E402 [flake8 lint suppression]
|
||||
|
||||
# Configure timezone and logging
|
||||
conf.tz = timezone(get_setting_value("TIMEZONE"))
|
||||
@@ -67,7 +67,7 @@ def resolve_mdns_name(ip: str, timeout: int = 5) -> str:
|
||||
hostname = socket.getnameinfo((ip, 0), socket.NI_NAMEREQD)[0]
|
||||
zeroconf.close()
|
||||
if hostname and hostname != ip:
|
||||
mylog("debug", [f"[{pluginName}] Found mDNS name: {hostname}"])
|
||||
mylog("debug", [f"[{pluginName}] Found mDNS name (rev_name): {hostname} ({rev_name})"])
|
||||
return hostname
|
||||
except Exception as e:
|
||||
mylog("debug", [f"[{pluginName}] Zeroconf lookup failed for {ip}: {e}"])
@@ -89,7 +89,7 @@ def main():
|
||||
|
||||
timeout = get_setting_value("AVAHISCAN_RUN_TIMEOUT")
|
||||
use_mock = "--mockdata" in sys.argv
|
||||
|
||||
|
||||
if use_mock:
|
||||
mylog("verbose", [f"[{pluginName}] Running in MOCK mode"])
|
||||
devices = [
|
||||
@@ -137,4 +137,4 @@ def main():
|
||||
# Entrypoint
|
||||
# =============================================================================
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
main()
|
||||
|
||||
@@ -11,11 +11,11 @@ from datetime import datetime
|
||||
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
|
||||
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
|
||||
|
||||
from logger import mylog, Logger
|
||||
from helper import get_setting_value
|
||||
from const import logPath, fullDbPath
|
||||
import conf
|
||||
from pytz import timezone
|
||||
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
|
||||
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
|
||||
from const import logPath, fullDbPath # noqa: E402 [flake8 lint suppression]
|
||||
import conf # noqa: E402 [flake8 lint suppression]
|
||||
from pytz import timezone # noqa: E402 [flake8 lint suppression]
|
||||
|
||||
# Make sure the TIMEZONE for logging is correct
|
||||
conf.tz = timezone(get_setting_value('TIMEZONE'))
|
||||
@@ -29,6 +29,7 @@ LOG_PATH = logPath + '/plugins'
|
||||
LOG_FILE = os.path.join(LOG_PATH, f'script.{pluginName}.log')
|
||||
RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
|
||||
|
||||
|
||||
def main():
|
||||
|
||||
# the script expects a parameter in the format of devices=device1,device2,...
|
||||
@@ -44,7 +45,7 @@ def main():
|
||||
else:
|
||||
overwrite = False
|
||||
|
||||
mylog('verbose', ['[CSVBCKP] In script'])
|
||||
mylog('verbose', ['[CSVBCKP] In script'])
|
||||
|
||||
# Connect to the App database
|
||||
conn = sqlite3.connect(fullDbPath)
|
||||
@@ -64,7 +65,7 @@ def main():
|
||||
|
||||
fullPath = os.path.join(values.location.split('=')[1], filename)
|
||||
|
||||
mylog('verbose', ['[CSVBCKP] Writing file ', fullPath])
|
||||
mylog('verbose', ['[CSVBCKP] Writing file ', fullPath])
|
||||
|
||||
# Create a CSV file in the specified location
|
||||
with open(fullPath, 'w', newline='') as csvfile:
|
||||
@@ -72,7 +73,7 @@ def main():
|
||||
csv_writer = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_MINIMAL)
|
||||
|
||||
# Wrap the header values in double quotes and write the header row
|
||||
csv_writer.writerow([ '"' + col + '"' for col in columns])
|
||||
csv_writer.writerow(['"' + col + '"' for col in columns])
|
||||
|
||||
# Fetch and write data rows
|
||||
for row in cursor.fetchall():
|
||||
@@ -96,8 +97,8 @@ def main():
|
||||
return 0
|
||||
|
||||
|
||||
#===============================================================================
|
||||
# ===============================================================================
|
||||
# BEGIN
|
||||
#===============================================================================
|
||||
# ===============================================================================
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
main()
|
||||
|
||||
@@ -8,11 +8,11 @@ import sqlite3
|
||||
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
|
||||
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
|
||||
|
||||
from logger import mylog, Logger
|
||||
from helper import get_setting_value
|
||||
from const import logPath, fullDbPath
|
||||
import conf
|
||||
from pytz import timezone
|
||||
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
|
||||
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
|
||||
from const import logPath, fullDbPath # noqa: E402 [flake8 lint suppression]
|
||||
import conf # noqa: E402 [flake8 lint suppression]
|
||||
from pytz import timezone # noqa: E402 [flake8 lint suppression]
|
||||
|
||||
# Make sure the TIMEZONE for logging is correct
|
||||
conf.tz = timezone(get_setting_value("TIMEZONE"))
|
||||
@@ -81,7 +81,7 @@ def cleanup_database(
|
||||
)
|
||||
cursor.execute(
|
||||
"""DELETE from Online_History where "Index" not in (
|
||||
SELECT "Index" from Online_History
|
||||
SELECT "Index" from Online_History
|
||||
order by Scan_Date desc limit 150)"""
|
||||
)
|
||||
|
||||
@@ -94,7 +94,7 @@ def cleanup_database(
|
||||
],
|
||||
)
|
||||
cursor.execute(
|
||||
f"""DELETE FROM Events
|
||||
f"""DELETE FROM Events
|
||||
WHERE eve_DateTime <= date('now', '-{str(DAYS_TO_KEEP_EVENTS)} day')"""
|
||||
)
|
||||
# -----------------------------------------------------
|
||||
@@ -107,11 +107,11 @@ def cleanup_database(
|
||||
)
|
||||
|
||||
# Build the SQL query to delete entries that exceed the limit per unique "Plugin" column entry
|
||||
delete_query = f"""DELETE FROM Plugins_History
|
||||
delete_query = f"""DELETE FROM Plugins_History
|
||||
WHERE "Index" NOT IN (
|
||||
SELECT "Index"
|
||||
FROM (
|
||||
SELECT "Index",
|
||||
SELECT "Index",
|
||||
ROW_NUMBER() OVER(PARTITION BY "Plugin" ORDER BY DateTimeChanged DESC) AS row_num
|
||||
FROM Plugins_History
|
||||
) AS ranked_objects
|
||||
@@ -133,11 +133,11 @@ def cleanup_database(
|
||||
)
|
||||
|
||||
# Build the SQL query to delete entries
|
||||
delete_query = f"""DELETE FROM Notifications
|
||||
delete_query = f"""DELETE FROM Notifications
|
||||
WHERE "Index" NOT IN (
|
||||
SELECT "Index"
|
||||
FROM (
|
||||
SELECT "Index",
|
||||
SELECT "Index",
|
||||
ROW_NUMBER() OVER(PARTITION BY "Notifications" ORDER BY DateTimeCreated DESC) AS row_num
|
||||
FROM Notifications
|
||||
) AS ranked_objects
|
||||
@@ -153,11 +153,11 @@ def cleanup_database(
|
||||
mylog("verbose", [f"[{pluginName}] Trim AppEvents to less than {histCount}"])
|
||||
|
||||
# Build the SQL query to delete entries
|
||||
delete_query = f"""DELETE FROM AppEvents
|
||||
delete_query = f"""DELETE FROM AppEvents
|
||||
WHERE "Index" NOT IN (
|
||||
SELECT "Index"
|
||||
FROM (
|
||||
SELECT "Index",
|
||||
SELECT "Index",
|
||||
ROW_NUMBER() OVER(PARTITION BY "AppEvents" ORDER BY DateTimeCreated DESC) AS row_num
|
||||
FROM AppEvents
|
||||
) AS ranked_objects
|
||||
|
||||
@@ -9,11 +9,11 @@ import subprocess
|
||||
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
|
||||
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
|
||||
|
||||
from logger import mylog, Logger
|
||||
from helper import get_setting_value, check_IP_format
|
||||
from const import logPath
|
||||
import conf
|
||||
from pytz import timezone
|
||||
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
|
||||
from helper import get_setting_value, check_IP_format # noqa: E402 [flake8 lint suppression]
|
||||
from const import logPath # noqa: E402 [flake8 lint suppression]
|
||||
import conf # noqa: E402 [flake8 lint suppression]
|
||||
from pytz import timezone # noqa: E402 [flake8 lint suppression]
|
||||
|
||||
# Make sure the TIMEZONE for logging is correct
|
||||
conf.tz = timezone(get_setting_value('TIMEZONE'))
|
||||
@@ -28,91 +28,88 @@ LOG_FILE = os.path.join(LOG_PATH, f'script.{pluginName}.log')
|
||||
RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
|
||||
|
||||
|
||||
|
||||
def main():
|
||||
|
||||
mylog('verbose', [f'[{pluginName}] In script'])
|
||||
|
||||
mylog('verbose', [f'[{pluginName}] In script'])
|
||||
|
||||
parser = argparse.ArgumentParser(description='Check internet connectivity and IP')
|
||||
|
||||
parser.add_argument('prev_ip', action="store", help="Previous IP address to compare against the current IP")
|
||||
|
||||
parser.add_argument('prev_ip', action="store", help="Previous IP address to compare against the current IP")
|
||||
parser.add_argument('DDNS_UPDATE_URL', action="store", help="URL for updating Dynamic DNS (DDNS)")
|
||||
parser.add_argument('DDNS_USER', action="store", help="Username for Dynamic DNS (DDNS) authentication")
|
||||
parser.add_argument('DDNS_PASSWORD', action="store", help="Password for Dynamic DNS (DDNS) authentication")
|
||||
parser.add_argument('DDNS_DOMAIN', action="store", help="Dynamic DNS (DDNS) domain name")
|
||||
|
||||
|
||||
values = parser.parse_args()
|
||||
|
||||
PREV_IP = values.prev_ip.split('=')[1]
|
||||
PREV_IP = values.prev_ip.split('=')[1]
|
||||
DDNS_UPDATE_URL = values.DDNS_UPDATE_URL.split('=')[1]
|
||||
DDNS_USER = values.DDNS_USER.split('=')[1]
|
||||
DDNS_PASSWORD = values.DDNS_PASSWORD.split('=')[1]
|
||||
DDNS_DOMAIN = values.DDNS_DOMAIN.split('=')[1]
|
||||
DDNS_DOMAIN = values.DDNS_DOMAIN.split('=')[1]
|
||||
|
||||
# perform the new IP lookup and DDNS tasks if enabled
|
||||
ddns_update( DDNS_UPDATE_URL, DDNS_USER, DDNS_PASSWORD, DDNS_DOMAIN, PREV_IP)
|
||||
ddns_update(DDNS_UPDATE_URL, DDNS_USER, DDNS_PASSWORD, DDNS_DOMAIN, PREV_IP)
|
||||
|
||||
mylog('verbose', [f'[{pluginName}] Finished '])
|
||||
|
||||
mylog('verbose', [f'[{pluginName}] Finished '])
|
||||
|
||||
return 0
|
||||
|
||||
|
||||
#===============================================================================
|
||||
|
||||
|
||||
# ===============================================================================
|
||||
# INTERNET IP CHANGE
|
||||
#===============================================================================
|
||||
def ddns_update ( DDNS_UPDATE_URL, DDNS_USER, DDNS_PASSWORD, DDNS_DOMAIN, PREV_IP ):
|
||||
|
||||
# ===============================================================================
|
||||
def ddns_update(DDNS_UPDATE_URL, DDNS_USER, DDNS_PASSWORD, DDNS_DOMAIN, PREV_IP):
|
||||
|
||||
# Update DDNS record if enabled and IP is different
|
||||
# Get Dynamic DNS IP
|
||||
|
||||
|
||||
mylog('verbose', [f'[{pluginName}] Retrieving Dynamic DNS IP'])
|
||||
dns_IP = get_dynamic_DNS_IP(DDNS_DOMAIN)
|
||||
|
||||
# Check Dynamic DNS IP
|
||||
if dns_IP == "" or dns_IP == "0.0.0.0" :
|
||||
mylog('none', [f'[{pluginName}] Error retrieving Dynamic DNS IP'])
|
||||
mylog('none', [f'[{pluginName}] Error retrieving Dynamic DNS IP'])
|
||||
|
||||
mylog('none', [f'[{pluginName}] ', dns_IP])
|
||||
|
||||
# Check DNS Change
|
||||
if dns_IP != PREV_IP :
|
||||
mylog('none', [f'[{pluginName}] Updating Dynamic DNS IP'])
|
||||
message = set_dynamic_DNS_IP (DDNS_UPDATE_URL, DDNS_USER, DDNS_PASSWORD, DDNS_DOMAIN)
|
||||
mylog('none', [f'[{pluginName}] ', message])
|
||||
message = set_dynamic_DNS_IP(DDNS_UPDATE_URL, DDNS_USER, DDNS_PASSWORD, DDNS_DOMAIN)
|
||||
mylog('none', [f'[{pluginName}] ', message])
|
||||
|
||||
# plugin_objects = Plugin_Objects(RESULT_FILE)
|
||||
|
||||
# plugin_objects = Plugin_Objects(RESULT_FILE)
|
||||
# plugin_objects.add_object(
|
||||
# primaryId = 'Internet', # MAC (Device Name)
|
||||
# secondaryId = new_internet_IP, # IP Address
|
||||
# secondaryId = new_internet_IP, # IP Address
|
||||
# watched1 = f'Previous IP: {PREV_IP}',
|
||||
# watched2 = '',
|
||||
# watched3 = '',
|
||||
# watched3 = '',
|
||||
# watched4 = '',
|
||||
# extra = f'Previous IP: {PREV_IP}',
|
||||
# extra = f'Previous IP: {PREV_IP}',
|
||||
# foreignKey = 'Internet')
|
||||
|
||||
# plugin_objects.write_result_file()
|
||||
|
||||
# plugin_objects.write_result_file()
|
||||
|
||||
#-------------------------------------------------------------------------------
|
||||
def get_dynamic_DNS_IP (DDNS_DOMAIN):
|
||||
|
||||
# -------------------------------------------------------------------------------
|
||||
def get_dynamic_DNS_IP(DDNS_DOMAIN):
|
||||
|
||||
# Using supplied DNS server
|
||||
dig_args = ['dig', '+short', DDNS_DOMAIN]
|
||||
|
||||
try:
|
||||
# try runnning a subprocess
|
||||
dig_output = subprocess.check_output (dig_args, universal_newlines=True)
|
||||
dig_output = subprocess.check_output(dig_args, universal_newlines=True)
|
||||
mylog('none', [f'[{pluginName}] DIG output :', dig_output])
|
||||
except subprocess.CalledProcessError as e:
|
||||
# An error occured, handle it
|
||||
mylog('none', [f'[{pluginName}] ⚠ ERROR - ', e.output])
|
||||
dig_output = '' # probably no internet
|
||||
dig_output = '' # probably no internet
|
||||
|
||||
# Check result is an IP
|
||||
IP = check_IP_format (dig_output)
|
||||
IP = check_IP_format(dig_output)
|
||||
|
||||
# Handle invalid response
|
||||
if IP == '':
|
||||
@@ -120,28 +117,27 @@ def get_dynamic_DNS_IP (DDNS_DOMAIN):
|
||||
|
||||
return IP
|
||||
|
||||
#-------------------------------------------------------------------------------
|
||||
def set_dynamic_DNS_IP (DDNS_UPDATE_URL, DDNS_USER, DDNS_PASSWORD, DDNS_DOMAIN):
|
||||
|
||||
# -------------------------------------------------------------------------------
|
||||
def set_dynamic_DNS_IP(DDNS_UPDATE_URL, DDNS_USER, DDNS_PASSWORD, DDNS_DOMAIN):
|
||||
try:
|
||||
# try runnning a subprocess
|
||||
# Update Dynamic IP
|
||||
curl_output = subprocess.check_output (['curl',
|
||||
'-s',
|
||||
DDNS_UPDATE_URL +
|
||||
'username=' + DDNS_USER +
|
||||
'&password=' + DDNS_PASSWORD +
|
||||
'&hostname=' + DDNS_DOMAIN],
|
||||
universal_newlines=True)
|
||||
curl_output = subprocess.check_output([
|
||||
'curl',
|
||||
'-s',
|
||||
DDNS_UPDATE_URL + 'username=' + DDNS_USER + '&password=' + DDNS_PASSWORD + '&hostname=' + DDNS_DOMAIN],
|
||||
universal_newlines=True)
|
||||
except subprocess.CalledProcessError as e:
|
||||
# An error occured, handle it
|
||||
mylog('none', [f'[{pluginName}] ⚠ ERROR - ',e.output])
|
||||
curl_output = ""
|
||||
|
||||
mylog('none', [f'[{pluginName}] ⚠ ERROR - ', e.output])
|
||||
curl_output = ""
|
||||
|
||||
return curl_output
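The DDNS flow diffed above is: read the record with `dig +short`, compare it with the previous IP, and hit the provider's update URL if it changed. A hedged sketch of that flow (provider URL and credentials are placeholders; `requests` query parameters stand in for the plugin's curl string concatenation):

```python
# DDNS helper sketch: resolve the current record, then call the update URL.
import subprocess
import requests

def current_ddns_ip(domain: str) -> str:
    try:
        return subprocess.check_output(["dig", "+short", domain],
                                       universal_newlines=True).strip()
    except subprocess.CalledProcessError:
        return ""   # e.g. no internet connectivity

def update_ddns(update_url: str, user: str, password: str, domain: str) -> str:
    resp = requests.get(update_url,
                        params={"username": user, "password": password, "hostname": domain},
                        timeout=10)
    return resp.text
```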
|
||||
#===============================================================================
|
||||
# ===============================================================================
|
||||
# BEGIN
|
||||
#===============================================================================
|
||||
# ===============================================================================
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
main()
|
||||
|
||||
@@ -4,19 +4,19 @@ from __future__ import unicode_literals
import argparse
import os
import sys
import chardet
import chardet

# Register NetAlertX directories
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])

from plugin_helper import Plugin_Objects, handleEmpty, is_mac
from logger import mylog, Logger
from dhcp_leases import DhcpLeases
from helper import get_setting_value
import conf
from const import logPath
from pytz import timezone
from plugin_helper import Plugin_Objects, handleEmpty, is_mac # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from dhcp_leases import DhcpLeases # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
import conf # noqa: E402 [flake8 lint suppression]
from const import logPath # noqa: E402 [flake8 lint suppression]
from pytz import timezone # noqa: E402 [flake8 lint suppression]

# Make sure the TIMEZONE for logging is correct
conf.tz = timezone(get_setting_value('TIMEZONE'))
@@ -24,34 +24,38 @@ conf.tz = timezone(get_setting_value('TIMEZONE'))
# Make sure log level is initialized correctly
Logger(get_setting_value('LOG_LEVEL'))

pluginName= 'DHCPLSS'
pluginName = 'DHCPLSS'

LOG_PATH = logPath + '/plugins'
LOG_FILE = os.path.join(LOG_PATH, f'script.{pluginName}.log')
RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')



# -------------------------------------------------------------
def main():
def main():
mylog('verbose', [f'[{pluginName}] In script'])
last_run_logfile = open(RESULT_FILE, 'a')
last_run_logfile = open(RESULT_FILE, 'a')
last_run_logfile.write("")

parser = argparse.ArgumentParser(description='Import devices from dhcp.leases files')
parser.add_argument('paths', action="store", help="absolute dhcp.leases file paths to check separated by ','")
parser.add_argument(
'paths',
action="store",
help="absolute dhcp.leases file paths to check separated by ','"
)

values = parser.parse_args()

plugin_objects = Plugin_Objects(RESULT_FILE)

if values.paths:
for path in values.paths.split('=')[1].split(','):
for path in values.paths.split('=')[1].split(','):
plugin_objects = get_entries(path, plugin_objects)
mylog('verbose', [f'[{pluginName}] {len(plugin_objects)} Entries found in "{path}"'])

mylog('verbose', [f'[{pluginName}] {len(plugin_objects)} Entries found in "{path}"'])

plugin_objects.write_result_file()


# -------------------------------------------------------------
def get_entries(path, plugin_objects):

@@ -66,7 +70,7 @@ def get_entries(path, plugin_objects):
# Use the detected encoding
encoding = result['encoding']

# Order: MAC, IP, IsActive, NAME, Hardware
# Order: MAC, IP, IsActive, NAME, Hardware
# Handle pihole-specific dhcp.leases files
if 'pihole' in path:
with open(path, 'r', encoding=encoding, errors='replace') as f:
@@ -111,9 +115,9 @@ def get_entries(path, plugin_objects):
if is_mac(lease.ethernet):

plugin_objects.add_object(
primaryId = handleEmpty(lease.ethernet),
secondaryId = handleEmpty(lease.ip),
watched1 = handleEmpty(lease.active),
primaryId = handleEmpty(lease.ethernet),
secondaryId = handleEmpty(lease.ip),
watched1 = handleEmpty(lease.active),
watched2 = handleEmpty(lease.hostname),
watched3 = handleEmpty(lease.hardware),
watched4 = handleEmpty(lease.binding_state),
@@ -122,5 +126,6 @@ def get_entries(path, plugin_objects):
)
return plugin_objects

if __name__ == '__main__':
main()

if __name__ == '__main__':
main()
@@ -3,7 +3,6 @@

import subprocess
import os
from datetime import datetime

import sys

@@ -11,12 +10,12 @@ import sys
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])

from plugin_helper import Plugin_Objects, Plugin_Object
from logger import mylog, Logger
from helper import get_setting_value
import conf
from pytz import timezone
from const import logPath
from plugin_helper import Plugin_Objects, Plugin_Object # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
import conf # noqa: E402 [flake8 lint suppression]
from pytz import timezone # noqa: E402 [flake8 lint suppression]
from const import logPath # noqa: E402 [flake8 lint suppression]


# Make sure the TIMEZONE for logging is correct
@@ -31,13 +30,14 @@ LOG_PATH = logPath + '/plugins'
|
||||
LOG_FILE = os.path.join(LOG_PATH, f'script.{pluginName}.log')
|
||||
RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
|
||||
|
||||
|
||||
def main():
|
||||
|
||||
mylog('verbose', ['[DHCPSRVS] In script'])
|
||||
|
||||
last_run_logfile = open(RESULT_FILE, 'a')
|
||||
|
||||
last_run_logfile = open(RESULT_FILE, 'a')
|
||||
last_run_logfile.write("")
|
||||
|
||||
|
||||
plugin_objects = Plugin_Objects(RESULT_FILE)
|
||||
timeoutSec = get_setting_value('DHCPSRVS_RUN_TIMEOUT')
|
||||
|
||||
@@ -46,10 +46,10 @@ def main():
|
||||
try:
|
||||
# Number of DHCP discovery probes to send
|
||||
dhcp_probes = 1
|
||||
|
||||
|
||||
# Initialize a list to store output lines from the scan
|
||||
newLines = []
|
||||
|
||||
|
||||
for _ in range(dhcp_probes):
|
||||
output = subprocess.check_output(nmapArgs, universal_newlines=True, stderr=subprocess.STDOUT, timeout=timeoutSec)
|
||||
newLines += output.split("\n")
|
||||
@@ -57,9 +57,9 @@ def main():
|
||||
newEntries = []
|
||||
|
||||
for line in newLines:
|
||||
|
||||
|
||||
mylog('verbose', [f'[DHCPSRVS] Processing line: {line} '])
|
||||
|
||||
|
||||
if 'Response ' in line and ' of ' in line:
|
||||
newEntries.append(Plugin_Object())
|
||||
elif 'Server Identifier' in line:
|
||||
@@ -85,7 +85,7 @@ def main():
|
||||
newEntries[-1].extra += ',' + newVal
|
||||
|
||||
for e in newEntries:
|
||||
|
||||
|
||||
plugin_objects.add_object(
|
||||
primaryId=e.primaryId,
|
||||
secondaryId=e.secondaryId,
|
||||
@@ -101,5 +101,6 @@ def main():
|
||||
except Exception as e:
|
||||
mylog('verbose', ['[DHCPSRVS] Error in main:', str(e)])
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
|
||||
@@ -1,5 +1,4 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
import os
|
||||
import sys
|
||||
import subprocess
|
||||
@@ -8,14 +7,14 @@ import subprocess
|
||||
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
|
||||
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
|
||||
|
||||
from plugin_helper import Plugin_Objects
|
||||
from logger import mylog, Logger
|
||||
from const import logPath
|
||||
from helper import get_setting_value
|
||||
from database import DB
|
||||
from models.device_instance import DeviceInstance
|
||||
import conf
|
||||
from pytz import timezone
|
||||
from plugin_helper import Plugin_Objects # noqa: E402 [flake8 lint suppression]
|
||||
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
|
||||
from const import logPath # noqa: E402 [flake8 lint suppression]
|
||||
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
|
||||
from database import DB # noqa: E402 [flake8 lint suppression]
|
||||
from models.device_instance import DeviceInstance # noqa: E402 [flake8 lint suppression]
|
||||
import conf # noqa: E402 [flake8 lint suppression]
|
||||
from pytz import timezone # noqa: E402 [flake8 lint suppression]
|
||||
|
||||
# Make sure the TIMEZONE for logging is correct
|
||||
conf.tz = timezone(get_setting_value('TIMEZONE'))
|
||||
@@ -35,7 +34,7 @@ plugin_objects = Plugin_Objects(RESULT_FILE)
|
||||
|
||||
|
||||
def main():
|
||||
mylog('verbose', [f'[{pluginName}] In script'])
|
||||
mylog('verbose', [f'[{pluginName}] In script'])
|
||||
|
||||
timeout = get_setting_value('DIGSCAN_RUN_TIMEOUT')
|
||||
|
||||
@@ -50,13 +49,13 @@ def main():
|
||||
device_handler = DeviceInstance(db)
|
||||
|
||||
# Retrieve devices
|
||||
if get_setting_value("REFRESH_FQDN"):
|
||||
if get_setting_value("REFRESH_FQDN"):
|
||||
devices = device_handler.getAll()
|
||||
else:
|
||||
else:
|
||||
devices = device_handler.getUnknown()
|
||||
|
||||
mylog('verbose', [f'[{pluginName}] Devices count: {len(devices)}'])
|
||||
|
||||
mylog('verbose', [f'[{pluginName}] Devices count: {len(devices)}'])
|
||||
|
||||
# TEST - below is a WINDOWS host IP
|
||||
# execute_name_lookup('192.168.1.121', timeout)
|
||||
|
||||
@@ -65,27 +64,27 @@ def main():
|
||||
|
||||
if domain_name != '':
|
||||
plugin_objects.add_object(
|
||||
# "MAC", "IP", "Server", "Name"
|
||||
primaryId = device['devMac'],
|
||||
secondaryId = device['devLastIP'],
|
||||
watched1 = dns_server,
|
||||
watched2 = domain_name,
|
||||
watched3 = '',
|
||||
watched4 = '',
|
||||
extra = '',
|
||||
foreignKey = device['devMac'])
|
||||
primaryId = device['devMac'],
|
||||
secondaryId = device['devLastIP'],
|
||||
watched1 = dns_server,
|
||||
watched2 = domain_name,
|
||||
watched3 = '',
|
||||
watched4 = '',
|
||||
extra = '',
|
||||
foreignKey = device['devMac']
|
||||
)
|
||||
|
||||
plugin_objects.write_result_file()
|
||||
|
||||
|
||||
mylog('verbose', [f'[{pluginName}] Script finished'])
|
||||
|
||||
|
||||
mylog('verbose', [f'[{pluginName}] Script finished'])
|
||||
|
||||
return 0
|
||||
|
||||
#===============================================================================
|
||||
|
||||
# ===============================================================================
|
||||
# Execute scan
|
||||
#===============================================================================
|
||||
def execute_name_lookup (ip, timeout):
|
||||
# ===============================================================================
|
||||
def execute_name_lookup(ip, timeout):
|
||||
"""
|
||||
Execute the DIG command on IP.
|
||||
"""
|
||||
@@ -97,32 +96,38 @@ def execute_name_lookup (ip, timeout):
|
||||
|
||||
try:
|
||||
mylog('verbose', [f'[{pluginName}] DEBUG CMD :', args])
|
||||
|
||||
|
||||
# try runnning a subprocess with a forced (timeout) in case the subprocess hangs
|
||||
output = subprocess.check_output (args, universal_newlines=True, stderr=subprocess.STDOUT, timeout=(timeout), text=True).strip()
|
||||
output = subprocess.check_output(
|
||||
args,
|
||||
universal_newlines=True,
|
||||
stderr=subprocess.STDOUT,
|
||||
timeout=(timeout),
|
||||
text=True
|
||||
).strip()
|
||||
|
||||
mylog('verbose', [f'[{pluginName}] DEBUG OUTPUT : {output}'])
|
||||
|
||||
domain_name = output
|
||||
dns_server = ''
|
||||
dns_server = ''
|
||||
|
||||
mylog('verbose', [f'[{pluginName}] Domain Name: {domain_name}'])
|
||||
|
||||
return domain_name, dns_server
|
||||
|
||||
except subprocess.CalledProcessError as e:
|
||||
mylog('verbose', [f'[{pluginName}] ⚠ ERROR - {e.output}'])
|
||||
|
||||
except subprocess.TimeoutExpired:
|
||||
mylog('verbose', [f'[{pluginName}] TIMEOUT - the process forcefully terminated as timeout reached'])
|
||||
mylog('verbose', [f'[{pluginName}] ⚠ ERROR - {e.output}'])
|
||||
|
||||
if output == "": # check if the subprocess failed
|
||||
mylog('verbose', [f'[{pluginName}] Scan: FAIL - check logs'])
|
||||
else:
|
||||
except subprocess.TimeoutExpired:
|
||||
mylog('verbose', [f'[{pluginName}] TIMEOUT - the process forcefully terminated as timeout reached'])
|
||||
|
||||
if output == "": # check if the subprocess failed
|
||||
mylog('verbose', [f'[{pluginName}] Scan: FAIL - check logs'])
|
||||
else:
|
||||
mylog('verbose', [f'[{pluginName}] Scan: SUCCESS'])
|
||||
|
||||
return '', ''
|
||||
return '', ''
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
|
||||
|
||||
@@ -17,11 +17,12 @@ from aiofreepybox.exceptions import NotOpenError, AuthorizationError
|
||||
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
|
||||
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
|
||||
|
||||
from plugin_helper import Plugin_Objects
|
||||
from logger import mylog, Logger
|
||||
from const import logPath
|
||||
from helper import get_setting_value
|
||||
import conf
|
||||
from plugin_helper import Plugin_Objects # noqa: E402 [flake8 lint suppression]
|
||||
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
|
||||
from const import logPath # noqa: E402 [flake8 lint suppression]
|
||||
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
|
||||
import conf # noqa: E402 [flake8 lint suppression]
|
||||
from utils.datetime_utils import timeNowDB, DATETIME_PATTERN # noqa: E402 [flake8 lint suppression]
|
||||
|
||||
# Make sure the TIMEZONE for logging is correct
|
||||
conf.tz = timezone(get_setting_value("TIMEZONE"))
|
||||
@@ -79,6 +80,7 @@ def map_device_type(type: str):
|
||||
mylog("minimal", [f"[{pluginName}] Unknown device type: {type}"])
|
||||
return device_type_map["other"]
|
||||
|
||||
|
||||
async def get_device_data(api_version: int, api_address: str, api_port: int):
|
||||
# ensure existence of db path
|
||||
config_base = Path(os.getenv("NETALERTX_CONFIG", "/data/config"))
|
||||
@@ -149,7 +151,7 @@ def main():
|
||||
watched1=freebox["name"],
|
||||
watched2=freebox["operator"],
|
||||
watched3="Gateway",
|
||||
watched4=datetime.now,
|
||||
watched4=timeNowDB(),
|
||||
extra="",
|
||||
foreignKey=freebox["mac"],
|
||||
)
|
||||
@@ -165,7 +167,7 @@ def main():
|
||||
watched1=host.get("primary_name", "(unknown)"),
|
||||
watched2=host.get("vendor_name", "(unknown)"),
|
||||
watched3=map_device_type(host.get("host_type", "")),
|
||||
watched4=datetime.fromtimestamp(ip.get("last_time_reachable", 0)),
|
||||
watched4=datetime.fromtimestamp(ip.get("last_time_reachable", 0)).strftime(DATETIME_PATTERN),
|
||||
extra="",
|
||||
foreignKey=mac,
|
||||
)
|
||||
|
||||
@@ -11,14 +11,14 @@ import re
|
||||
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
|
||||
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
|
||||
|
||||
from plugin_helper import Plugin_Objects
|
||||
from logger import mylog, Logger
|
||||
from helper import get_setting_value
|
||||
from const import logPath
|
||||
from database import DB
|
||||
from models.device_instance import DeviceInstance
|
||||
import conf
|
||||
from pytz import timezone
|
||||
from plugin_helper import Plugin_Objects # noqa: E402 [flake8 lint suppression]
|
||||
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
|
||||
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
|
||||
from const import logPath # noqa: E402 [flake8 lint suppression]
|
||||
from database import DB # noqa: E402 [flake8 lint suppression]
|
||||
from models.device_instance import DeviceInstance # noqa: E402 [flake8 lint suppression]
|
||||
import conf # noqa: E402 [flake8 lint suppression]
|
||||
from pytz import timezone # noqa: E402 [flake8 lint suppression]
|
||||
|
||||
# Make sure the TIMEZONE for logging is correct
|
||||
conf.tz = timezone(get_setting_value('TIMEZONE'))
|
||||
@@ -33,16 +33,14 @@ LOG_FILE = os.path.join(LOG_PATH, f'script.{pluginName}.log')
|
||||
RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
|
||||
|
||||
|
||||
|
||||
def main():
|
||||
|
||||
mylog('verbose', [f'[{pluginName}] In script'])
|
||||
|
||||
mylog('verbose', [f'[{pluginName}] In script'])
|
||||
|
||||
timeout = get_setting_value('ICMP_RUN_TIMEOUT')
|
||||
args = get_setting_value('ICMP_ARGS')
|
||||
in_regex = get_setting_value('ICMP_IN_REGEX')
|
||||
|
||||
|
||||
# Create a database connection
|
||||
db = DB() # instance of class DB
|
||||
db.open()
|
||||
@@ -61,46 +59,45 @@ def main():
|
||||
|
||||
# Filter devices based on the regex match
|
||||
filtered_devices = [
|
||||
device for device in all_devices
|
||||
device for device in all_devices
|
||||
if regex_pattern.match(device['devLastIP'])
|
||||
]
|
||||
|
||||
|
||||
mylog('verbose', [f'[{pluginName}] Devices to PING: {len(filtered_devices)}'])
|
||||
mylog('verbose', [f'[{pluginName}] Devices to PING: {len(filtered_devices)}'])
|
||||
|
||||
for device in filtered_devices:
|
||||
is_online, output = execute_scan(device['devLastIP'], timeout, args)
|
||||
|
||||
mylog('verbose', [f"[{pluginName}] ip: {device['devLastIP']} is_online: {is_online}"])
|
||||
|
||||
|
||||
if is_online:
|
||||
plugin_objects.add_object(
|
||||
# "MAC", "IP", "Name", "Output"
|
||||
primaryId = device['devMac'],
|
||||
secondaryId = device['devLastIP'],
|
||||
watched1 = device['devName'],
|
||||
watched2 = output.replace('\n',''),
|
||||
watched3 = '',
|
||||
watched4 = '',
|
||||
extra = '',
|
||||
foreignKey = device['devMac'])
|
||||
# "MAC", "IP", "Name", "Output"
|
||||
primaryId = device['devMac'],
|
||||
secondaryId = device['devLastIP'],
|
||||
watched1 = device['devName'],
|
||||
watched2 = output.replace('\n', ''),
|
||||
watched3 = '',
|
||||
watched4 = '',
|
||||
extra = '',
|
||||
foreignKey = device['devMac']
|
||||
)
|
||||
|
||||
plugin_objects.write_result_file()
|
||||
|
||||
|
||||
mylog('verbose', [f'[{pluginName}] Script finished'])
|
||||
|
||||
|
||||
mylog('verbose', [f'[{pluginName}] Script finished'])
|
||||
|
||||
return 0
|
||||
|
||||
#===============================================================================
|
||||
|
||||
# ===============================================================================
|
||||
# Execute scan
|
||||
#===============================================================================
|
||||
def execute_scan (ip, timeout, args):
|
||||
# ===============================================================================
|
||||
def execute_scan(ip, timeout, args):
|
||||
"""
|
||||
Execute the ICMP command on IP.
|
||||
"""
|
||||
|
||||
|
||||
icmp_args = ['ping'] + args.split() + [ip]
|
||||
|
||||
# Execute command
|
||||
@@ -108,12 +105,18 @@ def execute_scan (ip, timeout, args):
|
||||
|
||||
try:
|
||||
# try runnning a subprocess with a forced (timeout) in case the subprocess hangs
|
||||
output = subprocess.check_output (icmp_args, universal_newlines=True, stderr=subprocess.STDOUT, timeout=(timeout), text=True)
|
||||
output = subprocess.check_output(
|
||||
icmp_args,
|
||||
universal_newlines=True,
|
||||
stderr=subprocess.STDOUT,
|
||||
timeout=(timeout),
|
||||
text=True
|
||||
)
|
||||
|
||||
mylog('verbose', [f'[{pluginName}] DEBUG OUTPUT : {output}'])
|
||||
|
||||
# Parse output using case-insensitive regular expressions
|
||||
#Synology-NAS:/# ping -i 0.5 -c 3 -W 8 -w 9 192.168.1.82
|
||||
# Synology-NAS:/# ping -i 0.5 -c 3 -W 8 -w 9 192.168.1.82
|
||||
# PING 192.168.1.82 (192.168.1.82): 56 data bytes
|
||||
# 64 bytes from 192.168.1.82: seq=0 ttl=64 time=0.080 ms
|
||||
# 64 bytes from 192.168.1.82: seq=1 ttl=64 time=0.081 ms
|
||||
@@ -130,7 +133,7 @@ def execute_scan (ip, timeout, args):
|
||||
# --- 192.168.1.92 ping statistics ---
|
||||
# 3 packets transmitted, 0 packets received, 100% packet loss
|
||||
|
||||
# TODO: parse output and return True if online, False if Offline (100% packet loss, bad address)
|
||||
# TODO: parse output and return True if online, False if Offline (100% packet loss, bad address)
|
||||
is_online = True
|
||||
|
||||
# Check for 0% packet loss in the output
|
||||
@@ -145,22 +148,20 @@ def execute_scan (ip, timeout, args):
|
||||
|
||||
except subprocess.CalledProcessError as e:
|
||||
# An error occurred, handle it
|
||||
mylog('verbose', [f'[{pluginName}] ⚠ ERROR - check logs'])
|
||||
mylog('verbose', [f'[{pluginName}] ⚠ ERROR - check logs'])
|
||||
mylog('verbose', [f'[{pluginName}]', e.output])
|
||||
|
||||
return False, output
|
||||
|
||||
return False, output
|
||||
|
||||
except subprocess.TimeoutExpired:
|
||||
mylog('verbose', [f'[{pluginName}] TIMEOUT - the process forcefully terminated as timeout reached'])
|
||||
return False, output
|
||||
mylog('verbose', [f'[{pluginName}] TIMEOUT - the process forcefully terminated as timeout reached'])
|
||||
return False, output
|
||||
|
||||
return False, output
|
||||
|
||||
|
||||
|
||||
return False, output
|
||||
|
||||
#===============================================================================
|
||||
|
||||
# ===============================================================================
|
||||
# BEGIN
|
||||
#===============================================================================
|
||||
# ===============================================================================
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
main()
|
||||
|
||||
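The ICMP hunk above carries a TODO to actually parse the ping summary rather than assume the host is online, and its comments call for checking the "0% packet loss" line. A minimal sketch of such a check (a hypothetical helper, not code from the plugin), keyed off the summary line shown in the sample output:

import re

def is_host_online(ping_output: str) -> bool:
    # ping prints a summary like "3 packets transmitted, 3 packets received, 0% packet loss"
    match = re.search(r'(\d+(?:\.\d+)?)% packet loss', ping_output)
    if match is None:
        return False  # e.g. "bad address" - no summary line at all
    return float(match.group(1)) == 0.0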
@@ -11,13 +11,13 @@ import re
|
||||
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
|
||||
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
|
||||
|
||||
from plugin_helper import Plugin_Objects
|
||||
from utils.datetime_utils import timeNowDB
|
||||
from logger import mylog, Logger, append_line_to_file
|
||||
from helper import check_IP_format, get_setting_value
|
||||
from const import logPath
|
||||
import conf
|
||||
from pytz import timezone
|
||||
from plugin_helper import Plugin_Objects # noqa: E402 [flake8 lint suppression]
|
||||
from utils.datetime_utils import timeNowDB # noqa: E402 [flake8 lint suppression]
|
||||
from logger import mylog, Logger, append_line_to_file # noqa: E402 [flake8 lint suppression]
|
||||
from helper import check_IP_format, get_setting_value # noqa: E402 [flake8 lint suppression]
|
||||
from const import logPath # noqa: E402 [flake8 lint suppression]
|
||||
import conf # noqa: E402 [flake8 lint suppression]
|
||||
from pytz import timezone # noqa: E402 [flake8 lint suppression]
|
||||
|
||||
# Make sure the TIMEZONE for logging is correct
|
||||
conf.tz = timezone(get_setting_value('TIMEZONE'))
|
||||
@@ -31,39 +31,39 @@ LOG_PATH = logPath + '/plugins'
|
||||
LOG_FILE = os.path.join(LOG_PATH, f'script.{pluginName}.log')
|
||||
RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
|
||||
|
||||
|
||||
no_internet_ip = '0.0.0.0'
|
||||
|
||||
|
||||
def main():
|
||||
|
||||
mylog('verbose', [f'[{pluginName}] In script'])
|
||||
|
||||
mylog('verbose', [f'[{pluginName}] In script'])
|
||||
|
||||
parser = argparse.ArgumentParser(description='Check internet connectivity and IP')
|
||||
|
||||
|
||||
parser.add_argument('prev_ip', action="store", help="Previous IP address to compare against the current IP")
|
||||
parser.add_argument('DIG_GET_IP_ARG', action="store", help="Arguments for the 'dig' command to retrieve the IP address") # unused
|
||||
parser.add_argument('DIG_GET_IP_ARG', action="store", help="Arguments for the 'dig' command to retrieve the IP address") # unused
|
||||
|
||||
values = parser.parse_args()
|
||||
|
||||
PREV_IP = values.prev_ip.split('=')[1]
|
||||
PREV_IP = values.prev_ip.split('=')[1]
|
||||
DIG_GET_IP_ARG = get_setting_value("INTRNT_DIG_GET_IP_ARG")
|
||||
|
||||
new_internet_IP = no_internet_ip
|
||||
|
||||
mylog('verbose', [f'[{pluginName}] INTRNT_DIG_GET_IP_ARG: ', DIG_GET_IP_ARG])
|
||||
mylog('verbose', [f'[{pluginName}] INTRNT_DIG_GET_IP_ARG: ', DIG_GET_IP_ARG])
|
||||
|
||||
# METHOD 1: dig
|
||||
# perform the new IP lookup N times specified by the INTRNT_TRIES setting
|
||||
|
||||
|
||||
INTRNT_RETRIES = get_setting_value("INTRNT_RETRIES")
|
||||
retries_needed = 0
|
||||
|
||||
for i in range(INTRNT_RETRIES + 1):
|
||||
|
||||
new_internet_IP, cmd_output = check_internet_IP( PREV_IP, DIG_GET_IP_ARG)
|
||||
new_internet_IP, cmd_output = check_internet_IP(PREV_IP, DIG_GET_IP_ARG)
|
||||
|
||||
if new_internet_IP == no_internet_ip:
|
||||
time.sleep(1*i) # Exponential backoff strategy
|
||||
time.sleep(1 * i) # Exponential backoff strategy
|
||||
else:
|
||||
retries_needed = i
|
||||
break
|
||||
@@ -71,68 +71,69 @@ def main():
|
||||
# METHOD 2: curl
|
||||
if new_internet_IP == no_internet_ip:
|
||||
new_internet_IP, cmd_output = fallback_check_ip()
|
||||
mylog('verbose', [f'[{pluginName}] Curl Fallback (new_internet_IP|cmd_output): {new_internet_IP} | {cmd_output}'])
|
||||
mylog('verbose', [f'[{pluginName}] Curl Fallback (new_internet_IP|cmd_output): {new_internet_IP} | {cmd_output}'])
|
||||
|
||||
# logging
|
||||
append_line_to_file (logPath + '/IP_changes.log', '['+str(timeNowDB()) +']\t'+ new_internet_IP +'\n')
|
||||
append_line_to_file(logPath + '/IP_changes.log', '[' + str(timeNowDB()) + ']\t' + new_internet_IP + '\n')
|
||||
|
||||
plugin_objects = Plugin_Objects(RESULT_FILE)
|
||||
|
||||
plugin_objects = Plugin_Objects(RESULT_FILE)
|
||||
|
||||
plugin_objects.add_object(
|
||||
primaryId = 'Internet', # MAC (Device Name)
|
||||
secondaryId = new_internet_IP, # IP Address
|
||||
secondaryId = new_internet_IP, # IP Address
|
||||
watched1 = f'Previous IP: {PREV_IP}',
|
||||
watched2 = cmd_output.replace('\n',''),
|
||||
watched3 = retries_needed,
|
||||
watched2 = cmd_output.replace('\n', ''),
|
||||
watched3 = retries_needed,
|
||||
watched4 = 'Gateway',
|
||||
extra = f'Previous IP: {PREV_IP}',
|
||||
foreignKey = 'Internet')
|
||||
extra = f'Previous IP: {PREV_IP}',
|
||||
foreignKey = 'Internet'
|
||||
)
|
||||
|
||||
plugin_objects.write_result_file()
|
||||
plugin_objects.write_result_file()
|
||||
|
||||
mylog('verbose', [f'[{pluginName}] Finished '])
|
||||
|
||||
mylog('verbose', [f'[{pluginName}] Finished '])
|
||||
|
||||
return 0
|
||||
|
||||
|
||||
#===============================================================================
|
||||
|
||||
|
||||
# ===============================================================================
|
||||
# INTERNET IP CHANGE
|
||||
#===============================================================================
|
||||
def check_internet_IP ( PREV_IP, DIG_GET_IP_ARG ):
|
||||
|
||||
# ===============================================================================
|
||||
def check_internet_IP(PREV_IP, DIG_GET_IP_ARG):
|
||||
|
||||
# Get Internet IP
|
||||
mylog('verbose', [f'[{pluginName}] - Retrieving Internet IP'])
|
||||
internet_IP, cmd_output = get_internet_IP(DIG_GET_IP_ARG)
|
||||
|
||||
mylog('verbose', [f'[{pluginName}] Current internet_IP : {internet_IP}'])
|
||||
|
||||
# Check previously stored IP
|
||||
mylog('verbose', [f'[{pluginName}] Current internet_IP : {internet_IP}'])
|
||||
|
||||
# Check previously stored IP
|
||||
previous_IP = no_internet_ip
|
||||
|
||||
if PREV_IP is not None and len(PREV_IP) > 0 :
|
||||
if PREV_IP is not None and len(PREV_IP) > 0 :
|
||||
previous_IP = PREV_IP
|
||||
|
||||
mylog('verbose', [f'[{pluginName}] previous_IP : {previous_IP}'])
|
||||
mylog('verbose', [f'[{pluginName}] previous_IP : {previous_IP}'])
|
||||
|
||||
return internet_IP, cmd_output
|
||||
|
||||
|
||||
#-------------------------------------------------------------------------------
|
||||
def get_internet_IP (DIG_GET_IP_ARG):
|
||||
|
||||
# -------------------------------------------------------------------------------
|
||||
def get_internet_IP(DIG_GET_IP_ARG):
|
||||
|
||||
cmd_output = ''
|
||||
|
||||
|
||||
# Using 'dig'
|
||||
dig_args = ['dig', '+short'] + DIG_GET_IP_ARG.strip().split()
|
||||
try:
|
||||
cmd_output = subprocess.check_output (dig_args, universal_newlines=True)
|
||||
mylog('verbose', [f'[{pluginName}] DIG result : {cmd_output}'])
|
||||
cmd_output = subprocess.check_output(dig_args, universal_newlines=True)
|
||||
mylog('verbose', [f'[{pluginName}] DIG result : {cmd_output}'])
|
||||
except subprocess.CalledProcessError as e:
|
||||
mylog('verbose', [e.output])
|
||||
cmd_output = '' # no internet
|
||||
cmd_output = '' # no internet
|
||||
|
||||
# Check result is an IP
|
||||
IP = check_IP_format (cmd_output)
|
||||
IP = check_IP_format(cmd_output)
|
||||
|
||||
# Handle invalid response
|
||||
if IP == '':
|
||||
@@ -140,7 +141,8 @@ def get_internet_IP (DIG_GET_IP_ARG):
|
||||
|
||||
return IP, cmd_output
|
||||
|
||||
#-------------------------------------------------------------------------------
|
||||
|
||||
# -------------------------------------------------------------------------------
|
||||
def fallback_check_ip():
|
||||
"""Fallback mechanism using `curl ifconfig.me/ip`."""
|
||||
try:
|
||||
@@ -155,8 +157,9 @@ def fallback_check_ip():
|
||||
mylog('none', [f'[{pluginName}] Fallback curl exception: {e}'])
|
||||
return no_internet_ip, f'Fallback via curl exception: "{e}"'
|
||||
|
||||
#===============================================================================
|
||||
|
||||
# ===============================================================================
|
||||
# BEGIN
|
||||
#===============================================================================
|
||||
# ===============================================================================
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
main()
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
import argparse
|
||||
import os
|
||||
import sys
|
||||
import speedtest
|
||||
@@ -9,13 +8,13 @@ import speedtest
|
||||
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
|
||||
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
|
||||
|
||||
from plugin_helper import Plugin_Objects
|
||||
from utils.datetime_utils import timeNowDB
|
||||
from logger import mylog, Logger
|
||||
from helper import get_setting_value
|
||||
import conf
|
||||
from pytz import timezone
|
||||
from const import logPath
|
||||
from plugin_helper import Plugin_Objects # noqa: E402 [flake8 lint suppression]
|
||||
from utils.datetime_utils import timeNowDB # noqa: E402 [flake8 lint suppression]
|
||||
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
|
||||
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
|
||||
import conf # noqa: E402 [flake8 lint suppression]
|
||||
from pytz import timezone # noqa: E402 [flake8 lint suppression]
|
||||
from const import logPath # noqa: E402 [flake8 lint suppression]
|
||||
|
||||
# Make sure the TIMEZONE for logging is correct
|
||||
conf.tz = timezone(get_setting_value('TIMEZONE'))
|
||||
@@ -28,18 +27,16 @@ pluginName = 'INTRSPD'
|
||||
LOG_PATH = logPath + '/plugins'
|
||||
RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
|
||||
|
||||
def main():
|
||||
|
||||
mylog('verbose', ['[INTRSPD] In script'])
|
||||
|
||||
parser = argparse.ArgumentParser(description='Speedtest Plugin for NetAlertX')
|
||||
values = parser.parse_args()
|
||||
def main():
|
||||
|
||||
mylog('verbose', ['[INTRSPD] In script'])
|
||||
|
||||
plugin_objects = Plugin_Objects(RESULT_FILE)
|
||||
speedtest_result = run_speedtest()
|
||||
plugin_objects.add_object(
|
||||
primaryId = 'Speedtest',
|
||||
secondaryId = timeNowDB(),
|
||||
secondaryId = timeNowDB(),
|
||||
watched1 = speedtest_result['download_speed'],
|
||||
watched2 = speedtest_result['upload_speed'],
|
||||
watched3 = 'null',
|
||||
@@ -49,25 +46,27 @@ def main():
|
||||
)
|
||||
plugin_objects.write_result_file()
|
||||
|
||||
|
||||
def run_speedtest():
|
||||
try:
|
||||
st = speedtest.Speedtest(secure=True)
|
||||
st.get_best_server()
|
||||
download_speed = round(st.download() / 10**6, 2) # Convert to Mbps
|
||||
upload_speed = round(st.upload() / 10**6, 2) # Convert to Mbps
|
||||
|
||||
mylog('verbose', [f"[INTRSPD] Result (down|up): {str(download_speed)} Mbps|{upload_speed} Mbps"])
|
||||
|
||||
mylog('verbose', [f"[INTRSPD] Result (down|up): {str(download_speed)} Mbps|{upload_speed} Mbps"])
|
||||
|
||||
return {
|
||||
'download_speed': download_speed,
|
||||
'upload_speed': upload_speed,
|
||||
}
|
||||
except Exception as e:
|
||||
mylog('verbose', [f"[INTRSPD] Error running speedtest: {str(e)}"])
|
||||
mylog('verbose', [f"[INTRSPD] Error running speedtest: {str(e)}"])
|
||||
return {
|
||||
'download_speed': -1,
|
||||
'upload_speed': -1,
|
||||
}
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
sys.exit(main())
|
||||
|
||||
@@ -11,11 +11,11 @@ from functools import reduce
|
||||
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
|
||||
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
|
||||
|
||||
from plugin_helper import Plugin_Objects
|
||||
from logger import mylog, Logger
|
||||
from const import logPath
|
||||
from helper import get_setting_value
|
||||
import conf
|
||||
from plugin_helper import Plugin_Objects # noqa: E402 [flake8 lint suppression]
|
||||
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
|
||||
from const import logPath # noqa: E402 [flake8 lint suppression]
|
||||
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
|
||||
import conf # noqa: E402 [flake8 lint suppression]
|
||||
|
||||
# Make sure the TIMEZONE for logging is correct
|
||||
conf.tz = timezone(get_setting_value('TIMEZONE'))
|
||||
@@ -34,9 +34,8 @@ RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
|
||||
plugin_objects = Plugin_Objects(RESULT_FILE)
|
||||
|
||||
|
||||
|
||||
def main():
|
||||
mylog('verbose', [f'[{pluginName}] In script'])
|
||||
mylog('verbose', [f'[{pluginName}] In script'])
|
||||
|
||||
# Retrieve configuration settings
|
||||
SCAN_SUBNETS = get_setting_value('SCAN_SUBNETS')
|
||||
@@ -48,33 +47,33 @@ def main():
|
||||
entry.split('--interface=')[-1].strip() for entry in SCAN_SUBNETS if '--interface=' in entry
|
||||
)
|
||||
|
||||
mylog('verbose', [f'[{pluginName}] Interfaces value: "{interfaces}"'])
|
||||
mylog('verbose', [f'[{pluginName}] Interfaces value: "{interfaces}"'])
|
||||
|
||||
# retrieve data
|
||||
raw_neighbors = get_neighbors(interfaces)
|
||||
|
||||
|
||||
neighbors = parse_neighbors(raw_neighbors)
|
||||
|
||||
# Process the data into native application tables
|
||||
if len(neighbors) > 0:
|
||||
|
||||
for device in neighbors:
|
||||
plugin_objects.add_object(
|
||||
primaryId = device['mac'],
|
||||
secondaryId = device['ip'],
|
||||
watched4 = device['last_seen'],
|
||||
plugin_objects.add_object(
|
||||
primaryId = device['mac'],
|
||||
secondaryId = device['ip'],
|
||||
watched4 = device['last_seen'],
|
||||
|
||||
# The following are always unknown
|
||||
watched1 = device['hostname'], # don't use these --> handleEmpty(device['hostname']),
|
||||
watched2 = device['vendor'], # handleEmpty(device['vendor']),
|
||||
watched3 = device['device_type'], # handleEmpty(device['device_type']),
|
||||
extra = '',
|
||||
foreignKey = "" #device['mac']
|
||||
# helpVal1 = "Something1", # Optional Helper values to be passed for mapping into the app
|
||||
# helpVal2 = "Something1", # If you need to use even only 1, add the remaining ones too
|
||||
# helpVal3 = "Something1", # and set them to 'null'. Check the the docs for details:
|
||||
# helpVal4 = "Something1", # https://github.com/jokob-sk/NetAlertX/blob/main/docs/PLUGINS_DEV.md
|
||||
)
|
||||
# The following are always unknown
|
||||
watched1 = device['hostname'], # don't use these --> handleEmpty(device['hostname']),
|
||||
watched2 = device['vendor'], # don't use these --> handleEmpty(device['vendor']),
|
||||
watched3 = device['device_type'], # don't use these --> handleEmpty(device['device_type']),
|
||||
extra = '',
|
||||
foreignKey = "" # device['mac']
|
||||
# helpVal1 = "Something1", # Optional Helper values to be passed for mapping into the app
|
||||
# helpVal2 = "Something1", # If you need to use even only 1, add the remaining ones too
|
||||
# helpVal3 = "Something1", # and set them to 'null'. Check the the docs for details:
|
||||
# helpVal4 = "Something1", # https://github.com/jokob-sk/NetAlertX/blob/main/docs/PLUGINS_DEV.md
|
||||
)
|
||||
|
||||
mylog('verbose', [f'[{pluginName}] New entries: "{len(neighbors)}"'])
|
||||
|
||||
@@ -83,13 +82,14 @@ def main():
|
||||
|
||||
return 0
|
||||
|
||||
|
||||
def parse_neighbors(raw_neighbors: list[str]):
|
||||
neighbors = []
|
||||
for line in raw_neighbors:
|
||||
if "lladdr" in line and "REACHABLE" in line:
|
||||
# Known data
|
||||
fields = line.split()
|
||||
|
||||
|
||||
if not is_multicast(fields[0]):
|
||||
# mylog('verbose', [f'[{pluginName}] adding ip {fields[0]}"'])
|
||||
neighbor = {}
|
||||
@@ -101,9 +101,9 @@ def parse_neighbors(raw_neighbors: list[str]):
|
||||
neighbor['hostname'] = '(unknown)'
|
||||
neighbor['vendor'] = '(unknown)'
|
||||
neighbor['device_type'] = '(unknown)'
|
||||
|
||||
|
||||
neighbors.append(neighbor)
|
||||
|
||||
|
||||
return neighbors
|
||||
|
||||
|
||||
@@ -111,6 +111,7 @@ def is_multicast(ip):
|
||||
prefixes = ['ff', '224', '231', '232', '233', '234', '238', '239']
|
||||
return reduce(lambda acc, prefix: acc or ip.startswith(prefix), prefixes, False)
|
||||
|
||||
|
||||
# retrieve data
|
||||
def get_neighbors(interfaces):
|
||||
|
||||
@@ -119,7 +120,7 @@ def get_neighbors(interfaces):
|
||||
for interface in interfaces.split(","):
|
||||
try:
|
||||
|
||||
# Ping all IPv6 devices in multicast to trigger NDP
|
||||
# Ping all IPv6 devices in multicast to trigger NDP
|
||||
|
||||
mylog('verbose', [f'[{pluginName}] Pinging on interface: "{interface}"'])
|
||||
command = f"ping ff02::1%{interface} -c 2".split()
|
||||
@@ -136,11 +137,11 @@ def get_neighbors(interfaces):
|
||||
mylog('verbose', [f'[{pluginName}] Scanning interface succeded: "{interface}"'])
|
||||
except subprocess.CalledProcessError as e:
|
||||
# An error occurred, handle it
|
||||
|
||||
mylog('verbose', [f'[{pluginName}] Scanning interface failed: "{interface}"'])
|
||||
error_type = type(e).__name__ # Capture the error type
|
||||
mylog('verbose', [f'[{pluginName}] Scanning interface failed: "{interface}" ({error_type})'])
|
||||
|
||||
return results
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
|
||||
@@ -7,18 +7,18 @@ INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
pluginName = 'LUCIRPC'

from plugin_helper import Plugin_Objects
from logger import mylog, Logger
from helper import get_setting_value
from const import logPath
import conf
from pytz import timezone
from plugin_helper import Plugin_Objects # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
from const import logPath # noqa: E402 [flake8 lint suppression]
import conf # noqa: E402 [flake8 lint suppression]
from pytz import timezone # noqa: E402 [flake8 lint suppression]

try:
from openwrt_luci_rpc import OpenWrtRpc
except:
mylog('error', [f'[{pluginName}] Failed import openwrt_luci_rpc'])
exit()
except ImportError as e:
mylog('error', [f'[{pluginName}] Failed import openwrt_luci_rpc: {e}'])
exit(1)

conf.tz = timezone(get_setting_value('TIMEZONE'))

@@ -30,13 +30,14 @@ RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
|
||||
|
||||
plugin_objects = Plugin_Objects(RESULT_FILE)
|
||||
|
||||
def main():
|
||||
mylog('verbose', [f'[{pluginName}] start script.'])
|
||||
|
||||
def main():
|
||||
mylog('verbose', [f'[{pluginName}] start script.'])
|
||||
|
||||
device_data = get_device_data()
|
||||
|
||||
for entry in device_data:
|
||||
mylog('verbose', [f'[{pluginName}] found: ', str(entry.mac).lower()])
|
||||
mylog('verbose', [f'[{pluginName}] found: ', str(entry.mac).lower()])
|
||||
|
||||
name = str(entry.hostname)
|
||||
|
||||
@@ -45,36 +46,38 @@ def main():
|
||||
|
||||
plugin_objects.add_object(
|
||||
primaryId = str(entry.mac).lower(),
|
||||
secondaryId = entry.ip,
|
||||
secondaryId = entry.ip,
|
||||
watched1 = entry.host,
|
||||
watched2 = name,
|
||||
watched3 = "",
|
||||
watched3 = "",
|
||||
watched4 = "",
|
||||
extra = pluginName,
|
||||
extra = pluginName,
|
||||
foreignKey = str(entry.mac).lower())
|
||||
|
||||
plugin_objects.write_result_file()
|
||||
|
||||
mylog('verbose', [f'[{pluginName}] Script finished'])
|
||||
|
||||
mylog('verbose', [f'[{pluginName}] Script finished'])
|
||||
|
||||
return 0
|
||||
|
||||
|
||||
def get_device_data():
|
||||
router = OpenWrtRpc(
|
||||
get_setting_value("LUCIRPC_host"),
|
||||
get_setting_value("LUCIRPC_user"),
|
||||
get_setting_value("LUCIRPC_password"),
|
||||
get_setting_value("LUCIRPC_ssl"),
|
||||
get_setting_value("LUCIRPC_user"),
|
||||
get_setting_value("LUCIRPC_password"),
|
||||
get_setting_value("LUCIRPC_ssl"),
|
||||
get_setting_value("LUCIRPC_verify_ssl")
|
||||
)
|
||||
)
|
||||
|
||||
if router.is_logged_in():
|
||||
mylog('verbose', [f'[{pluginName}] login successfully.'])
|
||||
mylog('verbose', [f'[{pluginName}] login successfully.'])
|
||||
else:
|
||||
mylog('error', [f'[{pluginName}] login fail.'])
|
||||
|
||||
mylog('error', [f'[{pluginName}] login fail.'])
|
||||
|
||||
device_data = router.get_all_connected_devices(only_reachable=get_setting_value("LUCIRPC_only_reachable"))
|
||||
return device_data
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
main()
|
||||
|
||||
@@ -8,12 +8,12 @@ from collections import deque
|
||||
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
|
||||
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
|
||||
|
||||
from logger import mylog, Logger
|
||||
from helper import get_setting_value
|
||||
from const import logPath
|
||||
from messaging.in_app import remove_old
|
||||
import conf
|
||||
from pytz import timezone
|
||||
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
|
||||
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
|
||||
from const import logPath # noqa: E402 [flake8 lint suppression]
|
||||
from messaging.in_app import remove_old # noqa: E402 [flake8 lint suppression]
|
||||
import conf # noqa: E402 [flake8 lint suppression]
|
||||
from pytz import timezone # noqa: E402 [flake8 lint suppression]
|
||||
|
||||
# Make sure the TIMEZONE for logging is correct
|
||||
conf.tz = timezone(get_setting_value('TIMEZONE'))
|
||||
@@ -28,10 +28,9 @@ LOG_FILE = os.path.join(LOG_PATH, f'script.{pluginName}.log')
|
||||
RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
|
||||
|
||||
|
||||
|
||||
def main():
|
||||
|
||||
mylog('verbose', [f'[{pluginName}] In script'])
|
||||
mylog('verbose', [f'[{pluginName}] In script'])
|
||||
|
||||
MAINT_LOG_LENGTH = int(get_setting_value('MAINT_LOG_LENGTH'))
|
||||
MAINT_NOTI_LENGTH = int(get_setting_value('MAINT_NOTI_LENGTH'))
|
||||
@@ -39,7 +38,7 @@ def main():
|
||||
# Check if set
|
||||
if MAINT_LOG_LENGTH != 0:
|
||||
|
||||
mylog('verbose', [f'[{pluginName}] Cleaning file'])
|
||||
mylog('verbose', [f'[{pluginName}] Cleaning file'])
|
||||
|
||||
logFile = logPath + "/app.log"
|
||||
|
||||
@@ -54,19 +53,19 @@ def main():
|
||||
with open(logFile, 'w') as file:
|
||||
# Write the last N lines back to the file
|
||||
file.writelines(lines_to_keep)
|
||||
|
||||
mylog('verbose', [f'[{pluginName}] Cleanup finished'])
|
||||
|
||||
mylog('verbose', [f'[{pluginName}] Cleanup finished'])
|
||||
|
||||
# Check if set
|
||||
if MAINT_NOTI_LENGTH != 0:
|
||||
mylog('verbose', [f'[{pluginName}] Cleaning in-app notification history'])
|
||||
mylog('verbose', [f'[{pluginName}] Cleaning in-app notification history'])
|
||||
remove_old(MAINT_NOTI_LENGTH)
|
||||
|
||||
return 0
|
||||
|
||||
|
||||
#===============================================================================
|
||||
# ===============================================================================
|
||||
# BEGIN
|
||||
#===============================================================================
|
||||
# ===============================================================================
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
main()
|
||||
|
||||
@@ -7,14 +7,14 @@ import sys
|
||||
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
|
||||
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
|
||||
|
||||
from plugin_helper import Plugin_Objects
|
||||
from logger import mylog, Logger
|
||||
from helper import get_setting_value
|
||||
from const import logPath
|
||||
import conf
|
||||
from pytz import timezone
|
||||
from librouteros import connect
|
||||
from librouteros.exceptions import TrapError
|
||||
from plugin_helper import Plugin_Objects # noqa: E402 [flake8 lint suppression]
|
||||
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
|
||||
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
|
||||
from const import logPath # noqa: E402 [flake8 lint suppression]
|
||||
import conf # noqa: E402 [flake8 lint suppression]
|
||||
from pytz import timezone # noqa: E402 [flake8 lint suppression]
|
||||
from librouteros import connect # noqa: E402 [flake8 lint suppression]
|
||||
from librouteros.exceptions import TrapError # noqa: E402 [flake8 lint suppression]
|
||||
|
||||
# Make sure the TIMEZONE for logging is correct
|
||||
conf.tz = timezone(get_setting_value('TIMEZONE'))
|
||||
@@ -29,7 +29,6 @@ LOG_FILE = os.path.join(LOG_PATH, f'script.{pluginName}.log')
|
||||
RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
|
||||
|
||||
|
||||
|
||||
def main():
|
||||
|
||||
mylog('verbose', [f'[{pluginName}] In script'])
|
||||
@@ -49,7 +48,7 @@ def main():
|
||||
plugin_objects = get_entries(plugin_objects)
|
||||
|
||||
plugin_objects.write_result_file()
|
||||
|
||||
|
||||
mylog('verbose', [f'[{pluginName}] Scan finished, found {len(plugin_objects)} devices'])
|
||||
|
||||
|
||||
@@ -58,10 +57,10 @@ def get_entries(plugin_objects: Plugin_Objects) -> Plugin_Objects:
|
||||
try:
|
||||
# connect router
|
||||
api = connect(username=MT_USER, password=MT_PASS, host=MT_HOST, port=MT_PORT)
|
||||
|
||||
|
||||
# get dhcp leases
|
||||
leases = api('/ip/dhcp-server/lease/print')
|
||||
|
||||
|
||||
for lease in leases:
|
||||
lease_id = lease.get('.id')
|
||||
address = lease.get('address')
|
||||
@@ -71,8 +70,11 @@ def get_entries(plugin_objects: Plugin_Objects) -> Plugin_Objects:
|
||||
last_seen = lease.get('last-seen')
|
||||
status = lease.get('status')
|
||||
device_name = comment or host_name or "(unknown)"
|
||||
|
||||
mylog('verbose', [f"ID: {lease_id}, Address: {address}, MAC Address: {mac_address}, Host Name: {host_name}, Comment: {comment}, Last Seen: {last_seen}, Status: {status}"])
|
||||
|
||||
mylog(
|
||||
'verbose',
|
||||
[f"ID: {lease_id}, Address: {address}, MAC Address: {mac_address}, Host Name: {host_name}, Comment: {comment}, Last Seen: {last_seen}, Status: {status}"]
|
||||
)
|
||||
|
||||
if (status == "bound"):
|
||||
plugin_objects.add_object(
|
||||
@@ -83,7 +85,7 @@ def get_entries(plugin_objects: Plugin_Objects) -> Plugin_Objects:
|
||||
watched3 = host_name,
|
||||
watched4 = last_seen,
|
||||
extra = '',
|
||||
helpVal1 = comment,
|
||||
helpVal1 = comment,
|
||||
foreignKey = mac_address)
|
||||
|
||||
except TrapError as e:
|
||||
@@ -91,13 +93,13 @@ def get_entries(plugin_objects: Plugin_Objects) -> Plugin_Objects:
|
||||
except Exception as e:
|
||||
mylog('error', [f"Failed to connect to MikroTik API: {e}"])
|
||||
|
||||
mylog('verbose', [f'[{pluginName}] Script finished'])
|
||||
|
||||
mylog('verbose', [f'[{pluginName}] Script finished'])
|
||||
|
||||
return plugin_objects
|
||||
|
||||
|
||||
#===============================================================================
|
||||
# ===============================================================================
|
||||
# BEGIN
|
||||
#===============================================================================
|
||||
# ===============================================================================
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
|
||||
@@ -8,14 +8,14 @@ import subprocess
|
||||
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
|
||||
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
|
||||
|
||||
from plugin_helper import Plugin_Objects
|
||||
from logger import mylog, Logger
|
||||
from const import logPath
|
||||
from helper import get_setting_value
|
||||
from database import DB
|
||||
from models.device_instance import DeviceInstance
|
||||
import conf
|
||||
from pytz import timezone
|
||||
from plugin_helper import Plugin_Objects # noqa: E402 [flake8 lint suppression]
|
||||
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
|
||||
from const import logPath # noqa: E402 [flake8 lint suppression]
|
||||
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
|
||||
from database import DB # noqa: E402 [flake8 lint suppression]
|
||||
from models.device_instance import DeviceInstance # noqa: E402 [flake8 lint suppression]
|
||||
import conf # noqa: E402 [flake8 lint suppression]
|
||||
from pytz import timezone # noqa: E402 [flake8 lint suppression]
|
||||
|
||||
# Make sure the TIMEZONE for logging is correct
|
||||
conf.tz = timezone(get_setting_value('TIMEZONE'))
|
||||
@@ -34,9 +34,8 @@ RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
|
||||
plugin_objects = Plugin_Objects(RESULT_FILE)
|
||||
|
||||
|
||||
|
||||
def main():
|
||||
mylog('verbose', [f'[{pluginName}] In script'])
|
||||
mylog('verbose', [f'[{pluginName}] In script'])
|
||||
|
||||
# timeout = get_setting_value('NBLOOKUP_RUN_TIMEOUT')
|
||||
timeout = 20
|
||||
@@ -52,13 +51,13 @@ def main():
|
||||
device_handler = DeviceInstance(db)
|
||||
|
||||
# Retrieve devices
|
||||
if get_setting_value("REFRESH_FQDN"):
|
||||
if get_setting_value("REFRESH_FQDN"):
|
||||
devices = device_handler.getAll()
|
||||
else:
|
||||
else:
|
||||
devices = device_handler.getUnknown()
|
||||
|
||||
mylog('verbose', [f'[{pluginName}] Devices count: {len(devices)}'])
|
||||
|
||||
mylog('verbose', [f'[{pluginName}] Devices count: {len(devices)}'])
|
||||
|
||||
# TEST - below is a WINDOWS host IP
|
||||
# execute_name_lookup('192.168.1.121', timeout)
|
||||
|
||||
@@ -67,31 +66,32 @@ def main():
|
||||
|
||||
if domain_name != '':
|
||||
plugin_objects.add_object(
|
||||
# "MAC", "IP", "Server", "Name"
|
||||
primaryId = device['devMac'],
|
||||
secondaryId = device['devLastIP'],
|
||||
watched1 = dns_server,
|
||||
watched2 = domain_name,
|
||||
watched3 = '',
|
||||
watched4 = '',
|
||||
extra = '',
|
||||
foreignKey = device['devMac'])
|
||||
# "MAC", "IP", "Server", "Name"
|
||||
primaryId = device['devMac'],
|
||||
secondaryId = device['devLastIP'],
|
||||
watched1 = dns_server,
|
||||
watched2 = domain_name,
|
||||
watched3 = '',
|
||||
watched4 = '',
|
||||
extra = '',
|
||||
foreignKey = device['devMac']
|
||||
)
|
||||
|
||||
plugin_objects.write_result_file()
|
||||
|
||||
|
||||
mylog('verbose', [f'[{pluginName}] Script finished'])
|
||||
|
||||
|
||||
mylog('verbose', [f'[{pluginName}] Script finished'])
|
||||
|
||||
return 0
|
||||
|
||||
#===============================================================================
|
||||
|
||||
# ===============================================================================
|
||||
# Execute scan
|
||||
#===============================================================================
|
||||
def execute_name_lookup (ip, timeout):
|
||||
# ===============================================================================
|
||||
def execute_name_lookup(ip, timeout):
|
||||
"""
|
||||
Execute the NBTSCAN command on IP.
|
||||
"""
|
||||
|
||||
|
||||
args = ['nbtscan', ip]
|
||||
|
||||
# Execute command
|
||||
@@ -99,20 +99,25 @@ def execute_name_lookup (ip, timeout):
|
||||
|
||||
try:
|
||||
mylog('verbose', [f'[{pluginName}] DEBUG CMD :', args])
|
||||
|
||||
|
||||
# try runnning a subprocess with a forced (timeout) in case the subprocess hangs
|
||||
output = subprocess.check_output (args, universal_newlines=True, stderr=subprocess.STDOUT, timeout=(timeout), text=True)
|
||||
output = subprocess.check_output(
|
||||
args,
|
||||
universal_newlines=True,
|
||||
stderr=subprocess.STDOUT,
|
||||
timeout=(timeout),
|
||||
text=True
|
||||
)
|
||||
|
||||
mylog('verbose', [f'[{pluginName}] DEBUG OUTPUT : {output}'])
|
||||
|
||||
|
||||
domain_name = ''
|
||||
dns_server = ''
|
||||
|
||||
|
||||
# Split the output into lines
|
||||
lines = output.splitlines()
|
||||
|
||||
# Look for the first line containing a valid NetBIOS name entry
|
||||
index = 0
|
||||
for line in lines:
|
||||
if 'Doing NBT name scan' not in line and ip in line:
|
||||
# Split the line and extract the primary NetBIOS name
|
||||
@@ -121,7 +126,6 @@ def execute_name_lookup (ip, timeout):
|
||||
domain_name = parts[1]
|
||||
else:
|
||||
mylog('verbose', [f'[{pluginName}] ⚠ ERROR - Unexpected output format: {line}'])
|
||||
|
||||
|
||||
mylog('verbose', [f'[{pluginName}] Domain Name: {domain_name}'])
|
||||
|
||||
@@ -132,18 +136,21 @@ def execute_name_lookup (ip, timeout):
|
||||
# if "NXDOMAIN" in e.output:
|
||||
# mylog('verbose', [f'[{pluginName}]', f"No PTR record found for IP: {ip}"])
|
||||
# else:
|
||||
mylog('verbose', [f'[{pluginName}] ⚠ ERROR - {e.output}'])
|
||||
|
||||
except subprocess.TimeoutExpired:
|
||||
mylog('verbose', [f'[{pluginName}] TIMEOUT - the process forcefully terminated as timeout reached'])
|
||||
mylog('verbose', [f'[{pluginName}] ⚠ ERROR - {e.output}'])
|
||||
|
||||
if output == "": # check if the subprocess failed
|
||||
mylog('verbose', [f'[{pluginName}] Scan: FAIL - check logs'])
|
||||
else:
|
||||
except subprocess.TimeoutExpired:
|
||||
mylog('verbose', [f'[{pluginName}] TIMEOUT - the process forcefully terminated as timeout reached'])
|
||||
|
||||
if output == "": # check if the subprocess failed
|
||||
mylog('verbose', [f'[{pluginName}] Scan: FAIL - check logs'])
|
||||
else:
|
||||
mylog('verbose', [f'[{pluginName}] Scan: SUCCESS'])
|
||||
|
||||
return '', ''
|
||||
return '', ''
|
||||
|
||||
|
||||
# ===============================================================================
|
||||
# BEGIN
|
||||
# ===============================================================================
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
|
||||
|
||||
@@ -448,7 +448,7 @@
"description": [
{
"language_code": "en_us",
"string": "When scanning remote networks, NMAP can only retrieve the IP address, not the MAC address. Enabling this setting generates a fake MAC address from the IP address to track devices, but it may cause inconsistencies if IPs change or devices are rediscovered. Static IPs are recommended. Device type and icon will not be detected correctly. When unchecked, devices with empty MAC addresses are skipped."
"string": "When scanning remote networks, NMAP can only retrieve the IP address, not the MAC address. Enabling the FAKE_MAC setting generates a fake MAC address from the IP address to track devices, but it may cause inconsistencies if IPs change or devices are re-discovered with a different MAC. Static IPs are recommended. Device type and icon might not be detected correctly and some plugins might fail if they depend on a valid MAC address. When unchecked, devices with empty MAC addresses are skipped."
}
]
}
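The setting described above is implemented further down in the NMAPDEV diff via the SHA-256 based string_to_mac_hash helper. For illustration, a minimal sketch of that idea (the function name here is hypothetical):

import hashlib

def fake_mac_from_ip(ip: str) -> str:
    # Hash the IP and keep the first 12 hex characters (6 bytes)
    digest = hashlib.sha256(ip.encode()).hexdigest()[:12]
    # Format as a MAC address: the same IP always maps to the same fake MAC
    return ':'.join(digest[i:i + 2] for i in range(0, 12, 2))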
@@ -7,19 +7,18 @@ import subprocess
|
||||
import sys
|
||||
import hashlib
|
||||
import re
|
||||
import nmap
|
||||
import nmap
|
||||
|
||||
# Register NetAlertX directories
|
||||
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
|
||||
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
|
||||
|
||||
from plugin_helper import Plugin_Objects
|
||||
from logger import mylog, Logger
|
||||
from helper import get_setting_value
|
||||
from const import logPath
|
||||
from database import DB
|
||||
import conf
|
||||
from pytz import timezone
|
||||
from plugin_helper import Plugin_Objects # noqa: E402 [flake8 lint suppression]
|
||||
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
|
||||
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
|
||||
from const import logPath # noqa: E402 [flake8 lint suppression]
|
||||
import conf # noqa: E402 [flake8 lint suppression]
|
||||
from pytz import timezone # noqa: E402 [flake8 lint suppression]
|
||||
|
||||
# Make sure the TIMEZONE for logging is correct
|
||||
conf.tz = timezone(get_setting_value('TIMEZONE'))
|
||||
@@ -37,46 +36,46 @@ RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')

def main():

mylog('verbose', [f'[{pluginName}] In script'])
mylog('verbose', [f'[{pluginName}] In script'])

timeout = get_setting_value('NMAPDEV_RUN_TIMEOUT')
fakeMac = get_setting_value('NMAPDEV_FAKE_MAC')
subnets = get_setting_value('SCAN_SUBNETS')
args = get_setting_value('NMAPDEV_ARGS')

mylog('verbose', [f'[{pluginName}] subnets: ', subnets])

mylog('verbose', [f'[{pluginName}] subnets: ', subnets])

# Initialize the Plugin obj output file
plugin_objects = Plugin_Objects(RESULT_FILE)

unique_devices = execute_scan(subnets, timeout, fakeMac, args)

mylog('verbose', [f'[{pluginName}] Devices found: {len(unique_devices)}'])
mylog('verbose', [f'[{pluginName}] Devices found: {len(unique_devices)}'])

for device in unique_devices:

plugin_objects.add_object(
# "MAC", "IP", "Name", "Vendor", "Interface"
primaryId = device['mac'].lower(),
secondaryId = device['ip'],
watched1 = device['name'],
watched2 = device['vendor'],
watched3 = device['interface'],
watched4 = '',
extra = '',
foreignKey = device['mac'])
# "MAC", "IP", "Name", "Vendor", "Interface"
primaryId = device['mac'].lower(),
secondaryId = device['ip'],
watched1 = device['name'],
watched2 = device['vendor'],
watched3 = device['interface'],
watched4 = '',
extra = '',
foreignKey = device['mac']
)

plugin_objects.write_result_file()

mylog('verbose', [f'[{pluginName}] Script finished'])

mylog('verbose', [f'[{pluginName}] Script finished'])

return 0

#===============================================================================

# ===============================================================================
# Execute scan
#===============================================================================
# ===============================================================================
def execute_scan(subnets_list, timeout, fakeMac, args):
devices_list = []

@@ -103,22 +102,25 @@ def execute_scan(subnets_list, timeout, fakeMac, args):
return devices_list


def execute_scan_on_interface (interface, timeout, args):
# Remove unsupported VLAN flags
def execute_scan_on_interface(interface, timeout, args):
# Remove unsupported VLAN flags
interface = re.sub(r'--vlan=\S+', '', interface).strip()

# Prepare command arguments
scan_args = args.split() + interface.replace('--interface=','-e ').split()
scan_args = args.split() + interface.replace('--interface=', '-e ').split()

mylog('verbose', [f'[{pluginName}] scan_args: ', scan_args])

mylog('verbose', [f'[{pluginName}] scan_args: ', scan_args])

try:
result = subprocess.check_output(scan_args, universal_newlines=True)
result = subprocess.check_output(
scan_args,
universal_newlines=True,
timeout=timeout
)
except subprocess.CalledProcessError as e:
error_type = type(e).__name__
result = ""
mylog('verbose', [f'[{pluginName}] ERROR: ', error_type])
mylog('verbose', [f'[{pluginName}] ERROR: ', error_type])

return result

@@ -130,28 +132,25 @@ def parse_nmap_xml(xml_output, interface, fakeMac):
nm = nmap.PortScanner()
nm.analyse_nmap_xml_scan(xml_output)

mylog('verbose', [f'[{pluginName}] Number of hosts: ', len(nm.all_hosts())])
mylog('verbose', [f'[{pluginName}] Number of hosts: ', len(nm.all_hosts())])

for host in nm.all_hosts():
hostname = nm[host].hostname() or '(unknown)'
hostname = nm[host].hostname() or '(unknown)'

ip = nm[host]['addresses']['ipv4'] if 'ipv4' in nm[host]['addresses'] else ''
mac = nm[host]['addresses']['mac'] if 'mac' in nm[host]['addresses'] else ''

mylog('verbose', [f'[{pluginName}] nm[host]: ', nm[host]])
mylog('verbose', [f'[{pluginName}] nm[host]: ', nm[host]])

vendor = ''

if nm[host]['vendor']:
mylog('verbose', [f'[{pluginName}] entry: ', nm[host]['vendor']])

mylog('verbose', [f'[{pluginName}] entry: ', nm[host]['vendor']])

for key, value in nm[host]['vendor'].items():
vendor = value

break

# Log debug information
mylog('verbose', [f"[{pluginName}] Hostname: {hostname}, IP: {ip}, MAC: {mac}, Vendor: {vendor}"])

@@ -172,24 +171,24 @@ def parse_nmap_xml(xml_output, interface, fakeMac):
# MAC or IP missing
mylog('verbose', [f"[{pluginName}] Skipping: {hostname}, IP or MAC missing, or NMAPDEV_GENERATE_MAC setting not enabled"])

except Exception as e:
mylog('verbose', [f"[{pluginName}] Error parsing nmap XML: ", str(e)])

return devices_list


def string_to_mac_hash(input_string):
# Calculate a hash using SHA-256
sha256_hash = hashlib.sha256(input_string.encode()).hexdigest()

# Take the first 12 characters of the hash and format as a MAC address
mac_hash = ':'.join(sha256_hash[i:i+2] for i in range(0, 12, 2))

mac_hash = ':'.join(sha256_hash[i:i + 2] for i in range(0, 12, 2))

return mac_hash

#===============================================================================

# ===============================================================================
# BEGIN
#===============================================================================
# ===============================================================================
if __name__ == '__main__':
main()
main()

@@ -9,13 +9,13 @@ import subprocess
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])

from plugin_helper import Plugin_Objects
from logger import mylog, Logger, append_line_to_file
from utils.datetime_utils import timeNowDB
from helper import get_setting_value
from const import logPath
import conf
from pytz import timezone
from plugin_helper import Plugin_Objects # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger, append_line_to_file # noqa: E402 [flake8 lint suppression]
from utils.datetime_utils import timeNowDB # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
from const import logPath # noqa: E402 [flake8 lint suppression]
import conf # noqa: E402 [flake8 lint suppression]
from pytz import timezone # noqa: E402 [flake8 lint suppression]

# Make sure the TIMEZONE for logging is correct
conf.tz = timezone(get_setting_value('TIMEZONE'))
@@ -32,7 +32,8 @@ RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
# Initialize the Plugin obj output file
plugin_objects = Plugin_Objects(RESULT_FILE)

#-------------------------------------------------------------------------------

# -------------------------------------------------------------------------------
def main():
parser = argparse.ArgumentParser(
description='Scan ports of devices specified by IP addresses'
@@ -85,7 +86,7 @@ def main():

mylog('verbose', [f'[{pluginName}] Total number of ports found by NMAP: ', len(entries)])

for entry in entries:
for entry in entries:

plugin_objects.add_object(
primaryId = entry.mac, # MAC (Device Name)
@@ -94,14 +95,14 @@ def main():
watched2 = entry.service,
watched3 = entry.ip + ":" + entry.port,
watched4 = "",
extra = entry.extra,
foreignKey = entry.mac
extra = entry.extra,
foreignKey = entry.mac
)

plugin_objects.write_result_file()

#-------------------------------------------------------------------------------

# -------------------------------------------------------------------------------
class nmap_entry:
def __init__(self, ip, mac, time, port, state, service, name = '', extra = '', index = 0):
self.ip = ip
@@ -109,13 +110,13 @@ class nmap_entry:
|
||||
self.time = time
|
||||
self.port = port
|
||||
self.state = state
|
||||
self.service = service
|
||||
self.service = service
|
||||
self.extra = extra
|
||||
self.index = index
|
||||
self.hash = str(mac) + str(port)+ str(state)+ str(service)
|
||||
self.hash = str(mac) + str(port) + str(state) + str(service)
|
||||
|
||||
|
||||
#-------------------------------------------------------------------------------
|
||||
# -------------------------------------------------------------------------------
|
||||
def parse_kv_args(raw_args):
|
||||
"""
|
||||
Converts ['ips=a,b,c', 'macs=x,y,z', 'timeout=5'] to a dict.
|
||||
@@ -125,26 +126,28 @@ def parse_kv_args(raw_args):
|
||||
|
||||
for item in raw_args:
|
||||
if '=' not in item:
|
||||
mylog('none', [f"[{pluginName}] Scan: Invalid parameter (missing '='): {item}"])
|
||||
mylog('none', [f"[{pluginName}] Scan: Invalid parameter (missing '='): {item}"])
|
||||
|
||||
key, value = item.split('=', 1)
|
||||
|
||||
if key in parsed:
|
||||
mylog('none', [f"[{pluginName}] Scan: Duplicate parameter supplied: {key}"])
|
||||
mylog('none', [f"[{pluginName}] Scan: Duplicate parameter supplied: {key}"])
|
||||
|
||||
parsed[key] = value
|
||||
|
||||
return parsed
|
||||
|
||||
#-------------------------------------------------------------------------------
|
||||
|
||||
# -------------------------------------------------------------------------------
|
||||
def safe_split_list(value, keyname):
|
||||
"""Split comma list safely and ensure no empty items."""
|
||||
items = [x.strip() for x in value.split(',') if x.strip()]
|
||||
if not items:
|
||||
mylog('none', [f"[{pluginName}] Scan: {keyname} list is empty or invalid"])
|
||||
mylog('none', [f"[{pluginName}] Scan: {keyname} list is empty or invalid"])
|
||||
return items
|
||||
|
||||
#-------------------------------------------------------------------------------
|
||||
|
||||
# -------------------------------------------------------------------------------
|
||||
def performNmapScan(deviceIPs, deviceMACs, timeoutSec, args):
|
||||
"""
|
||||
run nmap scan on a list of devices
|
||||
@@ -154,15 +157,12 @@ def performNmapScan(deviceIPs, deviceMACs, timeoutSec, args):
|
||||
# collect ports / new Nmap Entries
|
||||
newEntriesTmp = []
|
||||
|
||||
|
||||
if len(deviceIPs) > 0:
|
||||
if len(deviceIPs) > 0:
|
||||
|
||||
devTotal = len(deviceIPs)
|
||||
|
||||
|
||||
mylog('verbose', [f'[{pluginName}] Scan: Nmap for max ', str(timeoutSec), 's ('+ str(round(int(timeoutSec) / 60, 1)) +'min) per device'])
|
||||
mylog('verbose', ["[NMAP Scan] Estimated max delay: ", (devTotal * int(timeoutSec)), 's ', '(', round((devTotal * int(timeoutSec))/60,1) , 'min)' ])
|
||||
|
||||
mylog('verbose', [f'[{pluginName}] Scan: Nmap for max ', str(timeoutSec), 's (' + str(round(int(timeoutSec) / 60, 1)) + 'min) per device'])
|
||||
mylog('verbose', ["[NMAP Scan] Estimated max delay: ", (devTotal * int(timeoutSec)), 's ', '(', round((devTotal * int(timeoutSec)) / 60, 1) , 'min)'])
|
||||
|
||||
devIndex = 0
|
||||
for ip in deviceIPs:
|
||||
@@ -171,67 +171,63 @@ def performNmapScan(deviceIPs, deviceMACs, timeoutSec, args):
|
||||
# prepare arguments from user supplied ones
|
||||
nmapArgs = ['nmap'] + args.split() + [ip]
|
||||
|
||||
progress = ' (' + str(devIndex+1) + '/' + str(devTotal) + ')'
|
||||
progress = ' (' + str(devIndex + 1) + '/' + str(devTotal) + ')'
|
||||
|
||||
try:
|
||||
# try runnning a subprocess with a forced (timeout) in case the subprocess hangs
|
||||
output = subprocess.check_output (nmapArgs, universal_newlines=True, stderr=subprocess.STDOUT, timeout=(float(timeoutSec)))
|
||||
output = subprocess.check_output(
|
||||
nmapArgs,
|
||||
universal_newlines=True,
|
||||
stderr=subprocess.STDOUT,
|
||||
timeout=(float(timeoutSec))
|
||||
)
|
||||
except subprocess.CalledProcessError as e:
|
||||
# An error occured, handle it
|
||||
mylog('none', ["[NMAP Scan] " ,e.output])
|
||||
mylog('none', ["[NMAP Scan] ⚠ ERROR - Nmap Scan - check logs", progress])
|
||||
mylog('none', ["[NMAP Scan] ", e.output])
|
||||
mylog('none', ["[NMAP Scan] ⚠ ERROR - Nmap Scan - check logs", progress])
|
||||
except subprocess.TimeoutExpired:
|
||||
mylog('verbose', [f'[{pluginName}] Nmap TIMEOUT - the process forcefully terminated as timeout reached for ', ip, progress])
|
||||
mylog('verbose', [f'[{pluginName}] Nmap TIMEOUT - the process forcefully terminated as timeout reached for ', ip, progress])
|
||||
|
||||
if output == "": # check if the subprocess failed
|
||||
mylog('minimal', [f'[{pluginName}] Nmap FAIL for ', ip, progress ,' check logs for details'])
|
||||
else:
|
||||
if output == "": # check if the subprocess failed
|
||||
mylog('minimal', [f'[{pluginName}] Nmap FAIL for ', ip, progress, ' check logs for details'])
|
||||
else:
|
||||
mylog('verbose', [f'[{pluginName}] Nmap SUCCESS for ', ip, progress])
|
||||
|
||||
|
||||
|
||||
# check the last run output
|
||||
# check the last run output
|
||||
newLines = output.split('\n')
|
||||
|
||||
# regular logging
|
||||
for line in newLines:
|
||||
append_line_to_file (logPath + '/app_nmap.log', line +'\n')
|
||||
|
||||
append_line_to_file(logPath + '/app_nmap.log', line + '\n')
|
||||
|
||||
index = 0
|
||||
startCollecting = False
|
||||
duration = ""
|
||||
duration = ""
|
||||
newPortsPerDevice = 0
|
||||
for line in newLines:
|
||||
for line in newLines:
|
||||
if 'Starting Nmap' in line:
|
||||
if len(newLines) > index+1 and 'Note: Host seems down' in newLines[index+1]:
|
||||
break # this entry is empty
|
||||
if len(newLines) > index + 1 and 'Note: Host seems down' in newLines[index + 1]:
|
||||
break # this entry is empty
|
||||
elif 'PORT' in line and 'STATE' in line and 'SERVICE' in line:
|
||||
startCollecting = True
|
||||
elif 'PORT' in line and 'STATE' in line and 'SERVICE' in line:
|
||||
startCollecting = False # end reached
|
||||
elif startCollecting and len(line.split()) == 3:
|
||||
elif 'PORT' in line and 'STATE' in line and 'SERVICE' in line:
|
||||
startCollecting = False # end reached
|
||||
elif startCollecting and len(line.split()) == 3:
|
||||
newEntriesTmp.append(nmap_entry(ip, deviceMACs[devIndex], timeNowDB(), line.split()[0], line.split()[1], line.split()[2]))
|
||||
newPortsPerDevice += 1
|
||||
elif 'Nmap done' in line:
|
||||
duration = line.split('scanned in ')[1]
|
||||
|
||||
mylog('verbose', [f'[{pluginName}] {newPortsPerDevice} ports found on {deviceMACs[devIndex]}'])
|
||||
duration = line.split('scanned in ')[1]
|
||||
|
||||
mylog('verbose', [f'[{pluginName}] {newPortsPerDevice} ports found on {deviceMACs[devIndex]} after {duration}'])
|
||||
|
||||
index += 1
|
||||
devIndex += 1
|
||||
|
||||
|
||||
|
||||
#end for loop
|
||||
|
||||
return newEntriesTmp
|
||||
|
||||
#===============================================================================
|
||||
|
||||
# ===============================================================================
|
||||
# BEGIN
|
||||
#===============================================================================
|
||||
# ===============================================================================
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
|
||||
|
||||
|
||||
|
||||
@@ -11,14 +11,14 @@ import re
|
||||
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
|
||||
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
|
||||
|
||||
from plugin_helper import Plugin_Objects
|
||||
from logger import mylog, Logger
|
||||
from helper import get_setting_value
|
||||
from const import logPath
|
||||
from database import DB
|
||||
from models.device_instance import DeviceInstance
|
||||
import conf
|
||||
from pytz import timezone
|
||||
from plugin_helper import Plugin_Objects # noqa: E402 [flake8 lint suppression]
|
||||
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
|
||||
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
|
||||
from const import logPath # noqa: E402 [flake8 lint suppression]
|
||||
from database import DB # noqa: E402 [flake8 lint suppression]
|
||||
from models.device_instance import DeviceInstance # noqa: E402 [flake8 lint suppression]
|
||||
import conf # noqa: E402 [flake8 lint suppression]
|
||||
from pytz import timezone # noqa: E402 [flake8 lint suppression]
|
||||
|
||||
# Make sure the TIMEZONE for logging is correct
|
||||
conf.tz = timezone(get_setting_value('TIMEZONE'))
|
||||
@@ -33,11 +33,9 @@ LOG_FILE = os.path.join(LOG_PATH, f'script.{pluginName}.log')
|
||||
RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
|
||||
|
||||
|
||||
|
||||
def main():
|
||||
|
||||
mylog('verbose', [f'[{pluginName}] In script'])
|
||||
|
||||
mylog('verbose', [f'[{pluginName}] In script'])
|
||||
|
||||
timeout = get_setting_value('NSLOOKUP_RUN_TIMEOUT')
|
||||
|
||||
@@ -52,13 +50,13 @@ def main():
|
||||
device_handler = DeviceInstance(db)
|
||||
|
||||
# Retrieve devices
|
||||
if get_setting_value("REFRESH_FQDN"):
|
||||
if get_setting_value("REFRESH_FQDN"):
|
||||
devices = device_handler.getAll()
|
||||
else:
|
||||
else:
|
||||
devices = device_handler.getUnknown()
|
||||
|
||||
mylog('verbose', [f'[{pluginName}] Devices count: {len(devices)}'])
|
||||
|
||||
mylog('verbose', [f'[{pluginName}] Devices count: {len(devices)}'])
|
||||
|
||||
# TEST - below is a WINDOWS host IP
|
||||
# execute_name_lookup('192.168.1.121', timeout)
|
||||
|
||||
@@ -67,31 +65,32 @@ def main():
|
||||
|
||||
if domain_name != '':
|
||||
plugin_objects.add_object(
|
||||
# "MAC", "IP", "Server", "Name"
|
||||
primaryId = device['devMac'],
|
||||
secondaryId = device['devLastIP'],
|
||||
watched1 = dns_server,
|
||||
watched2 = domain_name,
|
||||
watched3 = '',
|
||||
watched4 = '',
|
||||
extra = '',
|
||||
foreignKey = device['devMac'])
|
||||
# "MAC", "IP", "Server", "Name"
|
||||
primaryId = device['devMac'],
|
||||
secondaryId = device['devLastIP'],
|
||||
watched1 = dns_server,
|
||||
watched2 = domain_name,
|
||||
watched3 = '',
|
||||
watched4 = '',
|
||||
extra = '',
|
||||
foreignKey = device['devMac']
|
||||
)
|
||||
|
||||
plugin_objects.write_result_file()
|
||||
|
||||
|
||||
mylog('verbose', [f'[{pluginName}] Script finished'])
|
||||
|
||||
|
||||
mylog('verbose', [f'[{pluginName}] Script finished'])
|
||||
|
||||
return 0
|
||||
|
||||
#===============================================================================
|
||||
|
||||
# ===============================================================================
|
||||
# Execute scan
|
||||
#===============================================================================
|
||||
def execute_nslookup (ip, timeout):
|
||||
# ===============================================================================
|
||||
def execute_nslookup(ip, timeout):
|
||||
"""
|
||||
Execute the NSLOOKUP command on IP.
|
||||
"""
|
||||
|
||||
|
||||
nslookup_args = ['nslookup', ip]
|
||||
|
||||
# Execute command
|
||||
@@ -99,7 +98,13 @@ def execute_nslookup (ip, timeout):
|
||||
|
||||
try:
|
||||
# try runnning a subprocess with a forced (timeout) in case the subprocess hangs
|
||||
output = subprocess.check_output (nslookup_args, universal_newlines=True, stderr=subprocess.STDOUT, timeout=(timeout), text=True)
|
||||
output = subprocess.check_output(
|
||||
nslookup_args,
|
||||
universal_newlines=True,
|
||||
stderr=subprocess.STDOUT,
|
||||
timeout=(timeout),
|
||||
text=True
|
||||
)
|
||||
|
||||
domain_name = ''
|
||||
dns_server = ''
|
||||
@@ -110,8 +115,7 @@ def execute_nslookup (ip, timeout):
|
||||
domain_pattern = re.compile(r'name\s*=\s*([^\s]+)', re.IGNORECASE)
|
||||
server_pattern = re.compile(r'Server:\s+(.+)', re.IGNORECASE)
|
||||
|
||||
|
||||
domain_match = domain_pattern.search(output)
|
||||
domain_match = domain_pattern.search(output)
|
||||
server_match = server_pattern.search(output)
|
||||
|
||||
if domain_match:
|
||||
@@ -131,24 +135,20 @@ def execute_nslookup (ip, timeout):
|
||||
else:
|
||||
mylog('verbose', [f'[{pluginName}]', e.output])
|
||||
# Handle other errors here
|
||||
# mylog('verbose', [f'[{pluginName}] ⚠ ERROR - check logs'])
|
||||
|
||||
except subprocess.TimeoutExpired:
|
||||
mylog('verbose', [f'[{pluginName}] TIMEOUT - the process forcefully terminated as timeout reached'])
|
||||
# mylog('verbose', [f'[{pluginName}] ⚠ ERROR - check logs'])
|
||||
|
||||
except subprocess.TimeoutExpired:
|
||||
mylog('verbose', [f'[{pluginName}] TIMEOUT - the process forcefully terminated as timeout reached'])
|
||||
|
||||
if output != "": # check if the subprocess failed
|
||||
|
||||
if output == "": # check if the subprocess failed
|
||||
tmp = 1 # can't have empty
|
||||
# mylog('verbose', [f'[{pluginName}] Scan: FAIL - check logs'])
|
||||
else:
|
||||
mylog('verbose', [f'[{pluginName}] Scan: SUCCESS'])
|
||||
|
||||
return '', ''
|
||||
|
||||
|
||||
|
||||
return '', ''
|
||||
|
||||
#===============================================================================
|
||||
|
||||
# ===============================================================================
|
||||
# BEGIN
|
||||
#===============================================================================
|
||||
# ===============================================================================
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
main()
|
||||
|
||||
@@ -15,10 +15,9 @@ __version__ = "1.3" # fix detection of the default gateway IP address that woul
|
||||
# try to identify and populate their connections by switch/accesspoints and ports/SSID
|
||||
# try to differentiate root bridges from accessory
|
||||
|
||||
|
||||
#
|
||||
# sample code to update unbound on opnsense - for reference...
|
||||
# curl -X POST -d '{"host":{"enabled":"1","hostname":"test","domain":"testdomain.com","rr":"A","mxprio":"","mx":"","server":"10.0.1.1","description":""}}' -H "Content-Type: application/json" -k -u $OPNS_KEY:$OPNS_SECRET https://$IPFW/api/unbound/settings/AddHostOverride
|
||||
# curl -X POST -d '{"host":{"enabled":"1","hostname":"test","domain":"testdomain.com","rr":"A","mxprio":"","mx":"","server":"10.0.1.1","description":""}}'\
|
||||
# -H "Content-Type: application/json" -k -u $OPNS_KEY:$OPNS_SECRET https://$IPFW/api/unbound/settings/AddHostOverride
|
||||
#
|
||||
import os
|
||||
import sys
|
||||
@@ -35,12 +34,12 @@ import multiprocessing
|
||||
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
|
||||
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
|
||||
|
||||
from plugin_helper import Plugin_Objects
|
||||
from logger import mylog, Logger
|
||||
from const import logPath
|
||||
from helper import get_setting_value
|
||||
from pytz import timezone
|
||||
import conf
|
||||
from plugin_helper import Plugin_Objects # noqa: E402 [flake8 lint suppression]
|
||||
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
|
||||
from const import logPath # noqa: E402 [flake8 lint suppression]
|
||||
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
|
||||
from pytz import timezone # noqa: E402 [flake8 lint suppression]
|
||||
import conf # noqa: E402 [flake8 lint suppression]
|
||||
|
||||
# Make sure the TIMEZONE for logging is correct
|
||||
conf.tz = timezone(get_setting_value('TIMEZONE'))
|
||||
@@ -87,8 +86,6 @@ cMAC, cIP, cNAME, cSWITCH_AP, cPORT_SSID = range(5)
|
||||
OMDLOGLEVEL = "debug"
|
||||
|
||||
|
||||
|
||||
#
|
||||
# translate MAC address from standard ieee model to ietf draft
|
||||
# AA-BB-CC-DD-EE-FF to aa:bb:cc:dd:ee:ff
|
||||
# tplink adheres to ieee, Nax adheres to ietf
|
||||
@@ -137,12 +134,12 @@ def callomada(myargs):
|
||||
|
||||
omada_output = ""
|
||||
retries = 2
|
||||
while omada_output == "" and retries > 1:
|
||||
while omada_output == "" and retries > 0:
|
||||
retries = retries - 1
|
||||
try:
|
||||
mf = io.StringIO()
|
||||
with redirect_stdout(mf):
|
||||
bar = omada(myargs)
|
||||
omada(myargs)
|
||||
omada_output = mf.getvalue()
|
||||
except Exception:
|
||||
mylog(
|
||||
@@ -186,55 +183,71 @@ def add_uplink(
|
||||
sadevices_linksbymac,
|
||||
port_byswitchmac_byclientmac,
|
||||
):
|
||||
# Ensure switch_mac exists in device_data_bymac
|
||||
# Ensure switch exists
|
||||
if switch_mac not in device_data_bymac:
|
||||
mylog("none", [f"[{pluginName}] switch_mac '{switch_mac}' not found in device_data_bymac"])
|
||||
return
|
||||
|
||||
# Ensure SWITCH_AP key exists in the dictionary
|
||||
if SWITCH_AP not in device_data_bymac[switch_mac]:
|
||||
mylog("none", [f"[{pluginName}] Missing key '{SWITCH_AP}' in device_data_bymac[{switch_mac}]"])
|
||||
return
|
||||
|
||||
# Check if uplink should be added
|
||||
if device_data_bymac[switch_mac][SWITCH_AP] in [None, "null"]:
|
||||
device_data_bymac[switch_mac][SWITCH_AP] = uplink_mac
|
||||
|
||||
# Ensure uplink_mac exists in device_data_bymac
|
||||
dev_switch = device_data_bymac[switch_mac]
|
||||
|
||||
# Ensure list is long enough to contain SWITCH_AP index
|
||||
if len(dev_switch) <= SWITCH_AP:
|
||||
mylog("none", [f"[{pluginName}] SWITCH_AP index {SWITCH_AP} missing in record for {switch_mac}"])
|
||||
return
|
||||
|
||||
# Add uplink only if empty
|
||||
if dev_switch[SWITCH_AP] in (None, "null"):
|
||||
dev_switch[SWITCH_AP] = uplink_mac
|
||||
|
||||
# Validate uplink_mac exists
|
||||
if uplink_mac not in device_data_bymac:
|
||||
mylog("none", [f"[{pluginName}] uplink_mac '{uplink_mac}' not found in device_data_bymac"])
|
||||
return
|
||||
|
||||
# Determine port to uplink
|
||||
if (
|
||||
device_data_bymac[switch_mac].get(TYPE) == "Switch"
|
||||
and device_data_bymac[uplink_mac].get(TYPE) == "Switch"
|
||||
):
|
||||
|
||||
dev_uplink = device_data_bymac[uplink_mac]
|
||||
|
||||
# Get TYPE safely
|
||||
switch_type = dev_switch[TYPE] if len(dev_switch) > TYPE else None
|
||||
uplink_type = dev_uplink[TYPE] if len(dev_uplink) > TYPE else None
|
||||
|
||||
# Switch-to-switch link → use port mapping
|
||||
if switch_type == "Switch" and uplink_type == "Switch":
|
||||
port_to_uplink = port_byswitchmac_byclientmac.get(switch_mac, {}).get(uplink_mac)
|
||||
if port_to_uplink is None:
|
||||
mylog("none", [f"[{pluginName}] Missing port info for switch_mac '{switch_mac}' and uplink_mac '{uplink_mac}'"])
|
||||
mylog("none", [
|
||||
f"[{pluginName}] Missing port info for {switch_mac} → {uplink_mac}"
|
||||
])
|
||||
return
|
||||
else:
|
||||
port_to_uplink = device_data_bymac[uplink_mac].get(PORT_SSID)
|
||||
|
||||
# Assign port to switch_mac
|
||||
device_data_bymac[switch_mac][PORT_SSID] = port_to_uplink
|
||||
|
||||
# Recursively add uplinks for linked devices
|
||||
# Other device types → read PORT_SSID index
|
||||
if len(dev_uplink) <= PORT_SSID:
|
||||
mylog("none", [
|
||||
f"[{pluginName}] PORT_SSID index missing for uplink {uplink_mac}"
|
||||
])
|
||||
return
|
||||
port_to_uplink = dev_uplink[PORT_SSID]
|
||||
|
||||
# Assign port to switch
|
||||
if len(dev_switch) > PORT_SSID:
|
||||
dev_switch[PORT_SSID] = port_to_uplink
|
||||
else:
|
||||
mylog("none", [
|
||||
f"[{pluginName}] PORT_SSID index missing in switch {switch_mac}"
|
||||
])
|
||||
|
||||
# Process children recursively
|
||||
for link in sadevices_linksbymac.get(switch_mac, []):
|
||||
if (
|
||||
link in device_data_bymac
|
||||
and device_data_bymac[link].get(SWITCH_AP) in [None, "null"]
|
||||
and device_data_bymac[switch_mac].get(TYPE) == "Switch"
|
||||
link in device_data_bymac and len(device_data_bymac[link]) > SWITCH_AP and device_data_bymac[link][SWITCH_AP] in (None, "null") and len(dev_switch) > TYPE
|
||||
):
|
||||
add_uplink(
|
||||
switch_mac,
|
||||
link,
|
||||
device_data_bymac,
|
||||
sadevices_linksbymac,
|
||||
port_byswitchmac_byclientmac,
|
||||
)
|
||||
|
||||
if dev_switch[TYPE] == "Switch":
|
||||
add_uplink(
|
||||
switch_mac,
|
||||
link,
|
||||
device_data_bymac,
|
||||
sadevices_linksbymac,
|
||||
port_byswitchmac_byclientmac,
|
||||
)
|
||||
|
||||
|
||||
# ----------------------------------------------
|
||||
@@ -324,16 +337,16 @@ def main():
|
||||
)
|
||||
mymac = ieee2ietf_mac_formater(device[MAC])
|
||||
plugin_objects.add_object(
|
||||
primaryId=mymac, # MAC
|
||||
secondaryId=device[IP], # IP
|
||||
watched1=device[NAME], # NAME/HOSTNAME
|
||||
watched2=ParentNetworkNode, # PARENT NETWORK NODE MAC
|
||||
watched3=myport, # PORT
|
||||
watched4=myssid, # SSID
|
||||
primaryId=mymac, # MAC
|
||||
secondaryId=device[IP], # IP
|
||||
watched1=device[NAME], # NAME/HOSTNAME
|
||||
watched2=ParentNetworkNode, # PARENT NETWORK NODE MAC
|
||||
watched3=myport, # PORT
|
||||
watched4=myssid, # SSID
|
||||
extra=device[TYPE],
|
||||
# omada_site, # SITENAME (cur_NetworkSite) or VENDOR (cur_Vendor) (PICK one and adjust config.json -> "column": "Extra")
|
||||
foreignKey=device[MAC].lower().replace("-", ":"),
|
||||
) # usually MAC
|
||||
) # usually MAC
|
||||
|
||||
mylog(
|
||||
"verbose",
|
||||
@@ -369,7 +382,6 @@ def get_omada_devices_details(msadevice_data):
|
||||
mswitch_dump = callomada(["-t", "myomada", "switch", "-d", mthisswitch])
|
||||
else:
|
||||
mswitch_detail = ""
|
||||
nswitch_dump = ""
|
||||
return mswitch_detail, mswitch_dump
|
||||
|
||||
|
||||
@@ -414,7 +426,6 @@ def get_device_data(omada_clients_output, switches_and_aps, device_handler):
|
||||
# 17:27:10 [<unique_prefix>] token: "['1A-2B-3C-4D-5E-6F', '192.168.0.217', '1A-2B-3C-4D-5E-6F', '17', '40-AE-30-A5-A7-50, 'Switch']"
|
||||
# constants
|
||||
sadevices_macbyname = {}
|
||||
sadevices_macbymac = {}
|
||||
sadevices_linksbymac = {}
|
||||
port_byswitchmac_byclientmac = {}
|
||||
device_data_bymac = {}
|
||||
@@ -427,7 +438,7 @@ def get_device_data(omada_clients_output, switches_and_aps, device_handler):
|
||||
def run_command(command, index):
|
||||
result = subprocess.run(command, capture_output=True, text=True, shell=True)
|
||||
return str(index), result.stdout.strip()
|
||||
|
||||
|
||||
myindex, command_output= run_command(command, 2)
|
||||
mylog('verbose', [f'[{pluginName}] command={command} index={myindex} results={command_output}'])
|
||||
"""
|
||||
@@ -556,11 +567,11 @@ def get_device_data(omada_clients_output, switches_and_aps, device_handler):
|
||||
#
|
||||
|
||||
naxname = real_naxname
|
||||
if real_naxname != None:
|
||||
if real_naxname is not None:
|
||||
if "(" in real_naxname:
|
||||
# removing parenthesis and domains from the name
|
||||
naxname = real_naxname.split("(")[0]
|
||||
if naxname != None and "." in naxname:
|
||||
if naxname is not None and "." in naxname:
|
||||
naxname = naxname.split(".")[0]
|
||||
if naxname in (None, "null", ""):
|
||||
naxname = (
|
||||
|
||||
@@ -25,7 +25,6 @@ import sys
|
||||
import urllib3
|
||||
import requests
|
||||
import time
|
||||
import datetime
|
||||
import pytz
|
||||
|
||||
from datetime import datetime
|
||||
@@ -35,11 +34,11 @@ from typing import Literal, Any, Dict
|
||||
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
|
||||
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
|
||||
|
||||
from plugin_helper import Plugin_Objects, is_typical_router_ip, is_mac
|
||||
from logger import mylog, Logger
|
||||
from const import logPath
|
||||
from helper import get_setting_value
|
||||
import conf
|
||||
from plugin_helper import Plugin_Objects, is_typical_router_ip, is_mac # noqa: E402 [flake8 lint suppression]
|
||||
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
|
||||
from const import logPath # noqa: E402 [flake8 lint suppression]
|
||||
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
|
||||
import conf # noqa: E402 [flake8 lint suppression]
|
||||
|
||||
# Make sure the TIMEZONE for logging is correct
|
||||
conf.tz = pytz.timezone(get_setting_value('TIMEZONE'))
|
||||
@@ -176,7 +175,10 @@ class OmadaHelper:
|
||||
# If it's not a gateway try to assign parent node MAC
|
||||
if data.get("type", "") != "gateway":
|
||||
parent_mac = OmadaHelper.normalize_mac(data.get("uplinkDeviceMac"))
|
||||
entry["parent_node_mac_address"] = parent_mac.get("response_result") if isinstance(parent_mac, dict) and parent_mac.get("response_type") == "success" else ""
|
||||
|
||||
resp_type = parent_mac.get("response_type")
|
||||
|
||||
entry["parent_node_mac_address"] = parent_mac.get("response_result") if isinstance(parent_mac, dict) and resp_type == "success" else ""
|
||||
|
||||
# Applicable only for CLIENT
|
||||
if input_type == "client":
|
||||
@@ -185,15 +187,26 @@ class OmadaHelper:
|
||||
# Try to assign parent node MAC and PORT/SSID to the CLIENT
|
||||
if data.get("connectDevType", "") == "gateway":
|
||||
parent_mac = OmadaHelper.normalize_mac(data.get("gatewayMac"))
|
||||
entry["parent_node_mac_address"] = parent_mac.get("response_result") if isinstance(parent_mac, dict) and parent_mac.get("response_type") == "success" else ""
|
||||
|
||||
resp_type = parent_mac.get("response_type")
|
||||
|
||||
entry["parent_node_mac_address"] = parent_mac.get("response_result") if isinstance(parent_mac, dict) and resp_type == "success" else ""
|
||||
entry["parent_node_port"] = data.get("port", "")
|
||||
|
||||
elif data.get("connectDevType", "") == "switch":
|
||||
parent_mac = OmadaHelper.normalize_mac(data.get("switchMac"))
|
||||
entry["parent_node_mac_address"] = parent_mac.get("response_result") if isinstance(parent_mac, dict) and parent_mac.get("response_type") == "success" else ""
|
||||
|
||||
resp_type = parent_mac.get("response_type")
|
||||
|
||||
entry["parent_node_mac_address"] = parent_mac.get("response_result") if isinstance(parent_mac, dict) and resp_type == "success" else ""
|
||||
entry["parent_node_port"] = data.get("port", "")
|
||||
|
||||
elif data.get("connectDevType", "") == "ap":
|
||||
parent_mac = OmadaHelper.normalize_mac(data.get("apMac"))
|
||||
entry["parent_node_mac_address"] = parent_mac.get("response_result") if isinstance(parent_mac, dict) and parent_mac.get("response_type") == "success" else ""
|
||||
|
||||
resp_type = parent_mac.get("response_type")
|
||||
|
||||
entry["parent_node_mac_address"] = parent_mac.get("response_result") if isinstance(parent_mac, dict) and resp_type == "success" else ""
|
||||
entry["parent_node_ssid"] = data.get("ssid", "")
|
||||
|
||||
# Add the entry to the result
|
||||
@@ -253,7 +266,7 @@ class OmadaAPI:
|
||||
"""Return request headers."""
|
||||
headers = {"Content-type": "application/json"}
|
||||
# Add access token to header if requested and available
|
||||
if include_auth == True:
|
||||
if include_auth is True:
|
||||
if not self.access_token:
|
||||
OmadaHelper.debug("No access token available for headers")
|
||||
else:
|
||||
@@ -283,7 +296,7 @@ class OmadaAPI:
|
||||
OmadaHelper.verbose(f"{method} request error: {str(ex)}")
|
||||
return OmadaHelper.response("error", f"{method} request failed to endpoint '{endpoint}' with error: {str(ex)}")
|
||||
|
||||
def authenticate(self) -> Dict[str, any]:
|
||||
def authenticate(self) -> Dict[str, Any]:
|
||||
"""Make an endpoint request to get access token."""
|
||||
OmadaHelper.verbose("Starting authentication process")
|
||||
|
||||
@@ -368,7 +381,7 @@ class OmadaAPI:
|
||||
|
||||
# Failed site population
|
||||
OmadaHelper.debug(f"Site population response: {response}")
|
||||
return OmadaHelper.response("error", f"Site population failed - error: {response.get('response_message', 'Not provided')}")
|
||||
return OmadaHelper.response("error", f"Site population failed - error: {response.get('response_message', 'Not provided')}")
|
||||
|
||||
def requested_sites(self) -> list:
|
||||
"""Returns sites requested by user."""
|
||||
|
||||
@@ -17,12 +17,12 @@ sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
|
||||
|
||||
pluginName = 'PIHOLEAPI'
|
||||
|
||||
from plugin_helper import Plugin_Objects, is_mac
|
||||
from logger import mylog, Logger
|
||||
from helper import get_setting_value
|
||||
from const import logPath
|
||||
import conf
|
||||
from pytz import timezone
|
||||
from plugin_helper import Plugin_Objects, is_mac # noqa: E402 [flake8 lint suppression]
|
||||
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
|
||||
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
|
||||
from const import logPath # noqa: E402 [flake8 lint suppression]
|
||||
import conf # noqa: E402 [flake8 lint suppression]
|
||||
from pytz import timezone # noqa: E402 [flake8 lint suppression]
|
||||
|
||||
# Setup timezone & logger using standard NAX helpers
|
||||
conf.tz = timezone(get_setting_value('TIMEZONE'))
|
||||
@@ -179,7 +179,7 @@ def get_pihole_network_devices():
|
||||
resp = requests.get(PIHOLEAPI_URL + 'api/network/devices', headers=headers, params=params, verify=PIHOLEAPI_VERIFY_SSL, timeout=PIHOLEAPI_RUN_TIMEOUT)
|
||||
resp.raise_for_status()
|
||||
data = resp.json()
|
||||
|
||||
|
||||
mylog('debug', [f'[{pluginName}] Pi-hole API returned data: {json.dumps(data)}'])
|
||||
|
||||
except Exception as e:
|
||||
@@ -267,8 +267,8 @@ def main():
|
||||
for entry in device_entries:
|
||||
|
||||
if is_mac(entry['mac']):
|
||||
# Map to Plugin_Objects fields
|
||||
mylog('verbose', [f'[{pluginName}] found: {entry['name']}|{entry['mac']}|{entry['ip']}'])
|
||||
# Map to Plugin_Objects fields
|
||||
mylog('verbose', [f"[{pluginName}] found: {entry['name']}|{entry['mac']}|{entry['ip']}"])
|
||||
|
||||
plugin_objects.add_object(
|
||||
primaryId=str(entry['mac']),
|
||||
@@ -281,7 +281,7 @@ def main():
|
||||
foreignKey=str(entry['mac'])
|
||||
)
|
||||
else:
|
||||
mylog('verbose', [f'[{pluginName}] Skipping invalid MAC: {entry['name']}|{entry['mac']}|{entry['ip']}'])
|
||||
mylog('verbose', [f"[{pluginName}] Skipping invalid MAC: {entry['name']}|{entry['mac']}|{entry['ip']}"])
|
||||
|
||||
# Write result file for NetAlertX to ingest
|
||||
plugin_objects.write_result_file()
|
||||
|
||||
@@ -5,18 +5,18 @@ import os
|
||||
import re
|
||||
import base64
|
||||
import json
|
||||
from datetime import datetime
|
||||
|
||||
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
|
||||
|
||||
sys.path.append(f"{INSTALL_PATH}/front/plugins")
|
||||
sys.path.append(f'{INSTALL_PATH}/server')
|
||||
sys.path.append(f'{INSTALL_PATH}/server')
|
||||
|
||||
from logger import mylog, Logger
|
||||
from utils.datetime_utils import timeNowDB
|
||||
from const import default_tz, fullConfPath
|
||||
from logger import mylog # noqa: E402 [flake8 lint suppression]
|
||||
from utils.datetime_utils import timeNowDB # noqa: E402 [flake8 lint suppression]
|
||||
from const import default_tz, fullConfPath # noqa: E402 [flake8 lint suppression]
|
||||
|
||||
#-------------------------------------------------------------------------------
|
||||
|
||||
# -------------------------------------------------------------------------------
|
||||
def read_config_file():
|
||||
"""
|
||||
retuns dict on the config file key:value pairs
|
||||
@@ -25,15 +25,15 @@ def read_config_file():
|
||||
|
||||
filename = fullConfPath
|
||||
|
||||
|
||||
print('[plugin_helper] reading config file')
|
||||
|
||||
# load the variables from .conf
|
||||
with open(filename, "r") as file:
|
||||
code = compile(file.read(), filename, "exec")
|
||||
|
||||
confDict = {} # config dictionary
|
||||
confDict = {} # config dictionary
|
||||
exec(code, {"__builtins__": {}}, confDict)
|
||||
return confDict
|
||||
return confDict
|
||||
|
||||
|
||||
configFile = read_config_file()
|
||||
@@ -42,6 +42,7 @@ if timeZoneSetting not in all_timezones:
|
||||
timeZoneSetting = default_tz
|
||||
timeZone = pytz.timezone(timeZoneSetting)
|
||||
|
||||
|
||||
# -------------------------------------------------------------------
|
||||
# Sanitizes plugin output
|
||||
def handleEmpty(input):
|
||||
@@ -54,70 +55,72 @@ def handleEmpty(input):
|
||||
input = re.sub(r'[^\x00-\x7F]+', ' ', input)
|
||||
input = input.replace('\n', '') # Removing new lines
|
||||
return input
|
||||
|
||||
|
||||
|
||||
# -------------------------------------------------------------------
|
||||
# Sanitizes string
|
||||
def rmBadChars(input):
|
||||
|
||||
|
||||
input = handleEmpty(input)
|
||||
input = input.replace("'", '_') # Removing ' (single quotes)
|
||||
|
||||
|
||||
return input
|
||||
|
||||
|
||||
# -------------------------------------------------------------------
|
||||
# check if this is a router IP
|
||||
def is_typical_router_ip(ip_address):
|
||||
# List of common default gateway IP addresses
|
||||
common_router_ips = [
|
||||
"192.168.0.1", "192.168.1.1", "192.168.1.254", "192.168.0.254",
|
||||
"10.0.0.1", "10.1.1.1", "192.168.2.1", "192.168.10.1", "192.168.11.1",
|
||||
"192.168.100.1", "192.168.101.1", "192.168.123.254", "192.168.223.1",
|
||||
"192.168.31.1", "192.168.8.1", "192.168.254.254", "192.168.50.1",
|
||||
"192.168.3.1", "192.168.4.1", "192.168.5.1", "192.168.9.1",
|
||||
"192.168.15.1", "192.168.16.1", "192.168.20.1", "192.168.30.1",
|
||||
"192.168.42.1", "192.168.62.1", "192.168.178.1", "192.168.1.1",
|
||||
"192.168.1.254", "192.168.0.1", "192.168.0.10", "192.168.0.100",
|
||||
"192.168.0.254"
|
||||
]
|
||||
|
||||
return ip_address in common_router_ips
|
||||
# List of common default gateway IP addresses
|
||||
common_router_ips = [
|
||||
"192.168.0.1", "192.168.1.1", "192.168.1.254", "192.168.0.254",
|
||||
"10.0.0.1", "10.1.1.1", "192.168.2.1", "192.168.10.1", "192.168.11.1",
|
||||
"192.168.100.1", "192.168.101.1", "192.168.123.254", "192.168.223.1",
|
||||
"192.168.31.1", "192.168.8.1", "192.168.254.254", "192.168.50.1",
|
||||
"192.168.3.1", "192.168.4.1", "192.168.5.1", "192.168.9.1",
|
||||
"192.168.15.1", "192.168.16.1", "192.168.20.1", "192.168.30.1",
|
||||
"192.168.42.1", "192.168.62.1", "192.168.178.1", "192.168.1.1",
|
||||
"192.168.1.254", "192.168.0.1", "192.168.0.10", "192.168.0.100",
|
||||
"192.168.0.254"
|
||||
]
|
||||
|
||||
return ip_address in common_router_ips
|
||||
|
||||
|
||||
# -------------------------------------------------------------------
|
||||
# Check if a valid MAC address
|
||||
def is_mac(input):
|
||||
input_str = str(input).lower() # Convert to string and lowercase so non-string values won't raise errors
|
||||
|
||||
|
||||
isMac = bool(re.match("[0-9a-f]{2}([-:]?)[0-9a-f]{2}(\\1[0-9a-f]{2}){4}$", input_str))
|
||||
|
||||
|
||||
if not isMac: # If it's not a MAC address, log the input
|
||||
mylog('verbose', [f'[is_mac] not a MAC: {input_str}'])
|
||||
|
||||
|
||||
return isMac
|
||||
|
||||
|
||||
# -------------------------------------------------------------------
|
||||
def decodeBase64(inputParamBase64):
|
||||
|
||||
# Printing the input list to check its content.
|
||||
mylog('debug', ['[Plugins] Helper base64 input: ', input])
|
||||
print('[Plugins] Helper base64 input: ')
|
||||
print(input)
|
||||
|
||||
|
||||
# Extract the base64-encoded subnet information from the first element
|
||||
# The format of the element is assumed to be like 'param=b<base64-encoded-data>'.
|
||||
mylog('debug', ['[Plugins] Helper base64 input: ', input])
|
||||
print('[Plugins] Helper base64 input: ')
|
||||
print(input)
|
||||
|
||||
# Extract the base64-encoded subnet information from the first element
|
||||
# The format of the element is assumed to be like 'param=b<base64-encoded-data>'.
|
||||
# Printing the extracted base64-encoded information.
|
||||
mylog('debug', ['[Plugins] Helper base64 inputParamBase64: ', inputParamBase64])
|
||||
|
||||
mylog('debug', ['[Plugins] Helper base64 inputParamBase64: ', inputParamBase64])
|
||||
|
||||
# Decode the base64-encoded subnet information to get the actual subnet information in ASCII format.
|
||||
result = base64.b64decode(inputParamBase64).decode('ascii')
|
||||
|
||||
# Print the decoded subnet information.
|
||||
mylog('debug', ['[Plugins] Helper base64 result: ', result])
|
||||
mylog('debug', ['[Plugins] Helper base64 result: ', result])
|
||||
|
||||
return result
|
||||
|
||||
|
||||
# -------------------------------------------------------------------
|
||||
def decode_settings_base64(encoded_str, convert_types=True):
|
||||
"""
|
||||
@@ -167,7 +170,7 @@ def decode_settings_base64(encoded_str, convert_types=True):
|
||||
def normalize_mac(mac):
|
||||
# Split the MAC address by colon (:) or hyphen (-) and convert each part to uppercase
|
||||
parts = mac.upper().split(':')
|
||||
|
||||
|
||||
# If the MAC address is split by hyphen instead of colon
|
||||
if len(parts) == 1:
|
||||
parts = mac.upper().split('-')
|
||||
@@ -177,14 +180,15 @@ def normalize_mac(mac):
|
||||
|
||||
# Join the parts with colon (:)
|
||||
normalized_mac = ':'.join(normalized_parts)
|
||||
|
||||
|
||||
return normalized_mac
|
||||
|
||||
|
||||
# -------------------------------------------------------------------
|
||||
class Plugin_Object:
|
||||
"""
|
||||
"""
|
||||
Plugin_Object class to manage one object introduced by the plugin.
|
||||
An object typically is a device but could also be a website or something
|
||||
An object typically is a device but could also be a website or something
|
||||
else that is monitored by the plugin.
|
||||
"""
|
||||
|
||||
@@ -222,8 +226,8 @@ class Plugin_Object:
|
||||
self.helpVal4 = helpVal4 or ""
|
||||
|
||||
def write(self):
|
||||
"""
|
||||
Write the object details as a string in the
|
||||
"""
|
||||
Write the object details as a string in the
|
||||
format required to write the result file.
|
||||
"""
|
||||
line = "{}|{}|{}|{}|{}|{}|{}|{}|{}|{}|{}|{}|{}\n".format(
|
||||
@@ -243,6 +247,7 @@ class Plugin_Object:
|
||||
)
|
||||
return line
|
||||
|
||||
|
||||
class Plugin_Objects:
|
||||
"""
|
||||
Plugin_Objects is the class that manages and holds all the objects created by the plugin.
|
||||
@@ -303,7 +308,3 @@ class Plugin_Objects:
|
||||
|
||||
def __len__(self):
|
||||
return len(self.objects)
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
@@ -10,12 +10,12 @@ import sys
|
||||
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
|
||||
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
|
||||
|
||||
from plugin_helper import Plugin_Objects, handleEmpty, normalize_mac
|
||||
from logger import mylog, Logger
|
||||
from helper import get_setting_value
|
||||
from const import logPath
|
||||
import conf
|
||||
from pytz import timezone
|
||||
from plugin_helper import Plugin_Objects, handleEmpty, normalize_mac # noqa: E402 [flake8 lint suppression]
|
||||
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
|
||||
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
|
||||
from const import logPath # noqa: E402 [flake8 lint suppression]
|
||||
import conf # noqa: E402 [flake8 lint suppression]
|
||||
from pytz import timezone # noqa: E402 [flake8 lint suppression]
|
||||
|
||||
# Make sure the TIMEZONE for logging is correct
|
||||
conf.tz = timezone(get_setting_value('TIMEZONE'))
|
||||
@@ -28,53 +28,60 @@ pluginName = "SNMPDSC"
|
||||
LOG_PATH = logPath + '/plugins'
|
||||
RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
|
||||
|
||||
# Workflow
|
||||
|
||||
def main():
|
||||
mylog('verbose', ['[SNMPDSC] In script '])
|
||||
def main():
|
||||
mylog('verbose', ['[SNMPDSC] In script '])
|
||||
|
||||
# init global variables
|
||||
global snmpWalkCmds
|
||||
|
||||
|
||||
parser = argparse.ArgumentParser(description='This plugin is used to discover devices via the arp table(s) of a RFC1213 compliant router or switch.')
|
||||
parser.add_argument('routers', action="store", help="IP(s) of routers, separated by comma (,) if passing multiple")
|
||||
parser = argparse.ArgumentParser(description='This plugin is used to discover devices via the arp table(s) of a RFC1213 compliant router or switch.')
|
||||
parser.add_argument(
|
||||
'routers',
|
||||
action="store",
|
||||
help="IP(s) of routers, separated by comma (,) if passing multiple"
|
||||
)
|
||||
|
||||
values = parser.parse_args()
|
||||
|
||||
timeoutSetting = get_setting_value("SNMPDSC_RUN_TIMEOUT")
|
||||
|
||||
plugin_objects = Plugin_Objects(RESULT_FILE)
|
||||
|
||||
if values.routers:
|
||||
snmpWalkCmds = values.routers.split('=')[1].replace('\'','')
|
||||
|
||||
if values.routers:
|
||||
snmpWalkCmds = values.routers.split('=')[1].replace('\'', '')
|
||||
|
||||
if ',' in snmpWalkCmds:
|
||||
commands = snmpWalkCmds.split(',')
|
||||
else:
|
||||
commands = [snmpWalkCmds]
|
||||
|
||||
|
||||
for cmd in commands:
|
||||
mylog('verbose', ['[SNMPDSC] Router snmpwalk command: ', cmd])
|
||||
mylog('verbose', ['[SNMPDSC] Router snmpwalk command: ', cmd])
|
||||
# split the string, remove white spaces around each item, and exclude any empty strings
|
||||
snmpwalkArgs = [arg.strip() for arg in cmd.split(' ') if arg.strip()]
|
||||
|
||||
# Execute N probes and insert in list
|
||||
probes = 1 # N probes
|
||||
|
||||
for _ in range(probes):
|
||||
output = subprocess.check_output (snmpwalkArgs, universal_newlines=True, stderr=subprocess.STDOUT, timeout=(timeoutSetting))
|
||||
|
||||
mylog('verbose', ['[SNMPDSC] output: ', output])
|
||||
for _ in range(probes):
|
||||
output = subprocess.check_output(
|
||||
snmpwalkArgs,
|
||||
universal_newlines=True,
|
||||
stderr=subprocess.STDOUT,
|
||||
timeout=(timeoutSetting)
|
||||
)
|
||||
|
||||
mylog('verbose', ['[SNMPDSC] output: ', output])
|
||||
|
||||
lines = output.split('\n')
|
||||
|
||||
for line in lines:
|
||||
for line in lines:
|
||||
|
||||
tmpSplt = line.split('"')
|
||||
tmpSplt = line.split('"')
|
||||
|
||||
if len(tmpSplt) == 3:
|
||||
|
||||
|
||||
ipStr = tmpSplt[0].split('.')[-4:] # Get the last 4 elements to extract the IP
|
||||
macStr = tmpSplt[1].strip().split(' ') # Remove leading/trailing spaces from MAC
|
||||
|
||||
@@ -82,19 +89,18 @@ def main():
|
||||
macAddress = ':'.join(macStr)
|
||||
ipAddress = '.'.join(ipStr)
|
||||
|
||||
mylog('verbose', [f'[SNMPDSC] IP: {ipAddress} MAC: {macAddress}'])
|
||||
|
||||
mylog('verbose', [f'[SNMPDSC] IP: {ipAddress} MAC: {macAddress}'])
|
||||
|
||||
plugin_objects.add_object(
|
||||
primaryId = handleEmpty(macAddress),
|
||||
secondaryId = handleEmpty(ipAddress.strip()), # Remove leading/trailing spaces from IP
|
||||
secondaryId = handleEmpty(ipAddress.strip()), # Remove leading/trailing spaces from IP
|
||||
watched1 = '(unknown)',
|
||||
watched2 = handleEmpty(snmpwalkArgs[6]), # router IP
|
||||
extra = handleEmpty(line),
|
||||
foreignKey = handleEmpty(macAddress) # Use the primary ID as the foreign key
|
||||
)
|
||||
else:
|
||||
mylog('verbose', ['[SNMPDSC] ipStr does not seem to contain a valid IP:', ipStr])
|
||||
|
||||
mylog('verbose', ['[SNMPDSC] ipStr does not seem to contain a valid IP:', ipStr])
|
||||
|
||||
elif line.startswith('ipNetToMediaPhysAddress'):
|
||||
# Format: snmpwalk -OXsq output
|
||||
@@ -115,12 +121,11 @@ def main():
|
||||
foreignKey = handleEmpty(macAddress)
|
||||
)
|
||||
|
||||
mylog('verbose', ['[SNMPDSC] Entries found: ', len(plugin_objects)])
|
||||
mylog('verbose', ['[SNMPDSC] Entries found: ', len(plugin_objects)])
|
||||
|
||||
plugin_objects.write_result_file()
|
||||
|
||||
|
||||
|
||||
# BEGIN
|
||||
if __name__ == '__main__':
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
|
||||
@@ -12,16 +12,16 @@ import base64
|
||||
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
|
||||
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
|
||||
|
||||
from plugin_helper import Plugin_Objects
|
||||
from utils.plugin_utils import get_plugins_configs, decode_and_rename_files
|
||||
from logger import mylog, Logger
|
||||
from const import fullDbPath, logPath
|
||||
from helper import get_setting_value
|
||||
from utils.datetime_utils import timeNowDB
|
||||
from utils.crypto_utils import encrypt_data
|
||||
from messaging.in_app import write_notification
|
||||
import conf
|
||||
from pytz import timezone
|
||||
from plugin_helper import Plugin_Objects # noqa: E402 [flake8 lint suppression]
|
||||
from utils.plugin_utils import get_plugins_configs, decode_and_rename_files # noqa: E402 [flake8 lint suppression]
|
||||
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
|
||||
from const import fullDbPath, logPath # noqa: E402 [flake8 lint suppression]
|
||||
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
|
||||
from utils.datetime_utils import timeNowDB # noqa: E402 [flake8 lint suppression]
|
||||
from utils.crypto_utils import encrypt_data # noqa: E402 [flake8 lint suppression]
|
||||
from messaging.in_app import write_notification # noqa: E402 [flake8 lint suppression]
|
||||
import conf # noqa: E402 [flake8 lint suppression]
|
||||
from pytz import timezone # noqa: E402 [flake8 lint suppression]
|
||||
|
||||
# Make sure the TIMEZONE for logging is correct
|
||||
conf.tz = timezone(get_setting_value('TIMEZONE'))
|
||||
@@ -41,21 +41,21 @@ plugin_objects = Plugin_Objects(RESULT_FILE)
|
||||
|
||||
|
||||
def main():
|
||||
mylog('verbose', [f'[{pluginName}] In script'])
|
||||
mylog('verbose', [f'[{pluginName}] In script'])
|
||||
|
||||
# Retrieve configuration settings
|
||||
plugins_to_sync = get_setting_value('SYNC_plugins')
|
||||
api_token = get_setting_value('API_TOKEN')
|
||||
api_token = get_setting_value('API_TOKEN')
|
||||
encryption_key = get_setting_value('SYNC_encryption_key')
|
||||
hub_url = get_setting_value('SYNC_hub_url')
|
||||
node_name = get_setting_value('SYNC_node_name')
|
||||
send_devices = get_setting_value('SYNC_devices')
|
||||
pull_nodes = get_setting_value('SYNC_nodes')
|
||||
|
||||
|
||||
# variables to determine operation mode
|
||||
is_hub = False
|
||||
is_node = False
|
||||
|
||||
|
||||
# Check if api_token set
|
||||
if not api_token:
|
||||
mylog('verbose', [f'[{pluginName}] ⚠ ERROR api_token not defined - quitting.'])
|
||||
@@ -63,23 +63,23 @@ def main():
|
||||
|
||||
# check if this is a hub or a node
|
||||
if len(hub_url) > 0 and (send_devices or plugins_to_sync):
|
||||
is_node = True
|
||||
mylog('verbose', [f'[{pluginName}] Mode 1: PUSH (NODE) - This is a NODE as SYNC_hub_url, SYNC_devices or SYNC_plugins are set'])
|
||||
if len(pull_nodes) > 0:
|
||||
is_node = True
|
||||
mylog('verbose', [f'[{pluginName}] Mode 1: PUSH (NODE) - This is a NODE as SYNC_hub_url, SYNC_devices or SYNC_plugins are set'])
|
||||
if len(pull_nodes) > 0:
|
||||
is_hub = True
|
||||
mylog('verbose', [f'[{pluginName}] Mode 2: PULL (HUB) - This is a HUB as SYNC_nodes is set'])

# Mode 1: PUSH/SEND (NODE)
if is_node:

# PUSHING/SENDING Plugins

# Get all plugin configurations
all_plugins = get_plugins_configs(False)

mylog('verbose', [f'[{pluginName}] plugins_to_sync {plugins_to_sync}'])

for plugin in all_plugins:
pref = plugin["unique_prefix"]

index = 0
if pref in plugins_to_sync:

@@ -100,9 +100,8 @@ def main():
send_data(api_token, file_content, encryption_key, file_path, node_name, pref, hub_url)
else:
mylog('verbose', [f'[{pluginName}] {file_path} not found'])

# PUSHING/SENDING devices
if send_devices:

@@ -117,27 +116,27 @@ def main():
mylog('verbose', [f'[{pluginName}] Sending file_content: "{file_content}"'])
send_data(api_token, file_content, encryption_key, file_path, node_name, pref, hub_url)
else:
mylog('verbose', [f'[{pluginName}] SYNC_hub_url not defined, skipping posting "Devices" data'])
else:
mylog('verbose', [f'[{pluginName}] SYNC_hub_url not defined, skipping posting "Plugins" and "Devices" data'])

# Mode 2: PULL/GET (HUB)

# PULLING DEVICES
file_prefix = 'last_result'

# pull data from nodes if specified
if is_hub:
for node_url in pull_nodes:
response_json = get_data(api_token, node_url)

# Extract node_name and base64 data
node_name = response_json.get('node_name', 'unknown_node')
data_base64 = response_json.get('data_base64', '')

# Decode base64 data
decoded_data = base64.b64decode(data_base64)

# Create log file name using node name
log_file_name = f'{file_prefix}.{node_name}.log'

@@ -148,18 +147,17 @@ def main():
message = f'[{pluginName}] Device data from node "{node_name}" written to {log_file_name}'
mylog('verbose', [message])
if lggr.isAbove('verbose'):
write_notification(message, 'info', timeNowDB())

# Process any received data for the Device DB table (ONLY JSON)
# Create the file path

# Get all "last_result" files from the sync folder, decode, rename them, and get the list of files
files_to_process = decode_and_rename_files(LOG_PATH, file_prefix)

if len(files_to_process) > 0:

mylog('verbose', [f'[{pluginName}] Mode 3: RECEIVE (HUB) - This is a HUB as received data found'])

# Connect to the App database
conn = sqlite3.connect(fullDbPath)

@@ -176,24 +174,24 @@ def main():
# only process received .log files, skipping the one logging the progress of this plugin
if file_name != 'last_result.log':
mylog('verbose', [f'[{pluginName}] Processing: "{file_name}"'])

# make sure the file has the correct name (e.g. last_result.encoded.Node_1.1.log) to skip any other plugin files
if len(file_name.split('.')) > 2:
# Extract node name from either last_result.decoded.Node_1.1.log or last_result.Node_1.log
parts = file_name.split('.')
# If decoded/encoded file, node name is at index 2; otherwise at index 1
syncHubNodeName = parts[2] if 'decoded' in file_name or 'encoded' in file_name else parts[1]

file_path = f"{LOG_PATH}/{file_name}"

with open(file_path, 'r') as f:
data = json.load(f)
for device in data['data']:
if device['devMac'] not in unique_mac_addresses:
device['devSyncHubNode'] = syncHubNodeName
unique_mac_addresses.add(device['devMac'])
device_data.append(device)

# Rename the file to "processed_" + current name
new_file_name = f"processed_{file_name}"
new_file_path = os.path.join(LOG_PATH, new_file_name)

@@ -209,7 +207,6 @@ def main():
placeholders = ', '.join('?' for _ in unique_mac_addresses)
cursor.execute(f'SELECT devMac FROM Devices WHERE devMac IN ({placeholders})', tuple(unique_mac_addresses))
existing_mac_addresses = set(row[0] for row in cursor.fetchall())

# insert devices into the last_result.log and thus CurrentScan table to manage state
for device in device_data:

@@ -228,7 +225,7 @@ def main():
# Filter out existing devices
new_devices = [device for device in device_data if device['devMac'] not in existing_mac_addresses]

# Remove 'rowid' key if it exists
for device in new_devices:
device.pop('rowid', None)
device.pop('devStatus', None)

@@ -257,7 +254,6 @@ def main():
mylog('verbose', [message])
write_notification(message, 'info', timeNowDB())

# Commit and close the connection
conn.commit()

@@ -268,6 +264,7 @@ def main():
return 0

# ------------------------------------------------------------------
# Data retrieval methods
api_endpoints = [
@@ -275,6 +272,7 @@ api_endpoints = [
"/plugins/sync/hub.php" # Legacy PHP endpoint
]

# send data to the HUB
def send_data(api_token, file_content, encryption_key, file_path, node_name, pref, hub_url):
"""Send encrypted data to HUB, preferring /sync endpoint and falling back to PHP version."""

@@ -345,6 +343,5 @@ def get_data(api_token, node_url):
return ""

if __name__ == '__main__':
main()
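The `api_endpoints` list above drives a try-then-fall-back pattern: `send_data()` prefers the newer `/sync` endpoint and only drops to the legacy `hub.php` route if that fails. The following is a minimal, hypothetical sketch of that pattern only; the helper name, auth header, and payload fields are illustrative assumptions, not the plugin's actual code:

```python
import requests

def send_with_fallback(hub_url, api_token, payload,
                       endpoints=("/sync", "/plugins/sync/hub.php"), timeout=10):
    """Try each endpoint in order and return the first successful response."""
    headers = {"Authorization": f"Bearer {api_token}"}  # assumed auth scheme
    for endpoint in endpoints:
        try:
            resp = requests.post(f"{hub_url.rstrip('/')}{endpoint}",
                                 json=payload, headers=headers, timeout=timeout)
            if resp.ok:
                return resp
        except requests.RequestException:
            continue  # fall through to the legacy endpoint
    return None
```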
@@ -10,12 +10,11 @@ from unifi_sm_api.api import SiteManagerAPI
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])

from plugin_helper import Plugin_Objects, decode_settings_base64
from logger import mylog, Logger
from const import logPath
from helper import get_setting_value
import conf
from plugin_helper import Plugin_Objects, decode_settings_base64 # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from const import logPath # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
import conf # noqa: E402 [flake8 lint suppression]

# Make sure the TIMEZONE for logging is correct
conf.tz = timezone(get_setting_value('TIMEZONE'))

@@ -35,13 +34,13 @@ plugin_objects = Plugin_Objects(RESULT_FILE)

def main():
mylog('verbose', [f'[{pluginName}] In script'])

# Retrieve configuration settings
unifi_sites_configs = get_setting_value('UNIFIAPI_sites')

mylog('verbose', [f'[{pluginName}] number of unifi_sites_configs: {len(unifi_sites_configs)}'])

for site_config in unifi_sites_configs:

siteDict = decode_settings_base64(site_config)

@@ -50,11 +49,11 @@ def main():
mylog('none', [f'[{pluginName}] Connecting to: {siteDict["UNIFIAPI_site_name"]}'])

api = SiteManagerAPI(
api_key=siteDict["UNIFIAPI_api_key"],
version=siteDict["UNIFIAPI_api_version"],
base_url=siteDict["UNIFIAPI_base_url"],
verify_ssl=siteDict["UNIFIAPI_verify_ssl"]
)

sites_resp = api.get_sites()
sites = sites_resp.get("data", [])

@@ -67,18 +66,18 @@ def main():
# Process the data into native application tables
if len(device_data) > 0:

# insert devices into the last_result.log
for device in device_data:
plugin_objects.add_object(
primaryId = device['dev_mac'], # mac
secondaryId = device['dev_ip'], # IP
watched1 = device['dev_name'], # name
watched2 = device['dev_type'], # device_type (AP/Switch etc)
watched3 = device['dev_connected'], # connectedAt or empty
watched4 = device['dev_parent_mac'],# parent_mac or "Internet"
extra = '',
foreignKey = device['dev_mac']
)
plugin_objects.add_object(
primaryId = device['dev_mac'], # mac
secondaryId = device['dev_ip'], # IP
watched1 = device['dev_name'], # name
watched2 = device['dev_type'], # device_type (AP/Switch etc)
watched3 = device['dev_connected'], # connectedAt or empty
watched4 = device['dev_parent_mac'], # parent_mac or "Internet"
extra = '',
foreignKey = device['dev_mac']
)

mylog('verbose', [f'[{pluginName}] New entries: "{len(device_data)}"'])

@@ -87,6 +86,7 @@ def main():
return 0

# retrieve data
def get_device_data(site, api):
device_data = []
@@ -146,8 +146,8 @@ def get_device_data(site, api):
dev_parent_mac = resolve_parent_mac(uplinkDeviceId)

device_data.append({
"dev_mac": dev_mac,
"dev_ip": dev_ip,
"dev_name": dev_name,
"dev_type": dev_type,
"dev_connected": dev_connected,
@@ -14,12 +14,12 @@ from pyunifi.controller import Controller
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])

from plugin_helper import Plugin_Objects, rmBadChars, is_typical_router_ip, is_mac
from logger import mylog, Logger
from helper import get_setting_value, normalize_string
import conf
from pytz import timezone
from const import logPath
from plugin_helper import Plugin_Objects, rmBadChars, is_typical_router_ip, is_mac # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value, normalize_string # noqa: E402 [flake8 lint suppression]
import conf # noqa: E402 [flake8 lint suppression]
from pytz import timezone # noqa: E402 [flake8 lint suppression]
from const import logPath # noqa: E402 [flake8 lint suppression]

# Make sure the TIMEZONE for logging is correct
conf.tz = timezone(get_setting_value('TIMEZONE'))

@@ -37,21 +37,16 @@ LOCK_FILE = os.path.join(LOG_PATH, f'full_run.{pluginName}.lock')
urllib3.disable_warnings(InsecureRequestWarning)

# Workflow
def main():

mylog('verbose', [f'[{pluginName}] In script'])

# init global variables
global UNIFI_USERNAME, UNIFI_PASSWORD, UNIFI_HOST, UNIFI_SITES, PORT, VERIFYSSL, VERSION, FULL_IMPORT

# parse output
plugin_objects = Plugin_Objects(RESULT_FILE)

UNIFI_USERNAME = get_setting_value("UNFIMP_username")
UNIFI_PASSWORD = get_setting_value("UNFIMP_password")
UNIFI_HOST = get_setting_value("UNFIMP_host")

@@ -64,12 +59,11 @@ def main():
plugin_objects = get_entries(plugin_objects)

plugin_objects.write_result_file()

mylog('verbose', [f'[{pluginName}] Scan finished, found {len(plugin_objects)} devices'])

# .............................................
def get_entries(plugin_objects: Plugin_Objects) -> Plugin_Objects:
global VERIFYSSL

@@ -79,27 +73,26 @@ def get_entries(plugin_objects: Plugin_Objects) -> Plugin_Objects:

mylog('verbose', [f'[{pluginName}] sites: {UNIFI_SITES}'])

if (VERIFYSSL.upper() == "TRUE"):
VERIFYSSL = True
else:
VERIFYSSL = False

# mylog('verbose', [f'[{pluginName}] sites: {sites}'])

for site in UNIFI_SITES:

mylog('verbose', [f'[{pluginName}] site: {site}'])

c = Controller(
UNIFI_HOST,
UNIFI_USERNAME,
UNIFI_PASSWORD,
port=PORT,
version=VERSION,
ssl_verify=VERIFYSSL,
site_id=site)

online_macs = set()
processed_macs = []

@@ -114,7 +107,7 @@ def get_entries(plugin_objects: Plugin_Objects) -> Plugin_Objects:
plugin_objects=plugin_objects,
device_label='client',
device_vendor="",
force_import=True # These are online clients, force import
force_import=True  # These are online clients, force import
)

mylog('verbose', [f'[{pluginName}] Found {len(plugin_objects)} Online Devices'])
@@ -154,11 +147,9 @@ def get_entries(plugin_objects: Plugin_Objects) -> Plugin_Objects:

mylog('verbose', [f'[{pluginName}] Found {len(plugin_objects)} Users'])

mylog('verbose', [f'[{pluginName}] check if Lock file needs to be modified'])
set_lock_file_value(FULL_IMPORT, lock_file_value)

mylog('verbose', [f'[{pluginName}] Found {len(plugin_objects)} Clients overall'])

return plugin_objects
@@ -173,19 +164,19 @@ def collect_details(device_type, devices, online_macs, processed_macs, plugin_ob
name = get_name(get_unifi_val(device, 'name'), get_unifi_val(device, 'hostname'))
ipTmp = get_ip(get_unifi_val(device, 'lan_ip'), get_unifi_val(device, 'last_ip'), get_unifi_val(device, 'fixed_ip'), get_unifi_val(device, 'ip'))
macTmp = device['mac']

# continue only if valid MAC address
if is_mac(macTmp):
status = 1 if macTmp in online_macs else device.get('state', 0)
deviceType = device_type.get(device.get('type'), '')
parentMac = get_parent_mac(get_unifi_val(device, 'uplink_mac'), get_unifi_val(device, 'ap_mac'), get_unifi_val(device, 'sw_mac'))

# override parent MAC if this is a router
if parentMac == 'null' and is_typical_router_ip(ipTmp):
parentMac = 'Internet'

# Add object only if not processed
if macTmp not in processed_macs and ( status == 1 or force_import is True ):
if macTmp not in processed_macs and (status == 1 or force_import is True):
plugin_objects.add_object(
primaryId=macTmp,
secondaryId=ipTmp,
@@ -203,7 +194,8 @@ def collect_details(device_type, devices, online_macs, processed_macs, plugin_ob
processed_macs.append(macTmp)
else:
mylog('verbose', [f'[{pluginName}] Skipping, not a valid MAC address: {macTmp}'])

# -----------------------------------------------------------------------------
def get_unifi_val(obj, key, default='null'):
if isinstance(obj, dict):
@@ -212,9 +204,9 @@ def get_unifi_val(obj, key, default='null'):
for k, v in obj.items():
if isinstance(v, dict):
result = get_unifi_val(v, key, default)
if result not in ['','None', None, 'null']:
if result not in ['', 'None', None, 'null']:
return result

mylog('trace', [f'[{pluginName}] Value not found for key "{key}" in obj "{json.dumps(obj)}"'])
return default

@@ -226,6 +218,7 @@ def get_name(*names: str) -> str:
return rmBadChars(name)
return 'null'

# -----------------------------------------------------------------------------
def get_parent_mac(*macs: str) -> str:
for mac in macs:
@@ -233,6 +226,7 @@ def get_parent_mac(*macs: str) -> str:
return mac
return 'null'

# -----------------------------------------------------------------------------
def get_port(*ports: str) -> str:
for port in ports:
@@ -240,12 +234,6 @@ def get_port(*ports: str) -> str:
return port
return 'null'

# -----------------------------------------------------------------------------
def get_port(*macs: str) -> str:
for mac in macs:
if mac and mac != 'null':
return mac
return 'null'

# -----------------------------------------------------------------------------
def get_ip(*ips: str) -> str:
@@ -271,7 +259,7 @@ def set_lock_file_value(config_value: str, lock_file_value: bool) -> None:

mylog('verbose', [f'[{pluginName}] Setting lock value for "full import" to {out}'])
with open(LOCK_FILE, 'w') as lock_file:
lock_file.write(str(out))

# -----------------------------------------------------------------------------
@@ -286,15 +274,16 @@ def read_lock_file() -> bool:

# -----------------------------------------------------------------------------
def check_full_run_state(config_value: str, lock_file_value: bool) -> bool:
if config_value == 'always' or (config_value == 'once' and lock_file_value == False):
if config_value == 'always' or (config_value == 'once' and lock_file_value is False):
mylog('verbose', [f'[{pluginName}] Full import needs to be done: config_value: {config_value} and lock_file_value: {lock_file_value}'])
return True
else:
mylog('verbose', [f'[{pluginName}] Full import NOT needed: config_value: {config_value} and lock_file_value: {lock_file_value}'])
return False

#===============================================================================
# ===============================================================================
# BEGIN
#===============================================================================
# ===============================================================================
if __name__ == '__main__':
main()
@@ -9,13 +9,13 @@ import sqlite3
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])

from plugin_helper import Plugin_Objects, handleEmpty
from logger import mylog, Logger
from helper import get_setting_value
from const import logPath, applicationPath, fullDbPath
from scan.device_handling import query_MAC_vendor
import conf
from pytz import timezone
from plugin_helper import Plugin_Objects, handleEmpty # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
from const import logPath, applicationPath, fullDbPath # noqa: E402 [flake8 lint suppression]
from scan.device_handling import query_MAC_vendor # noqa: E402 [flake8 lint suppression]
import conf # noqa: E402 [flake8 lint suppression]
from pytz import timezone # noqa: E402 [flake8 lint suppression]

# Make sure the TIMEZONE for logging is correct
conf.tz = timezone(get_setting_value('TIMEZONE'))
@@ -25,17 +25,17 @@ Logger(get_setting_value('LOG_LEVEL'))

pluginName = 'VNDRPDT'

LOG_PATH = logPath + '/plugins'
LOG_FILE = os.path.join(LOG_PATH, f'script.{pluginName}.log')
RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')

def main():
mylog('verbose', ['[VNDRPDT] In script'])

# Get newest DB
update_vendor_database()

# Resolve missing vendors
plugin_objects = Plugin_Objects(RESULT_FILE)
@@ -43,67 +43,67 @@ def main():
plugin_objects = update_vendors(fullDbPath, plugin_objects)

plugin_objects.write_result_file()

mylog('verbose', ['[VNDRPDT] Update complete'])

return 0

#===============================================================================
# ===============================================================================
# Update device vendors database
#===============================================================================
# ===============================================================================
def update_vendor_database():

# Update vendors DB (iab oui)
mylog('verbose', [' Updating vendors DB (iab & oui)'])
update_args = ['sh', applicationPath + '/services/update_vendors.sh']

# Execute command
try:
# try running a subprocess safely
update_output = subprocess.check_output (update_args)
subprocess.check_output(update_args)
except subprocess.CalledProcessError as e:
# An error occurred, handle it
mylog('verbose', [' FAILED: Updating vendors DB, set LOG_LEVEL=debug for more info'])
mylog('verbose', [e.output])

# ------------------------------------------------------------------------------
# resolve missing vendors
def update_vendors (dbPath, plugin_objects):
def update_vendors(dbPath, plugin_objects):

# Connect to the App SQLite database
conn = sqlite3.connect(dbPath)
sql = conn.cursor()

# Initialize variables
recordsToUpdate = []
ignored = 0
notFound = 0

mylog('verbose', [' Searching devices vendor'])

# Get devices without a vendor
sql.execute ("""SELECT
sql.execute("""SELECT
devMac,
devLastIP,
devName,
devVendor
FROM Devices
WHERE devVendor = '(unknown)'
OR devVendor = '(Unknown)'
OR devVendor = ''
OR devVendor IS NULL
""")
devices = sql.fetchall()
conn.commit()

# Close the database connection
conn.close()

# All devices loop
for device in devices:
# Search vendor in HW Vendors DB
vendor = query_MAC_vendor (device[0])
vendor = query_MAC_vendor(device[0])
if vendor == -1 :
notFound += 1
elif vendor == -2 :
@@ -112,27 +112,25 @@ def update_vendors (dbPath, plugin_objects):
plugin_objects.add_object(
primaryId = handleEmpty(device[0]), # MAC (Device Name)
secondaryId = handleEmpty(device[1]), # IP Address (always 0.0.0.0)
watched1 = handleEmpty(vendor),
watched2 = handleEmpty(device[2]), # Device name
watched3 = "",
watched4 = "",
extra = "",
foreignKey = handleEmpty(device[0])
)

# Print log
mylog('verbose', [" Devices Ignored : ", ignored])
mylog('verbose', [" Devices with missing vendor : ", len(devices)])
mylog('verbose', [" Vendors Not Found : ", notFound])
mylog('verbose', [" Vendors updated : ", len(plugin_objects) ])
mylog('verbose', [" Vendors updated : ", len(plugin_objects)])

return plugin_objects

#===============================================================================
# ===============================================================================
# BEGIN
#===============================================================================
# ===============================================================================
if __name__ == '__main__':
main()
@@ -9,13 +9,13 @@ from wakeonlan import send_magic_packet
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])

from plugin_helper import Plugin_Objects
from logger import mylog, Logger
from const import logPath
from helper import get_setting_value
from database import DB
from models.device_instance import DeviceInstance
import conf
from plugin_helper import Plugin_Objects # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from const import logPath # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
from database import DB # noqa: E402 [flake8 lint suppression]
from models.device_instance import DeviceInstance # noqa: E402 [flake8 lint suppression]
import conf # noqa: E402 [flake8 lint suppression]

# Make sure the TIMEZONE for logging is correct
conf.tz = timezone(get_setting_value('TIMEZONE'))
@@ -34,9 +34,8 @@ RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
plugin_objects = Plugin_Objects(RESULT_FILE)

def main():
mylog('none', [f'[{pluginName}] In script'])

# Retrieve configuration settings
broadcast_ips = get_setting_value('WOL_broadcast_ips')
@@ -58,7 +57,7 @@ def main():
devices_to_wake = device_handler.getOffline()

elif 'down' in devices_to_wake:

devices_to_wake = device_handler.getDown()

else:
@@ -89,15 +88,16 @@ def main():
# log result
plugin_objects.write_result_file()
else:
mylog('none', [f'[{pluginName}] No devices to wake'])

mylog('none', [f'[{pluginName}] Script finished'])

return 0

# wake
def execute(port, ip, mac, name):

result = 'null'
try:
# Send the magic packet to wake up the device
@@ -105,7 +105,7 @@ def execute(port, ip, mac, name):
mylog('verbose', [f'[{pluginName}] Magic packet sent to {mac} ({name})'])

result = 'success'

except Exception as e:
result = str(e)
mylog('verbose', [f'[{pluginName}] Failed to send magic packet to {mac} ({name}): {e}'])
@@ -113,5 +113,6 @@ def execute(port, ip, mac, name):
# Return the data result
return result

if __name__ == '__main__':
main()
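The `execute()` function above wraps the `wakeonlan` library's `send_magic_packet` call. A minimal, self-contained sketch of that call follows; the MAC, broadcast address, and port values are illustrative only and are not NetAlertX settings (the plugin reads its own values from `WOL_broadcast_ips`):

```python
from wakeonlan import send_magic_packet

# Send a Wake-on-LAN magic packet for one MAC to a given broadcast address/port.
# ip_address defaults to 255.255.255.255 and port to 9 in the library.
send_magic_packet("aa:bb:cc:dd:ee:ff", ip_address="192.168.1.255", port=9)
```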
@@ -12,12 +12,12 @@ from urllib3.exceptions import InsecureRequestWarning
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])

from plugin_helper import Plugin_Objects
from const import logPath
from helper import get_setting_value
import conf
from pytz import timezone
from logger import mylog, Logger
from plugin_helper import Plugin_Objects # noqa: E402 [flake8 lint suppression]
from const import logPath # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
import conf # noqa: E402 [flake8 lint suppression]
from pytz import timezone # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]

# Make sure the TIMEZONE for logging is correct
conf.tz = timezone(get_setting_value('TIMEZONE'))
@@ -30,15 +30,14 @@ pluginName = 'WEBMON'
LOG_PATH = logPath + '/plugins'
RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')

mylog('verbose', [f'[{pluginName}] In script'])

def main():
values = get_setting_value('WEBMON_urls_to_check')

mylog('verbose', [f'[{pluginName}] Checking URLs: {values}'])

if len(values) > 0:
plugin_objects = Plugin_Objects(RESULT_FILE)
@@ -48,12 +47,13 @@ def main():
else:
return

def check_services_health(site):

mylog('verbose', [f'[{pluginName}] Checking {site}'])

urllib3.disable_warnings(InsecureRequestWarning)

try:
resp = requests.get(site, verify=False, timeout=get_setting_value('WEBMON_RUN_TIMEOUT'), headers={"User-Agent": "NetAlertX"})
latency = resp.elapsed.total_seconds()
@@ -79,12 +79,13 @@ def check_services_health(site):

return status, latency

def service_monitoring(urls, plugin_objects):
for site in urls:
status, latency = check_services_health(site)
plugin_objects.add_object(
primaryId=site,
secondaryId='null',
watched1=status,
watched2=latency,
watched3='null',
@@ -94,7 +95,6 @@ def service_monitoring(urls, plugin_objects):
)
return plugin_objects

if __name__ == '__main__':
sys.exit(main())

@@ -66,6 +66,7 @@ CREATE TABLE Devices (
devIsArchived BOOLEAN NOT NULL DEFAULT (0) CHECK (devIsArchived IN (0, 1)),
devParentMAC TEXT,
devParentPort INTEGER,
devParentRelType TEXT,
devIcon TEXT,
devGUID TEXT,
devSite TEXT,

@@ -8,4 +8,11 @@ markers = [
"docker: requires docker socket and elevated container permissions",
"compose: Tests docker compose files. Slow.",
"feature_complete: extended coverage suite not run by default",
]

[tool.ruff]
line-length = 180
ignore = ["E203", "C901"] # global ignores

[tool.ruff.lint]
select = ["E", "F"]
extend-select = ["E402"]
run_docker_tests.sh (new executable file, 87 lines)
@@ -0,0 +1,87 @@
#!/bin/bash
#
# run_docker_tests.sh
#
# This script automates the entire process of testing the application
# within its intended, privileged devcontainer environment. It is
# idempotent and can be run repeatedly.
#

set -e

# --- 1. Regenerate Devcontainer Dockerfile ---
echo "--- Regenerating .devcontainer/Dockerfile from source ---"
if [ -f ".devcontainer/scripts/generate-configs.sh" ]; then
/bin/bash .devcontainer/scripts/generate-configs.sh
else
echo "ERROR: generate-configs.sh not found. Aborting."
exit 1
fi

# --- 2. Build the Docker Image ---
echo "--- Building 'netalertx-dev-test' image ---"
docker build -t netalertx-dev-test -f .devcontainer/Dockerfile . --target netalertx-devcontainer

# --- 3. Cleanup Old Containers ---
echo "--- Cleaning up previous container instance (if any) ---"
docker stop netalertx-test-container >/dev/null 2>&1 || true
docker rm netalertx-test-container >/dev/null 2>&1 || true

# --- 4. Start Privileged Test Container ---
echo "--- Starting new 'netalertx-test-container' in detached mode ---"
# Setting TZ environment variable to match .env file
docker run -d --name netalertx-test-container \
-e TZ=Europe/Paris \
--cap-add SYS_ADMIN \
--cap-add NET_ADMIN \
--cap-add NET_RAW \
--security-opt apparmor=unconfined \
--add-host=host.docker.internal:host-gateway \
-v /var/run/docker.sock:/var/run/docker.sock \
-v "$(pwd)":/workspaces/NetAlertX \
netalertx-dev-test

# --- 5. Install Python test dependencies ---
echo "--- Installing Python test dependencies into venv ---"
docker exec netalertx-test-container /opt/venv/bin/pip3 install --ignore-installed pytest docker debugpy

# --- 6. Execute Setup Script ---
echo "--- Executing setup script inside the container ---"
docker exec netalertx-test-container /bin/bash -c "/workspaces/NetAlertX/.devcontainer/scripts/setup.sh"

# --- 7. Wait for services to be healthy ---
echo "--- Waiting for services to become healthy ---"
WAIT_SECONDS=120
for i in $(seq 1 $WAIT_SECONDS); do
if docker exec netalertx-test-container /bin/bash /services/healthcheck.sh; then
echo "--- Services are healthy! ---"
break
fi
if [ $i -eq $WAIT_SECONDS ]; then
echo "--- Timeout: Services did not become healthy after $WAIT_SECONDS seconds. ---"
docker logs netalertx-test-container
exit 1
fi
echo " ... waiting ($i/$WAIT_SECONDS)"
sleep 1
done

# --- 8. Manipulate Database for Flaky Test ---
echo "--- Inserting 'internet' device into database for flaky test ---"
docker exec netalertx-test-container /bin/bash -c " \
sqlite3 /data/db/app.db \"INSERT OR IGNORE INTO Devices (devMac, devFirstConnection, devLastConnection, devLastIP, devName) VALUES ('internet', DATETIME('now'), DATETIME('now'), '0.0.0.0', 'Internet Gateway');\" \
"

# --- 9. Execute Tests ---
echo "--- Executing tests inside the container ---"
docker exec netalertx-test-container /bin/bash -c " \
cd /workspaces/NetAlertX && /opt/venv/bin/pytest -m 'not (docker or compose or feature_complete)' --cache-clear -o cache_dir=/tmp/.pytest_cache; \
"

# --- 10. Final Teardown ---
echo "--- Tearing down the test container ---"
docker stop netalertx-test-container
docker rm netalertx-test-container

echo "--- Test run complete! ---"
@@ -1,4 +1,3 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
@@ -19,11 +18,12 @@ import subprocess
import json
import os

def check_new_devices():
# Get API path from environment variable, fallback to /tmp/api
api_path = os.environ.get('NETALERTX_API', '/tmp/api')
table_devices_path = f'{api_path}/table_devices.json'

try:
# Fetch the JSON file from the Docker container
result = subprocess.run(
@@ -73,6 +73,6 @@ def check_new_devices():
)
print(f"1 NetAlertX_New_Devices - WARNING - Found {len(new_devices)} new device(s): {device_list_str}")

if __name__ == "__main__":
check_new_devices()

@@ -1,8 +1,8 @@
#!/usr/bin/env python3
import subprocess
import sys
import os

def run_sqlite_command(command):
# Use environment variable with fallback
db_path = os.path.join(
@@ -19,18 +19,19 @@ def run_sqlite_command(command):
print(f"Error executing command: {e}")
return None

def check_and_clean_device():
while True:
print("\nDevice Cleanup Tool")
print("1. Check/Clean by MAC address")
print("2. Check/Clean by IP address")
print("3. Exit")

choice = input("\nSelect option (1-3): ")

if choice == "1":
mac = input("Enter MAC address (format: xx:xx:xx:xx:xx:xx): ").lower()

# Check all tables for MAC
tables_checks = [
f"SELECT 'Events' as source, * FROM Events WHERE eve_MAC='{mac}'",
@@ -40,14 +41,14 @@ def check_and_clean_device():
f"SELECT 'AppEvents' as source, * FROM AppEvents WHERE ObjectPrimaryID LIKE '%{mac}%' OR ObjectSecondaryID LIKE '%{mac}%'",
f"SELECT 'Plugins_Objects' as source, * FROM Plugins_Objects WHERE Object_PrimaryID LIKE '%{mac}%'"
]

found = False
for check in tables_checks:
result = run_sqlite_command(check)
if result and result.strip():
found = True
print(f"\nFound entries:\n{result}")

if found:
confirm = input("\nWould you like to clean these entries? (y/n): ")
if confirm.lower() == 'y':
@@ -60,16 +61,16 @@ def check_and_clean_device():
f"DELETE FROM AppEvents WHERE ObjectPrimaryID LIKE '%{mac}%' OR ObjectSecondaryID LIKE '%{mac}%'",
f"DELETE FROM Plugins_Objects WHERE Object_PrimaryID LIKE '%{mac}%'"
]

for delete in deletes:
run_sqlite_command(delete)
print("Cleanup completed!")
else:
print("\nNo entries found for this MAC address")

elif choice == "2":
ip = input("Enter IP address (format: xxx.xxx.xxx.xxx): ")

# Check all tables for IP
tables_checks = [
f"SELECT 'Events' as source, * FROM Events WHERE eve_IP='{ip}'",
@@ -79,14 +80,14 @@ def check_and_clean_device():
f"SELECT 'AppEvents' as source, * FROM AppEvents WHERE ObjectSecondaryID LIKE '%{ip}%'",
f"SELECT 'Plugins_Objects' as source, * FROM Plugins_Objects WHERE Object_SecondaryID LIKE '%{ip}%'"
]

found = False
for check in tables_checks:
result = run_sqlite_command(check)
if result and result.strip():
found = True
print(f"\nFound entries:\n{result}")

if found:
confirm = input("\nWould you like to clean these entries? (y/n): ")
if confirm.lower() == 'y':
@@ -99,19 +100,20 @@ def check_and_clean_device():
f"DELETE FROM AppEvents WHERE ObjectSecondaryID LIKE '%{ip}%'",
f"DELETE FROM Plugins_Objects WHERE Object_SecondaryID LIKE '%{ip}%'"
]

for delete in deletes:
run_sqlite_command(delete)
print("Cleanup completed!")
else:
print("\nNo entries found for this IP address")

elif choice == "3":
print("\nExiting...")
break

else:
print("\nInvalid option, please try again")

if __name__ == "__main__":
check_and_clean_device()
@@ -1,6 +1,5 @@
#!/usr/bin/env python3
import paramiko
import re
from datetime import datetime
import argparse
import sys
@@ -8,6 +7,9 @@ from pathlib import Path
import time
import logging

logger = None

def setup_logging(debug=False):
"""Configure logging based on debug flag."""
level = logging.DEBUG if debug else logging.INFO
@@ -18,6 +20,7 @@ def setup_logging(debug=False):
)
return logging.getLogger(__name__)

def parse_timestamp(date_str):
"""Convert OPNsense timestamp to Unix epoch time."""
try:
@@ -27,7 +30,8 @@ def parse_timestamp(date_str):
dt = datetime.strptime(clean_date, '%Y/%m/%d %H:%M:%S')
return int(dt.timestamp())
except Exception as e:
logger.error(f"Failed to parse timestamp: {date_str}")
if logger:
logger.error(f"Failed to parse timestamp: {date_str} ({e})")
return None

@@ -39,8 +43,14 @@ def get_lease_file(hostname, username, password=None, key_filename=None, port=22

try:
logger.debug(f"Attempting to connect to {hostname}:{port} as {username}")
ssh.connect(hostname, port=port, username=username,
password=password, key_filename=key_filename)
ssh.connect(
hostname,
port=port,
username=username,
password=password,
key_filename=key_filename
)

# Get an interactive shell session
logger.debug("Opening interactive SSH channel")
@@ -74,11 +84,23 @@ def get_lease_file(hostname, username, password=None, key_filename=None, port=22

# Clean up the output by removing the command echo and shell prompts
lines = output.split('\n')

# Remove first line (command echo) and any lines containing shell prompts
cleaned_lines = [line for line in lines
if not line.strip().startswith(command.strip())
and not line.strip().endswith('> ')
and not line.strip().endswith('# ')]
cmd = command.strip()

cleaned_lines = []
for line in lines:
stripped = line.strip()

if stripped.startswith(cmd):
continue
if stripped.endswith('> '):
continue
if stripped.endswith('# '):
continue

cleaned_lines.append(line)

cleaned_output = '\n'.join(cleaned_lines)

logger.debug(f"Final cleaned output length: {len(cleaned_output)} characters")
@@ -156,9 +178,7 @@ def parse_lease_file(lease_content):

# Filter only active leases
active_leases = [lease for lease in leases
if lease.get('state') == 'active'
and 'mac' in lease
and 'ip' in lease]
if lease.get('state') == 'active' and 'mac' in lease and 'ip' in lease]

logger.debug(f"Found {len(active_leases)} active leases out of {len(leases)} total leases")
logger.debug("Active leases:")
@@ -206,6 +226,7 @@ def convert_to_dnsmasq(leases):

return dnsmasq_lines

def main():
parser = argparse.ArgumentParser(description='Convert OPNsense DHCP leases to dnsmasq format')
parser.add_argument('--host', required=True, help='OPNsense hostname or IP')
@@ -219,6 +240,7 @@ def main():
args = parser.parse_args()

# Setup logging
global logger
logger = setup_logging(args.debug)

try:
@@ -255,5 +277,6 @@ def main():
logger.error(f"Error: {str(e)}")
sys.exit(1)

if __name__ == '__main__':
main()
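The `convert_to_dnsmasq()` function above (only partially shown in this hunk) renders the parsed, active OPNsense leases into dnsmasq's lease-file layout, commonly `<expiry-epoch> <mac> <ip> <hostname> <client-id>`. The sketch below is a rough, hypothetical illustration of that target format; the dict keys are assumed to match what `parse_lease_file()` produces and the helper name is invented:

```python
def lease_to_dnsmasq_line(lease):
    # Assumed dnsmasq lease layout: "<expiry epoch> <mac> <ip> <hostname> <client-id>"
    return " ".join([
        str(lease.get("ends", 0)),           # expiry as Unix epoch (see parse_timestamp)
        lease["mac"],
        lease["ip"],
        lease.get("hostname") or "*",        # "*" when the hostname is unknown
        "*",                                 # client-id, "*" if not tracked
    ])

print(lease_to_dnsmasq_line(
    {"ends": 1700000000, "mac": "aa:bb:cc:dd:ee:ff", "ip": "192.168.1.23", "hostname": "printer"}
))
```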
@@ -22,9 +22,9 @@ from pathlib import Path

# Register NetAlertX modules
import conf
from const import *
from const import fullConfPath, sql_new_devices
from logger import mylog
from helper import filePermissions
from utils.datetime_utils import timeNowTZ
from app_state import updateState
from api import update_api
@@ -48,12 +48,12 @@ main structure of NetAlertX
Initialise All
Rename old settings
start Loop forever
initialise loop
(re)import config
(re)import plugin config
run plugins (once)
run frontend events
update API
run plugins (scheduled)
processing scan results
run plugins (after Scan)
@@ -111,7 +111,7 @@ def main():
loop_start_time = conf.loop_start_time # TODO fix

# Handle plugins executed ONCE
if conf.plugins_once_run == False:
if conf.plugins_once_run is False:
pm.run_plugin_scripts("once")
conf.plugins_once_run = True

@@ -146,7 +146,7 @@ def main():
processScan = updateState("Check scan").processScan
mylog("debug", [f"[MAIN] processScan: {processScan}"])

if processScan == True:
if processScan is True:
mylog("debug", "[MAIN] start processing scan results")
process_scan(db)
updateState("Scan processed", None, None, None, None, False)

@@ -1,3 +1,4 @@
#!/usr/bin/env python
import json
import time
import threading
@@ -110,7 +111,6 @@ def update_api(
# -------------------------------------------------------------------------------
class api_endpoint_class:
def __init__(self, db, forceUpdate, query, path, is_ad_hoc_user_event=False):
global apiEndpoints

current_time = timeNowTZ()

@@ -145,8 +145,7 @@ class api_endpoint_class:
self.needsUpdate = True
# Only update changeDetectedWhen if it hasn't been set recently
if not self.changeDetectedWhen or current_time > (
self.changeDetectedWhen
+ datetime.timedelta(seconds=self.debounce_interval)
self.changeDetectedWhen + datetime.timedelta(seconds=self.debounce_interval)
):
self.changeDetectedWhen = (
current_time # Set timestamp for change detection
@@ -164,8 +163,7 @@ class api_endpoint_class:
self.needsUpdate = True
# Only update changeDetectedWhen if it hasn't been set recently
if not self.changeDetectedWhen or current_time > (
self.changeDetectedWhen
+ datetime.timedelta(seconds=self.debounce_interval)
self.changeDetectedWhen + datetime.timedelta(seconds=self.debounce_interval)
):
self.changeDetectedWhen = (
current_time # Initialize timestamp for new endpoint
@@ -180,17 +178,15 @@ class api_endpoint_class:
current_time = timeNowTZ()

# Debugging info to understand the issue
# mylog('debug', [f'[API] api_endpoint_class: {self.fileName} is_ad_hoc_user_event {self.is_ad_hoc_user_event} last_update_time={self.last_update_time}, debounce time={self.last_update_time + datetime.timedelta(seconds=self.debounce_interval)}.'])
# mylog('debug', [f'[API] api_endpoint_class: {self.fileName} is_ad_hoc_user_event
# {self.is_ad_hoc_user_event} last_update_time={self.last_update_time},
# debounce time={self.last_update_time + datetime.timedelta(seconds=self.debounce_interval)}.'])

# Only attempt to write if the debounce time has passed
if forceUpdate == True or (
self.needsUpdate
and (
self.changeDetectedWhen is None
or current_time
> (
self.changeDetectedWhen
+ datetime.timedelta(seconds=self.debounce_interval)
if forceUpdate is True or (
self.needsUpdate and (
self.changeDetectedWhen is None or current_time > (
self.changeDetectedWhen + datetime.timedelta(seconds=self.debounce_interval)
)
)
):
@@ -225,7 +221,7 @@ periodic_write_thread = None

def periodic_write(interval=1):
"""Periodically checks all endpoints for pending writes."""
global apiEndpoints

while not stop_event.is_set():
with api_lock:
for endpoint in apiEndpoints:
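The refactored condition above implements a simple debounce: a detected change is only flushed once `debounce_interval` seconds have elapsed since it was first noticed, unless `forceUpdate` is set. A stripped-down, illustrative version of that check follows; the attribute names mirror those in the diff, the standalone function itself is an assumption for clarity:

```python
import datetime

def should_write(force_update, needs_update, change_detected_when, debounce_interval, now):
    """Return True when a pending change is old enough to flush, or when forced."""
    if force_update:
        return True
    if not needs_update:
        return False
    return change_detected_when is None or now > (
        change_detected_when + datetime.timedelta(seconds=debounce_interval)
    )

# Example: a change detected 10 s ago with a 5 s debounce window gets written out.
now = datetime.datetime.now()
print(should_write(False, True, now - datetime.timedelta(seconds=10), 5, now))  # True
```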
@@ -9,25 +9,68 @@ from flask_cors import CORS
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
sys.path.extend([f"{INSTALL_PATH}/server"])

from logger import mylog
from helper import get_setting_value
from db.db_helper import get_date_from_period
from app_state import updateState
from logger import mylog # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
from db.db_helper import get_date_from_period # noqa: E402 [flake8 lint suppression]
from app_state import updateState # noqa: E402 [flake8 lint suppression]

from .graphql_endpoint import devicesSchema
from .device_endpoint import get_device_data, set_device_data, delete_device, delete_device_events, reset_device_props, copy_device, update_device_column
from .devices_endpoint import get_all_devices, delete_unknown_devices, delete_all_with_empty_macs, delete_devices, export_devices, import_csv, devices_totals, devices_by_status
from .events_endpoint import delete_events, delete_events_older_than, get_events, create_event, get_events_totals
from .history_endpoint import delete_online_history
from .prometheus_endpoint import get_metric_stats
from .sessions_endpoint import get_sessions, delete_session, create_session, get_sessions_calendar, get_device_sessions, get_session_events
from .nettools_endpoint import wakeonlan, traceroute, speedtest, nslookup, nmap_scan, internet_info
from .dbquery_endpoint import read_query, write_query, update_query, delete_query
from .sync_endpoint import handle_sync_post, handle_sync_get
from .logs_endpoint import clean_log
from models.user_events_queue_instance import UserEventsQueueInstance
from messaging.in_app import write_notification, mark_all_notifications_read, delete_notifications, get_unread_notifications, delete_notification, mark_notification_as_read
from .graphql_endpoint import devicesSchema # noqa: E402 [flake8 lint suppression]
from .device_endpoint import ( # noqa: E402 [flake8 lint suppression]
get_device_data,
set_device_data,
delete_device,
delete_device_events,
reset_device_props,
copy_device,
update_device_column
)
from .devices_endpoint import ( # noqa: E402 [flake8 lint suppression]
get_all_devices,
delete_unknown_devices,
delete_all_with_empty_macs,
delete_devices,
export_devices,
import_csv,
devices_totals,
devices_by_status
)
from .events_endpoint import ( # noqa: E402 [flake8 lint suppression]
delete_events,
delete_events_older_than,
get_events,
create_event,
get_events_totals
)
from .history_endpoint import delete_online_history # noqa: E402 [flake8 lint suppression]
from .prometheus_endpoint import get_metric_stats # noqa: E402 [flake8 lint suppression]
from .sessions_endpoint import ( # noqa: E402 [flake8 lint suppression]
get_sessions,
delete_session,
create_session,
get_sessions_calendar,
get_device_sessions,
get_session_events
)
from .nettools_endpoint import ( # noqa: E402 [flake8 lint suppression]
wakeonlan,
traceroute,
speedtest,
nslookup,
nmap_scan,
internet_info
)
from .dbquery_endpoint import read_query, write_query, update_query, delete_query # noqa: E402 [flake8 lint suppression]
from .sync_endpoint import handle_sync_post, handle_sync_get # noqa: E402 [flake8 lint suppression]
from .logs_endpoint import clean_log # noqa: E402 [flake8 lint suppression]
from models.user_events_queue_instance import UserEventsQueueInstance # noqa: E402 [flake8 lint suppression]
from messaging.in_app import ( # noqa: E402 [flake8 lint suppression]
write_notification,
mark_all_notifications_read,
delete_notifications,
get_unread_notifications,
delete_notification,
mark_notification_as_read
)

# Flask application
app = Flask(__name__)
@@ -50,13 +93,14 @@ CORS(
allow_headers=["Authorization", "Content-Type"],
)

# -------------------------------------------------------------------
# Custom handler for 404 - Route not found
# -------------------------------------------------------------------
@app.errorhandler(404)
def not_found(error):
response = {
"success": False,
"error": "API route not found",
"message": f"The requested URL {error.description if hasattr(error, 'description') else ''} was not found on the server.",
}
@@ -200,7 +244,7 @@ def api_get_devices():
def api_delete_devices():
if not is_authorized():
return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403

macs = request.json.get("macs") if request.is_json else None

return delete_devices(macs)
@@ -338,7 +382,7 @@ def dbquery_read():

if not raw_sql_b64:
return jsonify({"success": False, "message": "ERROR: Missing parameters", "error": "rawSql is required"}), 400

return read_query(raw_sql_b64)

@@ -350,7 +394,7 @@ def dbquery_write():
data = request.get_json() or {}
raw_sql_b64 = data.get("rawSql")
if not raw_sql_b64:
return jsonify({"success": False, "message": "ERROR: Missing parameters", "error": "rawSql is required"}), 400

return write_query(raw_sql_b64)

@@ -363,7 +407,13 @@ def dbquery_update():
data = request.get_json() or {}
required = ["columnName", "id", "dbtable", "columns", "values"]
if not all(data.get(k) for k in required):
return jsonify({"success": False, "message": "ERROR: Missing parameters", "error": "Missing required 'columnName', 'id', 'dbtable', 'columns', or 'values' query parameter"}), 400
return jsonify(
{
"success": False,
"message": "ERROR: Missing parameters",
"error": "Missing required 'columnName', 'id', 'dbtable', 'columns', or 'values' query parameter"
}
), 400

return update_query(
column_name=data["columnName"],
@@ -418,12 +468,13 @@ def api_clean_log():

return clean_log(file)

@app.route("/logs/add-to-execution-queue", methods=["POST"])
def api_add_to_execution_queue():

if not is_authorized():
return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403

queue = UserEventsQueueInstance()

# Get JSON payload safely
@@ -499,7 +550,7 @@ def api_delete_old_events(days: int):
"""
if not is_authorized():
return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403

return delete_events_older_than(days)

@@ -619,7 +670,7 @@ def api_write_notification():

if not content:
return jsonify({"success": False, "message": "ERROR: Missing parameters", "error": "Missing content"}), 400

write_notification(content, level)
return jsonify({"success": True})

@@ -672,7 +723,8 @@ def api_mark_notification_read(guid):
return jsonify({"success": True})
else:
return jsonify({"success": False, "message": "ERROR", "error": result.get("error")}), 500

# --------------------------
# SYNC endpoint
# --------------------------
@@ -9,7 +9,7 @@ from flask import jsonify
|
||||
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
|
||||
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
|
||||
|
||||
from database import get_temp_db_connection
|
||||
from database import get_temp_db_connection # noqa: E402 [flake8 lint suppression]
|
||||
|
||||
|
||||
def read_query(raw_sql_b64):
|
||||
|
||||
@@ -2,17 +2,16 @@
|
||||
|
||||
import os
|
||||
import sys
|
||||
from datetime import datetime
|
||||
from flask import jsonify, request
|
||||
|
||||
# Register NetAlertX directories
|
||||
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
|
||||
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
|
||||
|
||||
from database import get_temp_db_connection
|
||||
from helper import is_random_mac, get_setting_value
|
||||
from utils.datetime_utils import timeNowDB, format_date
|
||||
from db.db_helper import row_to_json, get_date_from_period
|
||||
from database import get_temp_db_connection # noqa: E402 [flake8 lint suppression]
|
||||
from helper import is_random_mac, get_setting_value # noqa: E402 [flake8 lint suppression]
|
||||
from utils.datetime_utils import timeNowDB, format_date # noqa: E402 [flake8 lint suppression]
|
||||
from db.db_helper import row_to_json, get_date_from_period # noqa: E402 [flake8 lint suppression]
|
||||
|
||||
# --------------------------
|
||||
# Device Endpoints Functions
|
||||
@@ -27,10 +26,10 @@ def get_device_data(mac):
|
||||
cur = conn.cursor()
|
||||
|
||||
now = timeNowDB()
|
||||
|
||||
|
||||
# Special case for new device
|
||||
if mac.lower() == "new":
|
||||
|
||||
|
||||
device_data = {
|
||||
"devMac": "",
|
||||
"devName": "",
|
||||
@@ -89,10 +88,10 @@ def get_device_data(mac):
|
||||
ELSE 'Off-line'
|
||||
END AS devStatus,
|
||||
|
||||
(SELECT COUNT(*) FROM Sessions
|
||||
(SELECT COUNT(*) FROM Sessions
|
||||
WHERE ses_MAC = d.devMac AND (
|
||||
ses_DateTimeConnection >= {period_date_sql} OR
|
||||
ses_DateTimeDisconnection >= {period_date_sql} OR
|
||||
ses_DateTimeConnection >= {period_date_sql} OR
|
||||
ses_DateTimeDisconnection >= {period_date_sql} OR
|
||||
ses_StillConnected = 1
|
||||
)) AS devSessions,
|
||||
|
||||
|
||||
@@ -14,16 +14,13 @@ from logger import mylog
|
||||
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
|
||||
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
|
||||
|
||||
from database import get_temp_db_connection
|
||||
from db.db_helper import get_table_json, get_device_condition_by_status
|
||||
from utils.datetime_utils import format_date
|
||||
from database import get_temp_db_connection # noqa: E402 [flake8 lint suppression]
|
||||
from db.db_helper import get_table_json, get_device_condition_by_status # noqa: E402 [flake8 lint suppression]
|
||||
|
||||
|
||||
# --------------------------
|
||||
# Device Endpoints Functions
|
||||
# --------------------------
|
||||
|
||||
|
||||
def get_all_devices():
|
||||
"""Retrieve all devices from the database."""
|
||||
conn = get_temp_db_connection()
|
||||
@@ -99,7 +96,7 @@ def delete_unknown_devices():
|
||||
|
||||
def export_devices(export_format):
|
||||
"""
|
||||
Export devices from the Devices table in teh desired format.
|
||||
Export devices from the Devices table in the desired format.
|
||||
- If `macs` is None → delete ALL devices.
|
||||
- If `macs` is a list → delete only matching MACs (supports wildcard '*').
|
||||
"""
|
||||
@@ -139,7 +136,6 @@ def export_devices(export_format):
|
||||
def import_csv(file_storage=None):
|
||||
data = ""
|
||||
skipped = []
|
||||
error = None
|
||||
|
||||
# 1. Try JSON `content` (base64-encoded CSV)
|
||||
if request.is_json and request.json.get("content"):
|
||||
|
||||
@@ -9,10 +9,10 @@ from flask import jsonify
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])

from database import get_temp_db_connection
from helper import is_random_mac, mylog
from db.db_helper import row_to_json, get_date_from_period
from utils.datetime_utils import format_date, format_date_iso, format_event_date, ensure_datetime
from database import get_temp_db_connection # noqa: E402 [flake8 lint suppression]
from helper import mylog # noqa: E402 [flake8 lint suppression]
from db.db_helper import row_to_json, get_date_from_period # noqa: E402 [flake8 lint suppression]
from utils.datetime_utils import ensure_datetime # noqa: E402 [flake8 lint suppression]


# --------------------------
@@ -120,14 +120,14 @@ def get_events_totals(period: str = "7 days"):
cur = conn.cursor()

sql = f"""
SELECT
SELECT
(SELECT COUNT(*) FROM Events WHERE eve_DateTime >= {period_date_sql}) AS all_events,
(SELECT COUNT(*) FROM Sessions WHERE
(SELECT COUNT(*) FROM Sessions WHERE
ses_DateTimeConnection >= {period_date_sql}
OR ses_DateTimeDisconnection >= {period_date_sql}
OR ses_StillConnected = 1
) AS sessions,
(SELECT COUNT(*) FROM Sessions WHERE
(SELECT COUNT(*) FROM Sessions WHERE
(ses_DateTimeConnection IS NULL AND ses_DateTimeDisconnection >= {period_date_sql})
OR (ses_DateTimeDisconnection IS NULL AND ses_StillConnected = 0 AND ses_DateTimeConnection >= {period_date_sql})
) AS missing,

@@ -1,5 +1,7 @@
import graphene
from graphene import ObjectType, String, Int, Boolean, List, Field, InputObjectType, Argument
from graphene import (
ObjectType, String, Int, Boolean, List, Field, InputObjectType, Argument
)
import json
import sys
import os
@@ -8,9 +10,9 @@ import os
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
sys.path.extend([f"{INSTALL_PATH}/server"])

from logger import mylog
from const import apiPath
from helper import (
from logger import mylog # noqa: E402 [flake8 lint suppression]
from const import apiPath # noqa: E402 [flake8 lint suppression]
from helper import ( # noqa: E402 [flake8 lint suppression]
is_random_mac,
get_number_of_children,
format_ip_long,
@@ -111,12 +113,14 @@ class SettingResult(ObjectType):
settings = List(Setting)
count = Int()

# --- LANGSTRINGS ---
# --- LANGSTRINGS ---


# In-memory cache for lang strings
_langstrings_cache = {} # caches lists per file (core JSON or plugin)
_langstrings_cache_mtime = {} # tracks last modified times


# LangString ObjectType
class LangString(ObjectType):
langCode = String()
@@ -128,6 +132,7 @@ class LangStringResult(ObjectType):
langStrings = List(LangString)
count = Int()


# Define Query Type with Pagination Support
class Query(ObjectType):
# --- DEVICES ---
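(Aside, not part of the diff: the `_langstrings_cache` / `_langstrings_cache_mtime` pair added above is a typical mtime-keyed in-memory cache. A rough, self-contained sketch of the idea — the helper name below is hypothetical, not NetAlertX's API:)

```python
import json
import os

_cache = {}        # parsed JSON per file path
_cache_mtime = {}  # last modification time seen per file path


def load_json_cached(path):
    """Re-parse a JSON file only when its mtime changes, else serve from memory."""
    mtime = os.path.getmtime(path)
    if _cache_mtime.get(path) != mtime:
        with open(path, "r", encoding="utf-8") as f:
            _cache[path] = json.load(f)
        _cache_mtime[path] = mtime
    return _cache[path]
```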
@@ -184,31 +189,39 @@ class Query(ObjectType):
if (device.get("devParentRelType") not in hidden_relationships)
]

devices_data = [
device
for device in devices_data
if (
(
device["devPresentLastScan"] == 1
and "online" in allowed_statuses
)
or (device["devIsNew"] == 1 and "new" in allowed_statuses)
or (
device["devPresentLastScan"] == 0
and device["devAlertDown"]
and "down" in allowed_statuses
)
or (
device["devPresentLastScan"] == 0
and "offline" in allowed_statuses
)
and device["devIsArchived"] == 0
or (
device["devIsArchived"] == 1
and "archived" in allowed_statuses
)
filtered = []

for device in devices_data:
is_online = (
device["devPresentLastScan"] == 1 and "online" in allowed_statuses
)
]

is_new = (
device["devIsNew"] == 1 and "new" in allowed_statuses
)

is_down = (
device["devPresentLastScan"] == 0 and device["devAlertDown"] and "down" in allowed_statuses
)

is_offline = (
device["devPresentLastScan"] == 0 and "offline" in allowed_statuses
)

is_archived = (
device["devIsArchived"] == 1 and "archived" in allowed_statuses
)

# Matches if not archived and status matches OR it is archived and allowed
matches = (
(is_online or is_new or is_down or is_offline) and device["devIsArchived"] == 0
) or is_archived

if matches:
filtered.append(device)

devices_data = filtered

elif status == "connected":
devices_data = [
device
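(Aside, not part of the diff: a likely reason for unrolling the comprehension above, beyond line length, is operator precedence. In Python `and` binds tighter than `or`, so in the old predicate the `devIsArchived == 0` guard grouped with the offline branch only, whereas the new `matches` expression applies it to all four status branches explicitly. A tiny illustration:)

```python
# Illustrative only: "and" binds tighter than "or" in Python.
online, offline, not_archived = True, False, False

implicit = online or offline and not_archived      # parsed as: online or (offline and not_archived)
explicit = (online or offline) and not_archived    # guard applied to both branches

print(implicit, explicit)  # True False
```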
@@ -257,8 +270,7 @@ class Query(ObjectType):
devices_data = [
device
for device in devices_data
if str(device.get(filter.filterColumn, "")).lower()
== str(filter.filterValue).lower()
if str(device.get(filter.filterColumn, "")).lower() == str(filter.filterValue).lower()
]

# Search data if a search term is provided
@@ -340,7 +352,7 @@ class Query(ObjectType):

return SettingResult(settings=settings, count=len(settings))

# --- LANGSTRINGS ---
# --- LANGSTRINGS ---
langStrings = Field(
LangStringResult,
langCode=Argument(String, required=False),
@@ -352,7 +364,6 @@ class Query(ObjectType):
Collect language strings, optionally filtered by language code and/or string key.
Caches in memory for performance. Can fallback to 'en_us' if a string is missing.
"""
global _langstrings_cache, _langstrings_cache_mtime

langStrings = []

@@ -437,11 +448,11 @@ class Query(ObjectType):
if en_fallback:
langStrings[i] = en_fallback[0]

mylog('trace', f'[graphql_schema] Collected {len(langStrings)} language strings '
f'(langCode={langCode}, key={langStringKey}, fallback_to_en={fallback_to_en})')
mylog('trace', f'[graphql_schema] Collected {len(langStrings)} language strings (langCode={langCode}, key={langStringKey}, fallback_to_en={fallback_to_en})')

return LangStringResult(langStrings=langStrings, count=len(langStrings))


# helps sorting inconsistent dataset mixed integers and strings
def mixed_type_sort_key(value):
if value is None or value == "":

@@ -8,7 +7,7 @@ from flask import jsonify
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])

from database import get_temp_db_connection
from database import get_temp_db_connection # noqa: E402 [flake8 lint suppression]


# --------------------------------------------------

@@ -3,18 +3,18 @@ import sys
from flask import jsonify

# Register NetAlertX directories
INSTALL_PATH="/app"
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])

from const import logPath
from logger import mylog, Logger
from helper import get_setting_value
from utils.datetime_utils import timeNowDB
from messaging.in_app import write_notification
from const import logPath # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
from messaging.in_app import write_notification # noqa: E402 [flake8 lint suppression]

# Make sure log level is initialized correctly
Logger(get_setting_value('LOG_LEVEL'))


def clean_log(log_file):
"""
Purge the content of an allowed log file within the /app/log/ directory.
@@ -55,4 +55,3 @@ def clean_log(log_file):
mylog('none', [msg])
write_notification(msg, 'interrupt')
return jsonify({"success": False, "message": msg}), 500

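(Aside, not part of the diff: the hunk above swaps a hardcoded `INSTALL_PATH="/app"` for an environment lookup with a fallback, so non-default install locations are honoured. A trivial sketch of the behaviour — values are examples only:)

```python
import os

# With NETALERTX_APP unset, the default applies; when exported, it wins.
print(os.getenv("NETALERTX_APP", "/app"))        # -> /app

os.environ["NETALERTX_APP"] = "/opt/netalertx"   # example value only
print(os.getenv("NETALERTX_APP", "/app"))        # -> /opt/netalertx
```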
@@ -6,8 +6,8 @@ import os
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
sys.path.extend([f"{INSTALL_PATH}/server"])

from logger import mylog
from const import apiPath
from logger import mylog # noqa: E402 [flake8 lint suppression]
from const import apiPath # noqa: E402 [flake8 lint suppression]


def escape_label_value(val):

@@ -9,10 +9,10 @@ from flask import jsonify
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])

from database import get_temp_db_connection
from helper import is_random_mac, get_setting_value, mylog, format_ip_long
from db.db_helper import row_to_json, get_date_from_period
from utils.datetime_utils import format_date_iso, format_event_date, format_date_diff, parse_datetime, format_date
from database import get_temp_db_connection # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value, format_ip_long # noqa: E402 [flake8 lint suppression]
from db.db_helper import get_date_from_period # noqa: E402 [flake8 lint suppression]
from utils.datetime_utils import format_date_iso, format_event_date, format_date_diff, format_date # noqa: E402 [flake8 lint suppression]


# --------------------------
@@ -33,7 +33,7 @@ def create_session(
|
||||
|
||||
cur.execute(
|
||||
"""
|
||||
INSERT INTO Sessions (ses_MAC, ses_IP, ses_DateTimeConnection, ses_DateTimeDisconnection,
|
||||
INSERT INTO Sessions (ses_MAC, ses_IP, ses_DateTimeConnection, ses_DateTimeDisconnection,
|
||||
ses_EventTypeConnection, ses_EventTypeDisconnection)
|
||||
VALUES (?, ?, ?, ?, ?, ?)
|
||||
""",
|
||||
@@ -105,7 +105,7 @@ def get_sessions_calendar(start_date, end_date):
|
||||
-- If ses_EventTypeConnection is missing, backfill from last disconnection
|
||||
-- If ses_EventTypeDisconnection is missing, forward-fill from next connection
|
||||
|
||||
SELECT
|
||||
SELECT
|
||||
SES1.ses_MAC, SES1.ses_EventTypeConnection, SES1.ses_DateTimeConnection,
|
||||
SES1.ses_EventTypeDisconnection, SES1.ses_DateTimeDisconnection, SES1.ses_IP,
|
||||
SES1.ses_AdditionalInfo, SES1.ses_StillConnected,
|
||||
@@ -113,9 +113,9 @@ def get_sessions_calendar(start_date, end_date):
|
||||
CASE
|
||||
WHEN SES1.ses_EventTypeConnection = '<missing event>' THEN
|
||||
IFNULL(
|
||||
(SELECT MAX(SES2.ses_DateTimeDisconnection)
|
||||
FROM Sessions AS SES2
|
||||
WHERE SES2.ses_MAC = SES1.ses_MAC
|
||||
(SELECT MAX(SES2.ses_DateTimeDisconnection)
|
||||
FROM Sessions AS SES2
|
||||
WHERE SES2.ses_MAC = SES1.ses_MAC
|
||||
AND SES2.ses_DateTimeDisconnection < SES1.ses_DateTimeDisconnection
|
||||
AND SES2.ses_DateTimeDisconnection BETWEEN Date(?) AND Date(?)
|
||||
),
|
||||
@@ -126,9 +126,9 @@ def get_sessions_calendar(start_date, end_date):
|
||||
|
||||
CASE
|
||||
WHEN SES1.ses_EventTypeDisconnection = '<missing event>' THEN
|
||||
(SELECT MIN(SES2.ses_DateTimeConnection)
|
||||
FROM Sessions AS SES2
|
||||
WHERE SES2.ses_MAC = SES1.ses_MAC
|
||||
(SELECT MIN(SES2.ses_DateTimeConnection)
|
||||
FROM Sessions AS SES2
|
||||
WHERE SES2.ses_MAC = SES1.ses_MAC
|
||||
AND SES2.ses_DateTimeConnection > SES1.ses_DateTimeConnection
|
||||
AND SES2.ses_DateTimeConnection BETWEEN Date(?) AND Date(?)
|
||||
)
|
||||
@@ -162,8 +162,7 @@ def get_sessions_calendar(start_date, end_date):
|
||||
|
||||
# Determine color
|
||||
if (
|
||||
row["ses_EventTypeConnection"] == "<missing event>"
|
||||
or row["ses_EventTypeDisconnection"] == "<missing event>"
|
||||
row["ses_EventTypeConnection"] == "<missing event>" or row["ses_EventTypeDisconnection"] == "<missing event>"
|
||||
):
|
||||
color = "#f39c12"
|
||||
elif row["ses_StillConnected"] == 1:
|
||||
@@ -205,7 +204,7 @@ def get_device_sessions(mac, period):
|
||||
cur = conn.cursor()
|
||||
|
||||
sql = f"""
|
||||
SELECT
|
||||
SELECT
|
||||
IFNULL(ses_DateTimeConnection, ses_DateTimeDisconnection) AS ses_DateTimeOrder,
|
||||
ses_EventTypeConnection,
|
||||
ses_DateTimeConnection,
|
||||
@@ -293,7 +292,7 @@ def get_session_events(event_type, period_date):
|
||||
|
||||
# Base SQLs
|
||||
sql_events = f"""
|
||||
SELECT
|
||||
SELECT
|
||||
eve_DateTime AS eve_DateTimeOrder,
|
||||
devName,
|
||||
devOwner,
|
||||
@@ -314,7 +313,7 @@ def get_session_events(event_type, period_date):
|
||||
"""
|
||||
|
||||
sql_sessions = """
|
||||
SELECT
|
||||
SELECT
|
||||
IFNULL(ses_DateTimeConnection, ses_DateTimeDisconnection) AS ses_DateTimeOrder,
|
||||
devName,
|
||||
devOwner,
|
||||
@@ -337,8 +336,7 @@ def get_session_events(event_type, period_date):
|
||||
sql = sql_events
|
||||
elif event_type == "sessions":
|
||||
sql = (
|
||||
sql_sessions
|
||||
+ f"""
|
||||
sql_sessions + f"""
|
||||
WHERE (
|
||||
ses_DateTimeConnection >= {period_date}
|
||||
OR ses_DateTimeDisconnection >= {period_date}
|
||||
@@ -348,8 +346,7 @@ def get_session_events(event_type, period_date):
|
||||
)
|
||||
elif event_type == "missing":
|
||||
sql = (
|
||||
sql_sessions
|
||||
+ f"""
|
||||
sql_sessions + f"""
|
||||
WHERE (
|
||||
(ses_DateTimeConnection IS NULL AND ses_DateTimeDisconnection >= {period_date})
|
||||
OR (ses_DateTimeDisconnection IS NULL AND ses_StillConnected = 0 AND ses_DateTimeConnection >= {period_date})
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import os
|
||||
import json
|
||||
|
||||
from const import *
|
||||
from const import applicationPath, apiPath
|
||||
from logger import mylog
|
||||
from helper import checkNewVersion
|
||||
from utils.datetime_utils import timeNowDB, timeNow
|
||||
@@ -32,14 +32,17 @@ class app_state_class:
|
||||
isNewVersionChecked (int): Timestamp of last version check.
|
||||
"""
|
||||
|
||||
def __init__(self, currentState=None,
|
||||
settingsSaved=None,
|
||||
settingsImported=None,
|
||||
showSpinner=None,
|
||||
graphQLServerStarted=0,
|
||||
processScan=False,
|
||||
pluginsStates=None,
|
||||
appVersion=None):
|
||||
def __init__(
|
||||
self,
|
||||
currentState=None,
|
||||
settingsSaved=None,
|
||||
settingsImported=None,
|
||||
showSpinner=None,
|
||||
graphQLServerStarted=0,
|
||||
processScan=False,
|
||||
pluginsStates=None,
|
||||
appVersion=None
|
||||
):
|
||||
"""
|
||||
Initialize the application state, optionally overwriting previous values.
|
||||
|
||||
@@ -62,7 +65,7 @@ class app_state_class:
|
||||
|
||||
# Update self
|
||||
self.lastUpdated = str(timeNowDB())
|
||||
|
||||
|
||||
if os.path.exists(stateFile):
|
||||
try:
|
||||
with open(stateFile, "r") as json_file:
|
||||
@@ -73,7 +76,7 @@ class app_state_class:
|
||||
)
|
||||
|
||||
# Check if the file exists and recover previous values
|
||||
if previousState != "":
|
||||
if previousState != "":
|
||||
self.settingsSaved = previousState.get("settingsSaved", 0)
|
||||
self.settingsImported = previousState.get("settingsImported", 0)
|
||||
self.processScan = previousState.get("processScan", False)
|
||||
@@ -82,9 +85,9 @@ class app_state_class:
|
||||
self.isNewVersionChecked = previousState.get("isNewVersionChecked", 0)
|
||||
self.graphQLServerStarted = previousState.get("graphQLServerStarted", 0)
|
||||
self.currentState = previousState.get("currentState", "Init")
|
||||
self.pluginsStates = previousState.get("pluginsStates", {})
|
||||
self.appVersion = previousState.get("appVersion", "")
|
||||
else: # init first time values
|
||||
self.pluginsStates = previousState.get("pluginsStates", {})
|
||||
self.appVersion = previousState.get("appVersion", "")
|
||||
else: # init first time values
|
||||
self.settingsSaved = 0
|
||||
self.settingsImported = 0
|
||||
self.showSpinner = False
|
||||
@@ -158,12 +161,12 @@ class app_state_class:
|
||||
|
||||
# -------------------------------------------------------------------------------
|
||||
# method to update the state
|
||||
def updateState(newState = None,
|
||||
settingsSaved = None,
|
||||
settingsImported = None,
|
||||
showSpinner = None,
|
||||
graphQLServerStarted = None,
|
||||
processScan = None,
|
||||
def updateState(newState = None,
|
||||
settingsSaved = None,
|
||||
settingsImported = None,
|
||||
showSpinner = None,
|
||||
graphQLServerStarted = None,
|
||||
processScan = None,
|
||||
pluginsStates=None,
|
||||
appVersion=None):
|
||||
"""
|
||||
@@ -182,14 +185,16 @@ def updateState(newState = None,
|
||||
Returns:
|
||||
app_state_class: Updated state object.
|
||||
"""
|
||||
return app_state_class( newState,
|
||||
settingsSaved,
|
||||
settingsImported,
|
||||
showSpinner,
|
||||
graphQLServerStarted,
|
||||
processScan,
|
||||
pluginsStates,
|
||||
appVersion)
|
||||
return app_state_class(
|
||||
newState,
|
||||
settingsSaved,
|
||||
settingsImported,
|
||||
showSpinner,
|
||||
graphQLServerStarted,
|
||||
processScan,
|
||||
pluginsStates,
|
||||
appVersion
|
||||
)
|
||||
|
||||
|
||||
# -------------------------------------------------------------------------------
|
||||
|
||||
@@ -52,7 +52,7 @@ default_tz = "Europe/Berlin"
|
||||
# SQL queries
|
||||
# ===============================================================================
|
||||
sql_devices_all = """
|
||||
SELECT
|
||||
SELECT
|
||||
rowid,
|
||||
IFNULL(devMac, '') AS devMac,
|
||||
IFNULL(devName, '') AS devName,
|
||||
@@ -88,7 +88,7 @@ sql_devices_all = """
|
||||
IFNULL(devFQDN, '') AS devFQDN,
|
||||
IFNULL(devParentRelType, '') AS devParentRelType,
|
||||
IFNULL(devReqNicsOnline, '') AS devReqNicsOnline,
|
||||
CASE
|
||||
CASE
|
||||
WHEN devIsNew = 1 THEN 'New'
|
||||
WHEN devPresentLastScan = 1 THEN 'On-line'
|
||||
WHEN devPresentLastScan = 0 AND devAlertDown != 0 THEN 'Down'
|
||||
@@ -133,7 +133,7 @@ sql_devices_tiles = """
|
||||
(SELECT COUNT(*) FROM Devices) AS "all_devices",
|
||||
-- My Devices count
|
||||
(SELECT COUNT(*) FROM MyDevicesFilter) AS my_devices
|
||||
FROM Statuses;
|
||||
FROM Statuses;
|
||||
"""
|
||||
sql_devices_filters = """
|
||||
SELECT DISTINCT 'devSite' AS columnName, devSite AS columnValue
|
||||
@@ -164,9 +164,9 @@ sql_devices_filters = """
|
||||
FROM Devices WHERE devSSID NOT IN ('', 'null') AND devSSID IS NOT NULL
|
||||
ORDER BY columnName;
|
||||
"""
|
||||
sql_devices_stats = """SELECT Online_Devices as online, Down_Devices as down, All_Devices as 'all', Archived_Devices as archived,
|
||||
(select count(*) from Devices a where devIsNew = 1 ) as new,
|
||||
(select count(*) from Devices a where devName = '(unknown)' or devName = '(name not found)' ) as unknown
|
||||
sql_devices_stats = """SELECT Online_Devices as online, Down_Devices as down, All_Devices as 'all', Archived_Devices as archived,
|
||||
(select count(*) from Devices a where devIsNew = 1 ) as new,
|
||||
(select count(*) from Devices a where devName = '(unknown)' or devName = '(name not found)' ) as unknown
|
||||
from Online_History order by Scan_Date desc limit 1"""
|
||||
sql_events_pending_alert = "SELECT * FROM Events where eve_PendingAlertEmail is not 0"
|
||||
sql_settings = "SELECT * FROM Settings"
|
||||
@@ -176,23 +176,23 @@ sql_notifications_all = "SELECT * FROM Notifications"
|
||||
sql_online_history = "SELECT * FROM Online_History"
|
||||
sql_plugins_events = "SELECT * FROM Plugins_Events"
|
||||
sql_plugins_history = "SELECT * FROM Plugins_History ORDER BY DateTimeChanged DESC"
|
||||
sql_new_devices = """SELECT * FROM (
|
||||
SELECT eve_IP as devLastIP, eve_MAC as devMac
|
||||
sql_new_devices = """SELECT * FROM (
|
||||
SELECT eve_IP as devLastIP, eve_MAC as devMac
|
||||
FROM Events_Devices
|
||||
WHERE eve_PendingAlertEmail = 1
|
||||
AND eve_EventType = 'New Device'
|
||||
ORDER BY eve_DateTime ) t1
|
||||
LEFT JOIN
|
||||
( SELECT devName, devMac as devMac_t2 FROM Devices) t2
|
||||
LEFT JOIN
|
||||
( SELECT devName, devMac as devMac_t2 FROM Devices) t2
|
||||
ON t1.devMac = t2.devMac_t2"""
|
||||
|
||||
|
||||
sql_generateGuid = """
|
||||
lower(
|
||||
hex(randomblob(4)) || '-' || hex(randomblob(2)) || '-' || '4' ||
|
||||
substr(hex( randomblob(2)), 2) || '-' ||
|
||||
hex(randomblob(4)) || '-' || hex(randomblob(2)) || '-' || '4' ||
|
||||
substr(hex( randomblob(2)), 2) || '-' ||
|
||||
substr('AB89', 1 + (abs(random()) % 4) , 1) ||
|
||||
substr(hex(randomblob(2)), 2) || '-' ||
|
||||
substr(hex(randomblob(2)), 2) || '-' ||
|
||||
hex(randomblob(6))
|
||||
)
|
||||
"""
|
||||
|
||||
@@ -180,7 +180,7 @@ class DB:
|
||||
# Init the AppEvent database table
|
||||
AppEvent_obj(self)
|
||||
|
||||
# #-------------------------------------------------------------------------------
|
||||
# # -------------------------------------------------------------------------------
|
||||
# def get_table_as_json(self, sqlQuery):
|
||||
|
||||
# # mylog('debug',[ '[Database] - get_table_as_json - Query: ', sqlQuery])
|
||||
|
||||
@@ -6,8 +6,8 @@ import os
|
||||
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
|
||||
sys.path.extend([f"{INSTALL_PATH}/server"])
|
||||
|
||||
from helper import if_byte_then_to_str
|
||||
from logger import mylog
|
||||
from helper import if_byte_then_to_str # noqa: E402 [flake8 lint suppression]
|
||||
from logger import mylog # noqa: E402 [flake8 lint suppression]
|
||||
|
||||
|
||||
# -------------------------------------------------------------------------------
|
||||
|
||||
@@ -5,8 +5,8 @@ import os
|
||||
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
|
||||
sys.path.extend([f"{INSTALL_PATH}/server"])
|
||||
|
||||
from logger import mylog
|
||||
from messaging.in_app import write_notification
|
||||
from logger import mylog # noqa: E402 [flake8 lint suppression]
|
||||
from messaging.in_app import write_notification # noqa: E402 [flake8 lint suppression]
|
||||
|
||||
|
||||
def ensure_column(sql, table: str, column_name: str, column_type: str) -> bool:
|
||||
@@ -30,7 +30,7 @@ def ensure_column(sql, table: str, column_name: str, column_type: str) -> bool:
|
||||
if column_name in actual_columns:
|
||||
return True # Already exists
|
||||
|
||||
# Define the expected columns (hardcoded base schema) [v25.5.24] - available in teh default app.db
|
||||
# Define the expected columns (hardcoded base schema) [v25.5.24] - available in the default app.db
|
||||
expected_columns = [
|
||||
"devMac",
|
||||
"devName",
|
||||
@@ -108,23 +108,23 @@ def ensure_views(sql) -> bool:
|
||||
- sql: database cursor or connection wrapper (must support execute() and fetchall()).
|
||||
"""
|
||||
sql.execute(""" DROP VIEW IF EXISTS Events_Devices;""")
|
||||
sql.execute(""" CREATE VIEW Events_Devices AS
|
||||
SELECT *
|
||||
FROM Events
|
||||
sql.execute(""" CREATE VIEW Events_Devices AS
|
||||
SELECT *
|
||||
FROM Events
|
||||
LEFT JOIN Devices ON eve_MAC = devMac;
|
||||
""")
|
||||
|
||||
sql.execute(""" DROP VIEW IF EXISTS LatestEventsPerMAC;""")
|
||||
sql.execute("""CREATE VIEW LatestEventsPerMAC AS
|
||||
WITH RankedEvents AS (
|
||||
SELECT
|
||||
SELECT
|
||||
e.*,
|
||||
ROW_NUMBER() OVER (PARTITION BY e.eve_MAC ORDER BY e.eve_DateTime DESC) AS row_num
|
||||
FROM Events AS e
|
||||
)
|
||||
SELECT
|
||||
e.*,
|
||||
d.*,
|
||||
SELECT
|
||||
e.*,
|
||||
d.*,
|
||||
c.*
|
||||
FROM RankedEvents AS e
|
||||
LEFT JOIN Devices AS d ON e.eve_MAC = d.devMac
|
||||
@@ -136,23 +136,6 @@ def ensure_views(sql) -> bool:
|
||||
"""CREATE VIEW Sessions_Devices AS SELECT * FROM Sessions LEFT JOIN "Devices" ON ses_MAC = devMac;"""
|
||||
)
|
||||
|
||||
sql.execute(""" CREATE VIEW IF NOT EXISTS LatestEventsPerMAC AS
|
||||
WITH RankedEvents AS (
|
||||
SELECT
|
||||
e.*,
|
||||
ROW_NUMBER() OVER (PARTITION BY e.eve_MAC ORDER BY e.eve_DateTime DESC) AS row_num
|
||||
FROM Events AS e
|
||||
)
|
||||
SELECT
|
||||
e.*,
|
||||
d.*,
|
||||
c.*
|
||||
FROM RankedEvents AS e
|
||||
LEFT JOIN Devices AS d ON e.eve_MAC = d.devMac
|
||||
INNER JOIN CurrentScan AS c ON e.eve_MAC = c.cur_MAC
|
||||
WHERE e.row_num = 1;
|
||||
""")
|
||||
|
||||
# handling the Convert_Events_to_Sessions / Sessions screens
|
||||
sql.execute("""DROP VIEW IF EXISTS Convert_Events_to_Sessions;""")
|
||||
sql.execute("""CREATE VIEW Convert_Events_to_Sessions AS SELECT EVE1.eve_MAC,
|
||||
@@ -272,7 +255,7 @@ def ensure_CurrentScan(sql) -> bool:
|
||||
"""
|
||||
# 🐛 CurrentScan DEBUG: comment out below when debugging to keep the CurrentScan table after restarts/scan finishes
|
||||
sql.execute("DROP TABLE IF EXISTS CurrentScan;")
|
||||
sql.execute(""" CREATE TABLE IF NOT EXISTS CurrentScan (
|
||||
sql.execute(""" CREATE TABLE IF NOT EXISTS CurrentScan (
|
||||
cur_MAC STRING(50) NOT NULL COLLATE NOCASE,
|
||||
cur_IP STRING(50) NOT NULL COLLATE NOCASE,
|
||||
cur_Vendor STRING(250),
|
||||
@@ -354,7 +337,7 @@ def ensure_plugins_tables(sql) -> bool:
|
||||
# Plugin state
|
||||
sql_Plugins_Objects = """ CREATE TABLE IF NOT EXISTS Plugins_Objects(
|
||||
"Index" INTEGER,
|
||||
Plugin TEXT NOT NULL,
|
||||
Plugin TEXT NOT NULL,
|
||||
Object_PrimaryID TEXT NOT NULL,
|
||||
Object_SecondaryID TEXT NOT NULL,
|
||||
DateTimeCreated TEXT NOT NULL,
|
||||
|
||||
@@ -18,7 +18,7 @@ from typing import Dict, List, Tuple, Any, Optional
|
||||
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
|
||||
sys.path.extend([f"{INSTALL_PATH}/server"])
|
||||
|
||||
from logger import mylog
|
||||
from logger import mylog # noqa: E402 [flake8 lint suppression]
|
||||
|
||||
|
||||
class SafeConditionBuilder:
|
||||
@@ -494,8 +494,6 @@ class SafeConditionBuilder:
|
||||
if logical_op and not self._validate_logical_operator(logical_op):
|
||||
raise ValueError(f"Invalid logical operator: {logical_op}")
|
||||
|
||||
# Parse values from the IN clause
|
||||
values = []
|
||||
# Simple regex to extract quoted values
|
||||
value_pattern = r"'([^']*)'"
|
||||
matches = re.findall(value_pattern, values_str)
|
||||
|
||||
@@ -7,25 +7,22 @@ import os
|
||||
import re
|
||||
import unicodedata
|
||||
import subprocess
|
||||
import pytz
|
||||
import json
|
||||
import requests
|
||||
import base64
|
||||
import hashlib
|
||||
import random
|
||||
import email
|
||||
import string
|
||||
import ipaddress
|
||||
|
||||
import conf
|
||||
from const import *
|
||||
from const import applicationPath, fullConfPath, fullDbPath, dbPath, confPath, apiPath
|
||||
from logger import mylog, logResult
|
||||
|
||||
# Register NetAlertX directories using runtime configuration
|
||||
INSTALL_PATH = applicationPath
|
||||
|
||||
|
||||
|
||||
# -------------------------------------------------------------------------------
|
||||
# File system permission handling
|
||||
# -------------------------------------------------------------------------------
|
||||
@@ -58,12 +55,6 @@ def checkPermissionsOK():
|
||||
return (confR_access, dbR_access)
|
||||
|
||||
|
||||
# -------------------------------------------------------------------------------
|
||||
def fixPermissions():
|
||||
# Try fixing access rights if needed
|
||||
chmodCommands = []
|
||||
|
||||
|
||||
# -------------------------------------------------------------------------------
|
||||
def initialiseFile(pathToCheck, defaultFile):
|
||||
# if file not readable (missing?) try to copy over the backed-up (default) one
|
||||
@@ -71,9 +62,7 @@ def initialiseFile(pathToCheck, defaultFile):
|
||||
mylog(
|
||||
"none",
|
||||
[
|
||||
"[Setup] ("
|
||||
+ pathToCheck
|
||||
+ ") file is not readable or missing. Trying to copy over the default one."
|
||||
"[Setup] (" + pathToCheck + ") file is not readable or missing. Trying to copy over the default one."
|
||||
],
|
||||
)
|
||||
try:
|
||||
@@ -89,22 +78,14 @@ def initialiseFile(pathToCheck, defaultFile):
|
||||
mylog(
|
||||
"none",
|
||||
[
|
||||
"[Setup] ⚠ ERROR copying ("
|
||||
+ defaultFile
|
||||
+ ") to ("
|
||||
+ pathToCheck
|
||||
+ "). Make sure the app has Read & Write access to the parent directory."
|
||||
"[Setup] ⚠ ERROR copying (" + defaultFile + ") to (" + pathToCheck + "). Make sure the app has Read & Write access to the parent directory."
|
||||
],
|
||||
)
|
||||
else:
|
||||
mylog(
|
||||
"none",
|
||||
[
|
||||
"[Setup] ("
|
||||
+ defaultFile
|
||||
+ ") copied over successfully to ("
|
||||
+ pathToCheck
|
||||
+ ")."
|
||||
"[Setup] (" + defaultFile + ") copied over successfully to (" + pathToCheck + ")."
|
||||
],
|
||||
)
|
||||
|
||||
@@ -116,10 +97,7 @@ def initialiseFile(pathToCheck, defaultFile):
|
||||
mylog(
|
||||
"none",
|
||||
[
|
||||
"[Setup] ⚠ ERROR copying ("
|
||||
+ defaultFile
|
||||
+ "). Make sure the app has Read & Write access to "
|
||||
+ pathToCheck
|
||||
"[Setup] ⚠ ERROR copying (" + defaultFile + "). Make sure the app has Read & Write access to " + pathToCheck
|
||||
],
|
||||
)
|
||||
mylog("none", [e.output])
|
||||
@@ -130,16 +108,13 @@ def filePermissions():
|
||||
# check and initialize .conf
|
||||
(confR_access, dbR_access) = checkPermissionsOK() # Initial check
|
||||
|
||||
if confR_access == False:
|
||||
if confR_access is False:
|
||||
initialiseFile(fullConfPath, f"{INSTALL_PATH}/back/app.conf")
|
||||
|
||||
# check and initialize .db
|
||||
if dbR_access == False:
|
||||
if dbR_access is False:
|
||||
initialiseFile(fullDbPath, f"{INSTALL_PATH}/back/app.db")
|
||||
|
||||
# last attempt
|
||||
fixPermissions()
|
||||
|
||||
|
||||
# -------------------------------------------------------------------------------
|
||||
# File manipulation methods
|
||||
@@ -273,8 +248,6 @@ def get_setting_value(key):
|
||||
Any: The Python-typed setting value, or an empty string if not found.
|
||||
"""
|
||||
|
||||
global SETTINGS_SECONDARYCACHE
|
||||
|
||||
# Returns empty string if not found
|
||||
value = ""
|
||||
|
||||
@@ -292,7 +265,7 @@ def get_setting_value(key):
|
||||
value = setting_value_to_python_type(set_type, set_value)
|
||||
else:
|
||||
value = setting_value_to_python_type(set_type, str(set_value))
|
||||
|
||||
|
||||
SETTINGS_SECONDARYCACHE[key] = value
|
||||
|
||||
return value
|
||||
@@ -382,7 +355,7 @@ def setting_value_to_python_type(set_type, set_value):
|
||||
if isinstance(set_value, str):
|
||||
try:
|
||||
value = json.loads(set_value.replace("'", "\""))
|
||||
|
||||
|
||||
except json.JSONDecodeError as e:
|
||||
mylog(
|
||||
"none",
|
||||
@@ -413,17 +386,12 @@ def setting_value_to_python_type(set_type, set_value):
|
||||
value = set_value
|
||||
|
||||
elif (
|
||||
dataType == "string"
|
||||
and elementType == "input"
|
||||
and any(opt.get("readonly") == "true" for opt in elementOptions)
|
||||
dataType == "string" and elementType == "input" and any(opt.get("readonly") == "true" for opt in elementOptions)
|
||||
):
|
||||
value = reverseTransformers(str(set_value), transformers)
|
||||
|
||||
elif (
|
||||
dataType == "string"
|
||||
and elementType == "input"
|
||||
and any(opt.get("type") == "password" for opt in elementOptions)
|
||||
and "sha256" in transformers
|
||||
dataType == "string" and elementType == "input" and any(opt.get("type") == "password" for opt in elementOptions) and "sha256" in transformers
|
||||
):
|
||||
value = hashlib.sha256(set_value.encode()).hexdigest()
|
||||
|
||||
@@ -602,23 +570,23 @@ def normalize_string(text):
|
||||
# -------------------------------------------------------------------------------
|
||||
|
||||
|
||||
# -------------------------------------------------------------------------------------------
|
||||
def is_random_mac(mac: str) -> bool:
|
||||
"""Determine if a MAC address is random, respecting user-defined prefixes not to mark as random."""
|
||||
# # -------------------------------------------------------------------------------------------
|
||||
# def is_random_mac(mac: str) -> bool:
|
||||
# """Determine if a MAC address is random, respecting user-defined prefixes not to mark as random."""
|
||||
|
||||
is_random = mac[1].upper() in ["2", "6", "A", "E"]
|
||||
# is_random = mac[1].upper() in ["2", "6", "A", "E"]
|
||||
|
||||
# Get prefixes from settings
|
||||
prefixes = get_setting_value("UI_NOT_RANDOM_MAC")
|
||||
# # Get prefixes from settings
|
||||
# prefixes = get_setting_value("UI_NOT_RANDOM_MAC")
|
||||
|
||||
# If detected as random, make sure it doesn't start with a prefix the user wants to exclude
|
||||
if is_random:
|
||||
for prefix in prefixes:
|
||||
if mac.upper().startswith(prefix.upper()):
|
||||
is_random = False
|
||||
break
|
||||
# # If detected as random, make sure it doesn't start with a prefix the user wants to exclude
|
||||
# if is_random:
|
||||
# for prefix in prefixes:
|
||||
# if mac.upper().startswith(prefix.upper()):
|
||||
# is_random = False
|
||||
# break
|
||||
|
||||
return is_random
|
||||
# return is_random
|
||||
|
||||
|
||||
# -------------------------------------------------------------------------------------------
|
||||
@@ -653,6 +621,12 @@ def extract_ip_addresses(text):
|
||||
# -------------------------------------------------------------------------------
|
||||
# Helper function to determine if a MAC address is random
|
||||
def is_random_mac(mac):
|
||||
"""Determine if a MAC address is random, respecting user-defined prefixes not to mark as random."""
|
||||
|
||||
# Validate input
|
||||
if not mac or len(mac) < 2:
|
||||
return False
|
||||
|
||||
# Check if second character matches "2", "6", "A", "E" (case insensitive)
|
||||
is_random = mac[1].upper() in ["2", "6", "A", "E"]
|
||||
|
||||
@@ -660,7 +634,7 @@ def is_random_mac(mac):
|
||||
if is_random:
|
||||
not_random_prefixes = get_setting_value("UI_NOT_RANDOM_MAC")
|
||||
for prefix in not_random_prefixes:
|
||||
if mac.startswith(prefix):
|
||||
if mac.upper().startswith(prefix.upper()):
|
||||
is_random = False
|
||||
break
|
||||
return is_random
|
||||
@@ -773,7 +747,6 @@ def getBuildTimeStampAndVersion():
|
||||
return tuple(results)
|
||||
|
||||
|
||||
|
||||
# -------------------------------------------------------------------------------
|
||||
def checkNewVersion():
|
||||
mylog("debug", ["[Version check] Checking if new version available"])
|
||||
|
||||
@@ -8,9 +8,9 @@ import shutil
|
||||
import re
|
||||
|
||||
# Register NetAlertX libraries
|
||||
import conf
|
||||
from const import fullConfPath, applicationPath, fullConfFolder, default_tz
|
||||
from helper import getBuildTimeStampAndVersion, fixPermissions, collect_lang_strings, updateSubnets, isJsonObject, setting_value_to_python_type, get_setting_value, generate_random_string
|
||||
import conf
|
||||
from const import fullConfPath, fullConfFolder, default_tz
|
||||
from helper import getBuildTimeStampAndVersion, collect_lang_strings, updateSubnets, generate_random_string
|
||||
from utils.datetime_utils import timeNowDB
|
||||
from app_state import updateState
|
||||
from logger import mylog
|
||||
@@ -19,7 +19,6 @@ from scheduler import schedule_class
|
||||
from plugin import plugin_manager, print_plugin_info
|
||||
from utils.plugin_utils import get_plugins_configs, get_set_value_for_init
|
||||
from messaging.in_app import write_notification
|
||||
from utils.crypto_utils import get_random_bytes
|
||||
|
||||
# ===============================================================================
|
||||
# Initialise user defined values
|
||||
@@ -59,7 +58,7 @@ def ccd(
|
||||
result = default
|
||||
|
||||
# Use existing value if already supplied, otherwise default value is used
|
||||
if forceDefault == False and key in config_dir:
|
||||
if forceDefault is False and key in config_dir:
|
||||
result = config_dir[key]
|
||||
|
||||
# Single quotes might break SQL queries, replacing them
|
||||
@@ -216,7 +215,7 @@ def importConfigs(pm, db, all_plugins):
|
||||
[],
|
||||
c_d,
|
||||
"Loaded plugins",
|
||||
'{"dataType":"array","elements":[{"elementType":"select","elementHasInputValue":1,"elementOptions":[{"multiple":"true","ordeable":"true"}],"transformers":[]},{"elementType":"button","elementOptions":[{"sourceSuffixes":[]},{"separator":""},{"cssClasses":"col-xs-12"},{"onClick":"selectChange(this)"},{"getStringKey":"Gen_Change"}],"transformers":[]}]}',
|
||||
'{"dataType":"array","elements":[{"elementType":"select","elementHasInputValue":1,"elementOptions":[{"multiple":"true","ordeable":"true"}],"transformers":[]},{"elementType":"button","elementOptions":[{"sourceSuffixes":[]},{"separator":""},{"cssClasses":"col-xs-12"},{"onClick":"selectChange(this)"},{"getStringKey":"Gen_Change"}],"transformers":[]}]}', # noqa: E501
|
||||
"[]",
|
||||
"General",
|
||||
)
|
||||
@@ -234,7 +233,7 @@ def importConfigs(pm, db, all_plugins):
|
||||
["192.168.1.0/24 --interface=eth1", "192.168.1.0/24 --interface=eth0"],
|
||||
c_d,
|
||||
"Subnets to scan",
|
||||
"""{"dataType": "array","elements": [{"elementType": "input","elementOptions": [{"placeholder": "192.168.1.0/24 --interface=eth1"},{"suffix": "_in"},{"cssClasses": "col-sm-10"},{"prefillValue": "null"}],"transformers": []},{"elementType": "button","elementOptions": [{"sourceSuffixes": ["_in"]},{"separator": ""},{"cssClasses": "col-xs-12"},{"onClick": "addList(this, false)"},{"getStringKey": "Gen_Add"}],"transformers": []},{"elementType": "select","elementHasInputValue": 1,"elementOptions": [{"multiple": "true"},{"readonly": "true"},{"editable": "true"}],"transformers": []},{"elementType": "button","elementOptions": [{"sourceSuffixes": []},{"separator": ""},{"cssClasses": "col-xs-6"},{"onClick": "removeAllOptions(this)"},{"getStringKey": "Gen_Remove_All"}],"transformers": []},{"elementType": "button","elementOptions": [{"sourceSuffixes": []},{"separator": ""},{"cssClasses": "col-xs-6"},{"onClick": "removeFromList(this)"},{"getStringKey": "Gen_Remove_Last"}],"transformers": []}]}""",
|
||||
"""{"dataType": "array","elements": [{"elementType": "input","elementOptions": [{"placeholder": "192.168.1.0/24 --interface=eth1"},{"suffix": "_in"},{"cssClasses": "col-sm-10"},{"prefillValue": "null"}],"transformers": []},{"elementType": "button","elementOptions": [{"sourceSuffixes": ["_in"]},{"separator": ""},{"cssClasses": "col-xs-12"},{"onClick": "addList(this, false)"},{"getStringKey": "Gen_Add"}],"transformers": []},{"elementType": "select","elementHasInputValue": 1,"elementOptions": [{"multiple": "true"},{"readonly": "true"},{"editable": "true"}],"transformers": []},{"elementType": "button","elementOptions": [{"sourceSuffixes": []},{"separator": ""},{"cssClasses": "col-xs-6"},{"onClick": "removeAllOptions(this)"},{"getStringKey": "Gen_Remove_All"}],"transformers": []},{"elementType": "button","elementOptions": [{"sourceSuffixes": []},{"separator": ""},{"cssClasses": "col-xs-6"},{"onClick": "removeFromList(this)"},{"getStringKey": "Gen_Remove_Last"}],"transformers": []}]}""", # noqa: E501 - inline JSON
|
||||
"[]",
|
||||
"General",
|
||||
)
|
||||
@@ -356,7 +355,7 @@ def importConfigs(pm, db, all_plugins):
|
||||
],
|
||||
c_d,
|
||||
"Network device types",
|
||||
'{"dataType":"array","elements":[{"elementType":"input","elementOptions":[{"placeholder":"Enter value"},{"suffix":"_in"},{"cssClasses":"col-sm-10"},{"prefillValue":"null"}],"transformers":[]},{"elementType":"button","elementOptions":[{"sourceSuffixes":["_in"]},{"separator":""},{"cssClasses":"col-xs-12"},{"onClick":"addList(this,false)"},{"getStringKey":"Gen_Add"}],"transformers":[]},{"elementType":"select", "elementHasInputValue":1,"elementOptions":[{"multiple":"true"},{"readonly":"true"},{"editable":"true"}],"transformers":[]},{"elementType":"button","elementOptions":[{"sourceSuffixes":[]},{"separator":""},{"cssClasses":"col-xs-6"},{"onClick":"removeAllOptions(this)"},{"getStringKey":"Gen_Remove_All"}],"transformers":[]},{"elementType":"button","elementOptions":[{"sourceSuffixes":[]},{"separator":""},{"cssClasses":"col-xs-6"},{"onClick":"removeFromList(this)"},{"getStringKey":"Gen_Remove_Last"}],"transformers":[]}]}',
|
||||
'{"dataType":"array","elements":[{"elementType":"input","elementOptions":[{"placeholder":"Enter value"},{"suffix":"_in"},{"cssClasses":"col-sm-10"},{"prefillValue":"null"}],"transformers":[]},{"elementType":"button","elementOptions":[{"sourceSuffixes":["_in"]},{"separator":""},{"cssClasses":"col-xs-12"},{"onClick":"addList(this,false)"},{"getStringKey":"Gen_Add"}],"transformers":[]},{"elementType":"select", "elementHasInputValue":1,"elementOptions":[{"multiple":"true"},{"readonly":"true"},{"editable":"true"}],"transformers":[]},{"elementType":"button","elementOptions":[{"sourceSuffixes":[]},{"separator":""},{"cssClasses":"col-xs-6"},{"onClick":"removeAllOptions(this)"},{"getStringKey":"Gen_Remove_All"}],"transformers":[]},{"elementType":"button","elementOptions":[{"sourceSuffixes":[]},{"separator":""},{"cssClasses":"col-xs-6"},{"onClick":"removeFromList(this)"},{"getStringKey":"Gen_Remove_Last"}],"transformers":[]}]}', # noqa: E501 - inline JSON
|
||||
"[]",
|
||||
"General",
|
||||
)
|
||||
@@ -374,7 +373,7 @@ def importConfigs(pm, db, all_plugins):
|
||||
"t_" + generate_random_string(20),
|
||||
c_d,
|
||||
"API token",
|
||||
'{"dataType": "string","elements": [{"elementType": "input","elementHasInputValue": 1,"elementOptions": [{ "cssClasses": "col-xs-12" }],"transformers": []},{"elementType": "button","elementOptions": [{ "getStringKey": "Gen_Generate" },{ "customParams": "API_TOKEN" },{ "onClick": "generateApiToken(this, 20)" },{ "cssClasses": "col-xs-12" }],"transformers": []}]}',
|
||||
'{"dataType": "string","elements": [{"elementType": "input","elementHasInputValue": 1,"elementOptions": [{ "cssClasses": "col-xs-12" }],"transformers": []},{"elementType": "button","elementOptions": [{ "getStringKey": "Gen_Generate" },{ "customParams": "API_TOKEN" },{ "onClick": "generateApiToken(this, 20)" },{ "cssClasses": "col-xs-12" }],"transformers": []}]}', # noqa: E501 - inline JSON
|
||||
"[]",
|
||||
"General",
|
||||
)
|
||||
@@ -386,7 +385,7 @@ def importConfigs(pm, db, all_plugins):
|
||||
c_d,
|
||||
"Language Interface",
|
||||
'{"dataType":"string", "elements": [{"elementType" : "select", "elementOptions" : [] ,"transformers": []}]}',
|
||||
"['English (en_us)', 'Arabic (ar_ar)', 'Catalan (ca_ca)', 'Czech (cs_cz)', 'German (de_de)', 'Spanish (es_es)', 'Farsi (fa_fa)', 'French (fr_fr)', 'Italian (it_it)', 'Norwegian (nb_no)', 'Polish (pl_pl)', 'Portuguese (pt_br)', 'Portuguese (pt_pt)', 'Russian (ru_ru)', 'Swedish (sv_sv)', 'Turkish (tr_tr)', 'Ukrainian (uk_ua)', 'Chinese (zh_cn)']",
|
||||
"['English (en_us)', 'Arabic (ar_ar)', 'Catalan (ca_ca)', 'Czech (cs_cz)', 'German (de_de)', 'Spanish (es_es)', 'Farsi (fa_fa)', 'French (fr_fr)', 'Italian (it_it)', 'Japanese (ja_jp)', 'Norwegian (nb_no)', 'Polish (pl_pl)', 'Portuguese (pt_br)', 'Portuguese (pt_pt)', 'Russian (ru_ru)', 'Swedish (sv_sv)', 'Turkish (tr_tr)', 'Ukrainian (uk_ua)', 'Chinese (zh_cn)']", # noqa: E501 - inline JSON
|
||||
"UI",
|
||||
)
|
||||
|
||||
@@ -483,9 +482,7 @@ def importConfigs(pm, db, all_plugins):
|
||||
|
||||
# only include loaded plugins, and the ones that are enabled
|
||||
if (
|
||||
pref in conf.LOADED_PLUGINS
|
||||
or plugin_run != "disabled"
|
||||
or plugin_run is None
|
||||
pref in conf.LOADED_PLUGINS or plugin_run != "disabled" or plugin_run is None
|
||||
):
|
||||
print_plugin_info(plugin, ["display_name", "description"])
|
||||
|
||||
@@ -524,9 +521,7 @@ def importConfigs(pm, db, all_plugins):
|
||||
if "popupForm" in option:
|
||||
for popup_entry in option["popupForm"]:
|
||||
popup_pref = (
|
||||
key
|
||||
+ "_popupform_"
|
||||
+ popup_entry.get("function", "")
|
||||
key + "_popupform_" + popup_entry.get("function", "")
|
||||
)
|
||||
stringSqlParams = collect_lang_strings(
|
||||
popup_entry, popup_pref, stringSqlParams
|
||||
@@ -606,7 +601,7 @@ def importConfigs(pm, db, all_plugins):
|
||||
# Loop through settings_override dictionary
|
||||
for setting_name, value in settings_override.items():
|
||||
# Ensure the value is treated as a string and passed directly
|
||||
if isinstance(value, str) == False:
|
||||
if isinstance(value, str) is False:
|
||||
value = str(value)
|
||||
|
||||
# Log the value being passed
|
||||
@@ -669,23 +664,33 @@ def importConfigs(pm, db, all_plugins):
|
||||
|
||||
# -----------------
|
||||
# HANDLE APP was upgraded message - clear cache
|
||||
|
||||
|
||||
# Check if app was upgraded
|
||||
|
||||
|
||||
buildTimestamp, new_version = getBuildTimeStampAndVersion()
|
||||
prev_version = conf.VERSION if conf.VERSION != '' else "unknown"
|
||||
|
||||
|
||||
mylog('debug', [f"[Config] buildTimestamp | prev_version | .VERSION file: '{buildTimestamp}|{prev_version}|{new_version}'"])
|
||||
|
||||
|
||||
if str(prev_version) != str(new_version):
|
||||
|
||||
mylog('none', ['[Config] App upgraded 🚀'])
|
||||
|
||||
|
||||
mylog('none', ['[Config] App upgraded 🚀'])
|
||||
|
||||
# ccd(key, default, config_dir, name, inputtype, options, group, events=None, desc="", setJsonMetadata=None, overrideTemplate=None, forceDefault=False)
|
||||
ccd('VERSION', new_version , c_d, '_KEEP_', '_KEEP_', '_KEEP_', '_KEEP_', None, "_KEEP_", None, None, True)
|
||||
|
||||
write_notification(f'[Upgrade] : App upgraded from <code>{prev_version}</code> to <code>{new_version}</code> 🚀 Please clear the cache: <ol> <li>Click OK below</li> <li>Clear the browser cache (shift + browser refresh button)</li> <li> Clear app cache with the <i class="fa-solid fa-rotate"></i> (reload) button in the header</li><li>Go to Settings and click Save</li> </ol> Check out new features and what has changed in the <a href="https://github.com/jokob-sk/NetAlertX/releases" target="_blank">📓 release notes</a>.', 'interrupt', timeNowDB())
|
||||
|
||||
|
||||
write_notification(
|
||||
f"""[Upgrade]: App upgraded from <code>{prev_version}</code> to \
|
||||
<code>{new_version}</code> 🚀 Please clear the cache: \
|
||||
<ol> <li>Click OK below</li> \
|
||||
<li>Clear the browser cache (shift + browser refresh button)</li> \
|
||||
<li> Clear app cache with the <i class="fa-solid fa-rotate"></i> (reload) button in the header</li>\
|
||||
<li>Go to Settings and click Save</li> </ol>\
|
||||
Check out new features and what has changed in the \
|
||||
<a href="https://github.com/jokob-sk/NetAlertX/releases" target="_blank">📓 release notes</a>.""",
|
||||
'interrupt',
|
||||
timeNowDB()
|
||||
)
|
||||
|
||||
# -----------------
|
||||
# Initialization finished, update DB and API endpoints
|
||||
@@ -717,13 +722,13 @@ def importConfigs(pm, db, all_plugins):
|
||||
# settingsImported = None (timestamp),
|
||||
# showSpinner = False (1/0),
|
||||
# graphQLServerStarted = 1 (1/0))
|
||||
updateState("Config imported", conf.lastImportedConfFile, conf.lastImportedConfFile, False, 1, None, None, new_version)
|
||||
|
||||
updateState("Config imported", conf.lastImportedConfFile, conf.lastImportedConfFile, False, 1, None, None, new_version)
|
||||
|
||||
msg = '[Config] Imported new settings config'
|
||||
mylog('minimal', msg)
|
||||
|
||||
|
||||
# front end app log loggging
|
||||
write_notification(msg, 'info', timeNowDB())
|
||||
write_notification(msg, 'info', timeNowDB())
|
||||
|
||||
return pm, all_plugins, True
|
||||
|
||||
@@ -801,8 +806,6 @@ def renameSettings(config_file):
|
||||
str(config_file) + "_temp", str(config_file)
|
||||
) # Convert config_file to a string
|
||||
|
||||
# ensure correct ownership
|
||||
fixPermissions()
|
||||
else:
|
||||
mylog(
|
||||
"debug", "[Config] No old setting names found in the file. No changes made."
|
||||
|
||||
@@ -1,19 +1,14 @@
|
||||
import sys
|
||||
import io
|
||||
import datetime
|
||||
# import datetime
|
||||
import threading
|
||||
import queue
|
||||
import logging
|
||||
from zoneinfo import ZoneInfo
|
||||
|
||||
# Register NetAlertX directories
|
||||
INSTALL_PATH="/app"
|
||||
|
||||
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
|
||||
# from zoneinfo import ZoneInfo
|
||||
|
||||
# NetAlertX imports
|
||||
import conf
|
||||
from const import *
|
||||
from const import logPath
|
||||
from utils.datetime_utils import timeNowTZ
|
||||
|
||||
|
||||
|
||||
@@ -11,13 +11,9 @@ from flask import jsonify
|
||||
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
|
||||
sys.path.extend([f"{INSTALL_PATH}/server"])
|
||||
|
||||
from const import apiPath
|
||||
from logger import mylog
|
||||
|
||||
import conf
|
||||
from const import applicationPath, logPath, apiPath, confFileName, reportTemplatesPath
|
||||
from logger import mylog
|
||||
from utils.datetime_utils import timeNowDB
|
||||
from const import apiPath # noqa: E402 [flake8 lint suppression]
|
||||
from logger import mylog # noqa: E402 [flake8 lint suppression]
|
||||
from utils.datetime_utils import timeNowDB # noqa: E402 [flake8 lint suppression]
|
||||
|
||||
|
||||
NOTIFICATION_API_FILE = apiPath + 'user_notifications.json'
|
||||
@@ -38,7 +34,7 @@ def write_notification(content, level="alert", timestamp=None):
|
||||
None
|
||||
"""
|
||||
if timestamp is None:
|
||||
timestamp = timeNowDB()
|
||||
timestamp = timeNowDB()
|
||||
|
||||
# Generate GUID
|
||||
guid = str(uuid.uuid4())
|
||||
|
||||
@@ -18,12 +18,12 @@ import sys
|
||||
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
|
||||
sys.path.extend([f"{INSTALL_PATH}/server"])
|
||||
|
||||
from helper import (
|
||||
from helper import ( # noqa: E402 [flake8 lint suppression]
|
||||
get_setting_value,
|
||||
)
|
||||
from logger import mylog
|
||||
from db.sql_safe_builder import create_safe_condition_builder
|
||||
from utils.datetime_utils import get_timezone_offset
|
||||
from logger import mylog # noqa: E402 [flake8 lint suppression]
|
||||
from db.sql_safe_builder import create_safe_condition_builder # noqa: E402 [flake8 lint suppression]
|
||||
from utils.datetime_utils import get_timezone_offset # noqa: E402 [flake8 lint suppression]
|
||||
|
||||
# ===============================================================================
|
||||
# REPORTING
|
||||
@@ -56,14 +56,14 @@ def get_notifications(db):
|
||||
WHERE eve_PendingAlertEmail = 1 AND eve_EventType not in ('Device Down', 'Down Reconnected', 'New Device' ) AND eve_MAC IN
|
||||
(
|
||||
SELECT devMac FROM Devices WHERE devAlertEvents = 0
|
||||
)""")
|
||||
)""")
|
||||
|
||||
# Disable down/down reconnected notifications on devices where devAlertDown is disabled
|
||||
sql.execute("""UPDATE Events SET eve_PendingAlertEmail = 0
|
||||
WHERE eve_PendingAlertEmail = 1 AND eve_EventType in ('Device Down', 'Down Reconnected') AND eve_MAC IN
|
||||
(
|
||||
SELECT devMac FROM Devices WHERE devAlertDown = 0
|
||||
)""")
|
||||
)""")
|
||||
|
||||
sections = get_setting_value("NTFPRCS_INCLUDED_SECTIONS")
|
||||
|
||||
@@ -79,20 +79,29 @@ def get_notifications(db):
|
||||
safe_condition, parameters = condition_builder.get_safe_condition_legacy(
|
||||
new_dev_condition_setting
|
||||
)
|
||||
sqlQuery = """SELECT eve_MAC as MAC, eve_DateTime as Datetime, devLastIP as IP, eve_EventType as "Event Type", devName as "Device name", devComments as Comments FROM Events_Devices
|
||||
WHERE eve_PendingAlertEmail = 1
|
||||
sqlQuery = """SELECT
|
||||
eve_MAC as MAC,
|
||||
eve_DateTime as Datetime,
|
||||
devLastIP as IP,
|
||||
eve_EventType as "Event Type",
|
||||
devName as "Device name",
|
||||
devComments as Comments FROM Events_Devices
|
||||
WHERE eve_PendingAlertEmail = 1
|
||||
AND eve_EventType = 'New Device' {}
|
||||
ORDER BY eve_DateTime""".format(safe_condition)
|
||||
except Exception as e:
|
||||
mylog(
|
||||
"verbose",
|
||||
["[Notification] Error building safe condition for new devices: ", e],
|
||||
)
|
||||
ORDER BY eve_DateTime""".format(safe_condition)
|
||||
except (ValueError, KeyError, TypeError) as e:
|
||||
mylog("verbose", ["[Notification] Error building safe condition for new devices: ", e])
|
||||
# Fall back to safe default (no additional conditions)
|
||||
sqlQuery = """SELECT eve_MAC as MAC, eve_DateTime as Datetime, devLastIP as IP, eve_EventType as "Event Type", devName as "Device name", devComments as Comments FROM Events_Devices
|
||||
WHERE eve_PendingAlertEmail = 1
|
||||
sqlQuery = """SELECT
|
||||
eve_MAC as MAC,
|
||||
eve_DateTime as Datetime,
|
||||
devLastIP as IP,
|
||||
eve_EventType as "Event Type",
|
||||
devName as "Device name",
|
||||
devComments as Comments FROM Events_Devices
|
||||
WHERE eve_PendingAlertEmail = 1
|
||||
AND eve_EventType = 'New Device'
|
||||
ORDER BY eve_DateTime"""
|
||||
ORDER BY eve_DateTime"""
|
||||
parameters = {}
|
||||
|
||||
mylog("debug", ["[Notification] new_devices SQL query: ", sqlQuery])
|
||||
@@ -114,17 +123,17 @@ def get_notifications(db):
|
||||
minutes = int(get_setting_value("NTFPRCS_alert_down_time") or 0)
|
||||
tz_offset = get_timezone_offset()
|
||||
sqlQuery = f"""
|
||||
SELECT devName, eve_MAC, devVendor, eve_IP, eve_DateTime, eve_EventType
|
||||
SELECT devName, eve_MAC, devVendor, eve_IP, eve_DateTime, eve_EventType
|
||||
FROM Events_Devices AS down_events
|
||||
WHERE eve_PendingAlertEmail = 1
|
||||
AND down_events.eve_EventType = 'Device Down'
|
||||
WHERE eve_PendingAlertEmail = 1
|
||||
AND down_events.eve_EventType = 'Device Down'
|
||||
AND eve_DateTime < datetime('now', '-{minutes} minutes', '{tz_offset}')
|
||||
AND NOT EXISTS (
|
||||
SELECT 1
|
||||
FROM Events AS connected_events
|
||||
WHERE connected_events.eve_MAC = down_events.eve_MAC
|
||||
AND connected_events.eve_EventType = 'Connected'
|
||||
AND connected_events.eve_DateTime > down_events.eve_DateTime
|
||||
AND connected_events.eve_DateTime > down_events.eve_DateTime
|
||||
)
|
||||
ORDER BY down_events.eve_DateTime;
|
||||
"""
|
||||
@@ -138,10 +147,7 @@ def get_notifications(db):
|
||||
}
|
||||
json_down_devices = json_obj.json["data"]
|
||||
|
||||
mylog(
|
||||
"debug",
|
||||
["[Notification] json_down_devices: ", json.dumps(json_down_devices)],
|
||||
)
|
||||
mylog("debug", f"[Notification] json_down_devices: {json.dumps(json_down_devices)}")
|
||||
|
||||
if "down_reconnected" in sections:
|
||||
# Compose Reconnected Down Section
|
||||
@@ -163,13 +169,7 @@ def get_notifications(db):
|
||||
}
|
||||
json_down_reconnected = json_obj.json["data"]
|
||||
|
||||
mylog(
|
||||
"debug",
|
||||
[
|
||||
"[Notification] json_down_reconnected: ",
|
||||
json.dumps(json_down_reconnected),
|
||||
],
|
||||
)
|
||||
mylog("debug", f"[Notification] json_down_reconnected: {json.dumps(json_down_reconnected)}")
|
||||
|
||||
if "events" in sections:
|
||||
# Compose Events Section (no empty lines in SQL queries!)
|
||||
@@ -181,20 +181,29 @@ def get_notifications(db):
|
||||
safe_condition, parameters = condition_builder.get_safe_condition_legacy(
|
||||
event_condition_setting
|
||||
)
|
||||
sqlQuery = """SELECT eve_MAC as MAC, eve_DateTime as Datetime, devLastIP as IP, eve_EventType as "Event Type", devName as "Device name", devComments as Comments FROM Events_Devices
|
||||
WHERE eve_PendingAlertEmail = 1
|
||||
sqlQuery = """SELECT
|
||||
eve_MAC as MAC,
|
||||
eve_DateTime as Datetime,
|
||||
devLastIP as IP,
|
||||
eve_EventType as "Event Type",
|
||||
devName as "Device name",
|
||||
devComments as Comments FROM Events_Devices
|
||||
WHERE eve_PendingAlertEmail = 1
|
||||
AND eve_EventType IN ('Connected', 'Down Reconnected', 'Disconnected','IP Changed') {}
|
||||
ORDER BY eve_DateTime""".format(safe_condition)
|
||||
ORDER BY eve_DateTime""".format(safe_condition)
|
||||
except Exception as e:
|
||||
mylog(
|
||||
"verbose",
|
||||
["[Notification] Error building safe condition for events: ", e],
|
||||
)
|
||||
mylog("verbose", f"[Notification] Error building safe condition for events: {e}")
|
||||
# Fall back to safe default (no additional conditions)
|
||||
sqlQuery = """SELECT eve_MAC as MAC, eve_DateTime as Datetime, devLastIP as IP, eve_EventType as "Event Type", devName as "Device name", devComments as Comments FROM Events_Devices
|
||||
WHERE eve_PendingAlertEmail = 1
|
||||
sqlQuery = """SELECT
|
||||
eve_MAC as MAC,
|
||||
eve_DateTime as Datetime,
|
||||
devLastIP as IP,
|
||||
eve_EventType as "Event Type",
|
||||
devName as "Device name",
|
||||
devComments as Comments FROM Events_Devices
|
||||
WHERE eve_PendingAlertEmail = 1
|
||||
AND eve_EventType IN ('Connected', 'Down Reconnected', 'Disconnected','IP Changed')
|
||||
ORDER BY eve_DateTime"""
|
||||
ORDER BY eve_DateTime"""
|
||||
parameters = {}
|
||||
|
||||
mylog("debug", ["[Notification] events SQL query: ", sqlQuery])
|
||||
@@ -208,7 +217,17 @@ def get_notifications(db):
|
||||
|
||||
if "plugins" in sections:
|
||||
# Compose Plugins Section
|
||||
sqlQuery = """SELECT Plugin, Object_PrimaryId, Object_SecondaryId, DateTimeChanged, Watched_Value1, Watched_Value2, Watched_Value3, Watched_Value4, Status from Plugins_Events"""
|
||||
sqlQuery = """SELECT
|
||||
Plugin,
|
||||
Object_PrimaryId,
|
||||
Object_SecondaryId,
|
||||
DateTimeChanged,
|
||||
Watched_Value1,
|
||||
Watched_Value2,
|
||||
Watched_Value3,
|
||||
Watched_Value4,
|
||||
Status
|
||||
from Plugins_Events"""
|
||||
|
||||
# Get the events as JSON
|
||||
json_obj = db.get_table_as_json(sqlQuery)
|
||||
|
||||
@@ -1,13 +1,12 @@
|
||||
import json
|
||||
import uuid
|
||||
import socket
|
||||
import subprocess
|
||||
from yattag import indent
|
||||
from json2table import convert
|
||||
|
||||
# Register NetAlertX modules
|
||||
import conf
|
||||
from const import applicationPath, logPath, apiPath, reportTemplatesPath
|
||||
from const import logPath, apiPath, reportTemplatesPath
|
||||
from logger import mylog, Logger
|
||||
from helper import (
|
||||
generate_mac_links,
|
||||
@@ -62,11 +61,7 @@ class NotificationInstance:
|
||||
|
||||
# Check if nothing to report, end
|
||||
if (
|
||||
JSON["new_devices"] == []
|
||||
and JSON["down_devices"] == []
|
||||
and JSON["events"] == []
|
||||
and JSON["plugins"] == []
|
||||
and JSON["down_reconnected"] == []
|
||||
JSON["new_devices"] == [] and JSON["down_devices"] == [] and JSON["events"] == [] and JSON["plugins"] == [] and JSON["down_reconnected"] == []
|
||||
):
|
||||
self.HasNotifications = False
|
||||
else:
|
||||
@@ -88,8 +83,6 @@ class NotificationInstance:
|
||||
# else:
|
||||
# mylog('debug', ['[Notification] notiStruc:', json.dumps(notiStruc.__dict__, indent=4)])
|
||||
|
||||
Text = ""
|
||||
HTML = ""
|
||||
template_file_path = reportTemplatesPath + "report_template.html"
|
||||
|
||||
# Open text Template
|
||||
@@ -274,7 +267,7 @@ class NotificationInstance:
|
||||
# Clear the Pending Email flag from all events and devices
|
||||
def clearPendingEmailFlag(self):
|
||||
|
||||
# Clean Pending Alert Events
|
||||
# Clean Pending Alert Events
|
||||
self.db.sql.execute("""
|
||||
UPDATE Devices SET devLastNotification = ?
|
||||
WHERE devMac IN (
|
||||
|
||||
@@ -100,7 +100,7 @@ class UserEventsQueueInstance:
|
||||
if not action or not isinstance(action, str):
|
||||
msg = "[UserEventsQueueInstance] Invalid or missing action"
|
||||
mylog('none', [msg])
|
||||
|
||||
|
||||
return False, msg
|
||||
|
||||
try:
|
||||
@@ -109,15 +109,11 @@ class UserEventsQueueInstance:
|
||||
|
||||
msg = f'[UserEventsQueueInstance] Action "{action}" added to the execution queue.'
|
||||
mylog('minimal', [msg])
|
||||
|
||||
|
||||
return True, msg
|
||||
|
||||
except Exception as e:
|
||||
msg = f"[UserEventsQueueInstance] ERROR Failed to write to {self.log_file}: {e}"
|
||||
mylog('none', [msg])
|
||||
|
||||
|
||||
return False, msg
260 server/plugin.py
@@ -9,12 +9,21 @@ from concurrent.futures import ThreadPoolExecutor, as_completed
|
||||
# Register NetAlertX modules
|
||||
import conf
|
||||
from const import pluginsPath, logPath, applicationPath, reportTemplatesPath
|
||||
from logger import mylog, Logger
|
||||
from helper import get_file_content, write_file, get_setting, get_setting_value
|
||||
from logger import mylog, Logger
|
||||
from helper import get_file_content, get_setting, get_setting_value
|
||||
from utils.datetime_utils import timeNowTZ, timeNowDB
|
||||
from app_state import updateState
|
||||
from api import update_api
|
||||
from utils.plugin_utils import logEventStatusCounts, get_plugin_string, get_plugin_setting_obj, print_plugin_info, list_to_csv, combine_plugin_objects, resolve_wildcards_arr, handle_empty, custom_plugin_decoder, decode_and_rename_files
|
||||
from utils.plugin_utils import (
|
||||
logEventStatusCounts,
|
||||
get_plugin_setting_obj,
|
||||
print_plugin_info,
|
||||
list_to_csv,
|
||||
combine_plugin_objects,
|
||||
resolve_wildcards_arr,
|
||||
handle_empty,
|
||||
decode_and_rename_files
|
||||
)
|
||||
from models.notification_instance import NotificationInstance
|
||||
from messaging.in_app import write_notification
|
||||
from models.user_events_queue_instance import UserEventsQueueInstance
|
||||
@@ -57,13 +66,7 @@ class plugin_manager:
|
||||
# Header
|
||||
updateState("Run: Plugins")
|
||||
|
||||
mylog(
|
||||
"debug",
|
||||
[
|
||||
"[Plugins] Check if any plugins need to be executed on run type: ",
|
||||
runType,
|
||||
],
|
||||
)
|
||||
mylog("debug", f"[Plugins] Check if any plugins need to be executed on run type: {runType}")
|
||||
|
||||
for plugin in self.all_plugins:
|
||||
shouldRun = False
|
||||
@@ -72,7 +75,7 @@ class plugin_manager:
|
||||
# 🔹 Lookup RUN setting from cache instead of calling get_plugin_setting_obj each time
|
||||
run_setting = self._cache["settings"].get(prefix, {}).get("RUN")
|
||||
|
||||
if run_setting != None and run_setting["value"] == runType:
|
||||
if run_setting is not None and run_setting["value"] == runType:
|
||||
if runType != "schedule":
|
||||
shouldRun = True
|
||||
elif runType == "schedule":
|
||||
@@ -91,10 +94,10 @@ class plugin_manager:
|
||||
|
||||
# 🔹 CMD also retrieved from cache
|
||||
cmd_setting = self._cache["settings"].get(prefix, {}).get("CMD")
|
||||
mylog(
|
||||
"debug",
|
||||
["[Plugins] CMD: ", cmd_setting["value"] if cmd_setting else None],
|
||||
)
|
||||
|
||||
print_str = cmd_setting["value"] if cmd_setting else None
|
||||
|
||||
mylog("debug", f"[Plugins] CMD: {print_str}")
|
||||
|
||||
execute_plugin(self.db, self.all_plugins, plugin)
|
||||
|
||||
@@ -130,13 +133,7 @@ class plugin_manager:
|
||||
mylog("debug", ["[check_and_run_user_event] User Execution Queue is empty"])
|
||||
return # Exit early if the log file is empty
|
||||
else:
|
||||
mylog(
|
||||
"debug",
|
||||
[
|
||||
"[check_and_run_user_event] Process User Execution Queue:"
|
||||
+ ", ".join(map(str, lines))
|
||||
],
|
||||
)
|
||||
mylog("debug", "[check_and_run_user_event] Process User Execution Queue:" + ", ".join(map(str, lines)))
|
||||
|
||||
for line in lines:
|
||||
# Extract event name and parameters from the log line
|
||||
@@ -160,15 +157,7 @@ class plugin_manager:
|
||||
update_api(self.db, self.all_plugins, False, param.split(","), True)
|
||||
|
||||
else:
|
||||
mylog(
|
||||
"minimal",
|
||||
[
|
||||
"[check_and_run_user_event] WARNING: Unhandled event in execution queue: ",
|
||||
event,
|
||||
" | ",
|
||||
param,
|
||||
],
|
||||
)
|
||||
mylog("minimal", f"[check_and_run_user_event] WARNING: Unhandled event in execution queue: {event} | {param}")
|
||||
execution_log.finalize_event(
|
||||
event
|
||||
) # Finalize unknown events to remove them
|
||||
@@ -183,9 +172,9 @@ class plugin_manager:
|
||||
|
||||
# -------------------------------------------------------------------------------
|
||||
def handle_run(self, runType):
|
||||
|
||||
|
||||
mylog('minimal', ['[', timeNowDB(), '] START Run: ', runType])
|
||||
|
||||
|
||||
# run the plugin
|
||||
for plugin in self.all_plugins:
|
||||
if plugin["unique_prefix"] == runType:
|
||||
@@ -201,7 +190,7 @@ class plugin_manager:
|
||||
pluginsStates={pluginName: current_plugin_state.get(pluginName, {})}
|
||||
)
|
||||
|
||||
mylog('minimal', ['[', timeNowDB(), '] END Run: ', runType])
|
||||
mylog('minimal', ['[', timeNowDB(), '] END Run: ', runType])
|
||||
|
||||
return
|
||||
|
||||
@@ -210,7 +199,7 @@ class plugin_manager:
|
||||
mylog("minimal", ["[", timeNowTZ(), "] [Test] START Test: ", runType])
|
||||
|
||||
mylog('minimal', ['[', timeNowDB(), '] [Test] START Test: ', runType])
|
||||
|
||||
|
||||
# Prepare test samples
|
||||
sample_json = json.loads(
|
||||
get_file_content(reportTemplatesPath + "webhook_json_sample.json")
|
||||
@@ -312,7 +301,7 @@ class plugin_param:
|
||||
if param["type"] == "setting":
|
||||
inputValue = get_setting(param["value"])
|
||||
|
||||
if inputValue != None:
|
||||
if inputValue is not None:
|
||||
setVal = inputValue["setValue"] # setting value
|
||||
setTyp = inputValue["setType"] # setting type
|
||||
|
||||
@@ -337,9 +326,7 @@ class plugin_param:
|
||||
resolved = list_to_csv(setVal)
|
||||
|
||||
else:
|
||||
mylog(
|
||||
"none", ["[Plugins] ⚠ ERROR: Parameter probably not converted."]
|
||||
)
|
||||
mylog("none", "[Plugins] ⚠ ERROR: Parameter probably not converted.")
|
||||
return json.dumps(setVal)
|
||||
|
||||
# Get SQL result
|
||||
@@ -390,15 +377,10 @@ def run_plugin(command, set_RUN_TIMEOUT, plugin):
|
||||
)
|
||||
except subprocess.CalledProcessError as e:
|
||||
mylog("none", [e.output])
|
||||
mylog("none", ["[Plugins] ⚠ ERROR - enable LOG_LEVEL=debug and check logs"])
|
||||
mylog("none", "[Plugins] ⚠ ERROR - enable LOG_LEVEL=debug and check logs")
|
||||
return None
|
||||
except subprocess.TimeoutExpired:
|
||||
mylog(
|
||||
"none",
|
||||
[
|
||||
f"[Plugins] ⚠ ERROR - TIMEOUT - the plugin {plugin['unique_prefix']} forcefully terminated as timeout reached. Increase TIMEOUT setting and scan interval."
|
||||
],
|
||||
)
|
||||
mylog("none", f"[Plugins] ⚠ ERROR - TIMEOUT - the plugin {plugin['unique_prefix']} forcefully terminated as timeout reached. Increase TIMEOUT setting and scan interval.")
|
||||
return None
|
||||
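A minimal, self-contained sketch of the two failure paths being logged here, assuming the resolved plugin command is run through subprocess.check_output (the command and timeout below are stand-ins):

import subprocess

command = ["sh", "-c", "sleep 5; echo done"]   # stand-in for a resolved plugin command
set_RUN_TIMEOUT = 2

try:
    # check_output raises CalledProcessError on a non-zero exit code and
    # TimeoutExpired when the child process runs past the timeout.
    output = subprocess.check_output(command, text=True, timeout=set_RUN_TIMEOUT)
    print(output)
except subprocess.CalledProcessError as e:
    print("plugin failed:", e.output)
except subprocess.TimeoutExpired:
    print(f"plugin terminated after {set_RUN_TIMEOUT}s, increase the TIMEOUT setting")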
|
||||
|
||||
@@ -411,11 +393,11 @@ def execute_plugin(db, all_plugins, plugin):
|
||||
set = get_plugin_setting_obj(plugin, "CMD")
|
||||
|
||||
# handle missing "function":"CMD" setting
|
||||
if set == None:
|
||||
if set is None:
|
||||
return
|
||||
|
||||
set_CMD = set["value"]
|
||||
|
||||
|
||||
# Replace hardcoded /app paths with environment-aware path
|
||||
if "/app/front/plugins" in set_CMD:
|
||||
set_CMD = set_CMD.replace("/app/front/plugins", str(pluginsPath))
|
||||
@@ -441,13 +423,8 @@ def execute_plugin(db, all_plugins, plugin):
|
||||
for param in plugin["params"]:
|
||||
tempParam = plugin_param(param, plugin, db)
|
||||
|
||||
if tempParam.resolved == None:
|
||||
mylog(
|
||||
"none",
|
||||
[
|
||||
f'[Plugins] The parameter "name":"{tempParam.name}" for "value": {tempParam.value} was resolved as None'
|
||||
],
|
||||
)
|
||||
if tempParam.resolved is None:
|
||||
mylog("none", f'[Plugins] The parameter "name":"{tempParam.name}" for "value": {tempParam.value} was resolved as None')
|
||||
|
||||
else:
|
||||
# params.append( [param["name"], resolved] )
|
||||
@@ -456,14 +433,9 @@ def execute_plugin(db, all_plugins, plugin):
|
||||
if tempParam.multiplyTimeout:
|
||||
set_RUN_TIMEOUT = set_RUN_TIMEOUT * tempParam.paramValuesCount
|
||||
|
||||
mylog(
|
||||
"debug",
|
||||
[
|
||||
f'[Plugins] The parameter "name":"{param["name"]}" will multiply the timeout {tempParam.paramValuesCount} times. Total timeout: {set_RUN_TIMEOUT}s'
|
||||
],
|
||||
)
|
||||
mylog("debug", f'[Plugins] The parameter "name":"{param["name"]}" will multiply timeout {tempParam.paramValuesCount}x. Total timeout: {set_RUN_TIMEOUT}s')
|
||||
|
||||
mylog("debug", ["[Plugins] Timeout: ", set_RUN_TIMEOUT])
|
||||
mylog("debug", f"[Plugins] Timeout: {set_RUN_TIMEOUT}")
|
||||
|
||||
# build SQL query parameters to insert into the DB
|
||||
sqlParams = []
|
||||
@@ -475,8 +447,8 @@ def execute_plugin(db, all_plugins, plugin):
|
||||
command = resolve_wildcards_arr(set_CMD.split(), params)
|
||||
|
||||
# Execute command
|
||||
mylog("verbose", ["[Plugins] Executing: ", set_CMD])
|
||||
mylog("debug", ["[Plugins] Resolved : ", command])
|
||||
mylog("verbose", f"[Plugins] Executing: {set_CMD}")
|
||||
mylog("debug", f"[Plugins] Resolved : {command}")
|
||||
|
||||
# Using ThreadPoolExecutor to handle concurrent subprocesses
|
||||
with ThreadPoolExecutor(max_workers=5) as executor:
|
||||
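A compact sketch of the executor pattern used above, with trivial echo commands standing in for the plugin invocations (worker count and commands are illustrative):

from concurrent.futures import ThreadPoolExecutor, as_completed
import subprocess

commands = [["echo", "scan-1"], ["echo", "scan-2"], ["echo", "scan-3"]]

with ThreadPoolExecutor(max_workers=5) as executor:
    # Submit one subprocess per command; each future completes independently.
    futures = {executor.submit(subprocess.check_output, cmd, text=True): cmd
               for cmd in commands}
    for future in as_completed(futures):
        cmd = futures[future]
        print(cmd, "->", future.result().strip())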
@@ -521,12 +493,7 @@ def execute_plugin(db, all_plugins, plugin):
|
||||
columns = line.split("|")
|
||||
# There have to be 9 or 13 columns
|
||||
if len(columns) not in [9, 13]:
|
||||
mylog(
|
||||
"none",
|
||||
[
|
||||
f"[Plugins] Wrong number of input values, must be 9 or 13, got {len(columns)} from: {line}"
|
||||
],
|
||||
)
|
||||
mylog("none", f"[Plugins] Wrong number of input values, must be 9 or 13, got {len(columns)} from: {line}")
|
||||
continue # Skip lines with incorrect number of columns
|
||||
|
||||
# Common part of the SQL parameters
|
||||
@@ -581,9 +548,7 @@ def execute_plugin(db, all_plugins, plugin):
|
||||
# keep current instance log file, delete all from other nodes
|
||||
if filename != "last_result.log" and os.path.exists(full_path):
|
||||
os.remove(full_path) # DEBUG:TODO uncomment 🐛
|
||||
mylog(
|
||||
"verbose", [f"[Plugins] Processed and deleted file: {full_path} "]
|
||||
)
|
||||
mylog("verbose", f"[Plugins] Processed and deleted file: {full_path} ")
|
||||
|
||||
# app-db-query
|
||||
if plugin["data_source"] == "app-db-query":
|
||||
@@ -591,7 +556,7 @@ def execute_plugin(db, all_plugins, plugin):
|
||||
q = set_CMD.replace("{s-quote}", "'")
|
||||
|
||||
# Execute command
|
||||
mylog("verbose", ["[Plugins] Executing: ", q])
|
||||
mylog("verbose", f"[Plugins] Executing: {q}")
|
||||
|
||||
# set_CMD should contain a SQL query
|
||||
arr = db.get_sql_array(q)
|
||||
@@ -650,7 +615,7 @@ def execute_plugin(db, all_plugins, plugin):
|
||||
# Append the final parameters to sqlParams
|
||||
sqlParams.append(tuple(base_params))
|
||||
else:
|
||||
mylog("none", ["[Plugins] Skipped invalid sql result"])
|
||||
mylog("none", "[Plugins] Skipped invalid sql result")
|
||||
|
||||
# app-db-query
|
||||
if plugin["data_source"] == "sqlite-db-query":
|
||||
@@ -659,19 +624,14 @@ def execute_plugin(db, all_plugins, plugin):
|
||||
q = set_CMD.replace("{s-quote}", "'")
|
||||
|
||||
# Execute command
|
||||
mylog("verbose", ["[Plugins] Executing: ", q])
|
||||
mylog("verbose", f"[Plugins] Executing: {q}")
|
||||
|
||||
# ------- necessary settings check --------
|
||||
set = get_plugin_setting_obj(plugin, "DB_PATH")
|
||||
|
||||
# handle missing "function":"DB_PATH" setting
|
||||
if set == None:
|
||||
mylog(
|
||||
"none",
|
||||
[
|
||||
"[Plugins] ⚠ ERROR: DB_PATH setting for plugin type sqlite-db-query missing."
|
||||
],
|
||||
)
|
||||
if set is None:
|
||||
mylog("none", "[Plugins] ⚠ ERROR: DB_PATH setting for plugin type sqlite-db-query missing.")
|
||||
return
|
||||
|
||||
fullSqlitePath = set["value"]
|
||||
@@ -679,25 +639,14 @@ def execute_plugin(db, all_plugins, plugin):
|
||||
# try attaching the sqlite DB
|
||||
try:
|
||||
sql.execute(
|
||||
"ATTACH DATABASE '"
|
||||
+ fullSqlitePath
|
||||
+ "' AS EXTERNAL_"
|
||||
+ plugin["unique_prefix"]
|
||||
"ATTACH DATABASE '" + fullSqlitePath + "' AS EXTERNAL_" + plugin["unique_prefix"]
|
||||
)
|
||||
arr = db.get_sql_array(q)
|
||||
sql.execute("DETACH DATABASE EXTERNAL_" + plugin["unique_prefix"])
|
||||
|
||||
except sqlite3.Error as e:
|
||||
mylog(
|
||||
"none",
|
||||
[
|
||||
f"[Plugins] ⚠ ERROR: DB_PATH setting ({fullSqlitePath}) for plugin {plugin['unique_prefix']}. Did you mount it correctly?"
|
||||
],
|
||||
)
|
||||
mylog(
|
||||
"none",
|
||||
["[Plugins] ⚠ ERROR: ATTACH DATABASE failed with SQL ERROR: ", e],
|
||||
)
|
||||
mylog("none", f"[Plugins] ⚠ ERROR: DB_PATH setting ({fullSqlitePath}) for plugin {plugin['unique_prefix']}. Did you mount it correctly?")
|
||||
mylog("none", f"[Plugins] ⚠ ERROR: ATTACH DATABASE failed with SQL ERROR: {e}")
|
||||
return
|
||||
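A minimal sketch of attaching an external SQLite file, querying it, and detaching again; the path, prefix and table are made up. Note that sqlite3 accepts the file path as a bound parameter in ATTACH DATABASE, while the schema alias still has to be interpolated:

import sqlite3

con = sqlite3.connect(":memory:")
prefix = "PIHOLE"                      # hypothetical plugin prefix
fullSqlitePath = "/tmp/external.db"    # hypothetical DB_PATH value

# Create a throwaway external DB so the sketch runs end to end.
ext = sqlite3.connect(fullSqlitePath)
ext.execute("CREATE TABLE IF NOT EXISTS leases (mac TEXT, ip TEXT)")
ext.commit()
ext.close()

try:
    # File path bound as a parameter; alias interpolated into the statement.
    con.execute(f"ATTACH DATABASE ? AS EXTERNAL_{prefix}", (fullSqlitePath,))
    rows = con.execute(f"SELECT * FROM EXTERNAL_{prefix}.leases").fetchall()
    con.execute(f"DETACH DATABASE EXTERNAL_{prefix}")
    print(rows)
except sqlite3.Error as e:
    print("ATTACH DATABASE failed:", e)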
|
||||
for row in arr:
|
||||
@@ -748,24 +697,14 @@ def execute_plugin(db, all_plugins, plugin):
|
||||
# Append the final parameters to sqlParams
|
||||
sqlParams.append(tuple(base_params))
|
||||
else:
|
||||
mylog("none", ["[Plugins] Skipped invalid sql result"])
|
||||
mylog("none", "[Plugins] Skipped invalid sql result")
|
||||
|
||||
# check if the subprocess / SQL query failed / there was no valid output
|
||||
if len(sqlParams) == 0:
|
||||
mylog(
|
||||
"none",
|
||||
[
|
||||
f'[Plugins] No output received from the plugin "{plugin["unique_prefix"]}"'
|
||||
],
|
||||
)
|
||||
mylog("none", f'[Plugins] No output received from the plugin "{plugin["unique_prefix"]}"')
|
||||
|
||||
else:
|
||||
mylog(
|
||||
"verbose",
|
||||
[
|
||||
f"[Plugins] SUCCESS for {plugin['unique_prefix']} received {len(sqlParams)} entries"
|
||||
],
|
||||
)
|
||||
mylog("verbose", f"[Plugins] SUCCESS for {plugin['unique_prefix']} received {len(sqlParams)} entries")
|
||||
# mylog('debug', ['[Plugins] sqlParam entries: ', sqlParams])
|
||||
|
||||
# create objects
|
||||
@@ -782,12 +721,7 @@ def execute_plugin(db, all_plugins, plugin):
|
||||
# check if we need to update devices api endpoint as well to prevent long user waits on Loading...
|
||||
userUpdatedDevices = UserEventsQueueInstance().has_update_devices()
|
||||
|
||||
mylog(
|
||||
"verbose",
|
||||
[
|
||||
f"[Plugins] Should I update API (userUpdatedDevices): {userUpdatedDevices}"
|
||||
],
|
||||
)
|
||||
mylog("verbose", f"[Plugins] Should I update API (userUpdatedDevices): {userUpdatedDevices}")
|
||||
|
||||
if userUpdatedDevices:
|
||||
endpoints += ["devices"]
|
||||
@@ -807,7 +741,7 @@ def process_plugin_events(db, plugin, plugEventsArr):
|
||||
|
||||
pluginPref = plugin["unique_prefix"]
|
||||
|
||||
mylog("verbose", ["[Plugins] Processing : ", pluginPref])
|
||||
mylog("verbose", f"[Plugins] Processing : {pluginPref}")
|
||||
|
||||
try:
|
||||
# Begin a transaction
|
||||
@@ -827,20 +761,8 @@ def process_plugin_events(db, plugin, plugEventsArr):
|
||||
for eve in plugEventsArr:
|
||||
pluginEvents.append(plugin_object_class(plugin, eve))
|
||||
|
||||
mylog(
|
||||
"debug",
|
||||
[
|
||||
"[Plugins] Existing objects from Plugins_Objects: ",
|
||||
len(pluginObjects),
|
||||
],
|
||||
)
|
||||
mylog(
|
||||
"debug",
|
||||
[
|
||||
"[Plugins] Logged events from the plugin run : ",
|
||||
len(pluginEvents),
|
||||
],
|
||||
)
|
||||
mylog("debug", f"[Plugins] Existing objects from Plugins_Objects: {len(pluginObjects)}")
|
||||
mylog("debug", f"[Plugins] Logged events from the plugin run : {len(pluginEvents)}")
|
||||
|
||||
# Loop thru all current events and update the status to "exists" if the event matches an existing object
|
||||
index = 0
|
||||
@@ -857,8 +779,7 @@ def process_plugin_events(db, plugin, plugEventsArr):
|
||||
if tmpObjFromEvent.status == "exists":
|
||||
# compare hash of the changed watched columns for uniqueness - make sure you compare the values with the same idsHash before checking watchedHash
|
||||
if any(
|
||||
x.idsHash == tmpObjFromEvent.idsHash
|
||||
and x.watchedHash != tmpObjFromEvent.watchedHash
|
||||
x.idsHash == tmpObjFromEvent.idsHash and x.watchedHash != tmpObjFromEvent.watchedHash
|
||||
for x in pluginObjects
|
||||
):
|
||||
pluginEvents[index].status = "watched-changed"
|
||||
@@ -879,7 +800,7 @@ def process_plugin_events(db, plugin, plugEventsArr):
|
||||
# if wasn't missing before, mark as changed
|
||||
if tmpObj.status != "missing-in-last-scan":
|
||||
tmpObj.changed = timeNowDB()
|
||||
tmpObj.status = "missing-in-last-scan"
|
||||
tmpObj.status = "missing-in-last-scan"
|
||||
# mylog('debug', [f'[Plugins] Missing from last scan (PrimaryID | SecondaryID): {tmpObj.primaryId} | {tmpObj.secondaryId}'])
|
||||
|
||||
# Merge existing plugin objects with newly discovered ones and update existing ones with new values
|
||||
@@ -955,25 +876,17 @@ def process_plugin_events(db, plugin, plugEventsArr):
|
||||
# combine all DB insert and update events into one for history
|
||||
history_to_insert.append(values)
|
||||
|
||||
mylog("debug", ["[Plugins] pluginEvents count: ", len(pluginEvents)])
|
||||
mylog("debug", ["[Plugins] pluginObjects count: ", len(pluginObjects)])
|
||||
mylog("debug", f"[Plugins] pluginEvents count: {len(pluginEvents)}")
|
||||
mylog("debug", f"[Plugins] pluginObjects count: {len(pluginObjects)}")
|
||||
|
||||
mylog(
|
||||
"debug", ["[Plugins] events_to_insert count: ", len(events_to_insert)]
|
||||
)
|
||||
mylog(
|
||||
"debug", ["[Plugins] history_to_insert count: ", len(history_to_insert)]
|
||||
)
|
||||
mylog(
|
||||
"debug", ["[Plugins] objects_to_insert count: ", len(objects_to_insert)]
|
||||
)
|
||||
mylog(
|
||||
"debug", ["[Plugins] objects_to_update count: ", len(objects_to_update)]
|
||||
)
|
||||
mylog("debug", f"[Plugins] events_to_insert count: {len(events_to_insert)}")
|
||||
mylog("debug", f"[Plugins] history_to_insert count: {len(history_to_insert)}")
|
||||
mylog("debug", f"[Plugins] objects_to_insert count: {len(objects_to_insert)}")
|
||||
mylog("debug", f"[Plugins] objects_to_update count: {len(objects_to_update)}")
|
||||
|
||||
mylog("trace", ["[Plugins] objects_to_update: ", objects_to_update])
|
||||
mylog("trace", ["[Plugins] events_to_insert: ", events_to_insert])
|
||||
mylog("trace", ["[Plugins] history_to_insert: ", history_to_insert])
|
||||
mylog("trace", f"[Plugins] objects_to_update: {objects_to_update}")
|
||||
mylog("trace", f"[Plugins] events_to_insert: {events_to_insert}")
|
||||
mylog("trace", f"[Plugins] history_to_insert: {history_to_insert}")
|
||||
|
||||
logEventStatusCounts("pluginEvents", pluginEvents)
|
||||
logEventStatusCounts("pluginObjects", pluginObjects)
|
||||
@@ -982,12 +895,12 @@ def process_plugin_events(db, plugin, plugEventsArr):
|
||||
if objects_to_insert:
|
||||
sql.executemany(
|
||||
"""
|
||||
INSERT INTO Plugins_Objects
|
||||
("Plugin", "Object_PrimaryID", "Object_SecondaryID", "DateTimeCreated",
|
||||
"DateTimeChanged", "Watched_Value1", "Watched_Value2", "Watched_Value3",
|
||||
INSERT INTO Plugins_Objects
|
||||
("Plugin", "Object_PrimaryID", "Object_SecondaryID", "DateTimeCreated",
|
||||
"DateTimeChanged", "Watched_Value1", "Watched_Value2", "Watched_Value3",
|
||||
"Watched_Value4", "Status", "Extra", "UserData", "ForeignKey", "SyncHubNodeName",
|
||||
"HelpVal1", "HelpVal2", "HelpVal3", "HelpVal4",
|
||||
"ObjectGUID")
|
||||
"HelpVal1", "HelpVal2", "HelpVal3", "HelpVal4",
|
||||
"ObjectGUID")
|
||||
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
|
||||
""",
|
||||
objects_to_insert,
|
||||
@@ -998,10 +911,10 @@ def process_plugin_events(db, plugin, plugEventsArr):
|
||||
sql.executemany(
|
||||
"""
|
||||
UPDATE Plugins_Objects
|
||||
SET "Plugin" = ?, "Object_PrimaryID" = ?, "Object_SecondaryID" = ?, "DateTimeCreated" = ?,
|
||||
"DateTimeChanged" = ?, "Watched_Value1" = ?, "Watched_Value2" = ?, "Watched_Value3" = ?,
|
||||
"Watched_Value4" = ?, "Status" = ?, "Extra" = ?, "UserData" = ?, "ForeignKey" = ?, "SyncHubNodeName" = ?,
|
||||
"HelpVal1" = ?, "HelpVal2" = ?, "HelpVal3" = ?, "HelpVal4" = ?,
|
||||
SET "Plugin" = ?, "Object_PrimaryID" = ?, "Object_SecondaryID" = ?, "DateTimeCreated" = ?,
|
||||
"DateTimeChanged" = ?, "Watched_Value1" = ?, "Watched_Value2" = ?, "Watched_Value3" = ?,
|
||||
"Watched_Value4" = ?, "Status" = ?, "Extra" = ?, "UserData" = ?, "ForeignKey" = ?, "SyncHubNodeName" = ?,
|
||||
"HelpVal1" = ?, "HelpVal2" = ?, "HelpVal3" = ?, "HelpVal4" = ?,
|
||||
"ObjectGUID" = ?
|
||||
WHERE "Index" = ?
|
||||
""",
|
||||
@@ -1012,12 +925,12 @@ def process_plugin_events(db, plugin, plugEventsArr):
|
||||
if events_to_insert:
|
||||
sql.executemany(
|
||||
"""
|
||||
INSERT INTO Plugins_Events
|
||||
("Plugin", "Object_PrimaryID", "Object_SecondaryID", "DateTimeCreated",
|
||||
"DateTimeChanged", "Watched_Value1", "Watched_Value2", "Watched_Value3",
|
||||
INSERT INTO Plugins_Events
|
||||
("Plugin", "Object_PrimaryID", "Object_SecondaryID", "DateTimeCreated",
|
||||
"DateTimeChanged", "Watched_Value1", "Watched_Value2", "Watched_Value3",
|
||||
"Watched_Value4", "Status", "Extra", "UserData", "ForeignKey", "SyncHubNodeName",
|
||||
"HelpVal1", "HelpVal2", "HelpVal3", "HelpVal4",
|
||||
"ObjectGUID")
|
||||
"ObjectGUID")
|
||||
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
|
||||
""",
|
||||
events_to_insert,
|
||||
@@ -1027,12 +940,12 @@ def process_plugin_events(db, plugin, plugEventsArr):
|
||||
if history_to_insert:
|
||||
sql.executemany(
|
||||
"""
|
||||
INSERT INTO Plugins_History
|
||||
("Plugin", "Object_PrimaryID", "Object_SecondaryID", "DateTimeCreated",
|
||||
"DateTimeChanged", "Watched_Value1", "Watched_Value2", "Watched_Value3",
|
||||
INSERT INTO Plugins_History
|
||||
("Plugin", "Object_PrimaryID", "Object_SecondaryID", "DateTimeCreated",
|
||||
"DateTimeChanged", "Watched_Value1", "Watched_Value2", "Watched_Value3",
|
||||
"Watched_Value4", "Status", "Extra", "UserData", "ForeignKey", "SyncHubNodeName",
|
||||
"HelpVal1", "HelpVal2", "HelpVal3", "HelpVal4",
|
||||
"ObjectGUID")
|
||||
"ObjectGUID")
|
||||
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
|
||||
""",
|
||||
history_to_insert,
|
||||
@@ -1044,7 +957,7 @@ def process_plugin_events(db, plugin, plugEventsArr):
|
||||
except Exception as e:
|
||||
# Rollback the transaction in case of an error
|
||||
conn.rollback()
|
||||
mylog("none", ["[Plugins] ⚠ ERROR: ", e])
|
||||
mylog("none", f"[Plugins] ⚠ ERROR: {e}")
|
||||
raise e
|
||||
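The batching above relies on executemany inside a single transaction that is rolled back on any failure; a compact sketch with a made-up two-column table:

import sqlite3

conn = sqlite3.connect(":memory:")
sql = conn.cursor()
sql.execute("CREATE TABLE Plugins_Objects_Demo (Plugin TEXT, Object_PrimaryID TEXT)")

objects_to_insert = [("PIHOLE", "aa:bb:cc"), ("PIHOLE", "dd:ee:ff")]

try:
    # One executemany call inserts every prepared tuple in a single batch.
    sql.executemany(
        "INSERT INTO Plugins_Objects_Demo (Plugin, Object_PrimaryID) VALUES (?, ?)",
        objects_to_insert,
    )
    conn.commit()
except Exception as e:
    # Any failure rolls the whole batch back, mirroring the handler above.
    conn.rollback()
    raise e

print(sql.execute("SELECT COUNT(*) FROM Plugins_Objects_Demo").fetchone())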
|
||||
# Perform database table mapping if enabled for the plugin
|
||||
@@ -1056,7 +969,7 @@ def process_plugin_events(db, plugin, plugEventsArr):
|
||||
dbTable = plugin["mapped_to_table"]
|
||||
|
||||
# Log a debug message indicating the mapping of objects to the database table.
|
||||
mylog("debug", ["[Plugins] Mapping objects to database table: ", dbTable])
|
||||
mylog("debug", f"[Plugins] Mapping objects to database table: {dbTable}")
|
||||
|
||||
# Initialize lists to hold mapped column names, columnsStr, and valuesStr for SQL query.
|
||||
mappedCols = []
|
||||
@@ -1121,8 +1034,7 @@ def process_plugin_events(db, plugin, plugEventsArr):
|
||||
|
||||
# Check if there's a default value specified for this column in the JSON.
|
||||
if (
|
||||
"mapped_to_column_data" in col
|
||||
and "value" in col["mapped_to_column_data"]
|
||||
"mapped_to_column_data" in col and "value" in col["mapped_to_column_data"]
|
||||
):
|
||||
tmpList.append(col["mapped_to_column_data"]["value"])
|
||||
|
||||
@@ -1133,8 +1045,8 @@ def process_plugin_events(db, plugin, plugEventsArr):
|
||||
q = f"INSERT OR IGNORE INTO {dbTable} ({columnsStr}) VALUES ({valuesStr})"
|
||||
|
||||
# Log a debug message showing the generated SQL query for mapping.
|
||||
mylog("debug", ["[Plugins] SQL query for mapping: ", q])
|
||||
mylog("debug", ["[Plugins] SQL sqlParams for mapping: ", sqlParams])
|
||||
mylog("debug", f"[Plugins] SQL query for mapping: {q}")
|
||||
mylog("debug", f"[Plugins] SQL sqlParams for mapping: {sqlParams}")
|
||||
|
||||
# Execute the SQL query using 'sql.executemany()' and the 'sqlParams' list of tuples.
|
||||
# This will insert multiple rows into the database in one go.
|
||||
|
||||
@@ -1,14 +1,6 @@
|
||||
import sys
|
||||
import subprocess
|
||||
import os
|
||||
import re
|
||||
import datetime
|
||||
from dateutil import parser
|
||||
|
||||
# Register NetAlertX directories
|
||||
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
|
||||
sys.path.extend([f"{INSTALL_PATH}/server"])
|
||||
|
||||
from helper import get_setting_value, check_IP_format
|
||||
from utils.datetime_utils import timeNowDB, normalizeTimeStamp
|
||||
from logger import mylog, Logger
|
||||
@@ -44,7 +36,7 @@ def exclude_ignored_devices(db):
|
||||
# Join conditions and prepare the query
|
||||
conditions_str = " OR ".join(conditions)
|
||||
if conditions_str:
|
||||
query = f"""DELETE FROM CurrentScan WHERE
|
||||
query = f"""DELETE FROM CurrentScan WHERE
|
||||
1=1
|
||||
AND (
|
||||
{conditions_str}
|
||||
@@ -57,22 +49,23 @@ def exclude_ignored_devices(db):
|
||||
|
||||
sql.execute(query)
|
||||
|
||||
#-------------------------------------------------------------------------------
|
||||
def update_devices_data_from_scan (db):
|
||||
sql = db.sql #TO-DO
|
||||
|
||||
# -------------------------------------------------------------------------------
|
||||
def update_devices_data_from_scan(db):
|
||||
sql = db.sql # TO-DO
|
||||
startTime = timeNowDB()
|
||||
|
||||
# Update Last Connection
|
||||
mylog("debug", "[Update Devices] 1 Last Connection")
|
||||
sql.execute(f"""UPDATE Devices SET devLastConnection = '{startTime}',
|
||||
devPresentLastScan = 1
|
||||
WHERE EXISTS (SELECT 1 FROM CurrentScan
|
||||
WHERE EXISTS (SELECT 1 FROM CurrentScan
|
||||
WHERE devMac = cur_MAC) """)
|
||||
|
||||
# Clean no active devices
|
||||
mylog("debug", "[Update Devices] 2 Clean no active devices")
|
||||
sql.execute("""UPDATE Devices SET devPresentLastScan = 0
|
||||
WHERE NOT EXISTS (SELECT 1 FROM CurrentScan
|
||||
WHERE NOT EXISTS (SELECT 1 FROM CurrentScan
|
||||
WHERE devMac = cur_MAC) """)
|
||||
|
||||
# Update IP
|
||||
@@ -103,7 +96,7 @@ def update_devices_data_from_scan (db):
|
||||
FROM CurrentScan
|
||||
WHERE Devices.devMac = CurrentScan.cur_MAC
|
||||
)
|
||||
WHERE
|
||||
WHERE
|
||||
(devVendor IS NULL OR devVendor IN ("", "null", "(unknown)", "(Unknown)"))
|
||||
AND EXISTS (
|
||||
SELECT 1
|
||||
@@ -116,12 +109,12 @@ def update_devices_data_from_scan (db):
|
||||
sql.execute("""UPDATE Devices
|
||||
SET devParentPort = (
|
||||
SELECT cur_Port
|
||||
FROM CurrentScan
|
||||
WHERE Devices.devMac = CurrentScan.cur_MAC
|
||||
FROM CurrentScan
|
||||
WHERE Devices.devMac = CurrentScan.cur_MAC
|
||||
)
|
||||
WHERE
|
||||
WHERE
|
||||
(devParentPort IS NULL OR devParentPort IN ("", "null", "(unknown)", "(Unknown)"))
|
||||
AND
|
||||
AND
|
||||
EXISTS (
|
||||
SELECT 1
|
||||
FROM CurrentScan
|
||||
@@ -139,9 +132,9 @@ def update_devices_data_from_scan (db):
|
||||
FROM CurrentScan
|
||||
WHERE Devices.devMac = CurrentScan.cur_MAC
|
||||
)
|
||||
WHERE
|
||||
WHERE
|
||||
(devParentMAC IS NULL OR devParentMAC IN ("", "null", "(unknown)", "(Unknown)"))
|
||||
AND
|
||||
AND
|
||||
EXISTS (
|
||||
SELECT 1
|
||||
FROM CurrentScan
|
||||
@@ -161,7 +154,7 @@ def update_devices_data_from_scan (db):
|
||||
FROM CurrentScan
|
||||
WHERE Devices.devMac = CurrentScan.cur_MAC
|
||||
)
|
||||
WHERE
|
||||
WHERE
|
||||
(devSite IS NULL OR devSite IN ("", "null"))
|
||||
AND EXISTS (
|
||||
SELECT 1
|
||||
@@ -178,7 +171,7 @@ def update_devices_data_from_scan (db):
|
||||
FROM CurrentScan
|
||||
WHERE Devices.devMac = CurrentScan.cur_MAC
|
||||
)
|
||||
WHERE
|
||||
WHERE
|
||||
(devSSID IS NULL OR devSSID IN ("", "null"))
|
||||
AND EXISTS (
|
||||
SELECT 1
|
||||
@@ -195,7 +188,7 @@ def update_devices_data_from_scan (db):
|
||||
FROM CurrentScan
|
||||
WHERE Devices.devMac = CurrentScan.cur_MAC
|
||||
)
|
||||
WHERE
|
||||
WHERE
|
||||
(devType IS NULL OR devType IN ("", "null"))
|
||||
AND EXISTS (
|
||||
SELECT 1
|
||||
@@ -208,17 +201,17 @@ def update_devices_data_from_scan (db):
|
||||
mylog("debug", "[Update Devices] - (if not empty) cur_Name -> (if empty) devName")
|
||||
sql.execute(""" UPDATE Devices
|
||||
SET devName = COALESCE((
|
||||
SELECT cur_Name
|
||||
SELECT cur_Name
|
||||
FROM CurrentScan
|
||||
WHERE cur_MAC = devMac
|
||||
AND cur_Name IS NOT NULL
|
||||
AND cur_Name <> 'null'
|
||||
AND cur_Name <> ''
|
||||
), devName)
|
||||
WHERE (devName IN ('(unknown)', '(name not found)', '')
|
||||
WHERE (devName IN ('(unknown)', '(name not found)', '')
|
||||
OR devName IS NULL)
|
||||
AND EXISTS (
|
||||
SELECT 1
|
||||
SELECT 1
|
||||
FROM CurrentScan
|
||||
WHERE cur_MAC = devMac
|
||||
AND cur_Name IS NOT NULL
|
||||
@@ -425,9 +418,9 @@ def print_scan_stats(db):
|
||||
mylog("verbose", f" {row['cur_ScanMethod']}: {row['scan_method_count']}")
|
||||
|
||||
|
||||
#-------------------------------------------------------------------------------
|
||||
def create_new_devices (db):
|
||||
sql = db.sql # TO-DO
|
||||
# -------------------------------------------------------------------------------
|
||||
def create_new_devices(db):
|
||||
sql = db.sql # TO-DO
|
||||
startTime = timeNowDB()
|
||||
|
||||
# Insert events for new devices from CurrentScan (not yet in Devices)
|
||||
@@ -474,36 +467,36 @@ def create_new_devices (db):
|
||||
mylog("debug", "[New Devices] 2 Create devices")
|
||||
|
||||
# default New Device values preparation
|
||||
newDevColumns = """devAlertEvents,
|
||||
devAlertDown,
|
||||
devPresentLastScan,
|
||||
devIsArchived,
|
||||
devIsNew,
|
||||
devSkipRepeated,
|
||||
devScan,
|
||||
devOwner,
|
||||
devFavorite,
|
||||
devGroup,
|
||||
devComments,
|
||||
devLogEvents,
|
||||
newDevColumns = """devAlertEvents,
|
||||
devAlertDown,
|
||||
devPresentLastScan,
|
||||
devIsArchived,
|
||||
devIsNew,
|
||||
devSkipRepeated,
|
||||
devScan,
|
||||
devOwner,
|
||||
devFavorite,
|
||||
devGroup,
|
||||
devComments,
|
||||
devLogEvents,
|
||||
devLocation,
|
||||
devCustomProps,
|
||||
devParentRelType,
|
||||
devReqNicsOnline
|
||||
"""
|
||||
|
||||
newDevDefaults = f"""{get_setting_value("NEWDEV_devAlertEvents")},
|
||||
{get_setting_value("NEWDEV_devAlertDown")},
|
||||
{get_setting_value("NEWDEV_devPresentLastScan")},
|
||||
{get_setting_value("NEWDEV_devIsArchived")},
|
||||
{get_setting_value("NEWDEV_devIsNew")},
|
||||
{get_setting_value("NEWDEV_devSkipRepeated")},
|
||||
{get_setting_value("NEWDEV_devScan")},
|
||||
'{sanitize_SQL_input(get_setting_value("NEWDEV_devOwner"))}',
|
||||
{get_setting_value("NEWDEV_devFavorite")},
|
||||
'{sanitize_SQL_input(get_setting_value("NEWDEV_devGroup"))}',
|
||||
'{sanitize_SQL_input(get_setting_value("NEWDEV_devComments"))}',
|
||||
{get_setting_value("NEWDEV_devLogEvents")},
|
||||
newDevDefaults = f"""{get_setting_value("NEWDEV_devAlertEvents")},
|
||||
{get_setting_value("NEWDEV_devAlertDown")},
|
||||
{get_setting_value("NEWDEV_devPresentLastScan")},
|
||||
{get_setting_value("NEWDEV_devIsArchived")},
|
||||
{get_setting_value("NEWDEV_devIsNew")},
|
||||
{get_setting_value("NEWDEV_devSkipRepeated")},
|
||||
{get_setting_value("NEWDEV_devScan")},
|
||||
'{sanitize_SQL_input(get_setting_value("NEWDEV_devOwner"))}',
|
||||
{get_setting_value("NEWDEV_devFavorite")},
|
||||
'{sanitize_SQL_input(get_setting_value("NEWDEV_devGroup"))}',
|
||||
'{sanitize_SQL_input(get_setting_value("NEWDEV_devComments"))}',
|
||||
{get_setting_value("NEWDEV_devLogEvents")},
|
||||
'{sanitize_SQL_input(get_setting_value("NEWDEV_devLocation"))}',
|
||||
'{sanitize_SQL_input(get_setting_value("NEWDEV_devCustomProps"))}',
|
||||
'{sanitize_SQL_input(get_setting_value("NEWDEV_devParentRelType"))}',
|
||||
@@ -511,7 +504,7 @@ def create_new_devices (db):
|
||||
"""
|
||||
|
||||
# Fetch data from CurrentScan skipping ignored devices by IP and MAC
|
||||
query = """SELECT cur_MAC, cur_Name, cur_Vendor, cur_ScanMethod, cur_IP, cur_SyncHubNodeName, cur_NetworkNodeMAC, cur_PORT, cur_NetworkSite, cur_SSID, cur_Type
|
||||
query = """SELECT cur_MAC, cur_Name, cur_Vendor, cur_ScanMethod, cur_IP, cur_SyncHubNodeName, cur_NetworkNodeMAC, cur_PORT, cur_NetworkSite, cur_SSID, cur_Type
|
||||
FROM CurrentScan """
|
||||
|
||||
mylog("debug", f"[New Devices] Collecting New Devices Query: {query}")
|
||||
@@ -554,40 +547,40 @@ def create_new_devices (db):
|
||||
)
|
||||
|
||||
# Preparing the individual insert statement
|
||||
sqlQuery = f"""INSERT OR IGNORE INTO Devices
|
||||
sqlQuery = f"""INSERT OR IGNORE INTO Devices
|
||||
(
|
||||
devMac,
|
||||
devName,
|
||||
devMac,
|
||||
devName,
|
||||
devVendor,
|
||||
devLastIP,
|
||||
devFirstConnection,
|
||||
devLastConnection,
|
||||
devSyncHubNode,
|
||||
devLastIP,
|
||||
devFirstConnection,
|
||||
devLastConnection,
|
||||
devSyncHubNode,
|
||||
devGUID,
|
||||
devParentMAC,
|
||||
devParentMAC,
|
||||
devParentPort,
|
||||
devSite,
|
||||
devSite,
|
||||
devSSID,
|
||||
devType,
|
||||
devSourcePlugin,
|
||||
devType,
|
||||
devSourcePlugin,
|
||||
{newDevColumns}
|
||||
)
|
||||
VALUES
|
||||
VALUES
|
||||
(
|
||||
'{sanitize_SQL_input(cur_MAC)}',
|
||||
'{sanitize_SQL_input(cur_MAC)}',
|
||||
'{sanitize_SQL_input(cur_Name)}',
|
||||
'{sanitize_SQL_input(cur_Vendor)}',
|
||||
'{sanitize_SQL_input(cur_IP)}',
|
||||
?,
|
||||
?,
|
||||
'{sanitize_SQL_input(cur_SyncHubNodeName)}',
|
||||
'{sanitize_SQL_input(cur_Vendor)}',
|
||||
'{sanitize_SQL_input(cur_IP)}',
|
||||
?,
|
||||
?,
|
||||
'{sanitize_SQL_input(cur_SyncHubNodeName)}',
|
||||
{sql_generateGuid},
|
||||
'{sanitize_SQL_input(cur_NetworkNodeMAC)}',
|
||||
'{sanitize_SQL_input(cur_PORT)}',
|
||||
'{sanitize_SQL_input(cur_NetworkSite)}',
|
||||
'{sanitize_SQL_input(cur_NetworkSite)}',
|
||||
'{sanitize_SQL_input(cur_SSID)}',
|
||||
'{sanitize_SQL_input(cur_Type)}',
|
||||
'{sanitize_SQL_input(cur_ScanMethod)}',
|
||||
'{sanitize_SQL_input(cur_Type)}',
|
||||
'{sanitize_SQL_input(cur_ScanMethod)}',
|
||||
{newDevDefaults}
|
||||
)"""
|
||||
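The statement above mixes sanitised f-string interpolation with two ? placeholders. Purely as an illustration of the placeholder pattern (table and columns trimmed down, not the actual schema or the project's method), the same kind of insert can be written with every value bound:

import sqlite3

con = sqlite3.connect(":memory:")
con.execute("CREATE TABLE Devices (devMac TEXT, devName TEXT, devLastIP TEXT, "
            "devFirstConnection TEXT, devLastConnection TEXT)")

startTime = "2025-01-01 10:00:00"
row = ("aa:bb:cc:dd:ee:ff", "laptop", "10.0.0.2", startTime, startTime)

# Every value is bound, so quoting and escaping are left to the driver.
con.execute(
    "INSERT OR IGNORE INTO Devices "
    "(devMac, devName, devLastIP, devFirstConnection, devLastConnection) "
    "VALUES (?, ?, ?, ?, ?)",
    row,
)
print(con.execute("SELECT * FROM Devices").fetchall())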
|
||||
@@ -598,7 +591,8 @@ def create_new_devices (db):
|
||||
mylog("debug", "[New Devices] New Devices end")
|
||||
db.commitDB()
|
||||
|
||||
#-------------------------------------------------------------------------------
|
||||
|
||||
# -------------------------------------------------------------------------------
|
||||
# Check if plugins data changed
|
||||
def check_plugin_data_changed(pm, plugins_to_check):
|
||||
"""
|
||||
@@ -630,7 +624,7 @@ def check_plugin_data_changed(pm, plugins_to_check):
|
||||
|
||||
for plugin_name in plugins_to_check:
|
||||
|
||||
last_data_change = pm.plugin_states.get(plugin_name, {}).get("lastDataChange")
|
||||
last_data_change = pm.plugin_states.get(plugin_name, {}).get("lastDataChange")
|
||||
last_data_check = pm.plugin_checks.get(plugin_name, "")
|
||||
|
||||
if not last_data_change:
|
||||
@@ -639,13 +633,13 @@ def check_plugin_data_changed(pm, plugins_to_check):
|
||||
# Normalize and validate last_changed timestamp
|
||||
last_changed_ts = normalizeTimeStamp(last_data_change)
|
||||
|
||||
if last_changed_ts == None:
|
||||
if last_changed_ts is None:
|
||||
mylog('none', f'[check_plugin_data_changed] Unexpected last_data_change timestamp for {plugin_name} (input|output): ({last_data_change}|{last_changed_ts})')
|
||||
|
||||
# Normalize and validate last_data_check timestamp
|
||||
last_data_check_ts = normalizeTimeStamp(last_data_check)
|
||||
|
||||
if last_data_check_ts == None:
|
||||
if last_data_check_ts is None:
|
||||
mylog('none', f'[check_plugin_data_changed] Unexpected last_data_check timestamp for {plugin_name} (input|output): ({last_data_check}|{last_data_check_ts})')
|
||||
|
||||
# Track which plugins have newer state than last_checked
|
||||
@@ -660,15 +654,19 @@ def check_plugin_data_changed(pm, plugins_to_check):
|
||||
|
||||
# Continue if changes detected
|
||||
for p in plugins_changed:
|
||||
mylog('debug', f'[check_plugin_data_changed] {p} changed (last_data_change|last_data_check): ({pm.plugin_states.get(p, {}).get("lastDataChange")}|{pm.plugin_checks.get(p)})')
|
||||
mylog(
|
||||
'debug',
|
||||
f'[check_plugin_data_changed] {p} changed (last_data_change|last_data_check): ({pm.plugin_states.get(p, {}).get("lastDataChange")}|{pm.plugin_checks.get(p)})'
|
||||
)
|
||||
|
||||
return True
|
||||
|
||||
#-------------------------------------------------------------------------------
|
||||
|
||||
# -------------------------------------------------------------------------------
|
||||
def update_devices_names(pm):
|
||||
|
||||
# --- Short-circuit if no name-resolution plugin has changed ---
|
||||
if check_plugin_data_changed(pm, ["DIGSCAN", "NSLOOKUP", "NBTSCAN", "AVAHISCAN"]) == False:
|
||||
if check_plugin_data_changed(pm, ["DIGSCAN", "NSLOOKUP", "NBTSCAN", "AVAHISCAN"]) is False:
|
||||
mylog('debug', '[Update Device Name] No relevant plugin changes since last check.')
|
||||
return
|
||||
|
||||
@@ -676,8 +674,8 @@ def update_devices_names(pm):
|
||||
|
||||
sql = pm.db.sql
|
||||
resolver = NameResolver(pm.db)
|
||||
device_handler = DeviceInstance(pm.db)
|
||||
|
||||
device_handler = DeviceInstance(pm.db)
|
||||
|
||||
nameNotFound = "(name not found)"
|
||||
|
||||
# Define resolution strategies in priority order
|
||||
@@ -722,8 +720,7 @@ def update_devices_names(pm):
|
||||
|
||||
# If a valid result is found, record it and stop further attempts
|
||||
if (
|
||||
newFQDN not in [nameNotFound, "", "localhost."]
|
||||
and " communications error to " not in newFQDN
|
||||
newFQDN not in [nameNotFound, "", "localhost."] and " communications error to " not in newFQDN
|
||||
):
|
||||
foundStats[label] += 1
|
||||
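A small sketch of the priority-order resolution loop implied here, with dummy callables standing in for the DIGSCAN/AVAHISCAN/NSLOOKUP/NBTSCAN lookups (the real strategies query plugin results, so this is only the control flow):

nameNotFound = "(name not found)"

# Strategies tried in priority order; each returns a name or the sentinel.
strategies = [
    ("DIGSCAN",   lambda ip: nameNotFound),
    ("AVAHISCAN", lambda ip: "printer.local"),
    ("NSLOOKUP",  lambda ip: nameNotFound),
    ("NBTSCAN",   lambda ip: nameNotFound),
]

foundStats = {label: 0 for label, _ in strategies}

def resolve(ip):
    for label, lookup in strategies:
        newFQDN = lookup(ip)
        # Stop at the first usable answer, skipping sentinel and error values.
        if newFQDN not in [nameNotFound, "", "localhost."] and " communications error to " not in newFQDN:
            foundStats[label] += 1
            return newFQDN
    return nameNotFound

print(resolve("10.0.0.7"), foundStats)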
|
||||
@@ -750,14 +747,14 @@ def update_devices_names(pm):
|
||||
)
|
||||
|
||||
# Try resolving both name and FQDN
|
||||
recordsToUpdate, recordsNotFound, foundStats, notFound = resolve_devices(
|
||||
recordsToUpdate, recordsNotFound, fs, notFound = resolve_devices(
|
||||
unknownDevices
|
||||
)
|
||||
|
||||
# Log summary
|
||||
mylog(
|
||||
"verbose",
|
||||
f"[Update Device Name] Names Found (DIGSCAN/AVAHISCAN/NSLOOKUP/NBTSCAN): {len(recordsToUpdate)} ({foundStats['DIGSCAN']}/{foundStats['AVAHISCAN']}/{foundStats['NSLOOKUP']}/{foundStats['NBTSCAN']})",
|
||||
f"[Update Device Name] Names Found (DIGSCAN/AVAHISCAN/NSLOOKUP/NBTSCAN): {len(recordsToUpdate)} ({fs['DIGSCAN']}/{fs['AVAHISCAN']}/{fs['NSLOOKUP']}/{fs['NBTSCAN']})",
|
||||
)
|
||||
mylog("verbose", f"[Update Device Name] Names Not Found : {notFound}")
|
||||
|
||||
@@ -780,16 +777,14 @@ def update_devices_names(pm):
|
||||
)
|
||||
|
||||
# Try resolving only FQDN
|
||||
recordsToUpdate, _, foundStats, notFound = resolve_devices(
|
||||
recordsToUpdate, _, fs, notFound = resolve_devices(
|
||||
allDevices, resolve_both_name_and_fqdn=False
|
||||
)
|
||||
|
||||
# Log summary
|
||||
mylog(
|
||||
"verbose",
|
||||
f"[Update FQDN] Names Found (DIGSCAN/AVAHISCAN/NSLOOKUP/NBTSCAN): {len(recordsToUpdate)}"+
|
||||
f"({foundStats['DIGSCAN']}/{foundStats['AVAHISCAN']}/{foundStats['NSLOOKUP']}"+
|
||||
f"/{foundStats['NBTSCAN']})",
|
||||
f"[Update FQDN] Names Found (DIGSCAN/AVAHISCAN/NSLOOKUP/NBTSCAN): {len(recordsToUpdate)}({fs['DIGSCAN']}/{fs['AVAHISCAN']}/{fs['NSLOOKUP']}/{fs['NBTSCAN']})",
|
||||
)
|
||||
mylog("verbose", f"[Update FQDN] Names Not Found : {notFound}")
|
||||
|
||||
@@ -803,7 +798,7 @@ def update_devices_names(pm):
|
||||
|
||||
# --- Step 3: Log last checked time ---
|
||||
# After resolving names, update last checked
|
||||
pm.plugin_checks = {"DIGSCAN": timeNowDB(), "AVAHISCAN": timeNowDB(), "NSLOOKUP": timeNowDB(), "NBTSCAN": timeNowDB() }
|
||||
pm.plugin_checks = {"DIGSCAN": timeNowDB(), "AVAHISCAN": timeNowDB(), "NSLOOKUP": timeNowDB(), "NBTSCAN": timeNowDB()}
|
||||
|
||||
|
||||
# -------------------------------------------------------------------------------
|
||||
@@ -901,7 +896,6 @@ def query_MAC_vendor(pMAC):
|
||||
|
||||
# Search vendor in HW Vendors DB
|
||||
mac_start_string6 = mac[0:6]
|
||||
mac_start_string9 = mac[0:9]
|
||||
|
||||
try:
|
||||
with open(filePath, "r") as f:
|
||||
|
||||
@@ -1,16 +1,13 @@
|
||||
import sys
|
||||
import os
|
||||
import re
|
||||
import json
|
||||
import base64
|
||||
from pathlib import Path
|
||||
from typing import Optional, Tuple
|
||||
from logger import mylog
|
||||
|
||||
# Register NetAlertX directories
|
||||
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
|
||||
sys.path.extend([f"{INSTALL_PATH}/server"])
|
||||
|
||||
from logger import mylog
|
||||
|
||||
# Load MAC/device-type/icon rules from external file
|
||||
MAC_TYPE_ICON_PATH = Path(f"{INSTALL_PATH}/back/device_heuristics_rules.json")
|
||||
@@ -83,7 +80,7 @@ def match_vendor(vendor: str, default_type: str, default_icon: str) -> Tuple[str
|
||||
|
||||
for pattern in patterns:
|
||||
# Only apply fallback when no MAC prefix is specified
|
||||
mac_prefix = pattern.get("mac_prefix", "")
|
||||
# mac_prefix = pattern.get("mac_prefix", "")
|
||||
vendor_pattern = pattern.get("vendor", "").lower()
|
||||
|
||||
if vendor_pattern and vendor_pattern in vendor_lc:
|
||||
|
||||
@@ -1,11 +1,4 @@
|
||||
import sys
|
||||
import os
|
||||
import re
|
||||
|
||||
# Register NetAlertX directories
|
||||
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
|
||||
sys.path.extend([f"{INSTALL_PATH}/server"])
|
||||
|
||||
from logger import mylog
|
||||
from helper import get_setting_value
|
||||
|
||||
@@ -31,7 +24,7 @@ class NameResolver:
|
||||
|
||||
# Check by MAC
|
||||
sql.execute(f"""
|
||||
SELECT Watched_Value2 FROM Plugins_Objects
|
||||
SELECT Watched_Value2 FROM Plugins_Objects
|
||||
WHERE Plugin = '{plugin}' AND Object_PrimaryID = '{pMAC}'
|
||||
""")
|
||||
result = sql.fetchall()
|
||||
@@ -42,9 +35,9 @@ class NameResolver:
|
||||
|
||||
# Check name by IP if enabled
|
||||
if get_setting_value('NEWDEV_IP_MATCH_NAME'):
|
||||
|
||||
|
||||
sql.execute(f"""
|
||||
SELECT Watched_Value2 FROM Plugins_Objects
|
||||
SELECT Watched_Value2 FROM Plugins_Objects
|
||||
WHERE Plugin = '{plugin}' AND Object_SecondaryID = '{pIP}'
|
||||
""")
|
||||
result = sql.fetchall()
|
||||
|
||||
@@ -1,10 +1,3 @@
|
||||
import sys
|
||||
import os
|
||||
|
||||
# Register NetAlertX directories
|
||||
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
|
||||
sys.path.extend([f"{INSTALL_PATH}/server"])
|
||||
|
||||
from scan.device_handling import (
|
||||
create_new_devices,
|
||||
print_scan_stats,
|
||||
@@ -14,7 +7,7 @@ from scan.device_handling import (
|
||||
)
|
||||
from helper import get_setting_value
|
||||
from db.db_helper import print_table_schema
|
||||
from utils.datetime_utils import timeNowDB, timeNowTZ
|
||||
from utils.datetime_utils import timeNowDB
|
||||
from logger import mylog, Logger
|
||||
from messaging.reporting import skip_repeated_notifications
|
||||
|
||||
@@ -133,20 +126,20 @@ def create_sessions_snapshot(db):
|
||||
db.commitDB()
|
||||
|
||||
|
||||
#-------------------------------------------------------------------------------
|
||||
def insert_events (db):
|
||||
sql = db.sql #TO-DO
|
||||
startTime = timeNowDB()
|
||||
|
||||
# -------------------------------------------------------------------------------
|
||||
def insert_events(db):
|
||||
sql = db.sql # TO-DO
|
||||
startTime = timeNowDB()
|
||||
|
||||
# Check device down
|
||||
mylog("debug", "[Events] - 1 - Devices down")
|
||||
sql.execute(f"""INSERT INTO Events (eve_MAC, eve_IP, eve_DateTime,
|
||||
eve_EventType, eve_AdditionalInfo,
|
||||
eve_PendingAlertEmail)
|
||||
SELECT devMac, devLastIP, '{startTime}', 'Device Down', '', 1
|
||||
FROM Devices
|
||||
FROM Devices
|
||||
WHERE devAlertDown != 0
|
||||
AND devPresentLastScan = 1
|
||||
AND devPresentLastScan = 1
|
||||
AND NOT EXISTS (SELECT 1 FROM CurrentScan
|
||||
WHERE devMac = cur_MAC
|
||||
) """)
|
||||
@@ -156,15 +149,15 @@ def insert_events (db):
|
||||
sql.execute(f""" INSERT INTO Events (eve_MAC, eve_IP, eve_DateTime,
|
||||
eve_EventType, eve_AdditionalInfo,
|
||||
eve_PendingAlertEmail)
|
||||
SELECT DISTINCT c.cur_MAC, c.cur_IP, '{startTime}',
|
||||
CASE
|
||||
WHEN last_event.eve_EventType = 'Device Down' and last_event.eve_PendingAlertEmail = 0 THEN 'Down Reconnected'
|
||||
ELSE 'Connected'
|
||||
SELECT DISTINCT c.cur_MAC, c.cur_IP, '{startTime}',
|
||||
CASE
|
||||
WHEN last_event.eve_EventType = 'Device Down' and last_event.eve_PendingAlertEmail = 0 THEN 'Down Reconnected'
|
||||
ELSE 'Connected'
|
||||
END,
|
||||
'',
|
||||
1
|
||||
FROM CurrentScan AS c
|
||||
LEFT JOIN LatestEventsPerMAC AS last_event ON c.cur_MAC = last_event.eve_MAC
|
||||
FROM CurrentScan AS c
|
||||
LEFT JOIN LatestEventsPerMAC AS last_event ON c.cur_MAC = last_event.eve_MAC
|
||||
WHERE last_event.devPresentLastScan = 0 OR last_event.eve_MAC IS NULL
|
||||
""")
|
||||
|
||||
@@ -190,7 +183,7 @@ def insert_events (db):
|
||||
SELECT cur_MAC, cur_IP, '{startTime}', 'IP Changed',
|
||||
'Previous IP: '|| devLastIP, devAlertEvents
|
||||
FROM Devices, CurrentScan
|
||||
WHERE devMac = cur_MAC
|
||||
WHERE devMac = cur_MAC
|
||||
AND devLastIP <> cur_IP """)
|
||||
mylog("debug", "[Events] - Events end")
|
||||
|
||||
|
||||
@@ -1,49 +1,43 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
import os
|
||||
import pathlib
|
||||
import sys
|
||||
from datetime import datetime
|
||||
# from datetime import datetime
|
||||
from dateutil import parser
|
||||
import datetime
|
||||
import re
|
||||
import pytz
|
||||
from pytz import timezone
|
||||
from typing import Union
|
||||
from typing import Union, Optional
|
||||
from zoneinfo import ZoneInfo
|
||||
import email.utils
|
||||
|
||||
# Register NetAlertX directories
|
||||
INSTALL_PATH="/app"
|
||||
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
|
||||
|
||||
import conf
|
||||
from const import *
|
||||
# from const import *
|
||||
|
||||
|
||||
|
||||
#-------------------------------------------------------------------------------
|
||||
# -------------------------------------------------------------------------------
|
||||
# DateTime
|
||||
#-------------------------------------------------------------------------------
|
||||
# -------------------------------------------------------------------------------
|
||||
|
||||
DATETIME_PATTERN = "%Y-%m-%d %H:%M:%S"
|
||||
DATETIME_REGEX = re.compile(r'^\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}$')
|
||||
|
||||
|
||||
def timeNowTZ():
|
||||
if conf.tz:
|
||||
return datetime.datetime.now(conf.tz).replace(microsecond=0)
|
||||
else:
|
||||
return datetime.datetime.now().replace(microsecond=0)
|
||||
|
||||
|
||||
def timeNow():
|
||||
return datetime.datetime.now().replace(microsecond=0)
|
||||
|
||||
def get_timezone_offset():
|
||||
|
||||
def get_timezone_offset():
|
||||
now = datetime.datetime.now(conf.tz)
|
||||
offset_hours = now.utcoffset().total_seconds() / 3600
|
||||
offset_hours = now.utcoffset().total_seconds() / 3600
|
||||
offset_formatted = "{:+03d}:{:02d}".format(int(offset_hours), int((offset_hours % 1) * 60))
|
||||
return offset_formatted
|
||||
|
||||
|
||||
def timeNowDB(local=True):
|
||||
"""
|
||||
Return the current time (local or UTC) as ISO 8601 for DB storage.
|
||||
@@ -67,9 +61,9 @@ def timeNowDB(local=True):
|
||||
return datetime.datetime.now(datetime.UTC).strftime(DATETIME_PATTERN)
|
||||
|
||||
|
||||
#-------------------------------------------------------------------------------
|
||||
# -------------------------------------------------------------------------------
|
||||
# Date and time methods
|
||||
#-------------------------------------------------------------------------------
|
||||
# -------------------------------------------------------------------------------
|
||||
|
||||
def normalizeTimeStamp(inputTimeStamp):
|
||||
"""
|
||||
@@ -91,7 +85,7 @@ def normalizeTimeStamp(inputTimeStamp):
|
||||
|
||||
# Epoch timestamp (integer or float)
|
||||
if isinstance(inputTimeStamp, (int, float)):
|
||||
try:
|
||||
try:
|
||||
return datetime.datetime.fromtimestamp(inputTimeStamp)
|
||||
except (OSError, OverflowError, ValueError):
|
||||
return None
|
||||
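A rough sketch of what the normalisation handles, assuming epoch numbers and date strings are the two accepted inputs (simplified compared to the real helper):

import datetime
from dateutil import parser

def normalize_ts_sketch(value):
    # Epoch seconds (int/float) map straight to a datetime...
    if isinstance(value, (int, float)):
        try:
            return datetime.datetime.fromtimestamp(value)
        except (OSError, OverflowError, ValueError):
            return None
    # ...anything else is treated as a date string and parsed leniently.
    try:
        return parser.parse(value)
    except (ValueError, TypeError):
        return None

print(normalize_ts_sketch(1700000000))
print(normalize_ts_sketch("2025-01-01 10:00:00"))
print(normalize_ts_sketch("not a date"))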
@@ -118,13 +112,14 @@ def normalizeTimeStamp(inputTimeStamp):
|
||||
|
||||
|
||||
# -------------------------------------------------------------------------------------------
|
||||
def format_date_iso(date1: str) -> str:
|
||||
def format_date_iso(date1: str) -> Optional[str]:
|
||||
"""Return ISO 8601 string for a date or None if empty"""
|
||||
if date1 is None:
|
||||
if not date1:
|
||||
return None
|
||||
dt = datetime.datetime.fromisoformat(date1) if isinstance(date1, str) else date1
|
||||
return dt.isoformat()
|
||||
|
||||
|
||||
# -------------------------------------------------------------------------------------------
|
||||
def format_event_date(date_str: str, event_type: str) -> str:
|
||||
"""Format event date with fallback rules."""
|
||||
@@ -135,6 +130,7 @@ def format_event_date(date_str: str, event_type: str) -> str:
|
||||
else:
|
||||
return "<still connected>"
|
||||
|
||||
|
||||
# -------------------------------------------------------------------------------------------
|
||||
def ensure_datetime(dt: Union[str, datetime.datetime, None]) -> datetime.datetime:
|
||||
if dt is None:
|
||||
@@ -157,6 +153,7 @@ def parse_datetime(dt_str):
|
||||
except ValueError:
|
||||
return None
|
||||
|
||||
|
||||
def format_date(date_str: str) -> str:
|
||||
try:
|
||||
dt = parse_datetime(date_str)
|
||||
@@ -168,13 +165,14 @@ def format_date(date_str: str) -> str:
|
||||
except (ValueError, AttributeError, TypeError):
|
||||
return "invalid"
|
||||
|
||||
|
||||
def format_date_diff(date1, date2, tz_name):
|
||||
"""
|
||||
Return difference between two datetimes as 'Xd HH:MM'.
|
||||
Uses app timezone if datetime is naive.
|
||||
date2 can be None (uses now).
|
||||
"""
|
||||
# Get timezone from settings
|
||||
# Get timezone from settings
|
||||
tz = pytz.timezone(tz_name)
|
||||
|
||||
def parse_dt(dt):
|
||||
@@ -184,8 +182,8 @@ def format_date_diff(date1, date2, tz_name):
|
||||
try:
|
||||
dt_parsed = email.utils.parsedate_to_datetime(dt)
|
||||
except (ValueError, TypeError):
|
||||
# fallback: parse ISO string
|
||||
dt_parsed = datetime.datetime.fromisoformat(dt)
|
||||
# fallback: parse ISO string
|
||||
dt_parsed = datetime.datetime.fromisoformat(dt)
|
||||
# convert naive GMT/UTC to app timezone
|
||||
if dt_parsed.tzinfo is None:
|
||||
dt_parsed = tz.localize(dt_parsed)
|
||||
@@ -208,4 +206,4 @@ def format_date_diff(date1, date2, tz_name):
|
||||
"hours": hours,
|
||||
"minutes": minutes,
|
||||
"total_minutes": total_minutes
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import os
|
||||
import json
|
||||
|
||||
from collections import namedtuple
|
||||
import conf
|
||||
from logger import mylog
|
||||
from utils.crypto_utils import decrypt_data
|
||||
@@ -220,9 +220,7 @@ def get_plugins_configs(loadAll):
|
||||
# Load all plugins if `loadAll` is True, the plugin is in the enabled list,
|
||||
# or no specific plugins are enabled (enabledPlugins is empty)
|
||||
if (
|
||||
loadAll
|
||||
or plugJson["unique_prefix"] in enabledPlugins
|
||||
or enabledPlugins == []
|
||||
loadAll or plugJson["unique_prefix"] in enabledPlugins or enabledPlugins == []
|
||||
):
|
||||
# Load the contents of the config.json file as a JSON object and append it to pluginsList
|
||||
pluginsList.append(plugJson)
|
||||
|
||||
@@ -1,11 +1,4 @@
|
||||
import sqlite3
|
||||
import os
|
||||
import sys
|
||||
|
||||
# Register NetAlertX directories
|
||||
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
|
||||
sys.path.extend([f"{INSTALL_PATH}/server"])
|
||||
|
||||
from logger import mylog, Logger
|
||||
from helper import get_setting_value
|
||||
from models.device_instance import DeviceInstance
|
||||
@@ -15,7 +8,6 @@ from models.plugin_object_instance import PluginObjectInstance
|
||||
Logger(get_setting_value("LOG_LEVEL"))
|
||||
|
||||
|
||||
|
||||
class Action:
|
||||
"""Base class for all actions."""
|
||||
|
||||
|
||||
@@ -1,10 +1,3 @@
|
||||
import os
|
||||
import sys
|
||||
|
||||
# Register NetAlertX directories
|
||||
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
|
||||
sys.path.extend([f"{INSTALL_PATH}/server"])
|
||||
|
||||
from helper import get_setting_value
|
||||
from logger import Logger
|
||||
from const import sql_generateGuid
|
||||
@@ -96,11 +89,11 @@ class AppEvent_obj:
|
||||
"ObjectPrimaryID" TEXT,
|
||||
"ObjectSecondaryID" TEXT,
|
||||
"ObjectForeignKey" TEXT,
|
||||
"ObjectIndex" TEXT,
|
||||
"ObjectIsNew" BOOLEAN,
|
||||
"ObjectIsArchived" BOOLEAN,
|
||||
"ObjectIndex" TEXT,
|
||||
"ObjectIsNew" BOOLEAN,
|
||||
"ObjectIsArchived" BOOLEAN,
|
||||
"ObjectStatusColumn" TEXT,
|
||||
"ObjectStatus" TEXT,
|
||||
"ObjectStatus" TEXT,
|
||||
"AppEventType" TEXT,
|
||||
"Helper1" TEXT,
|
||||
"Helper2" TEXT,
|
||||
@@ -117,11 +110,11 @@ class AppEvent_obj:
|
||||
CREATE TRIGGER IF NOT EXISTS "{trigger_name}"
|
||||
AFTER {event.upper()} ON "{table_name}"
|
||||
WHEN NOT EXISTS (
|
||||
SELECT 1 FROM AppEvents
|
||||
WHERE AppEventProcessed = 0
|
||||
SELECT 1 FROM AppEvents
|
||||
WHERE AppEventProcessed = 0
|
||||
AND ObjectType = '{table_name}'
|
||||
AND ObjectGUID = {manage_prefix(config["fields"]["ObjectGUID"], event)}
|
||||
AND ObjectStatus = {manage_prefix(config["fields"]["ObjectStatus"], event)}
|
||||
AND ObjectStatus = {manage_prefix(config["fields"]["ObjectStatus"], event)}
|
||||
AND AppEventType = '{event.lower()}'
|
||||
)
|
||||
BEGIN
|
||||
@@ -142,10 +135,10 @@ class AppEvent_obj:
|
||||
"AppEventType"
|
||||
)
|
||||
VALUES (
|
||||
{sql_generateGuid},
|
||||
DATETIME('now'),
|
||||
FALSE,
|
||||
'{table_name}',
|
||||
{sql_generateGuid},
|
||||
DATETIME('now'),
|
||||
FALSE,
|
||||
'{table_name}',
|
||||
{manage_prefix(config["fields"]["ObjectGUID"], event)}, -- ObjectGUID
|
||||
{manage_prefix(config["fields"]["ObjectPrimaryID"], event)}, -- ObjectPrimaryID
|
||||
{manage_prefix(config["fields"]["ObjectSecondaryID"], event)}, -- ObjectSecondaryID
|
Some files were not shown because too many files have changed in this diff.