From 4c92a941a86da5803d9f30306b596685fa7a8265 Mon Sep 17 00:00:00 2001 From: jokob-sk Date: Mon, 3 Nov 2025 08:12:00 +1100 Subject: [PATCH 01/31] BE: TZ timestamp work #1251 Signed-off-by: jokob-sk --- server/plugin.py | 12 +++++------- server/scan/device_handling.py | 5 +---- 2 files changed, 6 insertions(+), 11 deletions(-) diff --git a/server/plugin.py b/server/plugin.py index 6e89ed4e..dff1e8b2 100755 --- a/server/plugin.py +++ b/server/plugin.py @@ -223,13 +223,12 @@ class plugin_manager: sql.execute(""" SELECT MAX(DateTimeChanged) AS last_changed, COUNT(*) AS total_objects, - SUM(CASE WHEN DateTimeCreated = DateTimeChanged THEN 1 ELSE 0 END) AS new_objects, - CURRENT_TIMESTAMP AS state_updated + SUM(CASE WHEN DateTimeCreated = DateTimeChanged THEN 1 ELSE 0 END) AS new_objects FROM Plugins_Objects WHERE Plugin = ? """, (plugin_name,)) row = sql.fetchone() - last_changed, total_objects, new_objects, state_updated = row if row else ("", 0, 0, "") + last_changed, total_objects, new_objects, state_updated = row if row else ("", 0, 0) new_objects = new_objects or 0 # ensure it's int changed_objects = total_objects - new_objects @@ -238,7 +237,7 @@ class plugin_manager: "totalObjects": total_objects or 0, "newObjects": new_objects or 0, "changedObjects": changed_objects or 0, - "stateUpdated": state_updated or "" + "stateUpdated": timeNowTZ() } # Save in memory @@ -249,8 +248,7 @@ class plugin_manager: SELECT Plugin, MAX(DateTimeChanged) AS last_changed, COUNT(*) AS total_objects, - SUM(CASE WHEN DateTimeCreated = DateTimeChanged THEN 1 ELSE 0 END) AS new_objects, - CURRENT_TIMESTAMP AS state_updated + SUM(CASE WHEN DateTimeCreated = DateTimeChanged THEN 1 ELSE 0 END) AS new_objects FROM Plugins_Objects GROUP BY Plugin """) @@ -262,7 +260,7 @@ class plugin_manager: "totalObjects": total_objects or 0, "newObjects": new_objects or 0, "changedObjects": changed_objects or 0, - "stateUpdated": state_updated or "" + "stateUpdated": timeNowTZ() } # Save in memory diff --git a/server/scan/device_handling.py b/server/scan/device_handling.py index 67dc9915..bd89f67c 100755 --- a/server/scan/device_handling.py +++ b/server/scan/device_handling.py @@ -645,10 +645,7 @@ def update_devices_names(pm): # --- Step 3: Log last checked time --- # After resolving names, update last checked - sql = pm.db.sql - sql.execute("SELECT CURRENT_TIMESTAMP") - row = sql.fetchone() - pm.name_plugins_checked = row[0] if row else None + pm.name_plugins_checked = timeNowTZ() #------------------------------------------------------------------------------- # Updates devPresentLastScan for parent devices based on the presence of their NICs From 288427c9390faba814274f13cba897e7ecf54f81 Mon Sep 17 00:00:00 2001 From: jokob-sk Date: Mon, 3 Nov 2025 10:19:39 +1100 Subject: [PATCH 02/31] BE/PLG: TZ timestamp work #1251 Signed-off-by: jokob-sk --- front/plugins/arp_scan/script.py | 1 - front/plugins/dhcp_servers/script.py | 5 ++++- front/plugins/nmap_scan/script.py | 1 - front/plugins/plugin_helper.py | 3 +-- server/api_server/device_endpoint.py | 11 +++++------ server/app_state.py | 4 ++-- server/helper.py | 19 ------------------- server/plugin.py | 14 +++++++++----- server/scan/device_handling.py | 19 ++++++++++++------- 9 files changed, 33 insertions(+), 44 deletions(-) diff --git a/front/plugins/arp_scan/script.py b/front/plugins/arp_scan/script.py index b4dee38f..48b46179 100755 --- a/front/plugins/arp_scan/script.py +++ b/front/plugins/arp_scan/script.py @@ -8,7 +8,6 @@ import sys import re import base64 import subprocess -from 
time import strftime # Register NetAlertX directories INSTALL_PATH="/app" diff --git a/front/plugins/dhcp_servers/script.py b/front/plugins/dhcp_servers/script.py index 62f112f3..6e449db2 100755 --- a/front/plugins/dhcp_servers/script.py +++ b/front/plugins/dhcp_servers/script.py @@ -44,8 +44,11 @@ def main(): nmapArgs = ['sudo', 'nmap', '--privileged', '--script', 'broadcast-dhcp-discover'] try: + # Number of DHCP discovery probes to send dhcp_probes = 1 - newLines = [datetime.now().strftime("%Y-%m-%d %H:%M:%S")] + + # Initialize a list to store output lines from the scan + newLines = [] for _ in range(dhcp_probes): output = subprocess.check_output(nmapArgs, universal_newlines=True, stderr=subprocess.STDOUT, timeout=timeoutSec) diff --git a/front/plugins/nmap_scan/script.py b/front/plugins/nmap_scan/script.py index adc7c4f6..1851b26c 100755 --- a/front/plugins/nmap_scan/script.py +++ b/front/plugins/nmap_scan/script.py @@ -7,7 +7,6 @@ import sys import re import base64 import subprocess -from time import strftime # Register NetAlertX directories INSTALL_PATH="/app" diff --git a/front/plugins/plugin_helper.py b/front/plugins/plugin_helper.py index ceb9cd8b..edd6a027 100755 --- a/front/plugins/plugin_helper.py +++ b/front/plugins/plugin_helper.py @@ -1,4 +1,3 @@ -from time import strftime import pytz from pytz import timezone, all_timezones, UnknownTimeZoneError import sys @@ -205,7 +204,7 @@ class Plugin_Object: self.pluginPref = "" self.primaryId = primaryId self.secondaryId = secondaryId - self.created = datetime.now().strftime("%Y-%m-%d %H:%M:%S") + self.created = datetime.now().astimezone().isoformat() self.changed = "" self.watched1 = watched1 self.watched2 = watched2 diff --git a/server/api_server/device_endpoint.py b/server/api_server/device_endpoint.py index 1064b7a1..3e66a560 100755 --- a/server/api_server/device_endpoint.py +++ b/server/api_server/device_endpoint.py @@ -14,7 +14,7 @@ INSTALL_PATH="/app" sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"]) from database import get_temp_db_connection -from helper import is_random_mac, format_date, get_setting_value +from helper import is_random_mac, format_date, get_setting_value, timeNowTZ from db.db_helper import row_to_json, get_date_from_period # -------------------------- @@ -30,7 +30,7 @@ def get_device_data(mac): # Special case for new device if mac.lower() == "new": - now = datetime.now().strftime("%Y-%m-%d %H:%M") + now = timeNowTZ().astimezone().isoformat() device_data = { "devMac": "", "devName": "", @@ -78,7 +78,6 @@ def get_device_data(mac): # Compute period date for sessions/events period = request.args.get('period', '') # e.g., '7 days', '1 month', etc. 
period_date_sql = get_date_from_period(period) - current_date = datetime.now().strftime("%Y-%m-%d %H:%M:%S") # Fetch device info + computed fields sql = f""" @@ -106,7 +105,7 @@ def get_device_data(mac): AND eve_EventType = 'Device Down') AS devDownAlerts, (SELECT CAST(MAX(0, SUM( - julianday(IFNULL(ses_DateTimeDisconnection,'{current_date}')) - + julianday(IFNULL(ses_DateTimeDisconnection,'{now}')) - julianday(CASE WHEN ses_DateTimeConnection < {period_date_sql} THEN {period_date_sql} ELSE ses_DateTimeConnection END) ) * 24) AS INT) @@ -186,8 +185,8 @@ def set_device_data(mac, data): data.get("devSkipRepeated", 0), data.get("devIsNew", 0), data.get("devIsArchived", 0), - data.get("devLastConnection", datetime.now().strftime("%Y-%m-%d %H:%M:%S")), - data.get("devFirstConnection", datetime.now().strftime("%Y-%m-%d %H:%M:%S")), + data.get("devLastConnection", timeNowTZ().astimezone().isoformat()), + data.get("devFirstConnection", timeNowTZ().astimezone().isoformat()), data.get("devLastIP", ""), data.get("devGUID", ""), data.get("devCustomProps", ""), diff --git a/server/app_state.py b/server/app_state.py index 750915f3..4a3dcad2 100755 --- a/server/app_state.py +++ b/server/app_state.py @@ -59,7 +59,7 @@ class app_state_class: previousState = "" # Update self - self.lastUpdated = str(timeNowTZ()) + self.lastUpdated = str(timeNowTZ().astimezone().isoformat()) if os.path.exists(stateFile): try: @@ -107,7 +107,7 @@ class app_state_class: if pluginsStates is not None: for plugin, state in pluginsStates.items(): if plugin in self.pluginsStates: - # Only update existing keys if both are dicts + # Only update existing keys if both are dicts if isinstance(self.pluginsStates[plugin], dict) and isinstance(state, dict): self.pluginsStates[plugin].update(state) else: diff --git a/server/helper.py b/server/helper.py index e48958b8..1212265e 100755 --- a/server/helper.py +++ b/server/helper.py @@ -58,25 +58,6 @@ def get_timezone_offset(): # Date and time methods #------------------------------------------------------------------------------- -# # ------------------------------------------------------------------------------------------- -# def format_date(date_str: str) -> str: -# """Format a date string as 'YYYY-MM-DD HH:MM'""" -# dt = datetime.datetime.fromisoformat(date_str) if isinstance(date_str, str) else date_str -# return dt.strftime('%Y-%m-%d %H:%M') - -# # ------------------------------------------------------------------------------------------- -# def format_date_diff(date1: str, date2: str) -> str: -# """Return difference between two dates formatted as 'Xd HH:MM'""" -# dt1 = datetime.datetime.fromisoformat(date1) if isinstance(date1, str) else date1 -# dt2 = datetime.datetime.fromisoformat(date2) if isinstance(date2, str) else date2 -# delta = dt2 - dt1 - -# days = delta.days -# hours, remainder = divmod(delta.seconds, 3600) -# minutes = remainder // 60 - -# return f"{days}d {hours:02}:{minutes:02}" - # ------------------------------------------------------------------------------------------- def format_date_iso(date1: str) -> str: """Return ISO 8601 string for a date or None if empty""" diff --git a/server/plugin.py b/server/plugin.py index dff1e8b2..a65a0201 100755 --- a/server/plugin.py +++ b/server/plugin.py @@ -93,6 +93,9 @@ class plugin_manager: # Update plugin states in app_state current_plugin_state = self.get_plugin_states(prefix) # get latest plugin state + + # mylog('debug', f'current_plugin_state: {current_plugin_state}') + updateState(pluginsStates={prefix: 
current_plugin_state.get(prefix, {})}) # update last run time @@ -218,6 +221,7 @@ class plugin_manager: """ sql = self.db.sql plugin_states = {} + now_str = timeNowTZ().isoformat() if plugin_name: # Only compute for single plugin sql.execute(""" @@ -228,7 +232,7 @@ class plugin_manager: WHERE Plugin = ? """, (plugin_name,)) row = sql.fetchone() - last_changed, total_objects, new_objects, state_updated = row if row else ("", 0, 0) + last_changed, total_objects, new_objects = row if row else ("", 0, 0) new_objects = new_objects or 0 # ensure it's int changed_objects = total_objects - new_objects @@ -237,7 +241,7 @@ class plugin_manager: "totalObjects": total_objects or 0, "newObjects": new_objects or 0, "changedObjects": changed_objects or 0, - "stateUpdated": timeNowTZ() + "stateUpdated": now_str } # Save in memory @@ -252,7 +256,7 @@ class plugin_manager: FROM Plugins_Objects GROUP BY Plugin """) - for plugin, last_changed, total_objects, new_objects, state_updated in sql.fetchall(): + for plugin, last_changed, total_objects, new_objects in sql.fetchall(): new_objects = new_objects or 0 # ensure it's int changed_objects = total_objects - new_objects plugin_states[plugin] = { @@ -260,7 +264,7 @@ class plugin_manager: "totalObjects": total_objects or 0, "newObjects": new_objects or 0, "changedObjects": changed_objects or 0, - "stateUpdated": timeNowTZ() + "stateUpdated": now_str } # Save in memory @@ -755,7 +759,7 @@ def process_plugin_events(db, plugin, plugEventsArr): if isMissing: # if wasn't missing before, mark as changed if tmpObj.status != "missing-in-last-scan": - tmpObj.changed = timeNowTZ().strftime('%Y-%m-%d %H:%M:%S') + tmpObj.changed = timeNowTZ().astimezone().isoformat() tmpObj.status = "missing-in-last-scan" # mylog('debug', [f'[Plugins] Missing from last scan (PrimaryID | SecondaryID): {tmpObj.primaryId} | {tmpObj.secondaryId}']) diff --git a/server/scan/device_handling.py b/server/scan/device_handling.py index bd89f67c..57dcc923 100755 --- a/server/scan/device_handling.py +++ b/server/scan/device_handling.py @@ -3,6 +3,7 @@ import subprocess import conf import os import re +import datetime from dateutil import parser # Register NetAlertX directories @@ -55,7 +56,7 @@ def exclude_ignored_devices(db): #------------------------------------------------------------------------------- def update_devices_data_from_scan (db): sql = db.sql #TO-DO - startTime = timeNowTZ().strftime('%Y-%m-%d %H:%M:%S') + startTime = timeNowTZ().astimezone().isoformat() # Update Last Connection mylog('debug', '[Update Devices] 1 Last Connection') @@ -528,23 +529,27 @@ def update_devices_names(pm): # --- Short-circuit if no name-resolution plugin has changed --- name_plugins = ["DIGSCAN", "NSLOOKUP", "NBTSCAN", "AVAHISCAN"] - # Retrieve last time name resolution was checked (string or datetime) - last_checked_str = pm.name_plugins_checked - last_checked_dt = parser.parse(last_checked_str) if isinstance(last_checked_str, str) else last_checked_str + # Retrieve last time name resolution was checked + last_checked = pm.name_plugins_checked # Collect valid state update timestamps for name-related plugins state_times = [] for p in name_plugins: state_updated = pm.plugin_states.get(p, {}).get("stateUpdated") - if state_updated and state_updated.strip(): # skip empty or None + if state_updated: # skip empty or None state_times.append(state_updated) # Determine the latest valid stateUpdated timestamp latest_state_str = max(state_times, default=None) - latest_state_dt = parser.parse(latest_state_str) if 
latest_state_str else None + if isinstance(latest_state_str, datetime.datetime): + latest_state = latest_state_str + elif latest_state_str: + latest_state = parser.parse(latest_state_str) + else: + latest_state = None # Skip if no plugin state changed since last check - if last_checked_dt and latest_state_dt and latest_state_dt <= last_checked_dt: + if last_checked and latest_state and latest_state <= last_checked: mylog('debug', '[Update Device Name] No relevant name plugin changes since last check — skipping update.') return From c52e44f90c18f2f325f106f8d0486f380d29d777 Mon Sep 17 00:00:00 2001 From: jokob-sk Date: Tue, 4 Nov 2025 08:10:50 +1100 Subject: [PATCH 03/31] BE/PLG: TZ timestamp work #1251 Signed-off-by: jokob-sk --- server/api_server/device_endpoint.py | 4 ++- server/helper.py | 11 ++++++-- server/scan/device_handling.py | 38 ++++++++++++++++++---------- test/test_dbquery_endpoints.py | 5 +++- 4 files changed, 41 insertions(+), 17 deletions(-) diff --git a/server/api_server/device_endpoint.py b/server/api_server/device_endpoint.py index 3e66a560..7a331c5d 100755 --- a/server/api_server/device_endpoint.py +++ b/server/api_server/device_endpoint.py @@ -27,10 +27,12 @@ def get_device_data(mac): # Open temporary connection for this request conn = get_temp_db_connection() cur = conn.cursor() + + now = timeNowTZ().astimezone().isoformat() # Special case for new device if mac.lower() == "new": - now = timeNowTZ().astimezone().isoformat() + device_data = { "devMac": "", "devName": "", diff --git a/server/helper.py b/server/helper.py index 1212265e..f7787af1 100755 --- a/server/helper.py +++ b/server/helper.py @@ -99,8 +99,15 @@ def parse_datetime(dt_str): return None def format_date(date_str: str) -> str: - dt = parse_datetime(date_str) - return dt.strftime('%Y-%m-%d %H:%M') if dt else "invalid" + try: + dt = parse_datetime(date_str) + if dt.tzinfo is None: + # Set timezone if missing — change to timezone.utc if you prefer UTC + now = datetime.datetime.now(conf.tz) + dt = dt.replace(tzinfo=now.astimezone().tzinfo) + return dt.astimezone().isoformat() + except Exception: + return "invalid" def format_date_diff(date1, date2): """ diff --git a/server/scan/device_handling.py b/server/scan/device_handling.py index 57dcc923..08686885 100755 --- a/server/scan/device_handling.py +++ b/server/scan/device_handling.py @@ -532,21 +532,33 @@ def update_devices_names(pm): # Retrieve last time name resolution was checked last_checked = pm.name_plugins_checked - # Collect valid state update timestamps for name-related plugins - state_times = [] + # Collect and normalize valid state update timestamps for name-related plugins + state_times = [] + latest_state = None + for p in name_plugins: state_updated = pm.plugin_states.get(p, {}).get("stateUpdated") - if state_updated: # skip empty or None - state_times.append(state_updated) + if not state_updated: + continue + + # Normalize and validate timestamp + if isinstance(state_updated, datetime.datetime): + state_times.append(state_updated) + elif isinstance(state_updated, str): + try: + state_times.append(parser.parse(state_updated)) + except Exception as e: + mylog('none', f'[Update Device Name] Failed to parse timestamp for {p}: {state_updated!r} ({e})') + else: + mylog('none', f'[Update Device Name] Unexpected timestamp type for {p}: {type(state_updated)}') + # Determine the latest valid timestamp safely + try: + if state_times: + latest_state = max(state_times) + except Exception as e: + mylog('none', f'[Update Device Name] Failed to determine latest 
timestamp, using fallback ({e})') + latest_state = state_times[-1] if state_times else None - # Determine the latest valid stateUpdated timestamp - latest_state_str = max(state_times, default=None) - if isinstance(latest_state_str, datetime.datetime): - latest_state = latest_state_str - elif latest_state_str: - latest_state = parser.parse(latest_state_str) - else: - latest_state = None # Skip if no plugin state changed since last check if last_checked and latest_state and latest_state <= last_checked: @@ -650,7 +662,7 @@ def update_devices_names(pm): # --- Step 3: Log last checked time --- # After resolving names, update last checked - pm.name_plugins_checked = timeNowTZ() + pm.name_plugins_checked = timeNowTZ().astimezone().isoformat() #------------------------------------------------------------------------------- # Updates devPresentLastScan for parent devices based on the presence of their NICs diff --git a/test/test_dbquery_endpoints.py b/test/test_dbquery_endpoints.py index 981ab2f9..ff4347ed 100755 --- a/test/test_dbquery_endpoints.py +++ b/test/test_dbquery_endpoints.py @@ -40,9 +40,12 @@ def b64(sql: str) -> str: # Device lifecycle via dbquery endpoints # ----------------------------- def test_dbquery_create_device(client, api_token, test_mac): + + now = timeNowTZ().astimezone().isoformat() + sql = f""" INSERT INTO Devices (devMac, devName, devVendor, devOwner, devFirstConnection, devLastConnection, devLastIP) - VALUES ('{test_mac}', 'UnitTestDevice', 'TestVendor', 'UnitTest', '{timeNowTZ()}', '{timeNowTZ()}', '192.168.100.22' ) + VALUES ('{test_mac}', 'UnitTestDevice', 'TestVendor', 'UnitTest', '{now}', '{now}', '192.168.100.22' ) """ resp = client.post("/dbquery/write", json={"rawSql": b64(sql)}, headers=auth_headers(api_token)) print(resp.json) From 6dd7251c84a8ff2b4e8a94a987b4a1da4f6df73a Mon Sep 17 00:00:00 2001 From: "Jokob @NetAlertX" <96159884+jokob-sk@users.noreply.github.com> Date: Tue, 4 Nov 2025 07:06:19 +0000 Subject: [PATCH 04/31] BE/PLG: TZ timestamp work #1251 --- server/scan/device_handling.py | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/server/scan/device_handling.py b/server/scan/device_handling.py index 08686885..c213698e 100755 --- a/server/scan/device_handling.py +++ b/server/scan/device_handling.py @@ -532,6 +532,16 @@ def update_devices_names(pm): # Retrieve last time name resolution was checked last_checked = pm.name_plugins_checked + # Normalize last_checked to datetime if it's a string + if isinstance(last_checked, str): + try: + last_checked = parser.parse(last_checked) + except Exception as e: + mylog('none', f'[Update Device Name] Could not parse last_checked timestamp: {last_checked!r} ({e})') + last_checked = None + elif not isinstance(last_checked, datetime.datetime): + last_checked = None + # Collect and normalize valid state update timestamps for name-related plugins state_times = [] latest_state = None From 59477e7b380a7ce7a643807c8744038015786196 Mon Sep 17 00:00:00 2001 From: jokob-sk Date: Tue, 4 Nov 2025 19:24:13 +1100 Subject: [PATCH 05/31] BE/PLG: TZ timestamp work #1251 Signed-off-by: jokob-sk --- front/plugins/__template/rename_me.py | 2 +- front/plugins/__test/test.py | 2 +- front/plugins/_publisher_apprise/apprise.py | 4 +-- front/plugins/_publisher_email/email_smtp.py | 4 +-- front/plugins/_publisher_mqtt/mqtt.py | 4 +-- front/plugins/_publisher_ntfy/ntfy.py | 4 +-- front/plugins/_publisher_pushover/pushover.py | 4 +-- .../plugins/_publisher_pushsafer/pushsafer.py | 4 +-- front/plugins/_publisher_telegram/tg.py | 4 
+-- front/plugins/_publisher_webhook/webhook.py | 4 +-- front/plugins/arp_scan/script.py | 2 +- front/plugins/csv_backup/script.py | 2 +- front/plugins/db_cleanup/script.py | 2 +- front/plugins/ddns_update/script.py | 2 +- front/plugins/dhcp_leases/script.py | 2 +- front/plugins/dhcp_servers/script.py | 2 +- front/plugins/dig_scan/digscan.py | 2 +- front/plugins/freebox/freebox.py | 2 +- front/plugins/icmp_scan/icmp.py | 2 +- front/plugins/internet_ip/script.py | 4 +-- front/plugins/internet_speedtest/script.py | 4 +-- front/plugins/ipneigh/ipneigh.py | 2 +- front/plugins/luci_import/script.py | 2 +- front/plugins/maintenance/maintenance.py | 2 +- front/plugins/nbtscan_scan/nbtscan.py | 2 +- front/plugins/nmap_dev_scan/nmap_dev.py | 2 +- front/plugins/nmap_scan/script.py | 4 +-- front/plugins/nslookup_scan/nslookup.py | 2 +- front/plugins/omada_sdn_imp/omada_sdn.py | 2 +- front/plugins/plugin_helper.py | 4 +-- front/plugins/snmp_discovery/script.py | 2 +- front/plugins/sync/sync.py | 14 +++++----- .../unifi_api_import/unifi_api_import.py | 2 +- front/plugins/unifi_import/script.py | 2 +- front/plugins/vendor_update/script.py | 2 +- front/plugins/wake_on_lan/wake_on_lan.py | 2 +- front/plugins/website_monitor/script.py | 2 +- server/api_server/api_server_start.py | 2 +- server/api_server/device_endpoint.py | 8 +++--- server/api_server/events_endpoint.py | 2 +- server/api_server/sessions_endpoint.py | 2 +- server/api_server/sync_endpoint.py | 12 ++++----- server/app_state.py | 4 +-- server/helper.py | 26 ++++++++++++------ server/initialise.py | 6 ++--- server/logger.py | 17 ++++++++++++ server/messaging/in_app.py | 4 +-- server/messaging/reporting.py | 5 +++- server/models/notification_instance.py | 9 ++++--- server/plugin.py | 14 +++++----- server/plugin_utils.py | 2 +- server/scan/device_handling.py | 27 ++++++++++--------- server/scan/device_heuristics.py | 2 +- server/scan/session_events.py | 6 ++--- server/workflows/actions.py | 2 +- server/workflows/app_events.py | 3 +-- server/workflows/conditions.py | 2 +- server/workflows/manager.py | 2 +- server/workflows/triggers.py | 2 +- test/test_dbquery_endpoints.py | 4 +-- test/test_device_endpoints.py | 2 +- test/test_devices_endpoints.py | 2 +- test/test_graphq_endpoints.py | 2 +- test/test_history_endpoints.py | 2 +- test/test_nettools_endpoints.py | 2 +- test/test_sessions_endpoints.py | 12 ++++----- test/test_settings_endpoints.py | 2 +- 67 files changed, 164 insertions(+), 133 deletions(-) diff --git a/front/plugins/__template/rename_me.py b/front/plugins/__template/rename_me.py index 49f2a760..c303d95d 100755 --- a/front/plugins/__template/rename_me.py +++ b/front/plugins/__template/rename_me.py @@ -15,7 +15,7 @@ from plugin_helper import Plugin_Object, Plugin_Objects, decodeBase64 from plugin_utils import get_plugins_configs from logger import mylog, Logger from const import pluginsPath, fullDbPath, logPath -from helper import timeNowTZ, get_setting_value +from helper import get_setting_value from messaging.in_app import write_notification import conf diff --git a/front/plugins/__test/test.py b/front/plugins/__test/test.py index e87bdfb4..21cfc1d0 100755 --- a/front/plugins/__test/test.py +++ b/front/plugins/__test/test.py @@ -23,7 +23,7 @@ from const import apiPath, confFileName, logPath from plugin_utils import getPluginObject from plugin_helper import Plugin_Objects from logger import mylog, Logger, append_line_to_file -from helper import timeNowTZ, get_setting_value, bytes_to_string, sanitize_string, cleanDeviceName +from helper 
import get_setting_value, bytes_to_string, sanitize_string, cleanDeviceName from models.notification_instance import NotificationInstance from database import DB, get_device_stats diff --git a/front/plugins/_publisher_apprise/apprise.py b/front/plugins/_publisher_apprise/apprise.py index 967074ed..5f1c3c33 100755 --- a/front/plugins/_publisher_apprise/apprise.py +++ b/front/plugins/_publisher_apprise/apprise.py @@ -16,7 +16,7 @@ import conf from const import confFileName, logPath from plugin_helper import Plugin_Objects from logger import mylog, Logger, append_line_to_file -from helper import timeNowTZ, get_setting_value +from helper import timeNowDB, get_setting_value from models.notification_instance import NotificationInstance from database import DB from pytz import timezone @@ -65,7 +65,7 @@ def main(): # Log result plugin_objects.add_object( primaryId = pluginName, - secondaryId = timeNowTZ(), + secondaryId = timeNowDB(), watched1 = notification["GUID"], watched2 = result, watched3 = 'null', diff --git a/front/plugins/_publisher_email/email_smtp.py b/front/plugins/_publisher_email/email_smtp.py index 682bf5ce..8d738844 100755 --- a/front/plugins/_publisher_email/email_smtp.py +++ b/front/plugins/_publisher_email/email_smtp.py @@ -25,7 +25,7 @@ import conf from const import confFileName, logPath from plugin_helper import Plugin_Objects from logger import mylog, Logger, append_line_to_file -from helper import timeNowTZ, get_setting_value, hide_email +from helper import timeNowDB, get_setting_value, hide_email from models.notification_instance import NotificationInstance from database import DB from pytz import timezone @@ -86,7 +86,7 @@ def main(): # Log result plugin_objects.add_object( primaryId = pluginName, - secondaryId = timeNowTZ(), + secondaryId = timeNowDB(), watched1 = notification["GUID"], watched2 = result, watched3 = 'null', diff --git a/front/plugins/_publisher_mqtt/mqtt.py b/front/plugins/_publisher_mqtt/mqtt.py index fade6e18..03a441a3 100755 --- a/front/plugins/_publisher_mqtt/mqtt.py +++ b/front/plugins/_publisher_mqtt/mqtt.py @@ -23,7 +23,7 @@ from const import confFileName, logPath from plugin_utils import getPluginObject from plugin_helper import Plugin_Objects from logger import mylog, Logger -from helper import timeNowTZ, get_setting_value, bytes_to_string, \ +from helper import timeNowDB, get_setting_value, bytes_to_string, \ sanitize_string, normalize_string from database import DB, get_device_stats @@ -567,7 +567,7 @@ def prepTimeStamp(datetime_str): except ValueError: mylog('verbose', [f"[{pluginName}] Timestamp conversion failed of string '{datetime_str}'"]) # Use the current time if the input format is invalid - parsed_datetime = timeNowTZ() # Assuming this function returns the current time with timezone + parsed_datetime = timeNowDB() # Convert to the required format with 'T' between date and time and ensure the timezone is included return parsed_datetime.isoformat() # This will include the timezone offset diff --git a/front/plugins/_publisher_ntfy/ntfy.py b/front/plugins/_publisher_ntfy/ntfy.py index 4c3807b9..79df681f 100755 --- a/front/plugins/_publisher_ntfy/ntfy.py +++ b/front/plugins/_publisher_ntfy/ntfy.py @@ -19,7 +19,7 @@ import conf from const import confFileName, logPath from plugin_helper import Plugin_Objects, handleEmpty from logger import mylog, Logger, append_line_to_file -from helper import timeNowTZ, get_setting_value +from helper import timeNowDB, get_setting_value from models.notification_instance import NotificationInstance from 
database import DB from pytz import timezone @@ -68,7 +68,7 @@ def main(): # Log result plugin_objects.add_object( primaryId = pluginName, - secondaryId = timeNowTZ(), + secondaryId = timeNowDB(), watched1 = notification["GUID"], watched2 = handleEmpty(response_text), watched3 = response_status_code, diff --git a/front/plugins/_publisher_pushover/pushover.py b/front/plugins/_publisher_pushover/pushover.py index 65357c91..8ebd1dee 100755 --- a/front/plugins/_publisher_pushover/pushover.py +++ b/front/plugins/_publisher_pushover/pushover.py @@ -11,7 +11,7 @@ sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"]) from plugin_helper import Plugin_Objects, handleEmpty # noqa: E402 from logger import mylog, Logger # noqa: E402 -from helper import timeNowTZ, get_setting_value, hide_string # noqa: E402 +from helper import timeNowDB, get_setting_value, hide_string # noqa: E402 from models.notification_instance import NotificationInstance # noqa: E402 from database import DB # noqa: E402 import conf @@ -63,7 +63,7 @@ def main(): # Log result plugin_objects.add_object( primaryId=pluginName, - secondaryId=timeNowTZ(), + secondaryId=timeNowDB(), watched1=notification["GUID"], watched2=handleEmpty(response_text), watched3=response_status_code, diff --git a/front/plugins/_publisher_pushsafer/pushsafer.py b/front/plugins/_publisher_pushsafer/pushsafer.py index 4a857e58..366f170a 100755 --- a/front/plugins/_publisher_pushsafer/pushsafer.py +++ b/front/plugins/_publisher_pushsafer/pushsafer.py @@ -19,7 +19,7 @@ import conf from const import confFileName, logPath from plugin_helper import Plugin_Objects, handleEmpty from logger import mylog, Logger, append_line_to_file -from helper import timeNowTZ, get_setting_value, hide_string +from helper import timeNowDB, get_setting_value, hide_string from models.notification_instance import NotificationInstance from database import DB from pytz import timezone @@ -68,7 +68,7 @@ def main(): # Log result plugin_objects.add_object( primaryId = pluginName, - secondaryId = timeNowTZ(), + secondaryId = timeNowDB(), watched1 = notification["GUID"], watched2 = handleEmpty(response_text), watched3 = response_status_code, diff --git a/front/plugins/_publisher_telegram/tg.py b/front/plugins/_publisher_telegram/tg.py index a74842eb..c9f92d9d 100755 --- a/front/plugins/_publisher_telegram/tg.py +++ b/front/plugins/_publisher_telegram/tg.py @@ -16,7 +16,7 @@ import conf from const import confFileName, logPath from plugin_helper import Plugin_Objects from logger import mylog, Logger, append_line_to_file -from helper import timeNowTZ, get_setting_value +from helper import timeNowDB, get_setting_value from models.notification_instance import NotificationInstance from database import DB from pytz import timezone @@ -65,7 +65,7 @@ def main(): # Log result plugin_objects.add_object( primaryId=pluginName, - secondaryId=timeNowTZ(), + secondaryId=timeNowDB(), watched1=notification["GUID"], watched2=result, watched3='null', diff --git a/front/plugins/_publisher_webhook/webhook.py b/front/plugins/_publisher_webhook/webhook.py index ec8a2407..f1eec9d7 100755 --- a/front/plugins/_publisher_webhook/webhook.py +++ b/front/plugins/_publisher_webhook/webhook.py @@ -22,7 +22,7 @@ import conf from const import logPath, confFileName from plugin_helper import Plugin_Objects, handleEmpty from logger import mylog, Logger, append_line_to_file -from helper import timeNowTZ, get_setting_value, hide_string, write_file +from helper import timeNowDB, get_setting_value, hide_string, 
write_file from models.notification_instance import NotificationInstance from database import DB from pytz import timezone @@ -71,7 +71,7 @@ def main(): # Log result plugin_objects.add_object( primaryId = pluginName, - secondaryId = timeNowTZ(), + secondaryId = timeNowDB(), watched1 = notification["GUID"], watched2 = handleEmpty(response_stdout), watched3 = handleEmpty(response_stderr), diff --git a/front/plugins/arp_scan/script.py b/front/plugins/arp_scan/script.py index b3d4d426..01c868e1 100755 --- a/front/plugins/arp_scan/script.py +++ b/front/plugins/arp_scan/script.py @@ -16,7 +16,7 @@ sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"]) from database import DB from plugin_helper import Plugin_Object, Plugin_Objects, handleEmpty from logger import mylog, Logger, append_line_to_file -from helper import timeNowTZ, get_setting_value +from helper import get_setting_value from const import logPath, applicationPath import conf from pytz import timezone diff --git a/front/plugins/csv_backup/script.py b/front/plugins/csv_backup/script.py index 8ce00f89..c4c19951 100755 --- a/front/plugins/csv_backup/script.py +++ b/front/plugins/csv_backup/script.py @@ -16,7 +16,7 @@ sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"]) from plugin_helper import Plugin_Object, Plugin_Objects, decodeBase64 from logger import mylog, Logger, append_line_to_file -from helper import timeNowTZ, get_setting_value +from helper import get_setting_value from const import logPath, applicationPath, fullDbPath import conf from pytz import timezone diff --git a/front/plugins/db_cleanup/script.py b/front/plugins/db_cleanup/script.py index 55d56d8c..7d0fb012 100755 --- a/front/plugins/db_cleanup/script.py +++ b/front/plugins/db_cleanup/script.py @@ -16,7 +16,7 @@ sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"]) from plugin_helper import Plugin_Object, Plugin_Objects, decodeBase64 from logger import mylog, Logger, append_line_to_file -from helper import timeNowTZ, get_setting_value +from helper import get_setting_value from const import logPath, applicationPath, fullDbPath import conf from pytz import timezone diff --git a/front/plugins/ddns_update/script.py b/front/plugins/ddns_update/script.py index 9b787cb5..57a6da4c 100755 --- a/front/plugins/ddns_update/script.py +++ b/front/plugins/ddns_update/script.py @@ -19,7 +19,7 @@ sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"]) from plugin_helper import Plugin_Object, Plugin_Objects, decodeBase64 from logger import mylog, Logger, append_line_to_file -from helper import timeNowTZ, get_setting_value, check_IP_format +from helper import get_setting_value, check_IP_format from const import logPath, applicationPath, fullDbPath import conf from pytz import timezone diff --git a/front/plugins/dhcp_leases/script.py b/front/plugins/dhcp_leases/script.py index 491abebf..44a1b7c7 100755 --- a/front/plugins/dhcp_leases/script.py +++ b/front/plugins/dhcp_leases/script.py @@ -15,7 +15,7 @@ sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"]) from plugin_helper import Plugin_Object, Plugin_Objects, handleEmpty, is_mac from logger import mylog, Logger from dhcp_leases import DhcpLeases -from helper import timeNowTZ, get_setting_value +from helper import get_setting_value import conf from const import logPath from pytz import timezone diff --git a/front/plugins/dhcp_servers/script.py b/front/plugins/dhcp_servers/script.py index 6e449db2..6565072f 100755 --- 
a/front/plugins/dhcp_servers/script.py +++ b/front/plugins/dhcp_servers/script.py @@ -13,7 +13,7 @@ sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"]) from plugin_helper import Plugin_Objects, Plugin_Object from logger import mylog, Logger -from helper import timeNowTZ, get_setting_value +from helper import get_setting_value import conf from pytz import timezone from const import logPath diff --git a/front/plugins/dig_scan/digscan.py b/front/plugins/dig_scan/digscan.py index 4ac67cf0..dd00c226 100755 --- a/front/plugins/dig_scan/digscan.py +++ b/front/plugins/dig_scan/digscan.py @@ -15,7 +15,7 @@ from plugin_helper import Plugin_Object, Plugin_Objects, decodeBase64 from plugin_utils import get_plugins_configs from logger import mylog, Logger from const import pluginsPath, fullDbPath, logPath -from helper import timeNowTZ, get_setting_value +from helper import get_setting_value from messaging.in_app import write_notification from database import DB from models.device_instance import DeviceInstance diff --git a/front/plugins/freebox/freebox.py b/front/plugins/freebox/freebox.py index 1ce9488a..da8a8884 100755 --- a/front/plugins/freebox/freebox.py +++ b/front/plugins/freebox/freebox.py @@ -24,7 +24,7 @@ from plugin_helper import Plugin_Object, Plugin_Objects, decodeBase64 from plugin_utils import get_plugins_configs from logger import mylog, Logger from const import pluginsPath, fullDbPath, logPath -from helper import timeNowTZ, get_setting_value +from helper import get_setting_value from messaging.in_app import write_notification import conf diff --git a/front/plugins/icmp_scan/icmp.py b/front/plugins/icmp_scan/icmp.py index 5577cb0e..467f3631 100755 --- a/front/plugins/icmp_scan/icmp.py +++ b/front/plugins/icmp_scan/icmp.py @@ -20,7 +20,7 @@ sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"]) from plugin_helper import Plugin_Object, Plugin_Objects, decodeBase64 from logger import mylog, Logger, append_line_to_file -from helper import timeNowTZ, get_setting_value +from helper import get_setting_value from const import logPath, applicationPath, fullDbPath from database import DB from models.device_instance import DeviceInstance diff --git a/front/plugins/internet_ip/script.py b/front/plugins/internet_ip/script.py index 574052c7..f56d2ed7 100755 --- a/front/plugins/internet_ip/script.py +++ b/front/plugins/internet_ip/script.py @@ -20,7 +20,7 @@ sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"]) from plugin_helper import Plugin_Object, Plugin_Objects, decodeBase64 from logger import mylog, Logger, append_line_to_file -from helper import timeNowTZ, check_IP_format, get_setting_value +from helper import timeNowDB, check_IP_format, get_setting_value from const import logPath, applicationPath, fullDbPath import conf from pytz import timezone @@ -80,7 +80,7 @@ def main(): mylog('verbose', [f'[{pluginName}] Curl Fallback (new_internet_IP|cmd_output): {new_internet_IP} | {cmd_output}']) # logging - append_line_to_file (logPath + '/IP_changes.log', '['+str(timeNowTZ()) +']\t'+ new_internet_IP +'\n') + append_line_to_file (logPath + '/IP_changes.log', '['+str(timeNowDB()) +']\t'+ new_internet_IP +'\n') plugin_objects = Plugin_Objects(RESULT_FILE) diff --git a/front/plugins/internet_speedtest/script.py b/front/plugins/internet_speedtest/script.py index 6e9063ed..ef4f5705 100755 --- a/front/plugins/internet_speedtest/script.py +++ b/front/plugins/internet_speedtest/script.py @@ -13,7 +13,7 @@ 
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"]) from plugin_helper import Plugin_Objects from logger import mylog, Logger, append_line_to_file -from helper import timeNowTZ, get_setting_value +from helper import timeNowDB, get_setting_value import conf from pytz import timezone from const import logPath @@ -40,7 +40,7 @@ def main(): speedtest_result = run_speedtest() plugin_objects.add_object( primaryId = 'Speedtest', - secondaryId = timeNowTZ(), + secondaryId = timeNowDB(), watched1 = speedtest_result['download_speed'], watched2 = speedtest_result['upload_speed'], watched3 = 'null', diff --git a/front/plugins/ipneigh/ipneigh.py b/front/plugins/ipneigh/ipneigh.py index dd0a7978..a556c213 100755 --- a/front/plugins/ipneigh/ipneigh.py +++ b/front/plugins/ipneigh/ipneigh.py @@ -18,7 +18,7 @@ from plugin_helper import Plugin_Object, Plugin_Objects, decodeBase64, handleEmp from plugin_utils import get_plugins_configs from logger import mylog, Logger from const import pluginsPath, fullDbPath, logPath -from helper import timeNowTZ, get_setting_value +from helper import get_setting_value from messaging.in_app import write_notification import conf diff --git a/front/plugins/luci_import/script.py b/front/plugins/luci_import/script.py index 5eaa578a..c4283a7d 100755 --- a/front/plugins/luci_import/script.py +++ b/front/plugins/luci_import/script.py @@ -9,7 +9,7 @@ pluginName = 'LUCIRPC' from plugin_helper import Plugin_Object, Plugin_Objects, decodeBase64 from logger import mylog, Logger, append_line_to_file -from helper import timeNowTZ, get_setting_value +from helper import get_setting_value from const import logPath, applicationPath import conf from pytz import timezone diff --git a/front/plugins/maintenance/maintenance.py b/front/plugins/maintenance/maintenance.py index 2e28c6a5..5f13c79f 100755 --- a/front/plugins/maintenance/maintenance.py +++ b/front/plugins/maintenance/maintenance.py @@ -17,7 +17,7 @@ sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"]) from plugin_helper import Plugin_Object, Plugin_Objects, decodeBase64 from logger import mylog, Logger, append_line_to_file -from helper import timeNowTZ, get_setting_value +from helper import get_setting_value from const import logPath, applicationPath from messaging.in_app import remove_old import conf diff --git a/front/plugins/nbtscan_scan/nbtscan.py b/front/plugins/nbtscan_scan/nbtscan.py index 6a4e9fff..d555859d 100755 --- a/front/plugins/nbtscan_scan/nbtscan.py +++ b/front/plugins/nbtscan_scan/nbtscan.py @@ -15,7 +15,7 @@ from plugin_helper import Plugin_Object, Plugin_Objects, decodeBase64 from plugin_utils import get_plugins_configs from logger import mylog, Logger from const import pluginsPath, fullDbPath, logPath -from helper import timeNowTZ, get_setting_value +from helper import get_setting_value from messaging.in_app import write_notification from database import DB from models.device_instance import DeviceInstance diff --git a/front/plugins/nmap_dev_scan/nmap_dev.py b/front/plugins/nmap_dev_scan/nmap_dev.py index e9f25cd1..2c7f85ad 100755 --- a/front/plugins/nmap_dev_scan/nmap_dev.py +++ b/front/plugins/nmap_dev_scan/nmap_dev.py @@ -21,7 +21,7 @@ sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"]) from plugin_helper import Plugin_Object, Plugin_Objects, decodeBase64 from logger import mylog, Logger, append_line_to_file -from helper import timeNowTZ, get_setting_value, extract_between_strings, extract_ip_addresses, extract_mac_addresses +from helper 
import get_setting_value, extract_between_strings, extract_ip_addresses, extract_mac_addresses from const import logPath, applicationPath, fullDbPath from database import DB from models.device_instance import DeviceInstance diff --git a/front/plugins/nmap_scan/script.py b/front/plugins/nmap_scan/script.py index 1851b26c..180973bb 100755 --- a/front/plugins/nmap_scan/script.py +++ b/front/plugins/nmap_scan/script.py @@ -14,7 +14,7 @@ sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"]) from plugin_helper import Plugin_Object, Plugin_Objects, decodeBase64 from logger import mylog, Logger, append_line_to_file -from helper import timeNowTZ, get_setting_value +from helper import timeNowDB, get_setting_value from const import logPath, applicationPath import conf from pytz import timezone @@ -158,7 +158,7 @@ def performNmapScan(deviceIPs, deviceMACs, timeoutSec, args): elif 'PORT' in line and 'STATE' in line and 'SERVICE' in line: startCollecting = False # end reached elif startCollecting and len(line.split()) == 3: - newEntriesTmp.append(nmap_entry(ip, deviceMACs[devIndex], timeNowTZ(), line.split()[0], line.split()[1], line.split()[2])) + newEntriesTmp.append(nmap_entry(ip, deviceMACs[devIndex], timeNowDB(), line.split()[0], line.split()[1], line.split()[2])) newPortsPerDevice += 1 elif 'Nmap done' in line: duration = line.split('scanned in ')[1] diff --git a/front/plugins/nslookup_scan/nslookup.py b/front/plugins/nslookup_scan/nslookup.py index d0d1c4d4..05baa254 100755 --- a/front/plugins/nslookup_scan/nslookup.py +++ b/front/plugins/nslookup_scan/nslookup.py @@ -20,7 +20,7 @@ sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"]) from plugin_helper import Plugin_Object, Plugin_Objects, decodeBase64 from logger import mylog, Logger, append_line_to_file -from helper import timeNowTZ, get_setting_value +from helper import get_setting_value from const import logPath, applicationPath, fullDbPath from database import DB from models.device_instance import DeviceInstance diff --git a/front/plugins/omada_sdn_imp/omada_sdn.py b/front/plugins/omada_sdn_imp/omada_sdn.py index 1b714f32..ae2f482b 100755 --- a/front/plugins/omada_sdn_imp/omada_sdn.py +++ b/front/plugins/omada_sdn_imp/omada_sdn.py @@ -44,7 +44,7 @@ from plugin_helper import Plugin_Object, Plugin_Objects, decodeBase64 from plugin_utils import get_plugins_configs from logger import mylog, Logger from const import pluginsPath, fullDbPath, logPath -from helper import timeNowTZ, get_setting_value +from helper import get_setting_value from messaging.in_app import write_notification from pytz import timezone import conf diff --git a/front/plugins/plugin_helper.py b/front/plugins/plugin_helper.py index edd6a027..d95cb795 100755 --- a/front/plugins/plugin_helper.py +++ b/front/plugins/plugin_helper.py @@ -11,7 +11,7 @@ INSTALL_PATH = "/app" sys.path.append(f"{INSTALL_PATH}/front/plugins") sys.path.append(f'{INSTALL_PATH}/server') -from logger import mylog, Logger +from logger import mylog, Logger, timeNowDB from const import confFileName, default_tz #------------------------------------------------------------------------------- @@ -204,7 +204,7 @@ class Plugin_Object: self.pluginPref = "" self.primaryId = primaryId self.secondaryId = secondaryId - self.created = datetime.now().astimezone().isoformat() + self.created = timeNowDB() self.changed = "" self.watched1 = watched1 self.watched2 = watched2 diff --git a/front/plugins/snmp_discovery/script.py b/front/plugins/snmp_discovery/script.py index 
ad25d01d..11378168 100755 --- a/front/plugins/snmp_discovery/script.py +++ b/front/plugins/snmp_discovery/script.py @@ -13,7 +13,7 @@ sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"]) from plugin_helper import Plugin_Object, Plugin_Objects, decodeBase64, handleEmpty, normalize_mac from logger import mylog, Logger -from helper import timeNowTZ, get_setting_value +from helper import get_setting_value from const import logPath, applicationPath import conf from pytz import timezone diff --git a/front/plugins/sync/sync.py b/front/plugins/sync/sync.py index f6b328ac..89695bec 100755 --- a/front/plugins/sync/sync.py +++ b/front/plugins/sync/sync.py @@ -18,7 +18,7 @@ from plugin_helper import Plugin_Object, Plugin_Objects, decodeBase64 from plugin_utils import get_plugins_configs, decode_and_rename_files from logger import mylog, Logger from const import pluginsPath, fullDbPath, logPath -from helper import timeNowTZ, get_setting_value +from helper import timeNowDB, get_setting_value from crypto_utils import encrypt_data from messaging.in_app import write_notification import conf @@ -149,7 +149,7 @@ def main(): message = f'[{pluginName}] Device data from node "{node_name}" written to {log_file_name}' mylog('verbose', [message]) if lggr.isAbove('verbose'): - write_notification(message, 'info', timeNowTZ()) + write_notification(message, 'info', timeNowDB()) # Process any received data for the Device DB table (ONLY JSON) @@ -255,7 +255,7 @@ def main(): message = f'[{pluginName}] Inserted "{len(new_devices)}" new devices' mylog('verbose', [message]) - write_notification(message, 'info', timeNowTZ()) + write_notification(message, 'info', timeNowDB()) # Commit and close the connection @@ -299,7 +299,7 @@ def send_data(api_token, file_content, encryption_key, file_path, node_name, pre if response.status_code == 200: message = f'[{pluginName}] Data for "{file_path}" sent successfully via {final_endpoint}' mylog('verbose', [message]) - write_notification(message, 'info', timeNowTZ()) + write_notification(message, 'info', timeNowDB()) return True except requests.RequestException as e: @@ -308,7 +308,7 @@ def send_data(api_token, file_content, encryption_key, file_path, node_name, pre # If all endpoints fail message = f'[{pluginName}] Failed to send data for "{file_path}" via all endpoints' mylog('verbose', [message]) - write_notification(message, 'alert', timeNowTZ()) + write_notification(message, 'alert', timeNowDB()) return False @@ -332,7 +332,7 @@ def get_data(api_token, node_url): except json.JSONDecodeError: message = f'[{pluginName}] Failed to parse JSON from {final_endpoint}' mylog('verbose', [message]) - write_notification(message, 'alert', timeNowTZ()) + write_notification(message, 'alert', timeNowDB()) return "" except requests.RequestException as e: mylog('verbose', [f'[{pluginName}] Error calling {final_endpoint}: {e}']) @@ -340,7 +340,7 @@ def get_data(api_token, node_url): # If all endpoints fail message = f'[{pluginName}] Failed to get data from "{node_url}" via all endpoints' mylog('verbose', [message]) - write_notification(message, 'alert', timeNowTZ()) + write_notification(message, 'alert', timeNowDB()) return "" diff --git a/front/plugins/unifi_api_import/unifi_api_import.py b/front/plugins/unifi_api_import/unifi_api_import.py index 6bdb52c8..8e8b9a94 100755 --- a/front/plugins/unifi_api_import/unifi_api_import.py +++ b/front/plugins/unifi_api_import/unifi_api_import.py @@ -16,7 +16,7 @@ from plugin_helper import Plugin_Object, Plugin_Objects, decodeBase64, 
decode_se from plugin_utils import get_plugins_configs from logger import mylog, Logger from const import pluginsPath, fullDbPath, logPath -from helper import timeNowTZ, get_setting_value +from helper import get_setting_value from messaging.in_app import write_notification import conf diff --git a/front/plugins/unifi_import/script.py b/front/plugins/unifi_import/script.py index c4dac026..95765829 100755 --- a/front/plugins/unifi_import/script.py +++ b/front/plugins/unifi_import/script.py @@ -21,7 +21,7 @@ sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"]) from plugin_helper import Plugin_Object, Plugin_Objects, rmBadChars, is_typical_router_ip, is_mac from logger import mylog, Logger -from helper import timeNowTZ, get_setting_value, normalize_string +from helper import get_setting_value, normalize_string import conf from pytz import timezone from const import logPath diff --git a/front/plugins/vendor_update/script.py b/front/plugins/vendor_update/script.py index dbe6a055..4617ad5b 100755 --- a/front/plugins/vendor_update/script.py +++ b/front/plugins/vendor_update/script.py @@ -17,7 +17,7 @@ sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"]) from plugin_helper import Plugin_Object, Plugin_Objects, decodeBase64, handleEmpty from logger import mylog, Logger, append_line_to_file -from helper import timeNowTZ, get_setting_value +from helper import get_setting_value from const import logPath, applicationPath, fullDbPath from scan.device_handling import query_MAC_vendor import conf diff --git a/front/plugins/wake_on_lan/wake_on_lan.py b/front/plugins/wake_on_lan/wake_on_lan.py index 775d2346..eaa0bdde 100755 --- a/front/plugins/wake_on_lan/wake_on_lan.py +++ b/front/plugins/wake_on_lan/wake_on_lan.py @@ -16,7 +16,7 @@ from plugin_helper import Plugin_Object, Plugin_Objects, decodeBase64 from plugin_utils import get_plugins_configs from logger import mylog, Logger from const import pluginsPath, fullDbPath, logPath -from helper import timeNowTZ, get_setting_value +from helper import get_setting_value from messaging.in_app import write_notification from database import DB from models.device_instance import DeviceInstance diff --git a/front/plugins/website_monitor/script.py b/front/plugins/website_monitor/script.py index 0426444a..4acac72b 100755 --- a/front/plugins/website_monitor/script.py +++ b/front/plugins/website_monitor/script.py @@ -16,7 +16,7 @@ sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"]) from plugin_helper import Plugin_Objects from datetime import datetime from const import logPath -from helper import timeNowTZ, get_setting_value +from helper import get_setting_value import conf from pytz import timezone from logger import mylog, Logger diff --git a/server/api_server/api_server_start.py b/server/api_server/api_server_start.py index 974dd313..3a376791 100755 --- a/server/api_server/api_server_start.py +++ b/server/api_server/api_server_start.py @@ -9,7 +9,7 @@ INSTALL_PATH = "/app" sys.path.extend([f"{INSTALL_PATH}/server"]) from logger import mylog -from helper import get_setting_value, timeNowTZ +from helper import get_setting_value from db.db_helper import get_date_from_period from app_state import updateState diff --git a/server/api_server/device_endpoint.py b/server/api_server/device_endpoint.py index 7a331c5d..9c032f28 100755 --- a/server/api_server/device_endpoint.py +++ b/server/api_server/device_endpoint.py @@ -14,7 +14,7 @@ INSTALL_PATH="/app" sys.path.extend([f"{INSTALL_PATH}/front/plugins", 
f"{INSTALL_PATH}/server"]) from database import get_temp_db_connection -from helper import is_random_mac, format_date, get_setting_value, timeNowTZ +from helper import is_random_mac, format_date, get_setting_value, timeNowDB from db.db_helper import row_to_json, get_date_from_period # -------------------------- @@ -28,7 +28,7 @@ def get_device_data(mac): conn = get_temp_db_connection() cur = conn.cursor() - now = timeNowTZ().astimezone().isoformat() + now = timeNowDB() # Special case for new device if mac.lower() == "new": @@ -187,8 +187,8 @@ def set_device_data(mac, data): data.get("devSkipRepeated", 0), data.get("devIsNew", 0), data.get("devIsArchived", 0), - data.get("devLastConnection", timeNowTZ().astimezone().isoformat()), - data.get("devFirstConnection", timeNowTZ().astimezone().isoformat()), + data.get("devLastConnection", timeNowDB()), + data.get("devFirstConnection", timeNowDB()), data.get("devLastIP", ""), data.get("devGUID", ""), data.get("devCustomProps", ""), diff --git a/server/api_server/events_endpoint.py b/server/api_server/events_endpoint.py index 5b94940d..5d02fcda 100755 --- a/server/api_server/events_endpoint.py +++ b/server/api_server/events_endpoint.py @@ -14,7 +14,7 @@ INSTALL_PATH="/app" sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"]) from database import get_temp_db_connection -from helper import is_random_mac, format_date, get_setting_value, format_date_iso, format_event_date, timeNowTZ, mylog, ensure_datetime +from helper import is_random_mac, format_date, get_setting_value, format_date_iso, format_event_date, mylog, ensure_datetime from db.db_helper import row_to_json, get_date_from_period diff --git a/server/api_server/sessions_endpoint.py b/server/api_server/sessions_endpoint.py index 7cde8cd7..811503be 100755 --- a/server/api_server/sessions_endpoint.py +++ b/server/api_server/sessions_endpoint.py @@ -16,7 +16,7 @@ INSTALL_PATH="/app" sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"]) from database import get_temp_db_connection -from helper import is_random_mac, format_date, get_setting_value, format_date_iso, format_event_date, mylog, timeNowTZ, format_date_diff, format_ip_long, parse_datetime +from helper import is_random_mac, format_date, get_setting_value, format_date_iso, format_event_date, mylog, format_date_diff, format_ip_long, parse_datetime from db.db_helper import row_to_json, get_date_from_period diff --git a/server/api_server/sync_endpoint.py b/server/api_server/sync_endpoint.py index 66e8f0f1..59b8095e 100755 --- a/server/api_server/sync_endpoint.py +++ b/server/api_server/sync_endpoint.py @@ -2,7 +2,7 @@ import os import base64 from flask import jsonify, request from logger import mylog -from helper import get_setting_value, timeNowTZ +from helper import get_setting_value, timeNowDB from messaging.in_app import write_notification INSTALL_PATH = "/app" @@ -16,19 +16,19 @@ def handle_sync_get(): raw_data = f.read() except FileNotFoundError: msg = f"[Plugin: SYNC] Data file not found: {file_path}" - write_notification(msg, "alert", timeNowTZ()) + write_notification(msg, "alert", timeNowDB()) mylog("verbose", [msg]) return jsonify({"error": msg}), 500 response_data = base64.b64encode(raw_data).decode("utf-8") - write_notification("[Plugin: SYNC] Data sent", "info", timeNowTZ()) + write_notification("[Plugin: SYNC] Data sent", "info", timeNowDB()) return jsonify({ "node_name": get_setting_value("SYNC_node_name"), "status": 200, "message": "OK", "data_base64": response_data, - "timestamp": 
timeNowTZ() + "timestamp": timeNowDB() }), 200 @@ -61,11 +61,11 @@ def handle_sync_post(): f.write(data) except Exception as e: msg = f"[Plugin: SYNC] Failed to store data: {e}" - write_notification(msg, "alert", timeNowTZ()) + write_notification(msg, "alert", timeNowDB()) mylog("verbose", [msg]) return jsonify({"error": msg}), 500 msg = f"[Plugin: SYNC] Data received ({file_path_new})" - write_notification(msg, "info", timeNowTZ()) + write_notification(msg, "info", timeNowDB()) mylog("verbose", [msg]) return jsonify({"message": "Data received and stored successfully"}), 200 diff --git a/server/app_state.py b/server/app_state.py index 4a3dcad2..d4b33525 100755 --- a/server/app_state.py +++ b/server/app_state.py @@ -4,7 +4,7 @@ import json import conf from const import * from logger import mylog, logResult -from helper import timeNowTZ, timeNow, checkNewVersion +from helper import timeNowDB, timeNow, checkNewVersion # Register NetAlertX directories INSTALL_PATH="/app" @@ -59,7 +59,7 @@ class app_state_class: previousState = "" # Update self - self.lastUpdated = str(timeNowTZ().astimezone().isoformat()) + self.lastUpdated = str(timeNowDB()) if os.path.exists(stateFile): try: diff --git a/server/helper.py b/server/helper.py index f7787af1..61bec402 100755 --- a/server/helper.py +++ b/server/helper.py @@ -31,18 +31,11 @@ INSTALL_PATH="/app" #------------------------------------------------------------------------------- # DateTime #------------------------------------------------------------------------------- -# Get the current time in the current TimeZone def timeNowTZ(): if conf.tz: return datetime.datetime.now(conf.tz).replace(microsecond=0) else: return datetime.datetime.now().replace(microsecond=0) - # if isinstance(conf.TIMEZONE, str): - # tz = pytz.timezone(conf.TIMEZONE) - # else: - # tz = conf.TIMEZONE - - # return datetime.datetime.now(tz).replace(microsecond=0) def timeNow(): return datetime.datetime.now().replace(microsecond=0) @@ -53,6 +46,23 @@ def get_timezone_offset(): offset_formatted = "{:+03d}:{:02d}".format(int(offset_hours), int((offset_hours % 1) * 60)) return offset_formatted +def timeNowDB(local=True): + """ + Return the current time (local or UTC) as ISO 8601 for DB storage. + Safe for SQLite, PostgreSQL, etc. 
+ + Example local: '2025-11-04 18:09:11' + Example UTC: '2025-11-04 07:09:11' + """ + if local: + try: + tz = ZoneInfo(conf.tz) if conf.tz else None + except Exception: + tz = None + return datetime.datetime.now(tz).strftime('%Y-%m-%d %H:%M:%S') + else: + return datetime.datetime.now(datetime.UTC).strftime('%Y-%m-%d %H:%M:%S') + #------------------------------------------------------------------------------- # Date and time methods @@ -79,7 +89,7 @@ def format_event_date(date_str: str, event_type: str) -> str: # ------------------------------------------------------------------------------------------- def ensure_datetime(dt: Union[str, datetime.datetime, None]) -> datetime.datetime: if dt is None: - return timeNowTZ() + return timeNowDB() if isinstance(dt, str): return datetime.datetime.fromisoformat(dt) return dt diff --git a/server/initialise.py b/server/initialise.py index e4031088..c16e71ba 100755 --- a/server/initialise.py +++ b/server/initialise.py @@ -12,7 +12,7 @@ import re # Register NetAlertX libraries import conf from const import fullConfPath, applicationPath, fullConfFolder, default_tz -from helper import getBuildTimeStamp, fixPermissions, collect_lang_strings, updateSubnets, isJsonObject, setting_value_to_python_type, timeNowTZ, get_setting_value, generate_random_string +from helper import getBuildTimeStamp, fixPermissions, collect_lang_strings, updateSubnets, isJsonObject, setting_value_to_python_type, timeNowDB, get_setting_value, generate_random_string from app_state import updateState from logger import mylog from api import update_api @@ -392,7 +392,7 @@ def importConfigs (pm, db, all_plugins): # ccd(key, default, config_dir, name, inputtype, options, group, events=None, desc="", setJsonMetadata=None, overrideTemplate=None, forceDefault=False) ccd('VERSION', buildTimestamp , c_d, '_KEEP_', '_KEEP_', '_KEEP_', '_KEEP_', None, "_KEEP_", None, None, True) - write_notification(f'[Upgrade] : App upgraded 🚀 Please clear the cache:
  1. Click OK below
  2. Clear the browser cache (shift + browser refresh button)
  3. Clear app cache with the (reload) button in the header
  4. Go to Settings and click Save
Check out new features and what has changed in the 📓 release notes.', 'interrupt', timeNowTZ()) + write_notification(f'[Upgrade] : App upgraded 🚀 Please clear the cache:
  1. Click OK below
  2. Clear the browser cache (shift + browser refresh button)
  3. Clear app cache with the (reload) button in the header
  4. Go to Settings and click Save
Check out new features and what has changed in the 📓 release notes.', 'interrupt', timeNowDB()) @@ -429,7 +429,7 @@ def importConfigs (pm, db, all_plugins): mylog('minimal', msg) # front end app log loggging - write_notification(msg, 'info', timeNowTZ()) + write_notification(msg, 'info', timeNowDB()) return pm, all_plugins, True diff --git a/server/logger.py b/server/logger.py index 72d6eb3e..da2ae75c 100755 --- a/server/logger.py +++ b/server/logger.py @@ -19,6 +19,23 @@ def timeNowTZ(): else: return datetime.datetime.now().replace(microsecond=0) +def timeNowDB(local=True): + """ + Return the current time (local or UTC) as ISO 8601 for DB storage. + Safe for SQLite, PostgreSQL, etc. + + Example local: '2025-11-04 18:09:11' + Example UTC: '2025-11-04 07:09:11' + """ + if local: + try: + tz = ZoneInfo(conf.tz) if conf.tz else None + except Exception: + tz = None + return datetime.datetime.now(tz).strftime('%Y-%m-%d %H:%M:%S') + else: + return datetime.datetime.now(datetime.UTC).strftime('%Y-%m-%d %H:%M:%S') + #------------------------------------------------------------------------------- # Map custom debug levels to Python logging levels custom_to_logging_levels = { diff --git a/server/messaging/in_app.py b/server/messaging/in_app.py index 837b72a8..5246acf4 100755 --- a/server/messaging/in_app.py +++ b/server/messaging/in_app.py @@ -20,7 +20,7 @@ sys.path.extend([f"{INSTALL_PATH}/server"]) import conf from const import applicationPath, logPath, apiPath, confFileName, reportTemplatesPath from logger import logResult, mylog -from helper import generate_mac_links, removeDuplicateNewLines, timeNowTZ, get_file_content, write_file, get_setting_value, get_timezone_offset +from helper import generate_mac_links, removeDuplicateNewLines, timeNowDB, get_file_content, write_file, get_setting_value, get_timezone_offset NOTIFICATION_API_FILE = apiPath + 'user_notifications.json' @@ -39,7 +39,7 @@ def write_notification(content, level='alert', timestamp=None): None """ if timestamp is None: - timestamp = timeNowTZ() + timestamp = timeNowDB() # Generate GUID guid = str(uuid.uuid4()) diff --git a/server/messaging/reporting.py b/server/messaging/reporting.py index a9343f87..2c885ce1 100755 --- a/server/messaging/reporting.py +++ b/server/messaging/reporting.py @@ -20,7 +20,7 @@ sys.path.extend([f"{INSTALL_PATH}/server"]) import conf from const import applicationPath, logPath, apiPath, confFileName -from helper import timeNowTZ, get_file_content, write_file, get_timezone_offset, get_setting_value +from helper import get_file_content, write_file, get_timezone_offset, get_setting_value from logger import logResult, mylog from db.sql_safe_builder import create_safe_condition_builder @@ -123,6 +123,9 @@ def get_notifications (db): ) ORDER BY down_events.eve_DateTime; """ + + + mylog("none", sqlQuery) # Get the events as JSON json_obj = db.get_table_as_json(sqlQuery) diff --git a/server/models/notification_instance.py b/server/models/notification_instance.py index d01cbec9..02832d45 100755 --- a/server/models/notification_instance.py +++ b/server/models/notification_instance.py @@ -16,6 +16,7 @@ from const import applicationPath, logPath, apiPath, reportTemplatesPath from logger import mylog, Logger from helper import generate_mac_links, \ removeDuplicateNewLines, \ + timeNowDB, \ timeNowTZ, \ write_file, \ get_setting_value, \ @@ -71,7 +72,7 @@ class NotificationInstance: self.HasNotifications = True self.GUID = str(uuid.uuid4()) - self.DateTimeCreated = timeNowTZ() + self.DateTimeCreated = timeNowDB() 
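The helper.py and logger.py hunks above both add timeNowDB(), which returns a plain 'YYYY-MM-DD HH:MM:SS' string meant to be written straight into database columns, while the existing timeNowTZ() keeps returning a timezone-aware datetime for in-process comparisons. A minimal standalone sketch of that split, with a hard-coded zone name standing in for conf.tz (illustrative only, not the project's module):

```python
# Standalone sketch of the timeNowTZ()/timeNowDB() split introduced above.
# "Australia/Sydney" is a hypothetical stand-in for conf.tz; the real helpers read the configured zone.
import datetime
from zoneinfo import ZoneInfo

TZ = ZoneInfo("Australia/Sydney")

def time_now_tz() -> datetime.datetime:
    # timezone-aware datetime, handy for comparisons and arithmetic in Python code
    return datetime.datetime.now(TZ).replace(microsecond=0)

def time_now_db(local: bool = True) -> str:
    # plain 'YYYY-MM-DD HH:MM:SS' string, safe to write straight into SQLite/PostgreSQL columns
    tz = TZ if local else datetime.timezone.utc
    return datetime.datetime.now(tz).strftime('%Y-%m-%d %H:%M:%S')

print(repr(time_now_tz()))  # e.g. datetime.datetime(2025, 11, 4, 18, 9, 11, tzinfo=...)
print(repr(time_now_db()))  # e.g. '2025-11-04 18:09:11'
```

The returned string sorts correctly as plain text, so it can be compared and aggregated in SQL without any date parsing.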
self.DateTimePushed = "" self.Status = "new" self.JSON = JSON @@ -112,7 +113,7 @@ class NotificationInstance: mail_html = mail_html.replace('', newVersionText) # Report "REPORT_DATE" in Header & footer - timeFormated = timeNowTZ().strftime('%Y-%m-%d %H:%M') + timeFormated = timeNowDB() mail_text = mail_text.replace('', timeFormated) mail_html = mail_html.replace('', timeFormated) @@ -231,7 +232,7 @@ class NotificationInstance: # Updates the Published properties def updatePublishedVia(self, newPublishedVia): self.PublishedVia = newPublishedVia - self.DateTimePushed = timeNowTZ() + self.DateTimePushed = timeNowDB() self.upsert() # create or update a notification @@ -282,7 +283,7 @@ class NotificationInstance: SELECT eve_MAC FROM Events WHERE eve_PendingAlertEmail = 1 ) - """, (timeNowTZ(),)) + """, (timeNowDB(),)) self.db.sql.execute(""" UPDATE Events SET eve_PendingAlertEmail = 0 diff --git a/server/plugin.py b/server/plugin.py index a65a0201..ba24b47d 100755 --- a/server/plugin.py +++ b/server/plugin.py @@ -12,7 +12,7 @@ from collections import namedtuple import conf from const import pluginsPath, logPath, applicationPath, reportTemplatesPath from logger import mylog, Logger -from helper import timeNowTZ, get_file_content, write_file, get_setting, get_setting_value +from helper import timeNowDB, timeNowTZ, get_file_content, write_file, get_setting, get_setting_value from app_state import updateState from api import update_api from plugin_utils import logEventStatusCounts, get_plugin_string, get_plugin_setting_obj, print_plugin_info, list_to_csv, combine_plugin_objects, resolve_wildcards_arr, handle_empty, custom_plugin_decoder, decode_and_rename_files @@ -154,7 +154,7 @@ class plugin_manager: if len(executed_events) > 0 and executed_events: executed_events_message = ', '.join(executed_events) mylog('minimal', ['[check_and_run_user_event] INFO: Executed events: ', executed_events_message]) - write_notification(f"[Ad-hoc events] Events executed: {executed_events_message}", "interrupt", timeNowTZ()) + write_notification(f"[Ad-hoc events] Events executed: {executed_events_message}", "interrupt", timeNowDB()) return @@ -163,7 +163,7 @@ class plugin_manager: #------------------------------------------------------------------------------- def handle_run(self, runType): - mylog('minimal', ['[', timeNowTZ(), '] START Run: ', runType]) + mylog('minimal', ['[', timeNowDB(), '] START Run: ', runType]) # run the plugin for plugin in self.all_plugins: @@ -177,7 +177,7 @@ class plugin_manager: current_plugin_state = self.get_plugin_states(pluginName) # get latest plugin state updateState(pluginsStates={pluginName: current_plugin_state.get(pluginName, {})}) - mylog('minimal', ['[', timeNowTZ(), '] END Run: ', runType]) + mylog('minimal', ['[', timeNowDB(), '] END Run: ', runType]) return @@ -186,7 +186,7 @@ class plugin_manager: #------------------------------------------------------------------------------- def handle_test(self, runType): - mylog('minimal', ['[', timeNowTZ(), '] [Test] START Test: ', runType]) + mylog('minimal', ['[', timeNowDB(), '] [Test] START Test: ', runType]) # Prepare test samples sample_json = json.loads(get_file_content(reportTemplatesPath + 'webhook_json_sample.json'))[0]["body"]["attachments"][0]["text"] @@ -221,7 +221,7 @@ class plugin_manager: """ sql = self.db.sql plugin_states = {} - now_str = timeNowTZ().isoformat() + now_str = timeNowDB() if plugin_name: # Only compute for single plugin sql.execute(""" @@ -759,7 +759,7 @@ def process_plugin_events(db, plugin, 
plugEventsArr): if isMissing: # if wasn't missing before, mark as changed if tmpObj.status != "missing-in-last-scan": - tmpObj.changed = timeNowTZ().astimezone().isoformat() + tmpObj.changed = timeNowDB() tmpObj.status = "missing-in-last-scan" # mylog('debug', [f'[Plugins] Missing from last scan (PrimaryID | SecondaryID): {tmpObj.primaryId} | {tmpObj.secondaryId}']) diff --git a/server/plugin_utils.py b/server/plugin_utils.py index de2d4d86..2e92ff38 100755 --- a/server/plugin_utils.py +++ b/server/plugin_utils.py @@ -4,7 +4,7 @@ import json import conf from logger import mylog from const import pluginsPath, logPath, apiPath -from helper import timeNowTZ, get_file_content, write_file, get_setting, get_setting_value, setting_value_to_python_type +from helper import get_file_content, write_file, get_setting, get_setting_value, setting_value_to_python_type from app_state import updateState from crypto_utils import decrypt_data, generate_deterministic_guid diff --git a/server/scan/device_handling.py b/server/scan/device_handling.py index c213698e..48cb84fe 100755 --- a/server/scan/device_handling.py +++ b/server/scan/device_handling.py @@ -10,7 +10,7 @@ from dateutil import parser INSTALL_PATH="/app" sys.path.extend([f"{INSTALL_PATH}/server"]) -from helper import timeNowTZ, get_setting_value, check_IP_format +from helper import timeNowDB, timeNowTZ, get_setting_value, check_IP_format from logger import mylog, Logger from const import vendorsPath, vendorsPathNewest, sql_generateGuid from models.device_instance import DeviceInstance @@ -56,7 +56,7 @@ def exclude_ignored_devices(db): #------------------------------------------------------------------------------- def update_devices_data_from_scan (db): sql = db.sql #TO-DO - startTime = timeNowTZ().astimezone().isoformat() + startTime = timeNowDB() # Update Last Connection mylog('debug', '[Update Devices] 1 Last Connection') @@ -371,7 +371,7 @@ def print_scan_stats(db): #------------------------------------------------------------------------------- def create_new_devices (db): sql = db.sql # TO-DO - startTime = timeNowTZ() + startTime = timeNowDB() # Insert events for new devices from CurrentScan (not yet in Devices) @@ -536,7 +536,7 @@ def update_devices_names(pm): if isinstance(last_checked, str): try: last_checked = parser.parse(last_checked) - except Exception as e: + except (ValueError, TypeError) as e: mylog('none', f'[Update Device Name] Could not parse last_checked timestamp: {last_checked!r} ({e})') last_checked = None elif not isinstance(last_checked, datetime.datetime): @@ -544,7 +544,6 @@ def update_devices_names(pm): # Collect and normalize valid state update timestamps for name-related plugins state_times = [] - latest_state = None for p in name_plugins: state_updated = pm.plugin_states.get(p, {}).get("stateUpdated") @@ -561,13 +560,15 @@ def update_devices_names(pm): mylog('none', f'[Update Device Name] Failed to parse timestamp for {p}: {state_updated!r} ({e})') else: mylog('none', f'[Update Device Name] Unexpected timestamp type for {p}: {type(state_updated)}') - # Determine the latest valid timestamp safely - try: - if state_times: - latest_state = max(state_times) - except Exception as e: - mylog('none', f'[Update Device Name] Failed to determine latest timestamp, using fallback ({e})') - latest_state = state_times[-1] if state_times else None + + # Determine the latest valid timestamp safely (after collecting all timestamps) + latest_state = None + try: + if state_times: + latest_state = max(state_times) + except (ValueError, 
TypeError) as e: + mylog('none', f'[Update Device Name] Failed to determine latest timestamp, using fallback ({e})') + latest_state = state_times[-1] if state_times else None # Skip if no plugin state changed since last check @@ -672,7 +673,7 @@ def update_devices_names(pm): # --- Step 3: Log last checked time --- # After resolving names, update last checked - pm.name_plugins_checked = timeNowTZ().astimezone().isoformat() + pm.name_plugins_checked = timeNowDB() #------------------------------------------------------------------------------- # Updates devPresentLastScan for parent devices based on the presence of their NICs diff --git a/server/scan/device_heuristics.py b/server/scan/device_heuristics.py index 5e7da0ff..c87576f8 100755 --- a/server/scan/device_heuristics.py +++ b/server/scan/device_heuristics.py @@ -12,7 +12,7 @@ sys.path.extend([f"{INSTALL_PATH}/server"]) import conf from const import * from logger import mylog -from helper import timeNowTZ, get_setting_value +from helper import get_setting_value # Load MAC/device-type/icon rules from external file MAC_TYPE_ICON_PATH = Path(f"{INSTALL_PATH}/back/device_heuristics_rules.json") diff --git a/server/scan/session_events.py b/server/scan/session_events.py index 7f999041..2dd1b9fe 100755 --- a/server/scan/session_events.py +++ b/server/scan/session_events.py @@ -6,7 +6,7 @@ sys.path.extend([f"{INSTALL_PATH}/server"]) import conf from scan.device_handling import create_new_devices, print_scan_stats, save_scanned_devices, exclude_ignored_devices, update_devices_data_from_scan -from helper import timeNowTZ, get_setting_value +from helper import timeNowDB, get_setting_value from db.db_helper import print_table_schema from logger import mylog, Logger from messaging.reporting import skip_repeated_notifications @@ -128,7 +128,7 @@ def create_sessions_snapshot (db): #------------------------------------------------------------------------------- def insert_events (db): sql = db.sql #TO-DO - startTime = timeNowTZ() + startTime = timeNowDB() # Check device down mylog('debug','[Events] - 1 - Devices down') @@ -191,7 +191,7 @@ def insert_events (db): def insertOnlineHistory(db): sql = db.sql # TO-DO: Implement sql object - scanTimestamp = timeNowTZ() + scanTimestamp = timeNowDB() # Query to fetch all relevant device counts in one go query = """ diff --git a/server/workflows/actions.py b/server/workflows/actions.py index 0ad338db..ea7a1e16 100755 --- a/server/workflows/actions.py +++ b/server/workflows/actions.py @@ -7,7 +7,7 @@ sys.path.extend([f"{INSTALL_PATH}/server"]) import conf from logger import mylog, Logger -from helper import get_setting_value, timeNowTZ +from helper import get_setting_value from models.device_instance import DeviceInstance from models.plugin_object_instance import PluginObjectInstance diff --git a/server/workflows/app_events.py b/server/workflows/app_events.py index 2d89fe98..a386ca2d 100755 --- a/server/workflows/app_events.py +++ b/server/workflows/app_events.py @@ -10,7 +10,7 @@ sys.path.extend([f"{INSTALL_PATH}/server"]) # Register NetAlertX modules import conf -from helper import get_setting_value, timeNowTZ +from helper import get_setting_value # Make sure the TIMEZONE for logging is correct # conf.tz = pytz.timezone(get_setting_value('TIMEZONE')) @@ -20,7 +20,6 @@ from logger import mylog, Logger, logResult Logger(get_setting_value('LOG_LEVEL')) from const import applicationPath, logPath, apiPath, confFileName, sql_generateGuid -from helper import timeNowTZ class AppEvent_obj: def __init__(self, db): diff 
--git a/server/workflows/conditions.py b/server/workflows/conditions.py index 29522652..00a2e6ce 100755 --- a/server/workflows/conditions.py +++ b/server/workflows/conditions.py @@ -8,7 +8,7 @@ sys.path.extend([f"{INSTALL_PATH}/server"]) import conf from logger import mylog, Logger -from helper import get_setting_value, timeNowTZ +from helper import get_setting_value # Make sure log level is initialized correctly Logger(get_setting_value('LOG_LEVEL')) diff --git a/server/workflows/manager.py b/server/workflows/manager.py index 7b3e85d4..0908bfa8 100755 --- a/server/workflows/manager.py +++ b/server/workflows/manager.py @@ -9,7 +9,7 @@ import conf from const import fullConfFolder import workflows.actions from logger import mylog, Logger -from helper import get_setting_value, timeNowTZ +from helper import get_setting_value # Make sure log level is initialized correctly Logger(get_setting_value('LOG_LEVEL')) diff --git a/server/workflows/triggers.py b/server/workflows/triggers.py index 04024fe5..e3037d03 100755 --- a/server/workflows/triggers.py +++ b/server/workflows/triggers.py @@ -7,7 +7,7 @@ sys.path.extend([f"{INSTALL_PATH}/server"]) import conf from logger import mylog, Logger -from helper import get_setting_value, timeNowTZ +from helper import get_setting_value from database import get_array_from_sql_rows # Make sure log level is initialized correctly diff --git a/test/test_dbquery_endpoints.py b/test/test_dbquery_endpoints.py index ff4347ed..a9f663ad 100755 --- a/test/test_dbquery_endpoints.py +++ b/test/test_dbquery_endpoints.py @@ -6,7 +6,7 @@ import pytest INSTALL_PATH = "/app" sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"]) -from helper import get_setting_value, timeNowTZ +from helper import get_setting_value, timeNowDB from api_server.api_server_start import app @@ -41,7 +41,7 @@ def b64(sql: str) -> str: # ----------------------------- def test_dbquery_create_device(client, api_token, test_mac): - now = timeNowTZ().astimezone().isoformat() + now = timeNowDB() sql = f""" INSERT INTO Devices (devMac, devName, devVendor, devOwner, devFirstConnection, devLastConnection, devLastIP) diff --git a/test/test_device_endpoints.py b/test/test_device_endpoints.py index 95787c43..5f55ea88 100755 --- a/test/test_device_endpoints.py +++ b/test/test_device_endpoints.py @@ -9,7 +9,7 @@ import pytest INSTALL_PATH = "/app" sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"]) -from helper import timeNowTZ, get_setting_value +from helper import get_setting_value from api_server.api_server_start import app @pytest.fixture(scope="session") diff --git a/test/test_devices_endpoints.py b/test/test_devices_endpoints.py index 1b84ecec..c54f414e 100755 --- a/test/test_devices_endpoints.py +++ b/test/test_devices_endpoints.py @@ -10,7 +10,7 @@ import pytest INSTALL_PATH = "/app" sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"]) -from helper import timeNowTZ, get_setting_value +from helper import get_setting_value from api_server.api_server_start import app @pytest.fixture(scope="session") diff --git a/test/test_graphq_endpoints.py b/test/test_graphq_endpoints.py index 8aec402b..cc5e2076 100755 --- a/test/test_graphq_endpoints.py +++ b/test/test_graphq_endpoints.py @@ -10,7 +10,7 @@ from datetime import datetime, timedelta INSTALL_PATH = "/app" sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"]) -from helper import timeNowTZ, get_setting_value +from helper import get_setting_value from 
api_server.api_server_start import app @pytest.fixture(scope="session") diff --git a/test/test_history_endpoints.py b/test/test_history_endpoints.py index 5968ea96..c01f402e 100755 --- a/test/test_history_endpoints.py +++ b/test/test_history_endpoints.py @@ -9,7 +9,7 @@ import pytest INSTALL_PATH = "/app" sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"]) -from helper import timeNowTZ, get_setting_value +from helper import get_setting_value from api_server.api_server_start import app @pytest.fixture(scope="session") diff --git a/test/test_nettools_endpoints.py b/test/test_nettools_endpoints.py index 1cde5e55..f23704f2 100755 --- a/test/test_nettools_endpoints.py +++ b/test/test_nettools_endpoints.py @@ -10,7 +10,7 @@ import pytest INSTALL_PATH = "/app" sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"]) -from helper import timeNowTZ, get_setting_value +from helper import get_setting_value from api_server.api_server_start import app @pytest.fixture(scope="session") diff --git a/test/test_sessions_endpoints.py b/test/test_sessions_endpoints.py index 3dde7fb8..e7035d65 100755 --- a/test/test_sessions_endpoints.py +++ b/test/test_sessions_endpoints.py @@ -10,7 +10,7 @@ from datetime import datetime, timedelta INSTALL_PATH = "/app" sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"]) -from helper import timeNowTZ, get_setting_value +from helper import timeNowDB, get_setting_value from api_server.api_server_start import app @pytest.fixture(scope="session") @@ -48,7 +48,7 @@ def test_create_session(client, api_token, test_mac): payload = { "mac": test_mac, "ip": "192.168.1.100", - "start_time": timeNowTZ(), + "start_time": timeNowDB(), "event_type_conn": "Connected", "event_type_disc": "Disconnected" } @@ -63,7 +63,7 @@ def test_list_sessions(client, api_token, test_mac): payload = { "mac": test_mac, "ip": "192.168.1.100", - "start_time": timeNowTZ() + "start_time": timeNowDB() } client.post("/sessions/create", json=payload, headers=auth_headers(api_token)) @@ -80,7 +80,7 @@ def test_device_sessions_by_period(client, api_token, test_mac): payload = { "mac": test_mac, "ip": "192.168.1.200", - "start_time": timeNowTZ() + "start_time": timeNowDB() } resp_create = client.post("/sessions/create", json=payload, headers=auth_headers(api_token)) assert resp_create.status_code == 200 @@ -115,7 +115,7 @@ def test_device_session_events(client, api_token, test_mac): payload = { "mac": test_mac, "ip": "192.168.1.250", - "start_time": timeNowTZ() + "start_time": timeNowDB() } resp_create = client.post( "/sessions/create", @@ -163,7 +163,7 @@ def test_delete_session(client, api_token, test_mac): payload = { "mac": test_mac, "ip": "192.168.1.100", - "start_time": timeNowTZ() + "start_time": timeNowDB() } client.post("/sessions/create", json=payload, headers=auth_headers(api_token)) diff --git a/test/test_settings_endpoints.py b/test/test_settings_endpoints.py index aefb851a..b622428b 100755 --- a/test/test_settings_endpoints.py +++ b/test/test_settings_endpoints.py @@ -10,7 +10,7 @@ from datetime import datetime, timedelta INSTALL_PATH = "/app" sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"]) -from helper import timeNowTZ, get_setting_value +from helper import get_setting_value from api_server.api_server_start import app @pytest.fixture(scope="session") From dc4da5b4c98c9af7e7b73cbea7f1885d213176a8 Mon Sep 17 00:00:00 2001 From: jokob-sk Date: Tue, 4 Nov 2025 19:44:30 +1100 Subject: [PATCH 06/31] BE/PLG: TZ 
timestamp work #1251 Signed-off-by: jokob-sk --- server/helper.py | 7 ++++++- server/logger.py | 7 ++++++- test/test_sessions_endpoints.py | 2 +- 3 files changed, 13 insertions(+), 3 deletions(-) diff --git a/server/helper.py b/server/helper.py index 61bec402..f072e7c1 100755 --- a/server/helper.py +++ b/server/helper.py @@ -56,7 +56,12 @@ def timeNowDB(local=True): """ if local: try: - tz = ZoneInfo(conf.tz) if conf.tz else None + if isinstance(conf.tz, datetime.tzinfo): + tz = conf.tz + elif conf.tz: + tz = ZoneInfo(conf.tz) + else: + tz = None except Exception: tz = None return datetime.datetime.now(tz).strftime('%Y-%m-%d %H:%M:%S') diff --git a/server/logger.py b/server/logger.py index da2ae75c..2763d489 100755 --- a/server/logger.py +++ b/server/logger.py @@ -29,7 +29,12 @@ def timeNowDB(local=True): """ if local: try: - tz = ZoneInfo(conf.tz) if conf.tz else None + if isinstance(conf.tz, datetime.tzinfo): + tz = conf.tz + elif conf.tz: + tz = ZoneInfo(conf.tz) + else: + tz = None except Exception: tz = None return datetime.datetime.now(tz).strftime('%Y-%m-%d %H:%M:%S') diff --git a/test/test_sessions_endpoints.py b/test/test_sessions_endpoints.py index e7035d65..5529ab98 100755 --- a/test/test_sessions_endpoints.py +++ b/test/test_sessions_endpoints.py @@ -10,7 +10,7 @@ from datetime import datetime, timedelta INSTALL_PATH = "/app" sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"]) -from helper import timeNowDB, get_setting_value +from helper import timeNowDB, timeNowTZ, get_setting_value from api_server.api_server_start import app @pytest.fixture(scope="session") From 80d7ef7f24cadbd8f3a3abd3f403806e111745b9 Mon Sep 17 00:00:00 2001 From: jokob-sk Date: Tue, 4 Nov 2025 19:46:50 +1100 Subject: [PATCH 07/31] BE/PLG: TZ timestamp work #1251 Signed-off-by: jokob-sk --- server/helper.py | 1 + server/logger.py | 1 + 2 files changed, 2 insertions(+) diff --git a/server/helper.py b/server/helper.py index f072e7c1..cd9cae1c 100755 --- a/server/helper.py +++ b/server/helper.py @@ -12,6 +12,7 @@ import pytz from pytz import timezone import json import time +from zoneinfo import ZoneInfo from pathlib import Path import requests import base64 diff --git a/server/logger.py b/server/logger.py index 2763d489..8cd16c9d 100755 --- a/server/logger.py +++ b/server/logger.py @@ -5,6 +5,7 @@ import threading import queue import time import logging +from zoneinfo import ZoneInfo # NetAlertX imports import conf From 5452b7287ba5bed5c6c7ff47ed093593a763cec6 Mon Sep 17 00:00:00 2001 From: jokob-sk Date: Tue, 4 Nov 2025 19:52:19 +1100 Subject: [PATCH 08/31] BE/PLG: TZ timestamp work #1251 Signed-off-by: jokob-sk --- server/helper.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/server/helper.py b/server/helper.py index cd9cae1c..88d20f67 100755 --- a/server/helper.py +++ b/server/helper.py @@ -95,7 +95,7 @@ def format_event_date(date_str: str, event_type: str) -> str: # ------------------------------------------------------------------------------------------- def ensure_datetime(dt: Union[str, datetime.datetime, None]) -> datetime.datetime: if dt is None: - return timeNowDB() + return timeNowTZ() if isinstance(dt, str): return datetime.datetime.fromisoformat(dt) return dt From ea8cea16c50bc11e4c0fc5c50734e5da24ec2deb Mon Sep 17 00:00:00 2001 From: jokob-sk Date: Tue, 4 Nov 2025 20:01:27 +1100 Subject: [PATCH 09/31] TEST: cleanup Signed-off-by: jokob-sk --- test/{ => api_endpoints}/test_dbquery_endpoints.py | 0 test/{ => api_endpoints}/test_device_endpoints.py | 0 
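PATCH 06 above hardens the timezone lookup in helper.py and logger.py so that conf.tz may be either an already-constructed tzinfo object or a plain zone-name string, with a silent fallback to local time when neither works. A rough standalone sketch of that normalization (resolve_tz is an illustrative name, not a function in the codebase):

```python
# Sketch of the conf.tz handling pattern from the hunks above: accept a tzinfo object,
# a zone-name string, or nothing, and fall back to local time on any error.
import datetime
from zoneinfo import ZoneInfo

def resolve_tz(tz_setting):
    try:
        if isinstance(tz_setting, datetime.tzinfo):
            return tz_setting            # e.g. a pytz or ZoneInfo object already stored in conf.tz
        if tz_setting:
            return ZoneInfo(tz_setting)  # e.g. the string "Europe/Berlin"
    except Exception:
        pass                             # unknown zone name -> behave as if no timezone is configured
    return None                          # None makes datetime.now() use the local system time

for setting in (ZoneInfo("Europe/Berlin"), "Europe/Berlin", "not/a/zone", None):
    print(datetime.datetime.now(resolve_tz(setting)).strftime('%Y-%m-%d %H:%M:%S'))
```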
test/{ => api_endpoints}/test_devices_endpoints.py | 0 test/{ => api_endpoints}/test_events_endpoints.py | 0 test/{ => api_endpoints}/test_graphq_endpoints.py | 0 test/{ => api_endpoints}/test_history_endpoints.py | 0 test/{ => api_endpoints}/test_messaging_in_app_endpoints.py | 0 test/{ => api_endpoints}/test_nettools_endpoints.py | 0 test/{ => api_endpoints}/test_sessions_endpoints.py | 0 test/{ => api_endpoints}/test_settings_endpoints.py | 0 test/{ => backend}/test_compound_conditions.py | 0 test/{ => backend}/test_safe_builder_unit.py | 0 test/{ => backend}/test_sql_injection_prevention.py | 0 test/{ => backend}/test_sql_security.py | 0 14 files changed, 0 insertions(+), 0 deletions(-) rename test/{ => api_endpoints}/test_dbquery_endpoints.py (100%) mode change 100755 => 100644 rename test/{ => api_endpoints}/test_device_endpoints.py (100%) mode change 100755 => 100644 rename test/{ => api_endpoints}/test_devices_endpoints.py (100%) mode change 100755 => 100644 rename test/{ => api_endpoints}/test_events_endpoints.py (100%) mode change 100755 => 100644 rename test/{ => api_endpoints}/test_graphq_endpoints.py (100%) mode change 100755 => 100644 rename test/{ => api_endpoints}/test_history_endpoints.py (100%) mode change 100755 => 100644 rename test/{ => api_endpoints}/test_messaging_in_app_endpoints.py (100%) mode change 100755 => 100644 rename test/{ => api_endpoints}/test_nettools_endpoints.py (100%) mode change 100755 => 100644 rename test/{ => api_endpoints}/test_sessions_endpoints.py (100%) mode change 100755 => 100644 rename test/{ => api_endpoints}/test_settings_endpoints.py (100%) mode change 100755 => 100644 rename test/{ => backend}/test_compound_conditions.py (100%) mode change 100755 => 100644 rename test/{ => backend}/test_safe_builder_unit.py (100%) mode change 100755 => 100644 rename test/{ => backend}/test_sql_injection_prevention.py (100%) mode change 100755 => 100644 rename test/{ => backend}/test_sql_security.py (100%) mode change 100755 => 100644 diff --git a/test/test_dbquery_endpoints.py b/test/api_endpoints/test_dbquery_endpoints.py old mode 100755 new mode 100644 similarity index 100% rename from test/test_dbquery_endpoints.py rename to test/api_endpoints/test_dbquery_endpoints.py diff --git a/test/test_device_endpoints.py b/test/api_endpoints/test_device_endpoints.py old mode 100755 new mode 100644 similarity index 100% rename from test/test_device_endpoints.py rename to test/api_endpoints/test_device_endpoints.py diff --git a/test/test_devices_endpoints.py b/test/api_endpoints/test_devices_endpoints.py old mode 100755 new mode 100644 similarity index 100% rename from test/test_devices_endpoints.py rename to test/api_endpoints/test_devices_endpoints.py diff --git a/test/test_events_endpoints.py b/test/api_endpoints/test_events_endpoints.py old mode 100755 new mode 100644 similarity index 100% rename from test/test_events_endpoints.py rename to test/api_endpoints/test_events_endpoints.py diff --git a/test/test_graphq_endpoints.py b/test/api_endpoints/test_graphq_endpoints.py old mode 100755 new mode 100644 similarity index 100% rename from test/test_graphq_endpoints.py rename to test/api_endpoints/test_graphq_endpoints.py diff --git a/test/test_history_endpoints.py b/test/api_endpoints/test_history_endpoints.py old mode 100755 new mode 100644 similarity index 100% rename from test/test_history_endpoints.py rename to test/api_endpoints/test_history_endpoints.py diff --git a/test/test_messaging_in_app_endpoints.py 
b/test/api_endpoints/test_messaging_in_app_endpoints.py old mode 100755 new mode 100644 similarity index 100% rename from test/test_messaging_in_app_endpoints.py rename to test/api_endpoints/test_messaging_in_app_endpoints.py diff --git a/test/test_nettools_endpoints.py b/test/api_endpoints/test_nettools_endpoints.py old mode 100755 new mode 100644 similarity index 100% rename from test/test_nettools_endpoints.py rename to test/api_endpoints/test_nettools_endpoints.py diff --git a/test/test_sessions_endpoints.py b/test/api_endpoints/test_sessions_endpoints.py old mode 100755 new mode 100644 similarity index 100% rename from test/test_sessions_endpoints.py rename to test/api_endpoints/test_sessions_endpoints.py diff --git a/test/test_settings_endpoints.py b/test/api_endpoints/test_settings_endpoints.py old mode 100755 new mode 100644 similarity index 100% rename from test/test_settings_endpoints.py rename to test/api_endpoints/test_settings_endpoints.py diff --git a/test/test_compound_conditions.py b/test/backend/test_compound_conditions.py old mode 100755 new mode 100644 similarity index 100% rename from test/test_compound_conditions.py rename to test/backend/test_compound_conditions.py diff --git a/test/test_safe_builder_unit.py b/test/backend/test_safe_builder_unit.py old mode 100755 new mode 100644 similarity index 100% rename from test/test_safe_builder_unit.py rename to test/backend/test_safe_builder_unit.py diff --git a/test/test_sql_injection_prevention.py b/test/backend/test_sql_injection_prevention.py old mode 100755 new mode 100644 similarity index 100% rename from test/test_sql_injection_prevention.py rename to test/backend/test_sql_injection_prevention.py diff --git a/test/test_sql_security.py b/test/backend/test_sql_security.py old mode 100755 new mode 100644 similarity index 100% rename from test/test_sql_security.py rename to test/backend/test_sql_security.py From 0845b7f4453ac9d8bf6bcc66ceb483af011c496d Mon Sep 17 00:00:00 2001 From: jokob-sk Date: Wed, 5 Nov 2025 15:25:53 +1100 Subject: [PATCH 10/31] BE: name resolution did not apply regex cleanup Signed-off-by: jokob-sk --- server/helper.py | 10 ++++++---- server/scan/name_resolution.py | 1 + 2 files changed, 7 insertions(+), 4 deletions(-) diff --git a/server/helper.py b/server/helper.py index 88d20f67..db678ffd 100755 --- a/server/helper.py +++ b/server/helper.py @@ -381,10 +381,12 @@ def get_setting_value(key): value = setting_value_to_python_type(set_type, set_value) else: value = setting_value_to_python_type(set_type, str(set_value)) + SETTINGS_SECONDARYCACHE[key] = value + return value - # Otherwise fall back to retrive from json + # Otherwise fall back to retrieve from json setting = get_setting(key) if setting is not None: @@ -458,9 +460,6 @@ def setting_value_to_python_type(set_type, set_value): if isinstance(set_value, str): try: value = json.loads(set_value.replace("'", "\"")) - - # reverse transformations to all entries - value = reverseTransformers(value, transformers) except json.JSONDecodeError as e: mylog('none', [f'[setting_value_to_python_type] Error decoding JSON object: {e}']) @@ -470,6 +469,9 @@ def setting_value_to_python_type(set_type, set_value): elif isinstance(set_value, list): value = set_value + # Always apply transformers (base64, etc.) 
to array entries + value = reverseTransformers(value, transformers) + elif dataType == 'object' and elementType == 'input': if isinstance(set_value, str): try: diff --git a/server/scan/name_resolution.py b/server/scan/name_resolution.py index efa4371d..6c26c417 100755 --- a/server/scan/name_resolution.py +++ b/server/scan/name_resolution.py @@ -72,6 +72,7 @@ class NameResolver: name += " (IP match)" regexes = get_setting_value('NEWDEV_NAME_CLEANUP_REGEX') or [] + mylog('trace', [f"[cleanDeviceName] applying regexes: {regexes}"]) for rgx in regexes: mylog('trace', [f"[cleanDeviceName] applying regex: {rgx}"]) name = re.sub(rgx, "", name) From 746f1a892274193c1a8d8396cb72a099e8e6836f Mon Sep 17 00:00:00 2001 From: jokob-sk Date: Wed, 5 Nov 2025 15:26:57 +1100 Subject: [PATCH 11/31] DOCS: decription fix and --exclude-broadcast documentation Signed-off-by: jokob-sk --- docs/NOTIFICATIONS.md | 11 +++++++++-- front/plugins/notification_processing/config.json | 2 +- 2 files changed, 10 insertions(+), 3 deletions(-) diff --git a/docs/NOTIFICATIONS.md b/docs/NOTIFICATIONS.md index a8b6a8ea..58d50418 100755 --- a/docs/NOTIFICATIONS.md +++ b/docs/NOTIFICATIONS.md @@ -47,11 +47,18 @@ In Notification Processing settings, you can specify blanket rules. These allow 3. A filter to allow you to set device-specific exceptions to New devices being added to the app. 4. A filter to allow you to set device-specific exceptions to generated Events. -## Ignoring devices 🔕 +## Ignoring devices 💻 ![Ignoring new devices](./img/NOTIFICATIONS/NEWDEV_ignores.png) You can completely ignore detected devices globally. This could be because your instance detects docker containers, you want to ignore devices from a specific manufacturer via MAC rules or you want to ignore devices on a specific IP range. 1. Ignored MACs (`NEWDEV_ignored_MACs`) - List of MACs to ignore. -2. Ignored IPs (`NEWDEV_ignored_IPs`) - List of IPs to ignore. \ No newline at end of file +2. Ignored IPs (`NEWDEV_ignored_IPs`) - List of IPs to ignore. + +## Ignoring notifications 🔕 + +You can filter out unwanted notifications globally. This could be because of a misbehaving device (GoogleNest/GoogleHub (See also [ARPSAN docs and the `--exclude-broadcast` flag](https://github.com/jokob-sk/NetAlertX/tree/main/front/plugins/arp_scan#ip-flipping-on-google-nest-devices))) which flips between IP addresses, or because you want to ignore new device notifications of a certian pattern. + +1. Events Filter (`NTFPRCS_event_condition`) - filter out Events from notifications. +2. New Devices Filter (`NTFPRCS_new_dev_condition`) - filter out New Devices from notifications, but log and keep a new device in the system. \ No newline at end of file diff --git a/front/plugins/notification_processing/config.json b/front/plugins/notification_processing/config.json index 5c2f60b3..acb4fbbf 100755 --- a/front/plugins/notification_processing/config.json +++ b/front/plugins/notification_processing/config.json @@ -149,7 +149,7 @@ "description": [ { "language_code": "en_us", - "string": "You can specify a SQL where condition to filter out Events from notifications. For example AND devLastIP NOT LIKE '192.168.3.%' will always exclude New Device notifications for all devices with the IP starting with 192.168.3.%." + "string": "You can specify a SQL where condition to filter out Events from notifications. For example AND devLastIP NOT LIKE '192.168.3.%' will always exclude any Event notifications for all devices with the IP starting with 192.168.3.%." 
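PATCH 10 above makes the name-cleanup step in name_resolution.py (logged as [cleanDeviceName]) run every pattern from the NEWDEV_NAME_CLEANUP_REGEX setting through re.sub() against the resolved device name. A rough illustration of that loop, with made-up patterns and hostnames:

```python
# Rough illustration of the cleanup loop shown in name_resolution.py above: each regex
# from the NEWDEV_NAME_CLEANUP_REGEX setting is applied in turn and whatever it matches
# is stripped from the resolved name. The example patterns and names are hypothetical.
import re

cleanup_regexes = [r"\.local$", r"\.lan$", r"\s*\(IP match\)$"]

def clean_device_name(name: str) -> str:
    for rgx in cleanup_regexes:
        name = re.sub(rgx, "", name)
    return name.strip()

print(clean_device_name("nas.local"))                  # -> 'nas'
print(clean_device_name("office-printer (IP match)"))  # -> 'office-printer'
```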
} ] } From c08eb1dbba730fba0d282abe84cb0564aea4ce46 Mon Sep 17 00:00:00 2001 From: jokob-sk Date: Wed, 5 Nov 2025 16:08:04 +1100 Subject: [PATCH 12/31] BE: chore datetime_utils Signed-off-by: jokob-sk --- .github/copilot-instructions.md | 2 +- docs/NOTIFICATIONS.md | 2 +- front/plugins/_publisher_apprise/apprise.py | 3 +- front/plugins/_publisher_email/email_smtp.py | 3 +- front/plugins/_publisher_mqtt/mqtt.py | 3 +- front/plugins/_publisher_ntfy/ntfy.py | 3 +- front/plugins/_publisher_pushover/pushover.py | 3 +- .../plugins/_publisher_pushsafer/pushsafer.py | 3 +- front/plugins/_publisher_telegram/tg.py | 3 +- front/plugins/_publisher_webhook/webhook.py | 3 +- front/plugins/internet_ip/script.py | 3 +- front/plugins/internet_speedtest/script.py | 3 +- front/plugins/nmap_scan/script.py | 3 +- front/plugins/plugin_helper.py | 3 +- front/plugins/sync/sync.py | 3 +- server/__main__.py | 3 +- server/api.py | 3 +- server/api_server/device_endpoint.py | 3 +- server/api_server/devices_endpoint.py | 3 +- server/api_server/events_endpoint.py | 3 +- server/api_server/history_endpoint.py | 3 +- server/api_server/sessions_endpoint.py | 3 +- server/api_server/sync_endpoint.py | 3 +- server/app_state.py | 3 +- server/helper.py | 139 --------------- server/initialise.py | 3 +- server/logger.py | 37 +--- server/messaging/in_app.py | 3 +- server/messaging/reporting.py | 3 +- server/models/notification_instance.py | 6 +- server/plugin.py | 3 +- server/scan/device_handling.py | 3 +- server/scan/session_events.py | 3 +- server/utils/datetime_utils.py | 162 ++++++++++++++++++ test/api_endpoints/test_dbquery_endpoints.py | 3 +- test/api_endpoints/test_events_endpoints.py | 3 +- test/api_endpoints/test_sessions_endpoints.py | 3 +- 37 files changed, 234 insertions(+), 207 deletions(-) create mode 100644 server/utils/datetime_utils.py diff --git a/.github/copilot-instructions.md b/.github/copilot-instructions.md index 82ca4350..d485819f 100755 --- a/.github/copilot-instructions.md +++ b/.github/copilot-instructions.md @@ -42,7 +42,7 @@ Backend loop phases (see `server/__main__.py` and `server/plugin.py`): `once`, ` ## Conventions & helpers to reuse - Settings: add/modify via `ccd()` in `server/initialise.py` or per‑plugin manifest. Never hardcode ports or secrets; use `get_setting_value()`. - Logging: use `logger.mylog(level, [message])`; levels: none/minimal/verbose/debug/trace. -- Time/MAC/strings: `helper.py` (`timeNowTZ`, `normalize_mac`, sanitizers). Validate MACs before DB writes. +- Time/MAC/strings: `helper.py` (`timeNowDB`, `normalize_mac`, sanitizers). Validate MACs before DB writes. - DB helpers: prefer `server/db/db_helper.py` functions (e.g., `get_table_json`, device condition helpers) over raw SQL in new paths. ## Dev workflow (devcontainer) diff --git a/docs/NOTIFICATIONS.md b/docs/NOTIFICATIONS.md index 58d50418..3255820a 100755 --- a/docs/NOTIFICATIONS.md +++ b/docs/NOTIFICATIONS.md @@ -58,7 +58,7 @@ You can completely ignore detected devices globally. This could be because your ## Ignoring notifications 🔕 -You can filter out unwanted notifications globally. This could be because of a misbehaving device (GoogleNest/GoogleHub (See also [ARPSAN docs and the `--exclude-broadcast` flag](https://github.com/jokob-sk/NetAlertX/tree/main/front/plugins/arp_scan#ip-flipping-on-google-nest-devices))) which flips between IP addresses, or because you want to ignore new device notifications of a certian pattern. +You can filter out unwanted notifications globally. 
This could be because of a misbehaving device (GoogleNest/GoogleHub (See also [ARPSAN docs and the `--exclude-broadcast` flag](https://github.com/jokob-sk/NetAlertX/tree/main/front/plugins/arp_scan#ip-flipping-on-google-nest-devices))) which flips between IP addresses, or because you want to ignore new device notifications of a certain pattern. 1. Events Filter (`NTFPRCS_event_condition`) - filter out Events from notifications. 2. New Devices Filter (`NTFPRCS_new_dev_condition`) - filter out New Devices from notifications, but log and keep a new device in the system. \ No newline at end of file diff --git a/front/plugins/_publisher_apprise/apprise.py b/front/plugins/_publisher_apprise/apprise.py index 5f1c3c33..a65b1ac0 100755 --- a/front/plugins/_publisher_apprise/apprise.py +++ b/front/plugins/_publisher_apprise/apprise.py @@ -16,7 +16,8 @@ import conf from const import confFileName, logPath from plugin_helper import Plugin_Objects from logger import mylog, Logger, append_line_to_file -from helper import timeNowDB, get_setting_value +from helper import get_setting_value +from utils.datetime_utils import timeNowDB from models.notification_instance import NotificationInstance from database import DB from pytz import timezone diff --git a/front/plugins/_publisher_email/email_smtp.py b/front/plugins/_publisher_email/email_smtp.py index 8d738844..9370b03b 100755 --- a/front/plugins/_publisher_email/email_smtp.py +++ b/front/plugins/_publisher_email/email_smtp.py @@ -25,7 +25,8 @@ import conf from const import confFileName, logPath from plugin_helper import Plugin_Objects from logger import mylog, Logger, append_line_to_file -from helper import timeNowDB, get_setting_value, hide_email +from helper import get_setting_value, hide_email +from utils.datetime_utils import timeNowDB from models.notification_instance import NotificationInstance from database import DB from pytz import timezone diff --git a/front/plugins/_publisher_mqtt/mqtt.py b/front/plugins/_publisher_mqtt/mqtt.py index 03a441a3..943add6c 100755 --- a/front/plugins/_publisher_mqtt/mqtt.py +++ b/front/plugins/_publisher_mqtt/mqtt.py @@ -23,8 +23,9 @@ from const import confFileName, logPath from plugin_utils import getPluginObject from plugin_helper import Plugin_Objects from logger import mylog, Logger -from helper import timeNowDB, get_setting_value, bytes_to_string, \ +from helper import get_setting_value, bytes_to_string, \ sanitize_string, normalize_string +from utils.datetime_utils import timeNowDB from database import DB, get_device_stats diff --git a/front/plugins/_publisher_ntfy/ntfy.py b/front/plugins/_publisher_ntfy/ntfy.py index 79df681f..ca441f37 100755 --- a/front/plugins/_publisher_ntfy/ntfy.py +++ b/front/plugins/_publisher_ntfy/ntfy.py @@ -19,7 +19,8 @@ import conf from const import confFileName, logPath from plugin_helper import Plugin_Objects, handleEmpty from logger import mylog, Logger, append_line_to_file -from helper import timeNowDB, get_setting_value +from helper import get_setting_value +from utils.datetime_utils import timeNowDB from models.notification_instance import NotificationInstance from database import DB from pytz import timezone diff --git a/front/plugins/_publisher_pushover/pushover.py b/front/plugins/_publisher_pushover/pushover.py index 8ebd1dee..e45c76d6 100755 --- a/front/plugins/_publisher_pushover/pushover.py +++ b/front/plugins/_publisher_pushover/pushover.py @@ -11,7 +11,8 @@ sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"]) from plugin_helper import 
Plugin_Objects, handleEmpty # noqa: E402 from logger import mylog, Logger # noqa: E402 -from helper import timeNowDB, get_setting_value, hide_string # noqa: E402 +from helper import get_setting_value, hide_string # noqa: E402 +from utils.datetime_utils import timeNowDB from models.notification_instance import NotificationInstance # noqa: E402 from database import DB # noqa: E402 import conf diff --git a/front/plugins/_publisher_pushsafer/pushsafer.py b/front/plugins/_publisher_pushsafer/pushsafer.py index 366f170a..422ed0f9 100755 --- a/front/plugins/_publisher_pushsafer/pushsafer.py +++ b/front/plugins/_publisher_pushsafer/pushsafer.py @@ -19,7 +19,8 @@ import conf from const import confFileName, logPath from plugin_helper import Plugin_Objects, handleEmpty from logger import mylog, Logger, append_line_to_file -from helper import timeNowDB, get_setting_value, hide_string +from helper import get_setting_value, hide_string +from utils.datetime_utils import timeNowDB from models.notification_instance import NotificationInstance from database import DB from pytz import timezone diff --git a/front/plugins/_publisher_telegram/tg.py b/front/plugins/_publisher_telegram/tg.py index c9f92d9d..72f81ed7 100755 --- a/front/plugins/_publisher_telegram/tg.py +++ b/front/plugins/_publisher_telegram/tg.py @@ -16,7 +16,8 @@ import conf from const import confFileName, logPath from plugin_helper import Plugin_Objects from logger import mylog, Logger, append_line_to_file -from helper import timeNowDB, get_setting_value +from helper import get_setting_value +from utils.datetime_utils import timeNowDB from models.notification_instance import NotificationInstance from database import DB from pytz import timezone diff --git a/front/plugins/_publisher_webhook/webhook.py b/front/plugins/_publisher_webhook/webhook.py index f1eec9d7..6644d2fe 100755 --- a/front/plugins/_publisher_webhook/webhook.py +++ b/front/plugins/_publisher_webhook/webhook.py @@ -22,7 +22,8 @@ import conf from const import logPath, confFileName from plugin_helper import Plugin_Objects, handleEmpty from logger import mylog, Logger, append_line_to_file -from helper import timeNowDB, get_setting_value, hide_string, write_file +from helper import get_setting_value, hide_string, write_file +from utils.datetime_utils import timeNowDB from models.notification_instance import NotificationInstance from database import DB from pytz import timezone diff --git a/front/plugins/internet_ip/script.py b/front/plugins/internet_ip/script.py index f56d2ed7..7f8cb8b9 100755 --- a/front/plugins/internet_ip/script.py +++ b/front/plugins/internet_ip/script.py @@ -20,8 +20,9 @@ sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"]) from plugin_helper import Plugin_Object, Plugin_Objects, decodeBase64 from logger import mylog, Logger, append_line_to_file -from helper import timeNowDB, check_IP_format, get_setting_value +from helper import check_IP_format, get_setting_value from const import logPath, applicationPath, fullDbPath +from utils.datetime_utils import timeNowDB import conf from pytz import timezone diff --git a/front/plugins/internet_speedtest/script.py b/front/plugins/internet_speedtest/script.py index ef4f5705..4c41e7a3 100755 --- a/front/plugins/internet_speedtest/script.py +++ b/front/plugins/internet_speedtest/script.py @@ -13,7 +13,8 @@ sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"]) from plugin_helper import Plugin_Objects from logger import mylog, Logger, append_line_to_file -from helper import 
timeNowDB, get_setting_value +from helper import get_setting_value +from utils.datetime_utils import timeNowDB import conf from pytz import timezone from const import logPath diff --git a/front/plugins/nmap_scan/script.py b/front/plugins/nmap_scan/script.py index 180973bb..6ca65917 100755 --- a/front/plugins/nmap_scan/script.py +++ b/front/plugins/nmap_scan/script.py @@ -14,7 +14,8 @@ sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"]) from plugin_helper import Plugin_Object, Plugin_Objects, decodeBase64 from logger import mylog, Logger, append_line_to_file -from helper import timeNowDB, get_setting_value +from helper import get_setting_value +from utils.datetime_utils import timeNowDB from const import logPath, applicationPath import conf from pytz import timezone diff --git a/front/plugins/plugin_helper.py b/front/plugins/plugin_helper.py index d95cb795..10023195 100755 --- a/front/plugins/plugin_helper.py +++ b/front/plugins/plugin_helper.py @@ -11,7 +11,8 @@ INSTALL_PATH = "/app" sys.path.append(f"{INSTALL_PATH}/front/plugins") sys.path.append(f'{INSTALL_PATH}/server') -from logger import mylog, Logger, timeNowDB +from logger import mylog, Logger +from utils.datetime_utils import timeNowDB from const import confFileName, default_tz #------------------------------------------------------------------------------- diff --git a/front/plugins/sync/sync.py b/front/plugins/sync/sync.py index 89695bec..3bc584e6 100755 --- a/front/plugins/sync/sync.py +++ b/front/plugins/sync/sync.py @@ -18,7 +18,8 @@ from plugin_helper import Plugin_Object, Plugin_Objects, decodeBase64 from plugin_utils import get_plugins_configs, decode_and_rename_files from logger import mylog, Logger from const import pluginsPath, fullDbPath, logPath -from helper import timeNowDB, get_setting_value +from helper import get_setting_value +from utils.datetime_utils import timeNowDB from crypto_utils import encrypt_data from messaging.in_app import write_notification import conf diff --git a/server/__main__.py b/server/__main__.py index d623a572..56743876 100755 --- a/server/__main__.py +++ b/server/__main__.py @@ -26,7 +26,8 @@ from pathlib import Path import conf from const import * from logger import mylog -from helper import filePermissions, timeNowTZ, get_setting_value +from helper import filePermissions, get_setting_value +from utils.datetime_utils import timeNowTZ from app_state import updateState from api import update_api from scan.session_events import process_scan diff --git a/server/api.py b/server/api.py index 17d0ee43..4278e9a1 100755 --- a/server/api.py +++ b/server/api.py @@ -7,7 +7,8 @@ import datetime import conf from const import (apiPath, sql_appevents, sql_devices_all, sql_events_pending_alert, sql_settings, sql_plugins_events, sql_plugins_history, sql_plugins_objects,sql_language_strings, sql_notifications_all, sql_online_history, sql_devices_tiles, sql_devices_filters) from logger import mylog -from helper import write_file, get_setting_value, timeNowTZ +from helper import write_file, get_setting_value +from utils.datetime_utils import timeNowTZ from app_state import updateState from models.user_events_queue_instance import UserEventsQueueInstance from messaging.in_app import write_notification diff --git a/server/api_server/device_endpoint.py b/server/api_server/device_endpoint.py index 9c032f28..7633bbd2 100755 --- a/server/api_server/device_endpoint.py +++ b/server/api_server/device_endpoint.py @@ -14,7 +14,8 @@ INSTALL_PATH="/app" 
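The long run of import changes in PATCH 12 above moves the date/time helpers out of helper.py into a new server/utils/datetime_utils.py module, so callers now import timeNowDB, timeNowTZ, format_date and related functions from utils.datetime_utils while non-datetime helpers stay in helper. A caller-side sketch of the resulting import split, assuming the repo's usual sys.path setup; the touch_device() function itself is illustrative, not part of the codebase:

```python
# Import pattern after the refactor in the hunks above (module paths taken from the diff).
from utils.datetime_utils import timeNowDB            # was: from helper import timeNowDB
from helper import get_setting_value, is_random_mac   # non-datetime helpers stay in helper

def touch_device(cur, mac: str):
    # store a DB-friendly local timestamp string for the device's last connection
    cur.execute(
        "UPDATE Devices SET devLastConnection = ? WHERE devMac = ?",
        (timeNowDB(), mac),
    )
```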
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"]) from database import get_temp_db_connection -from helper import is_random_mac, format_date, get_setting_value, timeNowDB +from helper import is_random_mac, get_setting_value +from utils.datetime_utils import timeNowDB, format_date from db.db_helper import row_to_json, get_date_from_period # -------------------------- diff --git a/server/api_server/devices_endpoint.py b/server/api_server/devices_endpoint.py index eb1960a4..ab298745 100755 --- a/server/api_server/devices_endpoint.py +++ b/server/api_server/devices_endpoint.py @@ -19,8 +19,9 @@ INSTALL_PATH="/app" sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"]) from database import get_temp_db_connection -from helper import is_random_mac, format_date, get_setting_value +from helper import is_random_mac, get_setting_value from db.db_helper import get_table_json, get_device_condition_by_status +from utils.datetime_utils import format_date # -------------------------- diff --git a/server/api_server/events_endpoint.py b/server/api_server/events_endpoint.py index 5d02fcda..c63265bf 100755 --- a/server/api_server/events_endpoint.py +++ b/server/api_server/events_endpoint.py @@ -14,8 +14,9 @@ INSTALL_PATH="/app" sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"]) from database import get_temp_db_connection -from helper import is_random_mac, format_date, get_setting_value, format_date_iso, format_event_date, mylog, ensure_datetime +from helper import is_random_mac, get_setting_value, mylog from db.db_helper import row_to_json, get_date_from_period +from utils.datetime_utils import format_date, format_date_iso, format_event_date, ensure_datetime # -------------------------- diff --git a/server/api_server/history_endpoint.py b/server/api_server/history_endpoint.py index bf719ec2..a08ca476 100755 --- a/server/api_server/history_endpoint.py +++ b/server/api_server/history_endpoint.py @@ -14,7 +14,8 @@ INSTALL_PATH="/app" sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"]) from database import get_temp_db_connection -from helper import is_random_mac, format_date, get_setting_value +from helper import is_random_mac, get_setting_value +from utils.datetime_utils import format_date # -------------------------------------------------- diff --git a/server/api_server/sessions_endpoint.py b/server/api_server/sessions_endpoint.py index 811503be..113a8250 100755 --- a/server/api_server/sessions_endpoint.py +++ b/server/api_server/sessions_endpoint.py @@ -16,8 +16,9 @@ INSTALL_PATH="/app" sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"]) from database import get_temp_db_connection -from helper import is_random_mac, format_date, get_setting_value, format_date_iso, format_event_date, mylog, format_date_diff, format_ip_long, parse_datetime +from helper import is_random_mac, get_setting_value, mylog, format_ip_long from db.db_helper import row_to_json, get_date_from_period +from utils.datetime_utils import format_date_iso, format_event_date, format_date_diff, parse_datetime, format_date # -------------------------- diff --git a/server/api_server/sync_endpoint.py b/server/api_server/sync_endpoint.py index 59b8095e..424411ab 100755 --- a/server/api_server/sync_endpoint.py +++ b/server/api_server/sync_endpoint.py @@ -2,7 +2,8 @@ import os import base64 from flask import jsonify, request from logger import mylog -from helper import get_setting_value, timeNowDB +from helper import 
get_setting_value +from utils.datetime_utils import timeNowDB from messaging.in_app import write_notification INSTALL_PATH = "/app" diff --git a/server/app_state.py b/server/app_state.py index d4b33525..ec2ffc1c 100755 --- a/server/app_state.py +++ b/server/app_state.py @@ -4,7 +4,8 @@ import json import conf from const import * from logger import mylog, logResult -from helper import timeNowDB, timeNow, checkNewVersion +from helper import checkNewVersion +from utils.datetime_utils import timeNowDB, timeNow # Register NetAlertX directories INSTALL_PATH="/app" diff --git a/server/helper.py b/server/helper.py index db678ffd..7a89c270 100755 --- a/server/helper.py +++ b/server/helper.py @@ -7,7 +7,6 @@ import os import re import unicodedata import subprocess -from typing import Union import pytz from pytz import timezone import json @@ -29,144 +28,6 @@ from logger import mylog, logResult # Register NetAlertX directories INSTALL_PATH="/app" -#------------------------------------------------------------------------------- -# DateTime -#------------------------------------------------------------------------------- -def timeNowTZ(): - if conf.tz: - return datetime.datetime.now(conf.tz).replace(microsecond=0) - else: - return datetime.datetime.now().replace(microsecond=0) - -def timeNow(): - return datetime.datetime.now().replace(microsecond=0) - -def get_timezone_offset(): - now = datetime.datetime.now(conf.tz) - offset_hours = now.utcoffset().total_seconds() / 3600 - offset_formatted = "{:+03d}:{:02d}".format(int(offset_hours), int((offset_hours % 1) * 60)) - return offset_formatted - -def timeNowDB(local=True): - """ - Return the current time (local or UTC) as ISO 8601 for DB storage. - Safe for SQLite, PostgreSQL, etc. - - Example local: '2025-11-04 18:09:11' - Example UTC: '2025-11-04 07:09:11' - """ - if local: - try: - if isinstance(conf.tz, datetime.tzinfo): - tz = conf.tz - elif conf.tz: - tz = ZoneInfo(conf.tz) - else: - tz = None - except Exception: - tz = None - return datetime.datetime.now(tz).strftime('%Y-%m-%d %H:%M:%S') - else: - return datetime.datetime.now(datetime.UTC).strftime('%Y-%m-%d %H:%M:%S') - - -#------------------------------------------------------------------------------- -# Date and time methods -#------------------------------------------------------------------------------- - -# ------------------------------------------------------------------------------------------- -def format_date_iso(date1: str) -> str: - """Return ISO 8601 string for a date or None if empty""" - if date1 is None: - return None - dt = datetime.datetime.fromisoformat(date1) if isinstance(date1, str) else date1 - return dt.isoformat() - -# ------------------------------------------------------------------------------------------- -def format_event_date(date_str: str, event_type: str) -> str: - """Format event date with fallback rules.""" - if date_str: - return format_date(date_str) - elif event_type == "": - return "" - else: - return "" - -# ------------------------------------------------------------------------------------------- -def ensure_datetime(dt: Union[str, datetime.datetime, None]) -> datetime.datetime: - if dt is None: - return timeNowTZ() - if isinstance(dt, str): - return datetime.datetime.fromisoformat(dt) - return dt - - -def parse_datetime(dt_str): - if not dt_str: - return None - try: - # Try ISO8601 first - return datetime.datetime.fromisoformat(dt_str) - except ValueError: - # Try RFC1123 / HTTP format - try: - return datetime.datetime.strptime(dt_str, '%a, %d %b %Y 
%H:%M:%S GMT') - except ValueError: - return None - -def format_date(date_str: str) -> str: - try: - dt = parse_datetime(date_str) - if dt.tzinfo is None: - # Set timezone if missing — change to timezone.utc if you prefer UTC - now = datetime.datetime.now(conf.tz) - dt = dt.replace(tzinfo=now.astimezone().tzinfo) - return dt.astimezone().isoformat() - except Exception: - return "invalid" - -def format_date_diff(date1, date2): - """ - Return difference between two datetimes as 'Xd HH:MM'. - Uses app timezone if datetime is naive. - date2 can be None (uses now). - """ - # Get timezone from settings - tz_name = get_setting_value("TIMEZONE") or "UTC" - tz = pytz.timezone(tz_name) - - def parse_dt(dt): - if dt is None: - return datetime.datetime.now(tz) - if isinstance(dt, str): - try: - dt_parsed = email.utils.parsedate_to_datetime(dt) - except Exception: - # fallback: parse ISO string - dt_parsed = datetime.datetime.fromisoformat(dt) - # convert naive GMT/UTC to app timezone - if dt_parsed.tzinfo is None: - dt_parsed = tz.localize(dt_parsed) - else: - dt_parsed = dt_parsed.astimezone(tz) - return dt_parsed - return dt if dt.tzinfo else tz.localize(dt) - - dt1 = parse_dt(date1) - dt2 = parse_dt(date2) - - delta = dt2 - dt1 - total_minutes = int(delta.total_seconds() // 60) - days, rem_minutes = divmod(total_minutes, 1440) # 1440 mins in a day - hours, minutes = divmod(rem_minutes, 60) - - return { - "text": f"{days}d {hours:02}:{minutes:02}", - "days": days, - "hours": hours, - "minutes": minutes, - "total_minutes": total_minutes - } #------------------------------------------------------------------------------- # File system permission handling diff --git a/server/initialise.py b/server/initialise.py index c16e71ba..8f55476d 100755 --- a/server/initialise.py +++ b/server/initialise.py @@ -12,7 +12,8 @@ import re # Register NetAlertX libraries import conf from const import fullConfPath, applicationPath, fullConfFolder, default_tz -from helper import getBuildTimeStamp, fixPermissions, collect_lang_strings, updateSubnets, isJsonObject, setting_value_to_python_type, timeNowDB, get_setting_value, generate_random_string +from helper import getBuildTimeStamp, fixPermissions, collect_lang_strings, updateSubnets, isJsonObject, setting_value_to_python_type, get_setting_value, generate_random_string +from utils.datetime_utils import timeNowDB from app_state import updateState from logger import mylog from api import update_api diff --git a/server/logger.py b/server/logger.py index 8cd16c9d..0b4a57e2 100755 --- a/server/logger.py +++ b/server/logger.py @@ -7,40 +7,15 @@ import time import logging from zoneinfo import ZoneInfo +# Register NetAlertX directories +INSTALL_PATH="/app" + +sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"]) + # NetAlertX imports import conf from const import * - -#------------------------------------------------------------------------------- -# duplication from helper to avoid circle -#------------------------------------------------------------------------------- -def timeNowTZ(): - if conf.tz: - return datetime.datetime.now(conf.tz).replace(microsecond=0) - else: - return datetime.datetime.now().replace(microsecond=0) - -def timeNowDB(local=True): - """ - Return the current time (local or UTC) as ISO 8601 for DB storage. - Safe for SQLite, PostgreSQL, etc. 
- - Example local: '2025-11-04 18:09:11' - Example UTC: '2025-11-04 07:09:11' - """ - if local: - try: - if isinstance(conf.tz, datetime.tzinfo): - tz = conf.tz - elif conf.tz: - tz = ZoneInfo(conf.tz) - else: - tz = None - except Exception: - tz = None - return datetime.datetime.now(tz).strftime('%Y-%m-%d %H:%M:%S') - else: - return datetime.datetime.now(datetime.UTC).strftime('%Y-%m-%d %H:%M:%S') +from utils.datetime_utils import timeNowTZ #------------------------------------------------------------------------------- # Map custom debug levels to Python logging levels diff --git a/server/messaging/in_app.py b/server/messaging/in_app.py index 5246acf4..98cd5a28 100755 --- a/server/messaging/in_app.py +++ b/server/messaging/in_app.py @@ -20,7 +20,8 @@ sys.path.extend([f"{INSTALL_PATH}/server"]) import conf from const import applicationPath, logPath, apiPath, confFileName, reportTemplatesPath from logger import logResult, mylog -from helper import generate_mac_links, removeDuplicateNewLines, timeNowDB, get_file_content, write_file, get_setting_value, get_timezone_offset +from helper import generate_mac_links, removeDuplicateNewLines, get_file_content, write_file, get_setting_value +from utils.datetime_utils import timeNowDB NOTIFICATION_API_FILE = apiPath + 'user_notifications.json' diff --git a/server/messaging/reporting.py b/server/messaging/reporting.py index 2c885ce1..90e16808 100755 --- a/server/messaging/reporting.py +++ b/server/messaging/reporting.py @@ -20,9 +20,10 @@ sys.path.extend([f"{INSTALL_PATH}/server"]) import conf from const import applicationPath, logPath, apiPath, confFileName -from helper import get_file_content, write_file, get_timezone_offset, get_setting_value +from helper import get_file_content, write_file, get_setting_value from logger import logResult, mylog from db.sql_safe_builder import create_safe_condition_builder +from utils.datetime_utils import get_timezone_offset #=============================================================================== # REPORTING diff --git a/server/models/notification_instance.py b/server/models/notification_instance.py index 02832d45..8db9be43 100755 --- a/server/models/notification_instance.py +++ b/server/models/notification_instance.py @@ -16,12 +16,10 @@ from const import applicationPath, logPath, apiPath, reportTemplatesPath from logger import mylog, Logger from helper import generate_mac_links, \ removeDuplicateNewLines, \ - timeNowDB, \ - timeNowTZ, \ write_file, \ - get_setting_value, \ - get_timezone_offset + get_setting_value from messaging.in_app import write_notification +from utils.datetime_utils import timeNowDB, get_timezone_offset # ----------------------------------------------------------------------------- diff --git a/server/plugin.py b/server/plugin.py index ba24b47d..a62ba584 100755 --- a/server/plugin.py +++ b/server/plugin.py @@ -12,7 +12,8 @@ from collections import namedtuple import conf from const import pluginsPath, logPath, applicationPath, reportTemplatesPath from logger import mylog, Logger -from helper import timeNowDB, timeNowTZ, get_file_content, write_file, get_setting, get_setting_value +from helper import get_file_content, write_file, get_setting, get_setting_value +from utils.datetime_utils import timeNowTZ, timeNowDB from app_state import updateState from api import update_api from plugin_utils import logEventStatusCounts, get_plugin_string, get_plugin_setting_obj, print_plugin_info, list_to_csv, combine_plugin_objects, resolve_wildcards_arr, handle_empty, custom_plugin_decoder, 
decode_and_rename_files diff --git a/server/scan/device_handling.py b/server/scan/device_handling.py index 48cb84fe..76aca0c5 100755 --- a/server/scan/device_handling.py +++ b/server/scan/device_handling.py @@ -10,7 +10,8 @@ from dateutil import parser INSTALL_PATH="/app" sys.path.extend([f"{INSTALL_PATH}/server"]) -from helper import timeNowDB, timeNowTZ, get_setting_value, check_IP_format +from helper import get_setting_value, check_IP_format +from utils.datetime_utils import timeNowDB from logger import mylog, Logger from const import vendorsPath, vendorsPathNewest, sql_generateGuid from models.device_instance import DeviceInstance diff --git a/server/scan/session_events.py b/server/scan/session_events.py index 2dd1b9fe..88fbb530 100755 --- a/server/scan/session_events.py +++ b/server/scan/session_events.py @@ -6,7 +6,8 @@ sys.path.extend([f"{INSTALL_PATH}/server"]) import conf from scan.device_handling import create_new_devices, print_scan_stats, save_scanned_devices, exclude_ignored_devices, update_devices_data_from_scan -from helper import timeNowDB, get_setting_value +from helper import get_setting_value +from utils.datetime_utils import timeNowDB from db.db_helper import print_table_schema from logger import mylog, Logger from messaging.reporting import skip_repeated_notifications diff --git a/server/utils/datetime_utils.py b/server/utils/datetime_utils.py new file mode 100644 index 00000000..b8f7d1dc --- /dev/null +++ b/server/utils/datetime_utils.py @@ -0,0 +1,162 @@ +#!/usr/bin/env python + +import os +import pathlib +import sys +from datetime import datetime +import pytz +from pytz import timezone +import datetime +from typing import Union + +# Register NetAlertX directories +INSTALL_PATH="/app" +sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"]) + + +# Register NetAlertX directories +INSTALL_PATH="/app" + +import conf +from const import * + + + +#------------------------------------------------------------------------------- +# DateTime +#------------------------------------------------------------------------------- +def timeNowTZ(): + if conf.tz: + return datetime.datetime.now(conf.tz).replace(microsecond=0) + else: + return datetime.datetime.now().replace(microsecond=0) + +def timeNow(): + return datetime.datetime.now().replace(microsecond=0) + +def get_timezone_offset(): + now = datetime.datetime.now(conf.tz) + offset_hours = now.utcoffset().total_seconds() / 3600 + offset_formatted = "{:+03d}:{:02d}".format(int(offset_hours), int((offset_hours % 1) * 60)) + return offset_formatted + +def timeNowDB(local=True): + """ + Return the current time (local or UTC) as ISO 8601 for DB storage. + Safe for SQLite, PostgreSQL, etc. 
+ + Example local: '2025-11-04 18:09:11' + Example UTC: '2025-11-04 07:09:11' + """ + if local: + try: + if isinstance(conf.tz, datetime.tzinfo): + tz = conf.tz + elif conf.tz: + tz = ZoneInfo(conf.tz) + else: + tz = None + except Exception: + tz = None + return datetime.datetime.now(tz).strftime('%Y-%m-%d %H:%M:%S') + else: + return datetime.datetime.now(datetime.UTC).strftime('%Y-%m-%d %H:%M:%S') + + +#------------------------------------------------------------------------------- +# Date and time methods +#------------------------------------------------------------------------------- + +# ------------------------------------------------------------------------------------------- +def format_date_iso(date1: str) -> str: + """Return ISO 8601 string for a date or None if empty""" + if date1 is None: + return None + dt = datetime.datetime.fromisoformat(date1) if isinstance(date1, str) else date1 + return dt.isoformat() + +# ------------------------------------------------------------------------------------------- +def format_event_date(date_str: str, event_type: str) -> str: + """Format event date with fallback rules.""" + if date_str: + return format_date(date_str) + elif event_type == "": + return "" + else: + return "" + +# ------------------------------------------------------------------------------------------- +def ensure_datetime(dt: Union[str, datetime.datetime, None]) -> datetime.datetime: + if dt is None: + return timeNowTZ() + if isinstance(dt, str): + return datetime.datetime.fromisoformat(dt) + return dt + + +def parse_datetime(dt_str): + if not dt_str: + return None + try: + # Try ISO8601 first + return datetime.datetime.fromisoformat(dt_str) + except ValueError: + # Try RFC1123 / HTTP format + try: + return datetime.datetime.strptime(dt_str, '%a, %d %b %Y %H:%M:%S GMT') + except ValueError: + return None + +def format_date(date_str: str) -> str: + try: + dt = parse_datetime(date_str) + if dt.tzinfo is None: + # Set timezone if missing — change to timezone.utc if you prefer UTC + now = datetime.datetime.now(conf.tz) + dt = dt.replace(tzinfo=now.astimezone().tzinfo) + return dt.astimezone().isoformat() + except Exception: + return "invalid" + +def format_date_diff(date1, date2): + """ + Return difference between two datetimes as 'Xd HH:MM'. + Uses app timezone if datetime is naive. + date2 can be None (uses now). 
+ """ + # Get timezone from settings + tz_name = get_setting_value("TIMEZONE") or "UTC" + tz = pytz.timezone(tz_name) + + def parse_dt(dt): + if dt is None: + return datetime.datetime.now(tz) + if isinstance(dt, str): + try: + dt_parsed = email.utils.parsedate_to_datetime(dt) + except Exception: + # fallback: parse ISO string + dt_parsed = datetime.datetime.fromisoformat(dt) + # convert naive GMT/UTC to app timezone + if dt_parsed.tzinfo is None: + dt_parsed = tz.localize(dt_parsed) + else: + dt_parsed = dt_parsed.astimezone(tz) + return dt_parsed + return dt if dt.tzinfo else tz.localize(dt) + + dt1 = parse_dt(date1) + dt2 = parse_dt(date2) + + delta = dt2 - dt1 + total_minutes = int(delta.total_seconds() // 60) + days, rem_minutes = divmod(total_minutes, 1440) # 1440 mins in a day + hours, minutes = divmod(rem_minutes, 60) + + return { + "text": f"{days}d {hours:02}:{minutes:02}", + "days": days, + "hours": hours, + "minutes": minutes, + "total_minutes": total_minutes + } \ No newline at end of file diff --git a/test/api_endpoints/test_dbquery_endpoints.py b/test/api_endpoints/test_dbquery_endpoints.py index a9f663ad..22ed05f2 100644 --- a/test/api_endpoints/test_dbquery_endpoints.py +++ b/test/api_endpoints/test_dbquery_endpoints.py @@ -6,7 +6,8 @@ import pytest INSTALL_PATH = "/app" sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"]) -from helper import get_setting_value, timeNowDB +from helper import get_setting_value +from utils.datetime_utils import timeNowDB from api_server.api_server_start import app diff --git a/test/api_endpoints/test_events_endpoints.py b/test/api_endpoints/test_events_endpoints.py index b3060d00..57cc519e 100644 --- a/test/api_endpoints/test_events_endpoints.py +++ b/test/api_endpoints/test_events_endpoints.py @@ -10,7 +10,8 @@ from datetime import datetime, timedelta INSTALL_PATH = "/app" sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"]) -from helper import timeNowTZ, get_setting_value +from helper import get_setting_value +from utils.datetime_utils import timeNowTZ from api_server.api_server_start import app @pytest.fixture(scope="session") diff --git a/test/api_endpoints/test_sessions_endpoints.py b/test/api_endpoints/test_sessions_endpoints.py index 5529ab98..59db6fc4 100644 --- a/test/api_endpoints/test_sessions_endpoints.py +++ b/test/api_endpoints/test_sessions_endpoints.py @@ -10,7 +10,8 @@ from datetime import datetime, timedelta INSTALL_PATH = "/app" sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"]) -from helper import timeNowDB, timeNowTZ, get_setting_value +from helper import get_setting_value +from utils.datetime_utils import timeNowTZ, timeNowDB from api_server.api_server_start import app @pytest.fixture(scope="session") From 57096a92583601f02c6f6b04ca3a42b863c600c4 Mon Sep 17 00:00:00 2001 From: jokob-sk Date: Wed, 5 Nov 2025 16:13:28 +1100 Subject: [PATCH 13/31] FE: handling non-existent logs Signed-off-by: jokob-sk --- front/php/components/logs.php | 27 +++++++++++++------------- server/models/notification_instance.py | 6 ++++-- 2 files changed, 17 insertions(+), 16 deletions(-) diff --git a/front/php/components/logs.php b/front/php/components/logs.php index 53d9b6a1..af9fdc96 100755 --- a/front/php/components/logs.php +++ b/front/php/components/logs.php @@ -13,16 +13,22 @@ function renderLogArea($params) { $textAreaCssClass = isset($params['textAreaCssClass']) ? $params['textAreaCssClass'] : ''; $buttons = isset($params['buttons']) ? 
$params['buttons'] : []; $content = ""; + $fileSize = 0; - if (filesize($filePath) > 2000000) { - $content = file_get_contents($filePath, false, null, -2000000); + if (file_exists($filePath) && is_readable($filePath)) { + $fileSize = filesize($filePath); + if ($fileSize > 2000000) { + $content = file_get_contents($filePath, false, null, max(0, $fileSize - 2000000)); + } else { + $content = file_get_contents($filePath); + } } else { - $content = file_get_contents($filePath); + $content = "⚠️ File not found or not readable: $filePath"; } // Prepare the download button HTML if filePath starts with /app $downloadButtonHtml = ''; - if (strpos($filePath, '/app') === 0) { + if (strpos($filePath, '/app') === 0 && file_exists($filePath)) { $downloadButtonHtml = ' @@ -34,13 +40,7 @@ function renderLogArea($params) { // Prepare buttons HTML $buttonsHtml = ''; $totalButtons = count($buttons); - if ($totalButtons > 0) { - $colClass = 12 / $totalButtons; - // Use $colClass in your HTML generation or further logic - } else { - // Handle case where $buttons array is empty - $colClass = 12; - } + $colClass = $totalButtons > 0 ? (12 / $totalButtons) : 12; foreach ($buttons as $button) { $labelStringCode = isset($button['labelStringCode']) ? $button['labelStringCode'] : ''; @@ -52,8 +52,7 @@ function renderLogArea($params) { '; } - - // Render the log area HTML + // Render HTML $html = '
@@ -63,7 +62,7 @@ function renderLogArea($params) {
' . htmlspecialchars($filePath) . ' -
' . number_format((filesize($filePath) / 1000000), 2, ",", ".") . ' MB' +
' . number_format(($fileSize / 1000000), 2, ",", ".") . ' MB' . $downloadButtonHtml . '
diff --git a/server/models/notification_instance.py b/server/models/notification_instance.py index 8db9be43..02832d45 100755 --- a/server/models/notification_instance.py +++ b/server/models/notification_instance.py @@ -16,10 +16,12 @@ from const import applicationPath, logPath, apiPath, reportTemplatesPath from logger import mylog, Logger from helper import generate_mac_links, \ removeDuplicateNewLines, \ + timeNowDB, \ + timeNowTZ, \ write_file, \ - get_setting_value + get_setting_value, \ + get_timezone_offset from messaging.in_app import write_notification -from utils.datetime_utils import timeNowDB, get_timezone_offset # ----------------------------------------------------------------------------- From 286d5555d2689e93d7a03be77b9a44e14050d8e6 Mon Sep 17 00:00:00 2001 From: jokob-sk Date: Wed, 5 Nov 2025 16:14:03 +1100 Subject: [PATCH 14/31] BE: chore datetime_utils Signed-off-by: jokob-sk --- server/models/notification_instance.py | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/server/models/notification_instance.py b/server/models/notification_instance.py index 02832d45..3dac2b49 100755 --- a/server/models/notification_instance.py +++ b/server/models/notification_instance.py @@ -16,12 +16,10 @@ from const import applicationPath, logPath, apiPath, reportTemplatesPath from logger import mylog, Logger from helper import generate_mac_links, \ removeDuplicateNewLines, \ - timeNowDB, \ - timeNowTZ, \ write_file, \ - get_setting_value, \ - get_timezone_offset + get_setting_value from messaging.in_app import write_notification +from utils.datetime_utils import timeNowDB, get_timezone_offset # ----------------------------------------------------------------------------- @@ -276,7 +274,7 @@ class NotificationInstance: # Clear the Pending Email flag from all events and devices def clearPendingEmailFlag(self): - # Clean Pending Alert Events + # Clean Pending Alert Events self.db.sql.execute(""" UPDATE Devices SET devLastNotification = ? WHERE devMac IN ( From 1fd8d97d56f27663fafd0188f01f275dfbb3fde8 Mon Sep 17 00:00:00 2001 From: jokob-sk Date: Wed, 5 Nov 2025 16:42:42 +1100 Subject: [PATCH 15/31] BE: chore datetime_utils Signed-off-by: jokob-sk --- server/api_server/sessions_endpoint.py | 9 +++++---- server/utils/datetime_utils.py | 21 +++++++++------------ 2 files changed, 14 insertions(+), 16 deletions(-) diff --git a/server/api_server/sessions_endpoint.py b/server/api_server/sessions_endpoint.py index 113a8250..147dbbe3 100755 --- a/server/api_server/sessions_endpoint.py +++ b/server/api_server/sessions_endpoint.py @@ -208,6 +208,7 @@ def get_device_sessions(mac, period): cur.execute(sql, (mac,)) rows = cur.fetchall() conn.close() + tz_name = get_setting_value("TIMEZONE") or "UTC" table_data = {"data": []} @@ -230,9 +231,9 @@ def get_device_sessions(mac, period): if row["ses_EventTypeConnection"] in ("", None) or row["ses_EventTypeDisconnection"] in ("", None): dur = "..." 
elif row["ses_StillConnected"]: - dur = format_date_diff(row["ses_DateTimeConnection"], None)["text"] + dur = format_date_diff(row["ses_DateTimeConnection"], None, tz_name)["text"] else: - dur = format_date_diff(row["ses_DateTimeConnection"], row["ses_DateTimeDisconnection"])["text"] + dur = format_date_diff(row["ses_DateTimeConnection"], row["ses_DateTimeDisconnection"], tz_name)["text"] # Additional Info info = row["ses_AdditionalInfo"] @@ -350,11 +351,11 @@ def get_session_events(event_type, period_date): if event_type in ("sessions", "missing"): # Duration if row[5] and row[6]: - delta = format_date_diff(row[5], row[6]) + delta = format_date_diff(row[5], row[6], tz_name) row[7] = delta["text"] row[8] = int(delta["total_minutes"] * 60) # seconds elif row[12] == 1: - delta = format_date_diff(row[5], None) + delta = format_date_diff(row[5], None, tz_name) row[7] = delta["text"] row[8] = int(delta["total_minutes"] * 60) # seconds else: diff --git a/server/utils/datetime_utils.py b/server/utils/datetime_utils.py index b8f7d1dc..a74234b4 100644 --- a/server/utils/datetime_utils.py +++ b/server/utils/datetime_utils.py @@ -4,19 +4,17 @@ import os import pathlib import sys from datetime import datetime +import datetime import pytz from pytz import timezone -import datetime from typing import Union +from zoneinfo import ZoneInfo +import email.utils # Register NetAlertX directories INSTALL_PATH="/app" sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"]) - -# Register NetAlertX directories -INSTALL_PATH="/app" - import conf from const import * @@ -115,17 +113,16 @@ def format_date(date_str: str) -> str: now = datetime.datetime.now(conf.tz) dt = dt.replace(tzinfo=now.astimezone().tzinfo) return dt.astimezone().isoformat() - except Exception: + except (ValueError, AttributeError, TypeError): return "invalid" -def format_date_diff(date1, date2): +def format_date_diff(date1, date2, tz_name): """ Return difference between two datetimes as 'Xd HH:MM'. Uses app timezone if datetime is naive. date2 can be None (uses now). 
""" - # Get timezone from settings - tz_name = get_setting_value("TIMEZONE") or "UTC" + # Get timezone from settings tz = pytz.timezone(tz_name) def parse_dt(dt): @@ -134,9 +131,9 @@ def format_date_diff(date1, date2): if isinstance(dt, str): try: dt_parsed = email.utils.parsedate_to_datetime(dt) - except Exception: - # fallback: parse ISO string - dt_parsed = datetime.datetime.fromisoformat(dt) + except (ValueError, TypeError): + # fallback: parse ISO string + dt_parsed = datetime.datetime.fromisoformat(dt) # convert naive GMT/UTC to app timezone if dt_parsed.tzinfo is None: dt_parsed = tz.localize(dt_parsed) From 85aa04c490c04ef9ece6ce61d8b590fc23225091 Mon Sep 17 00:00:00 2001 From: jokob-sk Date: Thu, 6 Nov 2025 08:14:00 +1100 Subject: [PATCH 16/31] TEST: fix Signed-off-by: jokob-sk --- server/api_server/sessions_endpoint.py | 1 + 1 file changed, 1 insertion(+) diff --git a/server/api_server/sessions_endpoint.py b/server/api_server/sessions_endpoint.py index 147dbbe3..1074da1a 100755 --- a/server/api_server/sessions_endpoint.py +++ b/server/api_server/sessions_endpoint.py @@ -270,6 +270,7 @@ def get_session_events(event_type, period_date): conn = get_temp_db_connection() conn.row_factory = sqlite3.Row cur = conn.cursor() + tz_name = get_setting_value("TIMEZONE") or "UTC" # Base SQLs sql_events = f""" From 6e745fc6d135d7b6a0aae558258fa43202c15996 Mon Sep 17 00:00:00 2001 From: jokob-sk Date: Thu, 6 Nov 2025 08:14:13 +1100 Subject: [PATCH 17/31] DOCS: fix Signed-off-by: jokob-sk --- docs/NOTIFICATIONS.md | 12 +++++------- 1 file changed, 5 insertions(+), 7 deletions(-) diff --git a/docs/NOTIFICATIONS.md b/docs/NOTIFICATIONS.md index 3255820a..cd067d59 100755 --- a/docs/NOTIFICATIONS.md +++ b/docs/NOTIFICATIONS.md @@ -44,8 +44,11 @@ In Notification Processing settings, you can specify blanket rules. These allow 1. Notify on (`NTFPRCS_INCLUDED_SECTIONS`) allows you to specify which events trigger notifications. Usual setups will have `new_devices`, `down_devices`, and possibly `down_reconnected` set. Including `plugin` (dependenton the Plugin `_WATCH` and `_REPORT_ON` settings) and `events` (dependent on the on-device **Alert Events** setting) might be too noisy for most setups. More info in the [NTFPRCS plugin](https://github.com/jokob-sk/NetAlertX/blob/main/front/plugins/notification_processing/README.md) on what events these selections include. 2. Alert down after (`NTFPRCS_alert_down_time`) is useful if you want to wait for some time before the system sends out a down notification for a device. This is related to the on-device **Alert down** setting and only devices with this checked will trigger a down notification. -3. A filter to allow you to set device-specific exceptions to New devices being added to the app. -4. A filter to allow you to set device-specific exceptions to generated Events. + +You can filter out unwanted notifications globally. This could be because of a misbehaving device (GoogleNest/GoogleHub (See also [ARPSAN docs and the `--exclude-broadcast` flag](https://github.com/jokob-sk/NetAlertX/tree/main/front/plugins/arp_scan#ip-flipping-on-google-nest-devices))) which flips between IP addresses, or because you want to ignore new device notifications of a certain pattern. + +1. Events Filter (`NTFPRCS_event_condition`) - Filter out Events from notifications. +2. New Devices Filter (`NTFPRCS_new_dev_condition`) - Filter out New Devices from notifications, but log and keep a new device in the system. 
## Ignoring devices 💻 @@ -56,9 +59,4 @@ You can completely ignore detected devices globally. This could be because your 1. Ignored MACs (`NEWDEV_ignored_MACs`) - List of MACs to ignore. 2. Ignored IPs (`NEWDEV_ignored_IPs`) - List of IPs to ignore. -## Ignoring notifications 🔕 -You can filter out unwanted notifications globally. This could be because of a misbehaving device (GoogleNest/GoogleHub (See also [ARPSAN docs and the `--exclude-broadcast` flag](https://github.com/jokob-sk/NetAlertX/tree/main/front/plugins/arp_scan#ip-flipping-on-google-nest-devices))) which flips between IP addresses, or because you want to ignore new device notifications of a certain pattern. - -1. Events Filter (`NTFPRCS_event_condition`) - filter out Events from notifications. -2. New Devices Filter (`NTFPRCS_new_dev_condition`) - filter out New Devices from notifications, but log and keep a new device in the system. \ No newline at end of file From 6374219e05eb381d93a44584d8c47639fa02cbc1 Mon Sep 17 00:00:00 2001 From: jokob-sk Date: Thu, 6 Nov 2025 20:47:28 +1100 Subject: [PATCH 18/31] BE: github actions + dev version Signed-off-by: jokob-sk --- .github/workflows/docker_dev.yml | 19 +++++++++++++++---- server/helper.py | 5 ++--- 2 files changed, 17 insertions(+), 7 deletions(-) diff --git a/.github/workflows/docker_dev.yml b/.github/workflows/docker_dev.yml index 08c4bdba..97da1434 100755 --- a/.github/workflows/docker_dev.yml +++ b/.github/workflows/docker_dev.yml @@ -10,7 +10,7 @@ on: branches: - next_release -jobs: +jobs: docker_dev: runs-on: ubuntu-latest timeout-minutes: 30 @@ -19,7 +19,8 @@ jobs: packages: write if: > contains(github.event.head_commit.message, 'PUSHPROD') != 'True' && - github.repository == 'jokob-sk/NetAlertX' + github.repository == 'jokob-sk/NetAlertX' + steps: - name: Checkout uses: actions/checkout@v4 @@ -30,16 +31,25 @@ jobs: - name: Set up Docker Buildx uses: docker/setup-buildx-action@v3 + # --- Generate timestamped dev version + - name: Generate timestamp version + id: timestamp + run: | + ts=$(date -u +'%Y%m%d-%H%M%S') + echo "version=dev-${ts}" >> $GITHUB_OUTPUT + echo "Generated version: dev-${ts}" + - name: Set up dynamic build ARGs - id: getargs + id: getargs run: echo "version=$(cat ./stable/VERSION)" >> $GITHUB_OUTPUT - name: Get release version id: get_version run: echo "version=Dev" >> $GITHUB_OUTPUT + # --- Write the timestamped version to .VERSION file - name: Create .VERSION file - run: echo "${{ steps.get_version.outputs.version }}" >> .VERSION + run: echo "${{ steps.timestamp.outputs.version }}" > .VERSION - name: Docker meta id: meta @@ -50,6 +60,7 @@ jobs: jokobsk/netalertx-dev tags: | type=raw,value=latest + type=raw,value=${{ steps.timestamp.outputs.version }} type=ref,event=branch type=ref,event=pr type=semver,pattern={{version}} diff --git a/server/helper.py b/server/helper.py index 7a89c270..d33767af 100755 --- a/server/helper.py +++ b/server/helper.py @@ -666,7 +666,6 @@ def getBuildTimeStamp(): def checkNewVersion(): mylog('debug', [f"[Version check] Checking if new version available"]) - newVersion = False buildTimestamp = getBuildTimeStamp() try: @@ -693,13 +692,13 @@ def checkNewVersion(): if releaseTimestamp > buildTimestamp + 600: mylog('none', ["[Version check] New version of the container available!"]) - newVersion = True + return True else: mylog('none', ["[Version check] Running the latest version."]) else: mylog('minimal', ["[Version check] ⚠ ERROR: Received unexpected response from GitHub."]) - return newVersion + return False 
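For reference, the `dev-<UTC timestamp>` tag that the docker_dev.yml change above writes to `.VERSION` follows the `%Y%m%d-%H%M%S` pattern. A small hypothetical helper (not part of the codebase, names are illustrative) showing how such a string maps back to a datetime for comparisons like the one in `checkNewVersion`:

```python
from datetime import datetime, timezone

def parse_dev_version(version: str):
    # Hypothetical helper: "dev-20251106-094733" -> aware UTC datetime, else None.
    # Assumes the "dev-" prefix and timestamp format produced by docker_dev.yml above.
    if not version.startswith("dev-"):
        return None
    try:
        return datetime.strptime(version[len("dev-"):], "%Y%m%d-%H%M%S").replace(tzinfo=timezone.utc)
    except ValueError:
        return None
```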
#------------------------------------------------------------------------------- class noti_obj: From 30269a6a73a1b9ab1c9fe7513b8163d68fab6f30 Mon Sep 17 00:00:00 2001 From: jokob-sk Date: Thu, 6 Nov 2025 20:47:54 +1100 Subject: [PATCH 19/31] DOCS: link fix Signed-off-by: jokob-sk --- docs/INSTALLATION.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/INSTALLATION.md b/docs/INSTALLATION.md index 8cb13f86..9f8eb115 100755 --- a/docs/INSTALLATION.md +++ b/docs/INSTALLATION.md @@ -4,7 +4,7 @@ NetAlertX can be installed several ways. The best supported option is Docker, followed by a supervised Home Assistant instance, as an Unraid app, and lastly, on bare metal. -- [[Installation] Docker (recommended)](https://github.com/jokob-sk/NetAlertX/blob/main/dockerfiles/README.md) +- [[Installation] Docker (recommended)](https://github.com/jokob-sk/NetAlertX/blob/main/docs/DOCKER_INSTALLATION.md) - [[Installation] Home Assistant](https://github.com/alexbelgium/hassio-addons/tree/master/netalertx) - [[Installation] Unraid App](https://unraid.net/community/apps) - [[Installation] Bare metal (experimental - looking for maintainers)](https://github.com/jokob-sk/NetAlertX/blob/main/docs/HW_INSTALL.md) From 44d65cca9679c34ea6662619dce566d597726090 Mon Sep 17 00:00:00 2001 From: jokob-sk Date: Thu, 6 Nov 2025 21:12:13 +1100 Subject: [PATCH 20/31] BE: version file Signed-off-by: jokob-sk --- Dockerfile | 3 +++ README.md | 4 ++-- 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/Dockerfile b/Dockerfile index 558d173e..5cfdf73e 100755 --- a/Dockerfile +++ b/Dockerfile @@ -130,6 +130,9 @@ RUN install -d -o ${NETALERTX_USER} -g ${NETALERTX_GROUP} -m 755 ${NETALERTX_API sh -c "find ${NETALERTX_APP} -type f \( -name '*.sh' -o -name 'speedtest-cli' \) \ -exec chmod 750 {} \;" +# Copy version information into the image +COPY --chown=${NETALERTX_USER}:${NETALERTX_GROUP} .VERSION ${NETALERTX_APP}/.VERSION + # Copy the virtualenv from the builder stage COPY --from=builder --chown=20212:20212 ${VIRTUAL_ENV} ${VIRTUAL_ENV} diff --git a/README.md b/README.md index dec38950..a1246bb0 100755 --- a/README.md +++ b/README.md @@ -61,7 +61,7 @@ For Home Assistant users: [Click here to add NetAlertX](https://my.home-assistan For other install methods, check the [installation docs](#-documentation) -| [📑 Docker guide](https://github.com/jokob-sk/NetAlertX/blob/main/dockerfiles/README.md) | [🚀 Releases](https://github.com/jokob-sk/NetAlertX/releases) | [📚 Docs](https://jokob-sk.github.io/NetAlertX/) | [🔌 Plugins](https://github.com/jokob-sk/NetAlertX/blob/main/docs/PLUGINS.md) | [🤖 Ask AI](https://gurubase.io/g/netalertx) +| [📑 Docker guide](https://github.com/jokob-sk/NetAlertX/blob/main/docs/DOCKER_INSTALLATION.md) | [🚀 Releases](https://github.com/jokob-sk/NetAlertX/releases) | [📚 Docs](https://jokob-sk.github.io/NetAlertX/) | [🔌 Plugins](https://github.com/jokob-sk/NetAlertX/blob/main/docs/PLUGINS.md) | [🤖 Ask AI](https://gurubase.io/g/netalertx) |----------------------| ----------------------| ----------------------| ----------------------| ----------------------| ![showcase][showcase] @@ -103,7 +103,7 @@ The [workflows module](https://github.com/jokob-sk/NetAlertX/blob/main/docs/WORK Supported browsers: Chrome, Firefox -- [[Installation] Docker](https://github.com/jokob-sk/NetAlertX/blob/main/dockerfiles/README.md) +- [[Installation] Docker](https://github.com/jokob-sk/NetAlertX/blob/main/docs/DOCKER_INSTALLATION.md) - [[Installation] Home 
Assistant](https://github.com/alexbelgium/hassio-addons/tree/master/netalertx) - [[Installation] Bare metal](https://github.com/jokob-sk/NetAlertX/blob/main/docs/HW_INSTALL.md) - [[Installation] Unraid App](https://unraid.net/community/apps) From 5c06dc68c6fd9a535d75acdfe28e0a6a4d981468 Mon Sep 17 00:00:00 2001 From: jokob-sk Date: Thu, 6 Nov 2025 21:20:28 +1100 Subject: [PATCH 21/31] DOCS: link fix Signed-off-by: jokob-sk --- docs/BACKUPS.md | 2 +- docs/COMMON_ISSUES.md | 2 +- docs/README.md | 2 +- docs/WEB_UI_PORT_DEBUG.md | 2 +- docs/index.md | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/docs/BACKUPS.md b/docs/BACKUPS.md index 9e2fd679..6db08887 100755 --- a/docs/BACKUPS.md +++ b/docs/BACKUPS.md @@ -83,7 +83,7 @@ You can also download the `app.conf` and `devices.csv` files from the **Maintena ### 📥 How to Restore -Map these files into your container as described in the [Setup documentation](https://github.com/jokob-sk/NetAlertX/blob/main/dockerfiles/README.md#docker-paths). +Map these files into your container as described in the [Setup documentation](./DOCKER_INSTALLATION.md). --- diff --git a/docs/COMMON_ISSUES.md b/docs/COMMON_ISSUES.md index 073fe573..7a55e0f2 100755 --- a/docs/COMMON_ISSUES.md +++ b/docs/COMMON_ISSUES.md @@ -16,7 +16,7 @@ Make sure you [File permissions](./FILE_PERMISSIONS.md) are set correctly. * If facing issues (AJAX errors, can't write to DB, empty screen, etc,) make sure permissions are set correctly, and check the logs under `/app/log`. * To solve permission issues you can try setting the owner and group of the `app.db` by executing the following on the host system: `docker exec netalertx chown -R www-data:www-data /app/db/app.db`. -* If still facing issues, try to map the app.db file (⚠ not folder) to `:/app/db/app.db` (see [docker-compose Examples](https://github.com/jokob-sk/NetAlertX/blob/main/dockerfiles/README.md#-docker-composeyml-examples) for details) +* If still facing issues, try to map the app.db file (⚠ not folder) to `:/app/db/app.db` (see [docker-compose Examples](./DOCKER_COMPOSE.md) for details) ### Container restarts / crashes diff --git a/docs/README.md b/docs/README.md index d8fbe9b5..9e3bc8e5 100755 --- a/docs/README.md +++ b/docs/README.md @@ -13,7 +13,7 @@ There is also an in-app Help / FAQ section that should be answering frequently a #### 🐳 Docker (Fully supported) -- The main installation method is as a [docker container - follow these instructions here](https://github.com/jokob-sk/NetAlertX/blob/main/dockerfiles/README.md). +- The main installation method is as a [docker container - follow these instructions here](./DOCKER_INSTALLATION.md). #### 💻 Bare-metal / On-server (Experimental/community supported 🧪) diff --git a/docs/WEB_UI_PORT_DEBUG.md b/docs/WEB_UI_PORT_DEBUG.md index 5db03cd8..263fa685 100755 --- a/docs/WEB_UI_PORT_DEBUG.md +++ b/docs/WEB_UI_PORT_DEBUG.md @@ -15,7 +15,7 @@ The **Web UI** is served by an **nginx** server, while the **API backend** runs APP_CONF_OVERRIDE={"GRAPHQL_PORT":"20212"} ``` -For more information, check the [Docker installation guide](https://github.com/jokob-sk/NetAlertX/blob/main/dockerfiles/README.md). +For more information, check the [Docker installation guide](./DOCKER_INSTALLATION.md). 
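Following up on the `APP_CONF_OVERRIDE` example shown earlier in this file: the value is a JSON object mapping setting names to values. A minimal sketch of parsing such a value (illustrative only, not the app's actual loading code):

```python
import json
import os

# Illustrative only: read the JSON override map, e.g. {"GRAPHQL_PORT": "20212"},
# and fall back to an empty dict when the variable is unset or malformed.
try:
    overrides = json.loads(os.environ.get("APP_CONF_OVERRIDE", "{}"))
except json.JSONDecodeError:
    overrides = {}

graphql_port = int(overrides.get("GRAPHQL_PORT", 20212))
```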
## Possible issues and troubleshooting diff --git a/docs/index.md b/docs/index.md index 62d6aad9..a1cf3c54 100755 --- a/docs/index.md +++ b/docs/index.md @@ -21,7 +21,7 @@ The app can be installed different ways, with the best support of the docker-bas NetAlertX is fully supported in Docker environments, allowing for easy setup and configuration. Follow the official guide to get started: -- [Docker Installation Guide](https://github.com/jokob-sk/NetAlertX/blob/main/dockerfiles/README.md) +- [Docker Installation Guide](./DOCKER_INSTALLATION.md) This guide will take you through the process of setting up NetAlertX using Docker Compose or standalone Docker commands. From 58de31d0eab0764f6b66e114ca9780061cde631d Mon Sep 17 00:00:00 2001 From: jokob-sk Date: Thu, 6 Nov 2025 21:35:05 +1100 Subject: [PATCH 22/31] BE: prod workflow + docs Signed-off-by: jokob-sk --- .github/workflows/docker_prod.yml | 29 +++++++++++++---------------- README.md | 2 +- 2 files changed, 14 insertions(+), 17 deletions(-) diff --git a/.github/workflows/docker_prod.yml b/.github/workflows/docker_prod.yml index 93e2ae1c..548b05dc 100755 --- a/.github/workflows/docker_prod.yml +++ b/.github/workflows/docker_prod.yml @@ -6,7 +6,6 @@ # GitHub recommends pinning actions to a commit SHA. # To get a newer version, you will need to update the SHA. # You can also reference a tag or branch, but the action may change without warning. - name: Publish Docker image on: @@ -14,6 +13,7 @@ on: types: [published] tags: - '*.[1-9]+[0-9]?.[1-9]+*' + jobs: docker: runs-on: ubuntu-latest @@ -21,6 +21,7 @@ jobs: permissions: contents: read packages: write + steps: - name: Checkout uses: actions/checkout@v3 @@ -31,42 +32,39 @@ jobs: - name: Set up Docker Buildx uses: docker/setup-buildx-action@v3 - - name: Set up dynamic build ARGs - id: getargs - run: echo "version=$(cat ./stable/VERSION)" >> $GITHUB_OUTPUT - + # --- Get release version from tag - name: Get release version id: get_version - run: echo "::set-output name=version::${GITHUB_REF#refs/tags/}" + run: echo "version=${GITHUB_REF#refs/tags/}" >> $GITHUB_OUTPUT + # --- Write version to .VERSION file - name: Create .VERSION file - run: echo "${{ steps.get_version.outputs.version }}" >> .VERSION + run: echo "${{ steps.get_version.outputs.version }}" > .VERSION + # --- Generate Docker metadata and tags - name: Docker meta id: meta uses: docker/metadata-action@v4 with: - # list of Docker images to use as base name for tags images: | ghcr.io/jokob-sk/netalertx - jokobsk/netalertx - # generate Docker tags based on the following events/attributes + jokobsk/netalertx tags: | - type=semver,pattern={{version}},value=${{ inputs.version }} - type=semver,pattern={{major}}.{{minor}},value=${{ inputs.version }} - type=semver,pattern={{major}},value=${{ inputs.version }} + type=semver,pattern={{version}},value=${{ steps.get_version.outputs.version }} + type=semver,pattern={{major}}.{{minor}},value=${{ steps.get_version.outputs.version }} + type=semver,pattern={{major}},value=${{ steps.get_version.outputs.version }} type=ref,event=branch,suffix=-{{ sha }} type=ref,event=pr type=raw,value=latest,enable=${{ github.ref == 'refs/heads/main' || startsWith(github.ref, 'refs/tags/') }} - - name: Log in to Github Container registry + - name: Log in to Github Container Registry (GHCR) uses: docker/login-action@v3 with: registry: ghcr.io username: jokob-sk password: ${{ secrets.GITHUB_TOKEN }} - - name: Login to DockerHub + - name: Log in to DockerHub if: github.event_name != 'pull_request' uses: 
docker/login-action@v3 with: @@ -81,6 +79,5 @@ jobs: push: ${{ github.event_name != 'pull_request' }} tags: ${{ steps.meta.outputs.tags }} labels: ${{ steps.meta.outputs.labels }} - # # ⚠ disable cache if build is failing to download debian packages # cache-from: type=registry,ref=ghcr.io/jokob-sk/netalertx:buildcache # cache-to: type=registry,ref=ghcr.io/jokob-sk/netalertx:buildcache,mode=max diff --git a/README.md b/README.md index a1246bb0..d923ba1d 100755 --- a/README.md +++ b/README.md @@ -6,7 +6,7 @@ # NetAlertX - Network, presence scanner and alert framework -Get visibility of what's going on on your WIFI/LAN network and enable presence detection of important devices. Schedule scans for devices, port changes and get alerts if unknown devices or changes are found. Write your own [Plugin](https://github.com/jokob-sk/NetAlertX/tree/main/docs/PLUGINS.md#readme) with auto-generated UI and in-build notification system. Build out and easily maintain your network source of truth (NSoT). +Get visibility of what's going on on your WIFI/LAN network and enable presence detection of important devices. Schedule scans for devices, port changes and get alerts if unknown devices or changes are found. Write your own [Plugin](https://github.com/jokob-sk/NetAlertX/tree/main/docs/PLUGINS.md#readme) with auto-generated UI and in-build notification system. Build out and easily maintain your network source of truth (NSoT) and device inventory. ## 📋 Table of Contents From cbe5a4a7327c0b96bfdd2ea9de9f678f96f59a2a Mon Sep 17 00:00:00 2001 From: jokob-sk Date: Thu, 6 Nov 2025 22:08:19 +1100 Subject: [PATCH 23/31] BE: version added to app_state Signed-off-by: jokob-sk --- server/app_state.py | 16 ++++++++++++---- server/helper.py | 41 +++++++++++++++++++++++------------------ server/initialise.py | 18 ++++++++---------- 3 files changed, 43 insertions(+), 32 deletions(-) diff --git a/server/app_state.py b/server/app_state.py index ec2ffc1c..c6eee593 100755 --- a/server/app_state.py +++ b/server/app_state.py @@ -39,7 +39,8 @@ class app_state_class: showSpinner=None, graphQLServerStarted=0, processScan=False, - pluginsStates=None): + pluginsStates=None, + appVersion=None): """ Initialize the application state, optionally overwriting previous values. @@ -54,6 +55,7 @@ class app_state_class: graphQLServerStarted (int, optional): Initial GraphQL server timestamp. processScan (bool, optional): Initial processScan flag. pluginsStates (dict, optional): Initial plugin states to merge with previous state. + appVersion (str, optional): Application version. 
""" # json file containing the state to communicate with the frontend stateFile = apiPath + 'app_state.json' @@ -80,6 +82,7 @@ class app_state_class: self.graphQLServerStarted = previousState.get("graphQLServerStarted", 0) self.currentState = previousState.get("currentState", "Init") self.pluginsStates = previousState.get("pluginsStates", {}) + self.appVersion = previousState.get("appVersion", "") else: # init first time values self.settingsSaved = 0 self.settingsImported = 0 @@ -90,6 +93,7 @@ class app_state_class: self.graphQLServerStarted = 0 self.currentState = "Init" self.pluginsStates = {} + self.appVersion = "" # Overwrite with provided parameters if supplied if settingsSaved is not None: @@ -118,7 +122,8 @@ class app_state_class: # Optionally ignore or add new plugin entries # To ignore new plugins, comment out the next line self.pluginsStates[plugin] = state - + if appVersion is not None: + self.appVersion = appVersion # check for new version every hour and if currently not running new version if self.isNewVersion is False and self.isNewVersionChecked + 3600 < int(timeNow().timestamp()): self.isNewVersion = checkNewVersion() @@ -154,7 +159,8 @@ def updateState(newState = None, showSpinner = None, graphQLServerStarted = None, processScan = None, - pluginsStates=None): + pluginsStates=None, + appVersion=None): """ Convenience method to create or update the app state. @@ -166,6 +172,7 @@ def updateState(newState = None, graphQLServerStarted (int, optional): Timestamp of GraphQL server start. processScan (bool, optional): Flag indicating if a scan is active. pluginsStates (dict, optional): Plugin state updates. + appVersion (str, optional): Application version. Returns: app_state_class: Updated state object. @@ -176,7 +183,8 @@ def updateState(newState = None, showSpinner, graphQLServerStarted, processScan, - pluginsStates) + pluginsStates, + appVersion) #------------------------------------------------------------------------------- diff --git a/server/helper.py b/server/helper.py index d33767af..8398705c 100755 --- a/server/helper.py +++ b/server/helper.py @@ -636,37 +636,42 @@ def collect_lang_strings(json, pref, stringSqlParams): #------------------------------------------------------------------------------- # Get the value from the buildtimestamp.txt and initialize it if missing -def getBuildTimeStamp(): +def getBuildTimeStampAndVersion(): """ - Retrieves the build timestamp from 'front/buildtimestamp.txt' within the - application directory. - - If the file does not exist, it is created and initialized with the value '0'. + Retrieves the build timestamp and version from files within the + application directory. Initializes them if missing. Returns: - int: The integer value of the build timestamp read from the file. - Returns 0 if the file is empty or just initialized. 
+ tuple: (int buildTimestamp, str version) """ - buildTimestamp = 0 - build_timestamp_path = os.path.join(applicationPath, 'front/buildtimestamp.txt') + files_defaults = [ + ('front/buildtimestamp.txt', '0'), + ('.VERSION', 'unknown') + ] - # Ensure file exists, initialize if missing - if not os.path.exists(build_timestamp_path): - with open(build_timestamp_path, 'w') as f: - f.write("0") + results = [] - # Now safely read the timestamp - with open(build_timestamp_path, 'r') as f: - buildTimestamp = int(f.read().strip() or 0) + for filename, default in files_defaults: + path = os.path.join(applicationPath, filename) + if not os.path.exists(path): + with open(path, 'w') as f: + f.write(default) + + with open(path, 'r') as f: + content = f.read().strip() or default + # Convert buildtimestamp to int, leave version as string + value = int(content) if filename.endswith('buildtimestamp.txt') else content + results.append(value) + + return tuple(results) - return buildTimestamp #------------------------------------------------------------------------------- def checkNewVersion(): mylog('debug', [f"[Version check] Checking if new version available"]) - buildTimestamp = getBuildTimeStamp() + buildTimestamp, _version = getBuildTimeStampAndVersion() try: response = requests.get( diff --git a/server/initialise.py b/server/initialise.py index 8f55476d..f0fb0237 100755 --- a/server/initialise.py +++ b/server/initialise.py @@ -12,7 +12,7 @@ import re # Register NetAlertX libraries import conf from const import fullConfPath, applicationPath, fullConfFolder, default_tz -from helper import getBuildTimeStamp, fixPermissions, collect_lang_strings, updateSubnets, isJsonObject, setting_value_to_python_type, get_setting_value, generate_random_string +from helper import getBuildTimeStampAndVersion, fixPermissions, collect_lang_strings, updateSubnets, isJsonObject, setting_value_to_python_type, get_setting_value, generate_random_string from utils.datetime_utils import timeNowDB from app_state import updateState from logger import mylog @@ -380,21 +380,19 @@ def importConfigs (pm, db, all_plugins): # Check if app was upgraded - buildTimestamp = getBuildTimeStamp() - cur_version = conf.VERSION + buildTimestamp, new_version = getBuildTimeStampAndVersion() + prev_version = conf.VERSION - mylog('debug', [f"[Config] buildTimestamp: '{buildTimestamp}'"]) - mylog('debug', [f"[Config] conf.VERSION : '{cur_version}'"]) + mylog('debug', [f"[Config] buildTimestamp | prev_version | .VERSION file: '{buildTimestamp}|{prev_version}|{new_version}'"]) - if str(cur_version) != str(buildTimestamp): + if str(prev_version) != str(new_version): mylog('none', ['[Config] App upgraded 🚀']) # ccd(key, default, config_dir, name, inputtype, options, group, events=None, desc="", setJsonMetadata=None, overrideTemplate=None, forceDefault=False) - ccd('VERSION', buildTimestamp , c_d, '_KEEP_', '_KEEP_', '_KEEP_', '_KEEP_', None, "_KEEP_", None, None, True) + ccd('VERSION', new_version , c_d, '_KEEP_', '_KEEP_', '_KEEP_', '_KEEP_', None, "_KEEP_", None, None, True) - write_notification(f'[Upgrade] : App upgraded 🚀 Please clear the cache:
  1. Click OK below
  2. Clear the browser cache (shift + browser refresh button)
  3. Clear app cache with the (reload) button in the header
  4. Go to Settings and click Save
Check out new features and what has changed in the
📓 release notes.', 'interrupt', timeNowDB()) - + write_notification(f'[Upgrade] : App upgraded from {prev_version} to {new_version} 🚀 Please clear the cache:
  1. Click OK below
  2. Clear the browser cache (shift + browser refresh button)
  3. Clear app cache with the (reload) button in the header
  4. Go to Settings and click Save
Check out new features and what has changed in the 📓 release notes.', 'interrupt', timeNowDB()) # ----------------- @@ -424,7 +422,7 @@ def importConfigs (pm, db, all_plugins): # settingsImported = None (timestamp), # showSpinner = False (1/0), # graphQLServerStarted = 1 (1/0)) - updateState("Config imported", conf.lastImportedConfFile, conf.lastImportedConfFile, False, 1) + updateState("Config imported", conf.lastImportedConfFile, conf.lastImportedConfFile, False, 1, None, None, new_version) msg = '[Config] Imported new settings config' mylog('minimal', msg) From 7822b11d51c82ba6f832511cb723928e6eaedd22 Mon Sep 17 00:00:00 2001 From: jokob-sk Date: Sat, 8 Nov 2025 14:15:45 +1100 Subject: [PATCH 24/31] BE: plugins changed data detection Signed-off-by: jokob-sk --- server/plugin.py | 8 +-- server/scan/device_handling.py | 126 +++++++++++++++++++-------------- server/utils/datetime_utils.py | 40 +++++++++++ 3 files changed, 118 insertions(+), 56 deletions(-) diff --git a/server/plugin.py b/server/plugin.py index a62ba584..81075a3f 100755 --- a/server/plugin.py +++ b/server/plugin.py @@ -28,7 +28,7 @@ class plugin_manager: self.db = db self.all_plugins = all_plugins self.plugin_states = {} - self.name_plugins_checked = None + self.plugin_checks = {} # object cache of settings and schedules for faster lookups self._cache = {} @@ -213,7 +213,7 @@ class plugin_manager: If plugin_name is provided, only calculates stats for that plugin. Structure per plugin: { - "lastChanged": str, + "lastDataChange": str, "totalObjects": int, "newObjects": int, "changedObjects": int, @@ -238,7 +238,7 @@ class plugin_manager: changed_objects = total_objects - new_objects plugin_states[plugin_name] = { - "lastChanged": last_changed or "", + "lastDataChange": last_changed or "", "totalObjects": total_objects or 0, "newObjects": new_objects or 0, "changedObjects": changed_objects or 0, @@ -261,7 +261,7 @@ class plugin_manager: new_objects = new_objects or 0 # ensure it's int changed_objects = total_objects - new_objects plugin_states[plugin] = { - "lastChanged": last_changed or "", + "lastDataChange": last_changed or "", "totalObjects": total_objects or 0, "newObjects": new_objects or 0, "changedObjects": changed_objects or 0, diff --git a/server/scan/device_handling.py b/server/scan/device_handling.py index 76aca0c5..612e888a 100755 --- a/server/scan/device_handling.py +++ b/server/scan/device_handling.py @@ -11,7 +11,7 @@ INSTALL_PATH="/app" sys.path.extend([f"{INSTALL_PATH}/server"]) from helper import get_setting_value, check_IP_format -from utils.datetime_utils import timeNowDB +from utils.datetime_utils import timeNowDB, normalizeTimeStamp from logger import mylog, Logger from const import vendorsPath, vendorsPathNewest, sql_generateGuid from models.device_instance import DeviceInstance @@ -519,64 +519,86 @@ def create_new_devices (db): mylog('debug','[New Devices] New Devices end') db.commitDB() +#------------------------------------------------------------------------------- +# Check if plugins data changed +def check_plugin_data_changed(pm, plugins_to_check): + """ + Checks whether any of the specified plugins have updated data since their + last recorded check time. + This function compares each plugin's `lastDataChange` timestamp from + `pm.plugin_states` with its corresponding `lastDataCheck` timestamp from + `pm.plugin_checks`. If a plugin's data has changed more recently than it + was last checked, it is flagged as changed. 
+ + Args: + pm (object): Plugin manager or state object containing: + - plugin_states (dict): Per-plugin metadata with "lastDataChange". + - plugin_checks (dict): Per-plugin last check timestamps. + plugins_to_check (list[str]): List of plugin names to validate. + + Returns: + bool: True if any plugin data has changed since last check, + otherwise False. + + Logging: + - Logs unexpected or invalid timestamps at level 'none'. + - Logs when no changes are detected at level 'debug'. + - Logs each changed plugin at level 'debug'. + """ + + plugins_changed = [] + + for plugin_name in plugins_to_check: + + last_data_change = pm.plugin_states.get(plugin_name, {}).get("lastDataChange") + last_data_check = pm.plugin_checks.get(plugin_name, "") + + if not last_data_change: + continue + + # Normalize and validate last_changed timestamp + last_changed_ts = normalizeTimeStamp(str(last_data_change)) + + if last_changed_ts == None: + mylog('none', f'[check_plugin_data_changed] Unexpected last_data_change timestamp for {plugin_name}: {last_data_change}') + + # Normalize and validate last_data_check timestamp + last_data_check_ts = normalizeTimeStamp(str(last_data_check)) + + if last_data_check_ts == None: + mylog('none', f'[check_plugin_data_changed] Unexpected last_data_check timestamp for {plugin_name}: {last_data_check}') + + # Track which plugins have newer state than last_checked + if last_data_check_ts is None or last_changed_ts is None or last_changed_ts > last_data_check_ts: + mylog('debug', f'[check_plugin_data_changed] plugin_name changed last_changed_ts | last_data_check_ts: {last_changed_ts} | {last_data_check_ts}') + plugins_changed.append(plugin_name) + + # Skip if no plugin state changed since last check + if len(plugins_changed) == 0: + mylog('debug', f'[check_plugin_data_changed] No relevant plugin changes since last check for {plugins_to_check}') + return False + + # Continue if changes detected + for p in plugins_changed: + mylog('debug', f'[check_plugin_data_changed] {p} changed (last_data_change|last_data_check): ({pm.plugin_states.get(p, {}).get("lastDataChange")}|{pm.plugin_checks.get(p)})') + + return True #------------------------------------------------------------------------------- def update_devices_names(pm): - sql = pm.db.sql - resolver = NameResolver(pm.db) - device_handler = DeviceInstance(pm.db) # --- Short-circuit if no name-resolution plugin has changed --- - name_plugins = ["DIGSCAN", "NSLOOKUP", "NBTSCAN", "AVAHISCAN"] - - # Retrieve last time name resolution was checked - last_checked = pm.name_plugins_checked - - # Normalize last_checked to datetime if it's a string - if isinstance(last_checked, str): - try: - last_checked = parser.parse(last_checked) - except (ValueError, TypeError) as e: - mylog('none', f'[Update Device Name] Could not parse last_checked timestamp: {last_checked!r} ({e})') - last_checked = None - elif not isinstance(last_checked, datetime.datetime): - last_checked = None - - # Collect and normalize valid state update timestamps for name-related plugins - state_times = [] - - for p in name_plugins: - state_updated = pm.plugin_states.get(p, {}).get("stateUpdated") - if not state_updated: - continue - - # Normalize and validate timestamp - if isinstance(state_updated, datetime.datetime): - state_times.append(state_updated) - elif isinstance(state_updated, str): - try: - state_times.append(parser.parse(state_updated)) - except Exception as e: - mylog('none', f'[Update Device Name] Failed to parse timestamp for {p}: {state_updated!r} ({e})') - else: - 
mylog('none', f'[Update Device Name] Unexpected timestamp type for {p}: {type(state_updated)}') - - # Determine the latest valid timestamp safely (after collecting all timestamps) - latest_state = None - try: - if state_times: - latest_state = max(state_times) - except (ValueError, TypeError) as e: - mylog('none', f'[Update Device Name] Failed to determine latest timestamp, using fallback ({e})') - latest_state = state_times[-1] if state_times else None - - - # Skip if no plugin state changed since last check - if last_checked and latest_state and latest_state <= last_checked: - mylog('debug', '[Update Device Name] No relevant name plugin changes since last check — skipping update.') + if check_plugin_data_changed(pm, ["DIGSCAN", "NSLOOKUP", "NBTSCAN", "AVAHISCAN"]) == False: + mylog('debug', '[Update Device Name] No relevant plugin changes since last check.') return + mylog('debug', '[Update Device Name] Check if unknown devices present to resolve names for or if REFRESH_FQDN enabled.') + + sql = pm.db.sql + resolver = NameResolver(pm.db) + device_handler = DeviceInstance(pm.db) + nameNotFound = "(name not found)" # Define resolution strategies in priority order @@ -674,7 +696,7 @@ def update_devices_names(pm): # --- Step 3: Log last checked time --- # After resolving names, update last checked - pm.name_plugins_checked = timeNowDB() + pm.plugin_checks = {"DIGSCAN": timeNowDB(), "AVAHISCAN": timeNowDB(), "NSLOOKUP": timeNowDB(), "NBTSCAN": timeNowDB() } #------------------------------------------------------------------------------- # Updates devPresentLastScan for parent devices based on the presence of their NICs diff --git a/server/utils/datetime_utils.py b/server/utils/datetime_utils.py index a74234b4..16bba58c 100644 --- a/server/utils/datetime_utils.py +++ b/server/utils/datetime_utils.py @@ -65,6 +65,46 @@ def timeNowDB(local=True): # Date and time methods #------------------------------------------------------------------------------- +def normalizeTimeStamp(inputTimeStamp): + """ + Normalize various timestamp formats into a datetime.datetime object. 
+ + Supports: + - SQLite-style 'YYYY-MM-DD HH:MM:SS' + - ISO 8601 'YYYY-MM-DDTHH:MM:SSZ' + - Epoch timestamps (int or float) + - datetime.datetime objects (returned as-is) + - Empty or invalid values (returns None) + """ + if inputTimeStamp is None: + return None + + # Already a datetime + if isinstance(inputTimeStamp, datetime.datetime): + return inputTimeStamp + + # Epoch timestamp (integer or float) + if isinstance(inputTimeStamp, (int, float)): + try: + return datetime.datetime.fromtimestamp(inputTimeStamp) + except (OSError, OverflowError, ValueError): + return None + + # String formats (SQLite / ISO8601) + if isinstance(inputTimeStamp, str): + inputTimeStamp = inputTimeStamp.strip() + if not inputTimeStamp: + return None + try: + # Handles SQLite and ISO8601 automatically + return parser.parse(inputTimeStamp) + except Exception: + return None + + # Unrecognized type + return None + + # ------------------------------------------------------------------------------------------- def format_date_iso(date1: str) -> str: """Return ISO 8601 string for a date or None if empty""" From 552d2a82869b7f7c777d76c73e70b20b44a867d7 Mon Sep 17 00:00:00 2001 From: jokob-sk Date: Sat, 8 Nov 2025 14:16:17 +1100 Subject: [PATCH 25/31] DOCS: plugin docs Signed-off-by: jokob-sk --- docs/PLUGINS_DEV_CONFIG.md | 200 +++++++++++++++++++++++-------------- 1 file changed, 123 insertions(+), 77 deletions(-) diff --git a/docs/PLUGINS_DEV_CONFIG.md b/docs/PLUGINS_DEV_CONFIG.md index 7452aece..ca190ad1 100755 --- a/docs/PLUGINS_DEV_CONFIG.md +++ b/docs/PLUGINS_DEV_CONFIG.md @@ -1,146 +1,192 @@ -## config.json Lifecycle in NetAlertX +# Plugins Implementation Details -This document describes on a high level how `config.json` is read, processed, and used by the NetAlertX core and plugins. It also outlines the plugin output contract and the main plugin types. +Plugins provide data to the NetAlertX core, which processes it to detect changes, discover new devices, raise alerts, and apply heuristics. -> [!NOTE] -> For a deep-dive on the specific configuration options and sections of the `config.json` plugin manifest, consult the [Plugins Development Guide](PLUGINS_DEV.md). +--- + +## Overview: Plugin Data Flow + +1. Each plugin runs on a defined schedule. +2. Aligning all plugin schedules is recommended so they execute in the same loop. +3. During execution, all plugins write their collected data into the **`CurrentScan`** table. +4. After all plugins complete, the `CurrentScan` table is evaluated to detect **new devices**, **changes**, and **triggers**. + +Although plugins run independently, they contribute to the shared `CurrentScan` table. +To inspect its contents, set `LOG_LEVEL=trace` and check for the log section: + +``` +================ CurrentScan table content ================ +``` + +--- + +## `config.json` Lifecycle + +This section outlines how each plugin’s `config.json` manifest is read, validated, and used by the core and plugins. +It also describes plugin output expectations and the main plugin categories. + +> [!TIP] +> For detailed schema and examples, see the [Plugin Development Guide](PLUGINS_DEV.md). --- ### 1. Loading -* On startup, the app core loads `config.json` for each plugin. -* The `config.json` represents a plugin manifest, that contains metadata and runtime settings. +* On startup, the core loads `config.json` for each plugin. +* The file acts as a **plugin manifest**, defining metadata, runtime configuration, and database mappings. --- ### 2. 
Validation -* The core checks that each required settings key (such as `RUN`) for a plugin exists. -* Invalid or missing values may be replaced with defaults, or the plugin may be disabled. +* The core validates required keys (for example, `RUN`). +* Missing or invalid entries may be replaced with defaults or cause the plugin to be disabled. --- ### 3. Preparation -* The plugin’s settings (paths, commands, parameters) are prepared. -* Database mappings (`mapped_to_table`, `database_column_definitions`) for data ingestion into the core app are parsed. +* Plugin parameters (paths, commands, and options) are prepared for execution. +* Database mappings (`mapped_to_table`, `database_column_definitions`) are parsed to define how data integrates with the main app. --- ### 4. Execution -* Plugins can be run at different core app execution points, such as on schedule, once on start, after a notification, etc. -* At runtime, the scheduler triggers plugins according to their `interval`. -* The plugin executes its command or script. +* Plugins may run: + + * On a fixed schedule. + * Once at startup. + * After a notification or other trigger. +* The scheduler executes plugins according to their `interval`. --- ### 5. Parsing -* Plugin output is expected in **pipe (`|`)-delimited format**. -* The core parses lines into fields, matching the **plugin interface contract**. +* Plugin output must be **pipe-delimited (`|`)**. +* The core parses each output line following the **Plugin Interface Contract**, splitting and mapping fields accordingly. --- ### 6. Mapping -* Each parsed field is moved into the `Plugins_` database tables and can be mapped into a configured database table. -* Controlled by `database_column_definitions` and `mapped_to_table`. -* Example: `Object_PrimaryID → Devices.MAC`. +* Parsed fields are inserted into the plugin’s `Plugins_*` table. +* Data can be mapped into other tables (e.g., `Devices`, `CurrentScan`) as defined by: + + * `database_column_definitions` + * `mapped_to_table` + +**Example:** `Object_PrimaryID → devMAC` --- ### 6a. Plugin Output Contract -Each plugin must output results in the **plugin interface contract format**, pipe (`|`)-delimited values, in the column order described under [Plugin Interface Contract](PLUGINS_DEV.md) +All plugins must follow the **Plugin Interface Contract** defined in `PLUGINS_DEV.md`. +Output values are pipe-delimited in a fixed order. -#### IDs +#### Identifiers - * `Object_PrimaryID` and `Object_SecondaryID` identify the record (e.g. `MAC|IP`). +* `Object_PrimaryID` and `Object_SecondaryID` uniquely identify records (for example, `MAC|IP`). -#### **Watched values (`Watched_Value1–4`)** +#### Watched Values (`Watched_Value1–4`) - * Used by the core to detect changes between runs. - * Changes here can trigger **notifications**. +* Used by the core to detect changes between runs. +* Changes in these fields can trigger notifications. -#### **Extra value (`Extra`)** +#### Extra Field (`Extra`) - * Optional, extra field. - * Stored in the database but **not used for alerts**. +* Optional additional value. +* Stored in the database but not used for alerts. -#### **Helper values (`Helper_Value1–3`)** +#### Helper Values (`Helper_Value1–3`) - * Added for cases where more than IDs + watched + extra are needed. - * Can be made visible in the UI. - * Stored in the database but **not used for alerts**. +* Optional auxiliary data (for display or plugin logic). +* Stored but not alert-triggering. 
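A minimal sketch of what one pipe-delimited output row could look like, assuming hypothetical field values; the authoritative column order is the one defined by the Plugin Interface Contract in [PLUGINS_DEV.md](PLUGINS_DEV.md), so treat the field names, ordering, and sample values below as illustrative only.

```python
# Illustrative only: assemble one pipe-delimited plugin result row.
# The real column order is defined by the Plugin Interface Contract in
# PLUGINS_DEV.md; field names and sample values here are hypothetical.
fields = [
    ("Object_PrimaryID", "00:11:22:33:44:55"),   # e.g. MAC
    ("Object_SecondaryID", "192.168.1.10"),      # e.g. IP
    ("Watched_Value1", "router.lan"),            # changes here can trigger notifications
    ("Watched_Value2", ""),
    ("Watched_Value3", ""),
    ("Watched_Value4", ""),
    ("Extra", "stored, never alerted on"),
    ("Helper_Value1", "optional UI-only data"),
]

row = "|".join(value for _, value in fields)
print(row)
# 00:11:22:33:44:55|192.168.1.10|router.lan||||stored, never alerted on|optional UI-only data
```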
-#### **Mapping matters** +#### Mapping - * While the plugin output is free-form, the `database_column_definitions` and `mapped_to_table` settings in `config.json` determine the **target columns and data types** in NetAlertX. +* While the output format is flexible, the plugin’s manifest determines the destination and type of each field. --- ### 7. Persistence -* Data is upserted into the database. -* Conflicts are resolved using `Object_PrimaryID` + `Object_SecondaryID`. +* Parsed data is **upserted** into the database. +* Conflicts are resolved using the combined key: `Object_PrimaryID + Object_SecondaryID`. --- -### 8. Plugin Types and Expected Outputs +## Plugin Categories -Beyond the `data_source` setting, plugins fall into functional categories. Each has its own input requirements and output expectations: +Plugins fall into several functional categories depending on their purpose and expected outputs. -#### **Device discovery plugins** +### 1. Device Discovery Plugins - * **Inputs:** `N/A`, subnet, or API for discovery service, or similar. - * **Outputs:** At minimum `MAC` and `IP` that results in a new or updated device records in the `Devices` table. - * **Mapping:** Must be mapped to the `CurrentScan` table via `database_column_definitions` and `data_filters`. - * **Examples:** ARP-scan, NMAP device discovery (e.g., `ARPSCAN`, `NMAPDEV`). - -#### **Device-data enrichment plugins** - - * **Inputs:** Device identifier (usually `MAC`, `IP`). - * **Outputs:** Additional data for that device (e.g. open ports). - * **Mapping:** Controlled via `database_column_definitions` and `data_filters`. - * **Examples:** Ports, MQTT messages (e.g., `NMAP`, `MQTT`) - -#### **Name resolver plugins** - - * **Inputs:** Device identifiers (MAC, IP, or hostname). - * **Outputs:** Updated `devName` and `devFQDN` fields. - * **Mapping:** Not expected. - * **Note:** Currently requires **core app modification** to add new plugins, not fully driven by the plugins’ `config.json`. - * **Examples:** Avahiscan (e.g., `NBTSCAN`, `NSLOOKUP`). - -#### **Generic plugins** - - * **Inputs:** Whatever the script or query provides. - * **Outputs:** Data shown only in **Integrations → Plugins**, not tied to devices. - * **Mapping:** Not expected. - * **Examples:** External monitoring data (e.g., `INTRSPD`) - -#### **Configuration-only plugins** - - * **Inputs/Outputs:** None at runtime. - * **Mapping:** Not expected. - * **Examples:** Used to provide additional settings or execute scripts (e.g., `MAINT`, `CSVBCKP`). +* **Inputs:** None, subnet, or discovery API. +* **Outputs:** `MAC` and `IP` for new or updated device records in `Devices`. +* **Mapping:** Required – usually into `CurrentScan`. +* **Examples:** `ARPSCAN`, `NMAPDEV`. --- -### 9. Post-Processing +### 2. Device Data Enrichment Plugins -* Notifications are generated if watched values change. -* UI is updated with new or updated records. -* All values that are configured to be shown in teh UI appear in the Plugins section. +* **Inputs:** Device identifiers (`MAC`, `IP`). +* **Outputs:** Additional metadata (for example, open ports or sensors). +* **Mapping:** Controlled via manifest definitions. +* **Examples:** `NMAP`, `MQTT`. --- -### 10. Summary +### 3. Name Resolver Plugins -The lifecycle of `config.json` entries is: +* **Inputs:** Device identifiers (`MAC`, `IP`, hostname`). +* **Outputs:** Updated `devName` and `devFQDN`. +* **Mapping:** Typically none. +* **Note:** Adding new resolvers currently requires a core change. 
+* **Examples:** `NBTSCAN`, `NSLOOKUP`. + +--- + +### 4. Generic Plugins + +* **Inputs:** Custom, based on the plugin logic or script. +* **Outputs:** Data displayed under **Integrations → Plugins** only. +* **Mapping:** Not required. +* **Examples:** `INTRSPD`, custom monitoring scripts. + +--- + +### 5. Configuration-Only Plugins + +* **Inputs/Outputs:** None at runtime. +* **Purpose:** Used for configuration or maintenance tasks. +* **Examples:** `MAINT`, `CSVBCKP`. + +--- + +## Post-Processing + +After persistence: + +* The core generates notifications for any watched value changes. +* The UI updates with new or modified data. +* Plugins with UI-enabled data display under **Integrations → Plugins**. + +--- + +## Summary + +The lifecycle of a plugin configuration is: **Load → Validate → Prepare → Execute → Parse → Map → Persist → Post-process** -Plugins must follow the **output contract**, and their category (discovery, specific, resolver, generic, config-only) defines what inputs they require and what outputs are expected. +Each plugin must: + +* Follow the **output contract**. +* Declare its type and expected output structure. +* Define mappings and watched values clearly in `config.json`. + + From 5cf8a25bae31b8e5fd1d5526f8e5218f77f9dbfd Mon Sep 17 00:00:00 2001 From: jokob-sk Date: Sat, 8 Nov 2025 22:01:04 +1100 Subject: [PATCH 26/31] BE: timestamp work name changes #1251 Signed-off-by: jokob-sk --- server/scan/device_handling.py | 10 +++++----- server/utils/datetime_utils.py | 22 +++++++++++++++++----- 2 files changed, 22 insertions(+), 10 deletions(-) diff --git a/server/scan/device_handling.py b/server/scan/device_handling.py index 612e888a..5ebfb343 100755 --- a/server/scan/device_handling.py +++ b/server/scan/device_handling.py @@ -558,20 +558,20 @@ def check_plugin_data_changed(pm, plugins_to_check): continue # Normalize and validate last_changed timestamp - last_changed_ts = normalizeTimeStamp(str(last_data_change)) + last_changed_ts = normalizeTimeStamp(last_data_change) if last_changed_ts == None: - mylog('none', f'[check_plugin_data_changed] Unexpected last_data_change timestamp for {plugin_name}: {last_data_change}') + mylog('none', f'[check_plugin_data_changed] Unexpected last_data_change timestamp for {plugin_name} (input|output): ({last_data_change}|{last_changed_ts})') # Normalize and validate last_data_check timestamp - last_data_check_ts = normalizeTimeStamp(str(last_data_check)) + last_data_check_ts = normalizeTimeStamp(last_data_check) if last_data_check_ts == None: - mylog('none', f'[check_plugin_data_changed] Unexpected last_data_check timestamp for {plugin_name}: {last_data_check}') + mylog('none', f'[check_plugin_data_changed] Unexpected last_data_check timestamp for {plugin_name} (input|output): ({last_data_check}|{last_data_check_ts})') # Track which plugins have newer state than last_checked if last_data_check_ts is None or last_changed_ts is None or last_changed_ts > last_data_check_ts: - mylog('debug', f'[check_plugin_data_changed] plugin_name changed last_changed_ts | last_data_check_ts: {last_changed_ts} | {last_data_check_ts}') + mylog('debug', f'[check_plugin_data_changed] {plugin_name} changed (last_changed_ts|last_data_check_ts): ({last_changed_ts}|{last_data_check_ts})') plugins_changed.append(plugin_name) # Skip if no plugin state changed since last check diff --git a/server/utils/datetime_utils.py b/server/utils/datetime_utils.py index 16bba58c..21fab835 100644 --- a/server/utils/datetime_utils.py +++ b/server/utils/datetime_utils.py @@ -4,7 +4,9 
@@ import os import pathlib import sys from datetime import datetime +from dateutil import parser import datetime +import re import pytz from pytz import timezone from typing import Union @@ -23,6 +25,10 @@ from const import * #------------------------------------------------------------------------------- # DateTime #------------------------------------------------------------------------------- + +DATETIME_PATTERN = "%Y-%m-%d %H:%M:%S" +DATETIME_REGEX = re.compile(r'^\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}$') + def timeNowTZ(): if conf.tz: return datetime.datetime.now(conf.tz).replace(microsecond=0) @@ -56,9 +62,9 @@ def timeNowDB(local=True): tz = None except Exception: tz = None - return datetime.datetime.now(tz).strftime('%Y-%m-%d %H:%M:%S') + return datetime.datetime.now(tz).strftime(DATETIME_PATTERN) else: - return datetime.datetime.now(datetime.UTC).strftime('%Y-%m-%d %H:%M:%S') + return datetime.datetime.now(datetime.UTC).strftime(DATETIME_PATTERN) #------------------------------------------------------------------------------- @@ -85,7 +91,7 @@ def normalizeTimeStamp(inputTimeStamp): # Epoch timestamp (integer or float) if isinstance(inputTimeStamp, (int, float)): - try: + try: return datetime.datetime.fromtimestamp(inputTimeStamp) except (OSError, OverflowError, ValueError): return None @@ -96,8 +102,14 @@ def normalizeTimeStamp(inputTimeStamp): if not inputTimeStamp: return None try: - # Handles SQLite and ISO8601 automatically - return parser.parse(inputTimeStamp) + # match the "2025-11-08 14:32:10" format + pattern = DATETIME_REGEX + + if pattern.match(inputTimeStamp): + return datetime.datetime.strptime(inputTimeStamp, DATETIME_PATTERN) + else: + # Handles SQLite and ISO8601 automatically + return parser.parse(inputTimeStamp) except Exception: return None From 76150b2ca73afd2cbb643406ce84f46f84e6255e Mon Sep 17 00:00:00 2001 From: jokob-sk Date: Sat, 8 Nov 2025 22:02:55 +1100 Subject: [PATCH 27/31] BE: github actions + dev version Signed-off-by: jokob-sk --- .github/workflows/docker_dev.yml | 2 +- .github/workflows/docker_prod.yml | 2 +- .github/workflows/docker_rewrite.yml | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/docker_dev.yml b/.github/workflows/docker_dev.yml index 97da1434..eaeb261a 100755 --- a/.github/workflows/docker_dev.yml +++ b/.github/workflows/docker_dev.yml @@ -53,7 +53,7 @@ jobs: - name: Docker meta id: meta - uses: docker/metadata-action@v4 + uses: docker/metadata-action@v5 with: images: | ghcr.io/jokob-sk/netalertx-dev diff --git a/.github/workflows/docker_prod.yml b/.github/workflows/docker_prod.yml index 548b05dc..fa484de9 100755 --- a/.github/workflows/docker_prod.yml +++ b/.github/workflows/docker_prod.yml @@ -44,7 +44,7 @@ jobs: # --- Generate Docker metadata and tags - name: Docker meta id: meta - uses: docker/metadata-action@v4 + uses: docker/metadata-action@v5 with: images: | ghcr.io/jokob-sk/netalertx diff --git a/.github/workflows/docker_rewrite.yml b/.github/workflows/docker_rewrite.yml index df1e49c9..db862430 100755 --- a/.github/workflows/docker_rewrite.yml +++ b/.github/workflows/docker_rewrite.yml @@ -43,7 +43,7 @@ jobs: - name: Docker meta id: meta - uses: docker/metadata-action@v4 + uses: docker/metadata-action@v5 with: images: | ghcr.io/jokob-sk/netalertx-dev-rewrite From 68c8e16828ab4de896f219c9f778d1dfa7dce537 Mon Sep 17 00:00:00 2001 From: jokob-sk Date: Sat, 8 Nov 2025 22:08:20 +1100 Subject: [PATCH 28/31] PLG: cleanup Signed-off-by: jokob-sk --- front/plugins/_publisher_mqtt/mqtt.py | 2 +- 
front/plugins/nmap_dev_scan/nmap_dev.py | 4 ---- 2 files changed, 1 insertion(+), 5 deletions(-) diff --git a/front/plugins/_publisher_mqtt/mqtt.py b/front/plugins/_publisher_mqtt/mqtt.py index 943add6c..f2970222 100755 --- a/front/plugins/_publisher_mqtt/mqtt.py +++ b/front/plugins/_publisher_mqtt/mqtt.py @@ -568,7 +568,7 @@ def prepTimeStamp(datetime_str): except ValueError: mylog('verbose', [f"[{pluginName}] Timestamp conversion failed of string '{datetime_str}'"]) # Use the current time if the input format is invalid - parsed_datetime = timeNowDB() + parsed_datetime = datetime.now(conf.tz) # Convert to the required format with 'T' between date and time and ensure the timezone is included return parsed_datetime.isoformat() # This will include the timezone offset diff --git a/front/plugins/nmap_dev_scan/nmap_dev.py b/front/plugins/nmap_dev_scan/nmap_dev.py index 2c7f85ad..fe53dfc3 100755 --- a/front/plugins/nmap_dev_scan/nmap_dev.py +++ b/front/plugins/nmap_dev_scan/nmap_dev.py @@ -46,10 +46,6 @@ def main(): mylog('verbose', [f'[{pluginName}] In script']) - # Create a database connection - db = DB() # instance of class DB - db.open() - timeout = get_setting_value('NMAPDEV_RUN_TIMEOUT') fakeMac = get_setting_value('NMAPDEV_FAKE_MAC') subnets = get_setting_value('SCAN_SUBNETS') From 8483a741b47c9125bba590b479baefa3597bd323 Mon Sep 17 00:00:00 2001 From: jokob-sk Date: Sun, 9 Nov 2025 18:50:16 +1100 Subject: [PATCH 29/31] BE: LangStrings /graphql + /logs endpoint, utils chores Signed-off-by: jokob-sk --- docs/API.md | 3 +- docs/API_GRAPHQL.md | 72 ++++++- docs/API_LOGS.md | 179 ++++++++++++++++++ front/php/server/util.php | 22 ++- front/plugins/__template/rename_me.py | 2 +- front/plugins/__test/test.py | 2 +- front/plugins/_publisher_mqtt/mqtt.py | 2 +- front/plugins/dig_scan/digscan.py | 2 +- front/plugins/freebox/freebox.py | 2 +- front/plugins/ipneigh/ipneigh.py | 2 +- front/plugins/nbtscan_scan/nbtscan.py | 2 +- front/plugins/omada_sdn_imp/omada_sdn.py | 2 +- front/plugins/sync/sync.py | 4 +- .../unifi_api_import/unifi_api_import.py | 2 +- front/plugins/wake_on_lan/wake_on_lan.py | 2 +- server/api_server/api_server_start.py | 178 +++++++++++------ server/api_server/graphql_endpoint.py | 121 +++++++++++- server/api_server/logs_endpoint.py | 58 ++++++ server/initialise.py | 4 +- server/models/user_events_queue_instance.py | 40 ++++ server/plugin.py | 4 +- server/{ => utils}/crypto_utils.py | 0 server/{ => utils}/plugin_utils.py | 2 +- test/api_endpoints/test_graphq_endpoints.py | 77 ++++++++ test/api_endpoints/test_logs_endpoints.py | 61 ++++++ 25 files changed, 757 insertions(+), 88 deletions(-) create mode 100644 docs/API_LOGS.md create mode 100644 server/api_server/logs_endpoint.py rename server/{ => utils}/crypto_utils.py (100%) mode change 100755 => 100644 rename server/{ => utils}/plugin_utils.py (99%) mode change 100755 => 100644 create mode 100644 test/api_endpoints/test_logs_endpoints.py diff --git a/docs/API.md b/docs/API.md index 6268a9d9..8c9c3767 100755 --- a/docs/API.md +++ b/docs/API.md @@ -64,8 +64,9 @@ http://:/ * [Metrics](API_METRICS.md) – Prometheus metrics and per-device status * [Network Tools](API_NETTOOLS.md) – Utilities like Wake-on-LAN, traceroute, nslookup, nmap, and internet info * [Online History](API_ONLINEHISTORY.md) – Online/offline device records -* [GraphQL](API_GRAPHQL.md) – Advanced queries and filtering +* [GraphQL](API_GRAPHQL.md) – Advanced queries and filtering for Devices, Settings and Language Strings * [Sync](API_SYNC.md) – Synchronization 
between multiple NetAlertX instances +* [Logs](API_LOGS.md) – Purging of logs and adding to the event execution queue for user triggered events * [DB query](API_DBQUERY.md) (⚠ Internal) - Low level database access - use other endpoints if possible See [Testing](API_TESTS.md) for example requests and usage. diff --git a/docs/API_GRAPHQL.md b/docs/API_GRAPHQL.md index d3016b1e..e7ccfd10 100755 --- a/docs/API_GRAPHQL.md +++ b/docs/API_GRAPHQL.md @@ -1,9 +1,10 @@ # GraphQL API Endpoint -GraphQL queries are **read-optimized for speed**. Data may be slightly out of date until the file system cache refreshes. The GraphQL endpoints allows you to access the following objects: +GraphQL queries are **read-optimized for speed**. Data may be slightly out of date until the file system cache refreshes. The GraphQL endpoints allow you to access the following objects: -- Devices -- Settings +* Devices +* Settings +* Language Strings (LangStrings) ## Endpoints @@ -190,11 +191,74 @@ curl 'http://host:GRAPHQL_PORT/graphql' \ } ``` + +--- + +## LangStrings Query + +The **LangStrings query** provides access to localized strings. Supports filtering by `langCode` and `langStringKey`. If the requested string is missing or empty, you can optionally fallback to `en_us`. + +### Sample Query + +```graphql +query GetLangStrings { + langStrings(langCode: "de_de", langStringKey: "settings_other_scanners") { + langStrings { + langCode + langStringKey + langStringText + } + count + } +} +``` + +### Query Parameters + +| Parameter | Type | Description | +| ---------------- | ------- | ---------------------------------------------------------------------------------------- | +| `langCode` | String | Optional language code (e.g., `en_us`, `de_de`). If omitted, all languages are returned. | +| `langStringKey` | String | Optional string key to retrieve a specific entry. | +| `fallback_to_en` | Boolean | Optional (default `true`). If `true`, empty or missing strings fallback to `en_us`. | + +### `curl` Example + +```sh +curl 'http://host:GRAPHQL_PORT/graphql' \ + -X POST \ + -H 'Authorization: Bearer API_TOKEN' \ + -H 'Content-Type: application/json' \ + --data '{ + "query": "query GetLangStrings { langStrings(langCode: \"de_de\", langStringKey: \"settings_other_scanners\") { langStrings { langCode langStringKey langStringText } count } }" + }' +``` + +### Sample Response + +```json +{ + "data": { + "langStrings": { + "count": 1, + "langStrings": [ + { + "langCode": "de_de", + "langStringKey": "settings_other_scanners", + "langStringText": "Other, non-device scanner plugins that are currently enabled." // falls back to en_us if empty + } + ] + } + } +} +``` + --- ## Notes -* Device and settings queries can be combined in one request since GraphQL supports batching. +* Device, settings, and LangStrings queries can be combined in **one request** since GraphQL supports batching. +* The `fallback_to_en` feature ensures UI always has a value even if a translation is missing. +* Data is **cached in memory** per JSON file; changes to language or plugin files will only refresh after the cache detects a file modification. * The `setOverriddenByEnv` flag helps identify setting values that are locked at container runtime. * The schema is **read-only** — updates must be performed through other APIs or configuration management. See the other [API](API.md) endpoints for details. 
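For driving the same LangStrings query from a script instead of `curl`, the following is a rough sketch; `HOST`, `GRAPHQL_PORT`, and `API_TOKEN` are placeholders for your own deployment, and only the query shape documented above is assumed.

```python
# Rough sketch: fetch one localized string from the /graphql endpoint.
# HOST, GRAPHQL_PORT and API_TOKEN are deployment-specific placeholders.
import requests

HOST = "localhost"
GRAPHQL_PORT = 20212      # replace with your GRAPHQL_PORT setting
API_TOKEN = "API_TOKEN"   # replace with your API token

query = """
query GetLangStrings {
  langStrings(langCode: "de_de", langStringKey: "settings_other_scanners") {
    langStrings { langCode langStringKey langStringText }
    count
  }
}
"""

resp = requests.post(
    f"http://{HOST}:{GRAPHQL_PORT}/graphql",
    headers={"Authorization": f"Bearer {API_TOKEN}", "Content-Type": "application/json"},
    json={"query": query},
    timeout=10,
)
resp.raise_for_status()

for entry in resp.json()["data"]["langStrings"]["langStrings"]:
    print(entry["langStringKey"], "->", entry["langStringText"])
```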
diff --git a/docs/API_LOGS.md b/docs/API_LOGS.md new file mode 100644 index 00000000..8907069d --- /dev/null +++ b/docs/API_LOGS.md @@ -0,0 +1,179 @@ +# Logs API Endpoints + +Manage or purge application log files stored under `/app/log` and manage the execution queue. These endpoints are primarily used for maintenance tasks such as clearing accumulated logs or adding system actions without restarting the container. + +Only specific, pre-approved log files can be purged for security and stability reasons. + +--- + +## Delete (Purge) a Log File + +* **DELETE** `/logs?file=` → Purge the contents of an allowed log file. + +**Query Parameter:** + +* `file` → The name of the log file to purge (e.g., `app.log`, `stdout.log`) + +**Allowed Files:** + +``` +app.log +app_front.log +IP_changes.log +stdout.log +stderr.log +app.php_errors.log +execution_queue.log +db_is_locked.log +``` + +**Authorization:** +Requires a valid API token in the `Authorization` header. + +--- + +### `curl` Example (Success) + +```sh +curl -X DELETE 'http://:/logs?file=app.log' \ + -H 'Authorization: Bearer ' \ + -H 'Accept: application/json' +``` + +**Response:** + +```json +{ + "success": true, + "message": "[clean_log] File app.log purged successfully" +} +``` + +--- + +### `curl` Example (Not Allowed) + +```sh +curl -X DELETE 'http://:/logs?file=not_allowed.log' \ + -H 'Authorization: Bearer ' \ + -H 'Accept: application/json' +``` + +**Response:** + +```json +{ + "success": false, + "message": "[clean_log] File not_allowed.log is not allowed to be purged" +} +``` + +--- + +### `curl` Example (Unauthorized) + +```sh +curl -X DELETE 'http://:/logs?file=app.log' \ + -H 'Accept: application/json' +``` + +**Response:** + +```json +{ + "error": "Forbidden" +} +``` + +--- + +## Add an Action to the Execution Queue + +* **POST** `/logs/add-to-execution-queue` → Add a system action to the execution queue. + +**Request Body (JSON):** + +```json +{ + "action": "update_api|devices" +} +``` + +**Authorization:** +Requires a valid API token in the `Authorization` header. + +--- + +### `curl` Example (Success) + +The below will update the API cache for Devices + +```sh +curl -X POST 'http://:/logs/add-to-execution-queue' \ + -H 'Authorization: Bearer ' \ + -H 'Content-Type: application/json' \ + --data '{"action": "update_api|devices"}' +``` + +**Response:** + +```json +{ + "success": true, + "message": "[UserEventsQueueInstance] Action \"update_api|devices\" added to the execution queue." +} +``` + +--- + +### `curl` Example (Missing Parameter) + +```sh +curl -X POST 'http://:/logs/add-to-execution-queue' \ + -H 'Authorization: Bearer ' \ + -H 'Content-Type: application/json' \ + --data '{}' +``` + +**Response:** + +```json +{ + "success": false, + "message": "Missing parameters", + "error": "Missing required 'action' field in JSON body" +} +``` + +--- + +### `curl` Example (Unauthorized) + +```sh +curl -X POST 'http://:/logs/add-to-execution-queue' \ + -H 'Content-Type: application/json' \ + --data '{"action": "update_api|devices"}' +``` + +**Response:** + +```json +{ + "error": "Forbidden" +} +``` + +--- + +## Notes + +* Only predefined files in `/app/log` can be purged — arbitrary paths are **not permitted**. +* When a log file is purged: + + * Its content is replaced with a short marker text: `"File manually purged"`. + * A backend log entry is created via `mylog()`. + * A frontend notification is generated via `write_notification()`. 
+* Execution queue actions are appended to `execution_queue.log` and can be processed asynchronously by background tasks or workflows. +* Unauthorized or invalid attempts are safely logged and rejected. +* For advanced log retrieval, analysis, or structured querying, use the frontend log viewer. +* Always ensure that sensitive or production logs are handled carefully — purging cannot be undone. diff --git a/front/php/server/util.php b/front/php/server/util.php index f4d11052..e80cca23 100755 --- a/front/php/server/util.php +++ b/front/php/server/util.php @@ -176,7 +176,10 @@ function checkPermissions($files) } // ---------------------------------------------------------------------------------------- - +// 🔺----- API ENDPOINTS SUPERSEDED -----🔺 +// check server/api_server/api_server_start.py for equivalents +// equivalent: /messaging/in-app/write +// 🔺----- API ENDPOINTS SUPERSEDED -----🔺 function displayMessage($message, $logAlert = FALSE, $logConsole = TRUE, $logFile = TRUE, $logEcho = FALSE) { global $logFolderPath, $log_file, $timestamp; @@ -234,7 +237,10 @@ function displayMessage($message, $logAlert = FALSE, $logConsole = TRUE, $logFil } - +// 🔺----- API ENDPOINTS SUPERSEDED -----🔺 +// check server/api_server/api_server_start.py for equivalents +// equivalent: /logs/add-to-execution-queue +// 🔺----- API ENDPOINTS SUPERSEDED -----🔺 // ---------------------------------------------------------------------------------------- // Adds an action to perform into the execution_queue.log file function addToExecutionQueue($action) @@ -257,6 +263,10 @@ function addToExecutionQueue($action) // ---------------------------------------------------------------------------------------- +// 🔺----- API ENDPOINTS SUPERSEDED -----🔺 +// check server/api_server/api_server_start.py for equivalents +// equivalent: /logs DELETE +// 🔺----- API ENDPOINTS SUPERSEDED -----🔺 function cleanLog($logFile) { global $logFolderPath, $timestamp; @@ -418,6 +428,10 @@ function saveSettings() } // ------------------------------------------------------------------------------------------- +// 🔺----- API ENDPOINTS SUPERSEDED -----🔺 +// check server/api_server/api_server_start.py for equivalents +// equivalent: /graphql LangStrings endpoint +// 🔺----- API ENDPOINTS SUPERSEDED -----🔺 function getString ($setKey, $default) { $result = lang($setKey); @@ -430,6 +444,10 @@ function getString ($setKey, $default) { return $default; } // ------------------------------------------------------------------------------------------- +// 🔺----- API ENDPOINTS SUPERSEDED -----🔺 +// check server/api_server/api_server_start.py for equivalents +// equivalent: /settings/ +// 🔺----- API ENDPOINTS SUPERSEDED -----🔺 function getSettingValue($setKey) { // Define the JSON endpoint URL $url = dirname(__FILE__).'/../../../api/table_settings.json'; diff --git a/front/plugins/__template/rename_me.py b/front/plugins/__template/rename_me.py index c303d95d..09941226 100755 --- a/front/plugins/__template/rename_me.py +++ b/front/plugins/__template/rename_me.py @@ -12,7 +12,7 @@ INSTALL_PATH = "/app" sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"]) from plugin_helper import Plugin_Object, Plugin_Objects, decodeBase64 -from plugin_utils import get_plugins_configs +from utils.plugin_utils import get_plugins_configs from logger import mylog, Logger from const import pluginsPath, fullDbPath, logPath from helper import get_setting_value diff --git a/front/plugins/__test/test.py b/front/plugins/__test/test.py index 21cfc1d0..966f853e 
100755 --- a/front/plugins/__test/test.py +++ b/front/plugins/__test/test.py @@ -20,7 +20,7 @@ sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"]) # NetAlertX modules import conf from const import apiPath, confFileName, logPath -from plugin_utils import getPluginObject +from utils.plugin_utils import getPluginObject from plugin_helper import Plugin_Objects from logger import mylog, Logger, append_line_to_file from helper import get_setting_value, bytes_to_string, sanitize_string, cleanDeviceName diff --git a/front/plugins/_publisher_mqtt/mqtt.py b/front/plugins/_publisher_mqtt/mqtt.py index f2970222..0a0dd05c 100755 --- a/front/plugins/_publisher_mqtt/mqtt.py +++ b/front/plugins/_publisher_mqtt/mqtt.py @@ -20,7 +20,7 @@ sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"]) # NetAlertX modules import conf from const import confFileName, logPath -from plugin_utils import getPluginObject +from utils.plugin_utils import getPluginObject from plugin_helper import Plugin_Objects from logger import mylog, Logger from helper import get_setting_value, bytes_to_string, \ diff --git a/front/plugins/dig_scan/digscan.py b/front/plugins/dig_scan/digscan.py index dd00c226..60d4f1ac 100755 --- a/front/plugins/dig_scan/digscan.py +++ b/front/plugins/dig_scan/digscan.py @@ -12,7 +12,7 @@ INSTALL_PATH = "/app" sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"]) from plugin_helper import Plugin_Object, Plugin_Objects, decodeBase64 -from plugin_utils import get_plugins_configs +from utils.plugin_utils import get_plugins_configs from logger import mylog, Logger from const import pluginsPath, fullDbPath, logPath from helper import get_setting_value diff --git a/front/plugins/freebox/freebox.py b/front/plugins/freebox/freebox.py index da8a8884..3e1c4c15 100755 --- a/front/plugins/freebox/freebox.py +++ b/front/plugins/freebox/freebox.py @@ -21,7 +21,7 @@ INSTALL_PATH = "/app" sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"]) from plugin_helper import Plugin_Object, Plugin_Objects, decodeBase64 -from plugin_utils import get_plugins_configs +from utils.plugin_utils import get_plugins_configs from logger import mylog, Logger from const import pluginsPath, fullDbPath, logPath from helper import get_setting_value diff --git a/front/plugins/ipneigh/ipneigh.py b/front/plugins/ipneigh/ipneigh.py index a556c213..2d053a2e 100755 --- a/front/plugins/ipneigh/ipneigh.py +++ b/front/plugins/ipneigh/ipneigh.py @@ -15,7 +15,7 @@ INSTALL_PATH = "/app" sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"]) from plugin_helper import Plugin_Object, Plugin_Objects, decodeBase64, handleEmpty -from plugin_utils import get_plugins_configs +from utils.plugin_utils import get_plugins_configs from logger import mylog, Logger from const import pluginsPath, fullDbPath, logPath from helper import get_setting_value diff --git a/front/plugins/nbtscan_scan/nbtscan.py b/front/plugins/nbtscan_scan/nbtscan.py index d555859d..2ea9c410 100755 --- a/front/plugins/nbtscan_scan/nbtscan.py +++ b/front/plugins/nbtscan_scan/nbtscan.py @@ -12,7 +12,7 @@ INSTALL_PATH = "/app" sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"]) from plugin_helper import Plugin_Object, Plugin_Objects, decodeBase64 -from plugin_utils import get_plugins_configs +from utils.plugin_utils import get_plugins_configs from logger import mylog, Logger from const import pluginsPath, fullDbPath, logPath from helper import get_setting_value diff 
--git a/front/plugins/omada_sdn_imp/omada_sdn.py b/front/plugins/omada_sdn_imp/omada_sdn.py index ae2f482b..9434f226 100755 --- a/front/plugins/omada_sdn_imp/omada_sdn.py +++ b/front/plugins/omada_sdn_imp/omada_sdn.py @@ -41,7 +41,7 @@ INSTALL_PATH = "/app" sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"]) from plugin_helper import Plugin_Object, Plugin_Objects, decodeBase64 -from plugin_utils import get_plugins_configs +from utils.plugin_utils import get_plugins_configs from logger import mylog, Logger from const import pluginsPath, fullDbPath, logPath from helper import get_setting_value diff --git a/front/plugins/sync/sync.py b/front/plugins/sync/sync.py index 3bc584e6..2e804b0d 100755 --- a/front/plugins/sync/sync.py +++ b/front/plugins/sync/sync.py @@ -15,12 +15,12 @@ INSTALL_PATH = "/app" sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"]) from plugin_helper import Plugin_Object, Plugin_Objects, decodeBase64 -from plugin_utils import get_plugins_configs, decode_and_rename_files +from utils.plugin_utils import get_plugins_configs, decode_and_rename_files from logger import mylog, Logger from const import pluginsPath, fullDbPath, logPath from helper import get_setting_value from utils.datetime_utils import timeNowDB -from crypto_utils import encrypt_data +from utils.crypto_utils import encrypt_data from messaging.in_app import write_notification import conf from pytz import timezone diff --git a/front/plugins/unifi_api_import/unifi_api_import.py b/front/plugins/unifi_api_import/unifi_api_import.py index 8e8b9a94..c9f720bc 100755 --- a/front/plugins/unifi_api_import/unifi_api_import.py +++ b/front/plugins/unifi_api_import/unifi_api_import.py @@ -13,7 +13,7 @@ INSTALL_PATH = "/app" sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"]) from plugin_helper import Plugin_Object, Plugin_Objects, decodeBase64, decode_settings_base64 -from plugin_utils import get_plugins_configs +from utils.plugin_utils import get_plugins_configs from logger import mylog, Logger from const import pluginsPath, fullDbPath, logPath from helper import get_setting_value diff --git a/front/plugins/wake_on_lan/wake_on_lan.py b/front/plugins/wake_on_lan/wake_on_lan.py index eaa0bdde..60736db8 100755 --- a/front/plugins/wake_on_lan/wake_on_lan.py +++ b/front/plugins/wake_on_lan/wake_on_lan.py @@ -13,7 +13,7 @@ INSTALL_PATH = "/app" sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"]) from plugin_helper import Plugin_Object, Plugin_Objects, decodeBase64 -from plugin_utils import get_plugins_configs +from utils.plugin_utils import get_plugins_configs from logger import mylog, Logger from const import pluginsPath, fullDbPath, logPath from helper import get_setting_value diff --git a/server/api_server/api_server_start.py b/server/api_server/api_server_start.py index 3a376791..fbf944f5 100755 --- a/server/api_server/api_server_start.py +++ b/server/api_server/api_server_start.py @@ -24,6 +24,8 @@ from .sessions_endpoint import get_sessions, delete_session, create_session, get from .nettools_endpoint import wakeonlan, traceroute, speedtest, nslookup, nmap_scan, internet_info from .dbquery_endpoint import read_query, write_query, update_query, delete_query from .sync_endpoint import handle_sync_post, handle_sync_get +from .logs_endpoint import clean_log +from models.user_events_queue_instance import UserEventsQueueInstance from messaging.in_app import write_notification, mark_all_notifications_read, delete_notifications, 
get_unread_notifications, delete_notification, mark_notification_as_read # Flask application @@ -40,12 +42,25 @@ CORS( r"/settings/*": {"origins": "*"}, r"/dbquery/*": {"origins": "*"}, r"/messaging/*": {"origins": "*"}, - r"/events/*": {"origins": "*"} + r"/events/*": {"origins": "*"}, + r"/logs/*": {"origins": "*"} }, supports_credentials=True, allow_headers=["Authorization", "Content-Type"] ) +# ------------------------------------------------------------------- +# Custom handler for 404 - Route not found +# ------------------------------------------------------------------- +@app.errorhandler(404) +def not_found(error): + response = { + "success": False, + "error": "API route not found", + "message": f"The requested URL {error.description if hasattr(error, 'description') else ''} was not found on the server.", + } + return jsonify(response), 404 + # -------------------------- # GraphQL Endpoints # -------------------------- @@ -63,7 +78,7 @@ def graphql_endpoint(): if not is_authorized(): msg = '[graphql_server] Unauthorized access attempt - make sure your GRAPHQL_PORT and API_TOKEN settings are correct.' mylog('verbose', [msg]) - return jsonify({"error": msg}), 401 + return jsonify({"success": False, "message": msg}), 401 # Retrieve and log request data data = request.get_json() @@ -89,7 +104,7 @@ def graphql_endpoint(): @app.route("/settings/", methods=["GET"]) def api_get_setting(setKey): if not is_authorized(): - return jsonify({"error": "Forbidden"}), 403 + return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403 value = get_setting_value(setKey) return jsonify({"success": True, "value": value}) @@ -100,58 +115,58 @@ def api_get_setting(setKey): @app.route("/device/", methods=["GET"]) def api_get_device(mac): if not is_authorized(): - return jsonify({"error": "Forbidden"}), 403 + return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403 return get_device_data(mac) @app.route("/device/", methods=["POST"]) def api_set_device(mac): if not is_authorized(): - return jsonify({"error": "Forbidden"}), 403 + return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403 return set_device_data(mac, request.json) @app.route("/device//delete", methods=["DELETE"]) def api_delete_device(mac): if not is_authorized(): - return jsonify({"error": "Forbidden"}), 403 + return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403 return delete_device(mac) @app.route("/device//events/delete", methods=["DELETE"]) def api_delete_device_events(mac): if not is_authorized(): - return jsonify({"error": "Forbidden"}), 403 + return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403 return delete_device_events(mac) @app.route("/device//reset-props", methods=["POST"]) def api_reset_device_props(mac): if not is_authorized(): - return jsonify({"error": "Forbidden"}), 403 + return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403 return reset_device_props(mac, request.json) @app.route("/device/copy", methods=["POST"]) def api_copy_device(): if not is_authorized(): - return jsonify({"error": "Forbidden"}), 403 + return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403 data = request.get_json() or {} mac_from = data.get("macFrom") mac_to = data.get("macTo") if not mac_from or not mac_to: - return jsonify({"success": False, "error": "macFrom and 
macTo are required"}), 400 + return jsonify({"success": False, "message": "Missing parameters", "error": "macFrom and macTo are required"}), 400 return copy_device(mac_from, mac_to) @app.route("/device//update-column", methods=["POST"]) def api_update_device_column(mac): if not is_authorized(): - return jsonify({"error": "Forbidden"}), 403 + return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403 data = request.get_json() or {} column_name = data.get("columnName") column_value = data.get("columnValue") if not column_name or not column_value: - return jsonify({"success": False, "error": "columnName and columnValue are required"}), 400 + return jsonify({"success": False, "message": "Missing parameters", "error": "columnName and columnValue are required"}), 400 return update_device_column(mac, column_name, column_value) @@ -162,13 +177,13 @@ def api_update_device_column(mac): @app.route("/devices", methods=["GET"]) def api_get_devices(): if not is_authorized(): - return jsonify({"error": "Forbidden"}), 403 + return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403 return get_all_devices() @app.route("/devices", methods=["DELETE"]) def api_delete_devices(): if not is_authorized(): - return jsonify({"error": "Forbidden"}), 403 + return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403 macs = request.json.get("macs") if request.is_json else None @@ -177,13 +192,13 @@ def api_delete_devices(): @app.route("/devices/empty-macs", methods=["DELETE"]) def api_delete_all_empty_macs(): if not is_authorized(): - return jsonify({"error": "Forbidden"}), 403 + return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403 return delete_all_with_empty_macs() @app.route("/devices/unknown", methods=["DELETE"]) def api_delete_unknown_devices(): if not is_authorized(): - return jsonify({"error": "Forbidden"}), 403 + return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403 return delete_unknown_devices() @@ -191,7 +206,7 @@ def api_delete_unknown_devices(): @app.route("/devices/export/", methods=["GET"]) def api_export_devices(format=None): if not is_authorized(): - return jsonify({"error": "Forbidden"}), 403 + return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403 export_format = (format or request.args.get("format", "csv")).lower() return export_devices(export_format) @@ -199,19 +214,19 @@ def api_export_devices(format=None): @app.route("/devices/import", methods=["POST"]) def api_import_csv(): if not is_authorized(): - return jsonify({"error": "Forbidden"}), 403 + return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403 return import_csv(request.files.get("file")) @app.route("/devices/totals", methods=["GET"]) def api_devices_totals(): if not is_authorized(): - return jsonify({"error": "Forbidden"}), 403 + return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403 return devices_totals() @app.route("/devices/by-status", methods=["GET"]) def api_devices_by_status(): if not is_authorized(): - return jsonify({"error": "Forbidden"}), 403 + return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403 status = request.args.get("status", "") if request.args else None @@ -223,7 +238,7 @@ def api_devices_by_status(): @app.route("/nettools/wakeonlan", 
methods=["POST"]) def api_wakeonlan(): if not is_authorized(): - return jsonify({"error": "Forbidden"}), 403 + return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403 mac = request.json.get("devMac") return wakeonlan(mac) @@ -231,14 +246,14 @@ def api_wakeonlan(): @app.route("/nettools/traceroute", methods=["POST"]) def api_traceroute(): if not is_authorized(): - return jsonify({"error": "Forbidden"}), 403 + return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403 ip = request.json.get("devLastIP") return traceroute(ip) @app.route("/nettools/speedtest", methods=["GET"]) def api_speedtest(): if not is_authorized(): - return jsonify({"error": "Forbidden"}), 403 + return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403 return speedtest() @app.route("/nettools/nslookup", methods=["POST"]) @@ -248,11 +263,11 @@ def api_nslookup(): Expects JSON with 'devLastIP'. """ if not is_authorized(): - return jsonify({"success": False, "error": "Forbidden"}), 403 + return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403 data = request.get_json(silent=True) if not data or "devLastIP" not in data: - return jsonify({"success": False, "error": "Missing 'devLastIP'"}), 400 + return jsonify({"success": False, "message": "Missing parameters", "error": "Missing 'devLastIP'"}), 400 ip = data["devLastIP"] return nslookup(ip) @@ -264,11 +279,11 @@ def api_nmap(): Expects JSON with 'scan' (IP address) and 'mode' (scan mode). """ if not is_authorized(): - return jsonify({"success": False, "error": "Forbidden"}), 403 + return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403 data = request.get_json(silent=True) if not data or "scan" not in data or "mode" not in data: - return jsonify({"success": False, "error": "Missing 'scan' or 'mode'"}), 400 + return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Missing 'scan' or 'mode'"}), 400 ip = data["scan"] mode = data["mode"] @@ -278,7 +293,7 @@ def api_nmap(): @app.route("/nettools/internetinfo", methods=["GET"]) def api_internet_info(): if not is_authorized(): - return jsonify({"success": False, "error": "Forbidden"}), 403 + return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403 return internet_info() @@ -289,13 +304,13 @@ def api_internet_info(): @app.route("/dbquery/read", methods=["POST"]) def dbquery_read(): if not is_authorized(): - return jsonify({"error": "Forbidden"}), 403 + return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403 data = request.get_json() or {} raw_sql_b64 = data.get("rawSql") if not raw_sql_b64: - return jsonify({"error": "rawSql is required"}), 400 + return jsonify({"success": False, "message": "Missing parameters", "error": "rawSql is required"}), 400 return read_query(raw_sql_b64) @@ -303,12 +318,12 @@ def dbquery_read(): @app.route("/dbquery/write", methods=["POST"]) def dbquery_write(): if not is_authorized(): - return jsonify({"error": "Forbidden"}), 403 + return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403 data = request.get_json() or {} raw_sql_b64 = data.get("rawSql") if not raw_sql_b64: - return jsonify({"error": "rawSql is required"}), 400 + return jsonify({"success": False, "message": "Missing parameters", "error": "rawSql is required"}), 400 return write_query(raw_sql_b64) @@ 
-316,12 +331,12 @@ def dbquery_write(): @app.route("/dbquery/update", methods=["POST"]) def dbquery_update(): if not is_authorized(): - return jsonify({"error": "Forbidden"}), 403 + return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403 data = request.get_json() or {} required = ["columnName", "id", "dbtable", "columns", "values"] if not all(data.get(k) for k in required): - return jsonify({"error": "Missing required parameters"}), 400 + return jsonify({"success": False, "message": "Missing parameters", "error": "Missing required 'columnName', 'id', 'dbtable', 'columns', or 'values' query parameter"}), 400 return update_query( column_name=data["columnName"], @@ -335,12 +350,12 @@ def dbquery_update(): @app.route("/dbquery/delete", methods=["POST"]) def dbquery_delete(): if not is_authorized(): - return jsonify({"error": "Forbidden"}), 403 + return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403 data = request.get_json() or {} required = ["columnName", "id", "dbtable"] if not all(data.get(k) for k in required): - return jsonify({"error": "Missing required parameters"}), 400 + return jsonify({"success": False, "message": "Missing parameters", "error": "Missing required 'columnName', 'id', or 'dbtable' query parameter"}), 400 return delete_query( column_name=data["columnName"], @@ -355,9 +370,46 @@ def dbquery_delete(): @app.route("/history", methods=["DELETE"]) def api_delete_online_history(): if not is_authorized(): - return jsonify({"error": "Forbidden"}), 403 + return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403 return delete_online_history() +# -------------------------- +# Logs +# -------------------------- + +@app.route("/logs", methods=["DELETE"]) +def api_clean_log(): + if not is_authorized(): + return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403 + + file = request.args.get("file") + if not file: + return jsonify({"success": False, "message": "Missing parameters", "error": "Missing 'file' query parameter"}), 400 + + return clean_log(file) + +@app.route("/logs/add-to-execution-queue", methods=["POST"]) +def api_add_to_execution_queue(): + queue = UserEventsQueueInstance() + + # Get JSON payload safely + data = request.get_json(silent=True) or {} + action = data.get("action") + + if not action: + return jsonify({ + "success": False, "message": "Missing parameters", "error": "Missing required 'action' field in JSON body"}), 400 + + success, message = queue.add_event(action) + status_code = 200 if success else 400 + + response = {"success": success, "message": message} + if not success: + response["error"] = "ERROR" + + return jsonify(response), status_code + + # -------------------------- # Device Events # -------------------------- @@ -365,7 +417,7 @@ def api_delete_online_history(): @app.route("/events/create/", methods=["POST"]) def api_create_event(mac): if not is_authorized(): - return jsonify({"error": "Forbidden"}), 403 + return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403 data = request.json or {} ip = data.get("ip", "0.0.0.0") @@ -384,19 +436,19 @@ def api_create_event(mac): @app.route("/events/", methods=["DELETE"]) def api_events_by_mac(mac): if not is_authorized(): - return jsonify({"error": "Forbidden"}), 403 + return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403 return delete_device_events(mac) 
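# NOTE (editorial sketch, not part of this patch): the standardized failure
# payload ("success" / "message" / "error") returned on unauthorized access is
# repeated verbatim in the handlers above and below. A helper along these
# lines could centralize it; the name `forbidden_response` is hypothetical.
from flask import jsonify

def forbidden_response():
    """Return the standardized 403 payload used across the API routes."""
    return jsonify({
        "success": False,
        "message": "ERROR: Not authorized",
        "error": "Forbidden",
    }), 403

# Usage inside a route handler:
#     if not is_authorized():
#         return forbidden_response()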
@app.route("/events", methods=["DELETE"]) def api_delete_all_events(): if not is_authorized(): - return jsonify({"error": "Forbidden"}), 403 + return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403 return delete_events() @app.route("/events", methods=["GET"]) def api_get_events(): if not is_authorized(): - return jsonify({"error": "Forbidden"}), 403 + return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403 mac = request.args.get("mac") return get_events(mac) @@ -408,14 +460,14 @@ def api_delete_old_events(days: int): Example: DELETE /events/30 """ if not is_authorized(): - return jsonify({"error": "Forbidden"}), 403 + return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403 return delete_events_older_than(days) @app.route("/sessions/totals", methods=["GET"]) def api_get_events_totals(): if not is_authorized(): - return jsonify({"error": "Forbidden"}), 403 + return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403 period = get_date_from_period(request.args.get("period", "7 days")) return get_events_totals(period) @@ -427,7 +479,7 @@ def api_get_events_totals(): @app.route("/sessions/create", methods=["POST"]) def api_create_session(): if not is_authorized(): - return jsonify({"error": "Forbidden"}), 403 + return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403 data = request.json mac = data.get("mac") @@ -438,7 +490,7 @@ def api_create_session(): event_type_disc = data.get("event_type_disc", "Disconnected") if not mac or not ip or not start_time: - return jsonify({"success": False, "error": "Missing required parameters"}), 400 + return jsonify({"success": False, "message": "Missing parameters", "error": "Missing required 'mac', 'ip', or 'start_time' query parameter"}), 400 return create_session(mac, ip, start_time, end_time, event_type_conn, event_type_disc) @@ -446,11 +498,11 @@ def api_create_session(): @app.route("/sessions/delete", methods=["DELETE"]) def api_delete_session(): if not is_authorized(): - return jsonify({"error": "Forbidden"}), 403 + return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403 mac = request.json.get("mac") if request.is_json else None if not mac: - return jsonify({"success": False, "error": "Missing MAC parameter"}), 400 + return jsonify({"success": False, "message": "Missing parameters", "error": "Missing 'mac' query parameter"}), 400 return delete_session(mac) @@ -458,7 +510,7 @@ def api_delete_session(): @app.route("/sessions/list", methods=["GET"]) def api_get_sessions(): if not is_authorized(): - return jsonify({"error": "Forbidden"}), 403 + return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403 mac = request.args.get("mac") start_date = request.args.get("start_date") @@ -469,7 +521,7 @@ def api_get_sessions(): @app.route("/sessions/calendar", methods=["GET"]) def api_get_sessions_calendar(): if not is_authorized(): - return jsonify({"error": "Forbidden"}), 403 + return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403 # Query params: /sessions/calendar?start=2025-08-01&end=2025-08-21 start_date = request.args.get("start") @@ -480,7 +532,7 @@ def api_get_sessions_calendar(): @app.route("/sessions/", methods=["GET"]) def api_device_sessions(mac): if not is_authorized(): - return jsonify({"error": "Forbidden"}), 403 + return 
jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403 period = request.args.get("period", "1 day") return get_device_sessions(mac, period) @@ -488,7 +540,7 @@ def api_device_sessions(mac): @app.route("/sessions/session-events", methods=["GET"]) def api_get_session_events(): if not is_authorized(): - return jsonify({"error": "Forbidden"}), 403 + return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403 session_event_type = request.args.get("type", "all") period = get_date_from_period(request.args.get("period", "7 days")) @@ -500,7 +552,7 @@ def api_get_session_events(): @app.route("/metrics") def metrics(): if not is_authorized(): - return jsonify({"error": "Forbidden"}), 403 + return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403 # Return Prometheus metrics as plain text return Response(get_metric_stats(), mimetype="text/plain") @@ -511,14 +563,14 @@ def metrics(): @app.route("/messaging/in-app/write", methods=["POST"]) def api_write_notification(): if not is_authorized(): - return jsonify({"error": "Forbidden"}), 403 + return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403 data = request.json or {} content = data.get("content") level = data.get("level", "alert") if not content: - return jsonify({"success": False, "error": "Missing content"}), 400 + return jsonify({"success": False, "message": "Missing parameters", "error": "Missing content"}), 400 write_notification(content, level) return jsonify({"success": True}) @@ -526,21 +578,21 @@ def api_write_notification(): @app.route("/messaging/in-app/unread", methods=["GET"]) def api_get_unread_notifications(): if not is_authorized(): - return jsonify({"error": "Forbidden"}), 403 + return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403 return get_unread_notifications() @app.route("/messaging/in-app/read/all", methods=["POST"]) def api_mark_all_notifications_read(): if not is_authorized(): - return jsonify({"error": "Forbidden"}), 403 + return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403 return jsonify(mark_all_notifications_read()) @app.route("/messaging/in-app/delete", methods=["DELETE"]) def api_delete_all_notifications(): if not is_authorized(): - return jsonify({"error": "Forbidden"}), 403 + return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403 return delete_notifications() @@ -548,25 +600,25 @@ def api_delete_all_notifications(): def api_delete_notification(guid): """Delete a single notification by GUID.""" if not is_authorized(): - return jsonify({"error": "Forbidden"}), 403 + return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403 result = delete_notification(guid) if result.get("success"): return jsonify({"success": True}) else: - return jsonify({"success": False, "error": result.get("error")}), 500 + return jsonify({"success": False, "message": "ERROR", "error": result.get("error")}), 500 @app.route("/messaging/in-app/read/", methods=["POST"]) def api_mark_notification_read(guid): """Mark a single notification as read by GUID.""" if not is_authorized(): - return jsonify({"error": "Forbidden"}), 403 + return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403 result = mark_notification_as_read(guid) if result.get("success"): return jsonify({"success": True}) else: 
- return jsonify({"success": False, "error": result.get("error")}), 500 + return jsonify({"success": False, "message": "ERROR", "error": result.get("error")}), 500 # -------------------------- # SYNC endpoint @@ -574,7 +626,7 @@ def api_mark_notification_read(guid): @app.route("/sync", methods=["GET", "POST"]) def sync_endpoint(): if not is_authorized(): - return jsonify({"error": "Forbidden"}), 403 + return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403 if request.method == "GET": return handle_sync_get() @@ -584,7 +636,7 @@ def sync_endpoint(): msg = "[sync endpoint] Method Not Allowed" write_notification(msg, "alert") mylog("verbose", [msg]) - return jsonify({"error": "Method Not Allowed"}), 405 + return jsonify({"success": False, "message": "ERROR: No allowed", "error": "Method Not Allowed"}), 405 # -------------------------- # Background Server Start diff --git a/server/api_server/graphql_endpoint.py b/server/api_server/graphql_endpoint.py index 572c56ec..be675f91 100755 --- a/server/api_server/graphql_endpoint.py +++ b/server/api_server/graphql_endpoint.py @@ -1,7 +1,8 @@ import graphene -from graphene import ObjectType, String, Int, Boolean, List, Field, InputObjectType +from graphene import ObjectType, String, Int, Boolean, List, Field, InputObjectType, Argument import json import sys +import os # Register NetAlertX directories INSTALL_PATH="/app" @@ -102,6 +103,23 @@ class SettingResult(ObjectType): settings = List(Setting) count = Int() +# --- LANGSTRINGS --- + +# In-memory cache for lang strings +_langstrings_cache = {} # caches lists per file (core JSON or plugin) +_langstrings_cache_mtime = {} # tracks last modified times + +# LangString ObjectType +class LangString(ObjectType): + langCode = String() + langStringKey = String() + langStringText = String() + + +class LangStringResult(ObjectType): + langStrings = List(LangString) + count = Int() + # Define Query Type with Pagination Support class Query(ObjectType): @@ -258,6 +276,107 @@ class Query(ObjectType): return SettingResult(settings=settings, count=len(settings)) + # --- LANGSTRINGS --- + langStrings = Field( + LangStringResult, + langCode=Argument(String, required=False), + langStringKey=Argument(String, required=False) + ) + + def resolve_langStrings(self, info, langCode=None, langStringKey=None, fallback_to_en=True): + """ + Collect language strings, optionally filtered by language code and/or string key. + Caches in memory for performance. Can fallback to 'en_us' if a string is missing. 
+ """ + global _langstrings_cache, _langstrings_cache_mtime + + langStrings = [] + + # --- CORE JSON FILES --- + language_folder = '/app/front/php/templates/language/' + if os.path.exists(language_folder): + for filename in os.listdir(language_folder): + if filename.endswith('.json'): + file_lang_code = filename.replace('.json', '') + + # Filter by langCode if provided + if langCode and file_lang_code != langCode: + continue + + file_path = os.path.join(language_folder, filename) + file_mtime = os.path.getmtime(file_path) + cache_key = f'core_{file_lang_code}' + + # Use cached data if available and not modified + if cache_key in _langstrings_cache_mtime and _langstrings_cache_mtime[cache_key] == file_mtime: + lang_list = _langstrings_cache[cache_key] + else: + try: + with open(file_path, 'r', encoding='utf-8') as f: + data = json.load(f) + lang_list = [ + LangString( + langCode=file_lang_code, + langStringKey=key, + langStringText=value + ) for key, value in data.items() + ] + _langstrings_cache[cache_key] = lang_list + _langstrings_cache_mtime[cache_key] = file_mtime + except (FileNotFoundError, json.JSONDecodeError) as e: + mylog('none', f'[graphql_schema] Error loading core language strings from {filename}: {e}') + lang_list = [] + + langStrings.extend(lang_list) + + # --- PLUGIN STRINGS --- + plugin_file = folder + 'table_plugins_language_strings.json' + try: + file_mtime = os.path.getmtime(plugin_file) + cache_key = 'plugin' + if cache_key in _langstrings_cache_mtime and _langstrings_cache_mtime[cache_key] == file_mtime: + plugin_list = _langstrings_cache[cache_key] + else: + with open(plugin_file, 'r', encoding='utf-8') as f: + plugin_data = json.load(f).get("data", []) + plugin_list = [ + LangString( + langCode=entry.get("Language_Code"), + langStringKey=entry.get("String_Key"), + langStringText=entry.get("String_Value") + ) for entry in plugin_data + ] + _langstrings_cache[cache_key] = plugin_list + _langstrings_cache_mtime[cache_key] = file_mtime + except (FileNotFoundError, json.JSONDecodeError) as e: + mylog('none', f'[graphql_schema] Error loading plugin language strings from {plugin_file}: {e}') + plugin_list = [] + + # Filter plugin strings by langCode if provided + if langCode: + plugin_list = [p for p in plugin_list if p.langCode == langCode] + + langStrings.extend(plugin_list) + + # --- Filter by string key if requested --- + if langStringKey: + langStrings = [ls for ls in langStrings if ls.langStringKey == langStringKey] + + # --- Fallback to en_us if enabled and requested lang is missing --- + if fallback_to_en and langCode and langCode != "en_us": + for i, ls in enumerate(langStrings): + if not ls.langStringText: # empty string triggers fallback + # try to get en_us version + en_list = _langstrings_cache.get("core_en_us", []) + en_list += [p for p in _langstrings_cache.get("plugin", []) if p.langCode == "en_us"] + en_fallback = [e for e in en_list if e.langStringKey == ls.langStringKey] + if en_fallback: + langStrings[i] = en_fallback[0] + + mylog('trace', f'[graphql_schema] Collected {len(langStrings)} language strings ' + f'(langCode={langCode}, key={langStringKey}, fallback_to_en={fallback_to_en})') + + return LangStringResult(langStrings=langStrings, count=len(langStrings)) # helps sorting inconsistent dataset mixed integers and strings diff --git a/server/api_server/logs_endpoint.py b/server/api_server/logs_endpoint.py new file mode 100644 index 00000000..4d76cefa --- /dev/null +++ b/server/api_server/logs_endpoint.py @@ -0,0 +1,58 @@ +import os +import sys +from 
flask import jsonify + +# Register NetAlertX directories +INSTALL_PATH="/app" +sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"]) + +from const import logPath +from logger import mylog, Logger +from helper import get_setting_value +from utils.datetime_utils import timeNowDB +from messaging.in_app import write_notification + +# Make sure log level is initialized correctly +Logger(get_setting_value('LOG_LEVEL')) + +def clean_log(log_file): + """ + Purge the content of an allowed log file within the /app/log/ directory. + + Args: + log_file (str): Name of the log file to purge. + + Returns: + flask.Response: JSON response with success and message keys + """ + allowed_files = [ + 'app.log', 'app_front.log', 'IP_changes.log', 'stdout.log', 'stderr.log', + 'app.php_errors.log', 'execution_queue.log', 'db_is_locked.log' + ] + + # Validate filename if purging allowed + if log_file not in allowed_files: + msg = f"[clean_log] File {log_file} is not allowed to be purged" + + mylog('none', [msg]) + write_notification(msg, 'interrupt') + return jsonify({"success": False, "message": msg}), 400 + + log_path = os.path.join(logPath, log_file) + + try: + # Purge content + with open(log_path, "w") as f: + f.write("File manually purged\n") + msg = f"[clean_log] File {log_file} purged successfully" + + mylog('minimal', [msg]) + write_notification(msg, 'interrupt') + return jsonify({"success": True, "message": msg}), 200 + except Exception as e: + msg = f"[clean_log] ERROR Failed to purge {log_file}: {e}" + + mylog('none', []) + write_notification(msg) + return jsonify({"success": False, "message": msg}), 200 + diff --git a/server/initialise.py b/server/initialise.py index f0fb0237..013468f2 100755 --- a/server/initialise.py +++ b/server/initialise.py @@ -19,9 +19,9 @@ from logger import mylog from api import update_api from scheduler import schedule_class from plugin import plugin_manager, print_plugin_info -from plugin_utils import get_plugins_configs, get_set_value_for_init +from utils.plugin_utils import get_plugins_configs, get_set_value_for_init from messaging.in_app import write_notification -from crypto_utils import get_random_bytes +from utils.crypto_utils import get_random_bytes #=============================================================================== # Initialise user defined values diff --git a/server/models/user_events_queue_instance.py b/server/models/user_events_queue_instance.py index 9d03eef4..85dee8b8 100755 --- a/server/models/user_events_queue_instance.py +++ b/server/models/user_events_queue_instance.py @@ -1,5 +1,6 @@ import os import sys +import uuid # Register NetAlertX directories INSTALL_PATH="/app" @@ -8,6 +9,7 @@ sys.path.extend([f"{INSTALL_PATH}/server"]) # Register NetAlertX modules from const import pluginsPath, logPath, applicationPath, reportTemplatesPath from logger import mylog +from utils.datetime_utils import timeNowDB class UserEventsQueueInstance: """ @@ -81,5 +83,43 @@ class UserEventsQueueInstance: return removed + def add_event(self, action): + """ + Append an action to the execution queue log file. + + Args: + action (str): Description of the action to queue. + + Returns: + tuple: (success: bool, message: str) + success - True if the event was successfully added. + message - Log message describing the result. 
+ """ + timestamp = timeNowDB() + # Generate GUID + guid = str(uuid.uuid4()) + + if not action or not isinstance(action, str): + msg = "[UserEventsQueueInstance] Invalid or missing action" + mylog('none', [msg]) + + return False, msg + + try: + with open(self.log_file, "a") as f: + f.write(f"[{timestamp}]|{guid}|{action}\n") + + msg = f'[UserEventsQueueInstance] Action "{action}" added to the execution queue.' + mylog('minimal', [msg]) + + return True, msg + + except Exception as e: + msg = f"[UserEventsQueueInstance] ERROR Failed to write to {self.log_file}: {e}" + mylog('none', [msg]) + + return False, msg + + diff --git a/server/plugin.py b/server/plugin.py index 81075a3f..e21011f3 100755 --- a/server/plugin.py +++ b/server/plugin.py @@ -16,11 +16,11 @@ from helper import get_file_content, write_file, get_setting, get_setting_value from utils.datetime_utils import timeNowTZ, timeNowDB from app_state import updateState from api import update_api -from plugin_utils import logEventStatusCounts, get_plugin_string, get_plugin_setting_obj, print_plugin_info, list_to_csv, combine_plugin_objects, resolve_wildcards_arr, handle_empty, custom_plugin_decoder, decode_and_rename_files +from utils.plugin_utils import logEventStatusCounts, get_plugin_string, get_plugin_setting_obj, print_plugin_info, list_to_csv, combine_plugin_objects, resolve_wildcards_arr, handle_empty, custom_plugin_decoder, decode_and_rename_files from models.notification_instance import NotificationInstance from messaging.in_app import write_notification from models.user_events_queue_instance import UserEventsQueueInstance -from crypto_utils import generate_deterministic_guid +from utils.crypto_utils import generate_deterministic_guid #------------------------------------------------------------------------------- class plugin_manager: diff --git a/server/crypto_utils.py b/server/utils/crypto_utils.py old mode 100755 new mode 100644 similarity index 100% rename from server/crypto_utils.py rename to server/utils/crypto_utils.py diff --git a/server/plugin_utils.py b/server/utils/plugin_utils.py old mode 100755 new mode 100644 similarity index 99% rename from server/plugin_utils.py rename to server/utils/plugin_utils.py index 2e92ff38..9b76d4b7 --- a/server/plugin_utils.py +++ b/server/utils/plugin_utils.py @@ -6,7 +6,7 @@ from logger import mylog from const import pluginsPath, logPath, apiPath from helper import get_file_content, write_file, get_setting, get_setting_value, setting_value_to_python_type from app_state import updateState -from crypto_utils import decrypt_data, generate_deterministic_guid +from utils.crypto_utils import decrypt_data, generate_deterministic_guid module_name = 'Plugin utils' diff --git a/test/api_endpoints/test_graphq_endpoints.py b/test/api_endpoints/test_graphq_endpoints.py index cc5e2076..262a62bf 100644 --- a/test/api_endpoints/test_graphq_endpoints.py +++ b/test/api_endpoints/test_graphq_endpoints.py @@ -44,6 +44,8 @@ def test_graphql_post_unauthorized(client): assert resp.status_code == 401 assert "Unauthorized access attempt" in resp.json.get("error", "") +# --- DEVICES TESTS --- + def test_graphql_post_devices(client, api_token): """POST /graphql with a valid token should return device data""" query = { @@ -74,6 +76,8 @@ def test_graphql_post_devices(client, api_token): assert isinstance(data["devices"]["devices"], list) assert isinstance(data["devices"]["count"], int) +# --- SETTINGS TESTS --- + def test_graphql_post_settings(client, api_token): """POST /graphql should return settings data""" 
query = { @@ -91,3 +95,76 @@ def test_graphql_post_settings(client, api_token): data = resp.json.get("data", {}) assert "settings" in data assert isinstance(data["settings"]["settings"], list) + +# --- LANGSTRINGS TESTS --- + +def test_graphql_post_langstrings_specific(client, api_token): + """Retrieve a specific langString in a given language""" + query = { + "query": """ + { + langStrings(langCode: "en_us", langStringKey: "settings_other_scanners") { + langStrings { langCode langStringKey langStringText } + count + } + } + """ + } + resp = client.post("/graphql", json=query, headers=auth_headers(api_token)) + assert resp.status_code == 200 + data = resp.json.get("data", {}).get("langStrings", {}) + assert data["count"] >= 1 + for entry in data["langStrings"]: + assert entry["langCode"] == "en_us" + assert entry["langStringKey"] == "settings_other_scanners" + assert isinstance(entry["langStringText"], str) + + +def test_graphql_post_langstrings_fallback(client, api_token): + """Fallback to en_us if requested language string is empty""" + query = { + "query": """ + { + langStrings(langCode: "de_de", langStringKey: "settings_other_scanners") { + langStrings { langCode langStringKey langStringText } + count + } + } + """ + } + resp = client.post("/graphql", json=query, headers=auth_headers(api_token)) + assert resp.status_code == 200 + data = resp.json.get("data", {}).get("langStrings", {}) + assert data["count"] >= 1 + # Ensure fallback occurred if de_de text is empty + for entry in data["langStrings"]: + assert entry["langStringText"] != "" + + +def test_graphql_post_langstrings_all_languages(client, api_token): + """Retrieve all languages for a given key""" + query = { + "query": """ + { + enStrings: langStrings(langCode: "en_us", langStringKey: "settings_other_scanners") { + langStrings { langCode langStringKey langStringText } + count + } + deStrings: langStrings(langCode: "de_de", langStringKey: "settings_other_scanners") { + langStrings { langCode langStringKey langStringText } + count + } + } + """ + } + resp = client.post("/graphql", json=query, headers=auth_headers(api_token)) + assert resp.status_code == 200 + data = resp.json.get("data", {}) + assert "enStrings" in data + assert "deStrings" in data + # At least one string in each language + assert data["enStrings"]["count"] >= 1 + assert data["deStrings"]["count"] >= 1 + # Ensure langCode matches + assert all(e["langCode"] == "en_us" for e in data["enStrings"]["langStrings"]) + assert all(e["langCode"] == "de_de" for e in data["deStrings"]["langStrings"]) \ No newline at end of file diff --git a/test/api_endpoints/test_logs_endpoints.py b/test/api_endpoints/test_logs_endpoints.py new file mode 100644 index 00000000..cd62fd17 --- /dev/null +++ b/test/api_endpoints/test_logs_endpoints.py @@ -0,0 +1,61 @@ +import sys +import random +import pytest + +INSTALL_PATH = "/app" +sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"]) + +from helper import get_setting_value +from api_server.api_server_start import app + +# ---------------------------- +# Fixtures +# ---------------------------- +@pytest.fixture(scope="session") +def api_token(): + return get_setting_value("API_TOKEN") + +@pytest.fixture +def client(): + with app.test_client() as client: + yield client + +def auth_headers(token): + return {"Authorization": f"Bearer {token}"} + +# ---------------------------- +# Logs Endpoint Tests +# ---------------------------- +def test_clean_log(client, api_token): + resp = client.delete("/logs?file=app.log", 
headers=auth_headers(api_token)) + assert resp.status_code == 200 + assert resp.json.get("success") is True + +def test_clean_log_not_allowed(client, api_token): + resp = client.delete("/logs?file=not_allowed.log", headers=auth_headers(api_token)) + assert resp.status_code == 400 + assert resp.json.get("success") is False + +# ---------------------------- +# Execution Queue Endpoint Tests +# ---------------------------- +def test_add_to_execution_queue(client, api_token): + action_name = f"test_action_{random.randint(0,9999)}" + resp = client.post( + "/logs/add-to-execution-queue", + json={"action": action_name}, + headers=auth_headers(api_token) + ) + assert resp.status_code == 200 + assert resp.json.get("success") is True + assert action_name in resp.json.get("message", "") + +def test_add_to_execution_queue_missing_action(client, api_token): + resp = client.post( + "/logs/add-to-execution-queue", + json={}, + headers=auth_headers(api_token) + ) + assert resp.status_code == 400 + assert resp.json.get("success") is False + assert "Missing required 'action'" in resp.json.get("error", "") From 6d70a8a71dde511658c8ff21b83dfa8ae246c124 Mon Sep 17 00:00:00 2001 From: jokob-sk Date: Mon, 10 Nov 2025 07:58:21 +1100 Subject: [PATCH 30/31] BE: /logs endpoint, comments resolution, github template Signed-off-by: jokob-sk --- .github/ISSUE_TEMPLATE/i-have-an-issue.yml | 24 ++++++++++++++-------- front/plugins/sync/sync.py | 6 ++++-- server/api_server/api_server_start.py | 4 ++++ server/api_server/logs_endpoint.py | 6 +++--- 4 files changed, 27 insertions(+), 13 deletions(-) diff --git a/.github/ISSUE_TEMPLATE/i-have-an-issue.yml b/.github/ISSUE_TEMPLATE/i-have-an-issue.yml index 068c170a..49c21793 100755 --- a/.github/ISSUE_TEMPLATE/i-have-an-issue.yml +++ b/.github/ISSUE_TEMPLATE/i-have-an-issue.yml @@ -46,7 +46,7 @@ body: attributes: label: app.conf description: | - Paste your `app.conf` (remove personal info) + Paste relevant `app.conf` settings (remove sensitive info) render: python validations: required: false @@ -70,6 +70,13 @@ body: - Bare-metal (community only support - Check Discord) validations: required: true +- type: checkboxes + attributes: + label: Debug or Trace enabled + description: I confirm I set `LOG_LEVEL` to `debug` or `trace` + options: + - label: I have read and followed the steps in the wiki link above and provided the required debug logs and the log section covers the time when the issue occurs. + required: true - type: textarea attributes: label: app.log @@ -78,13 +85,14 @@ body: description: | ***Generally speaking, all bug reports should have logs provided.*** Tip: You can attach images or log files by clicking this area to highlight it and then dragging files in. Additionally, any additional info? Screenshots? References? Anything that will give us more context about the issue you are encountering! - You can use `tail -100 /app/log/app.log` in the container if you have trouble getting to the log files. + You can use `tail -100 /app/log/app.log` in the container if you have trouble getting to the log files or send them to netalertx@gmail.com with the issue number. validations: required: false -- type: checkboxes attributes: - label: Debug enabled - description: I confirm I enabled `debug` - options: - - label: I have read and followed the steps in the wiki link above and provided the required debug logs and the log section covers the time when the issue occurs.
- required: true + label: Docker Logs + description: | + You can retrieve the logs from Portainer -> Containers -> your NetAlertX container -> Logs or by running `sudo docker logs netalertx`. + validations: + required: true + diff --git a/front/plugins/sync/sync.py b/front/plugins/sync/sync.py index 2e804b0d..e47a4682 100755 --- a/front/plugins/sync/sync.py +++ b/front/plugins/sync/sync.py @@ -181,8 +181,10 @@ def main(): # make sure the file has the correct name (e.g last_result.encoded.Node_1.1.log) to skip any otehr plugin files if len(file_name.split('.')) > 2: - # Store e.g. Node_1 from last_result.encoded.Node_1.1.log - syncHubNodeName = file_name.split('.')[1] + # Extract node name from either last_result.decoded.Node_1.1.log or last_result.Node_1.log + parts = file_name.split('.') + # If decoded/encoded file, node name is at index 2; otherwise at index 1 + syncHubNodeName = parts[2] if 'decoded' in file_name or 'encoded' in file_name else parts[1] file_path = f"{LOG_PATH}/{file_name}" diff --git a/server/api_server/api_server_start.py b/server/api_server/api_server_start.py index fbf944f5..9b516f13 100755 --- a/server/api_server/api_server_start.py +++ b/server/api_server/api_server_start.py @@ -390,6 +390,10 @@ def api_clean_log(): @app.route("/logs/add-to-execution-queue", methods=["POST"]) def api_add_to_execution_queue(): + + if not is_authorized(): + return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403 + queue = UserEventsQueueInstance() # Get JSON payload safely diff --git a/server/api_server/logs_endpoint.py b/server/api_server/logs_endpoint.py index 4d76cefa..120644b7 100644 --- a/server/api_server/logs_endpoint.py +++ b/server/api_server/logs_endpoint.py @@ -52,7 +52,7 @@ def clean_log(log_file): except Exception as e: msg = f"[clean_log] ERROR Failed to purge {log_file}: {e}" - mylog('none', []) - write_notification(msg) - return jsonify({"success": False, "message": msg}), 200 + mylog('none', [msg]) + write_notification(msg, 'interrupt') + return jsonify({"success": False, "message": msg}), 500 From 9f6086e5cfa7da42f762fccadaa8c10a55c8aa91 Mon Sep 17 00:00:00 2001 From: jokob-sk Date: Mon, 10 Nov 2025 09:27:13 +1100 Subject: [PATCH 31/31] BE: better error message Signed-off-by: jokob-sk --- server/api_server/api_server_start.py | 26 +++++++++++++------------- 1 file changed, 13 insertions(+), 13 deletions(-) diff --git a/server/api_server/api_server_start.py b/server/api_server/api_server_start.py index 9b516f13..27a7813e 100755 --- a/server/api_server/api_server_start.py +++ b/server/api_server/api_server_start.py @@ -152,7 +152,7 @@ def api_copy_device(): mac_to = data.get("macTo") if not mac_from or not mac_to: - return jsonify({"success": False, "message": "Missing parameters", "error": "macFrom and macTo are required"}), 400 + return jsonify({"success": False, "message": "ERROR: Missing parameters", "error": "macFrom and macTo are required"}), 400 return copy_device(mac_from, mac_to) @@ -166,7 +166,7 @@ def api_update_device_column(mac): column_value = data.get("columnValue") if not column_name or not column_value: - return jsonify({"success": False, "message": "Missing parameters", "error": "columnName and columnValue are required"}), 400 + return jsonify({"success": False, "message": "ERROR: Missing parameters", "error": "columnName and columnValue are required"}), 400 return update_device_column(mac, column_name, column_value) @@ -267,7 +267,7 @@ def api_nslookup(): data = request.get_json(silent=True) if not data or 
"devLastIP" not in data: - return jsonify({"success": False, "message": "Missing parameters", "error": "Missing 'devLastIP'"}), 400 + return jsonify({"success": False, "message": "ERROR: Missing parameters", "error": "Missing 'devLastIP'"}), 400 ip = data["devLastIP"] return nslookup(ip) @@ -283,7 +283,7 @@ def api_nmap(): data = request.get_json(silent=True) if not data or "scan" not in data or "mode" not in data: - return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Missing 'scan' or 'mode'"}), 400 + return jsonify({"success": False, "message": "ERROR: Missing parameters", "error": "Missing 'scan' or 'mode'"}), 400 ip = data["scan"] mode = data["mode"] @@ -310,7 +310,7 @@ def dbquery_read(): raw_sql_b64 = data.get("rawSql") if not raw_sql_b64: - return jsonify({"success": False, "message": "Missing parameters", "error": "rawSql is required"}), 400 + return jsonify({"success": False, "message": "ERROR: Missing parameters", "error": "rawSql is required"}), 400 return read_query(raw_sql_b64) @@ -323,7 +323,7 @@ def dbquery_write(): data = request.get_json() or {} raw_sql_b64 = data.get("rawSql") if not raw_sql_b64: - return jsonify({"success": False, "message": "Missing parameters", "error": "rawSql is required"}), 400 + return jsonify({"success": False, "message": "ERROR: Missing parameters", "error": "rawSql is required"}), 400 return write_query(raw_sql_b64) @@ -336,7 +336,7 @@ def dbquery_update(): data = request.get_json() or {} required = ["columnName", "id", "dbtable", "columns", "values"] if not all(data.get(k) for k in required): - return jsonify({"success": False, "message": "Missing parameters", "error": "Missing required 'columnName', 'id', 'dbtable', 'columns', or 'values' query parameter"}), 400 + return jsonify({"success": False, "message": "ERROR: Missing parameters", "error": "Missing required 'columnName', 'id', 'dbtable', 'columns', or 'values' query parameter"}), 400 return update_query( column_name=data["columnName"], @@ -355,7 +355,7 @@ def dbquery_delete(): data = request.get_json() or {} required = ["columnName", "id", "dbtable"] if not all(data.get(k) for k in required): - return jsonify({"success": False, "message": "Missing parameters", "error": "Missing required 'columnName', 'id', or 'dbtable' query parameter"}), 400 + return jsonify({"success": False, "message": "ERROR: Missing parameters", "error": "Missing required 'columnName', 'id', or 'dbtable' query parameter"}), 400 return delete_query( column_name=data["columnName"], @@ -384,7 +384,7 @@ def api_clean_log(): file = request.args.get("file") if not file: - return jsonify({"success": False, "message": "Missing parameters", "error": "Missing 'file' query parameter"}), 400 + return jsonify({"success": False, "message": "ERROR: Missing parameters", "error": "Missing 'file' query parameter"}), 400 return clean_log(file) @@ -402,7 +402,7 @@ def api_add_to_execution_queue(): if not action: return jsonify({ - "success": False, "message": "Missing parameters", "error": "Missing required 'action' field in JSON body"}), 400 + "success": False, "message": "ERROR: Missing parameters", "error": "Missing required 'action' field in JSON body"}), 400 success, message = queue.add_event(action) status_code = 200 if success else 400 @@ -494,7 +494,7 @@ def api_create_session(): event_type_disc = data.get("event_type_disc", "Disconnected") if not mac or not ip or not start_time: - return jsonify({"success": False, "message": "Missing parameters", "error": "Missing required 'mac', 'ip', or 
'start_time' query parameter"}), 400 + return jsonify({"success": False, "message": "ERROR: Missing parameters", "error": "Missing required 'mac', 'ip', or 'start_time' query parameter"}), 400 return create_session(mac, ip, start_time, end_time, event_type_conn, event_type_disc) @@ -506,7 +506,7 @@ def api_delete_session(): mac = request.json.get("mac") if request.is_json else None if not mac: - return jsonify({"success": False, "message": "Missing parameters", "error": "Missing 'mac' query parameter"}), 400 + return jsonify({"success": False, "message": "ERROR: Missing parameters", "error": "Missing 'mac' query parameter"}), 400 return delete_session(mac) @@ -574,7 +574,7 @@ def api_write_notification(): level = data.get("level", "alert") if not content: - return jsonify({"success": False, "message": "Missing parameters", "error": "Missing content"}), 400 + return jsonify({"success": False, "message": "ERROR: Missing parameters", "error": "Missing content"}), 400 write_notification(content, level) return jsonify({"success": True})
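
To exercise the new langStrings GraphQL field end to end (outside the pytest suite above), a minimal client-side sketch follows. BASE_URL and API_TOKEN are placeholders for a running NetAlertX API server and its API_TOKEN setting, and the requests package is assumed to be available in the client environment; none of this is part of the patch itself.

# Minimal sketch (not part of the patch): query the new langStrings field.
import requests

BASE_URL = "http://localhost:20212"   # placeholder; adjust to your API server address
API_TOKEN = "YOUR_API_TOKEN"          # placeholder; value of the API_TOKEN setting

QUERY = """
{
  langStrings(langCode: "en_us", langStringKey: "settings_other_scanners") {
    langStrings { langCode langStringKey langStringText }
    count
  }
}
"""

resp = requests.post(
    f"{BASE_URL}/graphql",
    json={"query": QUERY},
    headers={"Authorization": f"Bearer {API_TOKEN}"},
    timeout=10,
)
resp.raise_for_status()

# Response shape matches the tests above: {"data": {"langStrings": {...}}}
result = resp.json()["data"]["langStrings"]
print(result["count"], "strings returned")
for entry in result["langStrings"]:
    print(entry["langStringKey"], "->", entry["langStringText"])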
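The same placeholder conventions apply to the new /logs and /logs/add-to-execution-queue routes; the sketch below mirrors the pytest cases over plain HTTP instead of the Flask test client, and again assumes requests plus a valid token.

# Minimal sketch (not part of the patch): hit the new log endpoints over HTTP.
import requests

BASE_URL = "http://localhost:20212"                      # placeholder
HEADERS = {"Authorization": "Bearer YOUR_API_TOKEN"}     # placeholder token

# Purge an allowed log file (clean_log() rejects names outside its allow-list).
# Note: this truncates the file on the target instance.
resp = requests.delete(
    f"{BASE_URL}/logs",
    params={"file": "app.log"},
    headers=HEADERS,
    timeout=10,
)
print(resp.status_code, resp.json())   # expect 200 and {"success": true, ...}

# Queue a user event; the server appends "[timestamp]|guid|action" to execution_queue.log.
resp = requests.post(
    f"{BASE_URL}/logs/add-to-execution-queue",
    json={"action": "test_action_123"},
    headers=HEADERS,
    timeout=10,
)
print(resp.status_code, resp.json())   # expect 200 and the queue confirmation message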
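The sync.py node-name change is easiest to sanity-check in isolation; the helper below is hypothetical (not part of the patch) and simply restates the split logic against the two filename shapes named in the updated comment.

# Minimal sketch (not part of the patch) of the node-name extraction logic above.
def extract_node_name(file_name: str):
    parts = file_name.split('.')
    if len(parts) <= 2:
        return None  # not a per-node result file
    # decoded/encoded files carry the node name at index 2, plain ones at index 1
    return parts[2] if 'decoded' in file_name or 'encoded' in file_name else parts[1]

assert extract_node_name("last_result.encoded.Node_1.1.log") == "Node_1"
assert extract_node_name("last_result.decoded.Node_1.1.log") == "Node_1"
assert extract_node_name("last_result.Node_1.log") == "Node_1"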