BE: mylog() better code readability

Signed-off-by: jokob-sk <jokob.sk@gmail.com>
jokob-sk
2025-11-25 07:54:17 +11:00
parent fa9fc2c8e3
commit 139447b253
26 changed files with 95 additions and 431 deletions
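
The diff hunks below all apply the same pattern: multi-line mylog() calls are collapsed onto a single line and, where the list wrapper held only one string, the message is passed as a plain string. A minimal sketch of the pattern follows, using a stand-in mylog() that accepts either a string or a list; the real helper's signature is not part of this diff, and the plugin and config names are placeholders.

# Minimal sketch, not the project's implementation: a stand-in mylog() that
# accepts either a string or a list of parts, which is what the call sites in
# this commit imply about the real helper.
def mylog(level: str, message) -> None:
    if isinstance(message, list):
        message = " ".join(str(part) for part in message)
    print(f"[{level}] {message}")

pluginName = "EXAMPLE"      # placeholder; real call sites use their own plugin name
confFileName = "app.conf"   # placeholder config file name

# Before this commit: one message spread over several lines inside a list
mylog(
    "none",
    [
        f"[{pluginName}] ⚠ ERROR: Publisher notification gateway not set up correctly. Check your {confFileName} {pluginName}_* variables."
    ],
)

# After this commit: the same call collapsed onto a single line, with the
# single-element list wrapper dropped
mylog("none", f"[{pluginName}] ⚠ ERROR: Publisher notification gateway not set up correctly. Check your {confFileName} {pluginName}_* variables.")

Assuming the real mylog() treats a single string and a one-element list equivalently, the call sites produce the same log output; only their layout changes.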

View File

@@ -36,12 +36,7 @@ def main():
# Check if basic config settings supplied
if check_config() is False:
mylog(
"none",
[
f"[{pluginName}] ⚠ ERROR: Publisher notification gateway not set up correctly. Check your {confFileName} {pluginName}_* variables."
],
)
mylog("none", f"[{pluginName}] ⚠ ERROR: Publisher notification gateway not set up correctly. Check your {confFileName} {pluginName}_* variables.")
return
# Create a database connection

View File

@@ -1,6 +1,6 @@
#!/usr/bin/env python3
import conf
from const import confFileName, logPath
from const import logPath
from pytz import timezone
import os
@@ -36,11 +36,7 @@ def main():
# Check if basic config settings supplied
if not validate_config():
mylog(
"none",
f"[{pluginName}] ⚠ ERROR: Publisher notification gateway not set up correctly. "
f"Check your {confFileName} {pluginName}_* variables.",
)
mylog("none", f"[{pluginName}] ⚠ ERROR: Publisher not set up correctly. Check your {pluginName}_* variables.",)
return
# Create a database connection

View File

@@ -138,10 +138,7 @@ def execute_arpscan(userSubnets):
mylog("verbose", [f"[{pluginName}] All devices List len:", len(devices_list)])
mylog("verbose", [f"[{pluginName}] Devices List:", devices_list])
mylog(
"verbose",
[f"[{pluginName}] Found: Devices without duplicates ", len(unique_devices)],
)
mylog("verbose", [f"[{pluginName}] Found: Devices without duplicates ", len(unique_devices)],)
return unique_devices
@@ -174,10 +171,7 @@ def execute_arpscan_on_interface(interface):
except subprocess.CalledProcessError:
result = ""
except subprocess.TimeoutExpired:
mylog(
"warning",
[f"[{pluginName}] arp-scan timed out after {timeout_seconds}s"],
)
mylog("warning", [f"[{pluginName}] arp-scan timed out after {timeout_seconds}s"],)
result = ""
# stop looping if duration not set or expired
if scan_duration == 0 or (time.time() - start_time) > scan_duration:

View File

@@ -33,10 +33,7 @@ def main():
device_data = get_device_data()
mylog(
"verbose",
[f"[{pluginName}] Found '{len(device_data)}' devices"],
)
mylog("verbose", f"[{pluginName}] Found '{len(device_data)}' devices")
filtered_devices = [
(key, device)
@@ -44,10 +41,7 @@ def main():
if device.state == ConnectionState.CONNECTED
]
mylog(
"verbose",
[f"[{pluginName}] Processing '{len(filtered_devices)}' connected devices"],
)
mylog("verbose", f"[{pluginName}] Processing '{len(filtered_devices)}' connected devices")
for mac, device in filtered_devices:
entry_mac = str(device.description.mac).lower()

View File

@@ -75,10 +75,7 @@ def cleanup_database(
# -----------------------------------------------------
# Cleanup Online History
mylog(
"verbose",
[f"[{pluginName}] Online_History: Delete all but keep latest 150 entries"],
)
mylog("verbose", [f"[{pluginName}] Online_History: Delete all but keep latest 150 entries"],)
cursor.execute(
"""DELETE from Online_History where "Index" not in (
SELECT "Index" from Online_History
@@ -87,24 +84,14 @@ def cleanup_database(
# -----------------------------------------------------
# Cleanup Events
mylog(
"verbose",
[
f"[{pluginName}] Events: Delete all older than {str(DAYS_TO_KEEP_EVENTS)} days (DAYS_TO_KEEP_EVENTS setting)"
],
)
mylog("verbose", f"[{pluginName}] Events: Delete all older than {str(DAYS_TO_KEEP_EVENTS)} days (DAYS_TO_KEEP_EVENTS setting)")
cursor.execute(
f"""DELETE FROM Events
WHERE eve_DateTime <= date('now', '-{str(DAYS_TO_KEEP_EVENTS)} day')"""
)
# -----------------------------------------------------
# Trim Plugins_History entries to less than PLUGINS_KEEP_HIST setting per unique "Plugin" column entry
mylog(
"verbose",
[
f"[{pluginName}] Plugins_History: Trim Plugins_History entries to less than {str(PLUGINS_KEEP_HIST)} per Plugin (PLUGINS_KEEP_HIST setting)"
],
)
mylog("verbose", f"[{pluginName}] Plugins_History: Trim Plugins_History entries to less than {str(PLUGINS_KEEP_HIST)} per Plugin (PLUGINS_KEEP_HIST setting)")
# Build the SQL query to delete entries that exceed the limit per unique "Plugin" column entry
delete_query = f"""DELETE FROM Plugins_History
@@ -125,12 +112,7 @@ def cleanup_database(
histCount = get_setting_value("DBCLNP_NOTIFI_HIST")
mylog(
"verbose",
[
f"[{pluginName}] Plugins_History: Trim Notifications entries to less than {histCount}"
],
)
mylog("verbose", f"[{pluginName}] Plugins_History: Trim Notifications entries to less than {histCount}")
# Build the SQL query to delete entries
delete_query = f"""DELETE FROM Notifications
@@ -170,12 +152,7 @@ def cleanup_database(
# -----------------------------------------------------
# Cleanup New Devices
if HRS_TO_KEEP_NEWDEV != 0:
mylog(
"verbose",
[
f"[{pluginName}] Devices: Delete all New Devices older than {str(HRS_TO_KEEP_NEWDEV)} hours (HRS_TO_KEEP_NEWDEV setting)"
],
)
mylog("verbose", f"[{pluginName}] Devices: Delete all New Devices older than {str(HRS_TO_KEEP_NEWDEV)} hours (HRS_TO_KEEP_NEWDEV setting)")
query = f"""DELETE FROM Devices WHERE devIsNew = 1 AND devFirstConnection < date('now', '-{str(HRS_TO_KEEP_NEWDEV)} hour')"""
mylog("verbose", [f"[{pluginName}] Query: {query} "])
cursor.execute(query)
@@ -183,12 +160,7 @@ def cleanup_database(
# -----------------------------------------------------
# Cleanup Offline Devices
if HRS_TO_KEEP_OFFDEV != 0:
mylog(
"verbose",
[
f"[{pluginName}] Devices: Delete all New Devices older than {str(HRS_TO_KEEP_OFFDEV)} hours (HRS_TO_KEEP_OFFDEV setting)"
],
)
mylog("verbose", f"[{pluginName}] Devices: Delete all New Devices older than {str(HRS_TO_KEEP_OFFDEV)} hours (HRS_TO_KEEP_OFFDEV setting)")
query = f"""DELETE FROM Devices WHERE devPresentLastScan = 0 AND devLastConnection < date('now', '-{str(HRS_TO_KEEP_OFFDEV)} hour')"""
mylog("verbose", [f"[{pluginName}] Query: {query} "])
cursor.execute(query)
@@ -196,12 +168,7 @@ def cleanup_database(
# -----------------------------------------------------
# Clear New Flag
if CLEAR_NEW_FLAG != 0:
mylog(
"verbose",
[
f'[{pluginName}] Devices: Clear "New Device" flag for all devices older than {str(CLEAR_NEW_FLAG)} hours (CLEAR_NEW_FLAG setting)'
],
)
mylog("verbose", f'[{pluginName}] Devices: Clear "New Device" flag for all devices older than {str(CLEAR_NEW_FLAG)} hours (CLEAR_NEW_FLAG setting)')
query = f"""UPDATE Devices SET devIsNew = 0 WHERE devIsNew = 1 AND date(devFirstConnection, '+{str(CLEAR_NEW_FLAG)} hour') < date('now')"""
# select * from Devices where devIsNew = 1 AND date(devFirstConnection, '+3 hour' ) < date('now')
mylog("verbose", [f"[{pluginName}] Query: {query} "])

View File

@@ -71,10 +71,7 @@ def get_entries(plugin_objects: Plugin_Objects) -> Plugin_Objects:
status = lease.get('status')
device_name = comment or host_name or "(unknown)"
mylog(
'verbose',
[f"ID: {lease_id}, Address: {address}, MAC Address: {mac_address}, Host Name: {host_name}, Comment: {comment}, Last Seen: {last_seen}, Status: {status}"]
)
mylog('verbose', f"ID: {lease_id}, Address: {address}, MAC: {mac_address}, Host Name: {host_name}, Comment: {comment}, Last Seen: {last_seen}, Status: {status}")
if (status == "bound"):
plugin_objects.add_object(

View File

@@ -63,9 +63,7 @@ main structure of NetAlertX
def main():
mylog(
"none", ["[MAIN] Setting up ..."]
) # has to be level 'none' as user config not loaded yet
mylog("none", ["[MAIN] Setting up ..."]) # has to be level 'none' as user config not loaded yet
mylog("none", [f"[conf.tz] Setting up ...{conf.tz}"])
@@ -221,22 +219,14 @@ def main():
# Fetch new unprocessed events
new_events = workflow_manager.get_new_app_events()
mylog(
"debug",
[
f"[MAIN] Processing WORKFLOW new_events from get_new_app_events: {len(new_events)}"
],
)
mylog("debug", [f"[MAIN] Processing WORKFLOW new_events from get_new_app_events: {len(new_events)}"],)
# Process each new event and check triggers
if len(new_events) > 0:
updateState("Workflows: Start")
update_api_flag = False
for event in new_events:
mylog(
"debug",
[f"[MAIN] Processing WORKFLOW app event with GUID {event['GUID']}"],
)
mylog("debug", [f"[MAIN] Processing WORKFLOW app event with GUID {event['GUID']}"],)
# proceed to process events
workflow_manager.process_event(event)
@@ -253,12 +243,7 @@ def main():
# check if devices list needs updating
userUpdatedDevices = UserEventsQueueInstance().has_update_devices()
mylog(
"debug",
[
f"[Plugins] Should I update API (userUpdatedDevices): {userUpdatedDevices}"
],
)
mylog("debug", [f"[Plugins] Should I update API (userUpdatedDevices): {userUpdatedDevices}"],)
if userUpdatedDevices:
update_api(db, all_plugins, True, ["devices"], userUpdatedDevices)

View File

@@ -96,16 +96,9 @@ def update_api(
) # Ensure port is an integer
start_server(graphql_port_value, app_state) # Start the server
except ValueError:
mylog(
"none",
[
f"[API] Invalid GRAPHQL_PORT value, must be an integer: {graphql_port_value}"
],
)
mylog("none", [f"[API] Invalid GRAPHQL_PORT value, must be an integer: {graphql_port_value}"],)
else:
mylog(
"none", ["[API] GRAPHQL_PORT or API_TOKEN is not set, will try later."]
)
mylog("none", ["[API] GRAPHQL_PORT or API_TOKEN is not set, will try later."])
# -------------------------------------------------------------------------------
@@ -135,12 +128,7 @@ class api_endpoint_class:
# Match SQL and API endpoint path
if endpoint.query == self.query and endpoint.path == self.path:
found = True
mylog(
"trace",
[
f"[API] api_endpoint_class: Hashes (file|old|new): ({self.fileName}|{endpoint.hash}|{self.hash})"
],
)
mylog("trace", [f"[API] api_endpoint_class: Hashes (file|old|new): ({self.fileName}|{endpoint.hash}|{self.hash})"],)
if endpoint.hash != self.hash:
self.needsUpdate = True
# Only update changeDetectedWhen if it hasn't been set recently
@@ -190,10 +178,7 @@ class api_endpoint_class:
)
)
):
mylog(
"debug",
[f"[API] api_endpoint_class: Writing {self.fileName} after debounce."],
)
mylog("debug", [f"[API] api_endpoint_class: Writing {self.fileName} after debounce."],)
write_file(self.path, json.dumps(self.jsonData))

View File

@@ -173,13 +173,8 @@ class Query(ObjectType):
network_dev_types = get_setting_value("NETWORK_DEVICE_TYPES")
mylog("trace", f"[graphql_schema] allowed_statuses: {allowed_statuses}")
mylog(
"trace",
f"[graphql_schema] hidden_relationships: {hidden_relationships}",
)
mylog(
"trace", f"[graphql_schema] network_dev_types: {network_dev_types}"
)
mylog("trace", f"[graphql_schema] hidden_relationships: {hidden_relationships}",)
mylog("trace", f"[graphql_schema] network_dev_types: {network_dev_types}")
# Filtering based on the "status"
if status == "my_devices":

View File

@@ -71,9 +71,7 @@ class app_state_class:
with open(stateFile, "r") as json_file:
previousState = json.load(json_file)
except json.decoder.JSONDecodeError as e:
mylog(
"none", [f"[app_state_class] Failed to handle app_state.json: {e}"]
)
mylog("none", [f"[app_state_class] Failed to handle app_state.json: {e}"])
# Check if the file exists and recover previous values
if previousState != "":
@@ -151,10 +149,7 @@ class app_state_class:
with open(stateFile, "w") as json_file:
json_file.write(json_data)
except (TypeError, ValueError) as e:
mylog(
"none",
[f"[app_state_class] Failed to serialize object to JSON: {e}"],
)
mylog("none", [f"[app_state_class] Failed to serialize object to JSON: {e}"],)
return

View File

@@ -233,15 +233,7 @@ class DB:
rows = self.sql.fetchall()
return rows
except AssertionError:
mylog(
"minimal",
[
"[Database] - ERROR: inconsistent query and/or arguments.",
query,
" params: ",
args,
],
)
mylog("minimal", ["[Database] - ERROR: inconsistent query and/or arguments.", query, " params: ", args,],)
except sqlite3.Error as e:
mylog("minimal", ["[Database] - SQL ERROR: ", e])
return None
@@ -258,15 +250,7 @@ class DB:
if len(rows) == 1:
return rows[0]
if len(rows) > 1:
mylog(
"verbose",
[
"[Database] - Warning!: query returns multiple rows, only first row is passed on!",
query,
" params: ",
args,
],
)
mylog("verbose", ["[Database] - Warning!: query returns multiple rows, only first row is passed on!", query, " params: ", args,],)
return rows[0]
# empty result set
return None

View File

@@ -88,10 +88,7 @@ def ensure_column(sql, table: str, column_name: str, column_type: str) -> bool:
mylog("none", [msg])
# Add missing column
mylog(
"verbose",
[f"[db_upgrade] Adding '{column_name}' ({column_type}) to {table} table"],
)
mylog("verbose", [f"[db_upgrade] Adding '{column_name}' ({column_type}) to {table} table"],)
sql.execute(f'ALTER TABLE "{table}" ADD "{column_name}" {column_type}')
return True

View File

@@ -586,16 +586,11 @@ class SafeConditionBuilder:
# Validate each component
if not self._validate_column_name(column):
mylog(
"verbose", [f"[SafeConditionBuilder] Invalid column: {column}"]
)
mylog("verbose", [f"[SafeConditionBuilder] Invalid column: {column}"])
return "", {}
if not self._validate_operator(operator):
mylog(
"verbose",
[f"[SafeConditionBuilder] Invalid operator: {operator}"],
)
mylog("verbose", [f"[SafeConditionBuilder] Invalid operator: {operator}"])
return "", {}
# Create parameter binding
@@ -607,10 +602,7 @@ class SafeConditionBuilder:
condition_parts.append(condition_part)
except Exception as e:
mylog(
"verbose",
[f"[SafeConditionBuilder] Error processing condition: {e}"],
)
mylog("verbose", [f"[SafeConditionBuilder] Error processing condition: {e}"],)
return "", {}
if not condition_parts:
@@ -644,10 +636,7 @@ class SafeConditionBuilder:
if event_type in self.ALLOWED_EVENT_TYPES:
valid_types.append(event_type)
else:
mylog(
"verbose",
f"[SafeConditionBuilder] Invalid event type filtered out: {event_type}",
)
mylog("verbose", f"[SafeConditionBuilder] Invalid event type filtered out: {event_type}",)
if not valid_types:
return "", {}
@@ -682,10 +671,7 @@ class SafeConditionBuilder:
return self.build_safe_condition(condition_setting)
except ValueError as e:
# Log the error and return empty condition for safety
mylog(
"verbose",
f"[SafeConditionBuilder] Unsafe condition rejected: {condition_setting}, Error: {e}",
)
mylog("verbose", f"[SafeConditionBuilder] Unsafe condition rejected: {condition_setting}, Error: {e}",)
return "", {}

View File

@@ -36,12 +36,7 @@ def checkPermissionsOK():
dbW_access = os.access(fullDbPath, os.W_OK)
mylog("none", ["\n"])
mylog(
"none",
[
"The backend restarted (started). If this is unexpected check https://bit.ly/NetAlertX_debug for troubleshooting tips."
],
)
mylog("none", "The backend restarted (started). If this is unexpected check https://bit.ly/NetAlertX_debug for troubleshooting tips.")
mylog("none", ["\n"])
mylog("none", ["Permissions check (All should be True)"])
mylog("none", ["------------------------------------------------"])
@@ -59,12 +54,7 @@ def checkPermissionsOK():
def initialiseFile(pathToCheck, defaultFile):
# if file not readable (missing?) try to copy over the backed-up (default) one
if str(os.access(pathToCheck, os.R_OK)) == "False":
mylog(
"none",
[
"[Setup] (" + pathToCheck + ") file is not readable or missing. Trying to copy over the default one."
],
)
mylog("none", ["[Setup] (" + pathToCheck + ") file is not readable or missing. Trying to copy over the default one."],)
try:
# try runnning a subprocess
p = subprocess.Popen(
@@ -75,31 +65,16 @@ def initialiseFile(pathToCheck, defaultFile):
stdout, stderr = p.communicate()
if str(os.access(pathToCheck, os.R_OK)) == "False":
mylog(
"none",
[
"[Setup] ⚠ ERROR copying (" + defaultFile + ") to (" + pathToCheck + "). Make sure the app has Read & Write access to the parent directory."
],
)
mylog("none", "[Setup] ⚠ ERROR copying (" + defaultFile + ") to (" + pathToCheck + "). Ensure Read & Write access to the parent directory.")
else:
mylog(
"none",
[
"[Setup] (" + defaultFile + ") copied over successfully to (" + pathToCheck + ")."
],
)
mylog("none", ["[Setup] (" + defaultFile + ") copied over successfully to (" + pathToCheck + ")."],)
# write stdout and stderr into .log files for debugging if needed
logResult(stdout, stderr) # TO-DO should be changed to mylog
except subprocess.CalledProcessError as e:
# An error occured, handle it
mylog(
"none",
[
"[Setup] ⚠ ERROR copying (" + defaultFile + "). Make sure the app has Read & Write access to " + pathToCheck
],
)
mylog("none", ["[Setup] ⚠ ERROR copying (" + defaultFile + "). Make sure the app has Read & Write access to " + pathToCheck],)
mylog("none", [e.output])
@@ -187,14 +162,7 @@ def get_setting(key):
mylog("none", [f"[Settings] ⚠ File not found: {settingsFile}"])
return None
mylog(
"trace",
[
"[Import table_settings.json] checking table_settings.json file",
f"SETTINGS_LASTCACHEDATE: {SETTINGS_LASTCACHEDATE}",
f"fileModifiedTime: {fileModifiedTime}",
],
)
mylog("trace", f"[Import table_settings.json] checking table_settings.json file SETTINGS_LASTCACHEDATE: {SETTINGS_LASTCACHEDATE} fileModifiedTime: {fileModifiedTime}")
# Use cache if file hasn't changed
if fileModifiedTime == SETTINGS_LASTCACHEDATE and SETTINGS_CACHE:
@@ -221,10 +189,7 @@ def get_setting(key):
SETTINGS_LASTCACHEDATE = fileModifiedTime
if key not in SETTINGS_CACHE:
mylog(
"none",
[f"[Settings] ⚠ ERROR - setting_missing - {key} not in {settingsFile}"],
)
mylog("none", [f"[Settings] ⚠ ERROR - setting_missing - {key} not in {settingsFile}"],)
return None
return SETTINGS_CACHE[key]
@@ -357,10 +322,7 @@ def setting_value_to_python_type(set_type, set_value):
value = json.loads(set_value.replace("'", "\""))
except json.JSONDecodeError as e:
mylog(
"none",
[f"[setting_value_to_python_type] Error decoding JSON object: {e}"],
)
mylog("none", [f"[setting_value_to_python_type] Error decoding JSON object: {e}"],)
mylog("none", [set_value])
value = []
@@ -375,10 +337,7 @@ def setting_value_to_python_type(set_type, set_value):
try:
value = reverseTransformers(json.loads(set_value), transformers)
except json.JSONDecodeError as e:
mylog(
"none",
[f"[setting_value_to_python_type] Error decoding JSON object: {e}"],
)
mylog("none", [f"[setting_value_to_python_type] Error decoding JSON object: {e}"],)
mylog("none", [{set_value}])
value = {}
@@ -766,9 +725,7 @@ def checkNewVersion():
try:
data = json.loads(text)
except json.JSONDecodeError:
mylog(
"minimal", ["[Version check] ⚠ ERROR: Invalid JSON response from GitHub."]
)
mylog("minimal", ["[Version check] ⚠ ERROR: Invalid JSON response from GitHub."])
return False
# make sure we received a valid response and not an API rate limit exceeded message
@@ -784,10 +741,7 @@ def checkNewVersion():
else:
mylog("none", ["[Version check] Running the latest version."])
else:
mylog(
"minimal",
["[Version check] ⚠ ERROR: Received unexpected response from GitHub."],
)
mylog("minimal", ["[Version check] ⚠ ERROR: Received unexpected response from GitHub."],)
return False

View File

@@ -180,10 +180,7 @@ def importConfigs(pm, db, all_plugins):
fileModifiedTime = os.path.getmtime(config_file)
mylog("debug", ["[Import Config] checking config file "])
mylog(
"debug",
["[Import Config] lastImportedConfFile :", conf.lastImportedConfFile],
)
mylog("debug", ["[Import Config] lastImportedConfFile :", conf.lastImportedConfFile],)
mylog("debug", ["[Import Config] fileModifiedTime :", fileModifiedTime])
if (fileModifiedTime == conf.lastImportedConfFile) and all_plugins is not None:
@@ -399,12 +396,7 @@ def importConfigs(pm, db, all_plugins):
conf.TIMEZONE = ccd(
"TIMEZONE", conf.tz, c_d, "_KEEP_", "_KEEP_", "[]", "General"
)
mylog(
"none",
[
f"[Config] Invalid timezone '{conf.TIMEZONE}', defaulting to {default_tz}."
],
)
mylog("none", [f"[Config] Invalid timezone '{conf.TIMEZONE}', defaulting to {default_tz}."],)
# TODO cleanup later ----------------------------------------------------------------------------------
# init all time values as we have timezone - all this shoudl be moved into plugin/plugin settings
@@ -450,13 +442,7 @@ def importConfigs(pm, db, all_plugins):
all_plugins = get_plugins_configs(conf.DISCOVER_PLUGINS)
mylog(
"none",
[
"[Config] Plugins: Number of all plugins (including not loaded): ",
len(all_plugins),
],
)
mylog("none", ["[Config] Plugins: Number of all plugins (including not loaded): ", len(all_plugins),],)
plugin_indexes_to_remove = []
all_plugins_prefixes = [] # to init the LOADED_PLUGINS setting with correct options
@@ -580,9 +566,7 @@ def importConfigs(pm, db, all_plugins):
"General",
)
mylog(
"none", ["[Config] Number of Plugins to load: ", len(loaded_plugins_prefixes)]
)
mylog("none", ["[Config] Number of Plugins to load: ", len(loaded_plugins_prefixes)])
mylog("none", ["[Config] Plugins to load: ", loaded_plugins_prefixes])
conf.plugins_once_run = False
@@ -606,12 +590,7 @@ def importConfigs(pm, db, all_plugins):
# Log the value being passed
# ccd(key, default, config_dir, name, inputtype, options, group, events=None, desc="", setJsonMetadata=None, overrideTemplate=None, forceDefault=False)
mylog(
"verbose",
[
f"[Config] Setting override {setting_name} with value: {value}"
],
)
mylog("verbose", [f"[Config] Setting override {setting_name} with value: {value}"],)
ccd(
setting_name,
value,
@@ -630,12 +609,7 @@ def importConfigs(pm, db, all_plugins):
)
except json.JSONDecodeError:
mylog(
"none",
[
f"[Config] [ERROR] Setting override decoding JSON from {app_conf_override_path}"
],
)
mylog("none", [f"[Config] [ERROR] Setting override decoding JSON from {app_conf_override_path}"],)
else:
mylog("debug", [f"[Config] File {app_conf_override_path} does not exist."])
@@ -777,10 +751,7 @@ def renameSettings(config_file):
timestamp = datetime.datetime.now().strftime("%Y%m%d%H%M%S")
backup_file = f"{config_file}_old_setting_names_{timestamp}.bak"
mylog(
"debug",
f"[Config] Old setting names will be replaced and a backup ({backup_file}) of the config created.",
)
mylog("debug", f"[Config] Old setting names will be replaced and a backup ({backup_file}) of the config created.",)
shutil.copy(str(config_file), backup_file) # Convert config_file to a string
@@ -807,6 +778,4 @@ def renameSettings(config_file):
) # Convert config_file to a string
else:
mylog(
"debug", "[Config] No old setting names found in the file. No changes made."
)
mylog("debug", "[Config] No old setting names found in the file. No changes made.")

View File

@@ -119,10 +119,7 @@ def remove_old(keepNumberOfEntries):
try:
with open(NOTIFICATION_API_FILE, "w") as file:
json.dump(trimmed, file, indent=4)
mylog(
"verbose",
f"[Notification] Trimmed notifications to latest {keepNumberOfEntries}",
)
mylog("verbose", f"[Notification] Trimmed notifications to latest {keepNumberOfEntries}",)
except Exception as e:
mylog("none", f"Error writing trimmed notifications file: {e}")

View File

@@ -295,9 +295,7 @@ class NotificationInstance:
(f"-{minutes} minutes", tz_offset),
)
mylog(
"minimal", ["[Notification] Notifications changes: ", self.db.sql.rowcount]
)
mylog("minimal", ["[Notification] Notifications changes: ", self.db.sql.rowcount])
# clear plugin events
self.clearPluginEvents()

View File

@@ -31,10 +31,7 @@ class UserEventsQueueInstance:
Returns an empty list if the file doesn't exist.
"""
if not os.path.exists(self.log_file):
mylog(
"none",
["[UserEventsQueueInstance] Log file not found: ", self.log_file],
)
mylog("none", ["[UserEventsQueueInstance] Log file not found: ", self.log_file],)
return [] # No log file, return empty list
with open(self.log_file, "r") as file:
return file.readlines()

View File

@@ -123,9 +123,7 @@ def update_devices_data_from_scan(db):
)""")
# Update only devices with empty or NULL devParentMAC
mylog(
"debug", "[Update Devices] - (if not empty) cur_NetworkNodeMAC -> devParentMAC"
)
mylog("debug", "[Update Devices] - (if not empty) cur_NetworkNodeMAC -> devParentMAC")
sql.execute("""UPDATE Devices
SET devParentMAC = (
SELECT cur_NetworkNodeMAC
@@ -144,10 +142,7 @@ def update_devices_data_from_scan(db):
""")
# Update only devices with empty or NULL devSite
mylog(
"debug",
"[Update Devices] - (if not empty) cur_NetworkSite -> (if empty) devSite",
)
mylog("debug", "[Update Devices] - (if not empty) cur_NetworkSite -> (if empty) devSite",)
sql.execute("""UPDATE Devices
SET devSite = (
SELECT cur_NetworkSite
@@ -325,9 +320,7 @@ def save_scanned_devices(db):
.strip()
)
mylog(
"debug", ["[Save Devices] Saving this IP into the CurrentScan table:", local_ip]
)
mylog("debug", ["[Save Devices] Saving this IP into the CurrentScan table:", local_ip])
if check_IP_format(local_ip) == "":
local_ip = "0.0.0.0"
@@ -361,23 +354,12 @@ def print_scan_stats(db):
sql.execute(query)
stats = sql.fetchall()
mylog(
"verbose",
f"[Scan Stats] Devices Detected.......: {stats[0]['devices_detected']}",
)
mylog("verbose", f"[Scan Stats] Devices Detected.......: {stats[0]['devices_detected']}",)
mylog("verbose", f"[Scan Stats] New Devices............: {stats[0]['new_devices']}")
mylog("verbose", f"[Scan Stats] Down Alerts............: {stats[0]['down_alerts']}")
mylog(
"verbose",
f"[Scan Stats] New Down Alerts........: {stats[0]['new_down_alerts']}",
)
mylog(
"verbose",
f"[Scan Stats] New Connections........: {stats[0]['new_connections']}",
)
mylog(
"verbose", f"[Scan Stats] Disconnections.........: {stats[0]['disconnections']}"
)
mylog("verbose", f"[Scan Stats] New Down Alerts........: {stats[0]['new_down_alerts']}",)
mylog("verbose", f"[Scan Stats] New Connections........: {stats[0]['new_connections']}",)
mylog("verbose", f"[Scan Stats] Disconnections.........: {stats[0]['disconnections']}")
mylog("verbose", f"[Scan Stats] IP Changes.............: {stats[0]['ip_changes']}")
# if str(stats[0]["new_devices"]) != '0':
@@ -395,10 +377,7 @@ def print_scan_stats(db):
row_dict = dict(row)
mylog("trace", f" {row_dict}")
mylog(
"trace",
" ================ Events table content where eve_PendingAlertEmail = 1 ================",
)
mylog("trace", " ================ Events table content where eve_PendingAlertEmail = 1 ================",)
sql.execute("select * from Events where eve_PendingAlertEmail = 1")
rows = sql.fetchall()
for row in rows:
@@ -654,10 +633,7 @@ def check_plugin_data_changed(pm, plugins_to_check):
# Continue if changes detected
for p in plugins_changed:
mylog(
'debug',
f'[check_plugin_data_changed] {p} changed (last_data_change|last_data_check): ({pm.plugin_states.get(p, {}).get("lastDataChange")}|{pm.plugin_checks.get(p)})'
)
mylog('debug', f'[check_plugin_data_changed] {p} changed (last_change|last_check): ({pm.plugin_states.get(p, {}).get("lastDataChange")}|{pm.plugin_checks.get(p)})')
return True
@@ -741,10 +717,7 @@ def update_devices_names(pm):
# --- Step 1: Update device names for unknown devices ---
unknownDevices = device_handler.getUnknown()
if unknownDevices:
mylog(
"verbose",
f"[Update Device Name] Trying to resolve devices without name. Unknown devices count: {len(unknownDevices)}",
)
mylog("verbose", f"[Update Device Name] Trying to resolve devices without name. Unknown devices count: {len(unknownDevices)}",)
# Try resolving both name and FQDN
recordsToUpdate, recordsNotFound, fs, notFound = resolve_devices(
@@ -752,10 +725,8 @@ def update_devices_names(pm):
)
# Log summary
mylog(
"verbose",
f"[Update Device Name] Names Found (DIGSCAN/AVAHISCAN/NSLOOKUP/NBTSCAN): {len(recordsToUpdate)} ({fs['DIGSCAN']}/{fs['AVAHISCAN']}/{fs['NSLOOKUP']}/{fs['NBTSCAN']})",
)
res_string = f"{fs['DIGSCAN']}/{fs['AVAHISCAN']}/{fs['NSLOOKUP']}/{fs['NBTSCAN']}"
mylog("verbose", f"[Update Device Name] Names Found (DIGSCAN/AVAHISCAN/NSLOOKUP/NBTSCAN): {len(recordsToUpdate)} ({res_string})",)
mylog("verbose", f"[Update Device Name] Names Not Found : {notFound}")
# Apply updates to database
@@ -771,10 +742,7 @@ def update_devices_names(pm):
if get_setting_value("REFRESH_FQDN"):
allDevices = device_handler.getAll()
if allDevices:
mylog(
"verbose",
f"[Update FQDN] Trying to resolve FQDN. Devices count: {len(allDevices)}",
)
mylog("verbose", f"[Update FQDN] Trying to resolve FQDN. Devices count: {len(allDevices)}",)
# Try resolving only FQDN
recordsToUpdate, _, fs, notFound = resolve_devices(
@@ -782,10 +750,8 @@ def update_devices_names(pm):
)
# Log summary
mylog(
"verbose",
f"[Update FQDN] Names Found (DIGSCAN/AVAHISCAN/NSLOOKUP/NBTSCAN): {len(recordsToUpdate)}({fs['DIGSCAN']}/{fs['AVAHISCAN']}/{fs['NSLOOKUP']}/{fs['NBTSCAN']})",
)
res_string = f"{fs['DIGSCAN']}/{fs['AVAHISCAN']}/{fs['NSLOOKUP']}/{fs['NBTSCAN']}"
mylog("verbose", f"[Update FQDN] Names Found (DIGSCAN/AVAHISCAN/NSLOOKUP/NBTSCAN): {len(recordsToUpdate)}({res_string})",)
mylog("verbose", f"[Update FQDN] Names Not Found : {notFound}")
# Apply FQDN-only updates
@@ -907,25 +873,13 @@ def query_MAC_vendor(pMAC):
parts = line.split("\t", 1)
if len(parts) > 1:
vendor = parts[1].strip()
mylog(
"debug",
[
f"[Vendor Check] Found '{vendor}' for '{pMAC}' in {vendorsPath}"
],
)
mylog("debug", [f"[Vendor Check] Found '{vendor}' for '{pMAC}' in {vendorsPath}"], )
return vendor
else:
mylog(
"debug",
[
f'[Vendor Check] ⚠ ERROR: Match found, but line could not be processed: "{line_lower}"'
],
)
mylog("debug", [f'[Vendor Check] ⚠ ERROR: Match found, but line could not be processed: "{line_lower}"'],)
return -1
return -1 # MAC address not found in the database
except FileNotFoundError:
mylog(
"none", [f"[Vendor Check] ⚠ ERROR: Vendors file {vendorsPath} not found."]
)
mylog("none", [f"[Vendor Check] ⚠ ERROR: Vendors file {vendorsPath} not found."])
return -1

View File

@@ -25,10 +25,7 @@ try:
rule["icon_base64"] = ""
except Exception as e:
MAC_TYPE_ICON_RULES = []
mylog(
"none",
f"[guess_device_attributes] Failed to load device_heuristics_rules.json: {e}",
)
mylog("none", f"[guess_device_attributes] Failed to load device_heuristics_rules.json: {e}",)
# -----------------------------------------
@@ -169,10 +166,8 @@ def guess_device_attributes(
default_icon: str,
default_type: str,
) -> Tuple[str, str]:
mylog(
"debug",
f"[guess_device_attributes] Guessing attributes for (vendor|mac|ip|name): ('{vendor}'|'{mac}'|'{ip}'|'{name}')",
)
mylog("debug", f"[guess_device_attributes] Guessing attributes for (vendor|mac|ip|name): ('{vendor}'|'{mac}'|'{ip}'|'{name}')",)
# --- Normalize inputs ---
vendor = str(vendor).lower().strip() if vendor else "unknown"
@@ -207,10 +202,7 @@ def guess_device_attributes(
type_ = type_ or default_type
icon = icon or default_icon
mylog(
"debug",
f"[guess_device_attributes] Guessed attributes (icon|type_): ('{icon}'|'{type_}')",
)
mylog("debug", f"[guess_device_attributes] Guessed attributes (icon|type_): ('{icon}'|'{type_}')",)
return icon, type_

View File

@@ -50,9 +50,7 @@ def process_scan(db):
update_devices_data_from_scan(db)
# Pair session events (Connection / Disconnection)
mylog(
"verbose", "[Process Scan] Pairing session events (connection / disconnection) "
)
mylog("verbose", "[Process Scan] Pairing session events (connection / disconnection) ")
pair_sessions_events(db)
# Sessions snapshot
@@ -221,10 +219,7 @@ def insertOnlineHistory(db):
VALUES (?, ?, ?, ?, ?, ?)
"""
mylog(
"debug",
f"[Presence graph] Sql query: {insert_query} with values: {scanTimestamp}, {onlineDevices}, {downDevices}, {allDevices}, {archivedDevices}, {offlineDevices}",
)
mylog("debug", f"[Presence graph] Sql query: {insert_query} with values: {scanTimestamp}, {onlineDevices}, {downDevices}, {allDevices}, {archivedDevices}, {offlineDevices}",)
# Debug output
print_table_schema(db, "Online_History")

View File

@@ -26,12 +26,7 @@ def logEventStatusCounts(objName, pluginEvents):
status_counts[status] = 1
for status, count in status_counts.items():
mylog(
"debug",
[
f'[{module_name}] In {objName} there are {count} events with the status "{status}" '
],
)
mylog("debug", [f'[{module_name}] In {objName} there are {count} events with the status "{status}" '],)
# -------------------------------------------------------------------------------
@@ -100,10 +95,7 @@ def list_to_csv(arr):
mylog("debug", f"[{module_name}] Flattening the below array")
mylog("debug", arr)
mylog(
"debug",
f"[{module_name}] isinstance(arr, list) : {isinstance(arr, list)} | isinstance(arr, str) : {isinstance(arr, str)}",
)
mylog("debug", f"[{module_name}] isinstance(arr, list) : {isinstance(arr, list)} | isinstance(arr, str) : {isinstance(arr, str)}",)
if isinstance(arr, str):
tmpStr = (
@@ -227,19 +219,9 @@ def get_plugins_configs(loadAll):
except (FileNotFoundError, json.JSONDecodeError):
# Handle the case when the file is not found or JSON decoding fails
mylog(
"none",
[
f"[{module_name}] ⚠ ERROR - JSONDecodeError or FileNotFoundError for file {config_path}"
],
)
mylog("none", f"[{module_name}] ⚠ ERROR - JSONDecodeError or FileNotFoundError for file {config_path}")
except Exception as e:
mylog(
"none",
[
f"[{module_name}] ⚠ ERROR - Exception for file {config_path}: {str(e)}"
],
)
mylog("none", f"[{module_name}] ⚠ ERROR - Exception for file {config_path}: {str(e)}")
# Sort pluginsList based on "execution_order"
pluginsListSorted = sorted(pluginsList, key=get_layer)
@@ -285,23 +267,13 @@ def getPluginObject(keyValues):
if all_match:
return item
mylog(
"verbose",
[
f"[{module_name}] 💬 INFO - Object not found {json.dumps(keyValues)} "
],
)
mylog("verbose", f"[{module_name}] 💬 INFO - Object not found {json.dumps(keyValues)} ")
return {}
except (FileNotFoundError, json.JSONDecodeError, ValueError):
# Handle the case when the file is not found, JSON decoding fails, or data is not in the expected format
mylog(
"verbose",
[
f"[{module_name}] ⚠ ERROR - JSONDecodeError or FileNotFoundError for file {plugins_objects}"
],
)
mylog("verbose", f"[{module_name}] ⚠ ERROR - JSONDecodeError or FileNotFoundError for file {plugins_objects}")
return {}

View File

@@ -29,10 +29,7 @@ class UpdateFieldAction(Action):
self.db = db
def execute(self):
mylog(
"verbose",
f"[WF] Updating field '{self.field}' to '{self.value}' for event object {self.trigger.object_type}",
)
mylog("verbose", f"[WF] Updating field '{self.field}' to '{self.value}' for event object {self.trigger.object_type}")
obj = self.trigger.object
@@ -109,12 +106,7 @@ class RunPluginAction(Action):
def execute(self):
obj = self.trigger.object
mylog(
"verbose",
[
f"Executing plugin '{self.plugin_name}' with parameters {self.params} for object {obj}"
],
)
mylog("verbose", f"Executing plugin '{self.plugin_name}' with parameters {self.params} for object {obj}")
# PluginManager.run(self.plugin_name, self.parameters)
return obj
@@ -129,12 +121,7 @@ class SendNotificationAction(Action):
def execute(self):
obj = self.trigger.object
mylog(
"verbose",
[
f"Sending notification via '{self.method}': {self.message} for object {obj}"
],
)
mylog("verbose", f"Sending notification via '{self.method}': {self.message} for object {obj}")
# NotificationManager.send(self.method, self.message)
return obj

View File

@@ -52,10 +52,7 @@ class ConditionGroup:
"""Handles condition groups with AND, OR logic, supporting nested groups."""
def __init__(self, group_json):
mylog(
"verbose",
[f"[WF] ConditionGroup json.dumps(group_json): {json.dumps(group_json)}"],
)
mylog("verbose", f"[WF] ConditionGroup json.dumps(group_json): {json.dumps(group_json)}")
self.logic = group_json.get("logic", "AND").upper()
self.conditions = []

View File

@@ -53,21 +53,13 @@ class WorkflowManager:
# Ensure workflow is enabled before proceeding
if workflow.get("enabled", "No").lower() == "yes":
wfName = workflow["name"]
mylog(
"debug",
[f"[WF] Checking if '{evGuid}' triggers the workflow '{wfName}'"],
)
mylog("debug", f"[WF] Checking if '{evGuid}' triggers the workflow '{wfName}'")
# construct trigger object which also evaluates if the current event triggers it
trigger = Trigger(workflow["trigger"], event, self.db)
if trigger.triggered:
mylog(
"verbose",
[
f"[WF] Event with GUID '{evGuid}' triggered the workflow '{wfName}'"
],
)
mylog("verbose", f"[WF] Event with GUID '{evGuid}' triggered the workflow '{wfName}'")
self.execute_workflow(workflow, trigger)
@@ -98,12 +90,7 @@ class WorkflowManager:
evaluator = ConditionGroup(condition_group)
if evaluator.evaluate(trigger): # If any group evaluates to True
mylog(
"none",
[
f"[WF] Workflow {wfName} will be executed - conditions were evaluated as TRUE"
],
)
mylog("none", f"[WF] Workflow {wfName} will be executed - conditions were evaluated as TRUE")
mylog("debug", [f"[WF] Workflow condition_group: {condition_group}"])
self.execute_actions(workflow["actions"], trigger)

View File

@@ -24,12 +24,7 @@ class Trigger:
self.object_type == event["ObjectType"] and self.event_type == event["AppEventType"]
)
mylog(
"debug",
[
f"""[WF] self.triggered '{self.triggered}' for event '{get_array_from_sql_rows(event)} and trigger {json.dumps(triggerJson)}' """
],
)
mylog("debug", f"""[WF] self.triggered '{self.triggered}' for event '{get_array_from_sql_rows(event)} and trigger {json.dumps(triggerJson)}' """)
if self.triggered:
# object type corresponds with the DB table name