Mirror of https://github.com/jokob-sk/NetAlertX.git
Synced 2025-12-07 09:36:05 -08:00
Compare commits: fa9fc2c8e3 ... e90fbf17d3 (2 commits)

Commits in this compare: e90fbf17d3, 139447b253
@@ -50,6 +50,8 @@ Let’s walk through setting up a device named `raspberrypi` to act as a network
 
 - Optionally assign a **Parent Node** (where this device connects to) and the **Relationship type** of the connection.
 The `nic` relationship type can affect parent notifications — see the setting description and [Notifications documentation](./NOTIFICATIONS.md) for more.
+- A device’s parent MAC will be overwritten by plugins if its current value is any of the following: "null", "(unknown)", "(Unknown)".
+- If you want plugins to be able to overwrite the parent value (for example, when mixing plugins that do not provide parent MACs like `ARPSCAN` with those that do, like `UNIFIAPI`), you must set the setting `NEWDEV_devParentMAC` to None.
 
 
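The two added bullets describe when plugins may replace a device's parent. A minimal sketch of that rule — a hypothetical helper for illustration, not the actual plugin code:

```python
# Values treated as "effectively unset" per the docs above.
UNKNOWN_PARENT_VALUES = {"null", "(unknown)", "(Unknown)"}

def plugin_may_overwrite_parent(current_parent_mac, newdev_devparentmac_setting):
    # If NEWDEV_devParentMAC is set to None, plugins may always overwrite;
    # otherwise they only replace placeholder values.
    if newdev_devparentmac_setting is None:
        return True
    return current_parent_mac in UNKNOWN_PARENT_VALUES
```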
@@ -36,12 +36,7 @@ def main():
 
     # Check if basic config settings supplied
     if check_config() is False:
-        mylog(
-            "none",
-            [
-                f"[{pluginName}] ⚠ ERROR: Publisher notification gateway not set up correctly. Check your {confFileName} {pluginName}_* variables."
-            ],
-        )
+        mylog("none", f"[{pluginName}] ⚠ ERROR: Publisher notification gateway not set up correctly. Check your {confFileName} {pluginName}_* variables.")
         return
 
     # Create a database connection
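This diff repeatedly collapses multi-line `mylog(...)` calls onto one line, sometimes swapping a single-element list for a plain f-string. A minimal sketch of a logger that accepts both shapes — an assumption for illustration, not NetAlertX's actual `mylog` implementation:

```python
def mylog(level, message):
    # Accept either a string or a list of fragments and flatten
    # the list into one line, so both call styles log identically.
    if isinstance(message, list):
        message = " ".join(str(part) for part in message)
    print(f"[{level}] {message}")

mylog("none", "single string form")
mylog("none", ["list", "form", 123])
```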
@@ -1,6 +1,6 @@
 #!/usr/bin/env python3
 import conf
-from const import confFileName, logPath
+from const import logPath
 from pytz import timezone
 
 import os
@@ -36,11 +36,7 @@ def main():
 
     # Check if basic config settings supplied
     if not validate_config():
-        mylog(
-            "none",
-            f"[{pluginName}] ⚠ ERROR: Publisher notification gateway not set up correctly. "
-            f"Check your {confFileName} {pluginName}_* variables.",
-        )
+        mylog("none", f"[{pluginName}] ⚠ ERROR: Publisher not set up correctly. Check your {pluginName}_* variables.",)
         return
 
     # Create a database connection
@@ -138,10 +138,7 @@ def execute_arpscan(userSubnets):
     mylog("verbose", [f"[{pluginName}] All devices List len:", len(devices_list)])
     mylog("verbose", [f"[{pluginName}] Devices List:", devices_list])
 
-    mylog(
-        "verbose",
-        [f"[{pluginName}] Found: Devices without duplicates ", len(unique_devices)],
-    )
+    mylog("verbose", [f"[{pluginName}] Found: Devices without duplicates ", len(unique_devices)],)
 
     return unique_devices
 
@@ -174,10 +171,7 @@ def execute_arpscan_on_interface(interface):
     except subprocess.CalledProcessError:
         result = ""
     except subprocess.TimeoutExpired:
-        mylog(
-            "warning",
-            [f"[{pluginName}] arp-scan timed out after {timeout_seconds}s"],
-        )
+        mylog("warning", [f"[{pluginName}] arp-scan timed out after {timeout_seconds}s"],)
         result = ""
     # stop looping if duration not set or expired
     if scan_duration == 0 or (time.time() - start_time) > scan_duration:
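The `TimeoutExpired` branch above pairs with a timeout passed to the subprocess call; a self-contained sketch of the pattern, with hypothetical command arguments:

```python
import subprocess

timeout_seconds = 30  # hypothetical value; the plugin reads this from settings

try:
    result = subprocess.check_output(
        ["arp-scan", "--interface=eth0", "--localnet"],
        text=True,
        timeout=timeout_seconds,
    )
except subprocess.CalledProcessError:
    result = ""
except subprocess.TimeoutExpired:
    # the real code logs via mylog before falling back to an empty result
    result = ""
```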
@@ -33,10 +33,7 @@ def main():
 
     device_data = get_device_data()
 
-    mylog(
-        "verbose",
-        [f"[{pluginName}] Found '{len(device_data)}' devices"],
-    )
+    mylog("verbose", f"[{pluginName}] Found '{len(device_data)}' devices")
 
     filtered_devices = [
         (key, device)
@@ -44,10 +41,7 @@ def main():
         if device.state == ConnectionState.CONNECTED
     ]
 
-    mylog(
-        "verbose",
-        [f"[{pluginName}] Processing '{len(filtered_devices)}' connected devices"],
-    )
+    mylog("verbose", f"[{pluginName}] Processing '{len(filtered_devices)}' connected devices")
 
     for mac, device in filtered_devices:
         entry_mac = str(device.description.mac).lower()
@@ -75,10 +75,7 @@ def cleanup_database(
 
     # -----------------------------------------------------
     # Cleanup Online History
-    mylog(
-        "verbose",
-        [f"[{pluginName}] Online_History: Delete all but keep latest 150 entries"],
-    )
+    mylog("verbose", [f"[{pluginName}] Online_History: Delete all but keep latest 150 entries"],)
     cursor.execute(
         """DELETE from Online_History where "Index" not in (
           SELECT "Index" from Online_History
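The Online_History cleanup keeps only the newest rows via a `NOT IN (SELECT ... LIMIT n)` subquery; a runnable sketch of the pattern (the ORDER BY/LIMIT tail of the real query is truncated in this hunk, so ordering by the `Index` column is an assumption):

```python
import sqlite3

conn = sqlite3.connect(":memory:")
cur = conn.cursor()
cur.execute('CREATE TABLE Online_History ("Index" INTEGER PRIMARY KEY, Scan_Date TEXT)')
cur.executemany(
    "INSERT INTO Online_History (Scan_Date) VALUES (?)",
    [(f"2025-01-{d:02d} 00:00:00",) for d in range(1, 21)],
)

# Keep only the latest 5 rows (the real query keeps 150).
cur.execute(
    '''DELETE FROM Online_History WHERE "Index" NOT IN (
           SELECT "Index" FROM Online_History
           ORDER BY "Index" DESC LIMIT 5)'''
)
conn.commit()
print(cur.execute("SELECT COUNT(*) FROM Online_History").fetchone()[0])  # 5
```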
@@ -87,24 +84,14 @@ def cleanup_database(
 
     # -----------------------------------------------------
     # Cleanup Events
-    mylog(
-        "verbose",
-        [
-            f"[{pluginName}] Events: Delete all older than {str(DAYS_TO_KEEP_EVENTS)} days (DAYS_TO_KEEP_EVENTS setting)"
-        ],
-    )
+    mylog("verbose", f"[{pluginName}] Events: Delete all older than {str(DAYS_TO_KEEP_EVENTS)} days (DAYS_TO_KEEP_EVENTS setting)")
     cursor.execute(
         f"""DELETE FROM Events
                 WHERE eve_DateTime <= date('now', '-{str(DAYS_TO_KEEP_EVENTS)} day')"""
     )
     # -----------------------------------------------------
     # Trim Plugins_History entries to less than PLUGINS_KEEP_HIST setting per unique "Plugin" column entry
-    mylog(
-        "verbose",
-        [
-            f"[{pluginName}] Plugins_History: Trim Plugins_History entries to less than {str(PLUGINS_KEEP_HIST)} per Plugin (PLUGINS_KEEP_HIST setting)"
-        ],
-    )
+    mylog("verbose", f"[{pluginName}] Plugins_History: Trim Plugins_History entries to less than {str(PLUGINS_KEEP_HIST)} per Plugin (PLUGINS_KEEP_HIST setting)")
 
     # Build the SQL query to delete entries that exceed the limit per unique "Plugin" column entry
     delete_query = f"""DELETE FROM Plugins_History
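The retention deletes above lean on SQLite's `date()` modifiers for the cutoff; a quick illustration of the arithmetic:

```python
import sqlite3

conn = sqlite3.connect(":memory:")
days_to_keep = 90  # stand-in for the DAYS_TO_KEEP_EVENTS setting

cutoff = conn.execute(f"SELECT date('now', '-{days_to_keep} day')").fetchone()[0]
print(cutoff)  # rows with eve_DateTime at or before this date get deleted
```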
@@ -125,12 +112,7 @@ def cleanup_database(
 
     histCount = get_setting_value("DBCLNP_NOTIFI_HIST")
 
-    mylog(
-        "verbose",
-        [
-            f"[{pluginName}] Plugins_History: Trim Notifications entries to less than {histCount}"
-        ],
-    )
+    mylog("verbose", f"[{pluginName}] Plugins_History: Trim Notifications entries to less than {histCount}")
 
     # Build the SQL query to delete entries
     delete_query = f"""DELETE FROM Notifications
@@ -170,12 +152,7 @@ def cleanup_database(
     # -----------------------------------------------------
     # Cleanup New Devices
     if HRS_TO_KEEP_NEWDEV != 0:
-        mylog(
-            "verbose",
-            [
-                f"[{pluginName}] Devices: Delete all New Devices older than {str(HRS_TO_KEEP_NEWDEV)} hours (HRS_TO_KEEP_NEWDEV setting)"
-            ],
-        )
+        mylog("verbose", f"[{pluginName}] Devices: Delete all New Devices older than {str(HRS_TO_KEEP_NEWDEV)} hours (HRS_TO_KEEP_NEWDEV setting)")
         query = f"""DELETE FROM Devices WHERE devIsNew = 1 AND devFirstConnection < date('now', '-{str(HRS_TO_KEEP_NEWDEV)} hour')"""
         mylog("verbose", [f"[{pluginName}] Query: {query} "])
         cursor.execute(query)
@@ -183,12 +160,7 @@ def cleanup_database(
     # -----------------------------------------------------
     # Cleanup Offline Devices
     if HRS_TO_KEEP_OFFDEV != 0:
-        mylog(
-            "verbose",
-            [
-                f"[{pluginName}] Devices: Delete all New Devices older than {str(HRS_TO_KEEP_OFFDEV)} hours (HRS_TO_KEEP_OFFDEV setting)"
-            ],
-        )
+        mylog("verbose", f"[{pluginName}] Devices: Delete all New Devices older than {str(HRS_TO_KEEP_OFFDEV)} hours (HRS_TO_KEEP_OFFDEV setting)")
         query = f"""DELETE FROM Devices WHERE devPresentLastScan = 0 AND devLastConnection < date('now', '-{str(HRS_TO_KEEP_OFFDEV)} hour')"""
         mylog("verbose", [f"[{pluginName}] Query: {query} "])
         cursor.execute(query)
@@ -196,12 +168,7 @@ def cleanup_database(
     # -----------------------------------------------------
     # Clear New Flag
     if CLEAR_NEW_FLAG != 0:
-        mylog(
-            "verbose",
-            [
-                f'[{pluginName}] Devices: Clear "New Device" flag for all devices older than {str(CLEAR_NEW_FLAG)} hours (CLEAR_NEW_FLAG setting)'
-            ],
-        )
+        mylog("verbose", f'[{pluginName}] Devices: Clear "New Device" flag for all devices older than {str(CLEAR_NEW_FLAG)} hours (CLEAR_NEW_FLAG setting)')
         query = f"""UPDATE Devices SET devIsNew = 0 WHERE devIsNew = 1 AND date(devFirstConnection, '+{str(CLEAR_NEW_FLAG)} hour') < date('now')"""
         # select * from Devices where devIsNew = 1 AND date(devFirstConnection, '+3 hour' ) < date('now')
         mylog("verbose", [f"[{pluginName}] Query: {query} "])
@@ -71,10 +71,7 @@ def get_entries(plugin_objects: Plugin_Objects) -> Plugin_Objects:
|
|||||||
status = lease.get('status')
|
status = lease.get('status')
|
||||||
device_name = comment or host_name or "(unknown)"
|
device_name = comment or host_name or "(unknown)"
|
||||||
|
|
||||||
mylog(
|
mylog('verbose', f"ID: {lease_id}, Address: {address}, MAC: {mac_address}, Host Name: {host_name}, Comment: {comment}, Last Seen: {last_seen}, Status: {status}")
|
||||||
'verbose',
|
|
||||||
[f"ID: {lease_id}, Address: {address}, MAC Address: {mac_address}, Host Name: {host_name}, Comment: {comment}, Last Seen: {last_seen}, Status: {status}"]
|
|
||||||
)
|
|
||||||
|
|
||||||
if (status == "bound"):
|
if (status == "bound"):
|
||||||
plugin_objects.add_object(
|
plugin_objects.add_object(
|
||||||
|
|||||||
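The lease loop above only promotes `bound` leases to plugin objects and derives a display name from the comment, then the host name, then a placeholder. A stand-alone sketch with hypothetical lease records:

```python
# Hypothetical lease dicts mirroring the fields logged above.
leases = [
    {"id": "*1", "address": "192.168.1.10", "mac": "AA:BB:CC:DD:EE:FF",
     "host_name": "raspberrypi", "comment": "", "status": "bound"},
    {"id": "*2", "address": "192.168.1.11", "mac": "11:22:33:44:55:66",
     "host_name": "", "comment": "printer", "status": "waiting"},
]

for lease in leases:
    if lease["status"] != "bound":
        continue  # only bound leases become devices
    device_name = lease["comment"] or lease["host_name"] or "(unknown)"
    print(lease["mac"], lease["address"], device_name)
```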
@@ -63,9 +63,7 @@ main structure of NetAlertX
 
 
 def main():
-    mylog(
-        "none", ["[MAIN] Setting up ..."]
-    )  # has to be level 'none' as user config not loaded yet
+    mylog("none", ["[MAIN] Setting up ..."])  # has to be level 'none' as user config not loaded yet
 
     mylog("none", [f"[conf.tz] Setting up ...{conf.tz}"])
 
@@ -221,22 +219,14 @@ def main():
             # Fetch new unprocessed events
             new_events = workflow_manager.get_new_app_events()
 
-            mylog(
-                "debug",
-                [
-                    f"[MAIN] Processing WORKFLOW new_events from get_new_app_events: {len(new_events)}"
-                ],
-            )
+            mylog("debug", [f"[MAIN] Processing WORKFLOW new_events from get_new_app_events: {len(new_events)}"],)
 
             # Process each new event and check triggers
             if len(new_events) > 0:
                 updateState("Workflows: Start")
                 update_api_flag = False
                 for event in new_events:
-                    mylog(
-                        "debug",
-                        [f"[MAIN] Processing WORKFLOW app event with GUID {event['GUID']}"],
-                    )
+                    mylog("debug", [f"[MAIN] Processing WORKFLOW app event with GUID {event['GUID']}"],)
 
                     # proceed to process events
                     workflow_manager.process_event(event)
@@ -253,12 +243,7 @@ def main():
             # check if devices list needs updating
             userUpdatedDevices = UserEventsQueueInstance().has_update_devices()
 
-            mylog(
-                "debug",
-                [
-                    f"[Plugins] Should I update API (userUpdatedDevices): {userUpdatedDevices}"
-                ],
-            )
+            mylog("debug", [f"[Plugins] Should I update API (userUpdatedDevices): {userUpdatedDevices}"],)
 
             if userUpdatedDevices:
                 update_api(db, all_plugins, True, ["devices"], userUpdatedDevices)
@@ -96,16 +96,9 @@ def update_api(
                 )  # Ensure port is an integer
                 start_server(graphql_port_value, app_state)  # Start the server
             except ValueError:
-                mylog(
-                    "none",
-                    [
-                        f"[API] Invalid GRAPHQL_PORT value, must be an integer: {graphql_port_value}"
-                    ],
-                )
+                mylog("none", [f"[API] Invalid GRAPHQL_PORT value, must be an integer: {graphql_port_value}"],)
         else:
-            mylog(
-                "none", ["[API] GRAPHQL_PORT or API_TOKEN is not set, will try later."]
-            )
+            mylog("none", ["[API] GRAPHQL_PORT or API_TOKEN is not set, will try later."])
 
 
 # -------------------------------------------------------------------------------
@@ -135,12 +128,7 @@ class api_endpoint_class:
             # Match SQL and API endpoint path
             if endpoint.query == self.query and endpoint.path == self.path:
                 found = True
-                mylog(
-                    "trace",
-                    [
-                        f"[API] api_endpoint_class: Hashes (file|old|new): ({self.fileName}|{endpoint.hash}|{self.hash})"
-                    ],
-                )
+                mylog("trace", [f"[API] api_endpoint_class: Hashes (file|old|new): ({self.fileName}|{endpoint.hash}|{self.hash})"],)
                 if endpoint.hash != self.hash:
                     self.needsUpdate = True
                     # Only update changeDetectedWhen if it hasn't been set recently
@@ -190,10 +178,7 @@ class api_endpoint_class:
                 )
             )
         ):
-            mylog(
-                "debug",
-                [f"[API] api_endpoint_class: Writing {self.fileName} after debounce."],
-            )
+            mylog("debug", [f"[API] api_endpoint_class: Writing {self.fileName} after debounce."],)
 
             write_file(self.path, json.dumps(self.jsonData))
 
@@ -173,13 +173,8 @@ class Query(ObjectType):
         network_dev_types = get_setting_value("NETWORK_DEVICE_TYPES")
 
         mylog("trace", f"[graphql_schema] allowed_statuses: {allowed_statuses}")
-        mylog(
-            "trace",
-            f"[graphql_schema] hidden_relationships: {hidden_relationships}",
-        )
-        mylog(
-            "trace", f"[graphql_schema] network_dev_types: {network_dev_types}"
-        )
+        mylog("trace", f"[graphql_schema] hidden_relationships: {hidden_relationships}",)
+        mylog("trace", f"[graphql_schema] network_dev_types: {network_dev_types}")
 
         # Filtering based on the "status"
         if status == "my_devices":
@@ -71,9 +71,7 @@ class app_state_class:
             with open(stateFile, "r") as json_file:
                 previousState = json.load(json_file)
         except json.decoder.JSONDecodeError as e:
-            mylog(
-                "none", [f"[app_state_class] Failed to handle app_state.json: {e}"]
-            )
+            mylog("none", [f"[app_state_class] Failed to handle app_state.json: {e}"])
 
         # Check if the file exists and recover previous values
         if previousState != "":
@@ -151,10 +149,7 @@ class app_state_class:
             with open(stateFile, "w") as json_file:
                 json_file.write(json_data)
         except (TypeError, ValueError) as e:
-            mylog(
-                "none",
-                [f"[app_state_class] Failed to serialize object to JSON: {e}"],
-            )
+            mylog("none", [f"[app_state_class] Failed to serialize object to JSON: {e}"],)
 
             return
 
@@ -233,15 +233,7 @@ class DB:
             rows = self.sql.fetchall()
             return rows
         except AssertionError:
-            mylog(
-                "minimal",
-                [
-                    "[Database] - ERROR: inconsistent query and/or arguments.",
-                    query,
-                    " params: ",
-                    args,
-                ],
-            )
+            mylog("minimal", ["[Database] - ERROR: inconsistent query and/or arguments.", query, " params: ", args,],)
         except sqlite3.Error as e:
             mylog("minimal", ["[Database] - SQL ERROR: ", e])
         return None
@@ -258,15 +250,7 @@ class DB:
         if len(rows) == 1:
             return rows[0]
         if len(rows) > 1:
-            mylog(
-                "verbose",
-                [
-                    "[Database] - Warning!: query returns multiple rows, only first row is passed on!",
-                    query,
-                    " params: ",
-                    args,
-                ],
-            )
+            mylog("verbose", ["[Database] - Warning!: query returns multiple rows, only first row is passed on!", query, " params: ", args,],)
             return rows[0]
         # empty result set
         return None
@@ -88,10 +88,7 @@ def ensure_column(sql, table: str, column_name: str, column_type: str) -> bool:
         mylog("none", [msg])
 
     # Add missing column
-    mylog(
-        "verbose",
-        [f"[db_upgrade] Adding '{column_name}' ({column_type}) to {table} table"],
-    )
+    mylog("verbose", [f"[db_upgrade] Adding '{column_name}' ({column_type}) to {table} table"],)
     sql.execute(f'ALTER TABLE "{table}" ADD "{column_name}" {column_type}')
     return True
 
@@ -586,16 +586,11 @@ class SafeConditionBuilder:
 
                 # Validate each component
                 if not self._validate_column_name(column):
-                    mylog(
-                        "verbose", [f"[SafeConditionBuilder] Invalid column: {column}"]
-                    )
+                    mylog("verbose", [f"[SafeConditionBuilder] Invalid column: {column}"])
                     return "", {}
 
                 if not self._validate_operator(operator):
-                    mylog(
-                        "verbose",
-                        [f"[SafeConditionBuilder] Invalid operator: {operator}"],
-                    )
+                    mylog("verbose", [f"[SafeConditionBuilder] Invalid operator: {operator}"],)
                     return "", {}
 
                 # Create parameter binding
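The validation above funnels untrusted condition parts through allow-lists before any SQL is assembled; a minimal sketch of the allow-list-plus-parameter-binding idea (hypothetical allow-lists, not the project's actual ones):

```python
ALLOWED_COLUMNS = {"devName", "devVendor", "devLastIP"}  # hypothetical
ALLOWED_OPERATORS = {"=", "!=", "LIKE"}

def build_condition(column, operator, value, param_index=0):
    # Identifiers are checked against allow-lists; only the value becomes
    # a bound parameter, so user input never reaches the raw SQL string.
    if column not in ALLOWED_COLUMNS or operator not in ALLOWED_OPERATORS:
        return "", {}
    param = f"param_{param_index}"
    return f"{column} {operator} :{param}", {param: value}

sql_fragment, params = build_condition("devName", "LIKE", "%pi%")
# -> "devName LIKE :param_0", {"param_0": "%pi%"}
```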
@@ -607,10 +602,7 @@ class SafeConditionBuilder:
                 condition_parts.append(condition_part)
 
             except Exception as e:
-                mylog(
-                    "verbose",
-                    [f"[SafeConditionBuilder] Error processing condition: {e}"],
-                )
+                mylog("verbose", [f"[SafeConditionBuilder] Error processing condition: {e}"],)
                 return "", {}
 
         if not condition_parts:
@@ -644,10 +636,7 @@ class SafeConditionBuilder:
             if event_type in self.ALLOWED_EVENT_TYPES:
                 valid_types.append(event_type)
             else:
-                mylog(
-                    "verbose",
-                    f"[SafeConditionBuilder] Invalid event type filtered out: {event_type}",
-                )
+                mylog("verbose", f"[SafeConditionBuilder] Invalid event type filtered out: {event_type}",)
 
         if not valid_types:
             return "", {}
@@ -682,10 +671,7 @@ class SafeConditionBuilder:
             return self.build_safe_condition(condition_setting)
         except ValueError as e:
             # Log the error and return empty condition for safety
-            mylog(
-                "verbose",
-                f"[SafeConditionBuilder] Unsafe condition rejected: {condition_setting}, Error: {e}",
-            )
+            mylog("verbose", f"[SafeConditionBuilder] Unsafe condition rejected: {condition_setting}, Error: {e}",)
             return "", {}
 
 
@@ -36,12 +36,7 @@ def checkPermissionsOK():
     dbW_access = os.access(fullDbPath, os.W_OK)
 
     mylog("none", ["\n"])
-    mylog(
-        "none",
-        [
-            "The backend restarted (started). If this is unexpected check https://bit.ly/NetAlertX_debug for troubleshooting tips."
-        ],
-    )
+    mylog("none", "The backend restarted (started). If this is unexpected check https://bit.ly/NetAlertX_debug for troubleshooting tips.")
     mylog("none", ["\n"])
     mylog("none", ["Permissions check (All should be True)"])
     mylog("none", ["------------------------------------------------"])
@@ -59,12 +54,7 @@ def checkPermissionsOK():
 def initialiseFile(pathToCheck, defaultFile):
     # if file not readable (missing?) try to copy over the backed-up (default) one
     if str(os.access(pathToCheck, os.R_OK)) == "False":
-        mylog(
-            "none",
-            [
-                "[Setup] (" + pathToCheck + ") file is not readable or missing. Trying to copy over the default one."
-            ],
-        )
+        mylog("none", ["[Setup] (" + pathToCheck + ") file is not readable or missing. Trying to copy over the default one."],)
         try:
             # try runnning a subprocess
             p = subprocess.Popen(
@@ -75,31 +65,16 @@ def initialiseFile(pathToCheck, defaultFile):
         stdout, stderr = p.communicate()
 
         if str(os.access(pathToCheck, os.R_OK)) == "False":
-            mylog(
-                "none",
-                [
-                    "[Setup] ⚠ ERROR copying (" + defaultFile + ") to (" + pathToCheck + "). Make sure the app has Read & Write access to the parent directory."
-                ],
-            )
+            mylog("none", "[Setup] ⚠ ERROR copying (" + defaultFile + ") to (" + pathToCheck + "). Ensure Read & Write access to the parent directory.")
         else:
-            mylog(
-                "none",
-                [
-                    "[Setup] (" + defaultFile + ") copied over successfully to (" + pathToCheck + ")."
-                ],
-            )
+            mylog("none", ["[Setup] (" + defaultFile + ") copied over successfully to (" + pathToCheck + ")."],)
 
         # write stdout and stderr into .log files for debugging if needed
         logResult(stdout, stderr)  # TO-DO should be changed to mylog
 
     except subprocess.CalledProcessError as e:
         # An error occured, handle it
-        mylog(
-            "none",
-            [
-                "[Setup] ⚠ ERROR copying (" + defaultFile + "). Make sure the app has Read & Write access to " + pathToCheck
-            ],
-        )
+        mylog("none", ["[Setup] ⚠ ERROR copying (" + defaultFile + "). Make sure the app has Read & Write access to " + pathToCheck],)
         mylog("none", [e.output])
 
 
@@ -187,14 +162,7 @@ def get_setting(key):
         mylog("none", [f"[Settings] ⚠ File not found: {settingsFile}"])
         return None
 
-    mylog(
-        "trace",
-        [
-            "[Import table_settings.json] checking table_settings.json file",
-            f"SETTINGS_LASTCACHEDATE: {SETTINGS_LASTCACHEDATE}",
-            f"fileModifiedTime: {fileModifiedTime}",
-        ],
-    )
+    mylog("trace", f"[Import table_settings.json] checking table_settings.json file SETTINGS_LASTCACHEDATE: {SETTINGS_LASTCACHEDATE} fileModifiedTime: {fileModifiedTime}")
 
     # Use cache if file hasn't changed
     if fileModifiedTime == SETTINGS_LASTCACHEDATE and SETTINGS_CACHE:
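`get_setting` serves values from an in-memory cache keyed on the settings file's modification time; a compact sketch of that invalidation pattern (hypothetical file name):

```python
import json
import os

_cache = None
_cache_mtime = None

def load_settings(path="table_settings.json"):  # hypothetical path
    global _cache, _cache_mtime
    mtime = os.path.getmtime(path)
    if _cache is not None and mtime == _cache_mtime:
        return _cache  # file unchanged since last read — reuse the cache
    with open(path) as f:
        _cache = json.load(f)
    _cache_mtime = mtime
    return _cache
```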
@@ -221,10 +189,7 @@ def get_setting(key):
     SETTINGS_LASTCACHEDATE = fileModifiedTime
 
     if key not in SETTINGS_CACHE:
-        mylog(
-            "none",
-            [f"[Settings] ⚠ ERROR - setting_missing - {key} not in {settingsFile}"],
-        )
+        mylog("none", [f"[Settings] ⚠ ERROR - setting_missing - {key} not in {settingsFile}"],)
         return None
 
     return SETTINGS_CACHE[key]
@@ -357,10 +322,7 @@ def setting_value_to_python_type(set_type, set_value):
             value = json.loads(set_value.replace("'", "\""))
 
         except json.JSONDecodeError as e:
-            mylog(
-                "none",
-                [f"[setting_value_to_python_type] Error decoding JSON object: {e}"],
-            )
+            mylog("none", [f"[setting_value_to_python_type] Error decoding JSON object: {e}"],)
             mylog("none", [set_value])
             value = []
 
@@ -375,10 +337,7 @@ def setting_value_to_python_type(set_type, set_value):
         try:
             value = reverseTransformers(json.loads(set_value), transformers)
         except json.JSONDecodeError as e:
-            mylog(
-                "none",
-                [f"[setting_value_to_python_type] Error decoding JSON object: {e}"],
-            )
+            mylog("none", [f"[setting_value_to_python_type] Error decoding JSON object: {e}"],)
             mylog("none", [{set_value}])
             value = {}
 
@@ -766,9 +725,7 @@ def checkNewVersion():
     try:
         data = json.loads(text)
     except json.JSONDecodeError:
-        mylog(
-            "minimal", ["[Version check] ⚠ ERROR: Invalid JSON response from GitHub."]
-        )
+        mylog("minimal", ["[Version check] ⚠ ERROR: Invalid JSON response from GitHub."])
         return False
 
     # make sure we received a valid response and not an API rate limit exceeded message
@@ -784,10 +741,7 @@ def checkNewVersion():
         else:
             mylog("none", ["[Version check] Running the latest version."])
     else:
-        mylog(
-            "minimal",
-            ["[Version check] ⚠ ERROR: Received unexpected response from GitHub."],
-        )
+        mylog("minimal", ["[Version check] ⚠ ERROR: Received unexpected response from GitHub."],)
 
         return False
 
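`checkNewVersion` tolerates both malformed JSON and rate-limit payloads from GitHub; a sketch of the same guard structure, with the endpoint as an assumption rather than the project's actual URL:

```python
import json
import urllib.request

def fetch_latest_release_tag():
    # Hypothetical endpoint; the real code may query a different URL.
    url = "https://api.github.com/repos/jokob-sk/NetAlertX/releases/latest"
    try:
        with urllib.request.urlopen(url, timeout=10) as resp:
            data = json.loads(resp.read().decode())
    except (OSError, json.JSONDecodeError):
        return None  # invalid JSON or network failure
    # Rate-limit responses are JSON too, but lack the expected key.
    return data.get("tag_name") if isinstance(data, dict) else None
```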
@@ -180,10 +180,7 @@ def importConfigs(pm, db, all_plugins):
     fileModifiedTime = os.path.getmtime(config_file)
 
     mylog("debug", ["[Import Config] checking config file "])
-    mylog(
-        "debug",
-        ["[Import Config] lastImportedConfFile :", conf.lastImportedConfFile],
-    )
+    mylog("debug", ["[Import Config] lastImportedConfFile :", conf.lastImportedConfFile],)
     mylog("debug", ["[Import Config] fileModifiedTime :", fileModifiedTime])
 
     if (fileModifiedTime == conf.lastImportedConfFile) and all_plugins is not None:
@@ -399,12 +396,7 @@ def importConfigs(pm, db, all_plugins):
         conf.TIMEZONE = ccd(
             "TIMEZONE", conf.tz, c_d, "_KEEP_", "_KEEP_", "[]", "General"
         )
-        mylog(
-            "none",
-            [
-                f"[Config] Invalid timezone '{conf.TIMEZONE}', defaulting to {default_tz}."
-            ],
-        )
+        mylog("none", [f"[Config] Invalid timezone '{conf.TIMEZONE}', defaulting to {default_tz}."],)
 
     # TODO cleanup later ----------------------------------------------------------------------------------
     # init all time values as we have timezone - all this shoudl be moved into plugin/plugin settings
@@ -450,13 +442,7 @@ def importConfigs(pm, db, all_plugins):
 
     all_plugins = get_plugins_configs(conf.DISCOVER_PLUGINS)
 
-    mylog(
-        "none",
-        [
-            "[Config] Plugins: Number of all plugins (including not loaded): ",
-            len(all_plugins),
-        ],
-    )
+    mylog("none", ["[Config] Plugins: Number of all plugins (including not loaded): ", len(all_plugins),],)
 
     plugin_indexes_to_remove = []
     all_plugins_prefixes = []  # to init the LOADED_PLUGINS setting with correct options
@@ -580,9 +566,7 @@ def importConfigs(pm, db, all_plugins):
         "General",
     )
 
-    mylog(
-        "none", ["[Config] Number of Plugins to load: ", len(loaded_plugins_prefixes)]
-    )
+    mylog("none", ["[Config] Number of Plugins to load: ", len(loaded_plugins_prefixes)])
     mylog("none", ["[Config] Plugins to load: ", loaded_plugins_prefixes])
 
     conf.plugins_once_run = False
@@ -606,12 +590,7 @@ def importConfigs(pm, db, all_plugins):
 
                 # Log the value being passed
                 # ccd(key, default, config_dir, name, inputtype, options, group, events=None, desc="", setJsonMetadata=None, overrideTemplate=None, forceDefault=False)
-                mylog(
-                    "verbose",
-                    [
-                        f"[Config] Setting override {setting_name} with value: {value}"
-                    ],
-                )
+                mylog("verbose", [f"[Config] Setting override {setting_name} with value: {value}"],)
                 ccd(
                     setting_name,
                     value,
@@ -630,12 +609,7 @@ def importConfigs(pm, db, all_plugins):
                 )
 
         except json.JSONDecodeError:
-            mylog(
-                "none",
-                [
-                    f"[Config] [ERROR] Setting override decoding JSON from {app_conf_override_path}"
-                ],
-            )
+            mylog("none", [f"[Config] [ERROR] Setting override decoding JSON from {app_conf_override_path}"],)
     else:
         mylog("debug", [f"[Config] File {app_conf_override_path} does not exist."])
 
@@ -777,10 +751,7 @@ def renameSettings(config_file):
     timestamp = datetime.datetime.now().strftime("%Y%m%d%H%M%S")
     backup_file = f"{config_file}_old_setting_names_{timestamp}.bak"
 
-    mylog(
-        "debug",
-        f"[Config] Old setting names will be replaced and a backup ({backup_file}) of the config created.",
-    )
+    mylog("debug", f"[Config] Old setting names will be replaced and a backup ({backup_file}) of the config created.",)
 
     shutil.copy(str(config_file), backup_file)  # Convert config_file to a string
 
@@ -807,6 +778,4 @@ def renameSettings(config_file):
         )  # Convert config_file to a string
 
     else:
-        mylog(
-            "debug", "[Config] No old setting names found in the file. No changes made."
-        )
+        mylog("debug", "[Config] No old setting names found in the file. No changes made.")
@@ -119,10 +119,7 @@ def remove_old(keepNumberOfEntries):
     try:
         with open(NOTIFICATION_API_FILE, "w") as file:
             json.dump(trimmed, file, indent=4)
-        mylog(
-            "verbose",
-            f"[Notification] Trimmed notifications to latest {keepNumberOfEntries}",
-        )
+        mylog("verbose", f"[Notification] Trimmed notifications to latest {keepNumberOfEntries}",)
     except Exception as e:
         mylog("none", f"Error writing trimmed notifications file: {e}")
 
@@ -295,9 +295,7 @@ class NotificationInstance:
             (f"-{minutes} minutes", tz_offset),
         )
 
-        mylog(
-            "minimal", ["[Notification] Notifications changes: ", self.db.sql.rowcount]
-        )
+        mylog("minimal", ["[Notification] Notifications changes: ", self.db.sql.rowcount])
 
         # clear plugin events
         self.clearPluginEvents()
@@ -31,10 +31,7 @@ class UserEventsQueueInstance:
         Returns an empty list if the file doesn't exist.
         """
         if not os.path.exists(self.log_file):
-            mylog(
-                "none",
-                ["[UserEventsQueueInstance] Log file not found: ", self.log_file],
-            )
+            mylog("none", ["[UserEventsQueueInstance] Log file not found: ", self.log_file],)
             return []  # No log file, return empty list
         with open(self.log_file, "r") as file:
             return file.readlines()
@@ -123,9 +123,7 @@ def update_devices_data_from_scan(db):
                     )""")
 
     # Update only devices with empty or NULL devParentMAC
-    mylog(
-        "debug", "[Update Devices] - (if not empty) cur_NetworkNodeMAC -> devParentMAC"
-    )
+    mylog("debug", "[Update Devices] - (if not empty) cur_NetworkNodeMAC -> devParentMAC")
     sql.execute("""UPDATE Devices
                     SET devParentMAC = (
                         SELECT cur_NetworkNodeMAC
@@ -144,10 +142,7 @@ def update_devices_data_from_scan(db):
                     """)
 
     # Update only devices with empty or NULL devSite
-    mylog(
-        "debug",
-        "[Update Devices] - (if not empty) cur_NetworkSite -> (if empty) devSite",
-    )
+    mylog("debug", "[Update Devices] - (if not empty) cur_NetworkSite -> (if empty) devSite",)
     sql.execute("""UPDATE Devices
                     SET devSite = (
                         SELECT cur_NetworkSite
@@ -325,9 +320,7 @@ def save_scanned_devices(db):
         .strip()
     )
 
-    mylog(
-        "debug", ["[Save Devices] Saving this IP into the CurrentScan table:", local_ip]
-    )
+    mylog("debug", ["[Save Devices] Saving this IP into the CurrentScan table:", local_ip])
 
     if check_IP_format(local_ip) == "":
         local_ip = "0.0.0.0"
@@ -361,23 +354,12 @@ def print_scan_stats(db):
     sql.execute(query)
     stats = sql.fetchall()
 
-    mylog(
-        "verbose",
-        f"[Scan Stats] Devices Detected.......: {stats[0]['devices_detected']}",
-    )
+    mylog("verbose", f"[Scan Stats] Devices Detected.......: {stats[0]['devices_detected']}",)
     mylog("verbose", f"[Scan Stats] New Devices............: {stats[0]['new_devices']}")
     mylog("verbose", f"[Scan Stats] Down Alerts............: {stats[0]['down_alerts']}")
-    mylog(
-        "verbose",
-        f"[Scan Stats] New Down Alerts........: {stats[0]['new_down_alerts']}",
-    )
-    mylog(
-        "verbose",
-        f"[Scan Stats] New Connections........: {stats[0]['new_connections']}",
-    )
-    mylog(
-        "verbose", f"[Scan Stats] Disconnections.........: {stats[0]['disconnections']}"
-    )
+    mylog("verbose", f"[Scan Stats] New Down Alerts........: {stats[0]['new_down_alerts']}",)
+    mylog("verbose", f"[Scan Stats] New Connections........: {stats[0]['new_connections']}",)
+    mylog("verbose", f"[Scan Stats] Disconnections.........: {stats[0]['disconnections']}")
     mylog("verbose", f"[Scan Stats] IP Changes.............: {stats[0]['ip_changes']}")
 
     # if str(stats[0]["new_devices"]) != '0':
@@ -395,10 +377,7 @@ def print_scan_stats(db):
         row_dict = dict(row)
         mylog("trace", f" {row_dict}")
 
-    mylog(
-        "trace",
-        " ================ Events table content where eve_PendingAlertEmail = 1 ================",
-    )
+    mylog("trace", " ================ Events table content where eve_PendingAlertEmail = 1 ================",)
     sql.execute("select * from Events where eve_PendingAlertEmail = 1")
     rows = sql.fetchall()
     for row in rows:
@@ -654,10 +633,7 @@ def check_plugin_data_changed(pm, plugins_to_check):
 
     # Continue if changes detected
     for p in plugins_changed:
-        mylog(
-            'debug',
-            f'[check_plugin_data_changed] {p} changed (last_data_change|last_data_check): ({pm.plugin_states.get(p, {}).get("lastDataChange")}|{pm.plugin_checks.get(p)})'
-        )
+        mylog('debug', f'[check_plugin_data_changed] {p} changed (last_change|last_check): ({pm.plugin_states.get(p, {}).get("lastDataChange")}|{pm.plugin_checks.get(p)})')
 
     return True
 
@@ -741,10 +717,7 @@ def update_devices_names(pm):
     # --- Step 1: Update device names for unknown devices ---
     unknownDevices = device_handler.getUnknown()
     if unknownDevices:
-        mylog(
-            "verbose",
-            f"[Update Device Name] Trying to resolve devices without name. Unknown devices count: {len(unknownDevices)}",
-        )
+        mylog("verbose", f"[Update Device Name] Trying to resolve devices without name. Unknown devices count: {len(unknownDevices)}",)
 
         # Try resolving both name and FQDN
         recordsToUpdate, recordsNotFound, fs, notFound = resolve_devices(
@@ -752,10 +725,8 @@ def update_devices_names(pm):
         )
 
         # Log summary
-        mylog(
-            "verbose",
-            f"[Update Device Name] Names Found (DIGSCAN/AVAHISCAN/NSLOOKUP/NBTSCAN): {len(recordsToUpdate)} ({fs['DIGSCAN']}/{fs['AVAHISCAN']}/{fs['NSLOOKUP']}/{fs['NBTSCAN']})",
-        )
+        res_string = f"{fs['DIGSCAN']}/{fs['AVAHISCAN']}/{fs['NSLOOKUP']}/{fs['NBTSCAN']}"
+        mylog("verbose", f"[Update Device Name] Names Found (DIGSCAN/AVAHISCAN/NSLOOKUP/NBTSCAN): {len(recordsToUpdate)} ({res_string})",)
         mylog("verbose", f"[Update Device Name] Names Not Found : {notFound}")
 
         # Apply updates to database
@@ -771,10 +742,7 @@ def update_devices_names(pm):
     if get_setting_value("REFRESH_FQDN"):
         allDevices = device_handler.getAll()
         if allDevices:
-            mylog(
-                "verbose",
-                f"[Update FQDN] Trying to resolve FQDN. Devices count: {len(allDevices)}",
-            )
+            mylog("verbose", f"[Update FQDN] Trying to resolve FQDN. Devices count: {len(allDevices)}",)
 
             # Try resolving only FQDN
             recordsToUpdate, _, fs, notFound = resolve_devices(
@@ -782,10 +750,8 @@ def update_devices_names(pm):
             )
 
             # Log summary
-            mylog(
-                "verbose",
-                f"[Update FQDN] Names Found (DIGSCAN/AVAHISCAN/NSLOOKUP/NBTSCAN): {len(recordsToUpdate)}({fs['DIGSCAN']}/{fs['AVAHISCAN']}/{fs['NSLOOKUP']}/{fs['NBTSCAN']})",
-            )
+            res_string = f"{fs['DIGSCAN']}/{fs['AVAHISCAN']}/{fs['NSLOOKUP']}/{fs['NBTSCAN']}"
+            mylog("verbose", f"[Update FQDN] Names Found (DIGSCAN/AVAHISCAN/NSLOOKUP/NBTSCAN): {len(recordsToUpdate)}({res_string})",)
             mylog("verbose", f"[Update FQDN] Names Not Found : {notFound}")
 
             # Apply FQDN-only updates
@@ -907,25 +873,13 @@ def query_MAC_vendor(pMAC):
                     parts = line.split("\t", 1)
                     if len(parts) > 1:
                         vendor = parts[1].strip()
-                        mylog(
-                            "debug",
-                            [
-                                f"[Vendor Check] Found '{vendor}' for '{pMAC}' in {vendorsPath}"
-                            ],
-                        )
+                        mylog("debug", [f"[Vendor Check] Found '{vendor}' for '{pMAC}' in {vendorsPath}"], )
                         return vendor
                     else:
-                        mylog(
-                            "debug",
-                            [
-                                f'[Vendor Check] ⚠ ERROR: Match found, but line could not be processed: "{line_lower}"'
-                            ],
-                        )
+                        mylog("debug", [f'[Vendor Check] ⚠ ERROR: Match found, but line could not be processed: "{line_lower}"'],)
                         return -1
 
         return -1  # MAC address not found in the database
     except FileNotFoundError:
-        mylog(
-            "none", [f"[Vendor Check] ⚠ ERROR: Vendors file {vendorsPath} not found."]
-        )
+        mylog("none", [f"[Vendor Check] ⚠ ERROR: Vendors file {vendorsPath} not found."])
         return -1
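`query_MAC_vendor` scans a tab-separated vendors file for the MAC's OUI prefix; a minimal sketch of the matching logic (hypothetical file name and layout — a prefix, a tab, then the vendor name):

```python
def lookup_vendor(mac, vendors_path="ieee-oui.txt"):  # hypothetical file
    prefix = mac.lower().replace(":", "")[:6]  # first three octets
    try:
        with open(vendors_path, encoding="utf-8", errors="ignore") as f:
            for line in f:
                if line.lower().startswith(prefix):
                    parts = line.split("\t", 1)
                    if len(parts) > 1:
                        return parts[1].strip()
        return None  # MAC prefix not in the file
    except FileNotFoundError:
        return None
```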
@@ -25,10 +25,7 @@ try:
             rule["icon_base64"] = ""
 except Exception as e:
     MAC_TYPE_ICON_RULES = []
-    mylog(
-        "none",
-        f"[guess_device_attributes] Failed to load device_heuristics_rules.json: {e}",
-    )
+    mylog("none", f"[guess_device_attributes] Failed to load device_heuristics_rules.json: {e}",)
 
 
 # -----------------------------------------
@@ -169,10 +166,8 @@ def guess_device_attributes(
     default_icon: str,
     default_type: str,
 ) -> Tuple[str, str]:
-    mylog(
-        "debug",
-        f"[guess_device_attributes] Guessing attributes for (vendor|mac|ip|name): ('{vendor}'|'{mac}'|'{ip}'|'{name}')",
-    )
+    mylog("debug", f"[guess_device_attributes] Guessing attributes for (vendor|mac|ip|name): ('{vendor}'|'{mac}'|'{ip}'|'{name}')",)
 
     # --- Normalize inputs ---
     vendor = str(vendor).lower().strip() if vendor else "unknown"
@@ -207,10 +202,7 @@ def guess_device_attributes(
     type_ = type_ or default_type
     icon = icon or default_icon
 
-    mylog(
-        "debug",
-        f"[guess_device_attributes] Guessed attributes (icon|type_): ('{icon}'|'{type_}')",
-    )
+    mylog("debug", f"[guess_device_attributes] Guessed attributes (icon|type_): ('{icon}'|'{type_}')",)
     return icon, type_
 
 
@@ -50,9 +50,7 @@ def process_scan(db):
     update_devices_data_from_scan(db)
 
     # Pair session events (Connection / Disconnection)
-    mylog(
-        "verbose", "[Process Scan] Pairing session events (connection / disconnection) "
-    )
+    mylog("verbose", "[Process Scan] Pairing session events (connection / disconnection) ")
     pair_sessions_events(db)
 
     # Sessions snapshot
@@ -221,10 +219,7 @@ def insertOnlineHistory(db):
                 VALUES (?, ?, ?, ?, ?, ?)
     """
 
-    mylog(
-        "debug",
-        f"[Presence graph] Sql query: {insert_query} with values: {scanTimestamp}, {onlineDevices}, {downDevices}, {allDevices}, {archivedDevices}, {offlineDevices}",
-    )
+    mylog("debug", f"[Presence graph] Sql query: {insert_query} with values: {scanTimestamp}, {onlineDevices}, {downDevices}, {allDevices}, {archivedDevices}, {offlineDevices}",)
 
     # Debug output
     print_table_schema(db, "Online_History")
@@ -26,12 +26,7 @@ def logEventStatusCounts(objName, pluginEvents):
             status_counts[status] = 1
 
     for status, count in status_counts.items():
-        mylog(
-            "debug",
-            [
-                f'[{module_name}] In {objName} there are {count} events with the status "{status}" '
-            ],
-        )
+        mylog("debug", [f'[{module_name}] In {objName} there are {count} events with the status "{status}" '],)
 
 
 # -------------------------------------------------------------------------------
@@ -100,10 +95,7 @@ def list_to_csv(arr):
 
     mylog("debug", f"[{module_name}] Flattening the below array")
     mylog("debug", arr)
-    mylog(
-        "debug",
-        f"[{module_name}] isinstance(arr, list) : {isinstance(arr, list)} | isinstance(arr, str) : {isinstance(arr, str)}",
-    )
+    mylog("debug", f"[{module_name}] isinstance(arr, list) : {isinstance(arr, list)} | isinstance(arr, str) : {isinstance(arr, str)}",)
 
     if isinstance(arr, str):
         tmpStr = (
@@ -227,19 +219,9 @@ def get_plugins_configs(loadAll):
 
     except (FileNotFoundError, json.JSONDecodeError):
         # Handle the case when the file is not found or JSON decoding fails
-        mylog(
-            "none",
-            [
-                f"[{module_name}] ⚠ ERROR - JSONDecodeError or FileNotFoundError for file {config_path}"
-            ],
-        )
+        mylog("none", f"[{module_name}] ⚠ ERROR - JSONDecodeError or FileNotFoundError for file {config_path}")
     except Exception as e:
-        mylog(
-            "none",
-            [
-                f"[{module_name}] ⚠ ERROR - Exception for file {config_path}: {str(e)}"
-            ],
-        )
+        mylog("none", f"[{module_name}] ⚠ ERROR - Exception for file {config_path}: {str(e)}")
 
     # Sort pluginsList based on "execution_order"
     pluginsListSorted = sorted(pluginsList, key=get_layer)
@@ -285,23 +267,13 @@ def getPluginObject(keyValues):
             if all_match:
                 return item
 
-        mylog(
-            "verbose",
-            [
-                f"[{module_name}] 💬 INFO - Object not found {json.dumps(keyValues)} "
-            ],
-        )
+        mylog("verbose", f"[{module_name}] 💬 INFO - Object not found {json.dumps(keyValues)} ")
 
         return {}
 
     except (FileNotFoundError, json.JSONDecodeError, ValueError):
         # Handle the case when the file is not found, JSON decoding fails, or data is not in the expected format
-        mylog(
-            "verbose",
-            [
-                f"[{module_name}] ⚠ ERROR - JSONDecodeError or FileNotFoundError for file {plugins_objects}"
-            ],
-        )
+        mylog("verbose", f"[{module_name}] ⚠ ERROR - JSONDecodeError or FileNotFoundError for file {plugins_objects}")
 
         return {}
 
@@ -29,10 +29,7 @@ class UpdateFieldAction(Action):
         self.db = db
 
     def execute(self):
-        mylog(
-            "verbose",
-            f"[WF] Updating field '{self.field}' to '{self.value}' for event object {self.trigger.object_type}",
-        )
+        mylog("verbose", f"[WF] Updating field '{self.field}' to '{self.value}' for event object {self.trigger.object_type}")
 
         obj = self.trigger.object
 
@@ -109,12 +106,7 @@ class RunPluginAction(Action):
     def execute(self):
         obj = self.trigger.object
 
-        mylog(
-            "verbose",
-            [
-                f"Executing plugin '{self.plugin_name}' with parameters {self.params} for object {obj}"
-            ],
-        )
+        mylog("verbose", f"Executing plugin '{self.plugin_name}' with parameters {self.params} for object {obj}")
         # PluginManager.run(self.plugin_name, self.parameters)
         return obj
 
@@ -129,12 +121,7 @@ class SendNotificationAction(Action):
 
     def execute(self):
         obj = self.trigger.object
-        mylog(
-            "verbose",
-            [
-                f"Sending notification via '{self.method}': {self.message} for object {obj}"
-            ],
-        )
+        mylog("verbose", f"Sending notification via '{self.method}': {self.message} for object {obj}")
         # NotificationManager.send(self.method, self.message)
         return obj
 
@@ -52,10 +52,7 @@ class ConditionGroup:
     """Handles condition groups with AND, OR logic, supporting nested groups."""
 
    def __init__(self, group_json):
-        mylog(
-            "verbose",
-            [f"[WF] ConditionGroup json.dumps(group_json): {json.dumps(group_json)}"],
-        )
+        mylog("verbose", f"[WF] ConditionGroup json.dumps(group_json): {json.dumps(group_json)}")
 
         self.logic = group_json.get("logic", "AND").upper()
         self.conditions = []
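`ConditionGroup` evaluates nested AND/OR groups against a trigger; a compact sketch of how such a structure can be walked recursively (the shape is inferred from the constructor above, so treat it as an assumption):

```python
def evaluate_group(group_json, check_condition):
    # check_condition(cond) -> bool is supplied by the caller.
    logic = group_json.get("logic", "AND").upper()
    results = []
    for item in group_json.get("conditions", []):
        if "logic" in item:  # nested group — recurse
            results.append(evaluate_group(item, check_condition))
        else:
            results.append(check_condition(item))
    return all(results) if logic == "AND" else any(results)
```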
@@ -53,21 +53,13 @@ class WorkflowManager:
             # Ensure workflow is enabled before proceeding
             if workflow.get("enabled", "No").lower() == "yes":
                 wfName = workflow["name"]
-                mylog(
-                    "debug",
-                    [f"[WF] Checking if '{evGuid}' triggers the workflow '{wfName}'"],
-                )
+                mylog("debug", f"[WF] Checking if '{evGuid}' triggers the workflow '{wfName}'")
 
                 # construct trigger object which also evaluates if the current event triggers it
                 trigger = Trigger(workflow["trigger"], event, self.db)
 
                 if trigger.triggered:
-                    mylog(
-                        "verbose",
-                        [
-                            f"[WF] Event with GUID '{evGuid}' triggered the workflow '{wfName}'"
-                        ],
-                    )
+                    mylog("verbose", f"[WF] Event with GUID '{evGuid}' triggered the workflow '{wfName}'")
 
                     self.execute_workflow(workflow, trigger)
 
@@ -98,12 +90,7 @@ class WorkflowManager:
             evaluator = ConditionGroup(condition_group)
 
             if evaluator.evaluate(trigger):  # If any group evaluates to True
-                mylog(
-                    "none",
-                    [
-                        f"[WF] Workflow {wfName} will be executed - conditions were evaluated as TRUE"
-                    ],
-                )
+                mylog("none", f"[WF] Workflow {wfName} will be executed - conditions were evaluated as TRUE")
                 mylog("debug", [f"[WF] Workflow condition_group: {condition_group}"])
 
                 self.execute_actions(workflow["actions"], trigger)
@@ -24,12 +24,7 @@ class Trigger:
             self.object_type == event["ObjectType"] and self.event_type == event["AppEventType"]
         )
 
-        mylog(
-            "debug",
-            [
-                f"""[WF] self.triggered '{self.triggered}' for event '{get_array_from_sql_rows(event)} and trigger {json.dumps(triggerJson)}' """
-            ],
-        )
+        mylog("debug", f"""[WF] self.triggered '{self.triggered}' for event '{get_array_from_sql_rows(event)} and trigger {json.dumps(triggerJson)}' """)
 
         if self.triggered:
             # object type corresponds with the DB table name