-
- "AppEventType" TEXT, -- "create", "update", "delete" (+TBD)
- "Helper1" TEXT,
- "Helper2" TEXT,
- "Helper3" TEXT,
- "Extra" TEXT,
- PRIMARY KEY("Index" AUTOINCREMENT)
- );
- """)
-
- # -------------
- # Device events
-
- sql_devices_mappedColumns = '''
- "GUID",
- "DateTimeCreated",
- "ObjectType",
- "ObjectPrimaryID",
- "ObjectSecondaryID",
- "ObjectStatus",
- "ObjectStatusColumn",
- "ObjectIsNew",
- "ObjectIsArchived",
- "ObjectForeignKey",
- "AppEventType"
- '''
-
- # Trigger for create event
- self.db.sql.execute(f'''
- CREATE TRIGGER IF NOT EXISTS "trg_create_device"
- AFTER INSERT ON "Devices"
- BEGIN
- INSERT INTO "AppEvents" (
- {sql_devices_mappedColumns}
- )
- VALUES (
- {sql_generateGuid},
- DATETIME('now'),
- 'Devices',
- NEW.devMac,
- NEW.devLastIP,
- CASE WHEN NEW.devPresentLastScan = 1 THEN 'online' ELSE 'offline' END,
- 'devPresentLastScan',
- NEW.devIsNew,
- NEW.devIsArchived,
- NEW.devMac,
- 'create'
- );
- END;
- ''')
-
- # 🔴 This would generate too many events, disabled for now
- # # Trigger for read event
- # self.db.sql.execute('''
- # TODO
- # ''')
-
- # Trigger for update event
- self.db.sql.execute(f'''
- CREATE TRIGGER IF NOT EXISTS "trg_update_device"
- AFTER UPDATE ON "Devices"
- BEGIN
- INSERT INTO "AppEvents" (
- {sql_devices_mappedColumns}
- )
- VALUES (
- {sql_generateGuid},
- DATETIME('now'),
- 'Devices',
- NEW.devMac,
- NEW.devLastIP,
- CASE WHEN NEW.devPresentLastScan = 1 THEN 'online' ELSE 'offline' END,
- 'devPresentLastScan',
- NEW.devIsNew,
- NEW.devIsArchived,
- NEW.devMac,
- 'update'
- );
- END;
- ''')
-
- # Trigger for delete event
- self.db.sql.execute(f'''
- CREATE TRIGGER IF NOT EXISTS "trg_delete_device"
- AFTER DELETE ON "Devices"
- BEGIN
- INSERT INTO "AppEvents" (
- {sql_devices_mappedColumns}
- )
- VALUES (
- {sql_generateGuid},
- DATETIME('now'),
- 'Devices',
- OLD.devMac,
- OLD.devLastIP,
- CASE WHEN OLD.devPresentLastScan = 1 THEN 'online' ELSE 'offline' END,
- 'devPresentLastScan',
- OLD.devIsNew,
- OLD.devIsArchived,
- OLD.devMac,
- 'delete'
- );
- END;
- ''')
-
-
- # -------------
- # Plugins_Objects events
-
- sql_plugins_objects_mappedColumns = '''
- "GUID",
- "DateTimeCreated",
- "ObjectType",
- "ObjectPlugin",
- "ObjectPrimaryID",
- "ObjectSecondaryID",
- "ObjectForeignKey",
- "ObjectStatusColumn",
- "ObjectStatus",
- "AppEventType"
- '''
-
- # Create trigger for update event on Plugins_Objects
- self.db.sql.execute(f'''
- CREATE TRIGGER IF NOT EXISTS trg_update_plugin_object
- AFTER UPDATE ON Plugins_Objects
- BEGIN
- INSERT INTO AppEvents (
- {sql_plugins_objects_mappedColumns}
- )
- VALUES (
- {sql_generateGuid},
- DATETIME('now'),
- 'Plugins_Objects',
- NEW.Plugin,
- NEW.Object_PrimaryID,
- NEW.Object_SecondaryID,
- NEW.ForeignKey,
- 'Status',
- NEW.Status,
- 'update'
- );
- END;
- ''')
-
- # Create trigger for CREATE event on Plugins_Objects
- self.db.sql.execute(f'''
- CREATE TRIGGER IF NOT EXISTS trg_create_plugin_object
- AFTER INSERT ON Plugins_Objects
- BEGIN
- INSERT INTO AppEvents (
- {sql_plugins_objects_mappedColumns}
- )
- VALUES (
- {sql_generateGuid},
- DATETIME('now'),
- 'Plugins_Objects',
- NEW.Plugin,
- NEW.Object_PrimaryID,
- NEW.Object_SecondaryID,
- NEW.ForeignKey,
- 'Status',
- NEW.Status,
- 'create'
- );
- END;
- ''')
-
- # Create trigger for DELETE event on Plugins_Objects
- self.db.sql.execute(f'''
- CREATE TRIGGER IF NOT EXISTS trg_delete_plugin_object
- AFTER DELETE ON Plugins_Objects
- BEGIN
- INSERT INTO AppEvents (
- {sql_plugins_objects_mappedColumns}
- )
- VALUES (
- {sql_generateGuid},
- DATETIME('now'),
- 'Plugins_Objects',
- OLD.Plugin,
- OLD.Object_PrimaryID,
- OLD.Object_SecondaryID,
- OLD.ForeignKey,
- 'Status',
- OLD.Status,
- 'delete'
- );
- END;
- ''')
-
- self.save()
-
- # -------------------------------------------------------------------------------
- # -------------------------------------------------------------------------------
- # below code is unused
- # -------------------------------------------------------------------------------
-
- # Create a new DB entry if new notifications are available, otherwise skip
- def create(self, Extra="", **kwargs):
- # Check if nothing to report, end
- if not any(kwargs.values()):
- return False
-
- # Continue and save into DB if notifications are available
- self.GUID = str(uuid.uuid4())
- self.DateTimeCreated = timeNowTZ()
- self.ObjectType = "Plugins" # Modify ObjectType as needed
-
- # Optional parameters
- self.ObjectGUID = kwargs.get("ObjectGUID", "")
- self.ObjectPlugin = kwargs.get("ObjectPlugin", "")
- self.ObjectMAC = kwargs.get("ObjectMAC", "")
- self.ObjectIP = kwargs.get("ObjectIP", "")
- self.ObjectPrimaryID = kwargs.get("ObjectPrimaryID", "")
- self.ObjectSecondaryID = kwargs.get("ObjectSecondaryID", "")
- self.ObjectForeignKey = kwargs.get("ObjectForeignKey", "")
- self.ObjectIndex = kwargs.get("ObjectIndex", "")
- self.ObjectRowID = kwargs.get("ObjectRowID", "")
- self.ObjectStatusColumn = kwargs.get("ObjectStatusColumn", "")
- self.ObjectStatus = kwargs.get("ObjectStatus", "")
-
- self.AppEventStatus = "new" # Modify AppEventStatus as needed
- self.Extra = Extra
-
- self.upsert()
-
- return True
-
- def upsert(self):
- self.db.sql.execute("""
- INSERT OR REPLACE INTO AppEvents (
- "GUID",
- "DateTimeCreated",
- "ObjectType",
- "ObjectGUID",
- "ObjectPlugin",
- "ObjectMAC",
- "ObjectIP",
- "ObjectPrimaryID",
- "ObjectSecondaryID",
- "ObjectForeignKey",
- "ObjectIndex",
- "ObjectRowID",
- "ObjectStatusColumn",
- "ObjectStatus",
- "AppEventStatus",
- "Extra"
- )
- VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
- """, (
- self.GUID,
- self.DateTimeCreated,
- self.ObjectType,
- self.ObjectGUID,
- self.ObjectPlugin,
- self.ObjectMAC,
- self.ObjectIP,
- self.ObjectPrimaryID,
- self.ObjectSecondaryID,
- self.ObjectForeignKey,
- self.ObjectIndex,
- self.ObjectRowID,
- self.ObjectStatusColumn,
- self.ObjectStatus,
- self.AppEventStatus,
- self.Extra
- ))
-
- self.save()
-
- def save(self):
- # Commit changes
- self.db.commitDB()
-
-
-def getPluginObject(**kwargs):
-
- # Check if nothing, end
- if not any(kwargs.values()):
- return None
-
- # Optional parameters
- GUID = kwargs.get("GUID", "")
- Plugin = kwargs.get("Plugin", "")
- MAC = kwargs.get("MAC", "")
- IP = kwargs.get("IP", "")
- PrimaryID = kwargs.get("PrimaryID", "")
- SecondaryID = kwargs.get("SecondaryID", "")
- ForeignKey = kwargs.get("ForeignKey", "")
- Index = kwargs.get("Index", "")
- RowID = kwargs.get("RowID", "")
-
- # we need the plugin
- if Plugin == "":
- return None
-
- plugins_objects = apiPath + 'table_plugins_objects.json'
-
- try:
- with open(plugins_objects, 'r') as json_file:
-
- data = json.load(json_file)
-
- for item in data.get("data",[]):
- if item.get("Index") == Index:
- return item
-
- for item in data.get("data",[]):
- if item.get("ObjectPrimaryID") == PrimaryID and item.get("ObjectSecondaryID") == SecondaryID:
- return item
-
- for item in data.get("data",[]):
- if item.get("ObjectPrimaryID") == MAC and item.get("ObjectSecondaryID") == IP:
- return item
-
- for item in data.get("data",[]):
- if item.get("ObjectPrimaryID") == PrimaryID and item.get("ObjectSecondaryID") == IP:
- return item
-
- for item in data.get("data",[]):
- if item.get("ObjectPrimaryID") == MAC and item.get("ObjectSecondaryID") == IP:
- return item
-
-
- mylog('debug', [f'[{module_name}] ⚠ ERROR - Object not found - GUID:{GUID} | Plugin:{Plugin} | MAC:{MAC} | IP:{IP} | PrimaryID:{PrimaryID} | SecondaryID:{SecondaryID} | ForeignKey:{ForeignKey} | Index:{Index} | RowID:{RowID} '])
-
- return None
-
- except (FileNotFoundError, json.JSONDecodeError, ValueError) as e:
- # Handle the case when the file is not found, JSON decoding fails, or data is not in the expected format
- mylog('none', [f'[{module_name}] ⚠ ERROR - JSONDecodeError or FileNotFoundError for file {plugins_objects}'])
-
- return None
-
diff --git a/server/const.py b/server/const.py
index 74ae2c48..9663c7ec 100755
--- a/server/const.py
+++ b/server/const.py
@@ -71,7 +71,7 @@ sql_devices_all = """
FROM Devices
"""
-sql_appevents = """select * from AppEvents"""
+sql_appevents = """select * from AppEvents order by DateTimeCreated desc"""
# The below query calculates counts of devices in various categories:
# (connected/online, offline, down, new, archived),
# as well as a combined count for devices that match any status listed in the UI_MY_DEVICES setting
diff --git a/server/crypto_utils.py b/server/crypto_utils.py
index 3576f9dd..b139d488 100755
--- a/server/crypto_utils.py
+++ b/server/crypto_utils.py
@@ -3,6 +3,7 @@ from Crypto.Util.Padding import pad, unpad
import base64
import os
import hashlib
+import uuid
# SIMPLE CRYPT - requeres C compiler -------------------------------------------------------------------------
@@ -56,4 +57,10 @@ def get_random_bytes(length):
# Format hexadecimal string with hyphens
formatted_hex = '-'.join(hex_string[i:i+2] for i in range(0, len(hex_string), 2))
- return formatted_hex
\ No newline at end of file
+ return formatted_hex
+
+#-------------------------------------------------------------------------------
+def generate_deterministic_guid(plugin, primary_id, secondary_id):
+ """Generates a deterministic GUID based on plugin, primary ID, and secondary ID."""
+ data = f"{plugin}-{primary_id}-{secondary_id}".encode("utf-8")
+ return str(uuid.UUID(hashlib.md5(data).hexdigest()))
\ No newline at end of file
diff --git a/server/database.py b/server/database.py
index 5aad5b67..6a460c7c 100755
--- a/server/database.py
+++ b/server/database.py
@@ -9,7 +9,7 @@ from const import fullDbPath, sql_devices_stats, sql_devices_all, sql_generateGu
from logger import mylog
from helper import json_obj, initOrSetParam, row_to_json, timeNowTZ
-from appevent import AppEvent_obj
+from workflows.app_events import AppEvent_obj
class DB():
"""
@@ -543,6 +543,7 @@ class DB():
sql_Plugins_Objects = """ CREATE TABLE IF NOT EXISTS Plugins_Objects(
"Index" INTEGER,
Plugin TEXT NOT NULL,
+ ObjectGUID TEXT,
Object_PrimaryID TEXT NOT NULL,
Object_SecondaryID TEXT NOT NULL,
DateTimeCreated TEXT NOT NULL,
@@ -589,6 +590,18 @@ class DB():
self.sql.execute('ALTER TABLE "Plugins_Objects" ADD COLUMN "HelpVal2" TEXT')
self.sql.execute('ALTER TABLE "Plugins_Objects" ADD COLUMN "HelpVal3" TEXT')
self.sql.execute('ALTER TABLE "Plugins_Objects" ADD COLUMN "HelpVal4" TEXT')
+
+ # plug_ObjectGUID_missing column
+ plug_ObjectGUID_missing = self.sql.execute ("""
+ SELECT COUNT(*) AS CNTREC FROM pragma_table_info('Plugins_Objects') WHERE name='ObjectGUID'
+ """).fetchone()[0] == 0
+
+ if plug_ObjectGUID_missing :
+ mylog('verbose', ["[upgradeDB] Adding ObjectGUID to the Plugins_Objects table"])
+ self.sql.execute("""
+ ALTER TABLE "Plugins_Objects" ADD "ObjectGUID" TEXT
+ """)
+
# -----------------------------------------
# REMOVE after 6/6/2025 - END
@@ -645,6 +658,17 @@ class DB():
self.sql.execute('ALTER TABLE "Plugins_Events" ADD COLUMN "HelpVal2" TEXT')
self.sql.execute('ALTER TABLE "Plugins_Events" ADD COLUMN "HelpVal3" TEXT')
self.sql.execute('ALTER TABLE "Plugins_Events" ADD COLUMN "HelpVal4" TEXT')
+
+ # plug_ObjectGUID_missing column
+ plug_ObjectGUID_missing = self.sql.execute ("""
+ SELECT COUNT(*) AS CNTREC FROM pragma_table_info('Plugins_Events') WHERE name='ObjectGUID'
+ """).fetchone()[0] == 0
+
+ if plug_ObjectGUID_missing :
+ mylog('verbose', ["[upgradeDB] Adding ObjectGUID to the Plugins_Events table"])
+ self.sql.execute("""
+ ALTER TABLE "Plugins_Events" ADD "ObjectGUID" TEXT
+ """)
# -----------------------------------------
# REMOVE after 6/6/2025 - END
@@ -703,6 +727,18 @@ class DB():
self.sql.execute('ALTER TABLE "Plugins_History" ADD COLUMN "HelpVal3" TEXT')
self.sql.execute('ALTER TABLE "Plugins_History" ADD COLUMN "HelpVal4" TEXT')
+
+ # plug_ObjectGUID_missing column
+ plug_ObjectGUID_missing = self.sql.execute ("""
+ SELECT COUNT(*) AS CNTREC FROM pragma_table_info('Plugins_History') WHERE name='ObjectGUID'
+ """).fetchone()[0] == 0
+
+ if plug_ObjectGUID_missing :
+ mylog('verbose', ["[upgradeDB] Adding ObjectGUID to the Plugins_History table"])
+ self.sql.execute("""
+ ALTER TABLE "Plugins_History" ADD "ObjectGUID" TEXT
+ """)
+
# -----------------------------------------
# REMOVE after 6/6/2025 - END
# -----------------------------------------
diff --git a/server/device.py b/server/device.py
deleted file mode 100755
index acd7e2e7..00000000
--- a/server/device.py
+++ /dev/null
@@ -1,783 +0,0 @@
-
-import subprocess
-
-import conf
-import os
-import re
-from helper import timeNowTZ, get_setting, get_setting_value, list_to_where, resolve_device_name_dig, get_device_name_nbtlookup, get_device_name_nslookup, get_device_name_mdns, check_IP_format, sanitize_SQL_input
-from logger import mylog, print_log
-from const import vendorsPath, vendorsPathNewest, sql_generateGuid
-
-#-------------------------------------------------------------------------------
-# Device object handling (WIP)
-#-------------------------------------------------------------------------------
-class Device_obj:
- def __init__(self, db):
- self.db = db
-
- # Get all
- def getAll(self):
- self.db.sql.execute("""
- SELECT * FROM Devices
- """)
- return self.db.sql.fetchall()
-
- # Get all with unknown names
- def getUnknown(self):
- self.db.sql.execute("""
- SELECT * FROM Devices WHERE devName in ("(unknown)", "(name not found)", "" )
- """)
- return self.db.sql.fetchall()
-
- # Get specific column value based on devMac
- def getValueWithMac(self, column_name, devMac):
-
- query = f"SELECT {column_name} FROM Devices WHERE devMac = ?"
-
- self.db.sql.execute(query, (devMac,))
-
- result = self.db.sql.fetchone()
-
- return result[column_name] if result else None
-
- # Get all down
- def getDown(self):
- self.db.sql.execute("""
- SELECT * FROM Devices WHERE devAlertDown = 1 and devPresentLastScan = 0
- """)
- return self.db.sql.fetchall()
-
- # Get all down
- def getOffline(self):
- self.db.sql.execute("""
- SELECT * FROM Devices WHERE devPresentLastScan = 0
- """)
- return self.db.sql.fetchall()
-
-
-
-
-
-#-------------------------------------------------------------------------------
-# Removing devices from the CurrentScan DB table which the user chose to ignore by MAC or IP
-def exclude_ignored_devices(db):
- sql = db.sql # Database interface for executing queries
-
- mac_condition = list_to_where('OR', 'cur_MAC', 'LIKE', get_setting_value('NEWDEV_ignored_MACs'))
- ip_condition = list_to_where('OR', 'cur_IP', 'LIKE', get_setting_value('NEWDEV_ignored_IPs'))
-
- # Only delete if either the MAC or IP matches an ignored condition
- conditions = []
- if mac_condition:
- conditions.append(mac_condition)
- if ip_condition:
- conditions.append(ip_condition)
-
- # Join conditions and prepare the query
- conditions_str = " OR ".join(conditions)
- if conditions_str:
- query = f"""DELETE FROM CurrentScan WHERE
- 1=1
- AND (
- {conditions_str}
- )
- """
- else:
- query = "DELETE FROM CurrentScan WHERE 1=1 AND 1=0" # No valid conditions, prevent deletion
-
- mylog('debug', f'[New Devices] Excluding Ignored Devices Query: {query}')
-
- sql.execute(query)
-
-
-
-#-------------------------------------------------------------------------------
-def save_scanned_devices (db):
- sql = db.sql #TO-DO
-
-
- # Add Local MAC of default local interface
- local_mac_cmd = ["/sbin/ifconfig `ip -o route get 1 | sed 's/^.*dev \\([^ ]*\\).*$/\\1/;q'` | grep ether | awk '{print $2}'"]
- local_mac = subprocess.Popen (local_mac_cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT).communicate()[0].decode().strip()
-
- local_ip_cmd = ["ip -o route get 1 | sed 's/^.*src \\([^ ]*\\).*$/\\1/;q'"]
- local_ip = subprocess.Popen (local_ip_cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT).communicate()[0].decode().strip()
-
- mylog('debug', ['[Save Devices] Saving this IP into the CurrentScan table:', local_ip])
-
- if check_IP_format(local_ip) == '':
- local_ip = '0.0.0.0'
-
- # Proceed if variable contains valid MAC
- if check_mac_or_internet(local_mac):
- sql.execute (f"""INSERT OR IGNORE INTO CurrentScan (cur_MAC, cur_IP, cur_Vendor, cur_ScanMethod) VALUES ( '{local_mac}', '{local_ip}', Null, 'local_MAC') """)
-
-#-------------------------------------------------------------------------------
-def print_scan_stats(db):
- sql = db.sql # TO-DO
-
- query = """
- SELECT
- (SELECT COUNT(*) FROM CurrentScan) AS devices_detected,
- (SELECT COUNT(*) FROM CurrentScan WHERE NOT EXISTS (SELECT 1 FROM Devices WHERE devMac = cur_MAC)) AS new_devices,
- (SELECT COUNT(*) FROM Devices WHERE devAlertDown != 0 AND NOT EXISTS (SELECT 1 FROM CurrentScan WHERE devMac = cur_MAC)) AS down_alerts,
- (SELECT COUNT(*) FROM Devices WHERE devAlertDown != 0 AND devPresentLastScan = 1 AND NOT EXISTS (SELECT 1 FROM CurrentScan WHERE devMac = cur_MAC)) AS new_down_alerts,
- (SELECT COUNT(*) FROM Devices WHERE devPresentLastScan = 0) AS new_connections,
- (SELECT COUNT(*) FROM Devices WHERE devPresentLastScan = 1 AND NOT EXISTS (SELECT 1 FROM CurrentScan WHERE devMac = cur_MAC)) AS disconnections,
- (SELECT COUNT(*) FROM Devices, CurrentScan WHERE devMac = cur_MAC AND devLastIP <> cur_IP) AS ip_changes,
- cur_ScanMethod,
- COUNT(*) AS scan_method_count
- FROM CurrentScan
- GROUP BY cur_ScanMethod
- """
-
- sql.execute(query)
- stats = sql.fetchall()
-
- mylog('verbose', f'[Scan Stats] Devices Detected.......: {stats[0]["devices_detected"]}')
- mylog('verbose', f'[Scan Stats] New Devices............: {stats[0]["new_devices"]}')
- mylog('verbose', f'[Scan Stats] Down Alerts............: {stats[0]["down_alerts"]}')
- mylog('verbose', f'[Scan Stats] New Down Alerts........: {stats[0]["new_down_alerts"]}')
- mylog('verbose', f'[Scan Stats] New Connections........: {stats[0]["new_connections"]}')
- mylog('verbose', f'[Scan Stats] Disconnections.........: {stats[0]["disconnections"]}')
- mylog('verbose', f'[Scan Stats] IP Changes.............: {stats[0]["ip_changes"]}')
-
- # if str(stats[0]["new_devices"]) != '0':
- mylog('trace', f' ================ DEVICES table content ================')
- sql.execute('select * from Devices')
- rows = sql.fetchall()
- for row in rows:
- row_dict = dict(row)
- mylog('trace', f' {row_dict}')
-
- mylog('trace', f' ================ CurrentScan table content ================')
- sql.execute('select * from CurrentScan')
- rows = sql.fetchall()
- for row in rows:
- row_dict = dict(row)
- mylog('trace', f' {row_dict}')
-
- mylog('trace', f' ================ Events table content where eve_PendingAlertEmail = 1 ================')
- sql.execute('select * from Events where eve_PendingAlertEmail = 1')
- rows = sql.fetchall()
- for row in rows:
- row_dict = dict(row)
- mylog('trace', f' {row_dict}')
-
- mylog('trace', f' ================ Events table COUNT ================')
- sql.execute('select count(*) from Events')
- rows = sql.fetchall()
- for row in rows:
- row_dict = dict(row)
- mylog('trace', f' {row_dict}')
-
-
- mylog('verbose', '[Scan Stats] Scan Method Statistics:')
- for row in stats:
- if row["cur_ScanMethod"] is not None:
- mylog('verbose', f' {row["cur_ScanMethod"]}: {row["scan_method_count"]}')
-
-
-#-------------------------------------------------------------------------------
-def create_new_devices (db):
- sql = db.sql # TO-DO
- startTime = timeNowTZ()
-
- # Insert events for new devices from CurrentScan
- mylog('debug','[New Devices] New devices - 1 Events')
-
- query = f"""INSERT INTO Events (eve_MAC, eve_IP, eve_DateTime,
- eve_EventType, eve_AdditionalInfo,
- eve_PendingAlertEmail)
- SELECT cur_MAC, cur_IP, '{startTime}', 'New Device', cur_Vendor, 1
- FROM CurrentScan
- WHERE NOT EXISTS (SELECT 1 FROM Devices
- WHERE devMac = cur_MAC)
- """
-
-
- mylog('debug',f'[New Devices] Log Events Query: {query}')
-
- sql.execute(query)
-
- mylog('debug',f'[New Devices] Insert Connection into session table')
-
- sql.execute (f"""INSERT INTO Sessions (ses_MAC, ses_IP, ses_EventTypeConnection, ses_DateTimeConnection,
- ses_EventTypeDisconnection, ses_DateTimeDisconnection, ses_StillConnected, ses_AdditionalInfo)
- SELECT cur_MAC, cur_IP,'Connected','{startTime}', NULL , NULL ,1, cur_Vendor
- FROM CurrentScan
- WHERE NOT EXISTS (SELECT 1 FROM Sessions
- WHERE ses_MAC = cur_MAC)
- """)
-
- # Create new devices from CurrentScan
- mylog('debug','[New Devices] 2 Create devices')
-
- # default New Device values preparation
- newDevColumns = """devAlertEvents,
- devAlertDown,
- devPresentLastScan,
- devIsArchived,
- devIsNew,
- devSkipRepeated,
- devScan,
- devOwner,
- devFavorite,
- devGroup,
- devComments,
- devLogEvents,
- devLocation,
- devCustomProps"""
-
- newDevDefaults = f"""{get_setting_value('NEWDEV_devAlertEvents')},
- {get_setting_value('NEWDEV_devAlertDown')},
- {get_setting_value('NEWDEV_devPresentLastScan')},
- {get_setting_value('NEWDEV_devIsArchived')},
- {get_setting_value('NEWDEV_devIsNew')},
- {get_setting_value('NEWDEV_devSkipRepeated')},
- {get_setting_value('NEWDEV_devScan')},
- '{sanitize_SQL_input(get_setting_value('NEWDEV_devOwner'))}',
- {get_setting_value('NEWDEV_devFavorite')},
- '{sanitize_SQL_input(get_setting_value('NEWDEV_devGroup'))}',
- '{sanitize_SQL_input(get_setting_value('NEWDEV_devComments'))}',
- {get_setting_value('NEWDEV_devLogEvents')},
- '{sanitize_SQL_input(get_setting_value('NEWDEV_devLocation'))}',
- '{sanitize_SQL_input(get_setting_value('NEWDEV_devCustomProps'))}'
- """
-
- # Fetch data from CurrentScan skipping ignored devices by IP and MAC
- query = f"""SELECT cur_MAC, cur_Name, cur_Vendor, cur_ScanMethod, cur_IP, cur_SyncHubNodeName, cur_NetworkNodeMAC, cur_PORT, cur_NetworkSite, cur_SSID, cur_Type
- FROM CurrentScan """
-
-
- mylog('debug',f'[New Devices] Collecting New Devices Query: {query}')
- current_scan_data = sql.execute(query).fetchall()
-
- for row in current_scan_data:
- cur_MAC, cur_Name, cur_Vendor, cur_ScanMethod, cur_IP, cur_SyncHubNodeName, cur_NetworkNodeMAC, cur_PORT, cur_NetworkSite, cur_SSID, cur_Type = row
-
- # Handle NoneType
- cur_Name = cur_Name.strip() if cur_Name else '(unknown)'
- cur_Type = cur_Type.strip() if cur_Type else get_setting_value("NEWDEV_devType")
- cur_NetworkNodeMAC = cur_NetworkNodeMAC.strip() if cur_NetworkNodeMAC else ''
- cur_NetworkNodeMAC = cur_NetworkNodeMAC if cur_NetworkNodeMAC and cur_MAC != "Internet" else (get_setting_value("NEWDEV_devParentMAC") if cur_MAC != "Internet" else "null")
- cur_SyncHubNodeName = cur_SyncHubNodeName if cur_SyncHubNodeName and cur_SyncHubNodeName != "null" else (get_setting_value("SYNC_node_name"))
-
- # Preparing the individual insert statement
- sqlQuery = f"""INSERT OR IGNORE INTO Devices
- (
- devMac,
- devName,
- devVendor,
- devLastIP,
- devFirstConnection,
- devLastConnection,
- devSyncHubNode,
- devGUID,
- devParentMAC,
- devParentPort,
- devSite,
- devSSID,
- devType,
- devSourcePlugin,
- {newDevColumns}
- )
- VALUES
- (
- '{sanitize_SQL_input(cur_MAC)}',
- '{sanitize_SQL_input(cur_Name)}',
- '{sanitize_SQL_input(cur_Vendor)}',
- '{sanitize_SQL_input(cur_IP)}',
- ?,
- ?,
- '{sanitize_SQL_input(cur_SyncHubNodeName)}',
- {sql_generateGuid},
- '{sanitize_SQL_input(cur_NetworkNodeMAC)}',
- '{sanitize_SQL_input(cur_PORT)}',
- '{sanitize_SQL_input(cur_NetworkSite)}',
- '{sanitize_SQL_input(cur_SSID)}',
- '{sanitize_SQL_input(cur_Type)}',
- '{sanitize_SQL_input(cur_ScanMethod)}',
- {newDevDefaults}
- )"""
-
- mylog('trace', f'[New Devices] Create device SQL: {sqlQuery}')
-
- sql.execute(sqlQuery, (startTime, startTime))
-
-
- mylog('debug','[New Devices] New Devices end')
- db.commitDB()
-
-
-#-------------------------------------------------------------------------------
-def update_devices_data_from_scan (db):
- sql = db.sql #TO-DO
- startTime = timeNowTZ().strftime('%Y-%m-%d %H:%M:%S')
-
- # Update Last Connection
- mylog('debug', '[Update Devices] 1 Last Connection')
- sql.execute(f"""UPDATE Devices SET devLastConnection = '{startTime}',
- devPresentLastScan = 1
- WHERE devPresentLastScan = 0
- AND EXISTS (SELECT 1 FROM CurrentScan
- WHERE devMac = cur_MAC) """)
-
- # Clean no active devices
- mylog('debug', '[Update Devices] 2 Clean no active devices')
- sql.execute("""UPDATE Devices SET devPresentLastScan = 0
- WHERE NOT EXISTS (SELECT 1 FROM CurrentScan
- WHERE devMac = cur_MAC) """)
-
- # Update IP
- mylog('debug', '[Update Devices] - cur_IP -> devLastIP (always updated)')
- sql.execute("""UPDATE Devices
- SET devLastIP = (SELECT cur_IP FROM CurrentScan
- WHERE devMac = cur_MAC)
- WHERE EXISTS (SELECT 1 FROM CurrentScan
- WHERE devMac = cur_MAC) """)
-
- # Update only devices with empty, NULL or (u(U)nknown) vendors
- mylog('debug', '[Update Devices] - cur_Vendor -> (if empty) devVendor')
- sql.execute("""UPDATE Devices
- SET devVendor = (
- SELECT cur_Vendor
- FROM CurrentScan
- WHERE Devices.devMac = CurrentScan.cur_MAC
- )
- WHERE
- (devVendor IS NULL OR devVendor IN ("", "null", "(unknown)", "(Unknown)"))
- AND EXISTS (
- SELECT 1
- FROM CurrentScan
- WHERE Devices.devMac = CurrentScan.cur_MAC
- )""")
-
- # Update only devices with empty or NULL devParentPort
- mylog('debug', '[Update Devices] - (if not empty) cur_Port -> devParentPort')
- sql.execute("""UPDATE Devices
- SET devParentPort = (
- SELECT cur_Port
- FROM CurrentScan
- WHERE Devices.devMac = CurrentScan.cur_MAC
- )
- WHERE EXISTS (
- SELECT 1
- FROM CurrentScan
- WHERE Devices.devMac = CurrentScan.cur_MAC
- AND CurrentScan.cur_Port IS NOT NULL AND CurrentScan.cur_Port NOT IN ("", "null")
- )""")
-
- # Update only devices with empty or NULL devParentMAC
- mylog('debug', '[Update Devices] - (if not empty) cur_NetworkNodeMAC -> devParentMAC')
- sql.execute("""UPDATE Devices
- SET devParentMAC = (
- SELECT cur_NetworkNodeMAC
- FROM CurrentScan
- WHERE Devices.devMac = CurrentScan.cur_MAC
- )
- WHERE
- (devParentMAC IS NULL OR devParentMAC IN ("", "null", "(unknown)", "(Unknown)"))
- AND
- EXISTS (
- SELECT 1
- FROM CurrentScan
- WHERE Devices.devMac = CurrentScan.cur_MAC
- AND CurrentScan.cur_NetworkNodeMAC IS NOT NULL AND CurrentScan.cur_NetworkNodeMAC NOT IN ("", "null")
-
- )""")
-
- # Update only devices with empty or NULL devSite
- mylog('debug', '[Update Devices] - (if not empty) cur_NetworkSite -> (if empty) devSite')
- sql.execute("""UPDATE Devices
- SET devSite = (
- SELECT cur_NetworkSite
- FROM CurrentScan
- WHERE Devices.devMac = CurrentScan.cur_MAC
- )
- WHERE
- (devSite IS NULL OR devSite IN ("", "null"))
- AND EXISTS (
- SELECT 1
- FROM CurrentScan
- WHERE Devices.devMac = CurrentScan.cur_MAC
- AND CurrentScan.cur_NetworkSite IS NOT NULL AND CurrentScan.cur_NetworkSite NOT IN ("", "null")
- )""")
-
- # Update only devices with empty or NULL devSSID
- mylog('debug', '[Update Devices] - (if not empty) cur_SSID -> (if empty) devSSID')
- sql.execute("""UPDATE Devices
- SET devSSID = (
- SELECT cur_SSID
- FROM CurrentScan
- WHERE Devices.devMac = CurrentScan.cur_MAC
- )
- WHERE
- (devSSID IS NULL OR devSSID IN ("", "null"))
- AND EXISTS (
- SELECT 1
- FROM CurrentScan
- WHERE Devices.devMac = CurrentScan.cur_MAC
- AND CurrentScan.cur_SSID IS NOT NULL AND CurrentScan.cur_SSID NOT IN ("", "null")
- )""")
-
- # Update only devices with empty or NULL devType
- mylog('debug', '[Update Devices] - (if not empty) cur_Type -> (if empty) devType')
- sql.execute("""UPDATE Devices
- SET devType = (
- SELECT cur_Type
- FROM CurrentScan
- WHERE Devices.devMac = CurrentScan.cur_MAC
- )
- WHERE
- (devType IS NULL OR devType IN ("", "null"))
- AND EXISTS (
- SELECT 1
- FROM CurrentScan
- WHERE Devices.devMac = CurrentScan.cur_MAC
- AND CurrentScan.cur_Type IS NOT NULL AND CurrentScan.cur_Type NOT IN ("", "null")
- )""")
-
- # Update (unknown) or (name not found) Names if available
- mylog('debug','[Update Devices] - (if not empty) cur_Name -> (if empty) devName')
- sql.execute (""" UPDATE Devices
- SET devName = COALESCE((
- SELECT cur_Name
- FROM CurrentScan
- WHERE cur_MAC = devMac
- AND cur_Name IS NOT NULL
- AND cur_Name <> 'null'
- AND cur_Name <> ''
- ), devName)
- WHERE (devName IN ('(unknown)', '(name not found)', '')
- OR devName IS NULL)
- AND EXISTS (
- SELECT 1
- FROM CurrentScan
- WHERE cur_MAC = devMac
- AND cur_Name IS NOT NULL
- AND cur_Name <> 'null'
- AND cur_Name <> ''
- ) """)
-
- # Update VENDORS
- recordsToUpdate = []
- query = """SELECT * FROM Devices
- WHERE devVendor IS NULL OR devVendor IN ("", "null", "(unknown)", "(Unknown)")
- """
-
- for device in sql.execute (query) :
- vendor = query_MAC_vendor (device['devMac'])
- if vendor != -1 and vendor != -2 :
- recordsToUpdate.append ([vendor, device['devMac']])
-
- if len(recordsToUpdate) > 0:
- sql.executemany ("UPDATE Devices SET devVendor = ? WHERE devMac = ? ", recordsToUpdate )
-
- # Guess ICONS
- recordsToUpdate = []
-
- default_icon = get_setting_value('NEWDEV_devIcon')
-
-
-
- if get_setting_value('NEWDEV_replace_preset_icon'):
- query = f"""SELECT * FROM Devices
- WHERE devIcon in ('', 'null', '{default_icon}')
- OR devIcon IS NULL"""
- else:
- query = """SELECT * FROM Devices
- WHERE devIcon in ('', 'null')
- OR devIcon IS NULL"""
-
- for device in sql.execute (query) :
- # Conditional logic for devIcon guessing
- devIcon = guess_icon(device['devVendor'], device['devMac'], device['devLastIP'], device['devName'], default_icon)
-
- recordsToUpdate.append ([devIcon, device['devMac']])
-
-
- mylog('debug',f'[Update Devices] recordsToUpdate: {recordsToUpdate}')
-
- if len(recordsToUpdate) > 0:
- sql.executemany ("UPDATE Devices SET devIcon = ? WHERE devMac = ? ", recordsToUpdate )
-
- # Guess Type
- recordsToUpdate = []
- query = """SELECT * FROM Devices
- WHERE devType in ('', 'null')
- OR devType IS NULL"""
- default_type = get_setting_value('NEWDEV_devType')
-
- for device in sql.execute (query) :
- # Conditional logic for devIcon guessing
- devType = guess_type(device['devVendor'], device['devMac'], device['devLastIP'], device['devName'], default_type)
-
- recordsToUpdate.append ([devType, device['devMac']])
-
- if len(recordsToUpdate) > 0:
- sql.executemany ("UPDATE Devices SET devType = ? WHERE devMac = ? ", recordsToUpdate )
-
-
- mylog('debug','[Update Devices] Update devices end')
-
-#-------------------------------------------------------------------------------
-def update_devices_names (db):
- sql = db.sql #TO-DO
- # Initialize variables
- recordsToUpdate = []
- recordsNotFound = []
-
- nameNotFound = "(name not found)"
-
- ignored = 0
- notFound = 0
-
- foundDig = 0
- foundmDNSLookup = 0
- foundNsLookup = 0
- foundNbtLookup = 0
-
- # Gen unknown devices
- device_handler = Device_obj(db)
- # Retrieve devices
- unknownDevices = device_handler.getUnknown()
-
- # skip checks if no unknown devices
- if len(unknownDevices) == 0:
- return
-
- # Devices without name
- mylog('verbose', f'[Update Device Name] Trying to resolve devices without name. Unknown devices count: {len(unknownDevices)}')
-
- for device in unknownDevices:
- newName = nameNotFound
-
- # Resolve device name with DiG
- newName = resolve_device_name_dig (device['devMac'], device['devLastIP'])
-
- # count
- if newName != nameNotFound:
- foundDig += 1
-
- # Resolve device name with AVAHISCAN plugin data
- if newName == nameNotFound:
- newName = get_device_name_mdns(db, device['devMac'], device['devLastIP'])
-
- if newName != nameNotFound:
- foundmDNSLookup += 1
-
- # Resolve device name with NSLOOKUP plugin data
- if newName == nameNotFound:
- newName = get_device_name_nslookup(db, device['devMac'], device['devLastIP'])
-
- if newName != nameNotFound:
- foundNsLookup += 1
-
- # Resolve device name with NBTLOOKUP plugin data
- if newName == nameNotFound:
- newName = get_device_name_nbtlookup(db, device['devMac'], device['devLastIP'])
-
- if newName != nameNotFound:
- foundNbtLookup += 1
-
- # if still not found update name so we can distinguish the devices where we tried already
- if newName == nameNotFound :
-
- notFound += 1
-
- # if devName is the same as what we will change it to, take no action
- # this mitigates a race condition which would overwrite a users edits that occured since the select earlier
- if device['devName'] != nameNotFound:
- recordsNotFound.append (["(name not found)", device['devMac']])
- else:
- # name was found
- recordsToUpdate.append ([newName, device['devMac']])
-
- # Print log
- mylog('verbose', [f'[Update Device Name] Names Found (DiG/mDNS/NSLOOKUP/NBTSCAN): {len(recordsToUpdate)} ({foundDig}/{foundmDNSLookup}/{foundNsLookup}/{foundNbtLookup})'] )
- mylog('verbose', [f'[Update Device Name] Names Not Found : {notFound}'] )
-
- # update not found devices with (name not found)
- sql.executemany ("UPDATE Devices SET devName = ? WHERE devMac = ? ", recordsNotFound )
- # update names of devices which we were bale to resolve
- sql.executemany ("UPDATE Devices SET devName = ? WHERE devMac = ? ", recordsToUpdate )
- db.commitDB()
-
-#-------------------------------------------------------------------------------
-# Check if the variable contains a valid MAC address or "Internet"
-def check_mac_or_internet(input_str):
- # Regular expression pattern for matching a MAC address
- mac_pattern = r'([0-9A-Fa-f]{2}[:-][0-9A-Fa-f]{2}[:-][0-9A-Fa-f]{2}[:-][0-9A-Fa-f]{2}[:-][0-9A-Fa-f]{2}[:-][0-9A-Fa-f]{2})'
-
- if input_str.lower() == 'internet':
- return True
- elif re.match(mac_pattern, input_str):
- return True
- else:
- return False
-
-
-#===============================================================================
-# Lookup unknown vendors on devices
-#===============================================================================
-
-#-------------------------------------------------------------------------------
-def query_MAC_vendor (pMAC):
-
- pMACstr = str(pMAC)
-
- filePath = vendorsPath
-
- if os.path.isfile(vendorsPathNewest):
- filePath = vendorsPathNewest
-
- # Check MAC parameter
- mac = pMACstr.replace (':','').lower()
- if len(pMACstr) != 17 or len(mac) != 12 :
- return -2 # return -2 if ignored MAC
-
- # Search vendor in HW Vendors DB
- mac_start_string6 = mac[0:6]
- mac_start_string9 = mac[0:9]
-
- try:
- with open(filePath, 'r') as f:
- for line in f:
- line_lower = line.lower() # Convert line to lowercase for case-insensitive matching
- if line_lower.startswith(mac_start_string6):
- parts = line.split('\t', 1)
- if len(parts) > 1:
- vendor = parts[1].strip()
- mylog('debug', [f"[Vendor Check] Found '{vendor}' for '{pMAC}' in {vendorsPath}"])
- return vendor
- else:
- mylog('debug', [f'[Vendor Check] ⚠ ERROR: Match found, but line could not be processed: "{line_lower}"'])
- return -1
-
-
- return -1 # MAC address not found in the database
- except FileNotFoundError:
- mylog('none', [f"[Vendor Check] ⚠ ERROR: Vendors file {vendorsPath} not found."])
- return -1
-
-
-#===============================================================================
-# Icons
-#===============================================================================
-#-------------------------------------------------------------------------------
-# Base64 encoded HTML string for FontAwesome icons
-icons = {
- "globe": "PGkgY2xhc3M9ImZhcyBmYS1nbG9iZSI+PC9pPg==", # globe icon
- "phone": "PGkgY2xhc3M9ImZhcyBmYS1tb2JpbGUtYWx0Ij48L2k+",
- "laptop": "PGkgY2xhc3M9ImZhIGZhLWxhcHRvcCI+PC9pPg==",
- "printer": "PGkgY2xhc3M9ImZhIGZhLXByaW50ZXIiPjwvaT4=",
- "router": "PGkgY2xhc3M9ImZhcyBmYS1yYW5kb20iPjwvaT4=",
- "tv": "PGkgY2xhc3M9ImZhIGZhLXR2Ij48L2k+",
- "desktop": "PGkgY2xhc3M9ImZhIGZhLWRlc2t0b3AiPjwvaT4=",
- "tablet": "PGkgY2xhc3M9ImZhIGZhLXRhYmxldCI+PC9pPg==",
- "watch": "PGkgY2xhc3M9ImZhIGZhLXdhbmNoIj48L2k+",
- "camera": "PGkgY2xhc3M9ImZhIGZhLWNhbWVyYSI+PC9pPg==",
- "home": "PGkgY2xhc3M9ImZhIGZhLWhvbWUiPjwvaT4=",
- "apple": "PGkgY2xhc3M9ImZhYiBmYS1hcHBsZSI+PC9pPg==",
- "ethernet": "PGkgY2xhc3M9ImZhcyBmYS1ldGhlcm5ldCI+PC9pPg==",
- "google": "PGkgY2xhc3M9ImZhYiBmYS1nb29nbGUiPjwvaT4=",
- "raspberry": "PGkgY2xhc3M9ImZhYiBmYS1yYXNwYmVycnktcGkiPjwvaT4=",
- "microchip": "PGkgY2xhc3M9ImZhcyBmYS1taWNyb2NoaXAiPjwvaT4="
-}
-
-#-------------------------------------------------------------------------------
-# Guess device icon
-def guess_icon(vendor, mac, ip, name, default):
-
- mylog('debug', [f"[guess_icon] Guessing icon for (vendor|mac|ip|name): ('{vendor}'|'{mac}'|{ip}|{name})"])
-
- result = default
- mac = mac.upper()
- vendor = vendor.lower() if vendor else "unknown"
- name = name.lower() if name else "(unknown)"
-
- # Guess icon based on vendor
- if any(brand in vendor for brand in {"samsung", "motorola"}):
- result = icons.get("phone")
- elif "dell" in vendor:
- result = icons.get("laptop")
- elif "hp" in vendor:
- result = icons.get("printer")
- elif "cisco" in vendor:
- result = icons.get("router")
- elif "lg" in vendor:
- result = icons.get("tv")
- elif "raspberry" in vendor:
- result = icons.get("raspberry")
- elif "apple" in vendor:
- result = icons.get("apple")
- elif "google" in vendor:
- result = icons.get("google")
- elif "ubiquiti" in vendor:
- result = icons.get("router")
- elif any(brand in vendor for brand in {"espressif"}):
- result = icons.get("microchip")
-
- # Guess icon based on MAC address patterns
- elif mac == "INTERNET":
- result = icons.get("globe")
- elif mac.startswith("00:1A:79"): # Apple
- result = icons.get("apple")
- elif mac.startswith("B0:BE:83"): # Apple
- result = icons.get("apple")
- elif mac.startswith("00:1B:63"): # Sony
- result = icons.get("tablet")
- elif mac.startswith("74:AC:B9"): # Unifi
- result = icons.get("ethernet")
-
-
- # Guess icon based on name
- elif 'google' in name:
- result = icons.get("google")
- elif 'desktop' in name:
- result = icons.get("desktop")
- elif 'raspberry' in name:
- result = icons.get("raspberry")
-
- # Guess icon based on IP address ranges
- elif ip.startswith("192.168.1."):
- result = icons.get("laptop")
-
-
- return result
-
-#-------------------------------------------------------------------------------
-# Guess device type
-def guess_type(vendor, mac, ip, name, default):
- result = default
- mac = mac.upper()
- vendor = vendor.lower() if vendor else "unknown"
- name = str(name).lower() if name else "(unknown)"
-
- # Guess icon based on vendor
- if any(brand in vendor for brand in {"samsung", "motorola"}):
- result = "Phone"
- elif "cisco" in vendor:
- result = "Router"
- elif "lg" in vendor:
- result = "TV"
- elif "google" in vendor:
- result = "Phone"
- elif "ubiquiti" in vendor:
- result = "Router"
-
- # Guess type based on MAC address patterns
- elif mac == "INTERNET":
- result = "Internet"
-
- # Guess type based on name
- elif 'google' in name:
- result = "Phone"
-
- # Guess type based on IP address ranges
- elif ip == ("192.168.1.1"):
- result = "Router"
-
- return result
-
diff --git a/server/flows.py b/server/flows.py
deleted file mode 100755
index 87b59ba4..00000000
--- a/server/flows.py
+++ /dev/null
@@ -1,31 +0,0 @@
-import json
-
-def update_value(json_data, object_path, key, value, target_property, desired_value):
- # Helper function to traverse the JSON structure and get the target object
- def traverse(obj, path):
- keys = path.split(".")
- for key in keys:
- if isinstance(obj, list):
- key = int(key)
- obj = obj[key]
- return obj
-
- # Helper function to update the target property with the desired value
- def update(obj, path, key, value, target_property, desired_value):
- keys = path.split(".")
- for i, key in enumerate(keys):
- if isinstance(obj, list):
- key = int(key)
- # Check if we have reached the desired object
- if i == len(keys) - 1 and obj[key][key] == value:
- # Update the target property with the desired value
- obj[key][target_property] = desired_value
- else:
- obj = obj[key]
- return obj
-
- # Get the target object based on the object path
- target_obj = traverse(json_data, object_path)
- # Update the value in the target object
- updated_obj = update(json_data, object_path, key, value, target_property, desired_value)
- return updated_obj
\ No newline at end of file
diff --git a/server/models/device_instance.py b/server/models/device_instance.py
new file mode 100755
index 00000000..63176af4
--- /dev/null
+++ b/server/models/device_instance.py
@@ -0,0 +1,84 @@
+import sys
+
+# Register NetAlertX directories
+INSTALL_PATH="/app"
+sys.path.extend([f"{INSTALL_PATH}/server"])
+
+from logger import mylog, print_log
+
+#-------------------------------------------------------------------------------
+# Device object handling (WIP)
+#-------------------------------------------------------------------------------
+class DeviceInstance:
+ def __init__(self, db):
+ self.db = db
+
+ # Get all
+ def getAll(self):
+ self.db.sql.execute("""
+ SELECT * FROM Devices
+ """)
+ return self.db.sql.fetchall()
+
+ # Get all with unknown names
+ def getUnknown(self):
+ self.db.sql.execute("""
+ SELECT * FROM Devices WHERE devName in ("(unknown)", "(name not found)", "" )
+ """)
+ return self.db.sql.fetchall()
+
+ # Get specific column value based on devMac
+ def getValueWithMac(self, column_name, devMac):
+
+ query = f"SELECT {column_name} FROM Devices WHERE devMac = ?"
+ self.db.sql.execute(query, (devMac,))
+ result = self.db.sql.fetchone()
+ return result[column_name] if result else None
+
+ # Get all down
+ def getDown(self):
+ self.db.sql.execute("""
+ SELECT * FROM Devices WHERE devAlertDown = 1 and devPresentLastScan = 0
+ """)
+ return self.db.sql.fetchall()
+
+ # Get all down
+ def getOffline(self):
+ self.db.sql.execute("""
+ SELECT * FROM Devices WHERE devPresentLastScan = 0
+ """)
+ return self.db.sql.fetchall()
+
+ # Get a device by devGUID
+ def getByGUID(self, devGUID):
+ self.db.sql.execute("SELECT * FROM Devices WHERE devGUID = ?", (devGUID,))
+ result = self.db.sql.fetchone()
+ return dict(result) if result else None
+
+ # Check if a device exists by devGUID
+ def exists(self, devGUID):
+ self.db.sql.execute("SELECT COUNT(*) AS count FROM Devices WHERE devGUID = ?", (devGUID,))
+ result = self.db.sql.fetchone()
+ return result["count"] > 0
+
+ # Update a specific field for a device
+ def updateField(self, devGUID, field, value):
+ if not self.exists(devGUID):
+ m = f"[Device] In 'updateField': GUID {devGUID} not found."
+ mylog('none', m)
+ raise ValueError(m)
+
+ self.db.sql.execute(f"""
+ UPDATE Devices SET {field} = ? WHERE devGUID = ?
+ """, (value, devGUID))
+ self.db.commitDB()
+
+ # Delete a device by devGUID
+ def delete(self, devGUID):
+ if not self.exists(devGUID):
+ m = f"[Device] In 'delete': GUID {devGUID} not found."
+ mylog('none', m)
+ raise ValueError(m)
+
+ self.db.sql.execute("DELETE FROM Devices WHERE devGUID = ?", (devGUID,))
+ self.db.commitDB()
\ No newline at end of file
diff --git a/server/models/plugin_object_instance.py b/server/models/plugin_object_instance.py
new file mode 100755
index 00000000..83f58512
--- /dev/null
+++ b/server/models/plugin_object_instance.py
@@ -0,0 +1,65 @@
+import sys
+
+# Register NetAlertX directories
+INSTALL_PATH="/app"
+sys.path.extend([f"{INSTALL_PATH}/server"])
+
+from logger import mylog, print_log
+
+#-------------------------------------------------------------------------------
+# Plugin object handling (WIP)
+#-------------------------------------------------------------------------------
+class PluginObjectInstance:
+ def __init__(self, db):
+ self.db = db
+
+ # Get all plugin objects
+ def getAll(self):
+ self.db.sql.execute("""
+ SELECT * FROM Plugins_Objects
+ """)
+ return self.db.sql.fetchall()
+
+ # Get plugin object by ObjectGUID
+ def getByGUID(self, ObjectGUID):
+ self.db.sql.execute("SELECT * FROM Plugins_Objects WHERE ObjectGUID = ?", (ObjectGUID,))
+ result = self.db.sql.fetchone()
+ return dict(result) if result else None
+
+ # Check if a plugin object exists by ObjectGUID
+ def exists(self, ObjectGUID):
+ self.db.sql.execute("SELECT COUNT(*) AS count FROM Plugins_Objects WHERE ObjectGUID = ?", (ObjectGUID,))
+ result = self.db.sql.fetchone()
+ return result["count"] > 0
+
+ # Get objects by plugin name
+ def getByPlugin(self, plugin):
+ self.db.sql.execute("SELECT * FROM Plugins_Objects WHERE Plugin = ?", (plugin,))
+ return self.db.sql.fetchall()
+
+ # Get objects by status
+ def getByStatus(self, status):
+ self.db.sql.execute("SELECT * FROM Plugins_Objects WHERE Status = ?", (status,))
+ return self.db.sql.fetchall()
+
+ # Update a specific field for a plugin object
+ def updateField(self, ObjectGUID, field, value):
+ if not self.exists(ObjectGUID):
+ m = f"[PluginObject] In 'updateField': GUID {ObjectGUID} not found."
+ mylog('none', m)
+ raise ValueError(m)
+
+ self.db.sql.execute(f"""
+ UPDATE Plugins_Objects SET {field} = ? WHERE ObjectGUID = ?
+ """, (value, ObjectGUID))
+ self.db.commitDB()
+
+ # Delete a plugin object by ObjectGUID
+ def delete(self, ObjectGUID):
+ if not self.exists(ObjectGUID):
+ m = f"[PluginObject] In 'delete': GUID {ObjectGUID} not found."
+ mylog('none', m)
+ raise ValueError(m)
+
+ self.db.sql.execute("DELETE FROM Plugins_Objects WHERE ObjectGUID = ?", (ObjectGUID,))
+ self.db.commitDB()
diff --git a/server/plugin.py b/server/plugin.py
index 5cfd6251..f07a6af1 100755
--- a/server/plugin.py
+++ b/server/plugin.py
@@ -18,6 +18,7 @@ from api import update_api
from plugin_utils import logEventStatusCounts, get_plugin_string, get_plugin_setting_obj, print_plugin_info, list_to_csv, combine_plugin_objects, resolve_wildcards_arr, handle_empty, custom_plugin_decoder, decode_and_rename_files
from notification import Notification_obj, write_notification
from user_events_queue import UserEventsQueue
+from crypto_utils import generate_deterministic_guid
# Make sure log level is initialized correctly
Logger(get_setting_value('LOG_LEVEL'))
@@ -582,13 +583,14 @@ def process_plugin_events(db, plugin, plugEventsArr):
for plugObj in pluginObjects:
# keep old createdTime time if the plugObj already was created before
createdTime = plugObj.changed if plugObj.status == 'new' else plugObj.created
- # 18 values without Index
+ # 19 values without Index
values = (
plugObj.pluginPref, plugObj.primaryId, plugObj.secondaryId, createdTime,
plugObj.changed, plugObj.watched1, plugObj.watched2, plugObj.watched3,
plugObj.watched4, plugObj.status, plugObj.extra, plugObj.userData,
plugObj.foreignKey, plugObj.syncHubNodeName,
- plugObj.helpVal1, plugObj.helpVal2, plugObj.helpVal3, plugObj.helpVal4
+ plugObj.helpVal1, plugObj.helpVal2, plugObj.helpVal3, plugObj.helpVal4,
+ plugObj.objectGUID
)
if plugObj.status == 'new':
@@ -625,8 +627,9 @@ def process_plugin_events(db, plugin, plugEventsArr):
("Plugin", "Object_PrimaryID", "Object_SecondaryID", "DateTimeCreated",
"DateTimeChanged", "Watched_Value1", "Watched_Value2", "Watched_Value3",
"Watched_Value4", "Status", "Extra", "UserData", "ForeignKey", "SyncHubNodeName",
- "HelpVal1", "HelpVal2", "HelpVal3", "HelpVal4")
- VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
+ "HelpVal1", "HelpVal2", "HelpVal3", "HelpVal4",
+ "ObjectGUID")
+ VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
""", objects_to_insert
)
@@ -637,7 +640,9 @@ def process_plugin_events(db, plugin, plugEventsArr):
UPDATE Plugins_Objects
SET "Plugin" = ?, "Object_PrimaryID" = ?, "Object_SecondaryID" = ?, "DateTimeCreated" = ?,
"DateTimeChanged" = ?, "Watched_Value1" = ?, "Watched_Value2" = ?, "Watched_Value3" = ?,
- "Watched_Value4" = ?, "Status" = ?, "Extra" = ?, "UserData" = ?, "ForeignKey" = ?, "SyncHubNodeName" = ?, "HelpVal1" = ?, "HelpVal2" = ?, "HelpVal3" = ?, "HelpVal4" = ?
+ "Watched_Value4" = ?, "Status" = ?, "Extra" = ?, "UserData" = ?, "ForeignKey" = ?, "SyncHubNodeName" = ?,
+ "HelpVal1" = ?, "HelpVal2" = ?, "HelpVal3" = ?, "HelpVal4" = ?,
+ "ObjectGUID" = ?
WHERE "Index" = ?
""", objects_to_update
)
@@ -651,8 +656,9 @@ def process_plugin_events(db, plugin, plugEventsArr):
("Plugin", "Object_PrimaryID", "Object_SecondaryID", "DateTimeCreated",
"DateTimeChanged", "Watched_Value1", "Watched_Value2", "Watched_Value3",
"Watched_Value4", "Status", "Extra", "UserData", "ForeignKey", "SyncHubNodeName",
- "HelpVal1", "HelpVal2", "HelpVal3", "HelpVal4")
- VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
+ "HelpVal1", "HelpVal2", "HelpVal3", "HelpVal4",
+ "ObjectGUID")
+ VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
""", events_to_insert
)
@@ -665,8 +671,9 @@ def process_plugin_events(db, plugin, plugEventsArr):
("Plugin", "Object_PrimaryID", "Object_SecondaryID", "DateTimeCreated",
"DateTimeChanged", "Watched_Value1", "Watched_Value2", "Watched_Value3",
"Watched_Value4", "Status", "Extra", "UserData", "ForeignKey", "SyncHubNodeName",
- "HelpVal1", "HelpVal2", "HelpVal3", "HelpVal4")
- VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
+ "HelpVal1", "HelpVal2", "HelpVal3", "HelpVal4",
+ "ObjectGUID")
+ VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
""", history_to_insert
)
@@ -807,6 +814,7 @@ class plugin_object_class:
self.helpVal2 = objDbRow[16]
self.helpVal3 = objDbRow[17]
self.helpVal4 = objDbRow[18]
+ self.objectGUID = generate_deterministic_guid(self.pluginPref, self.primaryId, self.secondaryId)
# Check if self.status is valid
diff --git a/server/plugin_utils.py b/server/plugin_utils.py
index 5cd55ad3..e60e7f6b 100755
--- a/server/plugin_utils.py
+++ b/server/plugin_utils.py
@@ -6,7 +6,7 @@ from logger import mylog
from const import pluginsPath, logPath, apiPath
from helper import timeNowTZ, get_file_content, write_file, get_setting, get_setting_value, setting_value_to_python_type
from app_state import updateState
-from crypto_utils import decrypt_data
+from crypto_utils import decrypt_data, generate_deterministic_guid
module_name = 'Plugin utils'
diff --git a/server/scan/device_handling.py b/server/scan/device_handling.py
index 69b075a2..3c42c6ed 100755
--- a/server/scan/device_handling.py
+++ b/server/scan/device_handling.py
@@ -317,16 +317,14 @@ def update_devices_data_from_scan (db):
WHERE Devices.devMac = CurrentScan.cur_MAC
)
WHERE
- (devParentPort IS NULL OR devParentPort = "" OR devParentPort = "null")
- AND
+ (devParentPort IS NULL OR devParentPort IN ("", "null", "(unknown)", "(Unknown)"))
+ AND
EXISTS (
SELECT 1
FROM CurrentScan
WHERE Devices.devMac = CurrentScan.cur_MAC
- AND CurrentScan.cur_Port IS NOT NULL
- AND CurrentScan.cur_Port NOT IN ("", "null")
- )
- """)
+ AND CurrentScan.cur_Port IS NOT NULL AND CurrentScan.cur_Port NOT IN ("", "null")
+ )""")
# Update only devices with empty or NULL devParentMAC
mylog('debug', '[Update Devices] - (if not empty) cur_NetworkNodeMAC -> devParentMAC')
@@ -336,16 +334,16 @@
             FROM CurrentScan
             WHERE Devices.devMac = CurrentScan.cur_MAC
         )
         WHERE
             (devParentMAC IS NULL OR devParentMAC IN ("", "null", "(unknown)", "(Unknown)"))
             AND
             EXISTS (
                 SELECT 1
                 FROM CurrentScan
                 WHERE Devices.devMac = CurrentScan.cur_MAC
                 AND CurrentScan.cur_NetworkNodeMAC IS NOT NULL AND CurrentScan.cur_NetworkNodeMAC NOT IN ("", "null")
             )
             """)
# Update only devices with empty or NULL devSite
mylog('debug', '[Update Devices] - (if not empty) cur_NetworkSite -> (if empty) devSite')
diff --git a/server/networkscan.py b/server/scan/session_events.py
similarity index 97%
rename from server/networkscan.py
rename to server/scan/session_events.py
index 6ed3aa8d..6c357e4c 100755
--- a/server/networkscan.py
+++ b/server/scan/session_events.py
@@ -1,15 +1,16 @@
+import sys
+# Register NetAlertX directories
+INSTALL_PATH="/app"
+sys.path.extend([f"{INSTALL_PATH}/server"])
import conf
-
-
-from device import create_new_devices, print_scan_stats, save_scanned_devices, update_devices_data_from_scan, exclude_ignored_devices
+from scan.device_handling import create_new_devices, print_scan_stats, save_scanned_devices, update_devices_data_from_scan, exclude_ignored_devices
from helper import timeNowTZ
from logger import mylog
from reporting import skip_repeated_notifications
-
#===============================================================================
# SCAN NETWORK
#===============================================================================
diff --git a/server/workflows/actions.py b/server/workflows/actions.py
new file mode 100755
index 00000000..0ad338db
--- /dev/null
+++ b/server/workflows/actions.py
@@ -0,0 +1,147 @@
+import sys
+import sqlite3
+
+# Register NetAlertX directories
+INSTALL_PATH="/app"
+sys.path.extend([f"{INSTALL_PATH}/server"])
+
+import conf
+from logger import mylog, Logger
+from helper import get_setting_value, timeNowTZ
+from models.device_instance import DeviceInstance
+from models.plugin_object_instance import PluginObjectInstance
+
+# Make sure log level is initialized correctly
+Logger(get_setting_value('LOG_LEVEL'))
+
+from workflows.triggers import Trigger
+
+class Action:
+ """Base class for all actions."""
+
+ def __init__(self, trigger):
+ self.trigger = trigger
+
+ def execute(self, obj):
+ """Executes the action on the given object."""
+ raise NotImplementedError("Subclasses must implement execute()")
+
+
+class UpdateFieldAction(Action):
+ """Action to update a specific field of an object."""
+
+ def __init__(self, db, field, value, trigger):
+ super().__init__(trigger) # Call the base class constructor
+ self.field = field
+ self.value = value
+ self.db = db
+
+ def execute(self):
+ mylog('verbose', f"[WF] Updating field '{self.field}' to '{self.value}' for event object {self.trigger.object_type}")
+
+ obj = self.trigger.object
+
+        # convert to dict for easier handling
+ if isinstance(obj, sqlite3.Row):
+ obj = dict(obj) # Convert Row object to a standard dictionary
+
+ processed = False
+
+ # currently unused
+ if isinstance(obj, dict) and "ObjectGUID" in obj:
+ mylog('debug', f"[WF] Updating Object '{obj}' ")
+ plugin_instance = PluginObjectInstance(self.db)
+ plugin_instance.updateField(obj["ObjectGUID"], self.field, self.value)
+ processed = True
+
+ elif isinstance(obj, dict) and "devGUID" in obj:
+ mylog('debug', f"[WF] Updating Device '{obj}' ")
+ device_instance = DeviceInstance(self.db)
+ device_instance.updateField(obj["devGUID"], self.field, self.value)
+ processed = True
+
+ if not processed:
+ mylog('none', f"[WF] Could not process action for object: {obj}")
+
+ return obj
+
+
+class DeleteObjectAction(Action):
+ """Action to delete an object."""
+
+ def __init__(self, db, trigger):
+ super().__init__(trigger) # Call the base class constructor
+ self.db = db
+
+ def execute(self):
+ mylog('verbose', f"[WF] Deleting event object {self.trigger.object_type}")
+
+ obj = self.trigger.object
+
+        # convert to dict for easier handling
+ if isinstance(obj, sqlite3.Row):
+ obj = dict(obj) # Convert Row object to a standard dictionary
+
+ processed = False
+
+ # currently unused
+ if isinstance(obj, dict) and "ObjectGUID" in obj:
+            mylog('debug', f"[WF] Deleting Object '{obj}' ")
+ plugin_instance = PluginObjectInstance(self.db)
+ plugin_instance.delete(obj["ObjectGUID"])
+ processed = True
+
+ elif isinstance(obj, dict) and "devGUID" in obj:
+            mylog('debug', f"[WF] Deleting Device '{obj}' ")
+ device_instance = DeviceInstance(self.db)
+ device_instance.delete(obj["devGUID"])
+ processed = True
+
+ if not processed:
+ mylog('none', f"[WF] Could not process action for object: {obj}")
+
+ return obj
+
+
+class RunPluginAction(Action):
+ """Action to run a specific plugin."""
+
+ def __init__(self, plugin_name, params, trigger): # Add trigger
+ super().__init__(trigger) # Call parent constructor
+ self.plugin_name = plugin_name
+ self.params = params
+
+ def execute(self):
+
+ obj = self.trigger.object
+
+ mylog('verbose', [f"Executing plugin '{self.plugin_name}' with parameters {self.params} for object {obj}"])
+ # PluginManager.run(self.plugin_name, self.parameters)
+ return obj
+
+
+class SendNotificationAction(Action):
+ """Action to send a notification."""
+
+ def __init__(self, method, message, trigger):
+ super().__init__(trigger) # Call parent constructor
+ self.method = method # Fix attribute name
+ self.message = message
+
+ def execute(self):
+ obj = self.trigger.object
+ mylog('verbose', [f"Sending notification via '{self.method}': {self.message} for object {obj}"])
+ # NotificationManager.send(self.method, self.message)
+ return obj
+
+
+class ActionGroup:
+ """Handles multiple actions applied to an object."""
+
+ def __init__(self, actions):
+ self.actions = actions
+
+ def execute(self, obj):
+ for action in self.actions:
+ action.execute(obj)
+ return obj
\ No newline at end of file
diff --git a/server/workflows/app_events.py b/server/workflows/app_events.py
new file mode 100755
index 00000000..5a623973
--- /dev/null
+++ b/server/workflows/app_events.py
@@ -0,0 +1,186 @@
+import datetime
+import json
+import uuid
+import sys
+import pytz
+
+# Register NetAlertX directories
+INSTALL_PATH="/app"
+sys.path.extend([f"{INSTALL_PATH}/server"])
+
+# Register NetAlertX modules
+import conf
+from helper import get_setting_value, timeNowTZ
+# Make sure the TIMEZONE for logging is correct
+# conf.tz = pytz.timezone(get_setting_value('TIMEZONE'))
+
+from logger import mylog, Logger, print_log, logResult
+
+# Make sure log level is initialized correctly
+Logger(get_setting_value('LOG_LEVEL'))
+
+from const import applicationPath, logPath, apiPath, confFileName, sql_generateGuid
+from helper import timeNowTZ
+
+class AppEvent_obj:
+ def __init__(self, db):
+ self.db = db
+
+ # Drop existing table
+ self.db.sql.execute("""DROP TABLE IF EXISTS "AppEvents" """)
+
+ # Drop all triggers
+ self.drop_all_triggers()
+
+ # Create the AppEvents table if missing
+ self.create_app_events_table()
+
+ # Define object mapping for different table structures, including fields, expressions, and constants
+ self.object_mapping = {
+ "Devices": {
+ "fields": {
+ "ObjectGUID": "NEW.devGUID",
+ "ObjectPrimaryID": "NEW.devMac",
+ "ObjectSecondaryID": "NEW.devLastIP",
+ "ObjectForeignKey": "NEW.devGUID",
+ "ObjectStatus": "CASE WHEN NEW.devPresentLastScan = 1 THEN 'online' ELSE 'offline' END",
+ "ObjectStatusColumn": "'devPresentLastScan'",
+ "ObjectIsNew": "NEW.devIsNew",
+ "ObjectIsArchived": "NEW.devIsArchived",
+ "ObjectPlugin": "'DEVICES'"
+ }
+ }
+ # ,
+ # "Plugins_Objects": {
+ # "fields": {
+ # "ObjectGUID": "NEW.ObjectGUID",
+ # "ObjectPrimaryID": "NEW.Plugin",
+ # "ObjectSecondaryID": "NEW.Object_PrimaryID",
+ # "ObjectForeignKey": "NEW.ForeignKey",
+ # "ObjectStatus": "NEW.Status",
+ # "ObjectStatusColumn": "'Status'",
+ # "ObjectIsNew": "CASE WHEN NEW.Status = 'new' THEN 1 ELSE 0 END",
+ # "ObjectIsArchived": "0", # Default value
+ # "ObjectPlugin": "NEW.Plugin"
+ # }
+ # }
+ }
+
+
+ # Re-Create triggers dynamically
+ for table, config in self.object_mapping.items():
+ self.create_trigger(table, "insert", config)
+ self.create_trigger(table, "update", config)
+ self.create_trigger(table, "delete", config)
+
+ self.save()
+
+ def drop_all_triggers(self):
+ """Drops all relevant triggers to ensure a clean start."""
+ self.db.sql.execute("""
+ SELECT 'DROP TRIGGER IF EXISTS ' || name || ';'
+ FROM sqlite_master
+ WHERE type = 'trigger';
+ """)
+
+ # Fetch all drop statements
+ drop_statements = self.db.sql.fetchall()
+
+ # Execute each drop statement
+ for statement in drop_statements:
+ self.db.sql.execute(statement[0])
+
+ self.save()
+
+ def create_app_events_table(self):
+ """Creates the AppEvents table if it doesn't exist."""
+ self.db.sql.execute("""
+ CREATE TABLE IF NOT EXISTS "AppEvents" (
+ "Index" INTEGER PRIMARY KEY AUTOINCREMENT,
+ "GUID" TEXT UNIQUE,
+ "AppEventProcessed" BOOLEAN,
+ "DateTimeCreated" TEXT,
+ "ObjectType" TEXT,
+ "ObjectGUID" TEXT,
+ "ObjectPlugin" TEXT,
+ "ObjectPrimaryID" TEXT,
+ "ObjectSecondaryID" TEXT,
+ "ObjectForeignKey" TEXT,
+ "ObjectIndex" TEXT,
+ "ObjectIsNew" BOOLEAN,
+ "ObjectIsArchived" BOOLEAN,
+ "ObjectStatusColumn" TEXT,
+ "ObjectStatus" TEXT,
+ "AppEventType" TEXT,
+ "Helper1" TEXT,
+ "Helper2" TEXT,
+ "Helper3" TEXT,
+ "Extra" TEXT
+ );
+ """)
+
+ def create_trigger(self, table_name, event, config):
+ """Generic function to create triggers dynamically."""
+ trigger_name = f"trg_{event}_{table_name.lower()}"
+
+ query = f"""
+ CREATE TRIGGER IF NOT EXISTS "{trigger_name}"
+ AFTER {event.upper()} ON "{table_name}"
+ WHEN NOT EXISTS (
+ SELECT 1 FROM AppEvents
+ WHERE AppEventProcessed = 0
+ AND ObjectType = '{table_name}'
+ AND ObjectGUID = {manage_prefix(config['fields']['ObjectGUID'], event)}
+ AND ObjectStatus = {manage_prefix(config['fields']['ObjectStatus'], event)}
+ AND AppEventType = '{event.lower()}'
+ )
+ BEGIN
+ INSERT INTO "AppEvents" (
+ "GUID",
+ "DateTimeCreated",
+ "AppEventProcessed",
+ "ObjectType",
+ "ObjectGUID",
+ "ObjectPrimaryID",
+ "ObjectSecondaryID",
+ "ObjectStatus",
+ "ObjectStatusColumn",
+ "ObjectIsNew",
+ "ObjectIsArchived",
+ "ObjectForeignKey",
+ "ObjectPlugin",
+ "AppEventType"
+ )
+ VALUES (
+ {sql_generateGuid},
+ DATETIME('now'),
+ FALSE,
+ '{table_name}',
+ {manage_prefix(config['fields']['ObjectGUID'], event)}, -- ObjectGUID
+ {manage_prefix(config['fields']['ObjectPrimaryID'], event)}, -- ObjectPrimaryID
+ {manage_prefix(config['fields']['ObjectSecondaryID'], event)}, -- ObjectSecondaryID
+ {manage_prefix(config['fields']['ObjectStatus'], event)}, -- ObjectStatus
+ {manage_prefix(config['fields']['ObjectStatusColumn'], event)}, -- ObjectStatusColumn
+ {manage_prefix(config['fields']['ObjectIsNew'], event)}, -- ObjectIsNew
+ {manage_prefix(config['fields']['ObjectIsArchived'], event)}, -- ObjectIsArchived
+ {manage_prefix(config['fields']['ObjectForeignKey'], event)}, -- ObjectForeignKey
+            {manage_prefix(config['fields']['ObjectPlugin'], event)}, -- ObjectPlugin
+ '{event.lower()}'
+ );
+ END;
+ """
+
+ mylog("verbose", [query])
+
+ self.db.sql.execute(query)
+
+ def save(self):
+ # Commit changes
+ self.db.commitDB()
+
+# Manage prefixes of column names
+def manage_prefix(field, event):
+ if event == "delete":
+ return field.replace("NEW.", "OLD.")
+ return field
+
diff --git a/server/workflows/conditions.py b/server/workflows/conditions.py
new file mode 100755
index 00000000..801974df
--- /dev/null
+++ b/server/workflows/conditions.py
@@ -0,0 +1,83 @@
+import re
+import sys
+import json
+
+# Register NetAlertX directories
+INSTALL_PATH="/app"
+sys.path.extend([f"{INSTALL_PATH}/server"])
+
+import conf
+from logger import mylog, Logger
+from helper import get_setting_value, timeNowTZ
+
+# Make sure log level is initialized correctly
+Logger(get_setting_value('LOG_LEVEL'))
+
+class Condition:
+ """Evaluates a single condition."""
+
+ def __init__(self, condition_json):
+ self.field = condition_json["field"]
+ self.operator = condition_json["operator"]
+ self.value = condition_json["value"]
+ self.negate = condition_json.get("negate", False)
+
+ def evaluate(self, trigger):
+
+        # try finding the value of the field on the event triggering this workflow or the object triggering the app event
+ appEvent_value = trigger.event[self.field] if self.field in trigger.event.keys() else None
+ eveObj_value = trigger.object[self.field] if self.field in trigger.object.keys() else None
+
+
+ # proceed only if value found
+ if appEvent_value is None and eveObj_value is None:
+ return False
+ elif appEvent_value is not None:
+ obj_value = appEvent_value
+ elif eveObj_value is not None:
+ obj_value = eveObj_value
+
+ # process based on operators
+ if self.operator == "equals":
+ result = str(obj_value) == str(self.value)
+ elif self.operator == "contains":
+ result = str(self.value) in str(obj_value)
+ elif self.operator == "regex":
+ result = bool(re.match(self.value, str(obj_value)))
+ else:
+ m = f"[WF] Unsupported operator: {self.operator}"
+ mylog('none', [m])
+ raise ValueError(m)
+
+ return not result if self.negate else result
+
+
+class ConditionGroup:
+ """Handles condition groups with AND, OR logic, supporting nested groups."""
+
+ def __init__(self, group_json):
+
+ mylog('none', ["[WF] json.dumps(group_json)"])
+ mylog('none', [json.dumps(group_json)])
+ mylog('none', [group_json])
+
+ self.logic = group_json.get("logic", "AND").upper()
+ self.conditions = []
+
+ for condition in group_json["conditions"]:
+ if "field" in condition: # Simple condition
+ self.conditions.append(Condition(condition))
+ else: # Nested condition group
+ self.conditions.append(ConditionGroup(condition))
+
+ def evaluate(self, event):
+ results = [condition.evaluate(event) for condition in self.conditions]
+
+ if self.logic == "AND":
+ return all(results)
+ elif self.logic == "OR":
+ return any(results)
+ else:
+ m = f"[WF] Unsupported logic: {self.logic}"
+ mylog('none', [m])
+ raise ValueError(m)
diff --git a/server/workflows/manager.py b/server/workflows/manager.py
new file mode 100755
index 00000000..709713cd
--- /dev/null
+++ b/server/workflows/manager.py
@@ -0,0 +1,160 @@
+import sys
+import json
+
+# Register NetAlertX directories
+INSTALL_PATH="/app"
+sys.path.extend([f"{INSTALL_PATH}/server"])
+
+import conf
+from const import fullConfFolder
+import workflows.actions
+from logger import mylog, Logger
+from helper import get_setting_value, timeNowTZ
+
+# Make sure log level is initialized correctly
+Logger(get_setting_value('LOG_LEVEL'))
+
+from workflows.triggers import Trigger
+from workflows.conditions import ConditionGroup
+from workflows.actions import *
+
+class WorkflowManager:
+ def __init__(self, db):
+ self.db = db
+ self.workflows = self.load_workflows()
+ self.update_api = False
+
+ def load_workflows(self):
+ """Load workflows from workflows.json."""
+ try:
+ workflows_json_path = fullConfFolder + '/workflows.json'
+ with open(workflows_json_path, 'r') as f:
+ workflows = json.load(f)
+ return workflows
+ except (FileNotFoundError, json.JSONDecodeError):
+ mylog('none', ['[WF] Failed to load workflows.json'])
+ return []
+
+ def get_new_app_events(self):
+ """Get new unprocessed events from the AppEvents table."""
+ result = self.db.sql.execute("""
+ SELECT * FROM AppEvents
+ WHERE AppEventProcessed = 0
+ ORDER BY DateTimeCreated ASC
+ """).fetchall()
+
+ mylog('none', [f'[WF] get_new_app_events - new events count: {len(result)}'])
+
+ return result
+
+ def process_event(self, event):
+ """Process the events. Check if events match a workflow trigger"""
+
+        mylog('verbose', [f"[WF] Processing event with GUID {event['GUID']}"])
+
+ # Check if the trigger conditions match
+ for workflow in self.workflows:
+
+ # construct trigger object which also evaluates if the current event triggers it
+ trigger = Trigger(workflow["trigger"], event, self.db)
+
+ if trigger.triggered:
+
+                mylog('verbose', [f"[WF] Event with GUID '{event['GUID']}' triggered the workflow '{workflow['name']}'"])
+
+ self.execute_workflow(workflow, trigger)
+
+ # After processing the event, mark the event as processed (set AppEventProcessed to 1)
+ self.db.sql.execute("""
+ UPDATE AppEvents
+ SET AppEventProcessed = 1
+ WHERE "Index" = ?
+ """, (event['Index'],)) # Pass the event's unique identifier
+ self.db.commitDB()
+
+
+
+ def execute_workflow(self, workflow, trigger):
+ """Execute the actions in the given workflow if conditions are met."""
+
+ # Ensure conditions exist
+ if not isinstance(workflow.get("conditions"), list):
+ m = f"[WF] workflow['conditions'] must be a list"
+ mylog('none', [m])
+ raise ValueError(m)
+
+ # Evaluate each condition group separately
+ for condition_group in workflow["conditions"]:
+
+ evaluator = ConditionGroup(condition_group)
+
+ if evaluator.evaluate(trigger): # If any group evaluates to True
+
+                mylog('none', [f"[WF] Workflow {workflow['name']} will be executed - conditions were evaluated as TRUE"])
+ mylog('debug', [f"[WF] Workflow condition_group: {condition_group}"])
+
+ self.execute_actions(workflow["actions"], trigger)
+ return # Stop if a condition group succeeds
+
+ mylog('none', ["[WF] No condition group matched. Actions not executed."])
+
+
+ def execute_actions(self, actions, trigger):
+ """Execute the actions defined in a workflow."""
+
+ for action in actions:
+ if action["type"] == "update_field":
+ field = action["field"]
+ value = action["value"]
+ action_instance = UpdateFieldAction(self.db, field, value, trigger)
+ # indicate if the api has to be updated
+ self.update_api = True
+
+ elif action["type"] == "run_plugin":
+ plugin_name = action["plugin"]
+ params = action["params"]
+ action_instance = RunPluginAction(self.db, plugin_name, params, trigger)
+
+ elif action["type"] == "delete_device":
+ action_instance = DeleteObjectAction(self.db, trigger)
+
+ # elif action["type"] == "send_notification":
+ # method = action["method"]
+ # message = action["message"]
+ # action_instance = SendNotificationAction(method, message, trigger)
+
+ else:
+ m = f"[WF] Unsupported action type: {action['type']}"
+ mylog('none', [m])
+ raise ValueError(m)
+
+ action_instance.execute() # Execute the action
+
+ # if result:
+ # # Iterate through actions and execute them
+ # for action in workflow["actions"]:
+ # if action["type"] == "update_field":
+ # # Action type is "update_field", so map to UpdateFieldAction
+ # field = action["field"]
+ # value = action["value"]
+ # action_instance = UpdateFieldAction(field, value)
+ # action_instance.execute(trigger.event)
+
+ # elif action["type"] == "run_plugin":
+ # # Action type is "run_plugin", so map to RunPluginAction
+ # plugin_name = action["plugin"]
+ # params = action["params"]
+ # action_instance = RunPluginAction(plugin_name, params)
+ # action_instance.execute(trigger.event)
+ # elif action["type"] == "send_notification":
+ # # Action type is "send_notification", so map to SendNotificationAction
+ # method = action["method"]
+ # message = action["message"]
+ # action_instance = SendNotificationAction(method, message)
+ # action_instance.execute(trigger.event)
+ # else:
+ # # Handle unsupported action types
+ # raise ValueError(f"Unsupported action type: {action['type']}")
+
+
+
diff --git a/server/workflows/triggers.py b/server/workflows/triggers.py
new file mode 100755
index 00000000..f5f4be60
--- /dev/null
+++ b/server/workflows/triggers.py
@@ -0,0 +1,62 @@
+import sys
+
+# Register NetAlertX directories
+INSTALL_PATH="/app"
+sys.path.extend([f"{INSTALL_PATH}/server"])
+
+import conf
+from logger import mylog, Logger
+from helper import get_setting_value, timeNowTZ
+
+# Make sure log level is initialized correctly
+Logger(get_setting_value('LOG_LEVEL'))
+
+
+class Trigger:
+ """Represents a trigger definition"""
+
+ def __init__(self, triggerJson, event, db):
+ """
+ :param name: Friendly name of the trigger
+        :param triggerJson: JSON trigger object {"object_type":"Devices","event_type":"update"}
+ :param event: The actual event that the trigger is evaluated against
+ :param db: DB connection in case trigger matches and object needs to be retrieved
+ """
+ self.object_type = triggerJson["object_type"]
+ self.event_type = triggerJson["event_type"]
+ self.event = event # Store the triggered event context, if provided
+ self.triggered = self.object_type == event["ObjectType"] and self.event_type == event["AppEventType"]
+
+ mylog('verbose', [f"[WF] self.triggered '{self.triggered}'"])
+
+ if self.triggered:
+ # object type corresponds with the DB table name
+ db_table = self.object_type
+
+ if db_table == "Devices":
+ refField = "devGUID"
+ elif db_table == "Plugins_Objects":
+ refField = "ObjectGUID"
+ else:
+ m = f"[WF] Unsupported object_type: {self.object_type}"
+ mylog('none', [m])
+ raise ValueError(m)
+
+            query = f"""
+                        SELECT * FROM 
+                        {db_table} 
+                        WHERE {refField} = ?
+                        """
+
+            mylog('debug', [query])
+
+            result = db.sql.execute(query, (event["ObjectGUID"],)).fetchall()
+ self.object = result[0]
+ else:
+ self.object = None
+
+
+ def set_event(self, event):
+ """Set or update the event context for this trigger"""
+ self.event = event
+
diff --git a/test/workflows.json b/test/workflows.json
new file mode 100755
index 00000000..08914fc4
--- /dev/null
+++ b/test/workflows.json
@@ -0,0 +1,55 @@
+[
+ {
+ "name": "Sample Device Update Workflow",
+ "trigger": {
+ "object_type": "Devices",
+ "event_type": "update"
+ },
+ "conditions": [
+ {
+ "logic": "AND",
+ "conditions": [
+ {
+ "field": "devVendor",
+ "operator": "contains",
+ "value": "Google"
+ },
+ {
+ "field": "devIsNew",
+ "operator": "equals",
+ "value": "1"
+ },
+ {
+ "logic": "OR",
+ "conditions": [
+ {
+ "field": "devIsNew",
+ "operator": "equals",
+ "value": "1"
+ },
+ {
+ "field": "devName",
+ "operator": "contains",
+ "value": "Google"
+ }
+ ]
+ }
+ ]
+ }
+ ],
+ "actions": [
+ {
+ "type": "update_field",
+ "field": "devIsNew",
+ "value": "0"
+ },
+ {
+ "type": "run_plugin",
+ "plugin": "SMTP",
+ "params": {
+ "message": "New device from Google detected."
+ }
+ }
+ ]
+ }
+]
\ No newline at end of file
-
-
- Events -
-
-
+
+
@@ -18,75 +18,111 @@ showSpinner()
$(document).ready(function() {
- // Load JSON data from the provided URL
- $.getJSON('/php/server/query_json.php?file=table_appevents.json', function(data) {
- // Process the JSON data and generate UI dynamically
- processData(data)
+ // Load JSON data from the provided URL
+ $.getJSON('/php/server/query_json.php?file=table_appevents.json', function(data) {
+ // Process the JSON data and generate UI dynamically
+ processData(data)
- // hide loading dialog
- hideSpinner()
- });
+ // hide loading dialog
+ hideSpinner()
+ });
});
function processData(data) {
- // Create an object to store unique ObjectType values as app event identifiers
- var appEventIdentifiers = {};
+ // Create an object to store unique ObjectType values as app event identifiers
+ var appEventIdentifiers = {};
- // Array to accumulate data for DataTable
- var allData = [];
+ // Array to accumulate data for DataTable
+ var allData = [];
- // Iterate through the data and generate tabs and content dynamically
- $.each(data.data, function(index, item) {
-
- // Accumulate data for DataTable
- allData.push(item);
-
- });
-
- // Initialize DataTable for all app events
+ // Iterate through the data and generate tabs and content dynamically
+ $.each(data.data, function(index, item) {
- $('#appevents-table').DataTable({
- data: allData,
- paging: true,
- lengthChange: true,
- lengthMenu: [[10, 25, 50, 100, 500, -1], [10, 25, 50, 100, 500, 'All']],
- searching: true,
- ordering: true,
- info: true,
- autoWidth: false,
- pageLength: 25, // Set the default paging to 25
- columns: [
- { data: 'DateTimeCreated', title: getString('AppEvents_DateTimeCreated') },
- { data: 'AppEventType', title: getString('AppEvents_Type') },
- { data: 'ObjectType', title: getString('AppEvents_ObjectType') },
- { data: 'ObjectPrimaryID', title: getString('AppEvents_ObjectPrimaryID') },
- { data: 'ObjectSecondaryID', title: getString('AppEvents_ObjectSecondaryID') },
- { data: 'ObjectStatus', title: getString('AppEvents_ObjectStatus') },
- { data: 'Extra', title: getString('AppEvents_Extra') },
- { data: 'ObjectPlugin', title: getString('AppEvents_Plugin') },
- // Add other columns as needed
- ],
- // Add column-specific configurations if needed
- columnDefs: [
- { className: 'text-center', targets: [3] },
- { width: '80px', targets: [6] },
- // ... Add other columnDefs as needed
- // Full MAC
- {targets: [3, 4],
- 'createdCell': function (td, cellData, rowData, row, col) {
- if (!emptyArr.includes(cellData)){
- $(td).html (createDeviceLink(cellData));
- } else {
- $(td).html ('');
- }
- } },
- ]
- });
+ // Accumulate data for DataTable
+ allData.push(item);
+
+ });
+
+ console.log(allData);
+
+
+ // Initialize DataTable for all app events
+
+ $('#appevents-table').DataTable({
+ data: allData,
+ paging: true,
+ lengthChange: true,
+ lengthMenu: [[10, 25, 50, 100, 500, -1], [10, 25, 50, 100, 500, 'All']],
+ searching: true,
+ ordering: true,
+ info: true,
+ autoWidth: false,
+ pageLength: 25, // Set the default paging to 25
+ columns: [
+ { data: 'DateTimeCreated', title: getString('AppEvents_DateTimeCreated') },
+ { data: 'AppEventProcessed', title: getString('AppEvents_AppEventProcessed') },
+ { data: 'AppEventType', title: getString('AppEvents_Type') },
+ { data: 'ObjectType', title: getString('AppEvents_ObjectType') },
+ { data: 'ObjectPrimaryID', title: getString('AppEvents_ObjectPrimaryID') },
+ { data: 'ObjectSecondaryID', title: getString('AppEvents_ObjectSecondaryID') },
+ { data: 'ObjectStatus', title: getString('AppEvents_ObjectStatus') },
+ { data: 'ObjectPlugin', title: getString('AppEvents_Plugin') },
+ { data: 'ObjectGUID', title: "Object GUID" },
+ { data: 'GUID', title: "Event GUID" },
+ // Add other columns as needed
+ ],
+ // Add column-specific configurations if needed
+ columnDefs: [
+ { className: 'text-center', targets: [4] },
+ { width: '80px', targets: [7] },
+ // ... Add other columnDefs as needed
+ // Full MAC
+ {targets: [4, 5],
+ 'createdCell': function (td, cellData, rowData, row, col) {
+ if (!emptyArr.includes(cellData)){
+ $(td).html (createDeviceLink(cellData));
+ } else {
+ $(td).html ('');
+ }
+ } },
+ // Processed
+ {targets: [1],
+ 'createdCell': function (td, cellData, rowData, row, col) {
+ // console.log(cellData);
+ $(td).html (cellData);
+ }
+ },
+ // Datetime
+ {targets: [0],
+ 'createdCell': function (td, cellData, rowData, row, col) {
+ let timezone = $("#NAX_TZ").html(); // e.g., 'Europe/Berlin'
+ let utcDate = new Date(cellData + ' UTC'); // Adding ' UTC' makes it interpreted as UTC time
+
+ // Format the date in the desired timezone
+ let options = {
+ year: 'numeric',
+ month: 'short',
+ day: '2-digit',
+ hour: '2-digit',
+ minute: '2-digit',
+ second: '2-digit',
+ hour12: false, // Use 24-hour format
+ timeZone: timezone // Use the specified timezone
+ };
+
+ let localDate = new Intl.DateTimeFormat('en-GB', options).format(utcDate);
+
+ // Update the table cell
+ $(td).html(localDate);
+ }
+ },
+ ]
+ });
- // Activate the first tab
- $('#tabs-location li:first-child').addClass('active');
- $('#tabs-content-location .tab-pane:first-child').addClass('active');
+ // Activate the first tab
+ $('#tabs-location li:first-child').addClass('active');
+ $('#tabs-content-location .tab-pane:first-child').addClass('active');
}
diff --git a/front/css/app.css b/front/css/app.css
index 92482f51..bc23bb2c 100755
--- a/front/css/app.css
+++ b/front/css/app.css
@@ -1524,10 +1524,11 @@ input[readonly] {
margin:0px;
align-items:center;
border-radius:20px;
- width:180px;
- display:flex;
+ /* width:190px; Don't change, smaller causes line break in network view */
+ /* display:flex; */
flex-direction:column;
justify-content:center;
+ display: inline-grid;
}
.networkHelpIcon
{
@@ -1597,7 +1598,7 @@ input[readonly] {
.spanNetworkTree {
display: inline-block;
- width: 135px;
+ /* width: 135px; */
white-space: nowrap;
overflow: hidden !important;
text-overflow: ellipsis;
@@ -1840,6 +1841,132 @@ input[readonly] {
height:50px;
}
+/* -----------------------------------------------------------------------------
+ Workflows
+----------------------------------------------------------------------------- */
+#wf-content-wrapper
+{
+ display: grid;
+}
+
+#workflowContainer
+{
+ display: grid;
+}
+
+#workflowContainerWrap {
+ display: grid;
+}
+
+#workflowContainerWrap .panel-collapse
+{
+ padding: 5px;
+}
+
+.workflows .col-sm-12, .workflows .col-sx-12
+{
+ padding-right: 5px;
+ padding-left: 5px;
+}
+
+.workflows .add-button-wrap .button-container
+{
+ padding-bottom: 5px;
+ text-align: center;
+}
+
+.workflows .actions-list {
+
+ display: block;
+}
+
+.workflows .form-group {
+ margin-bottom: 7px;
+ z-index: 1;
+ display: flex;
+}
+
+.workflows .section-title
+{
+ padding: 10px;
+ font-weight: bolder;
+ font-size: large;
+}
+
+.workflows .panel, .workflows .box {
+ padding-top: 5px;
+ padding-bottom: 5px;
+
+}
+
+.workflows .btn-secondary{
+ color: #000;
+}
+
+.workflows .button-container
+{
+ display: contents;
+ text-align: center;
+ width: 100%;
+}
+
+.workflows .panel:hover{
+ opacity: 1;
+}
+
+.workflows .panel{
+ opacity: 0.8;
+}
+
+.workflows .bottom-buttons button
+{
+ margin: 5px;
+}
+
+.workflows .button-container
+{
+ padding-right: 0px !important;
+ padding-left: 0px !important;
+}
+
+/* .workflows .condition-list button
+{
+ margin: 2px;
+} */
+
+/* .button-container button
+{
+ width:100%;
+} */
+
+.workflows .workflow-card
+{
+ display: block;
+}
+
+.workflow-card .panel-title
+{
+ padding: 10px;
+}
+
+.workflow-card, .actions-list
+{
+ display: contents;
+ padding: 5px;
+ padding-left: 10px;
+}
+
+.condition-list
+{
+ z-index:1;
+}
+
+.condition
+{
+ padding: 5px;
+ padding-left: 10px;
+}
+
/* -----------------------------------------------------------------------------
Floating edit button
----------------------------------------------------------------------------- */
diff --git a/front/css/system-dark-patch.css b/front/css/system-dark-patch.css
index cb9fd2a5..6b6a9197 100755
--- a/front/css/system-dark-patch.css
+++ b/front/css/system-dark-patch.css
@@ -744,4 +744,5 @@
.thresholdFormControl
{
color:#000;
-}
\ No newline at end of file
+}
+
diff --git a/front/devices.php b/front/devices.php
index c952eba2..29b29ef8 100755
--- a/front/devices.php
+++ b/front/devices.php
@@ -123,7 +123,7 @@
-
+
\ No newline at end of file
diff --git a/server/__main__.py b/server/__main__.py
index 7ecb18fa..36a2f8eb 100755
--- a/server/__main__.py
+++ b/server/__main__.py
@@ -28,13 +28,14 @@ from logger import mylog
from helper import filePermissions, timeNowTZ, get_setting_value
from app_state import updateState
from api import update_api
-from networkscan import process_scan
+from scan.session_events import process_scan
from initialise import importConfigs
from database import DB
from reporting import get_notifications
from notification import Notification_obj
from plugin import run_plugin_scripts, check_and_run_user_event
-from device import update_devices_names
+from scan.device_handling import update_devices_names
+from workflows.manager import WorkflowManager
#===============================================================================
#===============================================================================
@@ -79,6 +80,9 @@ def main ():
# Upgrade DB if needed
db.upgradeDB()
+ # Initialize the WorkflowManager
+ workflow_manager = WorkflowManager(db)
+
#===============================================================================
# This is the main loop of NetAlertX
#===============================================================================
@@ -180,15 +184,39 @@ def main ():
# Commit SQL
db.commitDB()
-
- # Footer
-
+
mylog('verbose', ['[MAIN] Process: Idle'])
else:
# do something
# mylog('verbose', ['[MAIN] Waiting to start next loop'])
- updateState("Process: Idle")
-
+ updateState("Process: Idle")
+
+ # WORKFLOWS handling
+ # ----------------------------------------
+ # Fetch new unprocessed events
+ new_events = workflow_manager.get_new_app_events()
+
+ mylog('debug', [f'[MAIN] Processing WORKFLOW new_events from get_new_app_events: {len(new_events)}'])
+
+ # Process each new event and check triggers
+ if len(new_events) > 0:
+ updateState("Workflows: Start")
+ update_api_flag = False
+ for event in new_events:
+ mylog('debug', [f'[MAIN] Processing WORKFLOW app event with GUID {event["GUID"]}'])
+
+ # proceed to process events
+ workflow_manager.process_event(event)
+
+ if workflow_manager.update_api:
+ # Update API endpoints if needed
+ update_api_flag = True
+
+ if update_api_flag:
+ update_api(db, all_plugins, True)
+
+ updateState("Workflows: End")
+
#loop
time.sleep(5) # wait for N seconds
diff --git a/server/appevent.py b/server/appevent.py
deleted file mode 100755
index c855150b..00000000
--- a/server/appevent.py
+++ /dev/null
@@ -1,380 +0,0 @@
-import datetime
-import json
-import uuid
-
-# Register NetAlertX modules
-import conf
-from const import applicationPath, logPath, apiPath, confFileName, sql_generateGuid
-from logger import logResult, mylog, print_log
-from helper import timeNowTZ
-
-#-------------------------------------------------------------------------------
-# Execution object handling
-#-------------------------------------------------------------------------------
-class AppEvent_obj:
- def __init__(self, db):
- self.db = db
-
- # drop table
- self.db.sql.execute("""DROP TABLE IF EXISTS "AppEvents" """)
-
- # Drop all triggers
- self.db.sql.execute('DROP TRIGGER IF EXISTS trg_create_device;')
- self.db.sql.execute('DROP TRIGGER IF EXISTS trg_read_device;')
- self.db.sql.execute('DROP TRIGGER IF EXISTS trg_update_device;')
- self.db.sql.execute('DROP TRIGGER IF EXISTS trg_delete_device;')
-
- self.db.sql.execute('DROP TRIGGER IF EXISTS trg_delete_plugin_object;')
- self.db.sql.execute('DROP TRIGGER IF EXISTS trg_create_plugin_object;')
- self.db.sql.execute('DROP TRIGGER IF EXISTS trg_update_plugin_object;')
-
- # Create AppEvent table if missing
- self.db.sql.execute("""CREATE TABLE IF NOT EXISTS "AppEvents" (
- "Index" INTEGER,
- "GUID" TEXT UNIQUE,
- "DateTimeCreated" TEXT,
- "ObjectType" TEXT, -- ObjectType (Plugins, Notifications, Events)
- "ObjectGUID" TEXT,
- "ObjectPlugin" TEXT,
- "ObjectPrimaryID" TEXT,
- "ObjectSecondaryID" TEXT,
- "ObjectForeignKey" TEXT,
- "ObjectIndex" TEXT,
- "ObjectIsNew" BOOLEAN,
- "ObjectIsArchived" BOOLEAN,
- "ObjectStatusColumn" TEXT, -- Status (Notifications, Plugins), eve_EventType (Events)
- "ObjectStatus" TEXT, -- new_devices, down_devices, events, new, watched-changed, watched-not-changed, missing-in-last-scan, Device down, New Device, IP Changed, Connected, Disconnected, VOIDED - Disconnected, VOIDED - Connected, -
+
- Events +
+
+