From 50304fd63b49cb32ae6d5ac1f12447b1193ce32e Mon Sep 17 00:00:00 2001
From: jokob-sk
Date: Tue, 1 Oct 2024 08:42:14 +1000
Subject: [PATCH] =?UTF-8?q?=F0=9F=93=8A=20Presence=20over=20time=20updates?=
 =?UTF-8?q?=20#816?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 docs/SUBNETS.md                       |  2 +-
 front/devices.php                     | 44 +++++++++++++-------
 front/js/graph_online_history.js      | 51 +++++++++++++++--------
 front/php/templates/graph.php         | 59 ---------------------------
 front/plugins/ui_settings/config.json |  4 +-
 front/presence.php                    | 41 +++++++++++++------
 server/api.py                         |  3 +-
 server/const.py                       |  1 +
 server/database.py                    | 41 ++++++++-----------
 server/networkscan.py                 | 45 +++++++++++++++++++-
 server/plugin.py                      |  2 +-
 11 files changed, 160 insertions(+), 133 deletions(-)
 delete mode 100755 front/php/templates/graph.php

diff --git a/docs/SUBNETS.md b/docs/SUBNETS.md
index 575d1717..ee49cb4a 100755
--- a/docs/SUBNETS.md
+++ b/docs/SUBNETS.md
@@ -2,7 +2,7 @@
 
 You need to specify the network interface and the network mask. You can also configure multiple subnets and specify VLANs (see VLAN exceptions below).
 
-`ARPSCAN` can scan multiple networks if the network allows it. To scan networks directly, the subnets must be accessible from the network where NetAlertX is running. You can verify this by running the following command in the container:
+`ARPSCAN` can scan multiple networks if the network allows it. To scan networks directly, the subnets must be accessible from the network where NetAlertX is running. This means NetAlertX needs to have access to the interface attached to that subnet. You can verify this by running the following command in the container:
 
 `sudo arp-scan --interface=eth0 192.168.1.0/24`
 
diff --git a/front/devices.php b/front/devices.php
index 32b2fa7a..a23a344f 100755
--- a/front/devices.php
+++ b/front/devices.php
@@ -15,8 +15,6 @@
diff --git a/front/js/graph_online_history.js b/front/js/graph_online_history.js
index 327be874..33a6715b 100755
--- a/front/js/graph_online_history.js
+++ b/front/js/graph_online_history.js
@@ -1,13 +1,17 @@
-function pia_draw_graph_online_history(pia_js_graph_online_history_time, pia_js_graph_online_history_ondev, pia_js_graph_online_history_dodev, pia_js_graph_online_history_ardev) {
-  var xValues = pia_js_graph_online_history_time;
-
-  // alert("dev presence")
+function presenceOverTime(
+    timeStamp,
+    onlineCount,
+    offlineCount,
+    archivedCount,
+    downCount
+  ) {
+  var xValues = timeStamp;
 
   // Data object for online status
   onlineData = {
     label: 'Online',
-    data: pia_js_graph_online_history_ondev,
-    borderColor: "rgba(0, 166, 89)",
+    data: onlineCount,
+    borderColor: "#000000",
     fill: true,
     backgroundColor: "rgba(0, 166, 89, .6)",
     pointStyle: 'circle',
@@ -15,20 +19,29 @@ function pia_draw_graph_online_history(pia_js_graph_online_history_time, pia_js_
     pointHoverRadius: 3
   };
 
+  // Data object for down status
+  downData = {
+    label: 'Down',
+    data: downCount,
+    borderColor: "#000000",
+    fill: true,
+    backgroundColor: "#dd4b39",
+  };
+
   // Data object for offline status
   offlineData = {
-    label: 'Offline/Down',
-    data: pia_js_graph_online_history_dodev,
-    borderColor: "rgba(222, 74, 56)",
+    label: 'Offline',
+    data: offlineCount,
+    borderColor: "#000000",
     fill: true,
-    backgroundColor: "rgba(222, 74, 56, .6)",
+    backgroundColor: "#b2b6be",
   };
 
   // Data object for archived status
   archivedData = {
     label: 'Archived',
-    data: pia_js_graph_online_history_ardev,
-    borderColor: "rgba(220,220,220)",
+    data: archivedCount,
+    borderColor: "#000000",
"#00000", fill: true, backgroundColor: "rgba(220,220,220, .6)", }; @@ -42,23 +55,27 @@ function pia_draw_graph_online_history(pia_js_graph_online_history_time, pia_js_ // Check if 'online' status should be displayed if(showStats.includes("online")) { - datasets.push(onlineData); // Add onlineData to datasets array + datasets.push(onlineData); + } + + // Check if 'down' status should be displayed + if(showStats.includes("down")) + { + datasets.push(downData); } // Check if 'offline' status should be displayed if(showStats.includes("offline")) { - datasets.push(offlineData); // Add offlineData to datasets array + datasets.push(offlineData); } // Check if 'archived' status should be displayed if(showStats.includes("archived")) { - datasets.push(archivedData); // Add archivedData to datasets array + datasets.push(archivedData); } - - new Chart("OnlineChart", { type: "bar", scaleIntegersOnly: true, diff --git a/front/php/templates/graph.php b/front/php/templates/graph.php deleted file mode 100755 index 3632ba62..00000000 --- a/front/php/templates/graph.php +++ /dev/null @@ -1,59 +0,0 @@ -query("SELECT * FROM Settings WHERE Code_Name = 'UI_PRESENCE'"); - -while($r = $statQuery->fetchArray(SQLITE3_ASSOC)) -{ - $statusesToShow = $r['Value']; -} - -$results = $db->query('SELECT * FROM Online_History ORDER BY Scan_Date DESC LIMIT 144'); - -while ($row = $results->fetchArray()) -{ - $time_raw = explode(' ', $row['Scan_Date']); - $time = explode(':', $time_raw[1]); - array_push($Pia_Graph_Device_Time, $time[0].':'.$time[1]); - - // Offline - if(strpos($statusesToShow, 'offline') !== false) - { - array_push($Pia_Graph_Device_Down, $row['Down_Devices']); - } - - // All - array_push($Pia_Graph_Device_All, $row['All_Devices']); - - // Online - if(strpos($statusesToShow, 'online') !== false) - { - array_push($Pia_Graph_Device_Online, $row['Online_Devices']); - } - - // Archived - if(strpos($statusesToShow, 'archived') !== false) - { - array_push($Pia_Graph_Device_Arch, $row['Archived_Devices']); - } -} -function pia_graph_devices_data($Pia_Graph_Array) { - $Pia_Graph_Array_rev = array_reverse($Pia_Graph_Array); - foreach ($Pia_Graph_Array_rev as $result) { - echo "'".$result."'"; - echo ","; - } -} diff --git a/front/plugins/ui_settings/config.json b/front/plugins/ui_settings/config.json index 56e12742..ec09fceb 100755 --- a/front/plugins/ui_settings/config.json +++ b/front/plugins/ui_settings/config.json @@ -253,8 +253,8 @@ ] }, "maxLength": 50, - "default_value": ["online", "offline", "archived"], - "options": ["online", "offline", "archived"], + "default_value": ["online", "down", "offline", "archived"], + "options": ["online", "down", "offline", "archived"], "localized": [], "name": [ { diff --git a/front/presence.php b/front/presence.php index cac5e4b2..1b4f843a 100755 --- a/front/presence.php +++ b/front/presence.php @@ -14,7 +14,6 @@ @@ -128,19 +127,37 @@ diff --git a/server/api.py b/server/api.py index 76e36f34..427dacdc 100755 --- a/server/api.py +++ b/server/api.py @@ -3,7 +3,7 @@ import json # Register NetAlertX modules import conf -from const import (apiPath, sql_appevents, sql_devices_all, sql_events_pending_alert, sql_settings, sql_plugins_events, sql_plugins_history, sql_plugins_objects,sql_language_strings, sql_notifications_all) +from const import (apiPath, sql_appevents, sql_devices_all, sql_events_pending_alert, sql_settings, sql_plugins_events, sql_plugins_history, sql_plugins_objects,sql_language_strings, sql_notifications_all, sql_online_history) from logger import mylog from helper 
@@ -32,6 +32,7 @@ def update_api(db, all_plugins, isNotification = False, updateOnlyDataSources =
         ["plugins_objects", sql_plugins_objects],
         ["plugins_language_strings", sql_language_strings],
         ["notifications", sql_notifications_all],
+        ["online_history", sql_online_history],
         ["custom_endpoint", conf.API_CUSTOM_SQL],
     ]
 
diff --git a/server/const.py b/server/const.py
index 4b0e043e..72ab755c 100755
--- a/server/const.py
+++ b/server/const.py
@@ -38,6 +38,7 @@ sql_settings = "SELECT * FROM Settings"
 sql_plugins_objects = "SELECT * FROM Plugins_Objects"
 sql_language_strings = "SELECT * FROM Plugins_Language_Strings"
 sql_notifications_all = "SELECT * FROM Notifications"
+sql_online_history = "SELECT * FROM Online_History"
 sql_plugins_events = "SELECT * FROM Plugins_Events"
 sql_plugins_history = "SELECT * FROM Plugins_History ORDER BY DateTimeChanged DESC"
 sql_new_devices = """SELECT * FROM (
diff --git a/server/database.py b/server/database.py
index d0ccf7bc..d260ddf6 100755
--- a/server/database.py
+++ b/server/database.py
@@ -101,7 +101,7 @@ class DB():
                 mylog('none','[upgradeDB] Table is incompatible, Dropping the Online_History table')
                 self.sql.execute("DROP TABLE Online_History;")
                 onlineHistoryAvailable = False
-
+
         if onlineHistoryAvailable == False :
             self.sql.execute("""
             CREATE TABLE "Online_History" (
@@ -115,6 +115,18 @@ class DB():
             );
             """)
 
+        # Offline_Devices column
+        Offline_Devices_missing = self.sql.execute ("""
+          SELECT COUNT(*) AS CNTREC FROM pragma_table_info('Online_History') WHERE name='Offline_Devices'
+        """).fetchone()[0] == 0
+
+        if Offline_Devices_missing :
+            mylog('verbose', ["[upgradeDB] Adding Offline_Devices to the Online_History table"])
+            self.sql.execute("""
+            ALTER TABLE "Online_History" ADD "Offline_Devices" INTEGER
+            """)
+
+
     # -------------------------------------------------------------------------
     # Alter Devices table
     # -------------------------------------------------------------------------
@@ -278,8 +290,8 @@ class DB():
                         Plugin,
                         Object_PrimaryID,
                         Object_SecondaryID,
-                        DateTimeCreated,
-                        DateTimeChanged,
+                        DateTimeCreated,
+                        DateTimeChanged,
                         Watched_Value1,
                         Watched_Value2,
                         Watched_Value3,
@@ -293,8 +305,8 @@ class DB():
                         'NMAP' AS Plugin,
                         MAC AS Object_PrimaryID,
                         Port AS Object_SecondaryID,
-                        Time AS DateTimeCreated,
-                        DATETIME('now') AS DateTimeChanged,
+                        Time AS DateTimeCreated,
+                        DATETIME('now') AS DateTimeChanged,
                         State AS Watched_Value1,
                         Service AS Watched_Value2,
                         '' AS Watched_Value3,
@@ -644,22 +656,3 @@ def get_all_devices(db):
 
 #-------------------------------------------------------------------------------
-#-------------------------------------------------------------------------------
-def insertOnlineHistory(db):
-    sql = db.sql #TO-DO
-    startTime = timeNowTZ()
-    # Add to History
-
-    History_All = db.read("SELECT * FROM Devices")
-    History_All_Devices = len(History_All)
-
-    History_Archived = db.read("SELECT * FROM Devices WHERE dev_Archived = 1")
-    History_Archived_Devices = len(History_Archived)
-
-    History_Online = db.read("SELECT * FROM Devices WHERE dev_PresentLastScan = 1")
-    History_Online_Devices = len(History_Online)
-    History_Offline_Devices = History_All_Devices - History_Archived_Devices - History_Online_Devices
-
-    sql.execute ("INSERT INTO Online_History (Scan_Date, Online_Devices, Down_Devices, All_Devices, Archived_Devices) "+
-                "VALUES ( ?, ?, ?, ?, ?)", (startTime, History_Online_Devices, History_Offline_Devices, History_All_Devices, History_Archived_Devices ) )
-    db.commitDB()
\ No newline at end of file
diff --git a/server/networkscan.py b/server/networkscan.py
index 0d144eb0..7a9b6206 100755
--- a/server/networkscan.py
+++ b/server/networkscan.py
@@ -2,7 +2,7 @@
 import conf
 
-from database import insertOnlineHistory
+
 from device import create_new_devices, print_scan_stats, save_scanned_devices, update_devices_data_from_scan
 from helper import timeNowTZ
 from logger import mylog
@@ -232,4 +232,45 @@ def insert_events (db):
                         FROM Devices, CurrentScan
                         WHERE dev_MAC = cur_MAC AND dev_LastIP <> cur_IP
                         """ )
-    mylog('debug','[Events] - Events end')
\ No newline at end of file
+    mylog('debug','[Events] - Events end')
+
+
+#-------------------------------------------------------------------------------
+def insertOnlineHistory(db):
+    sql = db.sql # TO-DO: Implement sql object
+
+    scanTimestamp = timeNowTZ()
+
+    # Query to fetch all relevant device counts in one go
+    query = """
+        SELECT
+            COUNT(*) AS allDevices,
+            SUM(CASE WHEN dev_Archived = 1 THEN 1 ELSE 0 END) AS archivedDevices,
+            SUM(CASE WHEN dev_PresentLastScan = 1 THEN 1 ELSE 0 END) AS onlineDevices,
+            SUM(CASE WHEN dev_PresentLastScan = 0 AND dev_AlertDeviceDown = 1 THEN 1 ELSE 0 END) AS downDevices
+        FROM Devices
+    """
+
+    deviceCounts = db.read(query)[0]  # Assuming db.read returns a list of rows, take the first (and only) row
+
+    allDevices = deviceCounts['allDevices']
+    archivedDevices = deviceCounts['archivedDevices']
+    onlineDevices = deviceCounts['onlineDevices']
+    downDevices = deviceCounts['downDevices']
+
+    offlineDevices = allDevices - archivedDevices - onlineDevices
+
+    # Prepare the insert query using parameterized inputs
+    insert_query = """
+        INSERT INTO Online_History (Scan_Date, Online_Devices, Down_Devices, All_Devices, Archived_Devices, Offline_Devices)
+        VALUES (?, ?, ?, ?, ?, ?)
+    """
+
+    mylog('debug', f'[Presence graph] Sql query: {insert_query} with values: {scanTimestamp}, {onlineDevices}, {downDevices}, {allDevices}, {archivedDevices}, {offlineDevices}')
+
+    # Insert the gathered data into the history table
+    sql.execute(insert_query, (scanTimestamp, onlineDevices, downDevices, allDevices, archivedDevices, offlineDevices))
+
+    db.commitDB()
+
+
diff --git a/server/plugin.py b/server/plugin.py
index 7ed51b1c..2fc72e5e 100755
--- a/server/plugin.py
+++ b/server/plugin.py
@@ -129,7 +129,7 @@ def run_plugin_scripts(db, all_plugins, runType, pluginsState = plugins_state()):
 
         if shouldRun:
             # Header
-            updateState(f"Plugins: {prefix}")
+            updateState(f"Plugin: {prefix}")
             print_plugin_info(plugin, ['display_name'])
 
             mylog('debug', ['[Plugins] CMD: ', get_plugin_setting_obj(plugin, "CMD")["value"]])
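For reference, below the patch is a minimal standalone sketch (illustration only, not part of the commit) of the bucketing that the new insertOnlineHistory() aggregate query performs. It assumes nothing beyond an in-memory SQLite Devices table with the four columns the query reads; the MAC addresses and row mix are invented for the example. It also makes visible that, with the derivation offline = all - archived - online, a device counted as Down is included in the Offline total as well.

import sqlite3

# Throwaway database with only the columns the aggregate query touches.
conn = sqlite3.connect(":memory:")
conn.execute("""
    CREATE TABLE Devices (
        dev_MAC             TEXT,
        dev_Archived        INTEGER,
        dev_PresentLastScan INTEGER,
        dev_AlertDeviceDown INTEGER
    )
""")
conn.executemany(
    "INSERT INTO Devices VALUES (?, ?, ?, ?)",
    [
        ("aa:aa:aa:aa:aa:01", 0, 1, 0),  # online
        ("aa:aa:aa:aa:aa:02", 0, 0, 1),  # down: not present last scan, down-alert enabled
        ("aa:aa:aa:aa:aa:03", 0, 0, 0),  # offline: not present, no down-alert
        ("aa:aa:aa:aa:aa:04", 1, 0, 0),  # archived
    ],
)

# Same single aggregate query as the patch, fetched in one round trip.
allDevices, archivedDevices, onlineDevices, downDevices = conn.execute("""
    SELECT
        COUNT(*),
        SUM(CASE WHEN dev_Archived = 1 THEN 1 ELSE 0 END),
        SUM(CASE WHEN dev_PresentLastScan = 1 THEN 1 ELSE 0 END),
        SUM(CASE WHEN dev_PresentLastScan = 0 AND dev_AlertDeviceDown = 1 THEN 1 ELSE 0 END)
    FROM Devices
""").fetchone()

# Same derivation as the patch: offline is everything that is neither online
# nor archived, so the "down" device above also lands in the offline count.
offlineDevices = allDevices - archivedDevices - onlineDevices

print(onlineDevices, downDevices, offlineDevices, archivedDevices)  # 1 1 2 1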