From c2da5c56b8f281773ccd6d3e3d3b0f6e95fd45b6 Mon Sep 17 00:00:00 2001 From: Jokob-sk Date: Sun, 6 Aug 2023 10:50:03 +1000 Subject: [PATCH] ARPSCAN to plugin rewrite --- docker-compose.yml | 3 +- front/plugins/README.md | 8 +- front/plugins/arp_scan/config.json | 149 ++++++++++++---------- front/plugins/arp_scan/script.py | 117 +++++++++++++---- front/plugins/dhcp_leases/config.json | 2 +- front/plugins/dhcp_servers/config.json | 2 +- front/plugins/known_template/config.json | 9 +- front/plugins/undiscoverables/config.json | 2 +- front/plugins/unifi_import/config.json | 2 +- front/plugins/website_monitor/config.json | 2 +- pialert/__main__.py | 3 +- pialert/device.py | 146 +++++++++------------ pialert/initialise.py | 9 +- pialert/networkscan.py | 40 +++--- pialert/plugin.py | 47 ++++--- 15 files changed, 303 insertions(+), 238 deletions(-) diff --git a/docker-compose.yml b/docker-compose.yml index 1d2594b1..b950913a 100755 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -9,7 +9,7 @@ services: - type=registry,ref=docker.io/jokob-sk/pi.alert:buildcache container_name: pialert network_mode: host - restart: unless-stopped + # restart: unless-stopped volumes: - ${APP_DATA_LOCATION}/pialert_dev/config:/home/pi/pialert/config # - ${APP_DATA_LOCATION}/pialert/config:/home/pi/pialert/config @@ -46,6 +46,7 @@ services: - ${DEV_LOCATION}/front/network.php:/home/pi/pialert/front/network.php - ${DEV_LOCATION}/front/presence.php:/home/pi/pialert/front/presence.php - ${DEV_LOCATION}/front/settings.php:/home/pi/pialert/front/settings.php + - ${DEV_LOCATION}/front/systeminfo.php:/home/pi/pialert/front/systeminfo.php - ${DEV_LOCATION}/front/flows.php:/home/pi/pialert/front/flows.php - ${DEV_LOCATION}/front/plugins:/home/pi/pialert/front/plugins # DELETE END anyone trying to use this file: comment out / delete ABOVE lines, they are only for development purposes diff --git a/front/plugins/README.md b/front/plugins/README.md index 7ff75a88..9a33e305 100755 --- a/front/plugins/README.md +++ b/front/plugins/README.md @@ -97,7 +97,7 @@ More on specifics below. Currently, only 3 data sources are supported (valid `data_source` value). -- Script (`python-script`) +- Script (`script`) - SQL query on the PiAlert database (`pialert-db-query`) - Template (`template`) @@ -107,9 +107,9 @@ Currently, only 3 data sources are supported (valid `data_source` value). >``` Any of the above data sources have to return a "table" of the exact structure as outlined above. -### "data_source": "python-script" +### "data_source": "script" - If the `data_source` is set to `python-script` the `CMD` setting (that you specify in the `settings` array section in the `config.json`) needs to contain an executable Linux command, that generates a `last_result.log` file. This file needs to be stored in the same folder as the plugin. + If the `data_source` is set to `script` the `CMD` setting (that you specify in the `settings` array section in the `config.json`) needs to contain an executable Linux command, that generates a `last_result.log` file. This file needs to be stored in the same folder as the plugin. The content of the `last_result.log` file needs to contain the columns as defined in the "Column order and values" section above. The order of columns can't be changed. After every scan it should contain only the results from the latest scan/execution. @@ -264,7 +264,7 @@ This approach is used to implement the `DHCPLSS` plugin. The script parses all s "code_name": "dhcp_leases", "unique_prefix": "DHCPLSS", ... 
- "data_source": "python-script", + "data_source": "script", "localized": ["display_name", "description", "icon"], "mapped_to_table": "DHCP_Leases", ... diff --git a/front/plugins/arp_scan/config.json b/front/plugins/arp_scan/config.json index d8c3bfcb..4be96348 100755 --- a/front/plugins/arp_scan/config.json +++ b/front/plugins/arp_scan/config.json @@ -2,34 +2,34 @@ "code_name": "arpscan", "unique_prefix": "ARPSCAN", "enabled": true, - "data_source": "python-script", - "mapped_to_table": "DHCP_Leases", + "data_source": "script", + "mapped_to_table": "CurrentScan", "localized": ["display_name", "description", "icon"], "display_name": [ { "language_code": "en_us", - "string": "Un-Discoverable Devices" + "string": "Network scan (Arp-Scan)" } ], "icon": [ { "language_code": "en_us", - "string": "" + "string": "" } ], "description": [ { "language_code": "en_us", - "string": "This plugin is to import undiscoverable devices from a file." + "string": "This plugin is to execute an arp-scan on the local network" } ], "params" : [ { - "name" : "devices", + "name" : "subnets", "type" : "setting", - "value" : "UNDIS_devices_to_import" + "value" : "SCAN_SUBNETS" }], "settings": [ @@ -37,7 +37,7 @@ "function": "RUN", "type": "text.select", "default_value":"disabled", - "options": ["disabled", "once", "always_after_scan"], + "options": ["disabled", "once", "schedule", "scan_cycle", "always_after_scan", "on_new_device"], "localized": ["name", "description"], "name" :[{ "language_code":"en_us", @@ -50,8 +50,8 @@ }, { "function": "CMD", - "type": "text", - "default_value": "python3 /home/pi/pialert/front/plugins/undiscoverables/script.py devices={devices}", + "type": "readonly", + "default_value": "python3 /home/pi/pialert/front/plugins/arp_scan/script.py userSubnets={subnets}", "options": [], "localized": ["name", "description"], "name": [ @@ -63,7 +63,7 @@ "description": [ { "language_code": "en_us", - "string": "Command to run. This can not be changed" + "string": "Command to run. This should not be changed" } ] }, @@ -71,7 +71,7 @@ { "function": "RUN_TIMEOUT", "type": "integer", - "default_value": 10, + "default_value": 300, "options": [], "localized": ["name", "description"], "name": [ @@ -88,28 +88,39 @@ ] }, { - "function": "WATCH", - "type": "readonly", - "default_value": [], - "options": [], - "localized": ["name", "description"], - "name": [ - { - "language_code": "en_us", - "string": "Watched" - } - ], - "description": [ - { - "language_code": "en_us", - "string": "Undiscoverable Devices can not change their status, no watch is enabled." - } - ] + "function": "RUN_SCHD", + "type": "text", + "default_value":"*/3 * * * *", + "options": [], + "localized": ["name", "description"], + "name" : [{ + "language_code":"en_us", + "string" : "Schedule" + }], + "description": [{ + "language_code":"en_us", + "string" : "Only enabled if you select schedule in the ARPSCAN_RUN setting. Make sure you enter the schedule in the correct cron-like format (e.g. validate at crontab.guru). For example entering */3 * * * * will run the scan every 3 minutes. Will be run NEXT time the time passes." + }] + }, + { + "function": "WATCH", + "type": "text.multiselect", + "default_value":["Watched_Value1", "Watched_Value2"], + "options": ["Watched_Value1","Watched_Value2","Watched_Value3","Watched_Value4"], + "localized": ["name", "description"], + "name" :[{ + "language_code":"en_us", + "string" : "Watched" + }] , + "description":[{ + "language_code":"en_us", + "string" : "Send a notification if selected values change. 
Use CTRL + Click to select/deselect. " + }] }, { "function": "REPORT_ON", - "type": "readonly", - "default_value": [], + "type": "text.multiselect", + "default_value": ["new", "watched-changed"], "options": ["new", "watched-changed", "watched-not-changed"], "localized": ["name", "description"], "name": [ @@ -121,60 +132,31 @@ "description": [ { "language_code": "en_us", - "string": "No notifications will be sent." + "string": "When should notification be sent out." } ] - }, - { - "function": "devices_to_import", - "type": "list", - "default_value":["dummy_router"], - "options": [], - "localized": ["name", "description"], - "name" : [{ - "language_code":"en_us", - "string" : "UnDiscoverable Devices" - }], - "description": [{ - "language_code":"en_us", - "string" : "Devices to be added to the devices list." - }] - } + } ], "database_column_definitions": [ { - "column": "Watched_Value1", - "mapped_to_column": "DHCP_Name", + "column": "Object_PrimaryID", + "mapped_to_column": "cur_MAC", "css_classes": "col-sm-2", "show": true, - "type": "label", + "type": "devicemac", "default_value":"", "options": [], "localized": ["name"], "name":[{ "language_code":"en_us", - "string" : "Device Name" + "string" : "MAC" }] }, { - "column": "Object_PrimaryID", - "mapped_to_column": "DHCP_MAC", - "css_classes": "col-sm-2", - "show": true, - "type": "devicemac", - "default_value":"", - "options": [], - "localized": ["name"], - "name":[{ - "language_code":"en_us", - "string" : "MAC address" - }] - }, - { - "column": "Object_SecondaryID", - "mapped_to_column": "DHCP_IP", + "column": "Watched_Value1", + "mapped_to_column": "cur_IP", "css_classes": "col-sm-2", "show": true, "type": "deviceip", @@ -185,6 +167,34 @@ "language_code":"en_us", "string" : "IP" }] + }, + { + "column": "Watched_Value2", + "mapped_to_column": "cur_Vendor", + "css_classes": "col-sm-2", + "show": true, + "type": "label", + "default_value":"", + "options": [], + "localized": ["name"], + "name":[{ + "language_code":"en_us", + "string" : "Vendor" + }] + } , + { + "column": "Extra", + "mapped_to_column": "cur_ScanMethod", + "css_classes": "col-sm-2", + "show": true, + "type": "label", + "default_value":"", + "options": [], + "localized": ["name"], + "name":[{ + "language_code":"en_us", + "string" : "Scan method" + }] } , { "column": "DateTimeCreated", @@ -200,8 +210,7 @@ }] }, { - "column": "DateTimeChanged", - "mapped_to_column": "DHCP_DateTime", + "column": "DateTimeChanged", "css_classes": "col-sm-2", "show": true, "type": "label", diff --git a/front/plugins/arp_scan/script.py b/front/plugins/arp_scan/script.py index ecfece0c..5e16ee96 100755 --- a/front/plugins/arp_scan/script.py +++ b/front/plugins/arp_scan/script.py @@ -1,48 +1,121 @@ #!/usr/bin/env python -# test script by running python script.py devices=test,dummy import os import pathlib import argparse import sys +import re +import subprocess +from time import strftime sys.path.append("/home/pi/pialert/front/plugins") -from plugin_helper import Plugin_Objects +from plugin_helper import Plugin_Object, Plugin_Objects CUR_PATH = str(pathlib.Path(__file__).parent.resolve()) -LOG_FILE = os.path.join(CUR_PATH , 'script.log') -RESULT_FILE = os.path.join(CUR_PATH , 'last_result.log') - +LOG_FILE = os.path.join(CUR_PATH, 'script.log') +RESULT_FILE = os.path.join(CUR_PATH, 'last_result.log') def main(): - - # the script expects a parameter in the format of devices=device1,device2,... + # the script expects a parameter in the format of userSubnets=subnet1,subnet2,... 
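    # Example invocation (illustrative only; the subnet values below are placeholders,
    # not taken from this patch), following the format described in the comment above:
    #   python3 script.py userSubnets=192.168.1.0/24,192.168.2.0/24
    # The entries ultimately come from the SCAN_SUBNETS setting via the CMD defined in
    # config.json and are handed to arp-scan further below in execute_arpscan().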
parser = argparse.ArgumentParser(description='Import devices from settings') - parser.add_argument('devices', action="store", help="list of device names separated by ','") + parser.add_argument('userSubnets', nargs='+', help="list of subnets with options") values = parser.parse_args() - UNDIS_devices = Plugin_Objects( RESULT_FILE ) + devices = Plugin_Objects(RESULT_FILE) - if values.devices: - for fake_dev in values.devices.split('=')[1].split(','): - UNDIS_devices.add_object( - primaryId=fake_dev, # MAC (Device Name) - secondaryId="0.0.0.0", # IP Address (always 0.0.0.0) - watched1=fake_dev, # Device Name - watched2="", - watched3="", - watched4="", - extra="", - foreignKey="") + subnets_list = [] - UNDIS_devices.write_result_file() + if isinstance(values.userSubnets, list): + subnets_list = values.userSubnets + else: + subnets_list = [values.userSubnets] + + unique_devices = execute_arpscan(subnets_list) + + for device in unique_devices: + devices.add_object( + primaryId=device['mac'], # MAC (Device Name) + secondaryId=device['ip'], # IP Address + watched1=device['ip'], # Device Name + watched2=device.get('hw', ''), # Vendor (assuming it's in the 'hw' field) + watched3=device.get('interface', ''), # Add the interface + watched4='', + extra='arp-scan', + foreignKey="") + + devices.write_result_file() return 0 + +def execute_arpscan(userSubnets): + # output of possible multiple interfaces + arpscan_output = "" + devices_list = [] + + # scan each interface + + for interface in userSubnets : + + arpscan_output = execute_arpscan_on_interface (interface) + + print(arpscan_output) + + # Search IP + MAC + Vendor as regular expresion + re_ip = r'(?P((2[0-5]|1[0-9]|[0-9])?[0-9]\.){3}((2[0-5]|1[0-9]|[0-9])?[0-9]))' + re_mac = r'(?P([0-9a-fA-F]{2}[:-]){5}([0-9a-fA-F]{2}))' + re_hw = r'(?P.*)' + re_pattern = re.compile (re_ip + '\s+' + re_mac + '\s' + re_hw) + + devices_list_tmp = [ + {**device.groupdict(), "interface": interface} + for device in re.finditer(re_pattern, arpscan_output) + ] + + devices_list += devices_list_tmp + + # mylog('debug', ['[ARP Scan] Found: Devices including duplicates ', len(devices_list) ]) + + # Delete duplicate MAC + unique_mac = [] + unique_devices = [] + + for device in devices_list : + if device['mac'] not in unique_mac: + unique_mac.append(device['mac']) + unique_devices.append(device) + + # return list + # mylog('debug', ['[ARP Scan] Found: Devices without duplicates ', len(unique_devices) ]) + + print("Devices List len:", len(devices_list)) # Add this line to print devices_list + print("Devices List:", devices_list) # Add this line to print devices_list + + return devices_list + + +def execute_arpscan_on_interface(interface): + # Prepare command arguments + arpscan_args = ['sudo', 'arp-scan', '--ignoredups', '--retry=6'] + interface.split() + + # Execute command + try: + # try running a subprocess safely + result = subprocess.check_output(arpscan_args, universal_newlines=True) + except subprocess.CalledProcessError as e: + # An error occurred, handle it + error_type = type(e).__name__ # Capture the error type + result = "" + + return result + + + + #=============================================================================== # BEGIN #=============================================================================== if __name__ == '__main__': - main() \ No newline at end of file + main() diff --git a/front/plugins/dhcp_leases/config.json b/front/plugins/dhcp_leases/config.json index 447ff547..8d341444 100755 --- a/front/plugins/dhcp_leases/config.json +++ 
b/front/plugins/dhcp_leases/config.json @@ -2,7 +2,7 @@ "code_name": "dhcp_leases", "unique_prefix": "DHCPLSS", "enabled": true, - "data_source": "python-script", + "data_source": "script", "data_filters": [ { "compare_column" : "Object_PrimaryID", diff --git a/front/plugins/dhcp_servers/config.json b/front/plugins/dhcp_servers/config.json index 90ce45d5..ce9516ee 100755 --- a/front/plugins/dhcp_servers/config.json +++ b/front/plugins/dhcp_servers/config.json @@ -2,7 +2,7 @@ "code_name": "dhcp_servers", "unique_prefix": "DHCPSRVS", "enabled": true, - "data_source": "python-script", + "data_source": "script", "localized": ["display_name", "description", "icon"], "display_name" : [{ "language_code":"en_us", diff --git a/front/plugins/known_template/config.json b/front/plugins/known_template/config.json index 85190a54..c7f9bddd 100755 --- a/front/plugins/known_template/config.json +++ b/front/plugins/known_template/config.json @@ -48,9 +48,8 @@ ], "settings":[ { - "function": "flows", - "type": "json", - "maxLength": 50, + "function": "FLOW", + "type": "json", "default_value": [{ "name":"apply_template", "trigger": [ @@ -121,13 +120,13 @@ "name": [ { "language_code": "en_us", - "string": "Flows" + "string": "Plugin flow" } ], "description": [ { "language_code": "en_us", - "string": "The flow." + "string": "This flow makes sure the template is applied to devices that are older than 3 days." } ] }, diff --git a/front/plugins/undiscoverables/config.json b/front/plugins/undiscoverables/config.json index 1cb29542..6b0a7163 100755 --- a/front/plugins/undiscoverables/config.json +++ b/front/plugins/undiscoverables/config.json @@ -2,7 +2,7 @@ "code_name": "undiscoverables", "unique_prefix": "UNDIS", "enabled": true, - "data_source": "python-script", + "data_source": "script", "mapped_to_table": "DHCP_Leases", "localized": ["display_name", "description", "icon"], diff --git a/front/plugins/unifi_import/config.json b/front/plugins/unifi_import/config.json index a8b8d2db..a4449c4f 100755 --- a/front/plugins/unifi_import/config.json +++ b/front/plugins/unifi_import/config.json @@ -2,7 +2,7 @@ "code_name": "unifi_import", "unique_prefix": "UNFIMP", "enabled": true, - "data_source": "python-script", + "data_source": "script", "data_filters": [ { "compare_column" : "Object_PrimaryID", diff --git a/front/plugins/website_monitor/config.json b/front/plugins/website_monitor/config.json index 34e2692f..051b050d 100755 --- a/front/plugins/website_monitor/config.json +++ b/front/plugins/website_monitor/config.json @@ -2,7 +2,7 @@ "code_name": "website_monitor", "unique_prefix": "WEBMON", "enabled": true, - "data_source": "python-script", + "data_source": "script", "localized": ["display_name", "description", "icon"], "display_name" : [{ "language_code":"en_us", diff --git a/pialert/__main__.py b/pialert/__main__.py index 47f0b0bd..1b7fb2db 100755 --- a/pialert/__main__.py +++ b/pialert/__main__.py @@ -213,6 +213,7 @@ def main (): nmapSchedule.last_run = timeNow() performNmapScan(db, get_all_devices(db)) + # todo replace the scans with plugins # Perform a network scan via arp-scan or pihole if last_network_scan + datetime.timedelta(minutes=conf.SCAN_CYCLE_MINUTES) < loop_start_time: last_network_scan = loop_start_time @@ -250,7 +251,7 @@ def main (): # -------------------------------------------------- # process all the scanned data into new devices mylog('debug', "[MAIN] start processig scan results") - process_scan (db, conf.arpscan_devices ) + process_scan (db) # Reporting if conf.cycle in conf.check_report: 
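For context on the `__main__.py` change above: `process_scan(db)` no longer receives an in-memory arp-scan result list because the ARPSCAN plugin is mapped to the `CurrentScan` table and stages its rows there before processing. A minimal sketch of how that hand-off could be inspected, assuming the default database location `/home/pi/pialert/db/pialert.db` (the path is an assumption; the column names come from this patch):

```python
# Illustrative only: confirm that the ARPSCAN plugin has staged its results in
# CurrentScan before process_scan(db) runs. The database path is an assumption;
# the column names match the mapping in front/plugins/arp_scan/config.json.
import sqlite3

conn = sqlite3.connect('/home/pi/pialert/db/pialert.db')
rows = conn.execute(
    "SELECT cur_MAC, cur_IP, cur_Vendor, cur_ScanMethod "
    "FROM CurrentScan WHERE cur_ScanMethod = 'arp-scan'"
).fetchall()
print(f"{len(rows)} arp-scan rows staged in CurrentScan")
conn.close()
```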
diff --git a/pialert/device.py b/pialert/device.py index b2217d89..0bfb7148 100755 --- a/pialert/device.py +++ b/pialert/device.py @@ -14,44 +14,48 @@ from scanners.pholusscan import performPholusScan, resolve_device_name_dig, reso #------------------------------------------------------------------------------- -def save_scanned_devices (db, p_arpscan_devices, p_cycle_interval): +def save_scanned_devices (db): sql = db.sql #TO-DO cycle = 1 # always 1, only one cycle supported - mylog('debug', ['[ARP Scan] Detected devices:', len(p_arpscan_devices)]) + # mylog('debug', ['[ARP Scan] Detected devices:', len(p_arpscan_devices)]) - # Delete previous scan data - sql.execute ("DELETE FROM CurrentScan WHERE cur_ScanCycle = ?", - (cycle,)) + # handled by the ARPSCAN plugin + # # Delete previous scan data + # sql.execute ("DELETE FROM CurrentScan") - if len(p_arpscan_devices) > 0: - # Insert new arp-scan devices - sql.executemany ("INSERT INTO CurrentScan (cur_ScanCycle, cur_MAC, "+ - " cur_IP, cur_Vendor, cur_ScanMethod) "+ - "VALUES ("+ str(cycle) + ", :mac, :ip, :hw, 'arp-scan')", - p_arpscan_devices) + # if len(p_arpscan_devices) > 0: + # # Insert new arp-scan devices + # sql.executemany ("INSERT INTO CurrentScan (cur_ScanCycle, cur_MAC, "+ + # " cur_IP, cur_Vendor, cur_ScanMethod) "+ + # "VALUES (1, :mac, :ip, :hw, 'arp-scan')", + # p_arpscan_devices) + + +# ------------------------ TO CONVERT INTO PLUGIN + # # Insert Pi-hole devices + # startTime = timeNow() + # sql.execute ("""INSERT INTO CurrentScan (cur_ScanCycle, cur_MAC, + # cur_IP, cur_Vendor, cur_ScanMethod) + # SELECT ?, PH_MAC, PH_IP, PH_Vendor, 'Pi-hole' + # FROM PiHole_Network + # WHERE PH_LastQuery >= ? + # AND NOT EXISTS (SELECT 'X' FROM CurrentScan + # WHERE cur_MAC = PH_MAC + # AND cur_ScanCycle = ? )""", + # (cycle, + # (int(startTime.strftime('%s')) - 60 * p_cycle_interval), + # cycle) ) +# ------------------------ TO CONVERT INTO PLUGIN - # Insert Pi-hole devices - startTime = timeNow() - sql.execute ("""INSERT INTO CurrentScan (cur_ScanCycle, cur_MAC, - cur_IP, cur_Vendor, cur_ScanMethod) - SELECT ?, PH_MAC, PH_IP, PH_Vendor, 'Pi-hole' - FROM PiHole_Network - WHERE PH_LastQuery >= ? - AND NOT EXISTS (SELECT 'X' FROM CurrentScan - WHERE cur_MAC = PH_MAC - AND cur_ScanCycle = ? )""", - (cycle, - (int(startTime.strftime('%s')) - 60 * p_cycle_interval), - cycle) ) # Check Internet connectivity internet_IP = get_internet_IP( conf.DIG_GET_IP_ARG ) # TESTING - Force IP # internet_IP = "" if internet_IP != "" : - sql.execute ("""INSERT INTO CurrentScan (cur_ScanCycle, cur_MAC, cur_IP, cur_Vendor, cur_ScanMethod) - VALUES (?, 'Internet', ?, Null, 'queryDNS') """, (cycle, internet_IP) ) + sql.execute (f"""INSERT INTO CurrentScan (cur_ScanCycle, cur_MAC, cur_IP, cur_Vendor, cur_ScanMethod) + VALUES ( 1, 'Internet', '{internet_IP}', Null, 'queryDNS') """) # #76 Add Local MAC of default local interface # BUGFIX #106 - Device that pialert is running @@ -73,93 +77,67 @@ def save_scanned_devices (db, p_arpscan_devices, p_cycle_interval): local_ip = '0.0.0.0' # Check if local mac has been detected with other methods - sql.execute ("SELECT COUNT(*) FROM CurrentScan WHERE cur_ScanCycle = ? AND cur_MAC = ? ", (cycle, local_mac) ) + sql.execute ("SELECT COUNT(*) FROM CurrentScan WHERE cur_MAC = ? 
", (local_mac) ) if sql.fetchone()[0] == 0 : sql.execute ("INSERT INTO CurrentScan (cur_ScanCycle, cur_MAC, cur_IP, cur_Vendor, cur_ScanMethod) "+ - "VALUES ( ?, ?, ?, Null, 'local_MAC') ", (cycle, local_mac, local_ip) ) + "VALUES ( 1, ?, ?, Null, 'local_MAC') ", (local_mac, local_ip) ) #------------------------------------------------------------------------------- def print_scan_stats (db): sql = db.sql #TO-DO # Devices Detected - sql.execute ("""SELECT COUNT(*) FROM CurrentScan - WHERE cur_ScanCycle = ? """, - (conf.cycle,)) + sql.execute ("""SELECT COUNT(*) FROM CurrentScan""") mylog('verbose', ['[Scan Stats] Devices Detected.......: ', str (sql.fetchone()[0]) ]) # Devices arp-scan - sql.execute ("""SELECT COUNT(*) FROM CurrentScan - WHERE cur_ScanMethod='arp-scan' AND cur_ScanCycle = ? """, - (conf.cycle,)) + sql.execute ("""SELECT COUNT(*) FROM CurrentScan WHERE cur_ScanMethod='arp-scan' """) mylog('verbose', ['[Scan Stats] arp-scan detected..: ', str (sql.fetchone()[0]) ]) # Devices Pi-hole - sql.execute ("""SELECT COUNT(*) FROM CurrentScan - WHERE cur_ScanMethod='PiHole' AND cur_ScanCycle = ? """, - (conf.cycle,)) + sql.execute ("""SELECT COUNT(*) FROM CurrentScan WHERE cur_ScanMethod='PiHole'""") mylog('verbose', ['[Scan Stats] Pi-hole detected...: +' + str (sql.fetchone()[0]) ]) # New Devices sql.execute ("""SELECT COUNT(*) FROM CurrentScan - WHERE cur_ScanCycle = ? - AND NOT EXISTS (SELECT 1 FROM Devices - WHERE dev_MAC = cur_MAC) """, - (conf.cycle,)) + WHERE NOT EXISTS (SELECT 1 FROM Devices + WHERE dev_MAC = cur_MAC) """) mylog('verbose', ['[Scan Stats] New Devices........: ' + str (sql.fetchone()[0]) ]) - # Devices in this ScanCycle - sql.execute ("""SELECT COUNT(*) FROM Devices, CurrentScan - WHERE dev_MAC = cur_MAC AND dev_ScanCycle = cur_ScanCycle - AND dev_ScanCycle = ? """, - (conf.cycle,)) - - mylog('verbose', ['[Scan Stats] Devices in this cycle..: ' + str (sql.fetchone()[0]) ]) - # Down Alerts sql.execute ("""SELECT COUNT(*) FROM Devices - WHERE dev_AlertDeviceDown = 1 - AND dev_ScanCycle = ? + WHERE dev_AlertDeviceDown = 1 AND NOT EXISTS (SELECT 1 FROM CurrentScan WHERE dev_MAC = cur_MAC - AND dev_ScanCycle = cur_ScanCycle) """, - (conf.cycle,)) + AND dev_ScanCycle = cur_ScanCycle) """) mylog('verbose', ['[Scan Stats] Down Alerts........: ' + str (sql.fetchone()[0]) ]) # New Down Alerts sql.execute ("""SELECT COUNT(*) FROM Devices WHERE dev_AlertDeviceDown = 1 - AND dev_PresentLastScan = 1 - AND dev_ScanCycle = ? + AND dev_PresentLastScan = 1 AND NOT EXISTS (SELECT 1 FROM CurrentScan WHERE dev_MAC = cur_MAC - AND dev_ScanCycle = cur_ScanCycle) """, - (conf.cycle,)) + AND dev_ScanCycle = cur_ScanCycle) """) mylog('verbose', ['[Scan Stats] New Down Alerts....: ' + str (sql.fetchone()[0]) ]) # New Connections sql.execute ("""SELECT COUNT(*) FROM Devices, CurrentScan WHERE dev_MAC = cur_MAC AND dev_ScanCycle = cur_ScanCycle - AND dev_PresentLastScan = 0 - AND dev_ScanCycle = ? """, - (conf.cycle,)) + AND dev_PresentLastScan = 0""") mylog('verbose', ['[Scan Stats] New Connections....: ' + str ( sql.fetchone()[0]) ]) # Disconnections sql.execute ("""SELECT COUNT(*) FROM Devices - WHERE dev_PresentLastScan = 1 - AND dev_ScanCycle = ? 
+ WHERE dev_PresentLastScan = 1 AND NOT EXISTS (SELECT 1 FROM CurrentScan WHERE dev_MAC = cur_MAC - AND dev_ScanCycle = cur_ScanCycle) """, - (conf.cycle,)) + AND dev_ScanCycle = cur_ScanCycle) """) mylog('verbose', ['[Scan Stats] Disconnections.....: ' + str ( sql.fetchone()[0]) ]) # IP Changes sql.execute ("""SELECT COUNT(*) FROM Devices, CurrentScan - WHERE dev_MAC = cur_MAC AND dev_ScanCycle = cur_ScanCycle - AND dev_ScanCycle = ? - AND dev_LastIP <> cur_IP """, - (conf.cycle,)) + WHERE dev_MAC = cur_MAC AND dev_ScanCycle = cur_ScanCycle + AND dev_LastIP <> cur_IP """) mylog('verbose', ['[Scan Stats] IP Changes.........: ' + str ( sql.fetchone()[0]) ]) @@ -176,20 +154,18 @@ def create_new_devices (db): eve_PendingAlertEmail) SELECT cur_MAC, cur_IP, ?, 'New Device', cur_Vendor, 1 FROM CurrentScan - WHERE cur_ScanCycle = ? - AND NOT EXISTS (SELECT 1 FROM Devices + WHERE NOT EXISTS (SELECT 1 FROM Devices WHERE dev_MAC = cur_MAC) """, - (startTime, conf.cycle) ) + (startTime) ) mylog('debug','[New Devices] Insert Connection into session table') sql.execute ("""INSERT INTO Sessions (ses_MAC, ses_IP, ses_EventTypeConnection, ses_DateTimeConnection, ses_EventTypeDisconnection, ses_DateTimeDisconnection, ses_StillConnected, ses_AdditionalInfo) SELECT cur_MAC, cur_IP,'Connected',?, NULL , NULL ,1, cur_Vendor FROM CurrentScan - WHERE cur_ScanCycle = ? - AND NOT EXISTS (SELECT 1 FROM Sessions + WHERE NOT EXISTS (SELECT 1 FROM Sessions WHERE ses_MAC = cur_MAC) """, - (startTime, conf.cycle) ) + (startTime) ) # arpscan - Create new devices mylog('debug','[New Devices] 2 Create devices') @@ -236,13 +212,12 @@ def create_new_devices (db): SELECT cur_MAC, '(unknown)', cur_Vendor, cur_IP, ?, ?, {newDevDefaults} FROM CurrentScan - WHERE cur_ScanCycle = ? - AND NOT EXISTS (SELECT 1 FROM Devices + WHERE NOT EXISTS (SELECT 1 FROM Devices WHERE dev_MAC = cur_MAC) """ mylog('debug',f'[New Devices] 2 Create devices SQL: {sqlQuery}') - sql.execute (sqlQuery, (startTime, startTime, conf.cycle) ) + sql.execute (sqlQuery, (startTime, startTime) ) # Pi-hole - Insert events for new devices # NOT STRICYLY NECESARY (Devices can be created through Current_Scan) @@ -326,18 +301,15 @@ def update_devices_data_from_scan (db): WHERE dev_ScanCycle = ? AND dev_PresentLastScan = 0 AND EXISTS (SELECT 1 FROM CurrentScan - WHERE dev_MAC = cur_MAC - AND dev_ScanCycle = cur_ScanCycle) """, - (startTime, conf.cycle)) + WHERE dev_MAC = cur_MAC) """, + (startTime)) # Clean no active devices mylog('debug','[Update Devices] 2 Clean no active devices') sql.execute ("""UPDATE Devices SET dev_PresentLastScan = 0 - WHERE dev_ScanCycle = ? - AND NOT EXISTS (SELECT 1 FROM CurrentScan + WHERE NOT EXISTS (SELECT 1 FROM CurrentScan WHERE dev_MAC = cur_MAC - AND dev_ScanCycle = cur_ScanCycle) """, - (conf.cycle,)) + AND dev_ScanCycle = cur_ScanCycle) """) # Update IP & Vendor mylog('debug','[Update Devices] - 3 LastIP & Vendor') @@ -348,11 +320,9 @@ def update_devices_data_from_scan (db): dev_Vendor = (SELECT cur_Vendor FROM CurrentScan WHERE dev_MAC = cur_MAC AND dev_ScanCycle = cur_ScanCycle) - WHERE dev_ScanCycle = ? 
- AND EXISTS (SELECT 1 FROM CurrentScan + WHERE EXISTS (SELECT 1 FROM CurrentScan WHERE dev_MAC = cur_MAC - AND dev_ScanCycle = cur_ScanCycle) """, - (conf.cycle,)) + AND dev_ScanCycle = cur_ScanCycle) """) # Pi-hole Network - Update (unknown) Name mylog('debug','[Update Devices] - 4 Unknown Name') diff --git a/pialert/initialise.py b/pialert/initialise.py index f9224610..735a6cd3 100755 --- a/pialert/initialise.py +++ b/pialert/initialise.py @@ -59,16 +59,19 @@ def importConfigs (db): # Only import file if the file was modifed since last import. # this avoids time zone issues as we just compare the previous timestamp to the current time stamp + + fileModifiedTime = os.path.getmtime(config_file) + mylog('debug', ['[Import Config] checking config file ']) mylog('debug', ['[Import Config] lastImportedConfFile :', conf.lastImportedConfFile]) - mylog('debug', ['[Import Config] file modified time :', os.path.getmtime(config_file)]) + mylog('debug', ['[Import Config] file modified time :', fileModifiedTime]) - if (os.path.getmtime(config_file) == conf.lastImportedConfFile) : + if (fileModifiedTime == conf.lastImportedConfFile) : mylog('debug', ['[Import Config] skipping config file import']) return - conf.lastImportedConfFile = os.path.getmtime(config_file) + conf.lastImportedConfFile = fileModifiedTime mylog('debug', ['[Import Config] importing config file']) conf.mySettings = [] # reset settings diff --git a/pialert/networkscan.py b/pialert/networkscan.py index b8bb7f19..bc575c2c 100755 --- a/pialert/networkscan.py +++ b/pialert/networkscan.py @@ -36,12 +36,13 @@ def scan_network (db): db.commitDB() + # Moved to the ARPSCAN Plugin # arp-scan command - conf.arpscan_devices = [] - if conf.ENABLE_ARPSCAN: - mylog('verbose','[Network Scan] arp-scan start') - conf.arpscan_devices = execute_arpscan (conf.userSubnets) - mylog('verbose','[Network Scan] arp-scan ends') + # conf.arpscan_devices = [] + # if conf.ENABLE_ARPSCAN: + # mylog('verbose','[Network Scan] arp-scan start') + # conf.arpscan_devices = execute_arpscan (conf.userSubnets) + # mylog('verbose','[Network Scan] arp-scan ends') # Pi-hole method if conf.PIHOLE_ACTIVE : @@ -57,8 +58,7 @@ def scan_network (db): -def process_scan (db, arpscan_devices): - +def process_scan (db): # Query ScanCycle properties scanCycle_data = query_ScanCycle_Data (db, True) @@ -76,7 +76,7 @@ def process_scan (db, arpscan_devices): # Load current scan data mylog('verbose','[Process Scan] Processing scan results') - save_scanned_devices (db, arpscan_devices, cycle_interval) + save_scanned_devices (db) db.commitDB() @@ -85,8 +85,7 @@ def process_scan (db, arpscan_devices): print_scan_stats(db) mylog('none','[Process Scan] Stats end') - # Create Events - mylog('verbose','[Process Scan] Updating DB Info') + # Create Events mylog('verbose','[Process Scan] Sessions Events (connect / discconnect)') insert_events(db) @@ -122,6 +121,9 @@ def process_scan (db, arpscan_devices): # Skip repeated notifications mylog('verbose','[Process Scan] Skipping repeated notifications') skip_repeated_notifications (db) + + # Clear current scan as processed + db.sql.execute ("DELETE FROM CurrentScan") # Commit changes db.commitDB() @@ -279,12 +281,11 @@ def insert_events (db): SELECT dev_MAC, dev_LastIP, ?, 'Device Down', '', 1 FROM Devices WHERE dev_AlertDeviceDown = 1 - AND dev_PresentLastScan = 1 - AND dev_ScanCycle = ? 
+ AND dev_PresentLastScan = 1 AND NOT EXISTS (SELECT 1 FROM CurrentScan WHERE dev_MAC = cur_MAC AND dev_ScanCycle = cur_ScanCycle) """, - (startTime, conf.cycle) ) + (startTime) ) # Check new connections mylog('debug','[Events] - 2 - New Connections') @@ -294,9 +295,8 @@ def insert_events (db): SELECT cur_MAC, cur_IP, ?, 'Connected', '', dev_AlertEvents FROM Devices, CurrentScan WHERE dev_MAC = cur_MAC AND dev_ScanCycle = cur_ScanCycle - AND dev_PresentLastScan = 0 - AND dev_ScanCycle = ? """, - (startTime, conf.cycle) ) + AND dev_PresentLastScan = 0 """, + (startTime) ) # Check disconnections mylog('debug','[Events] - 3 - Disconnections') @@ -308,11 +308,10 @@ def insert_events (db): FROM Devices WHERE dev_AlertDeviceDown = 0 AND dev_PresentLastScan = 1 - AND dev_ScanCycle = ? AND NOT EXISTS (SELECT 1 FROM CurrentScan WHERE dev_MAC = cur_MAC AND dev_ScanCycle = cur_ScanCycle) """, - (startTime, conf.cycle) ) + (startTime) ) # Check IP Changed mylog('debug','[Events] - 4 - IP Changes') @@ -322,8 +321,7 @@ def insert_events (db): SELECT cur_MAC, cur_IP, ?, 'IP Changed', 'Previous IP: '|| dev_LastIP, dev_AlertEvents FROM Devices, CurrentScan - WHERE dev_MAC = cur_MAC AND dev_ScanCycle = cur_ScanCycle - AND dev_ScanCycle = ? + WHERE dev_MAC = cur_MAC AND dev_ScanCycle = cur_ScanCycle AND dev_LastIP <> cur_IP """, - (startTime, conf.cycle) ) + (startTime) ) mylog('debug','[Events] - Events end') \ No newline at end of file diff --git a/pialert/plugin.py b/pialert/plugin.py index 77f59d8d..87846678 100755 --- a/pialert/plugin.py +++ b/pialert/plugin.py @@ -181,8 +181,8 @@ def execute_plugin(db, plugin): # build SQL query parameters to insert into the DB sqlParams = [] - # python-script - if plugin['data_source'] == 'python-script': + # script + if plugin['data_source'] == 'script': # ------- prepare params -------- # prepare command from plugin settings, custom parameters command = resolve_wildcards_arr(set_CMD.split(), params) @@ -203,24 +203,35 @@ def execute_plugin(db, plugin): # check the last run output - f = open(pluginsPath + '/' + plugin["code_name"] + '/last_result.log', 'r+') - newLines = f.read().split('\n') - f.close() + # Initialize newLines + newLines = [] - # cleanup - select only lines containing a separator to filter out unnecessary data - newLines = list(filter(lambda x: '|' in x, newLines)) + # Create the file path + file_path = os.path.join(pluginsPath, plugin["code_name"], 'last_result.log') - # # regular logging - # for line in newLines: - # append_line_to_file (pluginsPath + '/plugin.log', line +'\n') - - for line in newLines: - columns = line.split("|") - # There has to be always 9 columns - if len(columns) == 9: - sqlParams.append((plugin["unique_prefix"], columns[0], columns[1], 'null', columns[2], columns[3], columns[4], columns[5], columns[6], 0, columns[7], 'null', columns[8])) - else: - mylog('none', ['[Plugins]: Skipped invalid line in the output: ', line]) + # Check if the file exists + if os.path.exists(file_path): + # File exists, open it and read its contents + with open(file_path, 'r+') as f: + newLines = f.read().split('\n') + + # if the script produced some outpout, clean it up to ensure it's the correct format + # cleanup - select only lines containing a separator to filter out unnecessary data + newLines = list(filter(lambda x: '|' in x, newLines)) + + # # regular logging + # for line in newLines: + # append_line_to_file (pluginsPath + '/plugin.log', line +'\n') + + for line in newLines: + columns = line.split("|") + # There has to be always 9 columns + 
if len(columns) == 9: + sqlParams.append((plugin["unique_prefix"], columns[0], columns[1], 'null', columns[2], columns[3], columns[4], columns[5], columns[6], 0, columns[7], 'null', columns[8])) + else: + mylog('none', ['[Plugins]: Skipped invalid line in the output: ', line]) + else: + mylog('debug', [f'[Plugins] The file {file_path} does not exist']) # pialert-db-query if plugin['data_source'] == 'pialert-db-query':
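As a closing note on the `script` data-source handling above: `execute_plugin` keeps only `last_result.log` lines that contain a `|` separator and requires exactly 9 columns per row. A minimal sketch of that contract, with a made-up arp-scan style row (the sample values and the assumed column order are illustrative, inferred from how `script.py` populates `Plugin_Objects`, not quoted from the patch):

```python
# Illustrative only: mirrors the last_result.log filtering done in pialert/plugin.py.
# The sample row and the assumed column order
# (primaryId|secondaryId|created|watched1..4|extra|foreignKey) are not verbatim from the patch.
SAMPLE_LOG = """\
ac:de:48:00:11:22|192.168.1.10|2023-08-06 10:50:03|192.168.1.10|Apple, Inc.|eth0||arp-scan|null
some unrelated log output without a separator
"""

def parse_last_result(text):
    rows = []
    for line in text.split('\n'):
        if '|' not in line:        # same cleanup as execute_plugin()
            continue
        columns = line.split('|')
        if len(columns) == 9:      # exactly 9 columns are required
            rows.append(columns)
        else:
            print('Skipped invalid line:', line)
    return rows

if __name__ == '__main__':
    for row in parse_last_result(SAMPLE_LOG):
        print(row)
```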