From 0f634978476bd48ee658485d6ef58faf1817dcc9 Mon Sep 17 00:00:00 2001
From: Data-Monkey
Date: Sun, 21 May 2023 12:08:15 +1000
Subject: [PATCH 01/20] split const and logger from main

---
 front/buildtimestamp.txt     |   1 +
 pialert/__init__.py          |   1 +
 pialert/const.py             |  21 +++++
 pialert/helper.py            |  67 ++++++++++++++
 pialert/logger.py            |  78 +++++++++++++++++
 {back => pialert}/pialert.py | 163 +++--------------------------------
 6 files changed, 182 insertions(+), 149 deletions(-)
 create mode 100644 front/buildtimestamp.txt
 create mode 100644 pialert/__init__.py
 create mode 100644 pialert/const.py
 create mode 100644 pialert/helper.py
 create mode 100644 pialert/logger.py
 rename {back => pialert}/pialert.py (97%)

diff --git a/front/buildtimestamp.txt b/front/buildtimestamp.txt
new file mode 100644
index 00000000..00445f81
--- /dev/null
+++ b/front/buildtimestamp.txt
@@ -0,0 +1 @@
+1684563510
diff --git a/pialert/__init__.py b/pialert/__init__.py
new file mode 100644
index 00000000..ab440f14
--- /dev/null
+++ b/pialert/__init__.py
@@ -0,0 +1 @@
+""" __init__ for Pi.Alert """
\ No newline at end of file
diff --git a/pialert/const.py b/pialert/const.py
new file mode 100644
index 00000000..132a88b8
--- /dev/null
+++ b/pialert/const.py
@@ -0,0 +1,21 @@
+""" CONSTANTS for Pi.Alert """
+
+#===============================================================================
+# PATHS
+#===============================================================================
+# pialertPath = '/home/pi/pialert'
+pialertPath = '/home/roland/repos/Pi.Alert'
+
+confPath = "/config/pialert.conf"
+dbPath = '/db/pialert.db'
+
+
+pluginsPath    = pialertPath + '/front/plugins'
+logPath        = pialertPath + '/front/log'
+fullConfPath   = pialertPath + confPath
+fullDbPath     = pialertPath + dbPath
+fullPholusPath = pialertPath + '/pholus/pholus3.py'
+
+vendorsDB = '/usr/share/arp-scan/ieee-oui.txt'
+piholeDB = '/etc/pihole/pihole-FTL.db'
+piholeDhcpleases = '/etc/pihole/dhcp.leases'
\ No newline at end of file
diff --git a/pialert/helper.py b/pialert/helper.py
new file mode 100644
index 00000000..3423be04
--- /dev/null
+++ b/pialert/helper.py
@@ -0,0 +1,67 @@
+""" Collection of generic functions to support Pi.Alert """
+import datetime
+import os
+import subprocess
+
+from const import *
+from logger import mylog
+
+#-------------------------------------------------------------------------------
+def timeNow():
+    return datetime.datetime.now().replace(microsecond=0)
+
+#-------------------------------------------------------------------------------
+def updateSubnets(SCAN_SUBNETS):
+
+    # remove old list
+    userSubnets = []
+
+    # multiple interfaces
+    if type(SCAN_SUBNETS) is list:
+        for interface in SCAN_SUBNETS :
+            userSubnets.append(interface)
+    # one interface only
+    else:
+        userSubnets.append(SCAN_SUBNETS)
+
+
+
+#-------------------------------------------------------------------------------
+# check RW access of DB and config file
+def checkPermissionsOK():
+    global confR_access, confW_access, dbR_access, dbW_access
+
+    confR_access = (os.access(fullConfPath, os.R_OK))
+    confW_access = (os.access(fullConfPath, os.W_OK))
+    dbR_access   = (os.access(fullDbPath, os.R_OK))
+    dbW_access   = (os.access(fullDbPath, os.W_OK))
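+    # note: os.access() tests against the real (not effective) uid/gid,
+    # so these flags reflect the user the container process was started as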
+
+    mylog('none', ['\n Permissions check (All should be True)'])
+    mylog('none', ['------------------------------------------------'])
+    mylog('none', [ "  " , confPath , " | " , " READ  | " , confR_access])
+    mylog('none', [ "  " , confPath , " | " , " WRITE | " , confW_access])
+    mylog('none', [ "  " , dbPath ,   " | " , " READ  | " , dbR_access])
+    mylog('none', [ "  " , dbPath ,   " | " , " WRITE | " , dbW_access])
+    mylog('none', ['------------------------------------------------'])
+
+    #return dbR_access and dbW_access and confR_access and confW_access
+    return (confR_access, dbR_access)
+#-------------------------------------------------------------------------------
+def fixPermissions():
+    # Try fixing access rights if needed
+    chmodCommands = []
+
+    chmodCommands.append(['sudo', 'chmod', 'a+rw', '-R', fullDbPath])
+    chmodCommands.append(['sudo', 'chmod', 'a+rw', '-R', fullConfPath])
+
+    for com in chmodCommands:
+        # Execute command
+        mylog('none', ["[Setup] Attempting to fix permissions."])
+        try:
+            # try running a subprocess
+            result = subprocess.check_output (com, universal_newlines=True)
+        except subprocess.CalledProcessError as e:
+            # An error occurred, handle it
+            mylog('none', ["[Setup] Fix Failed. Execute this command manually inside of the container: ", ' '.join(com)])
+            mylog('none', [e.output])
diff --git a/pialert/logger.py b/pialert/logger.py
new file mode 100644
index 00000000..f0e6a450
--- /dev/null
+++ b/pialert/logger.py
@@ -0,0 +1,78 @@
+""" Collection of functions to support all logging for Pi.Alert """
+
+import datetime
+from const import *
+
+#-------------------------------------------------------------------------------
+debugLevels = [
+    ('none', 0), ('minimal', 1), ('verbose', 2), ('debug', 3)
+    ]
+LOG_LEVEL = 'debug'
+
+def mylog(requestedDebugLevel, n):
+
+    setLvl = 0
+    reqLvl = 0
+
+    # Get debug urgency/relative weight
+    for lvl in debugLevels:
+        if LOG_LEVEL == lvl[0]:
+            setLvl = lvl[1]
+        if requestedDebugLevel == lvl[0]:
+            reqLvl = lvl[1]
+
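+    # write the message only if its weight is at or below the configured
+    # LOG_LEVEL ('none' has weight 0 and therefore always prints)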
+    if reqLvl <= setLvl:
+        file_print (*n)
+
+#-------------------------------------------------------------------------------
+def file_print (*args):
+
+    result = ''
+
+    for arg in args:
+        result += str(arg)
+    print(result)
+
+    file = open(logPath + "/pialert.log", "a")
+    file.write(result + '\n')
+    file.close()
+
+#-------------------------------------------------------------------------------
+def print_log (pText):
+    global log_timestamp
+
+    # Check LOG actived
+    if not LOG_LEVEL == 'debug' :
+        return
+
+    # Current Time
+    log_timestamp2 = datetime.datetime.now().replace(microsecond=0)
+
+    # Print line + time + elapsed time + text
+    file_print ('[LOG_LEVEL=debug] ',
+        # log_timestamp2, ' ',
+        log_timestamp2.strftime ('%H:%M:%S'), ' ',
+        pText)
+
+
+    # Save current time to calculate elapsed time until next log
+    log_timestamp = log_timestamp2
+
+    return pText
+
+
+
+#-------------------------------------------------------------------------------
+def append_file_binary (pPath, input):
+    file = open (pPath, 'ab')
+    file.write (input)
+    file.close()
+
+
+#-------------------------------------------------------------------------------
+def logResult (stdout, stderr):
+    if stderr != None:
+        append_file_binary (logPath + '/stderr.log', stderr)
+    if stdout != None:
+        append_file_binary (logPath + '/stdout.log', stdout)
+
diff --git a/back/pialert.py b/pialert/pialert.py
similarity index 97%
rename from back/pialert.py
rename to pialert/pialert.py
index 0cc997b1..d763833d 100755
--- a/back/pialert.py
+++ b/pialert/pialert.py
@@ -43,6 +43,12 @@ from json2table import convert
 import hashlib
 import multiprocessing
 
+from const import *
+from logger import mylog, print_log, logResult
+from helper import checkPermissionsOK, fixPermissions, timeNow, updateSubnets
+
+
+
 #===============================================================================
 # SQL queries
 #===============================================================================
@@ -67,22 +73,7 @@ sql_new_devices = """SELECT * FROM ( SELECT eve_IP as dev_LastIP, eve_MAC as dev
     ) t2
     ON t1.dev_MAC = t2.dev_MAC_t2"""
 
-#===============================================================================
-# PATHS
-#===============================================================================
-pialertPath = '/home/pi/pialert'
-confPath = "/config/pialert.conf"
-dbPath = '/db/pialert.db'
-
-pluginsPath  = pialertPath + '/front/plugins'
-logPath      = pialertPath + '/front/log'
-fullConfPath = pialertPath + confPath
-fullDbPath   = pialertPath + dbPath
-
-vendorsDB = '/usr/share/arp-scan/ieee-oui.txt'
-piholeDB = '/etc/pihole/pihole-FTL.db'
-piholeDhcpleases = '/etc/pihole/dhcp.leases'
 
 
 # Global variables
@@ -97,121 +88,6 @@ lastTimeImported = 0
 sql_connection = None
 
 
-#-------------------------------------------------------------------------------
-def timeNow():
-    return datetime.datetime.now().replace(microsecond=0)
-
-#-------------------------------------------------------------------------------
-debugLevels = [
-    ('none', 0), ('minimal', 1), ('verbose', 2), ('debug', 3)
-    ]
-LOG_LEVEL = 'debug'
-
-def mylog(requestedDebugLevel, n):
-
-    setLvl = 0
-    reqLvl = 0
-
-    # Get debug urgency/relative weight
-    for lvl in debugLevels:
-        if LOG_LEVEL == lvl[0]:
-            setLvl = lvl[1]
-        if requestedDebugLevel == lvl[0]:
-            reqLvl = lvl[1]
-
-    if reqLvl <= setLvl:
-        file_print (*n)
-
-#-------------------------------------------------------------------------------
-def file_print (*args):
-
-    result = ''
-
-    for arg in args:
-        result += str(arg)
-    print(result)
-
-    file = open(logPath + "/pialert.log", "a")
-    file.write(result + '\n')
-    file.close()
-
-
-#-------------------------------------------------------------------------------
-def append_file_binary (pPath, input):
-    file = open (pPath, 'ab')
-    file.write (input)
-    file.close()
-
-#-------------------------------------------------------------------------------
-def logResult (stdout, stderr):
-    if stderr != None:
-        append_file_binary (logPath + '/stderr.log', stderr)
-    if stdout != None:
-        append_file_binary (logPath + '/stdout.log', stdout)
-
-#-------------------------------------------------------------------------------
-def print_log (pText):
-    global log_timestamp
-
-    # Check LOG actived
-    if not LOG_LEVEL == 'debug' :
-        return
-
-    # Current Time
-    log_timestamp2 = datetime.datetime.now().replace(microsecond=0)
-
-    # Print line + time + elapsed time + text
-    file_print ('[LOG_LEVEL=debug] ',
-        # log_timestamp2, ' ',
-        log_timestamp2.strftime ('%H:%M:%S'), ' ',
-        pText)
-
-    # Save current time to calculate elapsed time until next log
-    log_timestamp = log_timestamp2
-
-    return pText
-
-#-------------------------------------------------------------------------------
-# check RW access of DB and config file
-def checkPermissionsOK():
-    global confR_access, confW_access, dbR_access, dbW_access
-
-    confR_access = (os.access(fullConfPath, os.R_OK))
-    confW_access = (os.access(fullConfPath, os.W_OK))
-    dbR_access   = (os.access(fullDbPath, os.R_OK))
-    dbW_access   = (os.access(fullDbPath, os.W_OK))
-
-
-    mylog('none', ['\n Permissions check (All should be True)'])
-    mylog('none', ['------------------------------------------------'])
-    mylog('none', [ "  " , confPath , " | " , " READ  | " , confR_access])
-    mylog('none', [ "  " , confPath , " | " , " WRITE | " , confW_access])
-    mylog('none', [ "  " , dbPath ,   " | " , " READ  | " , dbR_access])
dbPath , " | " , " WRITE | " , dbW_access]) - mylog('none', ['------------------------------------------------']) - - return dbR_access and dbW_access and confR_access and confW_access -#------------------------------------------------------------------------------- -def fixPermissions(): - # Try fixing access rights if needed - chmodCommands = [] - - chmodCommands.append(['sudo', 'chmod', 'a+rw', '-R', fullDbPath]) - chmodCommands.append(['sudo', 'chmod', 'a+rw', '-R', fullConfPath]) - - for com in chmodCommands: - # Execute command - mylog('none', ["[Setup] Attempting to fix permissions."]) - try: - # try runnning a subprocess - result = subprocess.check_output (com, universal_newlines=True) - except subprocess.CalledProcessError as e: - # An error occured, handle it - mylog('none', ["[Setup] Fix Failed. Execute this command manually inside of the container: ", ' '.join(com)]) - mylog('none', [e.output]) - - -checkPermissionsOK() # Initial check #------------------------------------------------------------------------------- def initialiseFile(pathToCheck, defaultFile): @@ -229,7 +105,7 @@ def initialiseFile(pathToCheck, defaultFile): mylog('none', ["[Setup] ("+defaultFile+") copied over successfully to ("+pathToCheck+")."]) # write stdout and stderr into .log files for debugging if needed - logResult (stdout, stderr) + logResult (stdout, stderr) # TO-DO should be changed to mylog except subprocess.CalledProcessError as e: # An error occured, handle it @@ -241,6 +117,7 @@ def initialiseFile(pathToCheck, defaultFile): #=============================================================================== # check and initialize pialert.conf +(confR_access, dbR_access) = checkPermissionsOK() # Initial check if confR_access == False: initialiseFile(fullConfPath, "/home/pi/pialert/back/pialert.conf_bak" ) @@ -266,7 +143,7 @@ def openDB (): return # Log - print_log ('Opening DB') + print_log ('Opening DB') # TO-DO should be changed to mylog # Open DB and Cursor sql_connection = sqlite3.connect (fullDbPath, isolation_level=None) @@ -464,7 +341,7 @@ def importConfigs (): mySchedules.append(schedule_class("nmap", nmapSchedule, nmapSchedule.next(), False)) # Format and prepare the list of subnets - updateSubnets() + userSubnets = updateSubnets(SCAN_SUBNETS) # Plugins START # ----------------- @@ -1943,7 +1820,7 @@ def performPholusScan (timeoutSec): adjustedTimeout = str(round(int(timeoutSec) / 2, 0)) # python3 -m trace --trace /home/pi/pialert/pholus/pholus3.py eth1 -rdns_scanning 192.168.1.0/24 -stimeout 600 - pholus_args = ['python3', '/home/pi/pialert/pholus/pholus3.py', interface, "-rdns_scanning", mask, "-stimeout", adjustedTimeout] + pholus_args = ['python3', fullPholusPath, interface, "-rdns_scanning", mask, "-stimeout", adjustedTimeout] # Execute command output = "" @@ -2791,7 +2668,7 @@ def send_webhook (_json, _html): stdout, stderr = p.communicate() # write stdout and stderr into .log files for debugging if needed - logResult (stdout, stderr) + logResult (stdout, stderr) # TO-DO should be changed to mylog except subprocess.CalledProcessError as e: # An error occured, handle it mylog('none', [e.output]) @@ -2816,7 +2693,7 @@ def send_apprise (html, text): p = subprocess.Popen(["curl","-i","-X", "POST" ,"-H", "Content-Type:application/json" ,"-d", json.dumps(_json_payload), APPRISE_HOST], stdout=subprocess.PIPE, stderr=subprocess.STDOUT) stdout, stderr = p.communicate() # write stdout and stderr into .log files for debugging if needed - logResult (stdout, stderr) + logResult (stdout, stderr) # 
     except subprocess.CalledProcessError as e:
         # An error occurred, handle it
         mylog('none', [e.output])
@@ -3049,7 +2926,7 @@ def upgradeDB ():
 
     # Drop table if available, but incompatible
     if onlineHistoryAvailable and isIncompatible:
-        file_print ('[upgradeDB] Table is incompatible, Dropping the Online_History table)')
+        mylog('none','[upgradeDB] Table is incompatible, Dropping the Online_History table')
         sql.execute("DROP TABLE Online_History;")
         onlineHistoryAvailable = False
 
@@ -3492,19 +3369,7 @@ def get_file_content(path):
 
 
 #-------------------------------------------------------------------------------
-def updateSubnets():
-    global userSubnets
-
-    # remove old list
-    userSubnets = []
-
-    # multiple interfaces
-    if type(SCAN_SUBNETS) is list:
-        for interface in SCAN_SUBNETS :
-            userSubnets.append(interface)
-    # one interface only
-    else:
-        userSubnets.append(SCAN_SUBNETS)
 
 
 #-------------------------------------------------------------------------------

From 78f71abd315f05dca8dcaf03e7850b2ac7848ee2 Mon Sep 17 00:00:00 2001
From: Data-Monkey
Date: Sun, 21 May 2023 15:03:16 +1000
Subject: [PATCH 02/20] moved database out

---
 pialert/database.py | 352 +++++++++++++++++++++++++++++++
 pialert/helper.py   |   2 +-
 pialert/pialert.py  | 502 ++++++++------------------------------------
 3 files changed, 440 insertions(+), 416 deletions(-)
 create mode 100644 pialert/database.py

diff --git a/pialert/database.py b/pialert/database.py
new file mode 100644
index 00000000..ccf3db9a
--- /dev/null
+++ b/pialert/database.py
@@ -0,0 +1,352 @@
+""" all things database to support Pi.Alert """
+
+import sqlite3
+
+from const import fullDbPath
+from logger import print_log, mylog
+
+#===============================================================================
+# SQL queries
+#===============================================================================
+sql_devices_all = "select dev_MAC, dev_Name, dev_DeviceType, dev_Vendor, dev_Group, dev_FirstConnection, dev_LastConnection, dev_LastIP, dev_StaticIP, dev_PresentLastScan, dev_LastNotification, dev_NewDevice, dev_Network_Node_MAC_ADDR, dev_Network_Node_port, dev_Icon from Devices"
+sql_devices_stats = "SELECT Online_Devices as online, Down_Devices as down, All_Devices as 'all', Archived_Devices as archived, (select count(*) from Devices a where dev_NewDevice = 1 ) as new, (select count(*) from Devices a where dev_Name = '(unknown)' or dev_Name = '(name not found)' ) as unknown from Online_History order by Scan_Date desc limit 1"
+sql_nmap_scan_all = "SELECT * FROM Nmap_Scan"
+sql_pholus_scan_all = "SELECT * FROM Pholus_Scan"
+sql_events_pending_alert = "SELECT * FROM Events where eve_PendingAlertEmail is not 0"
+sql_settings = "SELECT * FROM Settings"
+sql_plugins_objects = "SELECT * FROM Plugins_Objects"
+sql_language_strings = "SELECT * FROM Plugins_Language_Strings"
+sql_plugins_events = "SELECT * FROM Plugins_Events"
+sql_plugins_history = "SELECT * FROM Plugins_History ORDER BY 'Index' DESC"
+sql_new_devices = """SELECT * FROM ( SELECT eve_IP as dev_LastIP, eve_MAC as dev_MAC FROM Events_Devices
+                        WHERE eve_PendingAlertEmail = 1
+                        AND eve_EventType = 'New Device'
+                        ORDER BY eve_DateTime ) t1
+                     LEFT JOIN
+                        (
+                        SELECT dev_Name, dev_MAC as dev_MAC_t2 FROM Devices
+                        ) t2
+                     ON t1.dev_MAC = t2.dev_MAC_t2"""
+
+
+class DB():
+
+    def __init__(self):
+        self.sql = None
+        self.sql_connection = None
+
+    def openDB (self):
+        # Check if DB is open
+        if self.sql_connection != None :
+            mylog('debug','openDB: database already open')
+            return
+
+        mylog('none', 'Opening DB' )
+        # Open DB and Cursor
+        self.sql_connection = sqlite3.connect (fullDbPath, isolation_level=None)
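+        # WAL journal mode allows concurrent readers (e.g. the web frontend)
+        # while this process writes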
+        self.sql_connection.execute('pragma journal_mode=wal')
+        self.sql_connection.text_factory = str
+        self.sql_connection.row_factory = sqlite3.Row
+        self.sql = self.sql_connection.cursor()
+
+
+    #-------------------------------------------------------------------------------
+    def commitDB (self):
+        if self.sql_connection == None :
+            mylog('debug','commitDB: database is not open')
+            return
+        mylog('debug','commitDB: committing DB changes')
+
+        # Commit changes to DB
+        self.sql_connection.commit()
+
+    #-------------------------------------------------------------------------------
+    def get_sql_array(self, query):
+        if self.sql_connection == None :
+            mylog('debug','get_sql_array: database is not open')
+            return
+
+        self.sql.execute(query)
+        rows = self.sql.fetchall()
+        self.commitDB()
+
+        # convert result into list of lists
+        arr = []
+        for row in rows:
+            r_temp = []
+            for column in row:
+                r_temp.append(column)
+            arr.append(r_temp)
+
+        return arr
+
+
+
+
+
+
+#-------------------------------------------------------------------------------
+def initOrSetParam(db, parID, parValue):
+    sql_connection = db.sql_connection
+    sql = db.sql
+
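+    # single-statement upsert: INSERT ... ON CONFLICT(par_ID) DO UPDATE keeps
+    # exactly one row per parameter ID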
+    sql.execute ("INSERT INTO Parameters(par_ID, par_Value) VALUES('"+str(parID)+"', '"+str(parValue)+"') ON CONFLICT(par_ID) DO UPDATE SET par_Value='"+str(parValue)+"' where par_ID='"+str(parID)+"'")
+
+    db.commitDB()
+
+#-------------------------------------------------------------------------------
+def updateState(db, newState):
+
+    db.sql.execute ("UPDATE Parameters SET par_Value='"+ newState +"' WHERE par_ID='Back_App_State'")
+
+    db.commitDB()
+
+
+
+#-------------------------------------------------------------------------------
+def upgradeDB(db: DB):
+    sql = db.sql #TO-DO
+
+    # indicates, if Online_History table is available
+    onlineHistoryAvailable = db.sql.execute("""
+        SELECT name FROM sqlite_master WHERE type='table'
+        AND name='Online_History';
+        """).fetchall() != []
+
+    # Check if it is incompatible (Check if table has all required columns)
+    isIncompatible = False
+
+    if onlineHistoryAvailable :
+        isIncompatible = sql.execute ("""
+            SELECT COUNT(*) AS CNTREC FROM pragma_table_info('Online_History') WHERE name='Archived_Devices'
+            """).fetchone()[0] == 0
+
+    # Drop table if available, but incompatible
+    if onlineHistoryAvailable and isIncompatible:
+        mylog('none','[upgradeDB] Table is incompatible, Dropping the Online_History table')
+        sql.execute("DROP TABLE Online_History;")
+        onlineHistoryAvailable = False
+
+    if onlineHistoryAvailable == False :
+        sql.execute("""
+            CREATE TABLE "Online_History" (
+                "Index" INTEGER,
+                "Scan_Date" TEXT,
+                "Online_Devices" INTEGER,
+                "Down_Devices" INTEGER,
+                "All_Devices" INTEGER,
+                "Archived_Devices" INTEGER,
+                PRIMARY KEY("Index" AUTOINCREMENT)
+            );
+            """)
+
+    # Alter Devices table
+    # dev_Network_Node_MAC_ADDR column
+    dev_Network_Node_MAC_ADDR_missing = sql.execute ("""
+        SELECT COUNT(*) AS CNTREC FROM pragma_table_info('Devices') WHERE name='dev_Network_Node_MAC_ADDR'
+        """).fetchone()[0] == 0
+
+    if dev_Network_Node_MAC_ADDR_missing :
+        mylog('verbose', ["[upgradeDB] Adding dev_Network_Node_MAC_ADDR to the Devices table"])
+        sql.execute("""
+            ALTER TABLE "Devices" ADD "dev_Network_Node_MAC_ADDR" TEXT
+            """)
+
+    # dev_Network_Node_port column
+    dev_Network_Node_port_missing = sql.execute ("""
+        SELECT COUNT(*) AS CNTREC FROM pragma_table_info('Devices') WHERE name='dev_Network_Node_port'
+        """).fetchone()[0] == 0
+
+    if dev_Network_Node_port_missing :
+        mylog('verbose', ["[upgradeDB] Adding dev_Network_Node_port to the Devices table"])
+        sql.execute("""
+            ALTER TABLE "Devices" ADD "dev_Network_Node_port" INTEGER
+            """)
+
+    # dev_Icon column
+    dev_Icon_missing = sql.execute ("""
+        SELECT COUNT(*) AS CNTREC FROM pragma_table_info('Devices') WHERE name='dev_Icon'
+        """).fetchone()[0] == 0
+
+    if dev_Icon_missing :
+        mylog('verbose', ["[upgradeDB] Adding dev_Icon to the Devices table"])
+        sql.execute("""
+            ALTER TABLE "Devices" ADD "dev_Icon" TEXT
+            """)
+
+    # indicates, if Settings table is available
+    settingsMissing = sql.execute("""
+        SELECT name FROM sqlite_master WHERE type='table'
+        AND name='Settings';
+        """).fetchone() == None
+
+    # Re-creating Settings table
+    mylog('verbose', ["[upgradeDB] Re-creating Settings table"])
+
+    if settingsMissing == False:
+        sql.execute("DROP TABLE Settings;")
+
+    sql.execute("""
+        CREATE TABLE "Settings" (
+            "Code_Name" TEXT,
+            "Display_Name" TEXT,
+            "Description" TEXT,
+            "Type" TEXT,
+            "Options" TEXT,
+            "RegEx" TEXT,
+            "Value" TEXT,
+            "Group" TEXT,
+            "Events" TEXT
+        );
+        """)
+
+    # indicates, if Pholus_Scan table is available
+    pholusScanMissing = sql.execute("""
+        SELECT name FROM sqlite_master WHERE type='table'
+        AND name='Pholus_Scan';
+        """).fetchone() == None
+
+    # if pholusScanMissing == False:
+    #     # Re-creating Pholus_Scan table
+    #     sql.execute("DROP TABLE Pholus_Scan;")
+    #     pholusScanMissing = True
+
+    if pholusScanMissing:
+        mylog('verbose', ["[upgradeDB] Re-creating Pholus_Scan table"])
+        sql.execute("""
+            CREATE TABLE "Pholus_Scan" (
+                "Index" INTEGER,
+                "Info" TEXT,
+                "Time" TEXT,
+                "MAC" TEXT,
+                "IP_v4_or_v6" TEXT,
+                "Record_Type" TEXT,
+                "Value" TEXT,
+                "Extra" TEXT,
+                PRIMARY KEY("Index" AUTOINCREMENT)
+            );
+            """)
+
+    # indicates, if Nmap_Scan table is available
+    nmapScanMissing = sql.execute("""
+        SELECT name FROM sqlite_master WHERE type='table'
+        AND name='Nmap_Scan';
+        """).fetchone() == None
+
+    # Re-creating Parameters table
+    mylog('verbose', ["[upgradeDB] Re-creating Parameters table"])
+    sql.execute("DROP TABLE Parameters;")
+
+    sql.execute("""
+        CREATE TABLE "Parameters" (
+            "par_ID" TEXT PRIMARY KEY,
+            "par_Value" TEXT
+        );
+        """)
+
+    # Initialize Parameters if unavailable
+    initOrSetParam(db, 'Back_App_State','Initializing')
+
+    # if nmapScanMissing == False:
+    #     # Re-creating Nmap_Scan table
+    #     sql.execute("DROP TABLE Nmap_Scan;")
+    #     nmapScanMissing = True
+
+    if nmapScanMissing:
+        mylog('verbose', ["[upgradeDB] Re-creating Nmap_Scan table"])
+        sql.execute("""
+            CREATE TABLE "Nmap_Scan" (
+                "Index" INTEGER,
+                "MAC" TEXT,
+                "Port" TEXT,
+                "Time" TEXT,
+                "State" TEXT,
+                "Service" TEXT,
+                "Extra" TEXT,
+                PRIMARY KEY("Index" AUTOINCREMENT)
+            );
+            """)
+
+    # Plugin state
+    sql_Plugins_Objects = """ CREATE TABLE IF NOT EXISTS Plugins_Objects(
+        "Index" INTEGER,
+        Plugin TEXT NOT NULL,
+        Object_PrimaryID TEXT NOT NULL,
+        Object_SecondaryID TEXT NOT NULL,
+        DateTimeCreated TEXT NOT NULL,
+        DateTimeChanged TEXT NOT NULL,
+        Watched_Value1 TEXT NOT NULL,
+        Watched_Value2 TEXT NOT NULL,
+        Watched_Value3 TEXT NOT NULL,
+        Watched_Value4 TEXT NOT NULL,
+        Status TEXT NOT NULL,
+        Extra TEXT NOT NULL,
+        UserData TEXT NOT NULL,
+        ForeignKey TEXT NOT NULL,
+        PRIMARY KEY("Index" AUTOINCREMENT)
+    ); """
+    sql.execute(sql_Plugins_Objects)
+
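+    # Plugins_Events and Plugins_History below deliberately use the same
+    # column layout as Plugins_Objects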
+    # Plugin execution results
+    sql_Plugins_Events = """ CREATE TABLE IF NOT EXISTS Plugins_Events(
+        "Index" INTEGER,
+        Plugin TEXT NOT NULL,
+        Object_PrimaryID TEXT NOT NULL,
+        Object_SecondaryID TEXT NOT NULL,
+        DateTimeCreated TEXT NOT NULL,
+        DateTimeChanged TEXT NOT NULL,
+        Watched_Value1 TEXT NOT NULL,
+        Watched_Value2 TEXT NOT NULL,
+        Watched_Value3 TEXT NOT NULL,
+        Watched_Value4 TEXT NOT NULL,
+        Status TEXT NOT NULL,
+        Extra TEXT NOT NULL,
+        UserData TEXT NOT NULL,
+        ForeignKey TEXT NOT NULL,
+        PRIMARY KEY("Index" AUTOINCREMENT)
+    ); """
+    sql.execute(sql_Plugins_Events)
+
+    # Plugin execution history
+    sql_Plugins_History = """ CREATE TABLE IF NOT EXISTS Plugins_History(
+        "Index" INTEGER,
+        Plugin TEXT NOT NULL,
+        Object_PrimaryID TEXT NOT NULL,
+        Object_SecondaryID TEXT NOT NULL,
+        DateTimeCreated TEXT NOT NULL,
+        DateTimeChanged TEXT NOT NULL,
+        Watched_Value1 TEXT NOT NULL,
+        Watched_Value2 TEXT NOT NULL,
+        Watched_Value3 TEXT NOT NULL,
+        Watched_Value4 TEXT NOT NULL,
+        Status TEXT NOT NULL,
+        Extra TEXT NOT NULL,
+        UserData TEXT NOT NULL,
+        ForeignKey TEXT NOT NULL,
+        PRIMARY KEY("Index" AUTOINCREMENT)
+    ); """
+    sql.execute(sql_Plugins_History)
+
+    # Dynamically generated language strings
+    # indicates, if Language_Strings table is available
+    languageStringsMissing = sql.execute("""
+        SELECT name FROM sqlite_master WHERE type='table'
+        AND name='Plugins_Language_Strings';
+        """).fetchone() == None
+
+    if languageStringsMissing == False:
+        sql.execute("DROP TABLE Plugins_Language_Strings;")
+
+    sql.execute(""" CREATE TABLE IF NOT EXISTS Plugins_Language_Strings(
+        "Index" INTEGER,
+        Language_Code TEXT NOT NULL,
+        String_Key TEXT NOT NULL,
+        String_Value TEXT NOT NULL,
+        Extra TEXT NOT NULL,
+        PRIMARY KEY("Index" AUTOINCREMENT)
+    ); """)
+
+    db.commitDB()
+
+
diff --git a/pialert/helper.py b/pialert/helper.py
index 3423be04..b92fdc57 100644
--- a/pialert/helper.py
+++ b/pialert/helper.py
@@ -29,7 +29,7 @@ def updateSubnets(SCAN_SUBNETS):
 #-------------------------------------------------------------------------------
 # check RW access of DB and config file
 def checkPermissionsOK():
-    global confR_access, confW_access, dbR_access, dbW_access
+    #global confR_access, confW_access, dbR_access, dbW_access
 
     confR_access = (os.access(fullConfPath, os.R_OK))
     confW_access = (os.access(fullConfPath, os.W_OK))
diff --git a/pialert/pialert.py b/pialert/pialert.py
index d763833d..f6a80896 100755
--- a/pialert/pialert.py
+++ b/pialert/pialert.py
@@ -46,33 +46,7 @@ import multiprocessing
 from const import *
 from logger import mylog, print_log, logResult
 from helper import checkPermissionsOK, fixPermissions, timeNow, updateSubnets
-
-
-
-#===============================================================================
-# SQL queries
-#===============================================================================
-
-sql_devices_all = "select dev_MAC, dev_Name, dev_DeviceType, dev_Vendor, dev_Group, dev_FirstConnection, dev_LastConnection, dev_LastIP, dev_StaticIP, dev_PresentLastScan, dev_LastNotification, dev_NewDevice, dev_Network_Node_MAC_ADDR, dev_Network_Node_port, dev_Icon from Devices"
-sql_devices_stats = "SELECT Online_Devices as online, Down_Devices as down, All_Devices as 'all', Archived_Devices as archived, (select count(*) from Devices a where dev_NewDevice = 1 ) as new, (select count(*) from Devices a where dev_Name = '(unknown)' or dev_Name = '(name not found)' ) as unknown from Online_History order by Scan_Date desc limit 1"
-sql_nmap_scan_all = "SELECT * FROM Nmap_Scan"
-sql_pholus_scan_all = "SELECT * FROM Pholus_Scan"
-sql_events_pending_alert = "SELECT * FROM Events where eve_PendingAlertEmail is not 0"
-sql_settings = "SELECT * FROM Settings"
FROM Settings" -sql_plugins_objects = "SELECT * FROM Plugins_Objects" -sql_language_strings = "SELECT * FROM Plugins_Language_Strings" -sql_plugins_events = "SELECT * FROM Plugins_Events" -sql_plugins_history = "SELECT * FROM Plugins_History ORDER BY 'Index' DESC" -sql_new_devices = """SELECT * FROM ( SELECT eve_IP as dev_LastIP, eve_MAC as dev_MAC FROM Events_Devices - WHERE eve_PendingAlertEmail = 1 - AND eve_EventType = 'New Device' - ORDER BY eve_DateTime ) t1 - LEFT JOIN - ( - SELECT dev_Name, dev_MAC as dev_MAC_t2 FROM Devices - ) t2 - ON t1.dev_MAC = t2.dev_MAC_t2""" - +from database import * # Global variables @@ -118,6 +92,7 @@ def initialiseFile(pathToCheck, defaultFile): # check and initialize pialert.conf (confR_access, dbR_access) = checkPermissionsOK() # Initial check + if confR_access == False: initialiseFile(fullConfPath, "/home/pi/pialert/back/pialert.conf_bak" ) @@ -134,38 +109,7 @@ fixPermissions() # We need access to the DB to save new values so need to define DB access methods first #------------------------------------------------------------------------------- -def openDB (): - global sql_connection - global sql - # Check if DB is open - if sql_connection != None : - return - - # Log - print_log ('Opening DB') # TO-DO should be changed to mylog - - # Open DB and Cursor - sql_connection = sqlite3.connect (fullDbPath, isolation_level=None) - sql_connection.execute('pragma journal_mode=wal') # - sql_connection.text_factory = str - sql_connection.row_factory = sqlite3.Row - sql = sql_connection.cursor() - -#------------------------------------------------------------------------------- -def commitDB (): - global sql_connection - global sql - - # Check if DB is open - if sql_connection == None : - return - - # Log - # print_log ('Commiting DB changes') - - # Commit changes to DB - sql_connection.commit() #------------------------------------------------------------------------------- # Import user values @@ -189,7 +133,7 @@ def ccd(key, default, config, name, inputtype, options, group, events=[], desc = #------------------------------------------------------------------------------- -def importConfigs (): +def importConfigs (db): # Specify globals so they can be overwritten with the new config global lastTimeImported, mySettings, mySettingsSQLsafe, plugins, plugins_once_run @@ -359,7 +303,7 @@ def importConfigs (): # if plugin["enabled"] == 'true': # collect plugin level language strings - collect_lang_strings(plugin, pref) + collect_lang_strings(db, plugin, pref) for set in plugin["settings"]: setFunction = set["function"] @@ -377,7 +321,7 @@ def importConfigs (): mySchedules.append(schedule_class(pref, newSchedule, newSchedule.next(), False)) # Collect settings related language strings - collect_lang_strings(set, pref + "_" + set["function"]) + collect_lang_strings(db, set, pref + "_" + set["function"]) plugins_once_run = False # ----------------- @@ -398,7 +342,8 @@ def importConfigs (): # Is used to display a message in the UI when old (outdated) settings are loaded initOrSetParam("Back_Settings_Imported",(round(time.time() * 1000),) ) - commitDB() + #commitDB(sql_connection) + db.commitDB() # update only the settings datasource update_api(False, ["settings"]) @@ -431,16 +376,19 @@ def main (): # second set of global variables global startTime, log_timestamp, sql_connection, sql, plugins_once_run - # DB - sql_connection = None - sql = None - # Open DB once and keep open # Opening / closing DB frequently actually casues more issues - openDB() # main + db = DB() + 
+    print(db, db.sql, db.sql_connection )
+    db.openDB()
+    print(db, db.sql, db.sql_connection )
+
+    # To-Do replace the following two lines with the db dict or class
+    sql_connection = db.sql_connection
+    sql = db.sql
 
     # Upgrade DB if needed
-    upgradeDB()
+    upgradeDB(db)
 
     while True:
 
@@ -448,7 +396,7 @@ def main ():
         time_started = datetime.datetime.now()
 
         # re-load user configuration and plugins
-        importConfigs()
+        importConfigs(db)
 
         # Handle plugins executed ONCE
         if ENABLE_PLUGINS and plugins_once_run == False:
@@ -468,7 +416,7 @@ def main ():
             last_run = time_started
 
             # Header
-            updateState("Process: Start")
+            updateState(db,"Process: Start")
             mylog('verbose', ['[', timeNow(), '] Process: Start'])
 
             # Timestamp
@@ -565,7 +513,7 @@ def main ():
             # Check if new devices found
            sql.execute (sql_new_devices)
             newDevices = sql.fetchall()
-            commitDB()
+            db.commitDB()
 
             # new devices were found
             if len(newDevices) > 0:
@@ -587,7 +535,7 @@ def main ():
                 cleanup_database()
 
             # Commit SQL
-            commitDB()
+            db.commitDB()
 
             # Final message
             if cycle != "":
@@ -598,7 +546,7 @@ def main ():
                 cycle = ""
 
             # Footer
-            updateState("Process: Wait")
+            updateState(db,"Process: Wait")
             mylog('verbose', ['[', timeNow(), '] Process: Wait'])
         else:
             # do something
@@ -614,7 +562,7 @@ def main ():
 def check_internet_IP ():
 
     # Header
-    updateState("Scan: Internet IP")
+    updateState(sql_connection,"Scan: Internet IP")
     mylog('verbose', ['[', startTime, '] Check Internet IP:'])
 
     # Get Internet IP
@@ -720,7 +668,7 @@ def set_dynamic_DNS_IP ():
     return curl_output
 
 #-------------------------------------------------------------------------------
-def get_previous_internet_IP ():
+def get_previous_internet_IP (db):
 
     previous_IP = '0.0.0.0'
 
@@ -728,7 +676,7 @@
     sql.execute ("SELECT dev_LastIP FROM Devices WHERE dev_MAC = 'Internet' ")
     result = sql.fetchone()
-    commitDB()
+    db.commitDB()
 
     if result is not None and len(result) > 0 :
         previous_IP = result[0]
@@ -737,7 +685,7 @@
     return previous_IP
 
 #-------------------------------------------------------------------------------
-def save_new_internet_IP (pNewIP):
+def save_new_internet_IP (db, pNewIP):
     # Log new IP into logfile
     append_line_to_file (logPath + '/IP_changes.log', '['+str(startTime) +']\t'+ pNewIP +'\n')
@@ -757,7 +705,7 @@
                     (pNewIP,) )
 
     # commit changes
-    commitDB()
+    db.commitDB()
 
 #-------------------------------------------------------------------------------
 def check_IP_format (pIP):
@@ -777,9 +725,9 @@
 #===============================================================================
 # Cleanup / upkeep database
 #===============================================================================
-def cleanup_database ():
+def cleanup_database (db):
     # Header
-    updateState("Upkeep: Clean DB")
+    updateState(sql_connection,"Upkeep: Clean DB")
     mylog('verbose', ['[', startTime, '] Upkeep Database:' ])
 
     # Cleanup Online History
@@ -825,14 +773,14 @@
     mylog('verbose', ['    Shrink Database'])
     sql.execute ("VACUUM;")
 
-    commitDB()
+    db.commitDB()
 
 #===============================================================================
 # UPDATE DEVICE MAC VENDORS
 #===============================================================================
-def update_devices_MAC_vendors (pArg = ''):
+def update_devices_MAC_vendors (db, pArg = ''):
     # Header
-    updateState("Upkeep: Vendors")
+    updateState(sql_connection,"Upkeep: Vendors")
     mylog('verbose', ['[', startTime, '] Upkeep - Update HW Vendors:' ])
 
     # Update vendors DB (iab oui)
@@ -878,7 +826,7 @@ def update_devices_MAC_vendors (pArg = ''):
                          recordsToUpdate )
 
     # Commit DB
-    commitDB()
+    db.commitDB()
 
     if len(recordsToUpdate) > 0:
         return True
@@ -920,11 +868,11 @@ def query_MAC_vendor (pMAC):
 #===============================================================================
 # SCAN NETWORK
 #===============================================================================
-def scan_network ():
+def scan_network (db):
 
     reporting = False
 
     # Header
-    updateState("Scan: Network")
+    updateState(sql_connection,"Scan: Network")
     mylog('verbose', ['[', startTime, '] Scan Devices:' ])
 
     # Query ScanCycle properties
@@ -935,7 +883,7 @@ def scan_network ():
         mylog('none', ['    Exiting...\n'])
         return False
 
-    commitDB()
+    db.commitDB()
 
     # ScanCycle data
     cycle_interval  = scanCycle_data['cic_EveryXmin']
@@ -951,13 +899,13 @@ def scan_network ():
     if PIHOLE_ACTIVE :
         mylog('verbose', ['    Pi-hole start'])
         copy_pihole_network()
-        commitDB()
+        db.commitDB()
 
     # DHCP Leases method
     if DHCP_ACTIVE :
         mylog('verbose', ['    DHCP Leases start'])
         read_DHCP_leases ()
-        commitDB()
+        db.commitDB()
 
     # Load current scan data
     mylog('verbose', ['    Processing scan results'])
@@ -1007,7 +955,7 @@ def scan_network ():
     skip_repeated_notifications ()
 
     # Commit changes
-    commitDB()
+    db.commitDB()
 
     # Run plugin scripts which are set to run every time after a scan finished
     if ENABLE_PLUGINS:
@@ -1536,7 +1484,7 @@ def update_devices_data_from_scan ():
     print_log ('Update devices end')
 
 #-------------------------------------------------------------------------------
-def update_devices_names ():
+def update_devices_names (db):
     # Initialize variables
     recordsToUpdate = []
     recordsNotFound = []
@@ -1550,7 +1498,7 @@ def update_devices_names ():
     # BUGFIX #97 - Updating name of Devices w/o IP
     sql.execute ("SELECT * FROM Devices WHERE dev_Name IN ('(unknown)','', '(name not found)') AND dev_LastIP <> '-'")
     unknownDevices = sql.fetchall()
-    commitDB()
+    db.commitDB()
 
     # perform Pholus scan if (unknown) devices found
     if PHOLUS_ACTIVE and (len(unknownDevices) > 0 or PHOLUS_FORCE):
@@ -1566,7 +1514,7 @@ def update_devices_names ():
     # get names from Pholus scan
     sql.execute ('SELECT * FROM Pholus_Scan where "Record_Type"="Answer"')
     pholusResults = list(sql.fetchall())
-    commitDB()
+    db.commitDB()
 
     # Number of entries from previous Pholus scans
     mylog('verbose', ['        Pholus entries from prev scans: ', len(pholusResults)])
@@ -1603,11 +1551,11 @@ def update_devices_names ():
     sql.executemany ("UPDATE Devices SET dev_Name = ? WHERE dev_MAC = ? ", recordsNotFound )
     # update names of devices which we were able to resolve
", recordsToUpdate ) - commitDB() + db.commitDB() #------------------------------------------------------------------------------- -def performNmapScan(devicesToScan): +def performNmapScan(db, devicesToScan): global changedPorts_json_struc @@ -1619,7 +1567,7 @@ def performNmapScan(devicesToScan): devTotal = len(devicesToScan) - updateState("Scan: Nmap") + updateState(sql_connection,"Scan: Nmap") mylog('verbose', ['[', timeNow(), '] Scan: Nmap for max ', str(timeoutSec), 's ('+ str(round(int(timeoutSec) / 60, 1)) +'min) per device']) @@ -1777,11 +1725,11 @@ def performNmapScan(devicesToScan): # Delete old entries if available if len(indexesToDelete) > 0: sql.execute ("DELETE FROM Nmap_Scan where \"Index\" in (" + indexesToDelete[:-1] +")") - commitDB () + db.commitDB() # Insert new values into the DB sql.executemany ("""INSERT INTO Nmap_Scan ("MAC", "Time", "Port", "State", "Service", "Extra") VALUES (?, ?, ?, ?, ?, ?)""", params) - commitDB () + db.commitDB() #------------------------------------------------------------------------------- class nmap_entry: @@ -1797,7 +1745,7 @@ class nmap_entry: self.hash = str(mac) + str(port)+ str(state)+ str(service) #------------------------------------------------------------------------------- -def performPholusScan (timeoutSec): +def performPholusScan (db, timeoutSec): # scan every interface for subnet in userSubnets: @@ -1812,7 +1760,7 @@ def performPholusScan (timeoutSec): interface = temp[1].strip() # logging & updating app state - updateState("Scan: Pholus") + updateState(sql_connection,"Scan: Pholus") mylog('info', ['[', timeNow(), '] Scan: Pholus for ', str(timeoutSec), 's ('+ str(round(int(timeoutSec) / 60, 1)) +'min)']) mylog('verbose', [" Pholus scan on [interface] ", interface, " [mask] " , mask]) @@ -1858,7 +1806,7 @@ def performPholusScan (timeoutSec): if len(params) > 0: sql.executemany ("""INSERT INTO Pholus_Scan ("Info", "Time", "MAC", "IP_v4_or_v6", "Record_Type", "Value", "Extra") VALUES (?, ?, ?, ?, ?, ?, ?)""", params) - commitDB () + db.commitDB() #------------------------------------------------------------------------------- def cleanResult(str): @@ -1990,7 +1938,7 @@ def resolve_device_name_dig (pMAC, pIP): return -1 #------------------------------------------------------------------------------- -def void_ghost_disconnections (): +def void_ghost_disconnections (db): # Void connect ghost events (disconnect event exists in last X min.) 
     print_log ('Void - 1 Connect ghost events')
     sql.execute ("""UPDATE Events SET eve_PairEventRowid = Null,
@@ -2049,10 +1997,10 @@ def void_ghost_disconnections ():
                         ) """, (cycle, startTime) )
 
     print_log ('Void end')
-    commitDB()
+    db.commitDB()
 
 #-------------------------------------------------------------------------------
-def pair_sessions_events ():
+def pair_sessions_events (db):
 
     # NOT NECESSARY FOR INCREMENTAL UPDATE
     # print_log ('Pair session - 1 Clean')
     # sql.execute ("""UPDATE Events
@@ -2088,10 +2036,10 @@ def pair_sessions_events ():
                     """ )
 
     print_log ('Pair session end')
-    commitDB()
+    db.commitDB()
 
 #-------------------------------------------------------------------------------
-def create_sessions_snapshot ():
+def create_sessions_snapshot (db):
 
     # Clean sessions snapshot
     print_log ('Sessions Snapshot - 1 Clean')
@@ -2103,12 +2051,12 @@ def create_sessions_snapshot ():
                     SELECT * FROM Convert_Events_to_Sessions""" )
 
     print_log ('Sessions end')
-    commitDB()
+    db.commitDB()
 
 #-------------------------------------------------------------------------------
-def skip_repeated_notifications ():
+def skip_repeated_notifications (db):
 
     # Skip repeated notifications
     # due strftime : Overflow --> use "strftime / 60"
@@ -2126,7 +2074,7 @@ def skip_repeated_notifications ():
                     """ )
 
     print_log ('Skip Repeated end')
-    commitDB()
+    db.commitDB()
 
 
 #===============================================================================
# create a json for webhook and mqtt notifications to provide further integration options
 json_final = []
 
@@ -2135,7 +2083,7 @@
-def send_notifications ():
+def send_notifications (db):
 
     global mail_text, mail_html, json_final, changedPorts_json_struc, partial_html, partial_txt, partial_json
 
     deviceUrl = REPORT_DASHBOARD_URL + '/deviceDetails.php?mac='
@@ -2303,38 +2251,38 @@ def send_notifications ():
         mylog('none', ['    Changes detected, sending reports'])
 
         if REPORT_MAIL and check_config('email'):
-            updateState("Send: Email")
+            updateState(sql_connection,"Send: Email")
             mylog('info', ['    Sending report by Email'])
             send_email (mail_text, mail_html)
         else :
             mylog('verbose', ['    Skip email'])
         if REPORT_APPRISE and check_config('apprise'):
-            updateState("Send: Apprise")
+            updateState(sql_connection,"Send: Apprise")
             mylog('info', ['    Sending report by Apprise'])
             send_apprise (mail_html, mail_text)
        else :
             mylog('verbose', ['    Skip Apprise'])
         if REPORT_WEBHOOK and check_config('webhook'):
-            updateState("Send: Webhook")
+            updateState(sql_connection,"Send: Webhook")
             mylog('info', ['    Sending report by Webhook'])
             send_webhook (json_final, mail_text)
         else :
             mylog('verbose', ['    Skip webhook'])
         if REPORT_NTFY and check_config('ntfy'):
-            updateState("Send: NTFY")
+            updateState(sql_connection,"Send: NTFY")
             mylog('info', ['    Sending report by NTFY'])
             send_ntfy (mail_text)
         else :
             mylog('verbose', ['    Skip NTFY'])
         if REPORT_PUSHSAFER and check_config('pushsafer'):
-            updateState("Send: PUSHSAFER")
+            updateState(sql_connection,"Send: PUSHSAFER")
             mylog('info', ['    Sending report by PUSHSAFER'])
             send_pushsafer (mail_text)
         else :
             mylog('verbose', ['    Skip PUSHSAFER'])
         # Update MQTT entities
         if REPORT_MQTT and check_config('mqtt'):
-            updateState("Send: MQTT")
+            updateState(sql_connection,"Send: MQTT")
             mylog('info', ['    Establishing MQTT thread'])
             mqtt_start()
         else :
@@ -2359,7 +2307,7 @@ def send_notifications ():
     mylog('info', ['[', timeNow(), '] Notifications: ', sql.rowcount])
 
     # Commit changes
-    commitDB()
+    db.commitDB()
 
 #-------------------------------------------------------------------------------
 def construct_notifications(sqlQuery, tableTitle, skipText = False, suppliedJsonStruct = None):
@@ -2904,266 +2852,6 @@ def mqtt_start():
 #         time.sleep(10)
 
 
-#===============================================================================
-# DB
-#===============================================================================
-#-------------------------------------------------------------------------------
-def upgradeDB ():
-
-    # indicates, if Online_History table is available
-    onlineHistoryAvailable = sql.execute("""
-        SELECT name FROM sqlite_master WHERE type='table'
-        AND name='Online_History';
-        """).fetchall() != []
-
-    # Check if it is incompatible (Check if table has all required columns)
-    isIncompatible = False
-
-    if onlineHistoryAvailable :
-        isIncompatible = sql.execute ("""
-            SELECT COUNT(*) AS CNTREC FROM pragma_table_info('Online_History') WHERE name='Archived_Devices'
-            """).fetchone()[0] == 0
-
-    # Drop table if available, but incompatible
-    if onlineHistoryAvailable and isIncompatible:
-        mylog('none','[upgradeDB] Table is incompatible, Dropping the Online_History table')
-        sql.execute("DROP TABLE Online_History;")
-        onlineHistoryAvailable = False
-
-    if onlineHistoryAvailable == False :
-        sql.execute("""
-            CREATE TABLE "Online_History" (
-                "Index" INTEGER,
-                "Scan_Date" TEXT,
-                "Online_Devices" INTEGER,
-                "Down_Devices" INTEGER,
-                "All_Devices" INTEGER,
-                "Archived_Devices" INTEGER,
-                PRIMARY KEY("Index" AUTOINCREMENT)
-            );
-            """)
-
-    # Alter Devices table
-    # dev_Network_Node_MAC_ADDR column
-    dev_Network_Node_MAC_ADDR_missing = sql.execute ("""
-        SELECT COUNT(*) AS CNTREC FROM pragma_table_info('Devices') WHERE name='dev_Network_Node_MAC_ADDR'
-        """).fetchone()[0] == 0
-
-    if dev_Network_Node_MAC_ADDR_missing :
-        mylog('verbose', ["[upgradeDB] Adding dev_Network_Node_MAC_ADDR to the Devices table"])
-        sql.execute("""
-            ALTER TABLE "Devices" ADD "dev_Network_Node_MAC_ADDR" TEXT
-            """)
-
-    # dev_Network_Node_port column
-    dev_Network_Node_port_missing = sql.execute ("""
-        SELECT COUNT(*) AS CNTREC FROM pragma_table_info('Devices') WHERE name='dev_Network_Node_port'
-        """).fetchone()[0] == 0
-
-    if dev_Network_Node_port_missing :
-        mylog('verbose', ["[upgradeDB] Adding dev_Network_Node_port to the Devices table"])
-        sql.execute("""
-            ALTER TABLE "Devices" ADD "dev_Network_Node_port" INTEGER
-            """)
-
-    # dev_Icon column
-    dev_Icon_missing = sql.execute ("""
-        SELECT COUNT(*) AS CNTREC FROM pragma_table_info('Devices') WHERE name='dev_Icon'
-        """).fetchone()[0] == 0
-
-    if dev_Icon_missing :
-        mylog('verbose', ["[upgradeDB] Adding dev_Icon to the Devices table"])
-        sql.execute("""
-            ALTER TABLE "Devices" ADD "dev_Icon" TEXT
-            """)
-
-    # indicates, if Settings table is available
-    settingsMissing = sql.execute("""
-        SELECT name FROM sqlite_master WHERE type='table'
-        AND name='Settings';
-        """).fetchone() == None
-
-    # Re-creating Settings table
-    mylog('verbose', ["[upgradeDB] Re-creating Settings table"])
-
-    if settingsMissing == False:
-        sql.execute("DROP TABLE Settings;")
-
-    sql.execute("""
-        CREATE TABLE "Settings" (
-            "Code_Name" TEXT,
-            "Display_Name" TEXT,
-            "Description" TEXT,
-            "Type" TEXT,
-            "Options" TEXT,
-            "RegEx" TEXT,
-            "Value" TEXT,
-            "Group" TEXT,
-            "Events" TEXT
-        );
-        """)
-
-    # indicates, if Pholus_Scan table is available
-    pholusScanMissing = sql.execute("""
-        SELECT name FROM sqlite_master WHERE type='table'
-        AND name='Pholus_Scan';
-        """).fetchone() == None
-
-    # if pholusScanMissing == False:
-    #     # Re-creating Pholus_Scan table
-    #     sql.execute("DROP TABLE Pholus_Scan;")
-    #     pholusScanMissing = True
-
-    if pholusScanMissing:
-        mylog('verbose', ["[upgradeDB] Re-creating Pholus_Scan table"])
-        sql.execute("""
-            CREATE TABLE "Pholus_Scan" (
-                "Index" INTEGER,
-                "Info" TEXT,
-                "Time" TEXT,
-                "MAC" TEXT,
-                "IP_v4_or_v6" TEXT,
-                "Record_Type" TEXT,
-                "Value" TEXT,
-                "Extra" TEXT,
-                PRIMARY KEY("Index" AUTOINCREMENT)
-            );
-            """)
-
-    # indicates, if Nmap_Scan table is available
-    nmapScanMissing = sql.execute("""
-        SELECT name FROM sqlite_master WHERE type='table'
-        AND name='Nmap_Scan';
-        """).fetchone() == None
-
-    # Re-creating Parameters table
-    mylog('verbose', ["[upgradeDB] Re-creating Parameters table"])
-    sql.execute("DROP TABLE Parameters;")
-
-    sql.execute("""
-        CREATE TABLE "Parameters" (
-            "par_ID" TEXT PRIMARY KEY,
-            "par_Value" TEXT
-        );
-        """)
-
-    # Initialize Parameters if unavailable
-    initOrSetParam('Back_App_State','Initializing')
-
-    # if nmapScanMissing == False:
-    #     # Re-creating Nmap_Scan table
-    #     sql.execute("DROP TABLE Nmap_Scan;")
-    #     nmapScanMissing = True
-
-    if nmapScanMissing:
-        mylog('verbose', ["[upgradeDB] Re-creating Nmap_Scan table"])
-        sql.execute("""
-            CREATE TABLE "Nmap_Scan" (
-                "Index" INTEGER,
-                "MAC" TEXT,
-                "Port" TEXT,
-                "Time" TEXT,
-                "State" TEXT,
-                "Service" TEXT,
-                "Extra" TEXT,
-                PRIMARY KEY("Index" AUTOINCREMENT)
-            );
-            """)
-
-    # Plugin state
-    sql_Plugins_Objects = """ CREATE TABLE IF NOT EXISTS Plugins_Objects(
-        "Index" INTEGER,
-        Plugin TEXT NOT NULL,
-        Object_PrimaryID TEXT NOT NULL,
-        Object_SecondaryID TEXT NOT NULL,
-        DateTimeCreated TEXT NOT NULL,
-        DateTimeChanged TEXT NOT NULL,
-        Watched_Value1 TEXT NOT NULL,
-        Watched_Value2 TEXT NOT NULL,
-        Watched_Value3 TEXT NOT NULL,
-        Watched_Value4 TEXT NOT NULL,
-        Status TEXT NOT NULL,
-        Extra TEXT NOT NULL,
-        UserData TEXT NOT NULL,
-        ForeignKey TEXT NOT NULL,
-        PRIMARY KEY("Index" AUTOINCREMENT)
-    ); """
-    sql.execute(sql_Plugins_Objects)
-
-    # Plugin execution results
-    sql_Plugins_Events = """ CREATE TABLE IF NOT EXISTS Plugins_Events(
-        "Index" INTEGER,
-        Plugin TEXT NOT NULL,
-        Object_PrimaryID TEXT NOT NULL,
-        Object_SecondaryID TEXT NOT NULL,
-        DateTimeCreated TEXT NOT NULL,
-        DateTimeChanged TEXT NOT NULL,
-        Watched_Value1 TEXT NOT NULL,
-        Watched_Value2 TEXT NOT NULL,
-        Watched_Value3 TEXT NOT NULL,
-        Watched_Value4 TEXT NOT NULL,
-        Status TEXT NOT NULL,
-        Extra TEXT NOT NULL,
-        UserData TEXT NOT NULL,
-        ForeignKey TEXT NOT NULL,
-        PRIMARY KEY("Index" AUTOINCREMENT)
-    ); """
-    sql.execute(sql_Plugins_Events)
-
-    # Plugin execution history
-    sql_Plugins_History = """ CREATE TABLE IF NOT EXISTS Plugins_History(
-        "Index" INTEGER,
-        Plugin TEXT NOT NULL,
-        Object_PrimaryID TEXT NOT NULL,
-        Object_SecondaryID TEXT NOT NULL,
-        DateTimeCreated TEXT NOT NULL,
-        DateTimeChanged TEXT NOT NULL,
-        Watched_Value1 TEXT NOT NULL,
-        Watched_Value2 TEXT NOT NULL,
-        Watched_Value3 TEXT NOT NULL,
-        Watched_Value4 TEXT NOT NULL,
-        Status TEXT NOT NULL,
-        Extra TEXT NOT NULL,
-        UserData TEXT NOT NULL,
-        ForeignKey TEXT NOT NULL,
-        PRIMARY KEY("Index" AUTOINCREMENT)
-    ); """
-    sql.execute(sql_Plugins_History)
-
-    # Dynamically generated language strings
-    # indicates, if Language_Strings table is available
-    languageStringsMissing = sql.execute("""
-        SELECT name FROM sqlite_master WHERE type='table'
-        AND name='Plugins_Language_Strings';
-        """).fetchone() == None
-
-    if languageStringsMissing == False:
-        sql.execute("DROP TABLE Plugins_Language_Strings;")
-
sql.execute(""" CREATE TABLE IF NOT EXISTS Plugins_Language_Strings( - "Index" INTEGER, - Language_Code TEXT NOT NULL, - String_Key TEXT NOT NULL, - String_Value TEXT NOT NULL, - Extra TEXT NOT NULL, - PRIMARY KEY("Index" AUTOINCREMENT) - ); """) - - commitDB () - -#------------------------------------------------------------------------------- -def initOrSetParam(parID, parValue): - - sql.execute ("INSERT INTO Parameters(par_ID, par_Value) VALUES('"+str(parID)+"', '"+str(parValue)+"') ON CONFLICT(par_ID) DO UPDATE SET par_Value='"+str(parValue)+"' where par_ID='"+str(parID)+"'") - - commitDB () - -#------------------------------------------------------------------------------- -def updateState(newState): - - sql.execute ("UPDATE Parameters SET par_Value='"+ newState +"' WHERE par_ID='Back_App_State'") - - commitDB () #=============================================================================== @@ -3442,43 +3130,27 @@ def to_text(_json): return payloadData #------------------------------------------------------------------------------- -def get_device_stats(): +def get_device_stats(db): # columns = ["online","down","all","archived","new","unknown"] sql.execute(sql_devices_stats) row = sql.fetchone() - commitDB() + db.commitDB() return row #------------------------------------------------------------------------------- -def get_all_devices(): +def get_all_devices(db): sql.execute(sql_devices_all) row = sql.fetchall() - commitDB() + db.commitDB() return row #------------------------------------------------------------------------------- -def get_sql_array(query): - sql.execute(query) - - rows = sql.fetchall() - - commitDB() - - # convert result into list of lists - arr = [] - for row in rows: - r_temp = [] - for column in row: - r_temp.append(column) - arr.append(r_temp) - - return arr #------------------------------------------------------------------------------- @@ -3499,7 +3171,7 @@ def hide_email(email): return email #------------------------------------------------------------------------------- -def check_and_run_event(): +def check_and_run_event(db): sql.execute(""" select * from Parameters where par_ID = "Front_Event" """) rows = sql.fetchall() @@ -3519,7 +3191,7 @@ def check_and_run_event(): sql.execute ("UPDATE Parameters SET par_Value='finished' WHERE par_ID='Front_Event'") # commit to DB - commitDB () + db.commitDB() #------------------------------------------------------------------------------- def handle_run(runType): @@ -3592,7 +3264,7 @@ def get_setting(key): return result #------------------------------------------------------------------------------- -def isNewVersion(): +def isNewVersion(db): global newVersionAvailable if newVersionAvailable == False: @@ -3621,7 +3293,7 @@ def isNewVersion(): if realeaseTimestamp > buildTimestamp + 600: mylog('none', [" New version of the container available!"]) newVersionAvailable = True - initOrSetParam('Back_New_Version_Available', str(newVersionAvailable)) + updateState(db, 'Back_New_Version_Available', str(newVersionAvailable)) return newVersionAvailable @@ -3645,19 +3317,19 @@ def get_plugins_configs(): return pluginsList #------------------------------------------------------------------------------- -def collect_lang_strings(json, pref): +def collect_lang_strings(db, json, pref): for prop in json["localized"]: for language_string in json[prop]: - import_language_string(language_string["language_code"], pref + "_" + prop, language_string["string"]) + import_language_string(db, language_string["language_code"], pref + "_" + 
prop, language_string["string"]) #------------------------------------------------------------------------------- -def import_language_string(code, key, value, extra = ""): +def import_language_string(db, code, key, value, extra = ""): sql.execute ("""INSERT INTO Plugins_Language_Strings ("Language_Code", "String_Key", "String_Value", "Extra") VALUES (?, ?, ?, ?)""", (str(code), str(key), str(value), str(extra))) - commitDB () + db.commitDB() #------------------------------------------------------------------------------- @@ -3670,7 +3342,7 @@ def run_plugin_scripts(runType): global plugins, tz, mySchedules # Header - updateState("Run: Plugins") + updateState(sql_connection,"Run: Plugins") mylog('debug', [' [Plugins] Check if any plugins need to be executed on run type: ', runType]) @@ -3703,7 +3375,7 @@ def run_plugin_scripts(runType): #------------------------------------------------------------------------------- # Executes the plugin command specified in the setting with the function specified as CMD -def execute_plugin(plugin): +def execute_plugin(db, plugin): # ------- necessary settings check -------- set = get_plugin_setting(plugin, "CMD") @@ -3740,7 +3412,7 @@ def execute_plugin(plugin): # Get Sql result if param["type"] == "sql": - resolved = flatten_array(get_sql_array(param["value"])) + resolved = flatten_array(db.get_sql_array(param["value"])) if resolved == None: mylog('none', [' [Plugins] The parameter "name":"', param["name"], '" was resolved as None']) @@ -3802,7 +3474,7 @@ def execute_plugin(plugin): mylog('verbose', [' [Plugins] Executing: ', q]) # set_CMD should contain a SQL query - arr = get_sql_array (q) + arr = db.get_sql_array (q) for row in arr: # There has to be always 9 columns @@ -3822,9 +3494,9 @@ def execute_plugin(plugin): # process results if any if len(sqlParams) > 0: sql.executemany ("""INSERT INTO Plugins_Events ("Plugin", "Object_PrimaryID", "Object_SecondaryID", "DateTimeCreated", "DateTimeChanged", "Watched_Value1", "Watched_Value2", "Watched_Value3", "Watched_Value4", "Status" ,"Extra", "UserData", "ForeignKey") VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)""", sqlParams) - commitDB () + db.commitDB() sql.executemany ("""INSERT INTO Plugins_History ("Plugin", "Object_PrimaryID", "Object_SecondaryID", "DateTimeCreated", "DateTimeChanged", "Watched_Value1", "Watched_Value2", "Watched_Value3", "Watched_Value4", "Status" ,"Extra", "UserData", "ForeignKey") VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)""", sqlParams) - commitDB () + db.commitDB() process_plugin_events(plugin) @@ -3842,7 +3514,7 @@ def handle_empty(value): #------------------------------------------------------------------------------- # Check if watched values changed for the given plugin -def process_plugin_events(plugin): +def process_plugin_events(db, plugin): global pluginObjects, pluginEvents @@ -3850,8 +3522,8 @@ def process_plugin_events(plugin): mylog('debug', [' [Plugins] Processing : ', pluginPref]) - plugObjectsArr = get_sql_array ("SELECT * FROM Plugins_Objects where Plugin = '" + str(pluginPref)+"'") - plugEventsArr = get_sql_array ("SELECT * FROM Plugins_Events where Plugin = '" + str(pluginPref)+"'") + plugObjectsArr = db.get_sql_array ("SELECT * FROM Plugins_Objects where Plugin = '" + str(pluginPref)+"'") + plugEventsArr = db.get_sql_array ("SELECT * FROM Plugins_Events where Plugin = '" + str(pluginPref)+"'") pluginObjects = [] pluginEvents = [] @@ -4004,7 +3676,7 @@ def process_plugin_events(plugin): sql.executemany (q, sqlParams) - commitDB() + db.commitDB() 
From 884aca149a4d959b6cd9791c5daaebc28d24793a Mon Sep 17 00:00:00 2001
From: Data-Monkey
Date: Sun, 21 May 2023 17:40:05 +1000
Subject: [PATCH 03/20] more splitting done

---
 .gitignore          |   3 +-
 db/pialert.db       | Bin 180224 -> 245760 bytes
 pialert/config.py   |  22 +++
 pialert/database.py |  61 +++++++++++-
 pialert/helper.py   |  41 +++++++-
 pialert/logger.py   |   1 +
 pialert/pialert.py  | 225 ++++++++++++++------------------------
 7 files changed, 194 insertions(+), 159 deletions(-)
 create mode 100644 pialert/config.py

diff --git a/.gitignore b/.gitignore
index f908738e..a785a3b4 100755
--- a/.gitignore
+++ b/.gitignore
@@ -9,4 +9,5 @@ front/plugins/**/*.log
 __pycache__/
 *.py[cod]
-*$py.class
\ No newline at end of file
+*$py.class
+db/pialert.db
\ No newline at end of file

diff --git a/db/pialert.db b/db/pialert.db
index ff4e27315457b56f048e1ce792b7f6e37591bc93..4b5bd1db4b2a54485a683c8efda866c6452e39e6 100755
GIT binary patch
[base85-encoded binary payload omitted: literal 245760 / delta 4393 for the SQLite database file]
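The .gitignore change above stops tracking db/pialert.db, so the binary deltas these patches carry for the SQLite file are incidental noise rather than meaningful history (the base85 payloads are omitted here). Dropping the file from version control loses nothing, because the schema can be recreated on startup with idempotent DDL. A minimal sketch of that bootstrap idea, reusing the Plugins_Language_Strings definition removed from pialert.py in the previous patch (the project's real upgradeDB() covers many more tables, and bootstrap_schema is an illustrative name):

    import sqlite3

    def bootstrap_schema(conn):
        # idempotent DDL: creates the table on a brand-new file, no-op on an existing one
        conn.execute("""CREATE TABLE IF NOT EXISTS Plugins_Language_Strings(
            "Index" INTEGER,
            Language_Code TEXT NOT NULL,
            String_Key TEXT NOT NULL,
            String_Value TEXT NOT NULL,
            Extra TEXT NOT NULL,
            PRIMARY KEY("Index" AUTOINCREMENT)
        );""")
        conn.commit()

    conn = sqlite3.connect(":memory:")   # stands in for /db/pialert.db
    bootstrap_schema(conn)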
zBA0_DW7tK865!!&xTposKVeH+a1*VG3dXKfs_^tZm=hLZzv)JK3O1awzcI0qj(NnP zgYGlzhbJ6g<{g zn}Vld78@RZW#7|2CU+~2@!g7J=W(jVcK$tg5lYIe7(zDYKI`E49{!~m_&Y1ct^o!j4)q>~uB=E21B93}Kf0q=od2J6zUuH^_fGa2 zy)PcUc(lLg4|~!*Q{Dfu`^UPzy-VwA@BGEi@9S9Tc}Px1a>ar8!Y}d)NbzQ*UBT7E6{bbh3>IP&%^ud^b6ILd47OmGDVZH|W6%cIJNw$%6X>}}_>+uXFg#h#FfkNiAo8DbVhb^uHV z`5YV8l%9JXScQc7ydmib_3)$2?>P5ZM;x)C!@_d^2X6I@l;gJg9hp8FwmtY^+t@CY zAZd%MXG!y@ZK?_$=fw)9@OWrvJXC=I(o$pzVC`>n1{A>gcl(+Opv62PEJbS5zItZW z0~;6H-}F)KVdaq=cIJUMO|Y3C*AL7VWyF##Vj9_8hvWpaRb3h_V@@=;sI1woFEN~X z4ZOiK%$aFV#CfoVX{(@bXxzY*DM2mr|FjPk9MSyTR>h8;zYr0F^GE=n45z*S$-d~73owSRF{Le7CN!kM4tGY+(ysM zMr~;ZV`Z$qg#3yxqN(v+@}C~z;OkO9xxN|!y8+Ku?UZy;<;=%fDWcH zHuiw;y>cGUx3W`(d{VMI^VaU{TX4R8GrydAYjyEza#CDK4mkhs?78OX{ao+5=d7pa z=<%K}!6ki&01+SpM1Tko0U|&IhyW3|8v?biT04yWuhr~Ud03>dKE>8L(OKmO|AqBj znPgA1$Z23>GvEaR>}{(!xA7~N_)|LAq@}(*E~gA)W6czQVIVv#*RRah@St+kZg^|d z1g9!%( z_|%3SZh_-M2JCZUkL@v!49E>`W&V0zhJ#Oc_)}1&5de-D?Y@6!NO+9{jxAk4a*vid1t zx$y3kjsAz%PRXBT026m<0*B|!$>)Yl2jI;oGl7{ypZj68VJ^W&;bL~5Z>zIxBR*7s zY1Onn45an*(oCbK%`X}>asKb__)UlRTfEb~zt+3gJLLIU&*stJI+})?^dSO7fCvx) zBJdF-@cI4e&f}3t*LOH~^V(iiFM{#Cl8LdmN8^dv6l^SohYT5qxU9q?>6mgo9=o9& z>`RHH*c-yc+b7`d!9hP9J7jO=a`ShGvA?E6JUP+wK2wdYPJ%>~VBUbpamfR;Z5>j+ z1`Sogfb#l)aM4IAhHt}2D4^3cQ`U?M-q)s#k1gU$E?J!Xb*20OEj*%iuQ=vYmRA&O z0_tgOB`&2TR+pBP^a0VumOz)Wb31v+3|P1T?TeN3V@fQsC=+9;=Decs#TVgqDJ*l@1((2Iwr2V_vx%3W1Uy}t zml}LzyP2LbpBom*mJ>{bs4-fL%7XVA>Pd4Uxyx$8a`5hI21Z*?NTd9NvmC?8f+5ST zygoQoKzcnS&r$NdWjD{4aeGqWZ8z3B+*E~|eOp@qpAv!kXe&6fm1`&_abCP@ZNbjt zK3`WYifwOAmg@g|A#A56vrO^|nQFv6wt)YGE{tnoY;{3EtHdH|FTZEZzDctncB;9oJr6)V! zOurM_XN&6m1xrG9u%&G7rAaWq?R$Bm1(1vCt`1Asnvff4DOY-Gk}Ms3>!(@(!S_H$ z_p=2le5$2z(^r#p)5Z7l^DR_Q!DA%y*?3Z@?r$w!cxo0d9DF5TXaCug&RV~Xb_C9| XSGZ*CgGC&ZE#qkL+Z0uUyYK%4Bum+v delta 4393 zcmeI0Yj6|S6@d5b>amh+uP_+d7Dg)>Y|AhF#DGH+BMVStWP=5vwBRcC8Wh`-D#->H zhS#b|(p)mS4$C@~6#AAIT3grIYDkrpCt5ySqZH z@T2aJ{;RytbI-l^+`H#|=UxvEa)X1W($>9AftbK|h9W)w7++19DFEsPo~IhV?4~HH zl$<5xq{;EKk<#kAZmN94pq5C0n)n2VftD(seP=utNlQ8R#VK}WL~E4 z(`V>a@KbP6UydLzeK-q$Hp%3}TKr&=*#>KH%@pH+Ed0_GvjtY;;VGt=2!Auhxan}F z^tuNBbr{x?qM2dX39A&*XH5*pHVS=#b|IRHUl{>6-DSqYIVd6;d&fz8{pTP9Pn-kZ z#_28jIe?fzutPZEX*}2_^a*Wy8eHx^AsCCA6?N@66p{*M_}y`^6QSJR zC*$CNG@NsubdipCoQLg3)N0gQoK6_>i#fA)Fp2fErF3PT7?`bPDr=qMuV+id&u4SB zCY`0(DW00i7FW-$H>IKH48)>*7gCpynN+Si(L1+M8qS3|n8(<Lp`!_{H>4hI(e z)f8>LrfH%7^+f(eg*B+PC21w8$Qsf~tC`d#ii>A_nz1YW19qprTK7;lrv07vh}NLF zz}V~Ha^6}?4 zm^b5}J}QIl+F>f2s;4qmr~fM5mv+Z=+W3WWpW(LQ74|OM#ya)Cr_O7?tKYBtn=Yiw z)LxTEw~M0s+|jkYZobMgLr7d0AZ8CoyJQ~{f@ zmvZPW_4V-jfUF+QKG}hY@2(`t8o^ab?l&@wdjT~xt(#Wo+Clf~e0({cQwiG{) z9YGm-i`@=Lp9E#tA8GS6w)i9d9)b9Hw8*v3-QaWc9&e-jATOa*(cqiC{8KHyC?Rz< zH#JG>-ey-rtJ_(P(sh6{>upLp^*`nkAe->ORrhDeH4mmWWeu?LM7zk z-;RQhMl8+}VH2!VRQ@4?0pGsP7;%pXbu`lBUx>sLbCacB&CpS#6EDuJ14I1Q%r}7v zAPwF=2F3XH80q8G7?jY6!57ECf$isrZgbMMoo5Zz>@egk@TWC~(Ke|bhs1x70$!yC<8+8_^6{5)x*CF(^HTk14PfLRE z4%|4&RD&@NVWZieB#@d$L$MQegS~0$Aqzo?FrPtbuwJaJIlU zH63898V;~U%?3JLw*U>Wc_}i`WwjEaZ0sA^H8RhJmBIdpHAMWw>7S4#LuZwO^#T>v zD^*ypl%4XiP05kUb-abvW!$H@`$>;1m_Q!Rf_PY?PV%gQWl54>d4_i^p5cv*)sQHW zB1AUk%Am#^*MuM1h%qL6G5K!GoqCeXKj!3QOj7i>xoqTswTqa_if>5#T z79?afBBg!Vo5+HM+@%$|WS?|j?j(`@5ez6beaO0qo9zyCbO^!3w{a~3<~YBKq#oLATe#}k|%sr+G&mXV}bT(WbOW-tG~TlAc;EJVN>X+vOrJt z)G3?M&@Dt_De|XcWfB@g$5V`J?FxkV31Pt>6FO4bCq{ciu|Q{_omi9{5dOG{$#SWt z$15CH8crE;D5a8YL&D2QRipDT%2g6xaU$X6EaJHf6J8CCveks7kwuM28m(0`l15o- zNNUKQ<9$3b#jI9sK;4pblOkQX2PK8t8)E-VNq{9!Z~cEY^*>lm6&t194*3toQaisy QQuwcSe)eTqD{cM$4Hw8WVgLXD diff --git a/pialert/config.py b/pialert/config.py new file mode 100644 index 00000000..e0da2934 --- /dev/null +++ b/pialert/config.py 
@@ -0,0 +1,22 @@ + + +global mySettings, mySettingsSQLsafe +#------------------------------------------------------------------------------- +# Import user values +# Check config dictionary +def ccd(key, default, config, name, inputtype, options, group, events=[], desc = "", regex = ""): + result = default + + # use existing value if already supplied, otherwise default value is used + if key in config: + result = config[key] + + global mySettings + + if inputtype == 'text': + result = result.replace('\'', "{s-quote}") + + mySettingsSQLsafe.append((key, name, desc, inputtype, options, regex, str(result), group, str(events))) + mySettings.append((key, name, desc, inputtype, options, regex, result, group, str(events))) + + return result \ No newline at end of file diff --git a/pialert/database.py b/pialert/database.py index ccf3db9a..071cfbaa 100644 --- a/pialert/database.py +++ b/pialert/database.py @@ -5,6 +5,7 @@ import sqlite3 from const import fullDbPath from logger import print_log, mylog + #=============================================================================== # SQL queries #=============================================================================== @@ -34,7 +35,8 @@ class DB(): def __init__(self): self.sql = None self.sql_connection = None - + + #------------------------------------------------------------------------------- def openDB (self): # Check if DB is open if self.sql_connection != None : @@ -55,7 +57,8 @@ class DB(): if self.sql_connection == None : mylog('debug','commitDB: databse is not open') return - mylog('debug','commitDB: comiting DB changes') + + # mylog('debug','commitDB: comiting DB changes') # Commit changes to DB self.sql_connection.commit() @@ -81,6 +84,58 @@ class DB(): return arr + #=============================================================================== + # Cleanup / upkeep database + #=============================================================================== + def cleanup_database (self, startTime, DAYS_TO_KEEP_EVENTS, PHOLUS_DAYS_DATA): + # Header + updateState(self,"Upkeep: Clean DB") + mylog('verbose', ['[', startTime, '] Upkeep Database:' ]) + + # Cleanup Online History + mylog('verbose', [' Online_History: Delete all but keep latest 150 entries']) + self.sql.execute ("""DELETE from Online_History where "Index" not in ( SELECT "Index" from Online_History order by Scan_Date desc limit 150)""") + + mylog('verbose', [' Optimize Database']) + # Cleanup Events + mylog('verbose', [' Events: Delete all older than '+str(DAYS_TO_KEEP_EVENTS)+' days']) + self.sql.execute ("DELETE FROM Events WHERE eve_DateTime <= date('now', '-"+str(DAYS_TO_KEEP_EVENTS)+" day')") + + # Cleanup Plugin Events History + mylog('verbose', [' Plugin Events History: Delete all older than '+str(DAYS_TO_KEEP_EVENTS)+' days']) + self.sql.execute ("DELETE FROM Plugins_History WHERE DateTimeChanged <= date('now', '-"+str(DAYS_TO_KEEP_EVENTS)+" day')") + + # Cleanup Pholus_Scan + if PHOLUS_DAYS_DATA != 0: + mylog('verbose', [' Pholus_Scan: Delete all older than ' + str(PHOLUS_DAYS_DATA) + ' days']) + self.sql.execute ("DELETE FROM Pholus_Scan WHERE Time <= date('now', '-"+ str(PHOLUS_DAYS_DATA) +" day')") # improvement possibility: keep at least N per mac + + # De-Dupe (de-duplicate - remove duplicate entries) from the Pholus_Scan table + mylog('verbose', [' Pholus_Scan: Delete all duplicates']) + self.sql.execute ("""DELETE FROM Pholus_Scan + WHERE rowid > ( + SELECT MIN(rowid) FROM Pholus_Scan p2 + WHERE Pholus_Scan.MAC = p2.MAC + AND Pholus_Scan.Value = p2.Value + 
AND Pholus_Scan.Record_Type = p2.Record_Type
+                );""")
+
+        # De-Dupe (de-duplicate - remove duplicate entries) from the Nmap_Scan table
+        mylog('verbose', ['    Nmap_Scan: Delete all duplicates'])
+        self.sql.execute ("""DELETE FROM Nmap_Scan
+            WHERE rowid > (
+                SELECT MIN(rowid) FROM Nmap_Scan p2
+                WHERE Nmap_Scan.MAC = p2.MAC
+                    AND Nmap_Scan.Port = p2.Port
+                    AND Nmap_Scan.State = p2.State
+                    AND Nmap_Scan.Service = p2.Service
+                );""")
+
+        # Shrink DB
+        mylog('verbose', ['    Shrink Database'])
+        self.sql.execute ("VACUUM;")
+
+        self.commitDB()

@@ -97,7 +152,9 @@ def initOrSetParam(db, parID, parValue):

 #-------------------------------------------------------------------------------
 def updateState(db, newState):
+    #sql = db.sql
+
+    mylog('debug', ' [updateState] changing state to: "' + newState +'"')
     db.sql.execute ("UPDATE Parameters SET par_Value='"+ newState +"' WHERE par_ID='Back_App_State'")

     db.commitDB()

diff --git a/pialert/helper.py b/pialert/helper.py
index b92fdc57..51eb358e 100644
--- a/pialert/helper.py
+++ b/pialert/helper.py
@@ -4,7 +4,7 @@ import os
 import subprocess

 from const import *
-from logger import mylog
+from logger import mylog, logResult

 #-------------------------------------------------------------------------------
 def timeNow():
@@ -65,3 +65,42 @@ def fixPermissions():
         # An error occured, handle it
         mylog('none', ["[Setup] Fix Failed. Execute this command manually inside of the container: ", ' '.join(com)])
         mylog('none', [e.output])
+
+
+#-------------------------------------------------------------------------------
+def initialiseFile(pathToCheck, defaultFile):
+    # if file not readable (missing?) try to copy over the backed-up (default) one
+    if str(os.access(pathToCheck, os.R_OK)) == "False":
+        mylog('none', ["[Setup] ("+ pathToCheck +") file is not readable or missing. Trying to copy over the default one."])
+        try:
+            # try running a subprocess
+            p = subprocess.Popen(["cp", defaultFile , pathToCheck], stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+            stdout, stderr = p.communicate()
+
+            if str(os.access(pathToCheck, os.R_OK)) == "False":
+                mylog('none', ["[Setup] Error copying ("+defaultFile+") to ("+pathToCheck+"). Make sure the app has Read & Write access to the parent directory."])
+            else:
+                mylog('none', ["[Setup] ("+defaultFile+") copied over successfully to ("+pathToCheck+")."])
+
+            # write stdout and stderr into .log files for debugging if needed
+            logResult (stdout, stderr) # TO-DO should be changed to mylog
+
+        except subprocess.CalledProcessError as e:
+            # An error occurred, handle it
+            mylog('none', ["[Setup] Error copying ("+defaultFile+"). Make sure the app has Read & Write access to " + pathToCheck])
+            mylog('none', [e.output])
+
+
+def filePermissions():
+    # check and initialize pialert.conf
+    (confR_access, dbR_access) = checkPermissionsOK() # Initial check
+
+    if confR_access == False:
+        initialiseFile(fullConfPath, "/home/pi/pialert/back/pialert.conf_bak" )
+
+    # check and initialize pialert.db
+    if dbR_access == False:
+        initialiseFile(fullDbPath, "/home/pi/pialert/back/pialert.db_bak")
+
+    # last attempt
+    fixPermissions()
\ No newline at end of file
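The initialiseFile() helper that lands in helper.py above restores a bundled default when the live config or database file is unreadable. A compact sketch of the same recover-then-verify idea, using shutil in place of the cp subprocess; the function name and message wording are illustrative, not the project's exact code:

    import os
    import shutil

    def initialise_file(path_to_check, default_file):
        # restore the backed-up default when the live file is missing or unreadable
        if os.access(path_to_check, os.R_OK):
            return
        print(f"[Setup] ({path_to_check}) not readable, copying default")
        try:
            shutil.copyfile(default_file, path_to_check)
        except OSError as e:
            print(f"[Setup] Error copying ({default_file}): {e}")
            return
        # verify: the copy only helps if the result is actually readable now
        if not os.access(path_to_check, os.R_OK):
            print(f"[Setup] ({path_to_check}) is still not readable")

Re-checking os.R_OK after the copy matters: a copy into a directory the container cannot read back would otherwise fail silently, which is exactly the situation these permission checks try to surface.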
diff --git a/pialert/logger.py b/pialert/logger.py
index f0e6a450..154a1082 100644
--- a/pialert/logger.py
+++ b/pialert/logger.py
@@ -3,6 +3,7 @@
 import datetime
 from const import *

+
 #-------------------------------------------------------------------------------
 debugLevels = [
     ('none', 0), ('minimal', 1), ('verbose', 2), ('debug', 3)

diff --git a/pialert/pialert.py b/pialert/pialert.py
index dc2215a2..cc0edbb8 100755
--- a/pialert/pialert.py
+++ b/pialert/pialert.py
@@ -45,8 +45,9 @@ import multiprocessing

 from const import *
 from logger import mylog, print_log, logResult
-from helper import checkPermissionsOK, fixPermissions, timeNow, updateSubnets
+from helper import filePermissions, timeNow, updateSubnets
 from database import *
+from config import ccd


 # Global variables
@@ -63,45 +64,8 @@
 sql_connection = None



-#-------------------------------------------------------------------------------
-def initialiseFile(pathToCheck, defaultFile):
-    # if file not readable (missing?) try to copy over the backed-up (default) one
-    if str(os.access(pathToCheck, os.R_OK)) == "False":
-        mylog('none', ["[Setup] ("+ pathToCheck +") file is not readable or missing. Trying to copy over the default one."])
-        try:
-            # try runnning a subprocess
-            p = subprocess.Popen(["cp", defaultFile , pathToCheck], stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
-            stdout, stderr = p.communicate()
-
-            if str(os.access(pathToCheck, os.R_OK)) == "False":
-                mylog('none', ["[Setup] Error copying ("+defaultFile+") to ("+pathToCheck+"). Make sure the app has Read & Write access to the parent directory."])
-            else:
-                mylog('none', ["[Setup] ("+defaultFile+") copied over successfully to ("+pathToCheck+")."])
-
-            # write stdout and stderr into .log files for debugging if needed
-            logResult (stdout, stderr) # TO-DO should be changed to mylog
-
-        except subprocess.CalledProcessError as e:
-            # An error occured, handle it
-            mylog('none', ["[Setup] Error copying ("+defaultFile+"). 
Make sure the app has Read & Write access to " + pathToCheck]) - mylog('none', [e.output]) - -#=============================================================================== -# Basic checks and Setup -#=============================================================================== - -# check and initialize pialert.conf -(confR_access, dbR_access) = checkPermissionsOK() # Initial check - -if confR_access == False: - initialiseFile(fullConfPath, "/home/pi/pialert/back/pialert.conf_bak" ) - -# check and initialize pialert.db -if dbR_access == False: - initialiseFile(fullDbPath, "/home/pi/pialert/back/pialert.db_bak") - -# last attempt -fixPermissions() #=============================================================================== # Initialise user defined values @@ -110,27 +74,6 @@ fixPermissions() #------------------------------------------------------------------------------- - -#------------------------------------------------------------------------------- -# Import user values -# Check config dictionary -def ccd(key, default, config, name, inputtype, options, group, events=[], desc = "", regex = ""): - result = default - - # use existing value if already supplied, otherwise default value is used - if key in config: - result = config[key] - - global mySettings - - if inputtype == 'text': - result = result.replace('\'', "{s-quote}") - - mySettingsSQLsafe.append((key, name, desc, inputtype, options, regex, str(result), group, str(events))) - mySettings.append((key, name, desc, inputtype, options, regex, result, group, str(events))) - - return result - #------------------------------------------------------------------------------- def importConfigs (db): @@ -170,7 +113,7 @@ def importConfigs (db): return mySettings = [] # reset settings - mySettingsSQLsafe = [] # same as aboverr but safe to be passed into a SQL query + mySettingsSQLsafe = [] # same as above but safe to be passed into a SQL query # load the variables from pialert.conf code = compile(config_file.read_text(), config_file.name, "exec") @@ -288,6 +231,8 @@ def importConfigs (db): # Format and prepare the list of subnets userSubnets = updateSubnets(SCAN_SUBNETS) + + # Plugins START # ----------------- if ENABLE_PLUGINS: @@ -341,7 +286,7 @@ def importConfigs (db): lastTimeImported = time.time() # Is used to display a message in the UI when old (outdated) settings are loaded - initOrSetParam("Back_Settings_Imported",(round(time.time() * 1000),) ) + initOrSetParam(db, "Back_Settings_Imported",(round(time.time() * 1000),) ) #commitDB(sql_connection) db.commitDB() @@ -351,8 +296,15 @@ def importConfigs (db): mylog('info', ['[', timeNow(), '] Config: Imported new config']) + + + + + +#=============================================================================== +#=============================================================================== +# MAIN #=============================================================================== -# MAIN #=============================================================================== cycle = "" check_report = [1, "internet_IP", "update_vendors_silent"] @@ -375,37 +327,47 @@ def main (): # Initialize global variables global time_started, cycle, last_network_scan, last_internet_IP_scan, last_run, last_cleanup, last_update_vendors # second set of global variables - global startTime, log_timestamp, sql_connection, sql, plugins_once_run + global startTime, log_timestamp, plugins_once_run + + # To-Do all these DB Globals need to be removed + global db, sql, sql_connection + + # check file permissions and 
fix if required
+    filePermissions()

     # Open DB once and keep open
     # Opening / closing DB frequently actually casues more issues
-    db = DB()
-    print(db, db.sql, db.sql_connection )
+    db = DB() # instance of class DB
     db.openDB()
-    print(db, db.sql, db.sql_connection )

-    # To-Do replace the following to lines with the db dict or class
+    # To-Do replace the following two lines with the db class
     sql_connection = db.sql_connection
     sql = db.sql

     # Upgrade DB if needed
     upgradeDB(db)

+
+    #===============================================================================
+    # This is the main loop of Pi.Alert
+    #===============================================================================
+
     while True:

         # update time started
         time_started = datetime.datetime.now()
+        mylog('debug', ['[', timeNow(), '] [MAIN] Starting loop'])

         # re-load user configuration and plugins
         importConfigs(db)

         # Handle plugins executed ONCE
         if ENABLE_PLUGINS and plugins_once_run == False:
-            run_plugin_scripts('once')
+            run_plugin_scripts(db, 'once')
             plugins_once_run = True

         # check if there is a front end initiated event which needs to be executed
-        check_and_run_event()
+        check_and_run_event(db)

         # Update API endpoints
         update_api()

@@ -417,8 +379,7 @@ def main ():
             last_run = time_started

             # Header
-            updateState(db,"Process: Start")
-            mylog('verbose', ['[', timeNow(), '] Process: Start'])
+            updateState(db,"Process: Start")

             # Timestamp
             startTime = time_started
@@ -426,7 +387,7 @@ def main ():

             # Check if any plugins need to run on schedule
             if ENABLE_PLUGINS:
-                run_plugin_scripts('schedule')
+                run_plugin_scripts(db,'schedule')

             # determine run/scan type based on passed time
             # --------------------------------------------
@@ -441,6 +402,7 @@ def main ():
             if last_update_vendors + datetime.timedelta(days = 7) < time_started:
                 last_update_vendors = time_started
                 cycle = 'update_vendors'
+                mylog('verbose', ['[', timeNow(), '] cycle:',cycle])
                 update_devices_MAC_vendors()

             # Execute scheduled or one-off Pholus scan if enabled and run conditions fulfilled
@@ -459,7 +421,7 @@ def main ():

                 if run:
                     pholusSchedule.last_run = datetime.datetime.now(tz).replace(microsecond=0)
-                    performPholusScan(PHOLUS_RUN_TIMEOUT)
+                    performPholusScan(db, PHOLUS_RUN_TIMEOUT)

             # Execute scheduled or one-off Nmap scan if enabled and run conditions fulfilled
             if NMAP_RUN == "schedule" or NMAP_RUN == "once":
@@ -483,6 +445,8 @@ def main ():
             if last_network_scan + datetime.timedelta(minutes=SCAN_CYCLE_MINUTES) < time_started:
                 last_network_scan = time_started
                 cycle = 1 # network scan
+                mylog('verbose', ['[', timeNow(), '] cycle:',cycle])
+
                 # scan_network()

                 # DEBUG start ++++++++++++++++++++++++++++++++++++++++++++++++++++++
@@ -520,20 +484,21 @@ def main ():
                 if len(newDevices) > 0:
                     # run all plugins registered to be run when new devices are found
                     if ENABLE_PLUGINS:
-                        run_plugin_scripts('on_new_device')
+                        run_plugin_scripts(db, 'on_new_device')

                     # Scan newly found devices with Nmap if enabled
                     if NMAP_ACTIVE and len(newDevices) > 0:
-                        performNmapScan(newDevices)
+                        performNmapScan( newDevices)

                 # send all configured notifications
-                send_notifications()
+                send_notifications(db)

             # clean up the DB once a day
             if last_cleanup + datetime.timedelta(hours = 24) < time_started:
                 last_cleanup = time_started
                 cycle = 'cleanup'
-                cleanup_database()
+                mylog('verbose', ['[', timeNow(), '] cycle:',cycle])
+                db.cleanup_database(startTime, DAYS_TO_KEEP_EVENTS, PHOLUS_DAYS_DATA)

             # Commit SQL
             db.commitDB()

@@ -545,13 +510,15 @@ def main ():
                 action = "network_scan"
             mylog('verbose', ['[', timeNow(), '] Last 
action: ', action]) cycle = "" + mylog('verbose', ['[', timeNow(), '] cycle:',cycle]) # Footer updateState(db,"Process: Wait") mylog('verbose', ['[', timeNow(), '] Process: Wait']) else: # do something - cycle = "" + cycle = "" + mylog('verbose', ['[', timeNow(), '] [MAIN] waiting to start next loop']) #loop time.sleep(5) # wait for N seconds @@ -563,7 +530,7 @@ def main (): def check_internet_IP (): # Header - updateState(sql_connection,"Scan: Internet IP") + updateState(db,"Scan: Internet IP") mylog('verbose', ['[', startTime, '] Check Internet IP:']) # Get Internet IP @@ -581,13 +548,13 @@ def check_internet_IP (): # Get previous stored IP mylog('verbose', [' Retrieving previous IP:']) - previous_IP = get_previous_internet_IP () + previous_IP = get_previous_internet_IP (db) mylog('verbose', [' ', previous_IP]) # Check IP Change if internet_IP != previous_IP : mylog('info', [' New internet IP: ', internet_IP]) - save_new_internet_IP (internet_IP) + save_new_internet_IP (db, internet_IP) else : mylog('verbose', [' No changes to perform']) @@ -700,7 +667,7 @@ def save_new_internet_IP (db, pNewIP): append_line_to_file (logPath + '/IP_changes.log', '['+str(startTime) +']\t'+ pNewIP +'\n') - prevIp = get_previous_internet_IP() + prevIp = get_previous_internet_IP(db) # Save event sql.execute ("""INSERT INTO Events (eve_MAC, eve_IP, eve_DateTime, eve_EventType, eve_AdditionalInfo, @@ -732,65 +699,13 @@ def check_IP_format (pIP): return IP.group(0) -#=============================================================================== -# Cleanup / upkeep database -#=============================================================================== -def cleanup_database (db): - # Header - updateState(sql_connection,"Upkeep: Clean DB") - mylog('verbose', ['[', startTime, '] Upkeep Database:' ]) - - # Cleanup Online History - mylog('verbose', [' Online_History: Delete all but keep latest 150 entries']) - sql.execute ("""DELETE from Online_History where "Index" not in ( SELECT "Index" from Online_History order by Scan_Date desc limit 150)""") - - mylog('verbose', [' Optimize Database']) - # Cleanup Events - mylog('verbose', [' Events: Delete all older than '+str(DAYS_TO_KEEP_EVENTS)+' days']) - sql.execute ("DELETE FROM Events WHERE eve_DateTime <= date('now', '-"+str(DAYS_TO_KEEP_EVENTS)+" day')") - - # Cleanup Plugin Events History - mylog('verbose', [' Plugin Events History: Delete all older than '+str(DAYS_TO_KEEP_EVENTS)+' days']) - sql.execute ("DELETE FROM Plugins_History WHERE DateTimeChanged <= date('now', '-"+str(DAYS_TO_KEEP_EVENTS)+" day')") - - # Cleanup Pholus_Scan - if PHOLUS_DAYS_DATA != 0: - mylog('verbose', [' Pholus_Scan: Delete all older than ' + str(PHOLUS_DAYS_DATA) + ' days']) - sql.execute ("DELETE FROM Pholus_Scan WHERE Time <= date('now', '-"+ str(PHOLUS_DAYS_DATA) +" day')") # improvement possibility: keep at least N per mac - - # De-Dupe (de-duplicate - remove duplicate entries) from the Pholus_Scan table - mylog('verbose', [' Pholus_Scan: Delete all duplicates']) - sql.execute ("""DELETE FROM Pholus_Scan - WHERE rowid > ( - SELECT MIN(rowid) FROM Pholus_Scan p2 - WHERE Pholus_Scan.MAC = p2.MAC - AND Pholus_Scan.Value = p2.Value - AND Pholus_Scan.Record_Type = p2.Record_Type - );""") - - # De-Dupe (de-duplicate - remove duplicate entries) from the Nmap_Scan table - mylog('verbose', [' Nmap_Scan: Delete all duplicates']) - sql.execute ("""DELETE FROM Nmap_Scan - WHERE rowid > ( - SELECT MIN(rowid) FROM Nmap_Scan p2 - WHERE Nmap_Scan.MAC = p2.MAC - AND Nmap_Scan.Port = p2.Port - AND 
Nmap_Scan.State = p2.State - AND Nmap_Scan.Service = p2.Service - );""") - - # Shrink DB - mylog('verbose', [' Shrink Database']) - sql.execute ("VACUUM;") - - db.commitDB() #=============================================================================== # UPDATE DEVICE MAC VENDORS #=============================================================================== def update_devices_MAC_vendors (db, pArg = ''): # Header - updateState(sql_connection,"Upkeep: Vendors") + updateState(db,"Upkeep: Vendors") mylog('verbose', ['[', startTime, '] Upkeep - Update HW Vendors:' ]) # Update vendors DB (iab oui) @@ -878,11 +793,11 @@ def query_MAC_vendor (pMAC): #=============================================================================== # SCAN NETWORK #=============================================================================== -def scan_network (db): +def scan_network (): reporting = False # Header - updateState(sql_connection,"Scan: Network") + updateState(db,"Scan: Network") mylog('verbose', ['[', startTime, '] Scan Devices:' ]) # Query ScanCycle properties @@ -942,19 +857,19 @@ def scan_network (db): # Resolve devices names print_log (' Resolve devices names') - update_devices_names() + update_devices_names(db) # Void false connection - disconnections mylog('verbose', [' Voiding false (ghost) disconnections']) - void_ghost_disconnections () + void_ghost_disconnections (db) # Pair session events (Connection / Disconnection) mylog('verbose', [' Pairing session events (connection / disconnection) ']) - pair_sessions_events() + pair_sessions_events(db) # Sessions snapshot mylog('verbose', [' Creating sessions snapshot']) - create_sessions_snapshot () + create_sessions_snapshot (db) # Sessions snapshot mylog('verbose', [' Inserting scan results into Online_History']) @@ -962,14 +877,14 @@ def scan_network (db): # Skip repeated notifications mylog('verbose', [' Skipping repeated notifications']) - skip_repeated_notifications () + skip_repeated_notifications (db) # Commit changes db.commitDB() # Run splugin scripts which are set to run every timne after a scan finished if ENABLE_PLUGINS: - run_plugin_scripts('always_after_scan') + run_plugin_scripts(db,'always_after_scan') return reporting @@ -1565,7 +1480,7 @@ def update_devices_names (db): #------------------------------------------------------------------------------- -def performNmapScan(db, devicesToScan): +def performNmapScan( devicesToScan): global changedPorts_json_struc @@ -1577,7 +1492,7 @@ def performNmapScan(db, devicesToScan): devTotal = len(devicesToScan) - updateState(sql_connection,"Scan: Nmap") + updateState(db,"Scan: Nmap") mylog('verbose', ['[', timeNow(), '] Scan: Nmap for max ', str(timeoutSec), 's ('+ str(round(int(timeoutSec) / 60, 1)) +'min) per device']) @@ -1770,7 +1685,7 @@ def performPholusScan (db, timeoutSec): interface = temp[1].strip() # logging & updating app state - updateState(sql_connection,"Scan: Pholus") + updateState(db,"Scan: Pholus") mylog('info', ['[', timeNow(), '] Scan: Pholus for ', str(timeoutSec), 's ('+ str(round(int(timeoutSec) / 60, 1)) +'min)']) mylog('verbose', [" Pholus scan on [interface] ", interface, " [mask] " , mask]) @@ -2129,7 +2044,7 @@ def send_notifications (db): # Open html Template template_file = open(pialertPath + '/back/report_template.html', 'r') - if isNewVersion(): + if isNewVersion(db): template_file = open(pialertPath + '/back/report_template_new_version.html', 'r') mail_html = template_file.read() @@ -2261,38 +2176,38 @@ def send_notifications (db): mylog('none', [' Changes 
detected, sending reports'])

         if REPORT_MAIL and check_config('email'):
-            updateState(sql_connection,"Send: Email")
+            updateState(db,"Send: Email")
             mylog('info', ['    Sending report by Email'])
             send_email (mail_text, mail_html)
         else :
             mylog('verbose', ['    Skip email'])
         if REPORT_APPRISE and check_config('apprise'):
-            updateState(sql_connection,"Send: Apprise")
+            updateState(db,"Send: Apprise")
             mylog('info', ['    Sending report by Apprise'])
             send_apprise (mail_html, mail_text)
         else :
             mylog('verbose', ['    Skip Apprise'])
         if REPORT_WEBHOOK and check_config('webhook'):
-            updateState(sql_connection,"Send: Webhook")
+            updateState(db,"Send: Webhook")
             mylog('info', ['    Sending report by Webhook'])
             send_webhook (json_final, mail_text)
         else :
             mylog('verbose', ['    Skip webhook'])
         if REPORT_NTFY and check_config('ntfy'):
-            updateState(sql_connection,"Send: NTFY")
+            updateState(db,"Send: NTFY")
             mylog('info', ['    Sending report by NTFY'])
             send_ntfy (mail_text)
         else :
             mylog('verbose', ['    Skip NTFY'])
         if REPORT_PUSHSAFER and check_config('pushsafer'):
-            updateState(sql_connection,"Send: PUSHSAFER")
+            updateState(db,"Send: PUSHSAFER")
             mylog('info', ['    Sending report by PUSHSAFER'])
             send_pushsafer (mail_text)
         else :
             mylog('verbose', ['    Skip PUSHSAFER'])
         # Update MQTT entities
         if REPORT_MQTT and check_config('mqtt'):
-            updateState(sql_connection,"Send: MQTT")
+            updateState(db,"Send: MQTT")
             mylog('info', ['    Establishing MQTT thread'])
             mqtt_start()
         else :
@@ -3347,12 +3262,12 @@ def custom_plugin_decoder(pluginDict):
     return namedtuple('X', pluginDict.keys())(*pluginDict.values())

 #-------------------------------------------------------------------------------
-def run_plugin_scripts(runType):
+def run_plugin_scripts(db, runType):

     global plugins, tz, mySchedules

     # Header
-    updateState(sql_connection,"Run: Plugins")
+    updateState(db,"Run: Plugins")

     mylog('debug', ['    [Plugins] Check if any plugins need to be executed on run type: ', runType])

From d5b7023927ea74b96563d69d264a903ba5a63737 Mon Sep 17 00:00:00 2001
From: Data-Monkey
Date: Sun, 21 May 2023 21:22:09 +1000
Subject: [PATCH 04/20] more splitting

---
 pialert/api.py | 102 +++++++++++++++++++++++++++++++++++++++++++++++++
 1 file changed, 102 insertions(+)
 create mode 100644 pialert/api.py

diff --git a/pialert/api.py b/pialert/api.py
new file mode 100644
index 00000000..bb87ffa4
--- /dev/null
+++ b/pialert/api.py
@@ -0,0 +1,102 @@
+import json
+
+
+# pialert modules
+from const import pialertPath
+from logger import mylog
+from files import write_file
+from database import *
+from conf import ENABLE_PLUGINS, API_CUSTOM_SQL
+
+apiEndpoints = []
+
+#===============================================================================
+# API
+#===============================================================================
+def update_api(isNotification = False, updateOnlyDataSources = []):
+    mylog('verbose', ['    [API] Update API not doing anything for now !'])
+    return
+
+    folder = pialertPath + '/front/api/'
+
+    if isNotification:
+        # Update last notification alert in all formats
+        mylog('verbose', ['    [API] Updating notification_* files in /front/api'])
+
+        write_file(folder + 'notification_text.txt'        , mail_text)
+        write_file(folder + 'notification_text.html'       , mail_html)
+        write_file(folder + 'notification_json_final.json' , json.dumps(json_final))
+
+    # Save plugins
+    if ENABLE_PLUGINS:
+        write_file(folder + 'plugins.json'  , json.dumps({"data" : plugins}))
+
+    # prepare database tables we want to expose
+    dataSourcesSQLs = [
+        ["devices", sql_devices_all],
+        ["nmap_scan", sql_nmap_scan_all],
+        ["pholus_scan", sql_pholus_scan_all],
+        ["events_pending_alert", sql_events_pending_alert],
+        ["settings", sql_settings],
+        ["plugins_events", sql_plugins_events],
+        ["plugins_history", sql_plugins_history],
+        ["plugins_objects", sql_plugins_objects],
+        ["language_strings", sql_language_strings],
+        ["custom_endpoint", API_CUSTOM_SQL],
+    ]
+
+    # Save selected database tables
+    for dsSQL in dataSourcesSQLs:
+
+        if updateOnlyDataSources == [] or dsSQL[0] in updateOnlyDataSources:
+
+            api_endpoint_class(dsSQL[1], folder + 'table_' + dsSQL[0] + '.json')
+
+
+#-------------------------------------------------------------------------------
+
+
+class api_endpoint_class:
+    def __init__(self, sql, path):
+
+        global apiEndpoints
+
+        self.sql = sql
+        self.jsonData = get_table_as_json(sql).json
+        self.path = path
+        self.fileName = path.split('/')[-1]
+        self.hash = hash(json.dumps(self.jsonData))
+
+        # check if the endpoint needs to be updated
+        found = False
+        changed = False
+        changedIndex = -1
+        index = 0
+
+        # search previous endpoint states to check if API needs updating
+        for endpoint in apiEndpoints:
+            # match sql and API endpoint path
+            if endpoint.sql == self.sql and endpoint.path == self.path:
+                found = True
+                if endpoint.hash != self.hash:
+                    changed = True
+                    changedIndex = index
+
+            index = index + 1
+
+        # check if API endpoints have changed or if it's a new one
+        if not found or changed:
+
+            mylog('verbose', [f'    [API] Updating {self.fileName} file in /front/api'])
+
+            write_file(self.path, json.dumps(self.jsonData))
+
+            if not found:
+                apiEndpoints.append(self)
+
+            elif changed and changedIndex != -1 and changedIndex < len(apiEndpoints):
+                # update hash
+                apiEndpoints[changedIndex].hash = self.hash
+        else:
+            mylog('info', [f'    [API] ERROR Updating {self.fileName}'])
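The api_endpoint_class above only rewrites a JSON file when the hash of the rendered payload differs from the last one it wrote, so unchanged tables cost no disk writes. A reduced sketch of that change-detection idea; the endpoint registry is shrunk to a dict and the file path is illustrative:

    import json

    apiEndpoints = {}   # path -> hash of the payload last written

    def update_endpoint(path, data):
        new_hash = hash(json.dumps(data, sort_keys=True))
        if apiEndpoints.get(path) == new_hash:
            return False                      # payload unchanged: skip the write
        with open(path, 'w') as f:
            json.dump(data, f)
        apiEndpoints[path] = new_hash         # remember what we wrote
        return True

    print(update_endpoint('/tmp/table_devices.json', {"data": [1, 2]}))  # True
    print(update_endpoint('/tmp/table_devices.json', {"data": [1, 2]}))  # False

Since the front end polls these files, skipping identical writes also avoids needlessly bumping file modification times that the UI might watch.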
From 03163e424f79cfda8c8541990b1ea941dd58ea8c Mon Sep 17 00:00:00 2001
From: Data-Monkey
Date: Sun, 21 May 2023 21:22:51 +1000
Subject: [PATCH 05/20] more splitting done

---
 db/pialert.db       |  Bin 245760 -> 245760 bytes
 pialert/conf.py     |   95 ++++
 pialert/config.py   |   22 -
 pialert/database.py |   31 +-
 pialert/files.py    |   26 +
 pialert/helper.py   |  389 +++++++++++++-
 pialert/internet.py |  195 +++++++
 pialert/logger.py   |   15 +-
 pialert/pialert.py  | 1179 +------------------------------------------
 pialert/plugin.py   |  556 ++++++++++++++++++++
 10 files changed, 1314 insertions(+), 1194 deletions(-)
 create mode 100644 pialert/conf.py
 delete mode 100644 pialert/config.py
 create mode 100644 pialert/files.py
 create mode 100644 pialert/internet.py
 create mode 100644 pialert/plugin.py

diff --git a/db/pialert.db b/db/pialert.db
index 4b5bd1db4b2a54485a683c8efda866c6452e39e6..16f9e46fa7d7de23b58a85777e24cdcfca00ccd2 100755
GIT binary patch
[base85-encoded binary payload omitted: delta 436 / delta 383 for the SQLite database file]

diff --git a/pialert/conf.py b/pialert/conf.py
new file mode 100644
index 00000000..71fe51a7
--- /dev/null
+++ b/pialert/conf.py
@@ -0,0 +1,95 @@
+""" config related functions for Pi.Alert """
+
+mySettings = []
+
+# General
+ENABLE_ARPSCAN = 
True +SCAN_SUBNETS = ['192.168.1.0/24 --interface=eth1', '192.168.1.0/24 --interface=eth0'] +LOG_LEVEL = 'verbose' +TIMEZONE = 'Europe/Berlin' +ENABLE_PLUGINS = True +PIALERT_WEB_PROTECTION = False +PIALERT_WEB_PASSWORD = '8d969eef6ecad3c29a3a629280e686cf0c3f5d5a86aff3ca12020c923adc6c92' +INCLUDED_SECTIONS = ['internet', 'new_devices', 'down_devices', 'events', 'ports'] +SCAN_CYCLE_MINUTES = 5 +DAYS_TO_KEEP_EVENTS = 90 +REPORT_DASHBOARD_URL = 'http://pi.alert/' +DIG_GET_IP_ARG = '-4 myip.opendns.com @resolver1.opendns.com' +UI_LANG = 'English' +UI_PRESENCE = ['online', 'offline', 'archived'] + +tz = '' + +# Email +REPORT_MAIL = False +SMTP_SERVER = '' +SMTP_PORT = 587 +REPORT_TO = 'user@gmail.com' +REPORT_FROM = 'Pi.Alert ' +SMTP_SKIP_LOGIN = False +SMTP_USER = '' +SMTP_PASS = '' +SMTP_SKIP_TLS = False +SMTP_FORCE_SSL = False + +# Webhooks +REPORT_WEBHOOK = False +WEBHOOK_URL = '' +WEBHOOK_PAYLOAD = 'json' +WEBHOOK_REQUEST_METHOD = 'GET' + +# Apprise +REPORT_APPRISE = False +APPRISE_HOST = '' +APPRISE_URL = '' +APPRISE_PAYLOAD = 'html' + +# NTFY +REPORT_NTFY = False +NTFY_HOST ='https://ntfy.sh' +NTFY_TOPIC ='' +NTFY_USER = '' +NTFY_PASSWORD = '' + +# PUSHSAFER +REPORT_PUSHSAFER = False +PUSHSAFER_TOKEN = 'ApiKey' + +# MQTT +REPORT_MQTT = False +MQTT_BROKER = '' +MQTT_PORT = 1883 +MQTT_USER = '' +MQTT_PASSWORD = '' +MQTT_QOS = 0 +MQTT_DELAY_SEC = 2 + +# DynDNS +DDNS_ACTIVE = False +DDNS_DOMAIN = 'your_domain.freeddns.org' +DDNS_USER = 'dynu_user' +DDNS_PASSWORD = 'A0000000B0000000C0000000D0000000' +DDNS_UPDATE_URL = 'https://api.dynu.com/nic/update?' + +# PiHole +PIHOLE_ACTIVE = False +DHCP_ACTIVE = False + +# PHOLUS +PHOLUS_ACTIVE = False +PHOLUS_TIMEOUT = 20 +PHOLUS_FORCE = False +PHOLUS_RUN = 'once' +PHOLUS_RUN_TIMEOUT = 600 +PHOLUS_RUN_SCHD = '0 4 * * *' +PHOLUS_DAYS_DATA = 0 + +# Nmap +NMAP_ACTIVE = True +NMAP_TIMEOUT = 150 +NMAP_RUN = 'none' +NMAP_RUN_SCHD = '0 2 * * *' +NMAP_ARGS = '-p -10000' + +# API +API_CUSTOM_SQL = 'SELECT * FROM Devices WHERE dev_PresentLastScan = 0' \ No newline at end of file diff --git a/pialert/config.py b/pialert/config.py deleted file mode 100644 index e0da2934..00000000 --- a/pialert/config.py +++ /dev/null @@ -1,22 +0,0 @@ - - -global mySettings, mySettingsSQLsafe -#------------------------------------------------------------------------------- -# Import user values -# Check config dictionary -def ccd(key, default, config, name, inputtype, options, group, events=[], desc = "", regex = ""): - result = default - - # use existing value if already supplied, otherwise default value is used - if key in config: - result = config[key] - - global mySettings - - if inputtype == 'text': - result = result.replace('\'', "{s-quote}") - - mySettingsSQLsafe.append((key, name, desc, inputtype, options, regex, str(result), group, str(events))) - mySettings.append((key, name, desc, inputtype, options, regex, result, group, str(events))) - - return result \ No newline at end of file diff --git a/pialert/database.py b/pialert/database.py index 071cfbaa..5c750d69 100644 --- a/pialert/database.py +++ b/pialert/database.py @@ -2,8 +2,10 @@ import sqlite3 +# pialert modules from const import fullDbPath -from logger import print_log, mylog +from logger import mylog +from helper import initOrSetParam, json_struc, row_to_json #=============================================================================== @@ -141,14 +143,7 @@ class DB(): -#------------------------------------------------------------------------------- -def initOrSetParam(db, parID, parValue): - sql_connection = 
db.sql_connection - sql = db.sql - sql.execute ("INSERT INTO Parameters(par_ID, par_Value) VALUES('"+str(parID)+"', '"+str(parValue)+"') ON CONFLICT(par_ID) DO UPDATE SET par_Value='"+str(parValue)+"' where par_ID='"+str(parID)+"'") - - db.commitDB() #------------------------------------------------------------------------------- def updateState(db, newState): @@ -159,6 +154,26 @@ def updateState(db, newState): db.commitDB() +#------------------------------------------------------------------------------- +def get_table_as_json(db, sqlQuery): + + db.sql.execute(sqlQuery) + + columnNames = list(map(lambda x: x[0], db.sql.description)) + + rows = db.sql.fetchall() + + result = {"data":[]} + + for row in rows: + tmp = row_to_json(columnNames, row) + result["data"].append(tmp) + return json_struc(result, columnNames) + + + + + #------------------------------------------------------------------------------- diff --git a/pialert/files.py b/pialert/files.py new file mode 100644 index 00000000..e283c59c --- /dev/null +++ b/pialert/files.py @@ -0,0 +1,26 @@ +import io +import sys + + +#------------------------------------------------------------------------------- +def write_file (pPath, pText): + # Write the text depending using the correct python version + if sys.version_info < (3, 0): + file = io.open (pPath , mode='w', encoding='utf-8') + file.write ( pText.decode('unicode_escape') ) + file.close() + else: + file = open (pPath, 'w', encoding='utf-8') + if pText is None: + pText = "" + file.write (pText) + file.close() + +#------------------------------------------------------------------------------- +def get_file_content(path): + + f = open(path, 'r') + content = f.read() + f.close() + + return content \ No newline at end of file diff --git a/pialert/helper.py b/pialert/helper.py index 51eb358e..2723228d 100644 --- a/pialert/helper.py +++ b/pialert/helper.py @@ -1,10 +1,28 @@ """ Colection of generic functions to support Pi.Alert """ + import datetime import os +import sys +import io +import re import subprocess +from cron_converter import Cron +from pytz import timezone +from datetime import timedelta +import json +import time +from pathlib import Path + + + from const import * -from logger import mylog, logResult +from logger import mylog, logResult, print_log +from conf import tz +from files import write_file +# from api import update_api # to avoid circular reference +from plugin import get_plugins_configs, get_setting, print_plugin_info + #------------------------------------------------------------------------------- def timeNow(): @@ -103,4 +121,371 @@ def filePermissions(): initialiseFile(fullDbPath, "/home/pi/pialert/back/pialert.db_bak") # last attempt - fixPermissions() \ No newline at end of file + fixPermissions() + +#------------------------------------------------------------------------------- +class schedule_class: + def __init__(self, service, scheduleObject, last_next_schedule, was_last_schedule_used, last_run = 0): + self.service = service + self.scheduleObject = scheduleObject + self.last_next_schedule = last_next_schedule + self.last_run = last_run + self.was_last_schedule_used = was_last_schedule_used + def runScheduleCheck(self): + + result = False + + # Initialize the last run time if never run before + if self.last_run == 0: + self.last_run = (datetime.datetime.now(tz) - timedelta(days=365)).replace(microsecond=0) + + # get the current time with the currently specified timezone + nowTime = datetime.datetime.now(tz).replace(microsecond=0) + + # Run the schedule if the 
current time is past the schedule time we saved last time and + # (maybe the following check is unnecessary:) + # if the last run is past the last time we run a scheduled Pholus scan + if nowTime > self.last_next_schedule and self.last_run < self.last_next_schedule: + print_log(f'Scheduler run for {self.service}: YES') + self.was_last_schedule_used = True + result = True + else: + print_log(f'Scheduler run for {self.service}: NO') + + if self.was_last_schedule_used: + self.was_last_schedule_used = False + self.last_next_schedule = self.scheduleObject.next() + + return result + + + + +#------------------------------------------------------------------------------- + +def bytes_to_string(value): + # if value is of type bytes, convert to string + if isinstance(value, bytes): + value = value.decode('utf-8') + return value + +#------------------------------------------------------------------------------- + +def if_byte_then_to_str(input): + if isinstance(input, bytes): + input = input.decode('utf-8') + input = bytes_to_string(re.sub('[^a-zA-Z0-9-_\s]', '', str(input))) + return input + +#------------------------------------------------------------------------------- +def collect_lang_strings(db, json, pref): + + for prop in json["localized"]: + for language_string in json[prop]: + import_language_string(db, language_string["language_code"], pref + "_" + prop, language_string["string"]) + + +#------------------------------------------------------------------------------- +def initOrSetParam(db, parID, parValue): + sql_connection = db.sql_connection + sql = db.sql + + sql.execute ("INSERT INTO Parameters(par_ID, par_Value) VALUES('"+str(parID)+"', '"+str(parValue)+"') ON CONFLICT(par_ID) DO UPDATE SET par_Value='"+str(parValue)+"' where par_ID='"+str(parID)+"'") + + db.commitDB() + + +#=============================================================================== +# Initialise user defined values +#=============================================================================== +# We need access to the DB to save new values so need to define DB access methods first +#------------------------------------------------------------------------------- + +#------------------------------------------------------------------------------- +# Import user values +# Check config dictionary +def ccd(key, default, config, name, inputtype, options, group, events=[], desc = "", regex = ""): + result = default + + # use existing value if already supplied, otherwise default value is used + if key in config: + result = config[key] + + if inputtype == 'text': + result = result.replace('\'', "{s-quote}") + + mySettingsSQLsafe.append((key, name, desc, inputtype, options, regex, str(result), group, str(events))) + mySettings.append((key, name, desc, inputtype, options, regex, result, group, str(events))) + + return result +#------------------------------------------------------------------------------- + +def importConfigs (db): + + sql = db.sql + + # Specify globals so they can be overwritten with the new config + global lastTimeImported, mySettings, mySettingsSQLsafe, plugins, plugins_once_run + lastTimeImported = 0 + # General + global ENABLE_ARPSCAN, SCAN_SUBNETS, LOG_LEVEL, TIMEZONE, ENABLE_PLUGINS, PIALERT_WEB_PROTECTION, PIALERT_WEB_PASSWORD, INCLUDED_SECTIONS, SCAN_CYCLE_MINUTES, DAYS_TO_KEEP_EVENTS, REPORT_DASHBOARD_URL, DIG_GET_IP_ARG, UI_LANG + # Email + global REPORT_MAIL, SMTP_SERVER, SMTP_PORT, REPORT_TO, REPORT_FROM, SMTP_SKIP_LOGIN, SMTP_USER, SMTP_PASS, SMTP_SKIP_TLS, SMTP_FORCE_SSL + # Webhooks + 
global REPORT_WEBHOOK, WEBHOOK_URL, WEBHOOK_PAYLOAD, WEBHOOK_REQUEST_METHOD + # Apprise + global REPORT_APPRISE, APPRISE_HOST, APPRISE_URL, APPRISE_PAYLOAD + # NTFY + global REPORT_NTFY, NTFY_HOST, NTFY_TOPIC, NTFY_USER, NTFY_PASSWORD + # PUSHSAFER + global REPORT_PUSHSAFER, PUSHSAFER_TOKEN + # MQTT + global REPORT_MQTT, MQTT_BROKER, MQTT_PORT, MQTT_USER, MQTT_PASSWORD, MQTT_QOS, MQTT_DELAY_SEC + # DynDNS + global DDNS_ACTIVE, DDNS_DOMAIN, DDNS_USER, DDNS_PASSWORD, DDNS_UPDATE_URL + # PiHole + global PIHOLE_ACTIVE, DHCP_ACTIVE + # Pholus + global PHOLUS_ACTIVE, PHOLUS_TIMEOUT, PHOLUS_FORCE, PHOLUS_DAYS_DATA, PHOLUS_RUN, PHOLUS_RUN_SCHD, PHOLUS_RUN_TIMEOUT + # Nmap + global NMAP_ACTIVE, NMAP_TIMEOUT, NMAP_RUN, NMAP_RUN_SCHD, NMAP_ARGS + # API + global API_CUSTOM_SQL + + # get config file + config_file = Path(fullConfPath) + + # Skip import if last time of import is NEWER than file age + if (os.path.getmtime(config_file) < lastTimeImported) : + return + + mySettings = [] # reset settings + mySettingsSQLsafe = [] # same as above but safe to be passed into a SQL query + + # load the variables from pialert.conf + code = compile(config_file.read_text(), config_file.name, "exec") + c_d = {} # config dictionary + exec(code, {"__builtins__": {}}, c_d) + + # Import setting if found in the dictionary + # General + ENABLE_ARPSCAN = ccd('ENABLE_ARPSCAN', True , c_d, 'Enable arpscan', 'boolean', '', 'General', ['run']) + SCAN_SUBNETS = ccd('SCAN_SUBNETS', ['192.168.1.0/24 --interface=eth1', '192.168.1.0/24 --interface=eth0'] , c_d, 'Subnets to scan', 'subnets', '', 'General') + LOG_LEVEL = ccd('LOG_LEVEL', 'verbose' , c_d, 'Log verboseness', 'selecttext', "['none', 'minimal', 'verbose', 'debug']", 'General') + TIMEZONE = ccd('TIMEZONE', 'Europe/Berlin' , c_d, 'Time zone', 'text', '', 'General') + ENABLE_PLUGINS = ccd('ENABLE_PLUGINS', True , c_d, 'Enable plugins', 'boolean', '', 'General') + PIALERT_WEB_PROTECTION = ccd('PIALERT_WEB_PROTECTION', False , c_d, 'Enable logon', 'boolean', '', 'General') + PIALERT_WEB_PASSWORD = ccd('PIALERT_WEB_PASSWORD', '8d969eef6ecad3c29a3a629280e686cf0c3f5d5a86aff3ca12020c923adc6c92' , c_d, 'Logon password', 'readonly', '', 'General') + INCLUDED_SECTIONS = ccd('INCLUDED_SECTIONS', ['internet', 'new_devices', 'down_devices', 'events', 'ports'] , c_d, 'Notify on', 'multiselect', "['internet', 'new_devices', 'down_devices', 'events', 'ports', 'plugins']", 'General') + SCAN_CYCLE_MINUTES = ccd('SCAN_CYCLE_MINUTES', 5 , c_d, 'Scan cycle delay (m)', 'integer', '', 'General') + DAYS_TO_KEEP_EVENTS = ccd('DAYS_TO_KEEP_EVENTS', 90 , c_d, 'Delete events days', 'integer', '', 'General') + REPORT_DASHBOARD_URL = ccd('REPORT_DASHBOARD_URL', 'http://pi.alert/' , c_d, 'PiAlert URL', 'text', '', 'General') + DIG_GET_IP_ARG = ccd('DIG_GET_IP_ARG', '-4 myip.opendns.com @resolver1.opendns.com' , c_d, 'DIG arguments', 'text', '', 'General') + UI_LANG = ccd('UI_LANG', 'English' , c_d, 'Language Interface', 'selecttext', "['English', 'German', 'Spanish']", 'General') + UI_PRESENCE = ccd('UI_PRESENCE', ['online', 'offline', 'archived'] , c_d, 'Include in presence', 'multiselect', "['online', 'offline', 'archived']", 'General') + + # Email + REPORT_MAIL = ccd('REPORT_MAIL', False , c_d, 'Enable email', 'boolean', '', 'Email', ['test']) + SMTP_SERVER = ccd('SMTP_SERVER', '' , c_d,'SMTP server URL', 'text', '', 'Email') + SMTP_PORT = ccd('SMTP_PORT', 587 , c_d, 'SMTP port', 'integer', '', 'Email') + REPORT_TO = ccd('REPORT_TO', 'user@gmail.com' , c_d, 'Email to', 'text', '', 'Email') + 
REPORT_FROM = ccd('REPORT_FROM', 'Pi.Alert <user@gmail.com>' , c_d, 'Email from', 'text', '', 'Email')
+    SMTP_SKIP_LOGIN = ccd('SMTP_SKIP_LOGIN', False , c_d, 'SMTP skip login', 'boolean', '', 'Email')
+    SMTP_USER = ccd('SMTP_USER', '' , c_d, 'SMTP user', 'text', '', 'Email')
+    SMTP_PASS = ccd('SMTP_PASS', '' , c_d, 'SMTP password', 'password', '', 'Email')
+    SMTP_SKIP_TLS = ccd('SMTP_SKIP_TLS', False , c_d, 'SMTP skip TLS', 'boolean', '', 'Email')
+    SMTP_FORCE_SSL = ccd('SMTP_FORCE_SSL', False , c_d, 'Force SSL', 'boolean', '', 'Email')
+
+    # Webhooks
+    REPORT_WEBHOOK = ccd('REPORT_WEBHOOK', False , c_d, 'Enable Webhooks', 'boolean', '', 'Webhooks', ['test'])
+    WEBHOOK_URL = ccd('WEBHOOK_URL', '' , c_d, 'Target URL', 'text', '', 'Webhooks')
+    WEBHOOK_PAYLOAD = ccd('WEBHOOK_PAYLOAD', 'json' , c_d, 'Payload type', 'selecttext', "['json', 'html', 'text']", 'Webhooks')
+    WEBHOOK_REQUEST_METHOD = ccd('WEBHOOK_REQUEST_METHOD', 'GET' , c_d, 'Req type', 'selecttext', "['GET', 'POST', 'PUT']", 'Webhooks')
+
+    # Apprise
+    REPORT_APPRISE = ccd('REPORT_APPRISE', False , c_d, 'Enable Apprise', 'boolean', '', 'Apprise', ['test'])
+    APPRISE_HOST = ccd('APPRISE_HOST', '' , c_d, 'Apprise host URL', 'text', '', 'Apprise')
+    APPRISE_URL = ccd('APPRISE_URL', '' , c_d, 'Apprise notification URL', 'text', '', 'Apprise')
+    APPRISE_PAYLOAD = ccd('APPRISE_PAYLOAD', 'html' , c_d, 'Payload type', 'selecttext', "['html', 'text']", 'Apprise')
+
+    # NTFY
+    REPORT_NTFY = ccd('REPORT_NTFY', False , c_d, 'Enable NTFY', 'boolean', '', 'NTFY', ['test'])
+    NTFY_HOST = ccd('NTFY_HOST', 'https://ntfy.sh' , c_d, 'NTFY host URL', 'text', '', 'NTFY')
+    NTFY_TOPIC = ccd('NTFY_TOPIC', '' , c_d, 'NTFY topic', 'text', '', 'NTFY')
+    NTFY_USER = ccd('NTFY_USER', '' , c_d, 'NTFY user', 'text', '', 'NTFY')
+    NTFY_PASSWORD = ccd('NTFY_PASSWORD', '' , c_d, 'NTFY password', 'password', '', 'NTFY')
+
+    # PUSHSAFER
+    REPORT_PUSHSAFER = ccd('REPORT_PUSHSAFER', False , c_d, 'Enable PUSHSAFER', 'boolean', '', 'PUSHSAFER', ['test'])
+    PUSHSAFER_TOKEN = ccd('PUSHSAFER_TOKEN', 'ApiKey' , c_d, 'PUSHSAFER token', 'text', '', 'PUSHSAFER')
+
+    # MQTT
+    REPORT_MQTT = ccd('REPORT_MQTT', False , c_d, 'Enable MQTT', 'boolean', '', 'MQTT')
+    MQTT_BROKER = ccd('MQTT_BROKER', '' , c_d, 'MQTT broker', 'text', '', 'MQTT')
+    MQTT_PORT = ccd('MQTT_PORT', 1883 , c_d, 'MQTT broker port', 'integer', '', 'MQTT')
+    MQTT_USER = ccd('MQTT_USER', '' , c_d, 'MQTT user', 'text', '', 'MQTT')
+    MQTT_PASSWORD = ccd('MQTT_PASSWORD', '' , c_d, 'MQTT password', 'password', '', 'MQTT')
+    MQTT_QOS = ccd('MQTT_QOS', 0 , c_d, 'MQTT Quality of Service', 'selectinteger', "['0', '1', '2']", 'MQTT')
+    MQTT_DELAY_SEC = ccd('MQTT_DELAY_SEC', 2 , c_d, 'MQTT delay', 'selectinteger', "['2', '3', '4', '5']", 'MQTT')
+
+    # DynDNS
+    DDNS_ACTIVE = ccd('DDNS_ACTIVE', False , c_d, 'Enable DynDNS', 'boolean', '', 'DynDNS')
+    DDNS_DOMAIN = ccd('DDNS_DOMAIN', 'your_domain.freeddns.org' , c_d, 'DynDNS domain URL', 'text', '', 'DynDNS')
+    DDNS_USER = ccd('DDNS_USER', 'dynu_user' , c_d, 'DynDNS user', 'text', '', 'DynDNS')
+    DDNS_PASSWORD = ccd('DDNS_PASSWORD', 'A0000000B0000000C0000000D0000000' , c_d, 'DynDNS password', 'password', '', 'DynDNS')
+    DDNS_UPDATE_URL = ccd('DDNS_UPDATE_URL', 'https://api.dynu.com/nic/update?'
, c_d, 'DynDNS update URL', 'text', '', 'DynDNS') + + # PiHole + PIHOLE_ACTIVE = ccd('PIHOLE_ACTIVE', False, c_d, 'Enable PiHole mapping', 'boolean', '', 'PiHole') + DHCP_ACTIVE = ccd('DHCP_ACTIVE', False , c_d, 'Enable PiHole DHCP', 'boolean', '', 'PiHole') + + # PHOLUS + PHOLUS_ACTIVE = ccd('PHOLUS_ACTIVE', False , c_d, 'Enable Pholus scans', 'boolean', '', 'Pholus') + PHOLUS_TIMEOUT = ccd('PHOLUS_TIMEOUT', 20 , c_d, 'Pholus timeout', 'integer', '', 'Pholus') + PHOLUS_FORCE = ccd('PHOLUS_FORCE', False , c_d, 'Pholus force check', 'boolean', '', 'Pholus') + PHOLUS_RUN = ccd('PHOLUS_RUN', 'once' , c_d, 'Pholus enable schedule', 'selecttext', "['none', 'once', 'schedule']", 'Pholus') + PHOLUS_RUN_TIMEOUT = ccd('PHOLUS_RUN_TIMEOUT', 600 , c_d, 'Pholus timeout schedule', 'integer', '', 'Pholus') + PHOLUS_RUN_SCHD = ccd('PHOLUS_RUN_SCHD', '0 4 * * *' , c_d, 'Pholus schedule', 'text', '', 'Pholus') + PHOLUS_DAYS_DATA = ccd('PHOLUS_DAYS_DATA', 0 , c_d, 'Pholus keep days', 'integer', '', 'Pholus') + + # Nmap + NMAP_ACTIVE = ccd('NMAP_ACTIVE', True , c_d, 'Enable Nmap scans', 'boolean', '', 'Nmap') + NMAP_TIMEOUT = ccd('NMAP_TIMEOUT', 150 , c_d, 'Nmap timeout', 'integer', '', 'Nmap') + NMAP_RUN = ccd('NMAP_RUN', 'none' , c_d, 'Nmap enable schedule', 'selecttext', "['none', 'once', 'schedule']", 'Nmap') + NMAP_RUN_SCHD = ccd('NMAP_RUN_SCHD', '0 2 * * *' , c_d, 'Nmap schedule', 'text', '', 'Nmap') + NMAP_ARGS = ccd('NMAP_ARGS', '-p -10000' , c_d, 'Nmap custom arguments', 'text', '', 'Nmap') + + # API + API_CUSTOM_SQL = ccd('API_CUSTOM_SQL', 'SELECT * FROM Devices WHERE dev_PresentLastScan = 0' , c_d, 'Custom endpoint', 'text', '', 'API') + + # Prepare scheduler + global tz, mySchedules, plugins + + # Init timezone in case it changed + tz = timezone(TIMEZONE) + + # reset schedules + mySchedules = [] + + # init pholus schedule + pholusSchedule = Cron(PHOLUS_RUN_SCHD).schedule(start_date=datetime.datetime.now(tz)) + mySchedules.append(schedule_class("pholus", pholusSchedule, pholusSchedule.next(), False)) + + # init nmap schedule + nmapSchedule = Cron(NMAP_RUN_SCHD).schedule(start_date=datetime.datetime.now(tz)) + mySchedules.append(schedule_class("nmap", nmapSchedule, nmapSchedule.next(), False)) + + # Format and prepare the list of subnets + userSubnets = updateSubnets(SCAN_SUBNETS) + + + + # Plugins START + # ----------------- + if ENABLE_PLUGINS: + plugins = get_plugins_configs() + + mylog('none', ['[', timeNow(), '] Plugins: Number of dynamically loaded plugins: ', len(plugins)]) + + # handle plugins + for plugin in plugins: + print_plugin_info(plugin, ['display_name','description']) + + pref = plugin["unique_prefix"] + + # if plugin["enabled"] == 'true': + + # collect plugin level language strings + collect_lang_strings(db, plugin, pref) + + for set in plugin["settings"]: + setFunction = set["function"] + # Setting code name / key + key = pref + "_" + setFunction + + v = ccd(key, set["default_value"], c_d, set["name"][0]["string"], set["type"] , str(set["options"]), pref) + + # Save the user defined value into the object + set["value"] = v + + # Setup schedules + if setFunction == 'RUN_SCHD': + newSchedule = Cron(v).schedule(start_date=datetime.datetime.now(tz)) + mySchedules.append(schedule_class(pref, newSchedule, newSchedule.next(), False)) + + # Collect settings related language strings + collect_lang_strings(db, set, pref + "_" + set["function"]) + + plugins_once_run = False + # ----------------- + # Plugins END + + + + + + # Insert settings into the DB + sql.execute ("DELETE FROM Settings") 
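+    # Note: the Settings table is rebuilt from scratch on every import -
+    # the DELETE above empties it, and the executemany below re-inserts one
+    # row per setting collected into mySettingsSQLsafe by the ccd() calls.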
+ sql.executemany ("""INSERT INTO Settings ("Code_Name", "Display_Name", "Description", "Type", "Options", + "RegEx", "Value", "Group", "Events" ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)""", mySettingsSQLsafe) + + # Used to determine the next import + lastTimeImported = time.time() + + # Is used to display a message in the UI when old (outdated) settings are loaded + initOrSetParam(db, "Back_Settings_Imported",(round(time.time() * 1000),) ) + + #commitDB(sql_connection) + db.commitDB() + + # update only the settings datasource + # update_api(False, ["settings"]) + # TO DO this creates a circular reference between API and HELPER ! + + mylog('info', ['[', timeNow(), '] Config: Imported new config']) + + +#------------------------------------------------------------------------------- +class json_struc: + def __init__(self, jsn, columnNames): + self.json = jsn + self.columnNames = columnNames + +#------------------------------------------------------------------------------- +# Creates a JSON object from a DB row +def row_to_json(names, row): + + rowEntry = {} + + index = 0 + for name in names: + rowEntry[name]= if_byte_then_to_str(row[name]) + index += 1 + + return rowEntry + +#------------------------------------------------------------------------------- +def import_language_string(db, code, key, value, extra = ""): + + db.sql.execute ("""INSERT INTO Plugins_Language_Strings ("Language_Code", "String_Key", "String_Value", "Extra") VALUES (?, ?, ?, ?)""", (str(code), str(key), str(value), str(extra))) + + db.commitDB() + + + +#------------------------------------------------------------------------------- +# Make a regular expression +# for validating an Ip-address +ipRegex = "^((25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9]?[0-9])\.){3}(25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9]?[0-9])$" + +# Define a function to +# validate an Ip address +def checkIPV4(ip): + # pass the regular expression + # and the string in search() method + if(re.search(ipRegex, ip)): + return True + else: + return False + + diff --git a/pialert/internet.py b/pialert/internet.py new file mode 100644 index 00000000..0f58f6b9 --- /dev/null +++ b/pialert/internet.py @@ -0,0 +1,195 @@ +""" internet related functions to support Pi.Alert """ + +import subprocess +import re + +# pialert modules +from database import updateState +from helper import timeNow +from logger import append_line_to_file, mylog +from const import logPath +from conf import DDNS_ACTIVE, DDNS_DOMAIN, DDNS_UPDATE_URL, DDNS_PASSWORD, DDNS_USER + + + +# need to find a better way to deal with settings ! 
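+# A minimal usage sketch (hypothetical caller; `db` is assumed to be the
+# Database wrapper from database.py and DIG_GET_IP_ARG the setting imported
+# from conf):
+#
+#   from internet import check_internet_IP
+#   check_internet_IP(db, DIG_GET_IP_ARG)  # detect, log and store internet IP changes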
+#global DDNS_ACTIVE, DDNS_DOMAIN, DDNS_UPDATE_URL, DDNS_USER, DDNS_PASSWORD + + +#=============================================================================== +# INTERNET IP CHANGE +#=============================================================================== +def check_internet_IP (db, DIG_GET_IP_ARG): + + # Header + updateState(db,"Scan: Internet IP") + mylog('verbose', ['[', timeNow(), '] Check Internet IP:']) + + # Get Internet IP + mylog('verbose', [' Retrieving Internet IP:']) + internet_IP = get_internet_IP(DIG_GET_IP_ARG) + # TESTING - Force IP + # internet_IP = "1.2.3.4" + + # Check result = IP + if internet_IP == "" : + mylog('none', [' Error retrieving Internet IP']) + mylog('none', [' Exiting...']) + return False + mylog('verbose', [' ', internet_IP]) + + # Get previous stored IP + mylog('verbose', [' Retrieving previous IP:']) + previous_IP = get_previous_internet_IP (db) + mylog('verbose', [' ', previous_IP]) + + # Check IP Change + if internet_IP != previous_IP : + mylog('info', [' New internet IP: ', internet_IP]) + save_new_internet_IP (db, internet_IP) + + else : + mylog('verbose', [' No changes to perform']) + + # Get Dynamic DNS IP + if DDNS_ACTIVE : + mylog('verbose', [' Retrieving Dynamic DNS IP']) + dns_IP = get_dynamic_DNS_IP() + + # Check Dynamic DNS IP + if dns_IP == "" or dns_IP == "0.0.0.0" : + mylog('info', [' Error retrieving Dynamic DNS IP']) + mylog('info', [' ', dns_IP]) + + # Check DNS Change + if dns_IP != internet_IP : + mylog('info', [' Updating Dynamic DNS IP']) + message = set_dynamic_DNS_IP () + mylog('info', [' ', message]) + else : + mylog('verbose', [' No changes to perform']) + else : + mylog('verbose', [' Skipping Dynamic DNS update']) + + + +#------------------------------------------------------------------------------- +def get_internet_IP (DIG_GET_IP_ARG): + # BUGFIX #46 - curl http://ipv4.icanhazip.com repeatedly is very slow + # Using 'dig' + dig_args = ['dig', '+short'] + DIG_GET_IP_ARG.strip().split() + try: + cmd_output = subprocess.check_output (dig_args, universal_newlines=True) + except subprocess.CalledProcessError as e: + mylog('none', [e.output]) + cmd_output = '' # no internet + + # Check result is an IP + IP = check_IP_format (cmd_output) + + # Handle invalid response + if IP == '': + IP = '0.0.0.0' + + return IP + +#------------------------------------------------------------------------------- +def get_previous_internet_IP (db): + + previous_IP = '0.0.0.0' + + # get previous internet IP stored in DB + db.sql.execute ("SELECT dev_LastIP FROM Devices WHERE dev_MAC = 'Internet' ") + result = db.sql.fetchone() + + db.commitDB() + + if result is not None and len(result) > 0 : + previous_IP = result[0] + + # return previous IP + return previous_IP + + + +#------------------------------------------------------------------------------- +def save_new_internet_IP (db, pNewIP): + # Log new IP into logfile + append_line_to_file (logPath + '/IP_changes.log', + '['+str(timeNow()) +']\t'+ pNewIP +'\n') + + prevIp = get_previous_internet_IP(db) + # Save event + db.sql.execute ("""INSERT INTO Events (eve_MAC, eve_IP, eve_DateTime, + eve_EventType, eve_AdditionalInfo, + eve_PendingAlertEmail) + VALUES ('Internet', ?, ?, 'Internet IP Changed', + 'Previous Internet IP: '|| ?, 1) """, + (pNewIP, timeNow(), prevIp) ) + + # Save new IP + db.sql.execute ("""UPDATE Devices SET dev_LastIP = ? 
+                    WHERE dev_MAC = 'Internet' """,
+                    (pNewIP,) )
+
+    # commit changes
+    db.commitDB()
+
+#-------------------------------------------------------------------------------
+def check_IP_format (pIP):
+    # Check IP format
+    IPv4SEG  = r'(?:25[0-5]|(?:2[0-4]|1{0,1}[0-9]){0,1}[0-9])'
+    IPv4ADDR = r'(?:(?:' + IPv4SEG + r'\.){3,3}' + IPv4SEG + r')'
+    IP = re.search(IPv4ADDR, pIP)
+
+    # Return error if not IP
+    if IP is None :
+        return ""
+
+    # Return IP
+    return IP.group(0)
+
+
+
+#-------------------------------------------------------------------------------
+def get_dynamic_DNS_IP ():
+    # Using OpenDNS server
+    # dig_args = ['dig', '+short', DDNS_DOMAIN, '@resolver1.opendns.com']
+
+    # Using default DNS server
+    dig_args = ['dig', '+short', DDNS_DOMAIN]
+
+    try:
+        # try running a subprocess
+        dig_output = subprocess.check_output (dig_args, universal_newlines=True)
+    except subprocess.CalledProcessError as e:
+        # An error occurred, handle it
+        mylog('none', [e.output])
+        dig_output = '' # probably no internet
+
+    # Check result is an IP
+    IP = check_IP_format (dig_output)
+
+    # Handle invalid response
+    if IP == '':
+        IP = '0.0.0.0'
+
+    return IP
+
+#-------------------------------------------------------------------------------
+def set_dynamic_DNS_IP ():
+    try:
+        # try running a subprocess
+        # Update Dynamic IP
+        curl_output = subprocess.check_output (['curl', '-s',
+                                                DDNS_UPDATE_URL +
+                                                'username=' + DDNS_USER +
+                                                '&password=' + DDNS_PASSWORD +
+                                                '&hostname=' + DDNS_DOMAIN],
+                                                universal_newlines=True)
+    except subprocess.CalledProcessError as e:
+        # An error occurred, handle it
+        mylog('none', [e.output])
+        curl_output = ""
+
+    return curl_output
diff --git a/pialert/logger.py b/pialert/logger.py
index 154a1082..d090f79d 100644
--- a/pialert/logger.py
+++ b/pialert/logger.py
@@ -1,6 +1,8 @@
 """ Colection of functions to support all logging for Pi.Alert """
-
+import sys
+import io
 import datetime
+
 from const import *
@@ -77,3 +79,14 @@ def logResult (stdout, stderr):
     if stdout != None:
         append_file_binary (logPath + '/stdout.log', stdout)
+#-------------------------------------------------------------------------------
+def append_line_to_file (pPath, pText):
+    # append the line using the correct Python version
+    if sys.version_info < (3, 0):
+        file = io.open (pPath , mode='a', encoding='utf-8')
+        file.write ( pText.decode('unicode_escape') )
+        file.close()
+    else:
+        file = open (pPath, 'a', encoding='utf-8')
+        file.write (pText)
+        file.close()
\ No newline at end of file
diff --git a/pialert/pialert.py b/pialert/pialert.py
index cc0edbb8..33fbe889 100755
--- a/pialert/pialert.py
+++ b/pialert/pialert.py
@@ -38,16 +38,25 @@ from paho.mqtt import client as mqtt_client
 import threading
 from pathlib import Path
 from cron_converter import Cron
-from pytz import timezone
+
 from json2table import convert
 import hashlib
 import multiprocessing
+
+# pialert modules
 from const import *
+from conf import *
+# from config import DIG_GET_IP_ARG, ENABLE_PLUGINS
-from logger import mylog, print_log, logResult
+from logger import append_line_to_file, mylog, print_log, logResult
-from helper import filePermissions, timeNow, updateSubnets
+from helper import bytes_to_string, checkIPV4, filePermissions, importConfigs, timeNow, updateSubnets, write_file
 from database import *
-from config import ccd
+from internet import check_IP_format, check_internet_IP, get_internet_IP
+from api import update_api
+from files import get_file_content
+from plugin import execute_plugin, get_plugin_setting,
plugin_object_class, print_plugin_info + + # Global variables @@ -65,242 +74,6 @@ sql_connection = None - - -#=============================================================================== -# Initialise user defined values -#=============================================================================== -# We need access to the DB to save new values so need to define DB access methods first -#------------------------------------------------------------------------------- - - -#------------------------------------------------------------------------------- - -def importConfigs (db): - - # Specify globals so they can be overwritten with the new config - global lastTimeImported, mySettings, mySettingsSQLsafe, plugins, plugins_once_run - # General - global ENABLE_ARPSCAN, SCAN_SUBNETS, LOG_LEVEL, TIMEZONE, ENABLE_PLUGINS, PIALERT_WEB_PROTECTION, PIALERT_WEB_PASSWORD, INCLUDED_SECTIONS, SCAN_CYCLE_MINUTES, DAYS_TO_KEEP_EVENTS, REPORT_DASHBOARD_URL, DIG_GET_IP_ARG, UI_LANG - # Email - global REPORT_MAIL, SMTP_SERVER, SMTP_PORT, REPORT_TO, REPORT_FROM, SMTP_SKIP_LOGIN, SMTP_USER, SMTP_PASS, SMTP_SKIP_TLS, SMTP_FORCE_SSL - # Webhooks - global REPORT_WEBHOOK, WEBHOOK_URL, WEBHOOK_PAYLOAD, WEBHOOK_REQUEST_METHOD - # Apprise - global REPORT_APPRISE, APPRISE_HOST, APPRISE_URL, APPRISE_PAYLOAD - # NTFY - global REPORT_NTFY, NTFY_HOST, NTFY_TOPIC, NTFY_USER, NTFY_PASSWORD - # PUSHSAFER - global REPORT_PUSHSAFER, PUSHSAFER_TOKEN - # MQTT - global REPORT_MQTT, MQTT_BROKER, MQTT_PORT, MQTT_USER, MQTT_PASSWORD, MQTT_QOS, MQTT_DELAY_SEC - # DynDNS - global DDNS_ACTIVE, DDNS_DOMAIN, DDNS_USER, DDNS_PASSWORD, DDNS_UPDATE_URL - # PiHole - global PIHOLE_ACTIVE, DHCP_ACTIVE - # Pholus - global PHOLUS_ACTIVE, PHOLUS_TIMEOUT, PHOLUS_FORCE, PHOLUS_DAYS_DATA, PHOLUS_RUN, PHOLUS_RUN_SCHD, PHOLUS_RUN_TIMEOUT - # Nmap - global NMAP_ACTIVE, NMAP_TIMEOUT, NMAP_RUN, NMAP_RUN_SCHD, NMAP_ARGS - # API - global API_CUSTOM_SQL - - # get config file - config_file = Path(fullConfPath) - - # Skip import if last time of import is NEWER than file age - if (os.path.getmtime(config_file) < lastTimeImported) : - return - - mySettings = [] # reset settings - mySettingsSQLsafe = [] # same as above but safe to be passed into a SQL query - - # load the variables from pialert.conf - code = compile(config_file.read_text(), config_file.name, "exec") - c_d = {} # config dictionary - exec(code, {"__builtins__": {}}, c_d) - - # Import setting if found in the dictionary - # General - ENABLE_ARPSCAN = ccd('ENABLE_ARPSCAN', True , c_d, 'Enable arpscan', 'boolean', '', 'General', ['run']) - SCAN_SUBNETS = ccd('SCAN_SUBNETS', ['192.168.1.0/24 --interface=eth1', '192.168.1.0/24 --interface=eth0'] , c_d, 'Subnets to scan', 'subnets', '', 'General') - LOG_LEVEL = ccd('LOG_LEVEL', 'verbose' , c_d, 'Log verboseness', 'selecttext', "['none', 'minimal', 'verbose', 'debug']", 'General') - TIMEZONE = ccd('TIMEZONE', 'Europe/Berlin' , c_d, 'Time zone', 'text', '', 'General') - ENABLE_PLUGINS = ccd('ENABLE_PLUGINS', True , c_d, 'Enable plugins', 'boolean', '', 'General') - PIALERT_WEB_PROTECTION = ccd('PIALERT_WEB_PROTECTION', False , c_d, 'Enable logon', 'boolean', '', 'General') - PIALERT_WEB_PASSWORD = ccd('PIALERT_WEB_PASSWORD', '8d969eef6ecad3c29a3a629280e686cf0c3f5d5a86aff3ca12020c923adc6c92' , c_d, 'Logon password', 'readonly', '', 'General') - INCLUDED_SECTIONS = ccd('INCLUDED_SECTIONS', ['internet', 'new_devices', 'down_devices', 'events', 'ports'] , c_d, 'Notify on', 'multiselect', "['internet', 'new_devices', 'down_devices', 'events', 'ports', 
'plugins']", 'General') - SCAN_CYCLE_MINUTES = ccd('SCAN_CYCLE_MINUTES', 5 , c_d, 'Scan cycle delay (m)', 'integer', '', 'General') - DAYS_TO_KEEP_EVENTS = ccd('DAYS_TO_KEEP_EVENTS', 90 , c_d, 'Delete events days', 'integer', '', 'General') - REPORT_DASHBOARD_URL = ccd('REPORT_DASHBOARD_URL', 'http://pi.alert/' , c_d, 'PiAlert URL', 'text', '', 'General') - DIG_GET_IP_ARG = ccd('DIG_GET_IP_ARG', '-4 myip.opendns.com @resolver1.opendns.com' , c_d, 'DIG arguments', 'text', '', 'General') - UI_LANG = ccd('UI_LANG', 'English' , c_d, 'Language Interface', 'selecttext', "['English', 'German', 'Spanish']", 'General') - UI_PRESENCE = ccd('UI_PRESENCE', ['online', 'offline', 'archived'] , c_d, 'Include in presence', 'multiselect', "['online', 'offline', 'archived']", 'General') - - # Email - REPORT_MAIL = ccd('REPORT_MAIL', False , c_d, 'Enable email', 'boolean', '', 'Email', ['test']) - SMTP_SERVER = ccd('SMTP_SERVER', '' , c_d,'SMTP server URL', 'text', '', 'Email') - SMTP_PORT = ccd('SMTP_PORT', 587 , c_d, 'SMTP port', 'integer', '', 'Email') - REPORT_TO = ccd('REPORT_TO', 'user@gmail.com' , c_d, 'Email to', 'text', '', 'Email') - REPORT_FROM = ccd('REPORT_FROM', 'Pi.Alert ' , c_d, 'Email Subject', 'text', '', 'Email') - SMTP_SKIP_LOGIN = ccd('SMTP_SKIP_LOGIN', False , c_d, 'SMTP skip login', 'boolean', '', 'Email') - SMTP_USER = ccd('SMTP_USER', '' , c_d, 'SMTP user', 'text', '', 'Email') - SMTP_PASS = ccd('SMTP_PASS', '' , c_d, 'SMTP password', 'password', '', 'Email') - SMTP_SKIP_TLS = ccd('SMTP_SKIP_TLS', False , c_d, 'SMTP skip TLS', 'boolean', '', 'Email') - SMTP_FORCE_SSL = ccd('SMTP_FORCE_SSL', False , c_d, 'Force SSL', 'boolean', '', 'Email') - - # Webhooks - REPORT_WEBHOOK = ccd('REPORT_WEBHOOK', False , c_d, 'Enable Webhooks', 'boolean', '', 'Webhooks', ['test']) - WEBHOOK_URL = ccd('WEBHOOK_URL', '' , c_d, 'Target URL', 'text', '', 'Webhooks') - WEBHOOK_PAYLOAD = ccd('WEBHOOK_PAYLOAD', 'json' , c_d, 'Payload type', 'selecttext', "['json', 'html', 'text']", 'Webhooks') - WEBHOOK_REQUEST_METHOD = ccd('WEBHOOK_REQUEST_METHOD', 'GET' , c_d, 'Req type', 'selecttext', "['GET', 'POST', 'PUT']", 'Webhooks') - - # Apprise - REPORT_APPRISE = ccd('REPORT_APPRISE', False , c_d, 'Enable Apprise', 'boolean', '', 'Apprise', ['test']) - APPRISE_HOST = ccd('APPRISE_HOST', '' , c_d, 'Apprise host URL', 'text', '', 'Apprise') - APPRISE_URL = ccd('APPRISE_URL', '' , c_d, 'Apprise notification URL', 'text', '', 'Apprise') - APPRISE_PAYLOAD = ccd('APPRISE_PAYLOAD', 'html' , c_d, 'Payload type', 'selecttext', "['html', 'text']", 'Apprise') - - # NTFY - REPORT_NTFY = ccd('REPORT_NTFY', False , c_d, 'Enable NTFY', 'boolean', '', 'NTFY', ['test']) - NTFY_HOST = ccd('NTFY_HOST', 'https://ntfy.sh' , c_d, 'NTFY host URL', 'text', '', 'NTFY') - NTFY_TOPIC = ccd('NTFY_TOPIC', '' , c_d, 'NTFY topic', 'text', '', 'NTFY') - NTFY_USER = ccd('NTFY_USER', '' , c_d, 'NTFY user', 'text', '', 'NTFY') - NTFY_PASSWORD = ccd('NTFY_PASSWORD', '' , c_d, 'NTFY password', 'password', '', 'NTFY') - - # PUSHSAFER - REPORT_PUSHSAFER = ccd('REPORT_PUSHSAFER', False , c_d, 'Enable PUSHSAFER', 'boolean', '', 'PUSHSAFER', ['test']) - PUSHSAFER_TOKEN = ccd('PUSHSAFER_TOKEN', 'ApiKey' , c_d, 'PUSHSAFER token', 'text', '', 'PUSHSAFER') - - # MQTT - REPORT_MQTT = ccd('REPORT_MQTT', False , c_d, 'Enable MQTT', 'boolean', '', 'MQTT') - MQTT_BROKER = ccd('MQTT_BROKER', '' , c_d, 'MQTT broker', 'text', '', 'MQTT') - MQTT_PORT = ccd('MQTT_PORT', 1883 , c_d, 'MQTT broker port', 'integer', '', 'MQTT') - MQTT_USER = ccd('MQTT_USER', '' , c_d, 'MQTT 
user', 'text', '', 'MQTT') - MQTT_PASSWORD = ccd('MQTT_PASSWORD', '' , c_d, 'MQTT password', 'password', '', 'MQTT') - MQTT_QOS = ccd('MQTT_QOS', 0 , c_d, 'MQTT Quality of Service', 'selectinteger', "['0', '1', '2']", 'MQTT') - MQTT_DELAY_SEC = ccd('MQTT_DELAY_SEC', 2 , c_d, 'MQTT delay', 'selectinteger', "['2', '3', '4', '5']", 'MQTT') - - # DynDNS - DDNS_ACTIVE = ccd('DDNS_ACTIVE', False , c_d, 'Enable DynDNS', 'boolean', '', 'DynDNS') - DDNS_DOMAIN = ccd('DDNS_DOMAIN', 'your_domain.freeddns.org' , c_d, 'DynDNS domain URL', 'text', '', 'DynDNS') - DDNS_USER = ccd('DDNS_USER', 'dynu_user' , c_d, 'DynDNS user', 'text', '', 'DynDNS') - DDNS_PASSWORD = ccd('DDNS_PASSWORD', 'A0000000B0000000C0000000D0000000' , c_d, 'DynDNS password', 'password', '', 'DynDNS') - DDNS_UPDATE_URL = ccd('DDNS_UPDATE_URL', 'https://api.dynu.com/nic/update?' , c_d, 'DynDNS update URL', 'text', '', 'DynDNS') - - # PiHole - PIHOLE_ACTIVE = ccd('PIHOLE_ACTIVE', False, c_d, 'Enable PiHole mapping', 'boolean', '', 'PiHole') - DHCP_ACTIVE = ccd('DHCP_ACTIVE', False , c_d, 'Enable PiHole DHCP', 'boolean', '', 'PiHole') - - # PHOLUS - PHOLUS_ACTIVE = ccd('PHOLUS_ACTIVE', False , c_d, 'Enable Pholus scans', 'boolean', '', 'Pholus') - PHOLUS_TIMEOUT = ccd('PHOLUS_TIMEOUT', 20 , c_d, 'Pholus timeout', 'integer', '', 'Pholus') - PHOLUS_FORCE = ccd('PHOLUS_FORCE', False , c_d, 'Pholus force check', 'boolean', '', 'Pholus') - PHOLUS_RUN = ccd('PHOLUS_RUN', 'once' , c_d, 'Pholus enable schedule', 'selecttext', "['none', 'once', 'schedule']", 'Pholus') - PHOLUS_RUN_TIMEOUT = ccd('PHOLUS_RUN_TIMEOUT', 600 , c_d, 'Pholus timeout schedule', 'integer', '', 'Pholus') - PHOLUS_RUN_SCHD = ccd('PHOLUS_RUN_SCHD', '0 4 * * *' , c_d, 'Pholus schedule', 'text', '', 'Pholus') - PHOLUS_DAYS_DATA = ccd('PHOLUS_DAYS_DATA', 0 , c_d, 'Pholus keep days', 'integer', '', 'Pholus') - - # Nmap - NMAP_ACTIVE = ccd('NMAP_ACTIVE', True , c_d, 'Enable Nmap scans', 'boolean', '', 'Nmap') - NMAP_TIMEOUT = ccd('NMAP_TIMEOUT', 150 , c_d, 'Nmap timeout', 'integer', '', 'Nmap') - NMAP_RUN = ccd('NMAP_RUN', 'none' , c_d, 'Nmap enable schedule', 'selecttext', "['none', 'once', 'schedule']", 'Nmap') - NMAP_RUN_SCHD = ccd('NMAP_RUN_SCHD', '0 2 * * *' , c_d, 'Nmap schedule', 'text', '', 'Nmap') - NMAP_ARGS = ccd('NMAP_ARGS', '-p -10000' , c_d, 'Nmap custom arguments', 'text', '', 'Nmap') - - # API - API_CUSTOM_SQL = ccd('API_CUSTOM_SQL', 'SELECT * FROM Devices WHERE dev_PresentLastScan = 0' , c_d, 'Custom endpoint', 'text', '', 'API') - - # Prepare scheduler - global tz, mySchedules, plugins - - # Init timezone in case it changed - tz = timezone(TIMEZONE) - - # reset schedules - mySchedules = [] - - # init pholus schedule - pholusSchedule = Cron(PHOLUS_RUN_SCHD).schedule(start_date=datetime.datetime.now(tz)) - mySchedules.append(schedule_class("pholus", pholusSchedule, pholusSchedule.next(), False)) - - # init nmap schedule - nmapSchedule = Cron(NMAP_RUN_SCHD).schedule(start_date=datetime.datetime.now(tz)) - mySchedules.append(schedule_class("nmap", nmapSchedule, nmapSchedule.next(), False)) - - # Format and prepare the list of subnets - userSubnets = updateSubnets(SCAN_SUBNETS) - - - - # Plugins START - # ----------------- - if ENABLE_PLUGINS: - plugins = get_plugins_configs() - - mylog('none', ['[', timeNow(), '] Plugins: Number of dynamically loaded plugins: ', len(plugins)]) - - # handle plugins - for plugin in plugins: - print_plugin_info(plugin, ['display_name','description']) - - pref = plugin["unique_prefix"] - - # if plugin["enabled"] == 'true': - - # collect 
plugin level language strings - collect_lang_strings(db, plugin, pref) - - for set in plugin["settings"]: - setFunction = set["function"] - # Setting code name / key - key = pref + "_" + setFunction - - v = ccd(key, set["default_value"], c_d, set["name"][0]["string"], set["type"] , str(set["options"]), pref) - - # Save the user defined value into the object - set["value"] = v - - # Setup schedules - if setFunction == 'RUN_SCHD': - newSchedule = Cron(v).schedule(start_date=datetime.datetime.now(tz)) - mySchedules.append(schedule_class(pref, newSchedule, newSchedule.next(), False)) - - # Collect settings related language strings - collect_lang_strings(db, set, pref + "_" + set["function"]) - - plugins_once_run = False - # ----------------- - # Plugins END - - - - - - # Insert settings into the DB - sql.execute ("DELETE FROM Settings") - sql.executemany ("""INSERT INTO Settings ("Code_Name", "Display_Name", "Description", "Type", "Options", - "RegEx", "Value", "Group", "Events" ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)""", mySettingsSQLsafe) - - # Used to determine the next import - lastTimeImported = time.time() - - # Is used to display a message in the UI when old (outdated) settings are loaded - initOrSetParam(db, "Back_Settings_Imported",(round(time.time() * 1000),) ) - - #commitDB(sql_connection) - db.commitDB() - - # update only the settings datasource - update_api(False, ["settings"]) - - mylog('info', ['[', timeNow(), '] Config: Imported new config']) - - - - - - #=============================================================================== #=============================================================================== # MAIN @@ -349,7 +122,7 @@ def main (): #=============================================================================== - # This is the mail loop of Pi.Alert + # This is the main loop of Pi.Alert #=============================================================================== while True: @@ -396,7 +169,7 @@ def main (): if last_internet_IP_scan + datetime.timedelta(minutes=3) < time_started: cycle = 'internet_IP' last_internet_IP_scan = time_started - check_internet_IP() + check_internet_IP(db,DIG_GET_IP_ARG) # Update vendors once a week if last_update_vendors + datetime.timedelta(days = 7) < time_started: @@ -523,180 +296,7 @@ def main (): #loop time.sleep(5) # wait for N seconds - -#=============================================================================== -# INTERNET IP CHANGE -#=============================================================================== -def check_internet_IP (): - # Header - updateState(db,"Scan: Internet IP") - mylog('verbose', ['[', startTime, '] Check Internet IP:']) - - # Get Internet IP - mylog('verbose', [' Retrieving Internet IP:']) - internet_IP = get_internet_IP() - # TESTING - Force IP - # internet_IP = "1.2.3.4" - - # Check result = IP - if internet_IP == "" : - mylog('none', [' Error retrieving Internet IP']) - mylog('none', [' Exiting...']) - return False - mylog('verbose', [' ', internet_IP]) - - # Get previous stored IP - mylog('verbose', [' Retrieving previous IP:']) - previous_IP = get_previous_internet_IP (db) - mylog('verbose', [' ', previous_IP]) - - # Check IP Change - if internet_IP != previous_IP : - mylog('info', [' New internet IP: ', internet_IP]) - save_new_internet_IP (db, internet_IP) - - else : - mylog('verbose', [' No changes to perform']) - - # Get Dynamic DNS IP - if DDNS_ACTIVE : - mylog('verbose', [' Retrieving Dynamic DNS IP']) - dns_IP = get_dynamic_DNS_IP() - - # Check Dynamic DNS IP - if dns_IP == "" or 
dns_IP == "0.0.0.0" : - mylog('info', [' Error retrieving Dynamic DNS IP']) - mylog('info', [' ', dns_IP]) - - # Check DNS Change - if dns_IP != internet_IP : - mylog('info', [' Updating Dynamic DNS IP']) - message = set_dynamic_DNS_IP () - mylog('info', [' ', message]) - else : - mylog('verbose', [' No changes to perform']) - else : - mylog('verbose', [' Skipping Dynamic DNS update']) - - - -#------------------------------------------------------------------------------- -def get_internet_IP (): - # BUGFIX #46 - curl http://ipv4.icanhazip.com repeatedly is very slow - # Using 'dig' - dig_args = ['dig', '+short'] + DIG_GET_IP_ARG.strip().split() - try: - cmd_output = subprocess.check_output (dig_args, universal_newlines=True) - except subprocess.CalledProcessError as e: - mylog('none', [e.output]) - cmd_output = '' # no internet - - # Check result is an IP - IP = check_IP_format (cmd_output) - - # Handle invalid response - if IP == '': - IP = '0.0.0.0' - - return IP - -#------------------------------------------------------------------------------- -def get_dynamic_DNS_IP (): - # Using OpenDNS server - # dig_args = ['dig', '+short', DDNS_DOMAIN, '@resolver1.opendns.com'] - - # Using default DNS server - dig_args = ['dig', '+short', DDNS_DOMAIN] - - try: - # try runnning a subprocess - dig_output = subprocess.check_output (dig_args, universal_newlines=True) - except subprocess.CalledProcessError as e: - # An error occured, handle it - mylog('none', [e.output]) - dig_output = '' # probably no internet - - # Check result is an IP - IP = check_IP_format (dig_output) - - # Handle invalid response - if IP == '': - IP = '0.0.0.0' - - return IP - -#------------------------------------------------------------------------------- -def set_dynamic_DNS_IP (): - try: - # try runnning a subprocess - # Update Dynamic IP - curl_output = subprocess.check_output (['curl', '-s', - DDNS_UPDATE_URL + - 'username=' + DDNS_USER + - '&password=' + DDNS_PASSWORD + - '&hostname=' + DDNS_DOMAIN], - universal_newlines=True) - except subprocess.CalledProcessError as e: - # An error occured, handle it - mylog('none', [e.output]) - curl_output = "" - - return curl_output - -#------------------------------------------------------------------------------- -def get_previous_internet_IP (db): - - previous_IP = '0.0.0.0' - - # get previous internet IP stored in DB - sql.execute ("SELECT dev_LastIP FROM Devices WHERE dev_MAC = 'Internet' ") - result = sql.fetchone() - - db.commitDB() - - if result is not None and len(result) > 0 : - previous_IP = result[0] - - # return previous IP - return previous_IP - -#------------------------------------------------------------------------------- -def save_new_internet_IP (db, pNewIP): - # Log new IP into logfile - append_line_to_file (logPath + '/IP_changes.log', - '['+str(startTime) +']\t'+ pNewIP +'\n') - - prevIp = get_previous_internet_IP(db) - # Save event - sql.execute ("""INSERT INTO Events (eve_MAC, eve_IP, eve_DateTime, - eve_EventType, eve_AdditionalInfo, - eve_PendingAlertEmail) - VALUES ('Internet', ?, ?, 'Internet IP Changed', - 'Previous Internet IP: '|| ?, 1) """, - (pNewIP, startTime, prevIp) ) - - # Save new IP - sql.execute ("""UPDATE Devices SET dev_LastIP = ? 
- WHERE dev_MAC = 'Internet' """, - (pNewIP,) ) - - # commit changes - db.commitDB() - -#------------------------------------------------------------------------------- -def check_IP_format (pIP): - # Check IP format - IPv4SEG = r'(?:25[0-5]|(?:2[0-4]|1{0,1}[0-9]){0,1}[0-9])' - IPv4ADDR = r'(?:(?:' + IPv4SEG + r'\.){3,3}' + IPv4SEG + r')' - IP = re.search(IPv4ADDR, pIP) - - # Return error if not IP - if IP is None : - return "" - - # Return IP - return IP.group(0) @@ -1023,7 +623,7 @@ def save_scanned_devices (p_arpscan_devices, p_cycle_interval): cycle) ) # Check Internet connectivity - internet_IP = get_internet_IP() + internet_IP = get_internet_IP(DIG_GET_IP_ARG) # TESTING - Force IP # internet_IP = "" if internet_IP != "" : @@ -2802,183 +2402,22 @@ def to_binary_sensor(input): return result -#=============================================================================== -# API -#=============================================================================== -def update_api(isNotification = False, updateOnlyDataSources = []): - - folder = pialertPath + '/front/api/' - - if isNotification: - # Update last notification alert in all formats - mylog('verbose', [' [API] Updating notification_* files in /front/api']) - - write_file(folder + 'notification_text.txt' , mail_text) - write_file(folder + 'notification_text.html' , mail_html) - write_file(folder + 'notification_json_final.json' , json.dumps(json_final)) - - # Save plugins - if ENABLE_PLUGINS: - write_file(folder + 'plugins.json' , json.dumps({"data" : plugins})) - - # prepare database tables we want to expose - dataSourcesSQLs = [ - ["devices", sql_devices_all], - ["nmap_scan", sql_nmap_scan_all], - ["pholus_scan", sql_pholus_scan_all], - ["events_pending_alert", sql_events_pending_alert], - ["settings", sql_settings], - ["plugins_events", sql_plugins_events], - ["plugins_history", sql_plugins_history], - ["plugins_objects", sql_plugins_objects], - ["language_strings", sql_language_strings], - ["custom_endpoint", API_CUSTOM_SQL], - ] - - # Save selected database tables - for dsSQL in dataSourcesSQLs: - - if updateOnlyDataSources == [] or dsSQL[0] in updateOnlyDataSources: - - api_endpoint_class(dsSQL[1], folder + 'table_' + dsSQL[0] + '.json') -#------------------------------------------------------------------------------- -apiEndpoints = [] -class api_endpoint_class: - def __init__(self, sql, path): - global apiEndpoints - - self.sql = sql - self.jsonData = get_table_as_json(sql).json - self.path = path - self.fileName = path.split('/')[-1] - self.hash = hash(json.dumps(self.jsonData)) - - # check if the endpoint needs to be updated - found = False - changed = False - changedIndex = -1 - index = 0 - - # search previous endpoint states to check if API needs updating - for endpoint in apiEndpoints: - # match sql and API endpoint path - if endpoint.sql == self.sql and endpoint.path == self.path: - found = True - if endpoint.hash != self.hash: - changed = True - changedIndex = index - - index = index + 1 - - # cehck if API endpoints have changed or if it's a new one - if not found or changed: - - mylog('verbose', [f' [API] Updating {self.fileName} file in /front/api']) - - write_file(self.path, json.dumps(self.jsonData)) - - if not found: - apiEndpoints.append(self) - - elif changed and changedIndex != -1 and changedIndex < len(apiEndpoints): - # update hash - apiEndpoints[changedIndex].hash = self.hash - else: - mylog('info', [f' [API] ERROR Updating {self.fileName}']) - - 
-#------------------------------------------------------------------------------- -def get_table_as_json(sqlQuery): - - sql.execute(sqlQuery) - - columnNames = list(map(lambda x: x[0], sql.description)) - - rows = sql.fetchall() - - result = {"data":[]} - - for row in rows: - tmp = row_to_json(columnNames, row) - result["data"].append(tmp) - return json_struc(result, columnNames) - -#------------------------------------------------------------------------------- -class json_struc: - def __init__(self, jsn, columnNames): - self.json = jsn - self.columnNames = columnNames - -#------------------------------------------------------------------------------- -# Creates a JSON object from a DB row -def row_to_json(names, row): - - rowEntry = {} - - index = 0 - for name in names: - rowEntry[name]= if_byte_then_to_str(row[name]) - index += 1 - - return rowEntry #=============================================================================== # UTIL #=============================================================================== -#------------------------------------------------------------------------------- -def write_file (pPath, pText): - # Write the text depending using the correct python version - if sys.version_info < (3, 0): - file = io.open (pPath , mode='w', encoding='utf-8') - file.write ( pText.decode('unicode_escape') ) - file.close() - else: - file = open (pPath, 'w', encoding='utf-8') - if pText is None: - pText = "" - file.write (pText) - file.close() -#------------------------------------------------------------------------------- -def append_line_to_file (pPath, pText): - # append the line depending using the correct python version - if sys.version_info < (3, 0): - file = io.open (pPath , mode='a', encoding='utf-8') - file.write ( pText.decode('unicode_escape') ) - file.close() - else: - file = open (pPath, 'a', encoding='utf-8') - file.write (pText) - file.close() -#------------------------------------------------------------------------------- -# Make a regular expression -# for validating an Ip-address -ipRegex = "^((25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9]?[0-9])\.){3}(25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9]?[0-9])$" -# Define a function to -# validate an Ip address -def checkIPV4(ip): - # pass the regular expression - # and the string in search() method - if(re.search(ipRegex, ip)): - return True - else: - return False -#------------------------------------------------------------------------------- -def get_file_content(path): - f = open(path, 'r') - content = f.read() - f.close() - return content + #------------------------------------------------------------------------------- @@ -2993,21 +2432,9 @@ def sanitize_string(input): value = bytes_to_string(re.sub('[^a-zA-Z0-9-_\s]', '', str(input))) return value -#------------------------------------------------------------------------------- -def if_byte_then_to_str(input): - if isinstance(input, bytes): - input = input.decode('utf-8') - input = bytes_to_string(re.sub('[^a-zA-Z0-9-_\s]', '', str(input))) - return input -#------------------------------------------------------------------------------- -def bytes_to_string(value): - # if value is of type bytes, convert to string - if isinstance(value, bytes): - value = value.decode('utf-8') - return value #------------------------------------------------------------------------------- @@ -3157,36 +2584,9 @@ def handle_test(testType): mylog('info', ['[', timeNow(), '] END Test: ', testType]) -#------------------------------------------------------------------------------- -# Return 
setting value -def get_setting_value(key): - - set = get_setting(key) - if get_setting(key) is not None: - setVal = set[6] # setting value - setTyp = set[3] # setting type - return setVal - - return '' - -#------------------------------------------------------------------------------- -# Return whole setting touple -def get_setting(key): - result = None - # index order: key, name, desc, inputtype, options, regex, result, group, events - for set in mySettings: - if set[0] == key: - result = set - - if result is None: - mylog('info', [' Error - setting_missing - Setting not found for key: ', key]) - mylog('info', [' Error - logging the settings into file: ', logPath + '/setting_missing.json']) - write_file (logPath + '/setting_missing.json', json.dumps({ 'data' : mySettings})) - - return result #------------------------------------------------------------------------------- def isNewVersion(db): @@ -3227,39 +2627,6 @@ def isNewVersion(db): #------------------------------------------------------------------------------- # Plugins #------------------------------------------------------------------------------- -#------------------------------------------------------------------------------- -def get_plugins_configs(): - - pluginsList = [] - - # only top level directories required. No need for the loop - # for root, dirs, files in os.walk(pluginsPath): - - dirs = next(os.walk(pluginsPath))[1] - for d in dirs: # Loop over directories, not files - pluginsList.append(json.loads(get_file_content(pluginsPath + "/" + d + '/config.json'))) - - return pluginsList - -#------------------------------------------------------------------------------- -def collect_lang_strings(db, json, pref): - - for prop in json["localized"]: - for language_string in json[prop]: - import_language_string(db, language_string["language_code"], pref + "_" + prop, language_string["string"]) - - -#------------------------------------------------------------------------------- -def import_language_string(db, code, key, value, extra = ""): - - sql.execute ("""INSERT INTO Plugins_Language_Strings ("Language_Code", "String_Key", "String_Value", "Extra") VALUES (?, ?, ?, ?)""", (str(code), str(key), str(value), str(extra))) - - db.commitDB() - - -#------------------------------------------------------------------------------- -def custom_plugin_decoder(pluginDict): - return namedtuple('X', pluginDict.keys())(*pluginDict.values()) #------------------------------------------------------------------------------- def run_plugin_scripts(db, runType): @@ -3296,521 +2663,11 @@ def run_plugin_scripts(db, runType): print_plugin_info(plugin, ['display_name']) mylog('debug', [' [Plugins] CMD: ', get_plugin_setting(plugin, "CMD")["value"]]) - execute_plugin(plugin) - -#------------------------------------------------------------------------------- -# Executes the plugin command specified in the setting with the function specified as CMD -def execute_plugin(db, plugin): - - # ------- necessary settings check -------- - set = get_plugin_setting(plugin, "CMD") - - # handle missing "function":"CMD" setting - if set == None: - return - - set_CMD = set["value"] - - set = get_plugin_setting(plugin, "RUN_TIMEOUT") - - # handle missing "function":"_TIMEOUT" setting - if set == None: - set_RUN_TIMEOUT = 10 - else: - set_RUN_TIMEOUT = set["value"] - - mylog('debug', [' [Plugins] Timeout: ', set_RUN_TIMEOUT]) - - # Prepare custom params - params = [] - - if "params" in plugin: - for param in plugin["params"]: - resolved = "" - - # Get setting value - if 
param["type"] == "setting": - resolved = get_setting(param["value"]) - - if resolved != None: - resolved = plugin_param_from_glob_set(resolved) - - # Get Sql result - if param["type"] == "sql": - resolved = flatten_array(db.get_sql_array(param["value"])) - - if resolved == None: - mylog('none', [' [Plugins] The parameter "name":"', param["name"], '" was resolved as None']) - - else: - params.append( [param["name"], resolved] ) - - - # build SQL query parameters to insert into the DB - sqlParams = [] - - # python-script - if plugin['data_source'] == 'python-script': - # ------- prepare params -------- - # prepare command from plugin settings, custom parameters - command = resolve_wildcards_arr(set_CMD.split(), params) - - # Execute command - mylog('verbose', [' [Plugins] Executing: ', set_CMD]) - mylog('debug', [' [Plugins] Resolved : ', command]) - - try: - # try runnning a subprocess with a forced timeout in case the subprocess hangs - output = subprocess.check_output (command, universal_newlines=True, stderr=subprocess.STDOUT, timeout=(set_RUN_TIMEOUT)) - except subprocess.CalledProcessError as e: - # An error occured, handle it - mylog('none', [e.output]) - mylog('none', [' [Plugins] Error - enable LOG_LEVEL=debug and check logs']) - except subprocess.TimeoutExpired as timeErr: - mylog('none', [' [Plugins] TIMEOUT - the process forcefully terminated as timeout reached']) - - - # check the last run output - f = open(pluginsPath + '/' + plugin["code_name"] + '/last_result.log', 'r+') - newLines = f.read().split('\n') - f.close() - - # cleanup - select only lines containing a separator to filter out unnecessary data - newLines = list(filter(lambda x: '|' in x, newLines)) - - # # regular logging - # for line in newLines: - # append_line_to_file (pluginsPath + '/plugin.log', line +'\n') - - for line in newLines: - columns = line.split("|") - # There has to be always 9 columns - if len(columns) == 9: - sqlParams.append((plugin["unique_prefix"], columns[0], columns[1], 'null', columns[2], columns[3], columns[4], columns[5], columns[6], 0, columns[7], 'null', columns[8])) - else: - mylog('none', [' [Plugins]: Skipped invalid line in the output: ', line]) - - # pialert-db-query - if plugin['data_source'] == 'pialert-db-query': - # replace single quotes wildcards - q = set_CMD.replace("{s-quote}", '\'') - - # Execute command - mylog('verbose', [' [Plugins] Executing: ', q]) - - # set_CMD should contain a SQL query - arr = db.get_sql_array (q) - - for row in arr: - # There has to be always 9 columns - if len(row) == 9 and (row[0] in ['','null']) == False : - sqlParams.append((plugin["unique_prefix"], row[0], handle_empty(row[1]), 'null', row[2], row[3], row[4], handle_empty(row[5]), handle_empty(row[6]), 0, row[7], 'null', row[8])) - else: - mylog('none', [' [Plugins]: Skipped invalid sql result']) - - - # check if the subprocess / SQL query failed / there was no valid output - if len(sqlParams) == 0: - mylog('none', [' [Plugins] No output received from the plugin ', plugin["unique_prefix"], ' - enable LOG_LEVEL=debug and check logs']) - return - else: - mylog('verbose', ['[', timeNow(), '] [Plugins]: SUCCESS, received ', len(sqlParams), ' entries']) - - # process results if any - if len(sqlParams) > 0: - sql.executemany ("""INSERT INTO Plugins_Events ("Plugin", "Object_PrimaryID", "Object_SecondaryID", "DateTimeCreated", "DateTimeChanged", "Watched_Value1", "Watched_Value2", "Watched_Value3", "Watched_Value4", "Status" ,"Extra", "UserData", "ForeignKey") VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, 
?, ?)""", sqlParams) - db.commitDB() - sql.executemany ("""INSERT INTO Plugins_History ("Plugin", "Object_PrimaryID", "Object_SecondaryID", "DateTimeCreated", "DateTimeChanged", "Watched_Value1", "Watched_Value2", "Watched_Value3", "Watched_Value4", "Status" ,"Extra", "UserData", "ForeignKey") VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)""", sqlParams) - db.commitDB() - - process_plugin_events(plugin) - - # update API endpoints - update_api(False, ["plugins_events","plugins_objects"]) - - -#------------------------------------------------------------------------------- -# Handle empty value -def handle_empty(value): - if value == '' or value is None: - value = 'null' - - return value - -#------------------------------------------------------------------------------- -# Check if watched values changed for the given plugin -def process_plugin_events(db, plugin): - - global pluginObjects, pluginEvents - - pluginPref = plugin["unique_prefix"] - - mylog('debug', [' [Plugins] Processing : ', pluginPref]) - - plugObjectsArr = db.get_sql_array ("SELECT * FROM Plugins_Objects where Plugin = '" + str(pluginPref)+"'") - plugEventsArr = db.get_sql_array ("SELECT * FROM Plugins_Events where Plugin = '" + str(pluginPref)+"'") - - pluginObjects = [] - pluginEvents = [] - - for obj in plugObjectsArr: - pluginObjects.append(plugin_object_class(plugin, obj)) - - existingPluginObjectsCount = len(pluginObjects) - - mylog('debug', [' [Plugins] Existing objects : ', existingPluginObjectsCount]) - mylog('debug', [' [Plugins] New and existing events : ', len(plugEventsArr)]) - - # set status as new - will be changed later if conditions are fulfilled, e.g. entry found - for eve in plugEventsArr: - tmpObject = plugin_object_class(plugin, eve) - tmpObject.status = "new" - pluginEvents.append(tmpObject) - - - # Update the status to "exists" - index = 0 - for tmpObjFromEvent in pluginEvents: - - # compare hash of the IDs for uniqueness - if any(x.idsHash == tmpObject.idsHash for x in pluginObjects): - mylog('debug', [' [Plugins] Found existing object']) - pluginEvents[index].status = "exists" - index += 1 - - # Loop thru events and update the one that exist to determine if watched columns changed - index = 0 - for tmpObjFromEvent in pluginEvents: - - if tmpObjFromEvent.status == "exists": - - # compare hash of the changed watched columns for uniqueness - if any(x.watchedHash != tmpObject.watchedHash for x in pluginObjects): - pluginEvents[index].status = "watched-changed" - else: - pluginEvents[index].status = "watched-not-changed" - index += 1 - - # Merge existing plugin objects with newly discovered ones and update existing ones with new values - for eveObj in pluginEvents: - if eveObj.status == 'new': - pluginObjects.append(eveObj) - else: - index = 0 - for plugObj in pluginObjects: - # find corresponding object for the event and merge - if plugObj.idsHash == eveObj.idsHash: - pluginObjects[index] = combine_plugin_objects(plugObj, eveObj) - - index += 1 - - # Update the DB - # ---------------------------- - - # Update the Plugin_Objects - for plugObj in pluginObjects: - - createdTime = plugObj.created - - if plugObj.status == 'new': - - createdTime = plugObj.changed - - sql.execute ("INSERT INTO Plugins_Objects (Plugin, Object_PrimaryID, Object_SecondaryID, DateTimeCreated, DateTimeChanged, Watched_Value1, Watched_Value2, Watched_Value3, Watched_Value4, Status, Extra, UserData, ForeignKey) VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?)", (plugObj.pluginPref, plugObj.primaryId , plugObj.secondaryId , createdTime, 
plugObj.changed , plugObj.watched1 , plugObj.watched2 , plugObj.watched3 , plugObj.watched4 , plugObj.status , plugObj.extra, plugObj.userData, plugObj.foreignKey )) - else: - sql.execute (f"UPDATE Plugins_Objects set Plugin = '{plugObj.pluginPref}', DateTimeChanged = '{plugObj.changed}', Watched_Value1 = '{plugObj.watched1}', Watched_Value2 = '{plugObj.watched2}', Watched_Value3 = '{plugObj.watched3}', Watched_Value4 = '{plugObj.watched4}', Status = '{plugObj.status}', Extra = '{plugObj.extra}', ForeignKey = '{plugObj.foreignKey}' WHERE \"Index\" = {plugObj.index}") - - # Update the Plugins_Events with the new statuses - sql.execute (f'DELETE FROM Plugins_Events where Plugin = "{pluginPref}"') - - for plugObj in pluginEvents: - - createdTime = plugObj.created - - # use the same datetime for created and changed if a new entry - if plugObj.status == 'new': - createdTime = plugObj.changed - - # insert only events if they are to be reported on - if plugObj.status in get_plugin_setting_value(plugin, "REPORT_ON"): - - sql.execute ("INSERT INTO Plugins_Events (Plugin, Object_PrimaryID, Object_SecondaryID, DateTimeCreated, DateTimeChanged, Watched_Value1, Watched_Value2, Watched_Value3, Watched_Value4, Status, Extra, UserData, ForeignKey) VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?)", (plugObj.pluginPref, plugObj.primaryId , plugObj.secondaryId , createdTime, plugObj.changed , plugObj.watched1 , plugObj.watched2 , plugObj.watched3 , plugObj.watched4 , plugObj.status , plugObj.extra, plugObj.userData, plugObj.foreignKey )) - - # Perform databse table mapping if enabled for the plugin - if len(pluginEvents) > 0 and "mapped_to_table" in plugin: - - sqlParams = [] - - dbTable = plugin['mapped_to_table'] - - mylog('debug', [' [Plugins] Mapping objects to database table: ', dbTable]) - - # collect all columns to be mapped - mappedCols = [] - columnsStr = '' - valuesStr = '' - - for clmn in plugin['database_column_definitions']: - if 'mapped_to_column' in clmn: - mappedCols.append(clmn) - columnsStr = f'{columnsStr}, "{clmn["mapped_to_column"]}"' - valuesStr = f'{valuesStr}, ?' 
- - if len(columnsStr) > 0: - columnsStr = columnsStr[1:] # remove first ',' - valuesStr = valuesStr[1:] # remove first ',' - - # map the column names to plugin object event values - for plgEv in pluginEvents: - - tmpList = [] - - for col in mappedCols: - if col['column'] == 'Index': - tmpList.append(plgEv.index) - elif col['column'] == 'Plugin': - tmpList.append(plgEv.pluginPref) - elif col['column'] == 'Object_PrimaryID': - tmpList.append(plgEv.primaryId) - elif col['column'] == 'Object_SecondaryID': - tmpList.append(plgEv.secondaryId) - elif col['column'] == 'DateTimeCreated': - tmpList.append(plgEv.created) - elif col['column'] == 'DateTimeChanged': - tmpList.append(plgEv.changed) - elif col['column'] == 'Watched_Value1': - tmpList.append(plgEv.watched1) - elif col['column'] == 'Watched_Value2': - tmpList.append(plgEv.watched2) - elif col['column'] == 'Watched_Value3': - tmpList.append(plgEv.watched3) - elif col['column'] == 'Watched_Value4': - tmpList.append(plgEv.watched4) - elif col['column'] == 'UserData': - tmpList.append(plgEv.userData) - elif col['column'] == 'Extra': - tmpList.append(plgEv.extra) - elif col['column'] == 'Status': - tmpList.append(plgEv.status) - - sqlParams.append(tuple(tmpList)) - - q = f'INSERT into {dbTable} ({columnsStr}) VALUES ({valuesStr})' - - mylog('debug', [' [Plugins] SQL query for mapping: ', q ]) - - sql.executemany (q, sqlParams) - - db.commitDB() - -#------------------------------------------------------------------------------- -class plugin_object_class: - def __init__(self, plugin, objDbRow): - self.index = objDbRow[0] - self.pluginPref = objDbRow[1] - self.primaryId = objDbRow[2] - self.secondaryId = objDbRow[3] - self.created = objDbRow[4] - self.changed = objDbRow[5] - self.watched1 = objDbRow[6] - self.watched2 = objDbRow[7] - self.watched3 = objDbRow[8] - self.watched4 = objDbRow[9] - self.status = objDbRow[10] - self.extra = objDbRow[11] - self.userData = objDbRow[12] - self.foreignKey = objDbRow[13] - - # self.idsHash = str(hash(str(self.primaryId) + str(self.secondaryId))) - self.idsHash = str(self.primaryId) + str(self.secondaryId) - - self.watchedClmns = [] - self.watchedIndxs = [] - - setObj = get_plugin_setting(plugin, 'WATCH') - - indexNameColumnMapping = [(6, 'Watched_Value1' ), (7, 'Watched_Value2' ), (8, 'Watched_Value3' ), (9, 'Watched_Value4' )] - - if setObj is not None: - - self.watchedClmns = setObj["value"] - - for clmName in self.watchedClmns: - for mapping in indexNameColumnMapping: - if clmName == indexNameColumnMapping[1]: - self.watchedIndxs.append(indexNameColumnMapping[0]) - - tmp = '' - for indx in self.watchedIndxs: - tmp += str(objDbRow[indx]) - - self.watchedHash = str(hash(tmp)) - - -#------------------------------------------------------------------------------- -# Combine plugin objects, keep user-defined values, created time, changed time if nothing changed and the index -def combine_plugin_objects(old, new): - - new.userData = old.userData - new.index = old.index - new.created = old.created - - # Keep changed time if nothing changed - if new.status in ['watched-not-changed']: - new.changed = old.changed - - # return the new object, with some of the old values - return new - -#------------------------------------------------------------------------------- -# Replace {wildcars} with parameters -def resolve_wildcards_arr(commandArr, params): - - mylog('debug', [' [Plugins]: Pre-Resolved CMD: '] + commandArr) - - for param in params: - # mylog('debug', [' [Plugins]: key : {', param[0], '}']) - # mylog('debug', 
[' [Plugins]: resolved: ', param[1]]) - - i = 0 - - for comPart in commandArr: - - commandArr[i] = comPart.replace('{' + param[0] + '}', param[1]).replace('{s-quote}',"'") - - i += 1 - - return commandArr - -#------------------------------------------------------------------------------- -# Flattens a setting to make it passable to a script -def plugin_param_from_glob_set(globalSetting): - - setVal = globalSetting[6] # setting value - setTyp = globalSetting[3] # setting type - - - noConversion = ['text', 'integer', 'boolean', 'password', 'readonly', 'selectinteger', 'selecttext' ] - arrayConversion = ['multiselect', 'list'] - - if setTyp in noConversion: - return setVal - - if setTyp in arrayConversion: - return flatten_array(setVal) - - -#------------------------------------------------------------------------------- -# Gets the whole setting object -def get_plugin_setting(plugin, function_key): - - result = None - - for set in plugin['settings']: - if set["function"] == function_key: - result = set - - if result == None: - mylog('none', [' [Plugins] Setting with "function":"', function_key, '" is missing in plugin: ', get_plugin_string(plugin, 'display_name')]) - - return result - -#------------------------------------------------------------------------------- -# Gets the setting value -def get_plugin_setting_value(plugin, function_key): - - resultObj = get_plugin_setting(plugin, function_key) - - if resultObj != None: - return resultObj["value"] - - return None - - -#------------------------------------------------------------------------------- -# Get localized string value on the top JSON depth, not recursive -def get_plugin_string(props, el): - - result = '' - - if el in props['localized']: - for str in props[el]: - if str['language_code'] == 'en_us': - result = str['string'] - - if result == '': - result = 'en_us string missing' - - else: - result = props[el] - - return result - -#------------------------------------------------------------------------------- -def print_plugin_info(plugin, elements = ['display_name']): - - mylog('verbose', [' [Plugins] ---------------------------------------------']) - - for el in elements: - res = get_plugin_string(plugin, el) - mylog('verbose', [' [Plugins] ', el ,': ', res]) - -#------------------------------------------------------------------------------- -def flatten_array(arr): - - tmp = '' - - mylog('debug', arr) - - for arrayItem in arr: - # only one column flattening is supported - if isinstance(arrayItem, list): - arrayItem = str(arrayItem[0]) - - tmp += arrayItem + ',' - # tmp = tmp.replace("'","").replace(' ','') # No single quotes or empty spaces allowed - tmp = tmp.replace("'","") # No single quotes allowed - - return tmp[:-1] # Remove last comma ',' - - + execute_plugin(plugin) #------------------------------------------------------------------------------- # Cron-like Scheduling -#------------------------------------------------------------------------------- -class schedule_class: - def __init__(self, service, scheduleObject, last_next_schedule, was_last_schedule_used, last_run = 0): - self.service = service - self.scheduleObject = scheduleObject - self.last_next_schedule = last_next_schedule - self.last_run = last_run - self.was_last_schedule_used = was_last_schedule_used - def runScheduleCheck(self): - result = False - - # Initialize the last run time if never run before - if self.last_run == 0: - self.last_run = (datetime.datetime.now(tz) - timedelta(days=365)).replace(microsecond=0) - - # get the current time with the 
currently specified timezone - nowTime = datetime.datetime.now(tz).replace(microsecond=0) - - # Run the schedule if the current time is past the schedule time we saved last time and - # (maybe the following check is unnecessary:) - # if the last run is past the last time we run a scheduled Pholus scan - if nowTime > self.last_next_schedule and self.last_run < self.last_next_schedule: - print_log(f'Scheduler run for {self.service}: YES') - self.was_last_schedule_used = True - result = True - else: - print_log(f'Scheduler run for {self.service}: NO') - - if self.was_last_schedule_used: - self.was_last_schedule_used = False - self.last_next_schedule = self.scheduleObject.next() - - return result #=============================================================================== # BEGIN diff --git a/pialert/plugin.py b/pialert/plugin.py new file mode 100644 index 00000000..eb1c1e6c --- /dev/null +++ b/pialert/plugin.py @@ -0,0 +1,556 @@ +import os +import json +import subprocess +import datetime +from collections import namedtuple + +# pialert modules +from const import pluginsPath, logPath +from files import get_file_content, write_file +from logger import mylog +from conf import mySettings +#from api import update_api + + +#------------------------------------------------------------------------------- +# this is duplicated from helper to avoid circular reference !! TO-DO +#------------------------------------------------------------------------------- +def timeNow(): + return datetime.datetime.now().replace(microsecond=0) + +#------------------------------------------------------------------------------- +def get_plugins_configs(): + + pluginsList = [] + + # only top level directories required. No need for the loop + # for root, dirs, files in os.walk(pluginsPath): + + dirs = next(os.walk(pluginsPath))[1] + for d in dirs: # Loop over directories, not files + pluginsList.append(json.loads(get_file_content(pluginsPath + "/" + d + '/config.json'))) + + return pluginsList + + + +#------------------------------------------------------------------------------- +def print_plugin_info(plugin, elements = ['display_name']): + + mylog('verbose', [' [Plugins] ---------------------------------------------']) + + for el in elements: + res = get_plugin_string(plugin, el) + mylog('verbose', [' [Plugins] ', el ,': ', res]) + + +#------------------------------------------------------------------------------- +# Gets the whole setting object +def get_plugin_setting(plugin, function_key): + + result = None + + for set in plugin['settings']: + if set["function"] == function_key: + result = set + + if result == None: + mylog('none', [' [Plugins] Setting with "function":"', function_key, '" is missing in plugin: ', get_plugin_string(plugin, 'display_name')]) + + return result + +#------------------------------------------------------------------------------- +# Return whole setting touple +def get_setting(key): + result = None + # index order: key, name, desc, inputtype, options, regex, result, group, events + for set in mySettings: + if set[0] == key: + result = set + + if result is None: + mylog('info', [' Error - setting_missing - Setting not found for key: ', key]) + mylog('info', [' Error - logging the settings into file: ', logPath + '/setting_missing.json']) + write_file (logPath + '/setting_missing.json', json.dumps({ 'data' : mySettings})) + + return result + + +#------------------------------------------------------------------------------- +# Get localized string value on the top JSON depth, not recursive 
+def get_plugin_string(props, el): + + result = '' + + if el in props['localized']: + for str in props[el]: + if str['language_code'] == 'en_us': + result = str['string'] + + if result == '': + result = 'en_us string missing' + + else: + result = props[el] + + return result + + +#------------------------------------------------------------------------------- +# Executes the plugin command specified in the setting with the function specified as CMD +def execute_plugin(db, plugin): + sql = db.sql + + # ------- necessary settings check -------- + set = get_plugin_setting(plugin, "CMD") + + # handle missing "function":"CMD" setting + if set == None: + return + + set_CMD = set["value"] + + set = get_plugin_setting(plugin, "RUN_TIMEOUT") + + # handle missing "function":"_TIMEOUT" setting + if set == None: + set_RUN_TIMEOUT = 10 + else: + set_RUN_TIMEOUT = set["value"] + + mylog('debug', [' [Plugins] Timeout: ', set_RUN_TIMEOUT]) + + # Prepare custom params + params = [] + + if "params" in plugin: + for param in plugin["params"]: + resolved = "" + + # Get setting value + if param["type"] == "setting": + resolved = get_setting(param["value"]) + + if resolved != None: + resolved = plugin_param_from_glob_set(resolved) + + # Get Sql result + if param["type"] == "sql": + resolved = flatten_array(db.get_sql_array(param["value"])) + + if resolved == None: + mylog('none', [' [Plugins] The parameter "name":"', param["name"], '" was resolved as None']) + + else: + params.append( [param["name"], resolved] ) + + + # build SQL query parameters to insert into the DB + sqlParams = [] + + # python-script + if plugin['data_source'] == 'python-script': + # ------- prepare params -------- + # prepare command from plugin settings, custom parameters + command = resolve_wildcards_arr(set_CMD.split(), params) + + # Execute command + mylog('verbose', [' [Plugins] Executing: ', set_CMD]) + mylog('debug', [' [Plugins] Resolved : ', command]) + + try: + # try runnning a subprocess with a forced timeout in case the subprocess hangs + output = subprocess.check_output (command, universal_newlines=True, stderr=subprocess.STDOUT, timeout=(set_RUN_TIMEOUT)) + except subprocess.CalledProcessError as e: + # An error occured, handle it + mylog('none', [e.output]) + mylog('none', [' [Plugins] Error - enable LOG_LEVEL=debug and check logs']) + except subprocess.TimeoutExpired as timeErr: + mylog('none', [' [Plugins] TIMEOUT - the process forcefully terminated as timeout reached']) + + + # check the last run output + f = open(pluginsPath + '/' + plugin["code_name"] + '/last_result.log', 'r+') + newLines = f.read().split('\n') + f.close() + + # cleanup - select only lines containing a separator to filter out unnecessary data + newLines = list(filter(lambda x: '|' in x, newLines)) + + # # regular logging + # for line in newLines: + # append_line_to_file (pluginsPath + '/plugin.log', line +'\n') + + for line in newLines: + columns = line.split("|") + # There has to be always 9 columns + if len(columns) == 9: + sqlParams.append((plugin["unique_prefix"], columns[0], columns[1], 'null', columns[2], columns[3], columns[4], columns[5], columns[6], 0, columns[7], 'null', columns[8])) + else: + mylog('none', [' [Plugins]: Skipped invalid line in the output: ', line]) + + # pialert-db-query + if plugin['data_source'] == 'pialert-db-query': + # replace single quotes wildcards + q = set_CMD.replace("{s-quote}", '\'') + + # Execute command + mylog('verbose', [' [Plugins] Executing: ', q]) + + # set_CMD should contain a SQL query + arr = 
db.get_sql_array (q)
+
+        for row in arr:
+            # There have to be exactly 9 columns
+            if len(row) == 9 and row[0] not in ['','null']:
+                sqlParams.append((plugin["unique_prefix"], row[0], handle_empty(row[1]), 'null', row[2], row[3], row[4], handle_empty(row[5]), handle_empty(row[6]), 0, row[7], 'null', row[8]))
+            else:
+                mylog('none', ['   [Plugins]: Skipped invalid sql result'])
+
+
+    # check if the subprocess / SQL query failed / there was no valid output
+    if len(sqlParams) == 0:
+        mylog('none', ['   [Plugins] No output received from the plugin ', plugin["unique_prefix"], ' - enable LOG_LEVEL=debug and check logs'])
+        return
+    else:
+        mylog('verbose', ['[', timeNow(), '] [Plugins]: SUCCESS, received ', len(sqlParams), ' entries'])
+
+    # process results if any
+    if len(sqlParams) > 0:
+        sql.executemany ("""INSERT INTO Plugins_Events ("Plugin", "Object_PrimaryID", "Object_SecondaryID", "DateTimeCreated", "DateTimeChanged", "Watched_Value1", "Watched_Value2", "Watched_Value3", "Watched_Value4", "Status", "Extra", "UserData", "ForeignKey") VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)""", sqlParams)
+        db.commitDB()
+        sql.executemany ("""INSERT INTO Plugins_History ("Plugin", "Object_PrimaryID", "Object_SecondaryID", "DateTimeCreated", "DateTimeChanged", "Watched_Value1", "Watched_Value2", "Watched_Value3", "Watched_Value4", "Status", "Extra", "UserData", "ForeignKey") VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)""", sqlParams)
+        db.commitDB()
+
+        process_plugin_events(db, plugin)
+
+    # update API endpoints
+    # update_api(False, ["plugins_events","plugins_objects"]) # TO-DO - remove circular reference
+
+#-------------------------------------------------------------------------------
+def custom_plugin_decoder(pluginDict):
+    return namedtuple('X', pluginDict.keys())(*pluginDict.values())
+
+#-------------------------------------------------------------------------------
+# Handle an empty value
+def handle_empty(value):
+    if value == '' or value is None:
+        value = 'null'
+
+    return value
+
+
+#-------------------------------------------------------------------------------
+# Flattens a setting to make it passable to a script
+def plugin_param_from_glob_set(globalSetting):
+
+    setVal = globalSetting[6] # setting value
+    setTyp = globalSetting[3] # setting type
+
+
+    noConversion    = ['text', 'integer', 'boolean', 'password', 'readonly', 'selectinteger', 'selecttext' ]
+    arrayConversion = ['multiselect', 'list']
+
+    if setTyp in noConversion:
+        return setVal
+
+    if setTyp in arrayConversion:
+        return flatten_array(setVal)
+
+#-------------------------------------------------------------------------------
+# Gets the setting value
+def get_plugin_setting_value(plugin, function_key):
+
+    # look up the whole setting object so its "value" key can be returned
+    resultObj = get_plugin_setting(plugin, function_key)
+
+    if resultObj != None:
+        return resultObj["value"]
+
+    return None
+
+#-------------------------------------------------------------------------------
+# Return setting value
+def get_setting_value(key):
+
+    set = get_setting(key)
+
+    if set is not None:
+
+        setVal = set[6] # setting value
+        setTyp = set[3] # setting type
+
+        return setVal
+
+    return ''
+
+#-------------------------------------------------------------------------------
+def flatten_array(arr):
+
+    tmp = ''
+
+    mylog('debug', arr)
+
+    for arrayItem in arr:
+        # only one column flattening is supported
+        if isinstance(arrayItem, list):
+            arrayItem = str(arrayItem[0])
+
+        tmp += arrayItem + ','
+    # tmp = tmp.replace("'","").replace(' ','') # No single quotes or empty spaces allowed
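+    # illustrative: flatten_array(['eth0', ['wlan0']]) returns "eth0,wlan0" (assumed sample values)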
+    tmp = tmp.replace("'","") # No single quotes allowed
+
+    return tmp[:-1]  # Remove last comma ','
+
+
+#-------------------------------------------------------------------------------
+# Replace {wildcards} with parameters
+def resolve_wildcards_arr(commandArr, params):
+
+    mylog('debug', ['   [Plugins]: Pre-Resolved CMD: '] + commandArr)
+
+    for param in params:
+        # mylog('debug', ['   [Plugins]: key     : {', param[0], '}'])
+        # mylog('debug', ['   [Plugins]: resolved: ', param[1]])
+
+        i = 0
+
+        for comPart in commandArr:
+
+            # replace every occurrence of {param-name} and of the {s-quote} placeholder
+            commandArr[i] = comPart.replace('{' + param[0] + '}', param[1]).replace('{s-quote}',"'")
+
+            i += 1
+
+    return commandArr
+
+
+#-------------------------------------------------------------------------------
+# Combine plugin objects; keep user-defined values, the created time, the changed time (if nothing changed) and the index
+def combine_plugin_objects(old, new):
+
+    new.userData = old.userData
+    new.index    = old.index
+    new.created  = old.created
+
+    # Keep the changed time if nothing changed
+    if new.status in ['watched-not-changed']:
+        new.changed = old.changed
+
+    # return the new object, with some of the old values
+    return new
+
+#-------------------------------------------------------------------------------
+# Check if watched values changed for the given plugin
+def process_plugin_events(db, plugin):
+    sql = db.sql
+
+    global pluginObjects, pluginEvents
+
+    pluginPref = plugin["unique_prefix"]
+
+    mylog('debug', ['   [Plugins] Processing   : ', pluginPref])
+
+    plugObjectsArr = db.get_sql_array ("SELECT * FROM Plugins_Objects where Plugin = '" + str(pluginPref)+"'")
+    plugEventsArr  = db.get_sql_array ("SELECT * FROM Plugins_Events where Plugin = '" + str(pluginPref)+"'")
+
+    pluginObjects = []
+    pluginEvents  = []
+
+    for obj in plugObjectsArr:
+        pluginObjects.append(plugin_object_class(plugin, obj))
+
+    existingPluginObjectsCount = len(pluginObjects)
+
+    mylog('debug', ['   [Plugins] Existing objects        : ', existingPluginObjectsCount])
+    mylog('debug', ['   [Plugins] New and existing events : ', len(plugEventsArr)])
+
+    # set status as new - it will be changed later if conditions are fulfilled, e.g. a matching entry is found
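+    # status lifecycle (derived from the checks below): "new" -> "exists" -> "watched-changed" / "watched-not-changed"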
+    for eve in plugEventsArr:
+        tmpObject = plugin_object_class(plugin, eve)
+        tmpObject.status = "new"
+        pluginEvents.append(tmpObject)
+
+
+    # Update the status to "exists"
+    index = 0
+    for tmpObjFromEvent in pluginEvents:
+
+        # compare the hash of the IDs for uniqueness
+        if any(x.idsHash == tmpObjFromEvent.idsHash for x in pluginObjects):
+            mylog('debug', ['   [Plugins] Found existing object'])
+            pluginEvents[index].status = "exists"
+        index += 1
+
+    # Loop thru events and update the ones that exist to determine if watched columns changed
+    index = 0
+    for tmpObjFromEvent in pluginEvents:
+
+        if tmpObjFromEvent.status == "exists":
+
+            # compare the hash of the changed watched columns for uniqueness
+            if any(x.watchedHash != tmpObjFromEvent.watchedHash for x in pluginObjects):
+                pluginEvents[index].status = "watched-changed"
+            else:
+                pluginEvents[index].status = "watched-not-changed"
+        index += 1
+
+    # Merge existing plugin objects with newly discovered ones and update existing ones with new values
+    for eveObj in pluginEvents:
+        if eveObj.status == 'new':
+            pluginObjects.append(eveObj)
+        else:
+            index = 0
+            for plugObj in pluginObjects:
+                # find the corresponding object for the event and merge
+                if plugObj.idsHash == eveObj.idsHash:
+                    pluginObjects[index] = combine_plugin_objects(plugObj, eveObj)
+                index += 1
+
+    # Update the DB
+    # ----------------------------
+
+    # Update the Plugins_Objects
+    for plugObj in pluginObjects:
+
+        createdTime = plugObj.created
+
+        if plugObj.status == 'new':
+
+            createdTime = plugObj.changed
+
+            sql.execute ("INSERT INTO Plugins_Objects (Plugin, Object_PrimaryID, Object_SecondaryID, DateTimeCreated, DateTimeChanged, Watched_Value1, Watched_Value2, Watched_Value3, Watched_Value4, Status, Extra, UserData, ForeignKey) VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?)", (plugObj.pluginPref, plugObj.primaryId , plugObj.secondaryId , createdTime, plugObj.changed , plugObj.watched1 , plugObj.watched2 , plugObj.watched3 , plugObj.watched4 , plugObj.status , plugObj.extra, plugObj.userData, plugObj.foreignKey ))
+        else:
+            sql.execute (f"UPDATE Plugins_Objects set Plugin = '{plugObj.pluginPref}', DateTimeChanged = '{plugObj.changed}', Watched_Value1 = '{plugObj.watched1}', Watched_Value2 = '{plugObj.watched2}', Watched_Value3 = '{plugObj.watched3}', Watched_Value4 = '{plugObj.watched4}', Status = '{plugObj.status}', Extra = '{plugObj.extra}', ForeignKey = '{plugObj.foreignKey}' WHERE \"Index\" = {plugObj.index}")
+
+    # Update the Plugins_Events with the new statuses
+    sql.execute (f'DELETE FROM Plugins_Events where Plugin = "{pluginPref}"')
+
+    for plugObj in pluginEvents:
+
+        createdTime = plugObj.created
+
+        # use the same datetime for created and changed if this is a new entry
+        if plugObj.status == 'new':
+            createdTime = plugObj.changed
+
+        # insert only events that are to be reported on
+        if plugObj.status in get_plugin_setting_value(plugin, "REPORT_ON"):
+
+            sql.execute ("INSERT INTO Plugins_Events (Plugin, Object_PrimaryID, Object_SecondaryID, DateTimeCreated, DateTimeChanged, Watched_Value1, Watched_Value2, Watched_Value3, Watched_Value4, Status, Extra, UserData, ForeignKey) VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?)", (plugObj.pluginPref, plugObj.primaryId , plugObj.secondaryId , createdTime, plugObj.changed , plugObj.watched1 , plugObj.watched2 , plugObj.watched3 , plugObj.watched4 , plugObj.status , plugObj.extra, plugObj.userData, plugObj.foreignKey ))
+
+    # Perform database table mapping if enabled for the plugin
+    if len(pluginEvents) > 0 and "mapped_to_table" in plugin:
+
+        sqlParams = []
+
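+        # illustrative (assumed) config.json fragment that would enable this mapping:
+        #   "mapped_to_table": "CurrentScan",
+        #   "database_column_definitions": [
+        #     { "column": "Object_PrimaryID", "mapped_to_column": "cur_MAC" },
+        #     { "column": "Watched_Value1",   "mapped_to_column": "cur_IP"  }
+        #   ]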
+        dbTable = plugin['mapped_to_table']
+
+        mylog('debug', ['   [Plugins] Mapping objects to database table: ', dbTable])
+
+        # collect all columns to be mapped
+        mappedCols = []
+        columnsStr = ''
+        valuesStr  = ''
+
+        for clmn in plugin['database_column_definitions']:
+            if 'mapped_to_column' in clmn:
+                mappedCols.append(clmn)
+                columnsStr = f'{columnsStr}, "{clmn["mapped_to_column"]}"'
+                valuesStr  = f'{valuesStr}, ?'
+
+        if len(columnsStr) > 0:
+            columnsStr = columnsStr[1:] # remove first ','
+            valuesStr  = valuesStr[1:]  # remove first ','
+
+        # map the column names to plugin object event values (table-driven; behaviour unchanged)
+        for plgEv in pluginEvents:
+
+            colValues = {
+                'Index'              : plgEv.index,
+                'Plugin'             : plgEv.pluginPref,
+                'Object_PrimaryID'   : plgEv.primaryId,
+                'Object_SecondaryID' : plgEv.secondaryId,
+                'DateTimeCreated'    : plgEv.created,
+                'DateTimeChanged'    : plgEv.changed,
+                'Watched_Value1'     : plgEv.watched1,
+                'Watched_Value2'     : plgEv.watched2,
+                'Watched_Value3'     : plgEv.watched3,
+                'Watched_Value4'     : plgEv.watched4,
+                'UserData'           : plgEv.userData,
+                'Extra'              : plgEv.extra,
+                'Status'             : plgEv.status
+            }
+
+            tmpList = []
+
+            for col in mappedCols:
+                # columns without a known mapping are skipped, as in the original elif chain
+                if col['column'] in colValues:
+                    tmpList.append(colValues[col['column']])
+
+            sqlParams.append(tuple(tmpList))
+
+        q = f'INSERT into {dbTable} ({columnsStr}) VALUES ({valuesStr})'
+
+        mylog('debug', ['   [Plugins] SQL query for mapping: ', q ])
+
+        sql.executemany (q, sqlParams)
+
+        db.commitDB()
+
+
+#-------------------------------------------------------------------------------
+class plugin_object_class:
+    def __init__(self, plugin, objDbRow):
+        self.index       = objDbRow[0]
+        self.pluginPref  = objDbRow[1]
+        self.primaryId   = objDbRow[2]
+        self.secondaryId = objDbRow[3]
+        self.created     = objDbRow[4]
+        self.changed     = objDbRow[5]
+        self.watched1    = objDbRow[6]
+        self.watched2    = objDbRow[7]
+        self.watched3    = objDbRow[8]
+        self.watched4    = objDbRow[9]
+        self.status      = objDbRow[10]
+        self.extra       = objDbRow[11]
+        self.userData    = objDbRow[12]
+        self.foreignKey  = objDbRow[13]
+
+        # self.idsHash = str(hash(str(self.primaryId) + str(self.secondaryId)))
+        self.idsHash = str(self.primaryId) + str(self.secondaryId)
+
+        self.watchedClmns = []
+        self.watchedIndxs = []
+
+        setObj = get_plugin_setting(plugin, 'WATCH')
+
+        indexNameColumnMapping = [(6, 'Watched_Value1' ), (7, 'Watched_Value2' ), (8, 'Watched_Value3' ), (9, 'Watched_Value4' )]
+
+        if setObj is not None:
+
+            self.watchedClmns = setObj["value"]
+
+            for clmName in self.watchedClmns:
+                for mapping in indexNameColumnMapping:
+                    # compare against the current mapping tuple, not the whole list
+                    if clmName == mapping[1]:
+                        self.watchedIndxs.append(mapping[0])
+
+        tmp = ''
+        for indx in self.watchedIndxs:
+            tmp += str(objDbRow[indx])
+
+        self.watchedHash = str(hash(tmp))
+
+

From bd43a169751eb54f7b3dd3a6be07e2b383461b6a Mon Sep 17 00:00:00 2001
From: Data-Monkey
Date: Mon, 22 May 2023 22:05:21 +1000
Subject: [PATCH 06/20] everything split out not tested

---
 pialert/nmapscan.py | 204 ++++++++++++++++++++++++++++++++++++++++++++
 1 file changed, 204 insertions(+)
 create mode 100644 pialert/nmapscan.py

diff --git a/pialert/nmapscan.py b/pialert/nmapscan.py
new file mode 100644
index 00000000..8a6aa9e6
--- /dev/null
+++ b/pialert/nmapscan.py
@@ -0,0 +1,204 @@
+
+import subprocess
+
+from const import logPath
+from conf import NMAP_ARGS, NMAP_TIMEOUT
+from database import updateState, sql_nmap_scan_all
+from helper import json_struc, timeNow
+from logger import append_line_to_file, mylog
+#-------------------------------------------------------------------------------
+
+
+
+class nmap_entry:
+    def __init__(self, mac, time, port, state, service, name = '', extra = '', index = 0):
+        self.mac = mac
+        self.time = time
+        self.port = port
+        self.state = state
+        self.service = service
+        self.name = name
+        self.extra = extra
+        self.index = index
+        self.hash = str(mac) + str(port) + str(state) + str(service)
+
+
+#-------------------------------------------------------------------------------
+def performNmapScan(db, devicesToScan):
+    sql = db.sql # TO-DO
+
+    global changedPorts_json_struc
+
+    changedPortsTmp = []
+
+    if len(devicesToScan) > 0:
+
+        timeoutSec = NMAP_TIMEOUT
+
+        devTotal = len(devicesToScan)
+
+        updateState(db,"Scan: Nmap")
+
+        mylog('verbose', ['[', timeNow(), '] Scan: Nmap for max ', str(timeoutSec), 's ('+ str(round(int(timeoutSec) / 60, 1)) +'min) per device'])
+
+        mylog('verbose', ["        Estimated max delay: ", (devTotal * int(timeoutSec)), 's ', '(', round((devTotal * int(timeoutSec))/60,1) , 'min)' ])
+
+        devIndex = 0
+        for device in devicesToScan:
+            # Execute the nmap command
+            output = ""
+            # prepare arguments from the user-supplied ones
+            nmapArgs = ['nmap'] + NMAP_ARGS.split() + [device["dev_LastIP"]]
+
+            progress = ' (' + str(devIndex+1) + '/' + str(devTotal) + ')'
+
+            try:
+                # try running a subprocess with a forced (timeout + 30 seconds) in case the subprocess hangs
+                output = subprocess.check_output (nmapArgs, universal_newlines=True, stderr=subprocess.STDOUT, timeout=(timeoutSec + 30))
+            except subprocess.CalledProcessError as e:
+                # An error occurred, handle it
+                mylog('none', [e.output])
+                mylog('none', ["        Error - Nmap Scan - check logs", progress])
+            except subprocess.TimeoutExpired as timeErr:
+                mylog('verbose', ['        Nmap TIMEOUT - the process was forcefully terminated as the timeout was reached for ', device["dev_LastIP"], progress])
+
+            if output == "": # check if the subprocess failed
+                mylog('info', ['[', timeNow(), '] Scan: Nmap FAIL for ', device["dev_LastIP"], progress ,' check logs for details'])
+            else:
+                mylog('verbose', ['[', timeNow(), '] Scan: Nmap SUCCESS for ', device["dev_LastIP"], progress])
+
+            devIndex += 1
+
+            # parse the output of the last run
+            newLines = output.split('\n')
+
+            # regular logging
+            for line in newLines:
+                append_line_to_file (logPath + '/pialert_nmap.log', line +'\n')
+
+            # collect ports / new Nmap Entries
+            newEntriesTmp = []
+
+            index = 0
+            startCollecting = False
+            duration = ""
+            for line in newLines:
+                if 'Starting Nmap' in line:
+                    if len(newLines) > index+1 and 'Note: Host seems down' in newLines[index+1]:
+                        break # this entry is empty
+                elif 'PORT' in line and 'STATE' in line and 'SERVICE' in line:
+                    startCollecting = True # the header of the port table marks the start
+                elif startCollecting and len(line.split()) == 3:
+                    newEntriesTmp.append(nmap_entry(device["dev_MAC"], timeNow(), line.split()[0], line.split()[1], line.split()[2], device["dev_Name"]))
+                elif 'Nmap done' in line:
+                    startCollecting = False # end of the port table reached
+                    duration = line.split('scanned in ')[1]
+                index += 1
+
+            # previous Nmap Entries
+            oldEntries = []
+
+            mylog('verbose', ['[', timeNow(), '] Scan: Ports found by NMAP: ', len(newEntriesTmp)])
+
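+            # new entries are deduplicated below via nmap_entry.hash, i.e. str(mac)+str(port)+str(state)+str(service),
+            # e.g. "aa:bb:cc:dd:ee:ff22/tcpopenssh" (illustrative values)
+            if 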
len(newEntriesTmp) > 0: + + # get all current NMAP ports from the DB + sql.execute(sql_nmap_scan_all) + + rows = sql.fetchall() + + for row in rows: + # only collect entries matching the current MAC address + if row["MAC"] == device["dev_MAC"]: + oldEntries.append(nmap_entry(row["MAC"], row["Time"], row["Port"], row["State"], row["Service"], device["dev_Name"], row["Extra"], row["Index"])) + + + newEntries = [] + + # Collect all entries that don't match the ones in the DB + for newTmpEntry in newEntriesTmp: + + found = False + + # Check the new entry is already available in oldEntries and remove from processing if yes + for oldEntry in oldEntries: + if newTmpEntry.hash == oldEntry.hash: + found = True + + if not found: + newEntries.append(newTmpEntry) + + + mylog('verbose', ['[', timeNow(), '] Scan: Nmap newly discovered or changed ports: ', len(newEntries)]) + + # collect new ports, find the corresponding old entry and return for notification purposes + # also update the DB with the new values after deleting the old ones + if len(newEntries) > 0: + + # params to build the SQL query + params = [] + indexesToDelete = "" + + # Find old entry matching the new entry hash + for newEntry in newEntries: + + foundEntry = None + + for oldEntry in oldEntries: + if oldEntry.hash == newEntry.hash: + indexesToDelete = indexesToDelete + str(oldEntry.index) + ',' + foundEntry = oldEntry + + columnNames = ["Name", "MAC", "Port", "State", "Service", "Extra", "NewOrOld" ] + + # Old entry found + if foundEntry is not None: + # Build params for sql query + params.append((newEntry.mac, newEntry.time, newEntry.port, newEntry.state, newEntry.service, oldEntry.extra)) + # Build JSON for API and notifications + changedPortsTmp.append({ + "Name" : foundEntry.name, + "MAC" : newEntry.mac, + "Port" : newEntry.port, + "State" : newEntry.state, + "Service" : newEntry.service, + "Extra" : foundEntry.extra, + "NewOrOld" : "New values" + }) + changedPortsTmp.append({ + "Name" : foundEntry.name, + "MAC" : foundEntry.mac, + "Port" : foundEntry.port, + "State" : foundEntry.state, + "Service" : foundEntry.service, + "Extra" : foundEntry.extra, + "NewOrOld" : "Old values" + }) + # New entry - no matching Old entry found + else: + # Build params for sql query + params.append((newEntry.mac, newEntry.time, newEntry.port, newEntry.state, newEntry.service, '')) + # Build JSON for API and notifications + changedPortsTmp.append({ + "Name" : "New device", + "MAC" : newEntry.mac, + "Port" : newEntry.port, + "State" : newEntry.state, + "Service" : newEntry.service, + "Extra" : "", + "NewOrOld" : "New device" + }) + + changedPorts_json_struc = json_struc({ "data" : changedPortsTmp}, columnNames) + + # Delete old entries if available + if len(indexesToDelete) > 0: + sql.execute ("DELETE FROM Nmap_Scan where \"Index\" in (" + indexesToDelete[:-1] +")") + db.commitDB() + + # Insert new values into the DB + sql.executemany ("""INSERT INTO Nmap_Scan ("MAC", "Time", "Port", "State", "Service", "Extra") VALUES (?, ?, ?, ?, ?, ?)""", params) + db.commitDB() + + From e27610a19926724a70e1deb3bac7ab4196a80e5b Mon Sep 17 00:00:00 2001 From: Data-Monkey Date: Mon, 22 May 2023 22:05:54 +1000 Subject: [PATCH 07/20] everything split out ut not tested --- pialert/arpscan.py | 55 + pialert/conf.py | 4 + pialert/database.py | 45 +- pialert/device.py | 434 ++++++++ pialert/helper.py | 86 +- pialert/mac_vendor.py | 102 ++ pialert/mqtt.py | 244 +++++ pialert/networkscan.py | 311 ++++++ pialert/pholusscan.py | 201 ++++ pialert/pialert.py | 2313 
+--------------------------------------- pialert/pihole.py | 48 + pialert/plugin.py | 45 +- pialert/reporting.py | 640 +++++++++++ 13 files changed, 2226 insertions(+), 2302 deletions(-) create mode 100644 pialert/arpscan.py create mode 100644 pialert/device.py create mode 100644 pialert/mac_vendor.py create mode 100644 pialert/mqtt.py create mode 100644 pialert/networkscan.py create mode 100644 pialert/pholusscan.py create mode 100644 pialert/pihole.py create mode 100644 pialert/reporting.py diff --git a/pialert/arpscan.py b/pialert/arpscan.py new file mode 100644 index 00000000..54af2213 --- /dev/null +++ b/pialert/arpscan.py @@ -0,0 +1,55 @@ +import re +import subprocess + +from logger import mylog + + +#------------------------------------------------------------------------------- +def execute_arpscan (userSubnets): + + # output of possible multiple interfaces + arpscan_output = "" + + # scan each interface + for interface in userSubnets : + arpscan_output += execute_arpscan_on_interface (interface) + + # Search IP + MAC + Vendor as regular expresion + re_ip = r'(?P((2[0-5]|1[0-9]|[0-9])?[0-9]\.){3}((2[0-5]|1[0-9]|[0-9])?[0-9]))' + re_mac = r'(?P([0-9a-fA-F]{2}[:-]){5}([0-9a-fA-F]{2}))' + re_hw = r'(?P.*)' + re_pattern = re.compile (re_ip + '\s+' + re_mac + '\s' + re_hw) + + # Create Userdict of devices + devices_list = [device.groupdict() + for device in re.finditer (re_pattern, arpscan_output)] + + # Delete duplicate MAC + unique_mac = [] + unique_devices = [] + + for device in devices_list : + if device['mac'] not in unique_mac: + unique_mac.append(device['mac']) + unique_devices.append(device) + + # return list + return unique_devices + +#------------------------------------------------------------------------------- +def execute_arpscan_on_interface (interface): + # Prepare command arguments + subnets = interface.strip().split() + # Retry is 6 to avoid false offline devices + arpscan_args = ['sudo', 'arp-scan', '--ignoredups', '--retry=6'] + subnets + + # Execute command + try: + # try runnning a subprocess + result = subprocess.check_output (arpscan_args, universal_newlines=True) + except subprocess.CalledProcessError as e: + # An error occured, handle it + mylog('none', [e.output]) + result = "" + + return result diff --git a/pialert/conf.py b/pialert/conf.py index 71fe51a7..3b3b7f3c 100644 --- a/pialert/conf.py +++ b/pialert/conf.py @@ -1,6 +1,10 @@ """ config related functions for Pi.Alert """ mySettings = [] +debug_force_notification = False +cycle = 1 +userSubnets = [] +mySchedules = [] # General ENABLE_ARPSCAN = True diff --git a/pialert/database.py b/pialert/database.py index 5c750d69..0ba19b89 100644 --- a/pialert/database.py +++ b/pialert/database.py @@ -5,7 +5,7 @@ import sqlite3 # pialert modules from const import fullDbPath from logger import mylog -from helper import initOrSetParam, json_struc, row_to_json +from helper import initOrSetParam, json_struc, row_to_json, timeNow #=============================================================================== @@ -422,3 +422,46 @@ def upgradeDB(db: DB()): db.commitDB() +#------------------------------------------------------------------------------- +def get_device_stats(db): + sql = db.sql #TO-DO + # columns = ["online","down","all","archived","new","unknown"] + sql.execute(sql_devices_stats) + + row = sql.fetchone() + db.commitDB() + + return row +#------------------------------------------------------------------------------- +def get_all_devices(db): + sql = db.sql #TO-DO + sql.execute(sql_devices_all) + + row = 
sql.fetchall() + + db.commitDB() + return row + +#------------------------------------------------------------------------------- + +#------------------------------------------------------------------------------- +def insertOnlineHistory(db, cycle): + sql = db.sql #TO-DO + startTime = timeNow() + # Add to History + sql.execute("SELECT * FROM Devices") + History_All = sql.fetchall() + History_All_Devices = len(History_All) + + sql.execute("SELECT * FROM Devices WHERE dev_Archived = 1") + History_Archived = sql.fetchall() + History_Archived_Devices = len(History_Archived) + + sql.execute("""SELECT * FROM CurrentScan WHERE cur_ScanCycle = ? """, (cycle,)) + History_Online = sql.fetchall() + History_Online_Devices = len(History_Online) + History_Offline_Devices = History_All_Devices - History_Archived_Devices - History_Online_Devices + + sql.execute ("INSERT INTO Online_History (Scan_Date, Online_Devices, Down_Devices, All_Devices, Archived_Devices) "+ + "VALUES ( ?, ?, ?, ?, ?)", (startTime, History_Online_Devices, History_Offline_Devices, History_All_Devices, History_Archived_Devices ) ) + db.commit() \ No newline at end of file diff --git a/pialert/device.py b/pialert/device.py new file mode 100644 index 00000000..5888ed46 --- /dev/null +++ b/pialert/device.py @@ -0,0 +1,434 @@ + + + + +import subprocess + +from pialert.conf import PHOLUS_ACTIVE, PHOLUS_FORCE, PHOLUS_TIMEOUT, cycle, DIG_GET_IP_ARG, userSubnets +from pialert.helper import timeNow +from pialert.internet import check_IP_format, get_internet_IP +from pialert.logger import mylog, print_log +from pialert.mac_vendor import query_MAC_vendor +from pialert.pholusscan import performPholusScan, resolve_device_name_pholus +#------------------------------------------------------------------------------- + + +def save_scanned_devices (db, p_arpscan_devices, p_cycle_interval): + sql = db.sql #TO-DO + cycle = 1 # always 1, only one cycle supported + + # Delete previous scan data + sql.execute ("DELETE FROM CurrentScan WHERE cur_ScanCycle = ?", + (cycle,)) + + if len(p_arpscan_devices) > 0: + # Insert new arp-scan devices + sql.executemany ("INSERT INTO CurrentScan (cur_ScanCycle, cur_MAC, "+ + " cur_IP, cur_Vendor, cur_ScanMethod) "+ + "VALUES ("+ str(cycle) + ", :mac, :ip, :hw, 'arp-scan')", + p_arpscan_devices) + + # Insert Pi-hole devices + startTime = timeNow() + sql.execute ("""INSERT INTO CurrentScan (cur_ScanCycle, cur_MAC, + cur_IP, cur_Vendor, cur_ScanMethod) + SELECT ?, PH_MAC, PH_IP, PH_Vendor, 'Pi-hole' + FROM PiHole_Network + WHERE PH_LastQuery >= ? + AND NOT EXISTS (SELECT 'X' FROM CurrentScan + WHERE cur_MAC = PH_MAC + AND cur_ScanCycle = ? 
)""", + (cycle, + (int(startTime.strftime('%s')) - 60 * p_cycle_interval), + cycle) ) + + # Check Internet connectivity + internet_IP = get_internet_IP(DIG_GET_IP_ARG) + # TESTING - Force IP + # internet_IP = "" + if internet_IP != "" : + sql.execute ("""INSERT INTO CurrentScan (cur_ScanCycle, cur_MAC, cur_IP, cur_Vendor, cur_ScanMethod) + VALUES (?, 'Internet', ?, Null, 'queryDNS') """, (cycle, internet_IP) ) + + # #76 Add Local MAC of default local interface + # BUGFIX #106 - Device that pialert is running + # local_mac_cmd = ["bash -lc ifconfig `ip route list default | awk {'print $5'}` | grep ether | awk '{print $2}'"] + # local_mac_cmd = ["/sbin/ifconfig `ip route list default | sort -nk11 | head -1 | awk {'print $5'}` | grep ether | awk '{print $2}'"] + local_mac_cmd = ["/sbin/ifconfig `ip -o route get 1 | sed 's/^.*dev \\([^ ]*\\).*$/\\1/;q'` | grep ether | awk '{print $2}'"] + local_mac = subprocess.Popen (local_mac_cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT).communicate()[0].decode().strip() + + # local_dev_cmd = ["ip -o route get 1 | sed 's/^.*dev \\([^ ]*\\).*$/\\1/;q'"] + # local_dev = subprocess.Popen (local_dev_cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT).communicate()[0].decode().strip() + + # local_ip_cmd = ["ip route list default | awk {'print $7'}"] + local_ip_cmd = ["ip -o route get 1 | sed 's/^.*src \\([^ ]*\\).*$/\\1/;q'"] + local_ip = subprocess.Popen (local_ip_cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT).communicate()[0].decode().strip() + + mylog('debug', [' Saving this IP into the CurrentScan table:', local_ip]) + + if check_IP_format(local_ip) == '': + local_ip = '0.0.0.0' + + # Check if local mac has been detected with other methods + sql.execute ("SELECT COUNT(*) FROM CurrentScan WHERE cur_ScanCycle = ? AND cur_MAC = ? ", (cycle, local_mac) ) + if sql.fetchone()[0] == 0 : + sql.execute ("INSERT INTO CurrentScan (cur_ScanCycle, cur_MAC, cur_IP, cur_Vendor, cur_ScanMethod) "+ + "VALUES ( ?, ?, ?, Null, 'local_MAC') ", (cycle, local_mac, local_ip) ) + +#------------------------------------------------------------------------------- +def print_scan_stats (db): + sql = db.sql #TO-DO + # Devices Detected + sql.execute ("""SELECT COUNT(*) FROM CurrentScan + WHERE cur_ScanCycle = ? """, + (cycle,)) + mylog('verbose', [' Devices Detected.......: ', str (sql.fetchone()[0]) ]) + + # Devices arp-scan + sql.execute ("""SELECT COUNT(*) FROM CurrentScan + WHERE cur_ScanMethod='arp-scan' AND cur_ScanCycle = ? """, + (cycle,)) + mylog('verbose', [' arp-scan detected..: ', str (sql.fetchone()[0]) ]) + + # Devices Pi-hole + sql.execute ("""SELECT COUNT(*) FROM CurrentScan + WHERE cur_ScanMethod='PiHole' AND cur_ScanCycle = ? """, + (cycle,)) + mylog('verbose', [' Pi-hole detected...: +' + str (sql.fetchone()[0]) ]) + + # New Devices + sql.execute ("""SELECT COUNT(*) FROM CurrentScan + WHERE cur_ScanCycle = ? + AND NOT EXISTS (SELECT 1 FROM Devices + WHERE dev_MAC = cur_MAC) """, + (cycle,)) + mylog('verbose', [' New Devices........: ' + str (sql.fetchone()[0]) ]) + + # Devices in this ScanCycle + sql.execute ("""SELECT COUNT(*) FROM Devices, CurrentScan + WHERE dev_MAC = cur_MAC AND dev_ScanCycle = cur_ScanCycle + AND dev_ScanCycle = ? """, + (cycle,)) + + mylog('verbose', [' Devices in this cycle..: ' + str (sql.fetchone()[0]) ]) + + # Down Alerts + sql.execute ("""SELECT COUNT(*) FROM Devices + WHERE dev_AlertDeviceDown = 1 + AND dev_ScanCycle = ? 
+ AND NOT EXISTS (SELECT 1 FROM CurrentScan + WHERE dev_MAC = cur_MAC + AND dev_ScanCycle = cur_ScanCycle) """, + (cycle,)) + mylog('verbose', [' Down Alerts........: ' + str (sql.fetchone()[0]) ]) + + # New Down Alerts + sql.execute ("""SELECT COUNT(*) FROM Devices + WHERE dev_AlertDeviceDown = 1 + AND dev_PresentLastScan = 1 + AND dev_ScanCycle = ? + AND NOT EXISTS (SELECT 1 FROM CurrentScan + WHERE dev_MAC = cur_MAC + AND dev_ScanCycle = cur_ScanCycle) """, + (cycle,)) + mylog('verbose', [' New Down Alerts....: ' + str (sql.fetchone()[0]) ]) + + # New Connections + sql.execute ("""SELECT COUNT(*) FROM Devices, CurrentScan + WHERE dev_MAC = cur_MAC AND dev_ScanCycle = cur_ScanCycle + AND dev_PresentLastScan = 0 + AND dev_ScanCycle = ? """, + (cycle,)) + mylog('verbose', [' New Connections....: ' + str ( sql.fetchone()[0]) ]) + + # Disconnections + sql.execute ("""SELECT COUNT(*) FROM Devices + WHERE dev_PresentLastScan = 1 + AND dev_ScanCycle = ? + AND NOT EXISTS (SELECT 1 FROM CurrentScan + WHERE dev_MAC = cur_MAC + AND dev_ScanCycle = cur_ScanCycle) """, + (cycle,)) + mylog('verbose', [' Disconnections.....: ' + str ( sql.fetchone()[0]) ]) + + # IP Changes + sql.execute ("""SELECT COUNT(*) FROM Devices, CurrentScan + WHERE dev_MAC = cur_MAC AND dev_ScanCycle = cur_ScanCycle + AND dev_ScanCycle = ? + AND dev_LastIP <> cur_IP """, + (cycle,)) + mylog('verbose', [' IP Changes.........: ' + str ( sql.fetchone()[0]) ]) + + + +#------------------------------------------------------------------------------- +def create_new_devices (db): + sql = db.sql # TO-DO + startTime = timeNow() + + # arpscan - Insert events for new devices + print_log ('New devices - 1 Events') + sql.execute ("""INSERT INTO Events (eve_MAC, eve_IP, eve_DateTime, + eve_EventType, eve_AdditionalInfo, + eve_PendingAlertEmail) + SELECT cur_MAC, cur_IP, ?, 'New Device', cur_Vendor, 1 + FROM CurrentScan + WHERE cur_ScanCycle = ? + AND NOT EXISTS (SELECT 1 FROM Devices + WHERE dev_MAC = cur_MAC) """, + (startTime, cycle) ) + + print_log ('New devices - Insert Connection into session table') + sql.execute ("""INSERT INTO Sessions (ses_MAC, ses_IP, ses_EventTypeConnection, ses_DateTimeConnection, + ses_EventTypeDisconnection, ses_DateTimeDisconnection, ses_StillConnected, ses_AdditionalInfo) + SELECT cur_MAC, cur_IP,'Connected',?, NULL , NULL ,1, cur_Vendor + FROM CurrentScan + WHERE cur_ScanCycle = ? + AND NOT EXISTS (SELECT 1 FROM Sessions + WHERE ses_MAC = cur_MAC) """, + (startTime, cycle) ) + + # arpscan - Create new devices + print_log ('New devices - 2 Create devices') + sql.execute ("""INSERT INTO Devices (dev_MAC, dev_name, dev_Vendor, + dev_LastIP, dev_FirstConnection, dev_LastConnection, + dev_ScanCycle, dev_AlertEvents, dev_AlertDeviceDown, + dev_PresentLastScan) + SELECT cur_MAC, '(unknown)', cur_Vendor, cur_IP, ?, ?, + 1, 1, 0, 1 + FROM CurrentScan + WHERE cur_ScanCycle = ? 
+ AND NOT EXISTS (SELECT 1 FROM Devices + WHERE dev_MAC = cur_MAC) """, + (startTime, startTime, cycle) ) + + # Pi-hole - Insert events for new devices + # NOT STRICYLY NECESARY (Devices can be created through Current_Scan) + # Bugfix #2 - Pi-hole devices w/o IP + print_log ('New devices - 3 Pi-hole Events') + sql.execute ("""INSERT INTO Events (eve_MAC, eve_IP, eve_DateTime, + eve_EventType, eve_AdditionalInfo, + eve_PendingAlertEmail) + SELECT PH_MAC, IFNULL (PH_IP,'-'), ?, 'New Device', + '(Pi-Hole) ' || PH_Vendor, 1 + FROM PiHole_Network + WHERE NOT EXISTS (SELECT 1 FROM Devices + WHERE dev_MAC = PH_MAC) """, + (startTime, ) ) + + # Pi-hole - Create New Devices + # Bugfix #2 - Pi-hole devices w/o IP + print_log ('New devices - 4 Pi-hole Create devices') + sql.execute ("""INSERT INTO Devices (dev_MAC, dev_name, dev_Vendor, + dev_LastIP, dev_FirstConnection, dev_LastConnection, + dev_ScanCycle, dev_AlertEvents, dev_AlertDeviceDown, + dev_PresentLastScan) + SELECT PH_MAC, PH_Name, PH_Vendor, IFNULL (PH_IP,'-'), + ?, ?, 1, 1, 0, 1 + FROM PiHole_Network + WHERE NOT EXISTS (SELECT 1 FROM Devices + WHERE dev_MAC = PH_MAC) """, + (startTime, startTime) ) + + # DHCP Leases - Insert events for new devices + print_log ('New devices - 5 DHCP Leases Events') + sql.execute ("""INSERT INTO Events (eve_MAC, eve_IP, eve_DateTime, + eve_EventType, eve_AdditionalInfo, + eve_PendingAlertEmail) + SELECT DHCP_MAC, DHCP_IP, ?, 'New Device', '(DHCP lease)',1 + FROM DHCP_Leases + WHERE NOT EXISTS (SELECT 1 FROM Devices + WHERE dev_MAC = DHCP_MAC) """, + (startTime, ) ) + + # DHCP Leases - Create New Devices + print_log ('New devices - 6 DHCP Leases Create devices') + # BUGFIX #23 - Duplicated MAC in DHCP.Leases + # TEST - Force Duplicated MAC + # sql.execute ("""INSERT INTO DHCP_Leases VALUES + # (1610700000, 'TEST1', '10.10.10.1', 'Test 1', '*')""") + # sql.execute ("""INSERT INTO DHCP_Leases VALUES + # (1610700000, 'TEST2', '10.10.10.2', 'Test 2', '*')""") + sql.execute ("""INSERT INTO Devices (dev_MAC, dev_name, dev_LastIP, + dev_Vendor, dev_FirstConnection, dev_LastConnection, + dev_ScanCycle, dev_AlertEvents, dev_AlertDeviceDown, + dev_PresentLastScan) + SELECT DISTINCT DHCP_MAC, + (SELECT DHCP_Name FROM DHCP_Leases AS D2 + WHERE D2.DHCP_MAC = D1.DHCP_MAC + ORDER BY DHCP_DateTime DESC LIMIT 1), + (SELECT DHCP_IP FROM DHCP_Leases AS D2 + WHERE D2.DHCP_MAC = D1.DHCP_MAC + ORDER BY DHCP_DateTime DESC LIMIT 1), + '(unknown)', ?, ?, 1, 1, 0, 1 + FROM DHCP_Leases AS D1 + WHERE NOT EXISTS (SELECT 1 FROM Devices + WHERE dev_MAC = DHCP_MAC) """, + (startTime, startTime) ) + + # sql.execute ("""INSERT INTO Devices (dev_MAC, dev_name, dev_Vendor, + # dev_LastIP, dev_FirstConnection, dev_LastConnection, + # dev_ScanCycle, dev_AlertEvents, dev_AlertDeviceDown, + # dev_PresentLastScan) + # SELECT DHCP_MAC, DHCP_Name, '(unknown)', DHCP_IP, ?, ?, + # 1, 1, 0, 1 + # FROM DHCP_Leases + # WHERE NOT EXISTS (SELECT 1 FROM Devices + # WHERE dev_MAC = DHCP_MAC) """, + # (startTime, startTime) ) + print_log ('New Devices end') + db.commit() + + +#------------------------------------------------------------------------------- +def update_devices_data_from_scan (db): + sql = db.sql #TO-DO + startTime = timeNow() + # Update Last Connection + print_log ('Update devices - 1 Last Connection') + sql.execute ("""UPDATE Devices SET dev_LastConnection = ?, + dev_PresentLastScan = 1 + WHERE dev_ScanCycle = ? 
+ AND dev_PresentLastScan = 0 + AND EXISTS (SELECT 1 FROM CurrentScan + WHERE dev_MAC = cur_MAC + AND dev_ScanCycle = cur_ScanCycle) """, + (startTime, cycle)) + + # Clean no active devices + print_log ('Update devices - 2 Clean no active devices') + sql.execute ("""UPDATE Devices SET dev_PresentLastScan = 0 + WHERE dev_ScanCycle = ? + AND NOT EXISTS (SELECT 1 FROM CurrentScan + WHERE dev_MAC = cur_MAC + AND dev_ScanCycle = cur_ScanCycle) """, + (cycle,)) + + # Update IP & Vendor + print_log ('Update devices - 3 LastIP & Vendor') + sql.execute ("""UPDATE Devices + SET dev_LastIP = (SELECT cur_IP FROM CurrentScan + WHERE dev_MAC = cur_MAC + AND dev_ScanCycle = cur_ScanCycle), + dev_Vendor = (SELECT cur_Vendor FROM CurrentScan + WHERE dev_MAC = cur_MAC + AND dev_ScanCycle = cur_ScanCycle) + WHERE dev_ScanCycle = ? + AND EXISTS (SELECT 1 FROM CurrentScan + WHERE dev_MAC = cur_MAC + AND dev_ScanCycle = cur_ScanCycle) """, + (cycle,)) + + # Pi-hole Network - Update (unknown) Name + print_log ('Update devices - 4 Unknown Name') + sql.execute ("""UPDATE Devices + SET dev_NAME = (SELECT PH_Name FROM PiHole_Network + WHERE PH_MAC = dev_MAC) + WHERE (dev_Name in ("(unknown)", "(name not found)", "" ) + OR dev_Name IS NULL) + AND EXISTS (SELECT 1 FROM PiHole_Network + WHERE PH_MAC = dev_MAC + AND PH_NAME IS NOT NULL + AND PH_NAME <> '') """) + + # DHCP Leases - Update (unknown) Name + sql.execute ("""UPDATE Devices + SET dev_NAME = (SELECT DHCP_Name FROM DHCP_Leases + WHERE DHCP_MAC = dev_MAC) + WHERE (dev_Name in ("(unknown)", "(name not found)", "" ) + OR dev_Name IS NULL) + AND EXISTS (SELECT 1 FROM DHCP_Leases + WHERE DHCP_MAC = dev_MAC)""") + + # DHCP Leases - Vendor + print_log ('Update devices - 5 Vendor') + + recordsToUpdate = [] + query = """SELECT * FROM Devices + WHERE dev_Vendor = '(unknown)' OR dev_Vendor ='' + OR dev_Vendor IS NULL""" + + for device in sql.execute (query) : + vendor = query_MAC_vendor (device['dev_MAC']) + if vendor != -1 and vendor != -2 : + recordsToUpdate.append ([vendor, device['dev_MAC']]) + + sql.executemany ("UPDATE Devices SET dev_Vendor = ? WHERE dev_MAC = ? 
", + recordsToUpdate ) + + # clean-up device leases table + sql.execute ("DELETE FROM DHCP_Leases") + print_log ('Update devices end') + +#------------------------------------------------------------------------------- +def update_devices_names (db): + sql = db.sql #TO-DO + # Initialize variables + recordsToUpdate = [] + recordsNotFound = [] + + ignored = 0 + notFound = 0 + + foundDig = 0 + foundPholus = 0 + + # BUGFIX #97 - Updating name of Devices w/o IP + sql.execute ("SELECT * FROM Devices WHERE dev_Name IN ('(unknown)','', '(name not found)') AND dev_LastIP <> '-'") + unknownDevices = sql.fetchall() + db.commitDB() + + # perform Pholus scan if (unknown) devices found + if PHOLUS_ACTIVE and (len(unknownDevices) > 0 or PHOLUS_FORCE): + performPholusScan(db, PHOLUS_TIMEOUT, userSubnets) + + # skip checks if no unknown devices + if len(unknownDevices) == 0 and PHOLUS_FORCE == False: + return + + # Devices without name + mylog('verbose', [' Trying to resolve devices without name']) + + # get names from Pholus scan + sql.execute ('SELECT * FROM Pholus_Scan where "Record_Type"="Answer"') + pholusResults = list(sql.fetchall()) + db.commitDB() + + # Number of entries from previous Pholus scans + mylog('verbose', [" Pholus entries from prev scans: ", len(pholusResults)]) + + for device in unknownDevices: + newName = -1 + + # Resolve device name with DiG + newName = resolve_device_name_pholus (device['dev_MAC'], device['dev_LastIP']) + + # count + if newName != -1: + foundDig += 1 + + # Resolve with Pholus + if newName == -1: + newName = resolve_device_name_pholus (device['dev_MAC'], device['dev_LastIP'], pholusResults) + # count + if newName != -1: + foundPholus += 1 + + # isf still not found update name so we can distinguish the devices where we tried already + if newName == -1 : + recordsNotFound.append (["(name not found)", device['dev_MAC']]) + else: + # name wa sfound with DiG or Pholus + recordsToUpdate.append ([newName, device['dev_MAC']]) + + # Print log + mylog('verbose', [" Names Found (DiG/Pholus): ", len(recordsToUpdate), " (",foundDig,"/",foundPholus ,")" ]) + mylog('verbose', [" Names Not Found : ", len(recordsNotFound) ]) + + # update not found devices with (name not found) + sql.executemany ("UPDATE Devices SET dev_Name = ? WHERE dev_MAC = ? ", recordsNotFound ) + # update names of devices which we were bale to resolve + sql.executemany ("UPDATE Devices SET dev_Name = ? WHERE dev_MAC = ? 
", recordsToUpdate ) + db.commitDB() + + diff --git a/pialert/helper.py b/pialert/helper.py index 2723228d..ba5bffa8 100644 --- a/pialert/helper.py +++ b/pialert/helper.py @@ -2,8 +2,6 @@ import datetime import os -import sys -import io import re import subprocess from cron_converter import Cron @@ -12,6 +10,7 @@ from datetime import timedelta import json import time from pathlib import Path +import requests @@ -489,3 +488,86 @@ def checkIPV4(ip): return False +#------------------------------------------------------------------------------- +def isNewVersion(db): + global newVersionAvailable + + if newVersionAvailable == False: + + f = open(pialertPath + '/front/buildtimestamp.txt', 'r') + buildTimestamp = int(f.read().strip()) + f.close() + + data = "" + + try: + url = requests.get("https://api.github.com/repos/jokob-sk/Pi.Alert/releases") + text = url.text + data = json.loads(text) + except requests.exceptions.ConnectionError as e: + mylog('info', [" Couldn't check for new release."]) + data = "" + + # make sure we received a valid response and not an API rate limit exceeded message + if data != "" and len(data) > 0 and isinstance(data, list) and "published_at" in data[0]: + + dateTimeStr = data[0]["published_at"] + + realeaseTimestamp = int(datetime.datetime.strptime(dateTimeStr, '%Y-%m-%dT%H:%M:%SZ').strftime('%s')) + + if realeaseTimestamp > buildTimestamp + 600: + mylog('none', [" New version of the container available!"]) + newVersionAvailable = True + # updateState(db, 'Back_New_Version_Available', str(newVersionAvailable)) ## TO DO add this back in but avoid circular ref with database + + return newVersionAvailable + +#------------------------------------------------------------------------------- +def hide_email(email): + m = email.split('@') + + if len(m) == 2: + return f'{m[0][0]}{"*"*(len(m[0])-2)}{m[0][-1] if len(m[0]) > 1 else ""}@{m[1]}' + + return email + +#------------------------------------------------------------------------------- +def removeDuplicateNewLines(text): + if "\n\n\n" in text: + return removeDuplicateNewLines(text.replace("\n\n\n", "\n\n")) + else: + return text + +#------------------------------------------------------------------------------- + +def add_json_list (row, list): + new_row = [] + for column in row : + column = bytes_to_string(column) + + new_row.append(column) + + list.append(new_row) + + return list + +#------------------------------------------------------------------------------- + +def sanitize_string(input): + if isinstance(input, bytes): + input = input.decode('utf-8') + value = bytes_to_string(re.sub('[^a-zA-Z0-9-_\s]', '', str(input))) + return value + + +#------------------------------------------------------------------------------- +def generate_mac_links (html, deviceUrl): + + p = re.compile(r'(?:[0-9a-fA-F]:?){12}') + + MACs = re.findall(p, html) + + for mac in MACs: + html = html.replace('' + mac + '','' + mac + '') + + return html \ No newline at end of file diff --git a/pialert/mac_vendor.py b/pialert/mac_vendor.py new file mode 100644 index 00000000..f7ca6987 --- /dev/null +++ b/pialert/mac_vendor.py @@ -0,0 +1,102 @@ + +import subprocess + +from pialert.database import updateState +from pialert.helper import timeNow +from pialert.logger import mylog +from conf import pialertPath, vendorsDB + +#=============================================================================== +# UPDATE DEVICE MAC VENDORS +#=============================================================================== + + + +def update_devices_MAC_vendors (db, 
pArg = ''): + sql = db.sql # TO-DO + # Header + updateState(db,"Upkeep: Vendors") + mylog('verbose', ['[', timeNow(), '] Upkeep - Update HW Vendors:' ]) + + # Update vendors DB (iab oui) + mylog('verbose', [' Updating vendors DB (iab & oui)']) + update_args = ['sh', pialertPath + '/update_vendors.sh', pArg] + + try: + # try runnning a subprocess + update_output = subprocess.check_output (update_args) + except subprocess.CalledProcessError as e: + # An error occured, handle it + mylog('none', [' FAILED: Updating vendors DB, set LOG_LEVEL=debug for more info']) + mylog('none', [e.output]) + + # Initialize variables + recordsToUpdate = [] + ignored = 0 + notFound = 0 + + # All devices loop + mylog('verbose', [' Searching devices vendor']) + for device in sql.execute ("""SELECT * FROM Devices + WHERE dev_Vendor = '(unknown)' + OR dev_Vendor ='' + OR dev_Vendor IS NULL""") : + # Search vendor in HW Vendors DB + vendor = query_MAC_vendor (device['dev_MAC']) + if vendor == -1 : + notFound += 1 + elif vendor == -2 : + ignored += 1 + else : + recordsToUpdate.append ([vendor, device['dev_MAC']]) + + # Print log + mylog('verbose', [" Devices Ignored: ", ignored]) + mylog('verbose', [" Vendors Not Found:", notFound]) + mylog('verbose', [" Vendors updated: ", len(recordsToUpdate) ]) + + + # update devices + sql.executemany ("UPDATE Devices SET dev_Vendor = ? WHERE dev_MAC = ? ", + recordsToUpdate ) + + # Commit DB + db.commitDB() + + if len(recordsToUpdate) > 0: + return True + else: + return False + +#------------------------------------------------------------------------------- +def query_MAC_vendor (pMAC): + try : + # BUGFIX #6 - Fix pMAC parameter as numbers + pMACstr = str(pMAC) + + # Check MAC parameter + mac = pMACstr.replace (':','') + if len(pMACstr) != 17 or len(mac) != 12 : + return -2 + + # Search vendor in HW Vendors DB + mac = mac[0:6] + grep_args = ['grep', '-i', mac, vendorsDB] + # Execute command + try: + # try runnning a subprocess + grep_output = subprocess.check_output (grep_args) + except subprocess.CalledProcessError as e: + # An error occured, handle it + mylog('none', [e.output]) + grep_output = " There was an error, check logs for details" + + # Return Vendor + vendor = grep_output[7:] + vendor = vendor.rstrip() + return vendor + + # not Found + except subprocess.CalledProcessError : + return -1 + diff --git a/pialert/mqtt.py b/pialert/mqtt.py new file mode 100644 index 00000000..0d47e142 --- /dev/null +++ b/pialert/mqtt.py @@ -0,0 +1,244 @@ + +import time +import re +from paho.mqtt import client as mqtt_client + +from logger import mylog +from conf import MQTT_BROKER, MQTT_DELAY_SEC, MQTT_PASSWORD, MQTT_PORT, MQTT_QOS, MQTT_USER +from database import get_all_devices, get_device_stats +from helper import bytes_to_string, sanitize_string + + + +#------------------------------------------------------------------------------- +# MQTT +#------------------------------------------------------------------------------- + +mqtt_connected_to_broker = False +mqtt_sensors = [] + +#------------------------------------------------------------------------------- +class sensor_config: + def __init__(self, deviceId, deviceName, sensorType, sensorName, icon): + self.deviceId = deviceId + self.deviceName = deviceName + self.sensorType = sensorType + self.sensorName = sensorName + self.icon = icon + self.hash = str(hash(str(deviceId) + str(deviceName)+ str(sensorType)+ str(sensorName)+ str(icon))) + +#------------------------------------------------------------------------------- + +def 
+
+#-------------------------------------------------------------------------------
+
+def publish_mqtt(client, topic, message):
+    status = 1
+    while status != 0:
+        result = client.publish(
+            topic=topic,
+            payload=message,
+            qos=MQTT_QOS,
+            retain=True,
+        )
+
+        status = result[0]
+
+        if status != 0:
+            mylog('info', ["Waiting to reconnect to MQTT broker"])
+            time.sleep(0.1)
+    return True
+
+#-------------------------------------------------------------------------------
+def create_generic_device(client):
+
+    deviceName = 'PiAlert'
+    deviceId = 'pialert'
+
+    create_sensor(client, deviceId, deviceName, 'sensor', 'online', 'wifi-check')
+    create_sensor(client, deviceId, deviceName, 'sensor', 'down', 'wifi-cancel')
+    create_sensor(client, deviceId, deviceName, 'sensor', 'all', 'wifi')
+    create_sensor(client, deviceId, deviceName, 'sensor', 'archived', 'wifi-lock')
+    create_sensor(client, deviceId, deviceName, 'sensor', 'new', 'wifi-plus')
+    create_sensor(client, deviceId, deviceName, 'sensor', 'unknown', 'wifi-alert')
+
+
+#-------------------------------------------------------------------------------
+def create_sensor(client, deviceId, deviceName, sensorType, sensorName, icon):
+
+    new_sensor_config = sensor_config(deviceId, deviceName, sensorType, sensorName, icon)
+
+    # check if config already in list and if not, add it, otherwise skip
+    global mqtt_sensors, uniqueSensorCount
+
+    is_unique = True
+
+    for sensor in mqtt_sensors:
+        if sensor.hash == new_sensor_config.hash:
+            is_unique = False
+            break
+
+    # save if unique
+    if is_unique:
+        publish_sensor(client, new_sensor_config)
+
+
+
+
+#-------------------------------------------------------------------------------
+def publish_sensor(client, sensorConf):
+
+    global mqtt_sensors
+
+    message = '{ \
+                "name":"'+ sensorConf.deviceName +' '+sensorConf.sensorName+'", \
+                "state_topic":"system-sensors/'+sensorConf.sensorType+'/'+sensorConf.deviceId+'/state", \
+                "value_template":"{{value_json.'+sensorConf.sensorName+'}}", \
+                "unique_id":"'+sensorConf.deviceId+'_sensor_'+sensorConf.sensorName+'", \
+                "device": \
+                    { \
+                        "identifiers": ["'+sensorConf.deviceId+'_sensor"], \
+                        "manufacturer": "PiAlert", \
+                        "name":"'+sensorConf.deviceName+'" \
+                    }, \
+                "icon":"mdi:'+sensorConf.icon+'" \
+                }'
+
+    topic='homeassistant/'+sensorConf.sensorType+'/'+sensorConf.deviceId+'/'+sensorConf.sensorName+'/config'
+
+    # add the sensor to the global list to keep track of successfully added sensors
+    if publish_mqtt(client, topic, message):
+        # hack - delay adding to the queue in case the process is restarted and
+        # previous publish processes were aborted (it takes ~2s to update a
+        # sensor config on the broker)
+        time.sleep(MQTT_DELAY_SEC)
+        mqtt_sensors.append(sensorConf)
+
+#-------------------------------------------------------------------------------
+def mqtt_create_client():
+    def on_disconnect(client, userdata, rc):
+        global mqtt_connected_to_broker
+        mqtt_connected_to_broker = False
+
+        # not sure if the line below is correct / necessary
+        # client = mqtt_create_client()
+
+    def on_connect(client, userdata, flags, rc):
+        global mqtt_connected_to_broker
+
+        if rc == 0:
+            mylog('verbose', ["        Connected to broker"])
+            mqtt_connected_to_broker = True     # Signal connection
+        else:
+            mylog('none', ["        Connection failed"])
+            mqtt_connected_to_broker = False
+
+
+    client = mqtt_client.Client('PiAlert')   # Set Connecting Client ID
+    client.username_pw_set(MQTT_USER, MQTT_PASSWORD)
+    client.on_connect = on_connect
+    client.on_disconnect = on_disconnect
+    client.connect(MQTT_BROKER, MQTT_PORT)
+    client.loop_start()
+
+    return client
+
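The discovery message built in publish_sensor above is assembled by string concatenation; an equivalent json.dumps-based version (a sketch only, not the patch's code) sidesteps quoting and escaping slips when device names contain quotes:

import json

def discovery_payload(sensorConf):
    # same structure as the hand-built message in publish_sensor
    return json.dumps({
        'name': sensorConf.deviceName + ' ' + sensorConf.sensorName,
        'state_topic': 'system-sensors/' + sensorConf.sensorType + '/' + sensorConf.deviceId + '/state',
        'value_template': '{{value_json.' + sensorConf.sensorName + '}}',
        'unique_id': sensorConf.deviceId + '_sensor_' + sensorConf.sensorName,
        'device': {
            'identifiers': [sensorConf.deviceId + '_sensor'],
            'manufacturer': 'PiAlert',
            'name': sensorConf.deviceName,
        },
        'icon': 'mdi:' + sensorConf.icon,
    })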
+
+#-------------------------------------------------------------------------------
+def mqtt_start():
+
+    global client, mqtt_connected_to_broker
+
+    if mqtt_connected_to_broker == False:
+        mqtt_connected_to_broker = True
+        client = mqtt_create_client()
+
+    # General stats
+
+    # Create a generic device for overall stats
+    create_generic_device(client)
+
+    # Get the data
+    row = get_device_stats()
+
+    columns = ["online","down","all","archived","new","unknown"]
+
+    payload = ""
+
+    # Update the values
+    for column in columns:
+        payload += '"'+column+'": ' + str(row[column]) +','
+
+    # Publish (wrap into {} and remove last ',' from above)
+    publish_mqtt(client, "system-sensors/sensor/pialert/state",
+        '{ \
+            '+ payload[:-1] +'\
+        }'
+    )
+
+
+    # Specific devices
+
+    # Get all devices
+    devices = get_all_devices()
+
+    sec_delay = len(devices) * int(MQTT_DELAY_SEC)*5
+
+    mylog('info', ["        Estimated delay: ", (sec_delay), 's ', '(', round(sec_delay/60,1) , 'min)' ])
+
+    for device in devices:
+
+        # Create devices in Home Assistant - send config messages
+        deviceId = 'mac_' + device["dev_MAC"].replace(" ", "").replace(":", "_").lower()
+        deviceNameDisplay = re.sub(r'[^a-zA-Z0-9-_\s]', '', device["dev_Name"])
+
+        create_sensor(client, deviceId, deviceNameDisplay, 'sensor', 'last_ip', 'ip-network')
+        create_sensor(client, deviceId, deviceNameDisplay, 'binary_sensor', 'is_present', 'wifi')
+        create_sensor(client, deviceId, deviceNameDisplay, 'sensor', 'mac_address', 'folder-key-network')
+        create_sensor(client, deviceId, deviceNameDisplay, 'sensor', 'is_new', 'bell-alert-outline')
+        create_sensor(client, deviceId, deviceNameDisplay, 'sensor', 'vendor', 'cog')
+
+        # update device sensors in home assistant
+
+        publish_mqtt(client, 'system-sensors/sensor/'+deviceId+'/state',
+            '{ \
+                "last_ip": "' + device["dev_LastIP"] +'", \
+                "is_new": "' + str(device["dev_NewDevice"]) +'", \
+                "vendor": "' + sanitize_string(device["dev_Vendor"]) +'", \
+                "mac_address": "' + str(device["dev_MAC"]) +'" \
+            }'
+        )
+
+        publish_mqtt(client, 'system-sensors/binary_sensor/'+deviceId+'/state',
+            '{ \
+                "is_present": "' + to_binary_sensor(str(device["dev_PresentLastScan"])) +'"\
+            }'
+        )
+
+        # delete device / topic
+        #  homeassistant/sensor/mac_44_ef_bf_c4_b1_af/is_present/config
+        # client.publish(
+        #     topic="homeassistant/sensor/"+deviceId+"/is_present/config",
+        #     payload="",
+        #     qos=1,
+        #     retain=True,
+        # )
+    # time.sleep(10)
+
+
+#===============================================================================
+# Home Assistant UTILs
+#===============================================================================
+def to_binary_sensor(input):
+    # In HA a binary sensor returns ON or OFF
+    result = "OFF"
+
+    if isinstance(input, str):
+        if input == "1":
+            result = "ON"
+    elif isinstance(input, int):
+        if input == 1:
+            result = "ON"
+    elif isinstance(input, bool):
+        if input == True:
+            result = "ON"
+    # bytestring
+    elif isinstance(input, bytes):
+        if bytes_to_string(input) == "1":
+            result = "ON"
+    return result
\ No newline at end of file
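Hand-checked behaviour of to_binary_sensor above, for reference (a sketch, assuming mqtt.py's imports resolve; note that True never reaches the bool branch, because bool subclasses int in Python and the int branch already answers for it):

from mqtt import to_binary_sensor

assert to_binary_sensor("1") == "ON"
assert to_binary_sensor(1) == "ON"
assert to_binary_sensor(True) == "ON"    # caught by the int branch
assert to_binary_sensor(b"1") == "ON"    # bytes branch via bytes_to_string
assert to_binary_sensor("0") == "OFF"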
diff --git a/pialert/networkscan.py b/pialert/networkscan.py
new file mode 100644
index 00000000..c3962ae3
--- /dev/null
+++ b/pialert/networkscan.py
@@ -0,0 +1,311 @@
+
+
+from arpscan import execute_arpscan
+from conf import DHCP_ACTIVE, ENABLE_PLUGINS, PIHOLE_ACTIVE, cycle, ENABLE_ARPSCAN
+from database import insertOnlineHistory, updateState
+from device import create_new_devices, print_scan_stats, save_scanned_devices, update_devices_data_from_scan, update_devices_names
+from helper import timeNow
+from logger import mylog, print_log
+from pialert.plugin import run_plugin_scripts
+from pihole import copy_pihole_network, read_DHCP_leases
+from reporting import skip_repeated_notifications
+
+#===============================================================================
+# SCAN NETWORK
+#===============================================================================
+
+
+def scan_network (db):
+    sql = db.sql #TO-DO
+    reporting = False
+
+    # Header
+    updateState(db,"Scan: Network")
+    mylog('verbose', ['[', timeNow(), '] Scan Devices:' ])
+
+    # Query ScanCycle properties
+    scanCycle_data = query_ScanCycle_Data (db, True)
+    if scanCycle_data is None:
+        mylog('none', ['\n*************** ERROR ***************'])
+        mylog('none', ['ScanCycle %s not found' % cycle ])
+        mylog('none', ['    Exiting...\n'])
+        return False
+
+    db.commitDB()
+
+    # ScanCycle data
+    cycle_interval = scanCycle_data['cic_EveryXmin']
+
+    # arp-scan command
+    arpscan_devices = []
+    if ENABLE_ARPSCAN:
+        mylog('verbose', ['    arp-scan start'])
+        arpscan_devices = execute_arpscan ()
+        print_log ('arp-scan ends')
+
+    # Pi-hole method
+    if PIHOLE_ACTIVE:
+        mylog('verbose', ['    Pi-hole start'])
+        copy_pihole_network(db)
+        db.commitDB()
+
+    # DHCP Leases method
+    if DHCP_ACTIVE:
+        mylog('verbose', ['    DHCP Leases start'])
+        read_DHCP_leases (db)
+        db.commitDB()
+
+    # Load current scan data
+    mylog('verbose', ['    Processing scan results'])
+    save_scanned_devices (arpscan_devices, cycle_interval)
+
+    # Print stats
+    print_log ('Print Stats')
+    print_scan_stats()
+    print_log ('Stats end')
+
+    # Create Events
+    mylog('verbose', ['    Updating DB Info'])
+    mylog('verbose', ['    Sessions Events (connect / disconnect)'])
+    insert_events(db)
+
+    # Create New Devices
+    # after create events -> avoid 'connection' event
+    mylog('verbose', ['    Creating new devices'])
+    create_new_devices ()
+
+    # Update devices info
+    mylog('verbose', ['    Updating Devices Info'])
+    update_devices_data_from_scan ()
+
+    # Resolve devices names
+    print_log ('    Resolve devices names')
+    update_devices_names(db)
+
+    # Void false connection - disconnections
+    mylog('verbose', ['    Voiding false (ghost) disconnections'])
+    void_ghost_disconnections (db)
+
+    # Pair session events (Connection / Disconnection)
+    mylog('verbose', ['    Pairing session events (connection / disconnection) '])
+    pair_sessions_events(db)
+
+    # Sessions snapshot
+    mylog('verbose', ['    Creating sessions snapshot'])
+    create_sessions_snapshot (db)
+
+    # Sessions snapshot
+    mylog('verbose', ['    Inserting scan results into Online_History'])
+    insertOnlineHistory(db,cycle)
+
+    # Skip repeated notifications
+    mylog('verbose', ['    Skipping repeated notifications'])
+    skip_repeated_notifications (db)
+
+    # Commit changes
+    db.commitDB()
+
+    # Run plugin scripts which are set to run every time after a scan finished
+    if ENABLE_PLUGINS:
+        run_plugin_scripts(db,'always_after_scan')
+
+    return reporting
+
+#-------------------------------------------------------------------------------
+def query_ScanCycle_Data (db, pOpenCloseDB = False, cycle = 1):
+    # Query Data
+    db.sql.execute ("""SELECT cic_arpscanCycles, cic_EveryXmin
+                        FROM ScanCycles
+                        WHERE cic_ID = ? """, (cycle,))
+    sqlRow = db.sql.fetchone()
+
+    # Return Row
+    return sqlRow
+
+
+
+#-------------------------------------------------------------------------------
+def void_ghost_disconnections (db):
+    sql = db.sql #TO-DO
+    startTime = timeNow()
+    # Void connect ghost events (disconnect event exists in last X min.)
+ print_log ('Void - 1 Connect ghost events') + sql.execute ("""UPDATE Events SET eve_PairEventRowid = Null, + eve_EventType ='VOIDED - ' || eve_EventType + WHERE eve_MAC != 'Internet' + AND eve_EventType = 'Connected' + AND eve_DateTime = ? + AND eve_MAC IN ( + SELECT Events.eve_MAC + FROM CurrentScan, Devices, ScanCycles, Events + WHERE cur_ScanCycle = ? + AND dev_MAC = cur_MAC + AND dev_ScanCycle = cic_ID + AND cic_ID = cur_ScanCycle + AND eve_MAC = cur_MAC + AND eve_EventType = 'Disconnected' + AND eve_DateTime >= + DATETIME (?, '-' || cic_EveryXmin ||' minutes') + ) """, + (startTime, cycle, startTime) ) + + # Void connect paired events + print_log ('Void - 2 Paired events') + sql.execute ("""UPDATE Events SET eve_PairEventRowid = Null + WHERE eve_MAC != 'Internet' + AND eve_PairEventRowid IN ( + SELECT Events.RowID + FROM CurrentScan, Devices, ScanCycles, Events + WHERE cur_ScanCycle = ? + AND dev_MAC = cur_MAC + AND dev_ScanCycle = cic_ID + AND cic_ID = cur_ScanCycle + AND eve_MAC = cur_MAC + AND eve_EventType = 'Disconnected' + AND eve_DateTime >= + DATETIME (?, '-' || cic_EveryXmin ||' minutes') + ) """, + (cycle, startTime) ) + + # Void disconnect ghost events + print_log ('Void - 3 Disconnect ghost events') + sql.execute ("""UPDATE Events SET eve_PairEventRowid = Null, + eve_EventType = 'VOIDED - '|| eve_EventType + WHERE eve_MAC != 'Internet' + AND ROWID IN ( + SELECT Events.RowID + FROM CurrentScan, Devices, ScanCycles, Events + WHERE cur_ScanCycle = ? + AND dev_MAC = cur_MAC + AND dev_ScanCycle = cic_ID + AND cic_ID = cur_ScanCycle + AND eve_MAC = cur_MAC + AND eve_EventType = 'Disconnected' + AND eve_DateTime >= + DATETIME (?, '-' || cic_EveryXmin ||' minutes') + ) """, + (cycle, startTime) ) + print_log ('Void end') + db.commitDB() + +#------------------------------------------------------------------------------- +def pair_sessions_events (db): + sql = db.sql #TO-DO + + # NOT NECESSARY FOR INCREMENTAL UPDATE + # print_log ('Pair session - 1 Clean') + # sql.execute ("""UPDATE Events + # SET eve_PairEventRowid = NULL + # WHERE eve_EventType IN ('New Device', 'Connected') + # """ ) + + + # Pair Connection / New Device events + print_log ('Pair session - 1 Connections / New Devices') + sql.execute ("""UPDATE Events + SET eve_PairEventRowid = + (SELECT ROWID + FROM Events AS EVE2 + WHERE EVE2.eve_EventType IN ('New Device', 'Connected', + 'Device Down', 'Disconnected') + AND EVE2.eve_MAC = Events.eve_MAC + AND EVE2.eve_Datetime > Events.eve_DateTime + ORDER BY EVE2.eve_DateTime ASC LIMIT 1) + WHERE eve_EventType IN ('New Device', 'Connected') + AND eve_PairEventRowid IS NULL + """ ) + + # Pair Disconnection / Device Down + print_log ('Pair session - 2 Disconnections') + sql.execute ("""UPDATE Events + SET eve_PairEventRowid = + (SELECT ROWID + FROM Events AS EVE2 + WHERE EVE2.eve_PairEventRowid = Events.ROWID) + WHERE eve_EventType IN ('Device Down', 'Disconnected') + AND eve_PairEventRowid IS NULL + """ ) + print_log ('Pair session end') + + db.commitDB() + +#------------------------------------------------------------------------------- +def create_sessions_snapshot (db): + sql = db.sql #TO-DO + + # Clean sessions snapshot + print_log ('Sessions Snapshot - 1 Clean') + sql.execute ("DELETE FROM SESSIONS" ) + + # Insert sessions + print_log ('Sessions Snapshot - 2 Insert') + sql.execute ("""INSERT INTO Sessions + SELECT * FROM Convert_Events_to_Sessions""" ) + + print_log ('Sessions end') + db.commitDB() + + 
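The pairing UPDATE in pair_sessions_events above is self-contained enough to demo on a toy table; a minimal in-memory illustration (sketch only; columns reduced to the ones the query touches):

import sqlite3

con = sqlite3.connect(':memory:')
cur = con.cursor()
cur.execute("""CREATE TABLE Events (eve_MAC TEXT, eve_DateTime TEXT,
                                    eve_EventType TEXT, eve_PairEventRowid INTEGER)""")
cur.executemany("INSERT INTO Events VALUES (?, ?, ?, ?)", [
    ('aa:bb:cc:dd:ee:ff', '2023-05-21 10:00:00', 'Connected',    None),
    ('aa:bb:cc:dd:ee:ff', '2023-05-21 11:00:00', 'Disconnected', None),
])

# same shape as 'Pair session - 1': point every connection-type event at the
# next later disconnection-type event of the same MAC
cur.execute("""UPDATE Events
                SET eve_PairEventRowid =
                    (SELECT ROWID
                       FROM Events AS EVE2
                      WHERE EVE2.eve_EventType IN ('New Device', 'Connected',
                                                   'Device Down', 'Disconnected')
                        AND EVE2.eve_MAC = Events.eve_MAC
                        AND EVE2.eve_DateTime > Events.eve_DateTime
                      ORDER BY EVE2.eve_DateTime ASC LIMIT 1)
              WHERE eve_EventType IN ('New Device', 'Connected')
                AND eve_PairEventRowid IS NULL""")

print(cur.execute("SELECT eve_EventType, eve_PairEventRowid FROM Events").fetchall())
# -> [('Connected', 2), ('Disconnected', None)]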
+#------------------------------------------------------------------------------- +def insert_events (db): + sql = db.sql #TO-DO + startTime = timeNow() + + # Check device down + print_log ('Events 1 - Devices down') + sql.execute ("""INSERT INTO Events (eve_MAC, eve_IP, eve_DateTime, + eve_EventType, eve_AdditionalInfo, + eve_PendingAlertEmail) + SELECT dev_MAC, dev_LastIP, ?, 'Device Down', '', 1 + FROM Devices + WHERE dev_AlertDeviceDown = 1 + AND dev_PresentLastScan = 1 + AND dev_ScanCycle = ? + AND NOT EXISTS (SELECT 1 FROM CurrentScan + WHERE dev_MAC = cur_MAC + AND dev_ScanCycle = cur_ScanCycle) """, + (startTime, cycle) ) + + # Check new connections + print_log ('Events 2 - New Connections') + sql.execute ("""INSERT INTO Events (eve_MAC, eve_IP, eve_DateTime, + eve_EventType, eve_AdditionalInfo, + eve_PendingAlertEmail) + SELECT cur_MAC, cur_IP, ?, 'Connected', '', dev_AlertEvents + FROM Devices, CurrentScan + WHERE dev_MAC = cur_MAC AND dev_ScanCycle = cur_ScanCycle + AND dev_PresentLastScan = 0 + AND dev_ScanCycle = ? """, + (startTime, cycle) ) + + # Check disconnections + print_log ('Events 3 - Disconnections') + sql.execute ("""INSERT INTO Events (eve_MAC, eve_IP, eve_DateTime, + eve_EventType, eve_AdditionalInfo, + eve_PendingAlertEmail) + SELECT dev_MAC, dev_LastIP, ?, 'Disconnected', '', + dev_AlertEvents + FROM Devices + WHERE dev_AlertDeviceDown = 0 + AND dev_PresentLastScan = 1 + AND dev_ScanCycle = ? + AND NOT EXISTS (SELECT 1 FROM CurrentScan + WHERE dev_MAC = cur_MAC + AND dev_ScanCycle = cur_ScanCycle) """, + (startTime, cycle) ) + + # Check IP Changed + print_log ('Events 4 - IP Changes') + sql.execute ("""INSERT INTO Events (eve_MAC, eve_IP, eve_DateTime, + eve_EventType, eve_AdditionalInfo, + eve_PendingAlertEmail) + SELECT cur_MAC, cur_IP, ?, 'IP Changed', + 'Previous IP: '|| dev_LastIP, dev_AlertEvents + FROM Devices, CurrentScan + WHERE dev_MAC = cur_MAC AND dev_ScanCycle = cur_ScanCycle + AND dev_ScanCycle = ? 
+                     AND dev_LastIP <> cur_IP """,
+                    (startTime, cycle) )
+    print_log ('Events end')
+
+
diff --git a/pialert/pholusscan.py b/pialert/pholusscan.py
new file mode 100644
index 00000000..41847d0f
--- /dev/null
+++ b/pialert/pholusscan.py
@@ -0,0 +1,201 @@
+import subprocess
+import re
+
+from const import fullPholusPath, logPath
+from pialert.database import updateState
+from pialert.helper import checkIPV4, timeNow
+from pialert.logger import mylog
+
+#-------------------------------------------------------------------------------
+
+def performPholusScan (db, timeoutSec, userSubnets):
+    sql = db.sql # TO-DO
+    # scan every interface
+    for subnet in userSubnets:
+
+        temp = subnet.split("--interface=")
+
+        if len(temp) != 2:
+            mylog('none', ["    Skip scan (need subnet in format '192.168.1.0/24 --interface=eth0'), got: ", subnet])
+            return
+
+        mask = temp[0].strip()
+        interface = temp[1].strip()
+
+        # logging & updating app state
+        updateState(db,"Scan: Pholus")
+        mylog('info', ['[', timeNow(), '] Scan: Pholus for ', str(timeoutSec), 's ('+ str(round(int(timeoutSec) / 60, 1)) +'min)'])
+        mylog('verbose', ["    Pholus scan on [interface] ", interface, " [mask] " , mask])
+
+        # the scan always lasts 2x as long, so the desired user time from settings needs to be halved
+        adjustedTimeout = str(round(int(timeoutSec) / 2, 0))
+
+        #  python3 -m trace --trace /home/pi/pialert/pholus/pholus3.py eth1 -rdns_scanning 192.168.1.0/24 -stimeout 600
+        pholus_args = ['python3', fullPholusPath, interface, "-rdns_scanning", mask, "-stimeout", adjustedTimeout]
+
+        # Execute command
+        output = ""
+
+        try:
+            # try running a subprocess with a forced (timeout + 30 seconds) in case the subprocess hangs
+            output = subprocess.check_output (pholus_args, universal_newlines=True, stderr=subprocess.STDOUT, timeout=(int(timeoutSec) + 30))
+        except subprocess.CalledProcessError as e:
+            # An error occurred, handle it
+            mylog('none', [e.output])
+            mylog('none', ["    Error - Pholus Scan - check logs"])
+        except subprocess.TimeoutExpired as timeErr:
+            mylog('none', ['    Pholus TIMEOUT - the process forcefully terminated as timeout reached'])
+
+        if output == "": # check if the subprocess failed
+            mylog('none', ['[', timeNow(), '] Scan: Pholus FAIL - check logs'])
+        else:
+            mylog('verbose', ['[', timeNow(), '] Scan: Pholus SUCCESS'])
+
+        # check the last run output
+        f = open(logPath + '/pialert_pholus_lastrun.log', 'r+')
+        newLines = f.read().split('\n')
+        f.close()
+
+        # cleanup - select only lines containing a separator to filter out unnecessary data
+        newLines = list(filter(lambda x: '|' in x, newLines))
+
+        # build SQL query parameters to insert into the DB
+        params = []
+
+        for line in newLines:
+            columns = line.split("|")
+            if len(columns) == 4:
+                params.append(( interface + " " + mask, timeNow() , columns[0].replace(" ", ""), columns[1].replace(" ", ""), columns[2].replace(" ", ""), columns[3], ''))
+
+        if len(params) > 0:
+            sql.executemany ("""INSERT INTO Pholus_Scan ("Info", "Time", "MAC", "IP_v4_or_v6", "Record_Type", "Value", "Extra") VALUES (?, ?, ?, ?, ?, ?, ?)""", params)
+            db.commitDB()

+#-------------------------------------------------------------------------------
+def cleanResult(str):
+    # alternative str.split('.')[0]
+    str = str.replace("._airplay", "")
+    str = str.replace("._tcp", "")
+    str = str.replace(".local", "")
+    str = str.replace("._esphomelib", "")
+    str = str.replace("._googlecast", "")
+    str = str.replace(".lan", "")
+    str = str.replace(".home", "")
+    str = re.sub(r'-[a-fA-F0-9]{32}', '', str)    # removing last part of e.g. Nest-Audio-ff77ff77ff77ff77ff77ff77ff77ff77
+    # remove trailing dots
+    if str.endswith('.'):
+        str = str[:-1]
+
+    return str
+
+
+# Disclaimer - I'm interfacing with a script I didn't write (pholus3.py) so it's possible I'm missing types of answers
+# it's also possible the pholus3.py script can be adjusted to provide a better output to interface with it
+# Hit me with a PR if you know how! :)
+def resolve_device_name_pholus (pMAC, pIP, allRes):
+
+    pholusMatchesIndexes = []
+
+    index = 0
+    for result in allRes:
+        # limiting entries used for name resolution to the ones containing the current IP (v4 only)
+        if result["MAC"] == pMAC and result["Record_Type"] == "Answer" and result["IP_v4_or_v6"] == pIP and '._googlezone' not in result["Value"]:
+            # found entries with a matching MAC address, let's collect indexes
+            pholusMatchesIndexes.append(index)
+
+        index += 1
+
+    # return if nothing found
+    if len(pholusMatchesIndexes) == 0:
+        return -1
+
+    # we have some entries, let's try to select the most useful one
+
+    # airplay matches contain a lot of information
+    # Matches for example:
+    # Brand Tv (50)._airplay._tcp.local. TXT Class:32769 "acl=0 deviceid=66:66:66:66:66:66 features=0x77777,0x38BCB46 rsf=0x3 fv=p20.T-FFFFFF-03.1 flags=0x204 model=XXXX manufacturer=Brand serialNumber=XXXXXXXXXXX protovers=1.1 srcvers=777.77.77 pi=FF:FF:FF:FF:FF:FF psi=00000000-0000-0000-0000-FFFFFFFFFF gid=00000000-0000-0000-0000-FFFFFFFFFF gcgl=0 pk=AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
+    for i in pholusMatchesIndexes:
+        if checkIPV4(allRes[i]['IP_v4_or_v6']) and '._airplay._tcp.local. TXT Class:32769' in str(allRes[i]["Value"]):
+            return allRes[i]["Value"].split('._airplay._tcp.local. TXT Class:32769')[0]
+
+    # second best - contains airplay
+    # Matches for example:
+    # _airplay._tcp.local. PTR Class:IN "Brand Tv (50)._airplay._tcp.local."
+    for i in pholusMatchesIndexes:
+        if checkIPV4(allRes[i]['IP_v4_or_v6']) and '_airplay._tcp.local. PTR Class:IN' in allRes[i]["Value"] and ('._googlecast') not in allRes[i]["Value"]:
+            return cleanResult(allRes[i]["Value"].split('"')[1])
+
+    # Contains PTR Class:32769
+    # Matches for example:
+    # 3.1.168.192.in-addr.arpa. PTR Class:32769 "MyPc.local."
+    for i in pholusMatchesIndexes:
+        if checkIPV4(allRes[i]['IP_v4_or_v6']) and 'PTR Class:32769' in allRes[i]["Value"]:
+            return cleanResult(allRes[i]["Value"].split('"')[1])
+
+    # Contains AAAA Class:IN
+    # Matches for example:
+    # DESKTOP-SOMEID.local. AAAA Class:IN "fe80::fe80:fe80:fe80:fe80"
+    for i in pholusMatchesIndexes:
+        if checkIPV4(allRes[i]['IP_v4_or_v6']) and 'AAAA Class:IN' in allRes[i]["Value"]:
+            return cleanResult(allRes[i]["Value"].split('.local.')[0])
+
+    # Contains _googlecast._tcp.local. PTR Class:IN
+    # Matches for example:
+    # _googlecast._tcp.local. PTR Class:IN "Nest-Audio-ff77ff77ff77ff77ff77ff77ff77ff77._googlecast._tcp.local."
+    for i in pholusMatchesIndexes:
+        if checkIPV4(allRes[i]['IP_v4_or_v6']) and '_googlecast._tcp.local. PTR Class:IN' in allRes[i]["Value"] and ('Google-Cast-Group') not in allRes[i]["Value"]:
+            return cleanResult(allRes[i]["Value"].split('"')[1])
+
+    # Contains A Class:32769
+    # Matches for example:
+    # Android.local. A Class:32769 "192.168.1.6"
+    for i in pholusMatchesIndexes:
+        if checkIPV4(allRes[i]['IP_v4_or_v6']) and ' A Class:32769' in allRes[i]["Value"]:
+            return cleanResult(allRes[i]["Value"].split(' A Class:32769')[0])
+
+    # Contains PTR Class:IN
+    # Matches for example:
+    # _esphomelib._tcp.local. PTR Class:IN "ceiling-light-1._esphomelib._tcp.local."
+    for i in pholusMatchesIndexes:
+        if checkIPV4(allRes[i]['IP_v4_or_v6']) and 'PTR Class:IN' in allRes[i]["Value"]:
+            return cleanResult(allRes[i]["Value"].split('"')[1])
+
+    return -1
+
+#-------------------------------------------------------------------------------
+
+def resolve_device_name_dig (pMAC, pIP):
+
+    newName = ""
+
+    try:
+        dig_args = ['dig', '+short', '-x', pIP]
+
+        # Execute command
+        try:
+            # try running a subprocess
+            newName = subprocess.check_output (dig_args, universal_newlines=True)
+        except subprocess.CalledProcessError as e:
+            # An error occurred, handle it
+            mylog('none', [e.output])
+            # newName = "Error - check logs"
+            return -1
+
+        # Check returns
+        newName = newName.strip()
+
+        if len(newName) == 0:
+            return -1
+
+        # Cleanup
+        newName = cleanResult(newName)
+
+        if newName == "" or len(newName) == 0:
+            return -1
+
+        # Return newName
+        return newName
+
+    # not Found
+    except subprocess.CalledProcessError:
+        return -1
diff --git a/pialert/pialert.py b/pialert/pialert.py
index 33fbe889..f217bc0d 100755
--- a/pialert/pialert.py
+++ b/pialert/pialert.py
@@ -14,8 +14,7 @@
 # IMPORTS
 #===============================================================================
 from __future__ import print_function
-from email.mime.multipart import MIMEMultipart
-from email.mime.text import MIMEText
+
 import sys
 from collections import namedtuple
@@ -33,8 +32,6 @@ import smtplib
 import csv
 import json
 import requests
-from base64 import b64encode
-from paho.mqtt import client as mqtt_client
 import threading
 from pathlib import Path
 from cron_converter import Cron
@@ -49,19 +46,27 @@ from const import *
 from conf import *
 # from config import DIG_GET_IP_ARG, ENABLE_PLUGINS
 from logger import append_line_to_file, mylog, print_log, logResult
-from helper import bytes_to_string, checkIPV4, filePermissions, importConfigs, timeNow, updateSubnets, write_file
+from helper import checkIPV4, filePermissions, importConfigs, isNewVersion, removeDuplicateNewLines, timeNow, write_file
 from database import *
 from internet import check_IP_format, check_internet_IP, get_internet_IP
 from api import update_api
 from files import get_file_content
-from plugin import execute_plugin, get_plugin_setting, plugin_object_class, print_plugin_info
+from mqtt import mqtt_start
+from pialert.arpscan import execute_arpscan
+from pialert.mac_vendor import query_MAC_vendor, update_devices_MAC_vendors
+from pialert.networkscan import scan_network
+from pialert.nmapscan import performNmapScan
+from pialert.pholusscan import performPholusScan, resolve_device_name_pholus
+from pialert.pihole import copy_pihole_network, read_DHCP_leases
+from pialert.reporting import send_apprise, send_email, send_notifications, send_ntfy, send_pushsafer, send_webhook, skip_repeated_notifications
+from plugin import execute_plugin, get_plugin_setting, print_plugin_info, run_plugin_scripts
 
 # Global variables
-debug_force_notification = False
+
 userSubnets = []
 changedPorts_json_struc = None
@@ -194,7 +199,7 @@
         if run:
             pholusSchedule.last_run = datetime.datetime.now(tz).replace(microsecond=0)
-            performPholusScan(db, PHOLUS_RUN_TIMEOUT)
+            performPholusScan(db, PHOLUS_RUN_TIMEOUT, userSubnets)
 
     # Execute scheduled or one-off Nmap scan if enabled and run conditions fulfilled
     if NMAP_RUN == "schedule" or NMAP_RUN == "once":
@@ -212,7 +217,7 @@
         if run:
             nmapSchedule.last_run = datetime.datetime.now(tz).replace(microsecond=0)
-            performNmapScan(get_all_devices())
+            performNmapScan(db, get_all_devices(db))
 
     # Perform a 
network scan via arp-scan or pihole if last_network_scan + datetime.timedelta(minutes=SCAN_CYCLE_MINUTES) < time_started: @@ -261,7 +266,7 @@ def main (): # Scan newly found devices with Nmap if enabled if NMAP_ACTIVE and len(newDevices) > 0: - performNmapScan( newDevices) + performNmapScan( db, newDevices) # send all configured notifications send_notifications(db) @@ -300,2228 +305,12 @@ def main (): -#=============================================================================== -# UPDATE DEVICE MAC VENDORS -#=============================================================================== -def update_devices_MAC_vendors (db, pArg = ''): - # Header - updateState(db,"Upkeep: Vendors") - mylog('verbose', ['[', startTime, '] Upkeep - Update HW Vendors:' ]) - - # Update vendors DB (iab oui) - mylog('verbose', [' Updating vendors DB (iab & oui)']) - update_args = ['sh', pialertPath + '/update_vendors.sh', pArg] - - try: - # try runnning a subprocess - update_output = subprocess.check_output (update_args) - except subprocess.CalledProcessError as e: - # An error occured, handle it - mylog('none', [' FAILED: Updating vendors DB, set LOG_LEVEL=debug for more info']) - mylog('none', [e.output]) - - # Initialize variables - recordsToUpdate = [] - ignored = 0 - notFound = 0 - - # All devices loop - mylog('verbose', [' Searching devices vendor']) - for device in sql.execute ("""SELECT * FROM Devices - WHERE dev_Vendor = '(unknown)' - OR dev_Vendor ='' - OR dev_Vendor IS NULL""") : - # Search vendor in HW Vendors DB - vendor = query_MAC_vendor (device['dev_MAC']) - if vendor == -1 : - notFound += 1 - elif vendor == -2 : - ignored += 1 - else : - recordsToUpdate.append ([vendor, device['dev_MAC']]) - - # Print log - mylog('verbose', [" Devices Ignored: ", ignored]) - mylog('verbose', [" Vendors Not Found:", notFound]) - mylog('verbose', [" Vendors updated: ", len(recordsToUpdate) ]) - - - # update devices - sql.executemany ("UPDATE Devices SET dev_Vendor = ? WHERE dev_MAC = ? 
", - recordsToUpdate ) - - # Commit DB - db.commitDB() - - if len(recordsToUpdate) > 0: - return True - else: - return False - -#------------------------------------------------------------------------------- -def query_MAC_vendor (pMAC): - try : - # BUGFIX #6 - Fix pMAC parameter as numbers - pMACstr = str(pMAC) - - # Check MAC parameter - mac = pMACstr.replace (':','') - if len(pMACstr) != 17 or len(mac) != 12 : - return -2 - - # Search vendor in HW Vendors DB - mac = mac[0:6] - grep_args = ['grep', '-i', mac, vendorsDB] - # Execute command - try: - # try runnning a subprocess - grep_output = subprocess.check_output (grep_args) - except subprocess.CalledProcessError as e: - # An error occured, handle it - mylog('none', [e.output]) - grep_output = " There was an error, check logs for details" - - # Return Vendor - vendor = grep_output[7:] - vendor = vendor.rstrip() - return vendor - - # not Found - except subprocess.CalledProcessError : - return -1 - -#=============================================================================== -# SCAN NETWORK -#=============================================================================== -def scan_network (): - reporting = False - - # Header - updateState(db,"Scan: Network") - mylog('verbose', ['[', startTime, '] Scan Devices:' ]) - - # Query ScanCycle properties - scanCycle_data = query_ScanCycle_Data (True) - if scanCycle_data is None: - mylog('none', ['\n*************** ERROR ***************']) - mylog('none', ['ScanCycle %s not found' % cycle ]) - mylog('none', [' Exiting...\n']) - return False - - db.commitDB() - - # ScanCycle data - cycle_interval = scanCycle_data['cic_EveryXmin'] - - # arp-scan command - arpscan_devices = [] - if ENABLE_ARPSCAN: - mylog('verbose', [' arp-scan start']) - arpscan_devices = execute_arpscan () - print_log ('arp-scan ends') - - # Pi-hole method - if PIHOLE_ACTIVE : - mylog('verbose', [' Pi-hole start']) - copy_pihole_network() - db.commitDB() - - # DHCP Leases method - if DHCP_ACTIVE : - mylog('verbose', [' DHCP Leases start']) - read_DHCP_leases () - db.commitDB() - - # Load current scan data - mylog('verbose', [' Processing scan results']) - save_scanned_devices (arpscan_devices, cycle_interval) - - # Print stats - print_log ('Print Stats') - print_scan_stats() - print_log ('Stats end') - - # Create Events - mylog('verbose', [' Updating DB Info']) - mylog('verbose', [' Sessions Events (connect / discconnect)']) - insert_events() - - # Create New Devices - # after create events -> avoid 'connection' event - mylog('verbose', [' Creating new devices']) - create_new_devices () - - # Update devices info - mylog('verbose', [' Updating Devices Info']) - update_devices_data_from_scan () - - # Resolve devices names - print_log (' Resolve devices names') - update_devices_names(db) - - # Void false connection - disconnections - mylog('verbose', [' Voiding false (ghost) disconnections']) - void_ghost_disconnections (db) - - # Pair session events (Connection / Disconnection) - mylog('verbose', [' Pairing session events (connection / disconnection) ']) - pair_sessions_events(db) - - # Sessions snapshot - mylog('verbose', [' Creating sessions snapshot']) - create_sessions_snapshot (db) - - # Sessions snapshot - mylog('verbose', [' Inserting scan results into Online_History']) - insertOnlineHistory() - - # Skip repeated notifications - mylog('verbose', [' Skipping repeated notifications']) - skip_repeated_notifications (db) - - # Commit changes - db.commitDB() - - # Run splugin scripts which are set to run every timne after a 
scan finished - if ENABLE_PLUGINS: - run_plugin_scripts(db,'always_after_scan') - - return reporting - -#------------------------------------------------------------------------------- -def query_ScanCycle_Data (pOpenCloseDB = False, cycle = 1): - # Query Data - sql.execute ("""SELECT cic_arpscanCycles, cic_EveryXmin - FROM ScanCycles - WHERE cic_ID = ? """, (cycle,)) - sqlRow = sql.fetchone() - - # Return Row - return sqlRow - -#------------------------------------------------------------------------------- -def execute_arpscan (): - - # output of possible multiple interfaces - arpscan_output = "" - - # scan each interface - for interface in userSubnets : - arpscan_output += execute_arpscan_on_interface (interface) - - # Search IP + MAC + Vendor as regular expresion - re_ip = r'(?P((2[0-5]|1[0-9]|[0-9])?[0-9]\.){3}((2[0-5]|1[0-9]|[0-9])?[0-9]))' - re_mac = r'(?P([0-9a-fA-F]{2}[:-]){5}([0-9a-fA-F]{2}))' - re_hw = r'(?P.*)' - re_pattern = re.compile (re_ip + '\s+' + re_mac + '\s' + re_hw) - - # Create Userdict of devices - devices_list = [device.groupdict() - for device in re.finditer (re_pattern, arpscan_output)] - - # Delete duplicate MAC - unique_mac = [] - unique_devices = [] - - for device in devices_list : - if device['mac'] not in unique_mac: - unique_mac.append(device['mac']) - unique_devices.append(device) - - # return list - return unique_devices - -#------------------------------------------------------------------------------- -def execute_arpscan_on_interface (interface): - # Prepare command arguments - subnets = interface.strip().split() - # Retry is 6 to avoid false offline devices - arpscan_args = ['sudo', 'arp-scan', '--ignoredups', '--retry=6'] + subnets - - # Execute command - try: - # try runnning a subprocess - result = subprocess.check_output (arpscan_args, universal_newlines=True) - except subprocess.CalledProcessError as e: - # An error occured, handle it - mylog('none', [e.output]) - result = "" - - return result - -#------------------------------------------------------------------------------- -def copy_pihole_network (): - - # Open Pi-hole DB - sql.execute ("ATTACH DATABASE '"+ piholeDB +"' AS PH") - - # Copy Pi-hole Network table - sql.execute ("DELETE FROM PiHole_Network") - sql.execute ("""INSERT INTO PiHole_Network (PH_MAC, PH_Vendor, PH_LastQuery, - PH_Name, PH_IP) - SELECT hwaddr, macVendor, lastQuery, - (SELECT name FROM PH.network_addresses - WHERE network_id = id ORDER BY lastseen DESC, ip), - (SELECT ip FROM PH.network_addresses - WHERE network_id = id ORDER BY lastseen DESC, ip) - FROM PH.network - WHERE hwaddr NOT LIKE 'ip-%' - AND hwaddr <> '00:00:00:00:00:00' """) - sql.execute ("""UPDATE PiHole_Network SET PH_Name = '(unknown)' - WHERE PH_Name IS NULL OR PH_Name = '' """) - # Close Pi-hole DB - sql.execute ("DETACH PH") - - return str(sql.rowcount) != "0" - -#------------------------------------------------------------------------------- -def read_DHCP_leases (): - # Read DHCP Leases - # Bugfix #1 - dhcp.leases: lines with different number of columns (5 col) - data = [] - with open(piholeDhcpleases, 'r') as f: - for line in f: - reporting = True - row = line.rstrip().split() - if len(row) == 5 : - data.append (row) - - # Insert into PiAlert table - sql.executemany ("""INSERT INTO DHCP_Leases (DHCP_DateTime, DHCP_MAC, - DHCP_IP, DHCP_Name, DHCP_MAC2) - VALUES (?, ?, ?, ?, ?) 
- """, data) - - - -#------------------------------------------------------------------------------- -def save_scanned_devices (p_arpscan_devices, p_cycle_interval): - cycle = 1 # always 1, only one cycle supported - - # Delete previous scan data - sql.execute ("DELETE FROM CurrentScan WHERE cur_ScanCycle = ?", - (cycle,)) - - if len(p_arpscan_devices) > 0: - # Insert new arp-scan devices - sql.executemany ("INSERT INTO CurrentScan (cur_ScanCycle, cur_MAC, "+ - " cur_IP, cur_Vendor, cur_ScanMethod) "+ - "VALUES ("+ str(cycle) + ", :mac, :ip, :hw, 'arp-scan')", - p_arpscan_devices) - - # Insert Pi-hole devices - sql.execute ("""INSERT INTO CurrentScan (cur_ScanCycle, cur_MAC, - cur_IP, cur_Vendor, cur_ScanMethod) - SELECT ?, PH_MAC, PH_IP, PH_Vendor, 'Pi-hole' - FROM PiHole_Network - WHERE PH_LastQuery >= ? - AND NOT EXISTS (SELECT 'X' FROM CurrentScan - WHERE cur_MAC = PH_MAC - AND cur_ScanCycle = ? )""", - (cycle, - (int(startTime.strftime('%s')) - 60 * p_cycle_interval), - cycle) ) - - # Check Internet connectivity - internet_IP = get_internet_IP(DIG_GET_IP_ARG) - # TESTING - Force IP - # internet_IP = "" - if internet_IP != "" : - sql.execute ("""INSERT INTO CurrentScan (cur_ScanCycle, cur_MAC, cur_IP, cur_Vendor, cur_ScanMethod) - VALUES (?, 'Internet', ?, Null, 'queryDNS') """, (cycle, internet_IP) ) - - # #76 Add Local MAC of default local interface - # BUGFIX #106 - Device that pialert is running - # local_mac_cmd = ["bash -lc ifconfig `ip route list default | awk {'print $5'}` | grep ether | awk '{print $2}'"] - # local_mac_cmd = ["/sbin/ifconfig `ip route list default | sort -nk11 | head -1 | awk {'print $5'}` | grep ether | awk '{print $2}'"] - local_mac_cmd = ["/sbin/ifconfig `ip -o route get 1 | sed 's/^.*dev \\([^ ]*\\).*$/\\1/;q'` | grep ether | awk '{print $2}'"] - local_mac = subprocess.Popen (local_mac_cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT).communicate()[0].decode().strip() - - # local_dev_cmd = ["ip -o route get 1 | sed 's/^.*dev \\([^ ]*\\).*$/\\1/;q'"] - # local_dev = subprocess.Popen (local_dev_cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT).communicate()[0].decode().strip() - - # local_ip_cmd = ["ip route list default | awk {'print $7'}"] - local_ip_cmd = ["ip -o route get 1 | sed 's/^.*src \\([^ ]*\\).*$/\\1/;q'"] - local_ip = subprocess.Popen (local_ip_cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT).communicate()[0].decode().strip() - - mylog('debug', [' Saving this IP into the CurrentScan table:', local_ip]) - - if check_IP_format(local_ip) == '': - local_ip = '0.0.0.0' - - # Check if local mac has been detected with other methods - sql.execute ("SELECT COUNT(*) FROM CurrentScan WHERE cur_ScanCycle = ? AND cur_MAC = ? ", (cycle, local_mac) ) - if sql.fetchone()[0] == 0 : - sql.execute ("INSERT INTO CurrentScan (cur_ScanCycle, cur_MAC, cur_IP, cur_Vendor, cur_ScanMethod) "+ - "VALUES ( ?, ?, ?, Null, 'local_MAC') ", (cycle, local_mac, local_ip) ) - -#------------------------------------------------------------------------------- -def print_scan_stats (): - # Devices Detected - sql.execute ("""SELECT COUNT(*) FROM CurrentScan - WHERE cur_ScanCycle = ? """, - (cycle,)) - mylog('verbose', [' Devices Detected.......: ', str (sql.fetchone()[0]) ]) - - # Devices arp-scan - sql.execute ("""SELECT COUNT(*) FROM CurrentScan - WHERE cur_ScanMethod='arp-scan' AND cur_ScanCycle = ? 
""", - (cycle,)) - mylog('verbose', [' arp-scan detected..: ', str (sql.fetchone()[0]) ]) - - # Devices Pi-hole - sql.execute ("""SELECT COUNT(*) FROM CurrentScan - WHERE cur_ScanMethod='PiHole' AND cur_ScanCycle = ? """, - (cycle,)) - mylog('verbose', [' Pi-hole detected...: +' + str (sql.fetchone()[0]) ]) - - # New Devices - sql.execute ("""SELECT COUNT(*) FROM CurrentScan - WHERE cur_ScanCycle = ? - AND NOT EXISTS (SELECT 1 FROM Devices - WHERE dev_MAC = cur_MAC) """, - (cycle,)) - mylog('verbose', [' New Devices........: ' + str (sql.fetchone()[0]) ]) - - # Devices in this ScanCycle - sql.execute ("""SELECT COUNT(*) FROM Devices, CurrentScan - WHERE dev_MAC = cur_MAC AND dev_ScanCycle = cur_ScanCycle - AND dev_ScanCycle = ? """, - (cycle,)) - - mylog('verbose', [' Devices in this cycle..: ' + str (sql.fetchone()[0]) ]) - - # Down Alerts - sql.execute ("""SELECT COUNT(*) FROM Devices - WHERE dev_AlertDeviceDown = 1 - AND dev_ScanCycle = ? - AND NOT EXISTS (SELECT 1 FROM CurrentScan - WHERE dev_MAC = cur_MAC - AND dev_ScanCycle = cur_ScanCycle) """, - (cycle,)) - mylog('verbose', [' Down Alerts........: ' + str (sql.fetchone()[0]) ]) - - # New Down Alerts - sql.execute ("""SELECT COUNT(*) FROM Devices - WHERE dev_AlertDeviceDown = 1 - AND dev_PresentLastScan = 1 - AND dev_ScanCycle = ? - AND NOT EXISTS (SELECT 1 FROM CurrentScan - WHERE dev_MAC = cur_MAC - AND dev_ScanCycle = cur_ScanCycle) """, - (cycle,)) - mylog('verbose', [' New Down Alerts....: ' + str (sql.fetchone()[0]) ]) - - # New Connections - sql.execute ("""SELECT COUNT(*) FROM Devices, CurrentScan - WHERE dev_MAC = cur_MAC AND dev_ScanCycle = cur_ScanCycle - AND dev_PresentLastScan = 0 - AND dev_ScanCycle = ? """, - (cycle,)) - mylog('verbose', [' New Connections....: ' + str ( sql.fetchone()[0]) ]) - - # Disconnections - sql.execute ("""SELECT COUNT(*) FROM Devices - WHERE dev_PresentLastScan = 1 - AND dev_ScanCycle = ? - AND NOT EXISTS (SELECT 1 FROM CurrentScan - WHERE dev_MAC = cur_MAC - AND dev_ScanCycle = cur_ScanCycle) """, - (cycle,)) - mylog('verbose', [' Disconnections.....: ' + str ( sql.fetchone()[0]) ]) - - # IP Changes - sql.execute ("""SELECT COUNT(*) FROM Devices, CurrentScan - WHERE dev_MAC = cur_MAC AND dev_ScanCycle = cur_ScanCycle - AND dev_ScanCycle = ? - AND dev_LastIP <> cur_IP """, - (cycle,)) - mylog('verbose', [' IP Changes.........: ' + str ( sql.fetchone()[0]) ]) - -#------------------------------------------------------------------------------- -def insertOnlineHistory(): - # Add to History - sql.execute("SELECT * FROM Devices") - History_All = sql.fetchall() - History_All_Devices = len(History_All) - - sql.execute("SELECT * FROM Devices WHERE dev_Archived = 1") - History_Archived = sql.fetchall() - History_Archived_Devices = len(History_Archived) - - sql.execute("""SELECT * FROM CurrentScan WHERE cur_ScanCycle = ? 
""", (cycle,)) - History_Online = sql.fetchall() - History_Online_Devices = len(History_Online) - History_Offline_Devices = History_All_Devices - History_Archived_Devices - History_Online_Devices - - sql.execute ("INSERT INTO Online_History (Scan_Date, Online_Devices, Down_Devices, All_Devices, Archived_Devices) "+ - "VALUES ( ?, ?, ?, ?, ?)", (startTime, History_Online_Devices, History_Offline_Devices, History_All_Devices, History_Archived_Devices ) ) - -#------------------------------------------------------------------------------- -def create_new_devices (): - # arpscan - Insert events for new devices - print_log ('New devices - 1 Events') - sql.execute ("""INSERT INTO Events (eve_MAC, eve_IP, eve_DateTime, - eve_EventType, eve_AdditionalInfo, - eve_PendingAlertEmail) - SELECT cur_MAC, cur_IP, ?, 'New Device', cur_Vendor, 1 - FROM CurrentScan - WHERE cur_ScanCycle = ? - AND NOT EXISTS (SELECT 1 FROM Devices - WHERE dev_MAC = cur_MAC) """, - (startTime, cycle) ) - - print_log ('New devices - Insert Connection into session table') - sql.execute ("""INSERT INTO Sessions (ses_MAC, ses_IP, ses_EventTypeConnection, ses_DateTimeConnection, - ses_EventTypeDisconnection, ses_DateTimeDisconnection, ses_StillConnected, ses_AdditionalInfo) - SELECT cur_MAC, cur_IP,'Connected',?, NULL , NULL ,1, cur_Vendor - FROM CurrentScan - WHERE cur_ScanCycle = ? - AND NOT EXISTS (SELECT 1 FROM Sessions - WHERE ses_MAC = cur_MAC) """, - (startTime, cycle) ) - - # arpscan - Create new devices - print_log ('New devices - 2 Create devices') - sql.execute ("""INSERT INTO Devices (dev_MAC, dev_name, dev_Vendor, - dev_LastIP, dev_FirstConnection, dev_LastConnection, - dev_ScanCycle, dev_AlertEvents, dev_AlertDeviceDown, - dev_PresentLastScan) - SELECT cur_MAC, '(unknown)', cur_Vendor, cur_IP, ?, ?, - 1, 1, 0, 1 - FROM CurrentScan - WHERE cur_ScanCycle = ? 
- AND NOT EXISTS (SELECT 1 FROM Devices - WHERE dev_MAC = cur_MAC) """, - (startTime, startTime, cycle) ) - - # Pi-hole - Insert events for new devices - # NOT STRICYLY NECESARY (Devices can be created through Current_Scan) - # Bugfix #2 - Pi-hole devices w/o IP - print_log ('New devices - 3 Pi-hole Events') - sql.execute ("""INSERT INTO Events (eve_MAC, eve_IP, eve_DateTime, - eve_EventType, eve_AdditionalInfo, - eve_PendingAlertEmail) - SELECT PH_MAC, IFNULL (PH_IP,'-'), ?, 'New Device', - '(Pi-Hole) ' || PH_Vendor, 1 - FROM PiHole_Network - WHERE NOT EXISTS (SELECT 1 FROM Devices - WHERE dev_MAC = PH_MAC) """, - (startTime, ) ) - - # Pi-hole - Create New Devices - # Bugfix #2 - Pi-hole devices w/o IP - print_log ('New devices - 4 Pi-hole Create devices') - sql.execute ("""INSERT INTO Devices (dev_MAC, dev_name, dev_Vendor, - dev_LastIP, dev_FirstConnection, dev_LastConnection, - dev_ScanCycle, dev_AlertEvents, dev_AlertDeviceDown, - dev_PresentLastScan) - SELECT PH_MAC, PH_Name, PH_Vendor, IFNULL (PH_IP,'-'), - ?, ?, 1, 1, 0, 1 - FROM PiHole_Network - WHERE NOT EXISTS (SELECT 1 FROM Devices - WHERE dev_MAC = PH_MAC) """, - (startTime, startTime) ) - - # DHCP Leases - Insert events for new devices - print_log ('New devices - 5 DHCP Leases Events') - sql.execute ("""INSERT INTO Events (eve_MAC, eve_IP, eve_DateTime, - eve_EventType, eve_AdditionalInfo, - eve_PendingAlertEmail) - SELECT DHCP_MAC, DHCP_IP, ?, 'New Device', '(DHCP lease)',1 - FROM DHCP_Leases - WHERE NOT EXISTS (SELECT 1 FROM Devices - WHERE dev_MAC = DHCP_MAC) """, - (startTime, ) ) - - # DHCP Leases - Create New Devices - print_log ('New devices - 6 DHCP Leases Create devices') - # BUGFIX #23 - Duplicated MAC in DHCP.Leases - # TEST - Force Duplicated MAC - # sql.execute ("""INSERT INTO DHCP_Leases VALUES - # (1610700000, 'TEST1', '10.10.10.1', 'Test 1', '*')""") - # sql.execute ("""INSERT INTO DHCP_Leases VALUES - # (1610700000, 'TEST2', '10.10.10.2', 'Test 2', '*')""") - sql.execute ("""INSERT INTO Devices (dev_MAC, dev_name, dev_LastIP, - dev_Vendor, dev_FirstConnection, dev_LastConnection, - dev_ScanCycle, dev_AlertEvents, dev_AlertDeviceDown, - dev_PresentLastScan) - SELECT DISTINCT DHCP_MAC, - (SELECT DHCP_Name FROM DHCP_Leases AS D2 - WHERE D2.DHCP_MAC = D1.DHCP_MAC - ORDER BY DHCP_DateTime DESC LIMIT 1), - (SELECT DHCP_IP FROM DHCP_Leases AS D2 - WHERE D2.DHCP_MAC = D1.DHCP_MAC - ORDER BY DHCP_DateTime DESC LIMIT 1), - '(unknown)', ?, ?, 1, 1, 0, 1 - FROM DHCP_Leases AS D1 - WHERE NOT EXISTS (SELECT 1 FROM Devices - WHERE dev_MAC = DHCP_MAC) """, - (startTime, startTime) ) - - # sql.execute ("""INSERT INTO Devices (dev_MAC, dev_name, dev_Vendor, - # dev_LastIP, dev_FirstConnection, dev_LastConnection, - # dev_ScanCycle, dev_AlertEvents, dev_AlertDeviceDown, - # dev_PresentLastScan) - # SELECT DHCP_MAC, DHCP_Name, '(unknown)', DHCP_IP, ?, ?, - # 1, 1, 0, 1 - # FROM DHCP_Leases - # WHERE NOT EXISTS (SELECT 1 FROM Devices - # WHERE dev_MAC = DHCP_MAC) """, - # (startTime, startTime) ) - print_log ('New Devices end') - -#------------------------------------------------------------------------------- -def insert_events (): - # Check device down - print_log ('Events 1 - Devices down') - sql.execute ("""INSERT INTO Events (eve_MAC, eve_IP, eve_DateTime, - eve_EventType, eve_AdditionalInfo, - eve_PendingAlertEmail) - SELECT dev_MAC, dev_LastIP, ?, 'Device Down', '', 1 - FROM Devices - WHERE dev_AlertDeviceDown = 1 - AND dev_PresentLastScan = 1 - AND dev_ScanCycle = ? 
- AND NOT EXISTS (SELECT 1 FROM CurrentScan - WHERE dev_MAC = cur_MAC - AND dev_ScanCycle = cur_ScanCycle) """, - (startTime, cycle) ) - - # Check new connections - print_log ('Events 2 - New Connections') - sql.execute ("""INSERT INTO Events (eve_MAC, eve_IP, eve_DateTime, - eve_EventType, eve_AdditionalInfo, - eve_PendingAlertEmail) - SELECT cur_MAC, cur_IP, ?, 'Connected', '', dev_AlertEvents - FROM Devices, CurrentScan - WHERE dev_MAC = cur_MAC AND dev_ScanCycle = cur_ScanCycle - AND dev_PresentLastScan = 0 - AND dev_ScanCycle = ? """, - (startTime, cycle) ) - - # Check disconnections - print_log ('Events 3 - Disconnections') - sql.execute ("""INSERT INTO Events (eve_MAC, eve_IP, eve_DateTime, - eve_EventType, eve_AdditionalInfo, - eve_PendingAlertEmail) - SELECT dev_MAC, dev_LastIP, ?, 'Disconnected', '', - dev_AlertEvents - FROM Devices - WHERE dev_AlertDeviceDown = 0 - AND dev_PresentLastScan = 1 - AND dev_ScanCycle = ? - AND NOT EXISTS (SELECT 1 FROM CurrentScan - WHERE dev_MAC = cur_MAC - AND dev_ScanCycle = cur_ScanCycle) """, - (startTime, cycle) ) - - # Check IP Changed - print_log ('Events 4 - IP Changes') - sql.execute ("""INSERT INTO Events (eve_MAC, eve_IP, eve_DateTime, - eve_EventType, eve_AdditionalInfo, - eve_PendingAlertEmail) - SELECT cur_MAC, cur_IP, ?, 'IP Changed', - 'Previous IP: '|| dev_LastIP, dev_AlertEvents - FROM Devices, CurrentScan - WHERE dev_MAC = cur_MAC AND dev_ScanCycle = cur_ScanCycle - AND dev_ScanCycle = ? - AND dev_LastIP <> cur_IP """, - (startTime, cycle) ) - print_log ('Events end') - -#------------------------------------------------------------------------------- -def update_devices_data_from_scan (): - # Update Last Connection - print_log ('Update devices - 1 Last Connection') - sql.execute ("""UPDATE Devices SET dev_LastConnection = ?, - dev_PresentLastScan = 1 - WHERE dev_ScanCycle = ? - AND dev_PresentLastScan = 0 - AND EXISTS (SELECT 1 FROM CurrentScan - WHERE dev_MAC = cur_MAC - AND dev_ScanCycle = cur_ScanCycle) """, - (startTime, cycle)) - - # Clean no active devices - print_log ('Update devices - 2 Clean no active devices') - sql.execute ("""UPDATE Devices SET dev_PresentLastScan = 0 - WHERE dev_ScanCycle = ? - AND NOT EXISTS (SELECT 1 FROM CurrentScan - WHERE dev_MAC = cur_MAC - AND dev_ScanCycle = cur_ScanCycle) """, - (cycle,)) - - # Update IP & Vendor - print_log ('Update devices - 3 LastIP & Vendor') - sql.execute ("""UPDATE Devices - SET dev_LastIP = (SELECT cur_IP FROM CurrentScan - WHERE dev_MAC = cur_MAC - AND dev_ScanCycle = cur_ScanCycle), - dev_Vendor = (SELECT cur_Vendor FROM CurrentScan - WHERE dev_MAC = cur_MAC - AND dev_ScanCycle = cur_ScanCycle) - WHERE dev_ScanCycle = ? 
-                      AND EXISTS (SELECT 1 FROM CurrentScan
-                                  WHERE dev_MAC = cur_MAC
-                                    AND dev_ScanCycle = cur_ScanCycle) """,
-                 (cycle,))
-
-    # Pi-hole Network - Update (unknown) Name
-    print_log ('Update devices - 4 Unknown Name')
-    sql.execute ("""UPDATE Devices
-                    SET dev_NAME = (SELECT PH_Name FROM PiHole_Network
-                                    WHERE PH_MAC = dev_MAC)
-                    WHERE (dev_Name in ("(unknown)", "(name not found)", "" )
-                           OR dev_Name IS NULL)
-                      AND EXISTS (SELECT 1 FROM PiHole_Network
-                                  WHERE PH_MAC = dev_MAC
-                                    AND PH_NAME IS NOT NULL
-                                    AND PH_NAME <> '') """)
-
-    # DHCP Leases - Update (unknown) Name
-    sql.execute ("""UPDATE Devices
-                    SET dev_NAME = (SELECT DHCP_Name FROM DHCP_Leases
-                                    WHERE DHCP_MAC = dev_MAC)
-                    WHERE (dev_Name in ("(unknown)", "(name not found)", "" )
-                           OR dev_Name IS NULL)
-                      AND EXISTS (SELECT 1 FROM DHCP_Leases
-                                  WHERE DHCP_MAC = dev_MAC)""")
-
-    # DHCP Leases - Vendor
-    print_log ('Update devices - 5 Vendor')
-
-    recordsToUpdate = []
-    query = """SELECT * FROM Devices
-               WHERE dev_Vendor = '(unknown)' OR dev_Vendor =''
-                  OR dev_Vendor IS NULL"""
-
-    for device in sql.execute (query) :
-        vendor = query_MAC_vendor (device['dev_MAC'])
-        if vendor != -1 and vendor != -2 :
-            recordsToUpdate.append ([vendor, device['dev_MAC']])
-
-    sql.executemany ("UPDATE Devices SET dev_Vendor = ? WHERE dev_MAC = ? ",
-                     recordsToUpdate )
-
-    # clean-up device leases table
-    sql.execute ("DELETE FROM DHCP_Leases")
-    print_log ('Update devices end')
-
-#-------------------------------------------------------------------------------
-def update_devices_names (db):
-    # Initialize variables
-    recordsToUpdate = []
-    recordsNotFound = []
-
-    ignored = 0
-    notFound = 0
-
-    foundDig = 0
-    foundPholus = 0
-
-    # BUGFIX #97 - Updating name of Devices w/o IP
-    sql.execute ("SELECT * FROM Devices WHERE dev_Name IN ('(unknown)','', '(name not found)') AND dev_LastIP <> '-'")
-    unknownDevices = sql.fetchall()
-    db.commitDB()
-
-    # perform Pholus scan if (unknown) devices found
-    if PHOLUS_ACTIVE and (len(unknownDevices) > 0 or PHOLUS_FORCE):
-        performPholusScan(db, PHOLUS_TIMEOUT)
-
-    # skip checks if no unknown devices
-    if len(unknownDevices) == 0 and PHOLUS_FORCE == False:
-        return
-
-    # Devices without name
-    mylog('verbose', [' Trying to resolve devices without name'])
-
-    # get names from Pholus scan
-    sql.execute ('SELECT * FROM Pholus_Scan where "Record_Type"="Answer"')
-    pholusResults = list(sql.fetchall())
-    db.commitDB()
-
-    # Number of entries from previous Pholus scans
-    mylog('verbose', [" Pholus entries from prev scans: ", len(pholusResults)])
-
-    for device in unknownDevices:
-        newName = -1
-
-        # Resolve device name with DiG
-        newName = resolve_device_name_dig (device['dev_MAC'], device['dev_LastIP'])
-
-        # count
-        if newName != -1:
-            foundDig += 1
-
-        # Resolve with Pholus
-        if newName == -1:
-            newName = resolve_device_name_pholus (device['dev_MAC'], device['dev_LastIP'], pholusResults)
-            # count
-            if newName != -1:
-                foundPholus += 1
-
-        # if still not found, update the name so we can distinguish the devices we already tried
-        if newName == -1 :
-            recordsNotFound.append (["(name not found)", device['dev_MAC']])
-        else:
-            # name was found with DiG or Pholus
-            recordsToUpdate.append ([newName, device['dev_MAC']])
-
-    # Print log
-    mylog('verbose', [" Names Found (DiG/Pholus): ", len(recordsToUpdate), " (",foundDig,"/",foundPholus ,")" ])
-    mylog('verbose', [" Names Not Found : ", len(recordsNotFound) ])
-
-    # update not found devices with (name not found)
-    sql.executemany ("UPDATE Devices SET dev_Name = ? WHERE dev_MAC = ? ", recordsNotFound )
-    # update names of devices which we were able to resolve
-    sql.executemany ("UPDATE Devices SET dev_Name = ? WHERE dev_MAC = ? ", recordsToUpdate )
-    db.commitDB()
-
-
-#-------------------------------------------------------------------------------
-def performNmapScan( devicesToScan):
-
-    global changedPorts_json_struc
-
-    changedPortsTmp = []
-
-    if len(devicesToScan) > 0:
-
-        timeoutSec = NMAP_TIMEOUT
-
-        devTotal = len(devicesToScan)
-
-        updateState(db,"Scan: Nmap")
-
-        mylog('verbose', ['[', timeNow(), '] Scan: Nmap for max ', str(timeoutSec), 's ('+ str(round(int(timeoutSec) / 60, 1)) +'min) per device'])
-
-        mylog('verbose', [" Estimated max delay: ", (devTotal * int(timeoutSec)), 's ', '(', round((devTotal * int(timeoutSec))/60,1) , 'min)' ])
-
-        devIndex = 0
-        for device in devicesToScan:
-            # Execute command
-            output = ""
-            # prepare arguments from user supplied ones
-            nmapArgs = ['nmap'] + NMAP_ARGS.split() + [device["dev_LastIP"]]
-
-            progress = ' (' + str(devIndex+1) + '/' + str(devTotal) + ')'
-
-            try:
-                # try running a subprocess with a forced (timeout + 30 seconds) in case the subprocess hangs
-                output = subprocess.check_output (nmapArgs, universal_newlines=True, stderr=subprocess.STDOUT, timeout=(timeoutSec + 30))
-            except subprocess.CalledProcessError as e:
-                # An error occurred, handle it
-                mylog('none', [e.output])
-                mylog('none', [" Error - Nmap Scan - check logs", progress])
-            except subprocess.TimeoutExpired as timeErr:
-                mylog('verbose', [' Nmap TIMEOUT - the process was forcefully terminated because the timeout was reached for ', device["dev_LastIP"], progress])
-
-            if output == "": # check if the subprocess failed
-                mylog('info', ['[', timeNow(), '] Scan: Nmap FAIL for ', device["dev_LastIP"], progress ,' check logs for details'])
-            else:
-                mylog('verbose', ['[', timeNow(), '] Scan: Nmap SUCCESS for ', device["dev_LastIP"], progress])
-
-            devIndex += 1
-
-            # check the last run output
-            newLines = output.split('\n')
-
-            # regular logging
-            for line in newLines:
-                append_line_to_file (logPath + '/pialert_nmap.log', line +'\n')
-
-            # collect ports / new Nmap Entries
-            newEntriesTmp = []
-
-            index = 0
-            startCollecting = False
-            duration = ""
-            for line in newLines:
-                if 'Starting Nmap' in line:
-                    if len(newLines) > index+1 and 'Note: Host seems down' in newLines[index+1]:
-                        break # this entry is empty
-                elif 'PORT' in line and 'STATE' in line and 'SERVICE' in line:
-                    startCollecting = True
-                elif startCollecting and len(line.split()) == 3:
-                    newEntriesTmp.append(nmap_entry(device["dev_MAC"], timeNow(), line.split()[0], line.split()[1], line.split()[2], device["dev_Name"]))
-                elif 'Nmap done' in line:
-                    duration = line.split('scanned in ')[1]
-                index += 1
-
-            # previous Nmap Entries
-            oldEntries = []
-
-            mylog('verbose', ['[', timeNow(), '] Scan: Ports found by NMAP: ', len(newEntriesTmp)])
-
-            if len(newEntriesTmp) > 0:
-
-                # get all current NMAP ports from the DB
-                sql.execute(sql_nmap_scan_all)
-
-                rows = sql.fetchall()
-
-                for row in rows:
-                    # only collect entries matching the current MAC address
-                    if row["MAC"] == device["dev_MAC"]:
-                        oldEntries.append(nmap_entry(row["MAC"], row["Time"], row["Port"], row["State"], row["Service"], device["dev_Name"], row["Extra"], row["Index"]))
-
-
-                newEntries = []
-
-                # Collect all entries that don't match the ones in the DB
-                for newTmpEntry in newEntriesTmp:
-
-                    found = False
-
-                    # Check if the new entry is already available in oldEntries and remove it from processing if yes
-                    for oldEntry in oldEntries:
-                        if newTmpEntry.hash == oldEntry.hash:
-                            found = True
-
-                    if not found:
-                        newEntries.append(newTmpEntry)
-
-
-                mylog('verbose', ['[', timeNow(), '] Scan: Nmap newly discovered or changed ports: ', len(newEntries)])
-
-                # collect new ports, find the corresponding old entry and return for notification purposes
-                # also update the DB with the new values after deleting the old ones
-                if len(newEntries) > 0:
-
-                    # params to build the SQL query
-                    params = []
-                    indexesToDelete = ""
-
-                    # Find old entry matching the new entry hash
-                    for newEntry in newEntries:
-
-                        foundEntry = None
-
-                        for oldEntry in oldEntries:
-                            if oldEntry.hash == newEntry.hash:
-                                indexesToDelete = indexesToDelete + str(oldEntry.index) + ','
-                                foundEntry = oldEntry
-
-                        columnNames = ["Name", "MAC", "Port", "State", "Service", "Extra", "NewOrOld" ]
-
-                        # Old entry found
-                        if foundEntry is not None:
-                            # Build params for sql query
-                            params.append((newEntry.mac, newEntry.time, newEntry.port, newEntry.state, newEntry.service, foundEntry.extra))
-                            # Build JSON for API and notifications
-                            changedPortsTmp.append({
-                                "Name"     : foundEntry.name,
-                                "MAC"      : newEntry.mac,
-                                "Port"     : newEntry.port,
-                                "State"    : newEntry.state,
-                                "Service"  : newEntry.service,
-                                "Extra"    : foundEntry.extra,
-                                "NewOrOld" : "New values"
-                            })
-                            changedPortsTmp.append({
-                                "Name"     : foundEntry.name,
-                                "MAC"      : foundEntry.mac,
-                                "Port"     : foundEntry.port,
-                                "State"    : foundEntry.state,
-                                "Service"  : foundEntry.service,
-                                "Extra"    : foundEntry.extra,
-                                "NewOrOld" : "Old values"
-                            })
-                        # New entry - no matching Old entry found
-                        else:
-                            # Build params for sql query
-                            params.append((newEntry.mac, newEntry.time, newEntry.port, newEntry.state, newEntry.service, ''))
-                            # Build JSON for API and notifications
-                            changedPortsTmp.append({
-                                "Name"     : "New device",
-                                "MAC"      : newEntry.mac,
-                                "Port"     : newEntry.port,
-                                "State"    : newEntry.state,
-                                "Service"  : newEntry.service,
-                                "Extra"    : "",
-                                "NewOrOld" : "New device"
-                            })
-
-                    changedPorts_json_struc = json_struc({ "data" : changedPortsTmp}, columnNames)
-
-                    # Delete old entries if available
-                    if len(indexesToDelete) > 0:
-                        sql.execute ("DELETE FROM Nmap_Scan where \"Index\" in (" + indexesToDelete[:-1] +")")
-                        db.commitDB()
-
-                    # Insert new values into the DB
-                    sql.executemany ("""INSERT INTO Nmap_Scan ("MAC", "Time", "Port", "State", "Service", "Extra") VALUES (?, ?, ?, ?, ?, ?)""", params)
-                    db.commitDB()
-
-#-------------------------------------------------------------------------------
-class nmap_entry:
-    def __init__(self, mac, time, port, state, service, name = '', extra = '', index = 0):
-        self.mac = mac
-        self.time = time
-        self.port = port
-        self.state = state
-        self.service = service
-        self.name = name
-        self.extra = extra
-        self.index = index
-        self.hash = str(mac) + str(port)+ str(state)+ str(service)
-
-#-------------------------------------------------------------------------------
-def performPholusScan (db, timeoutSec):
-
-    # scan every interface
-    for subnet in userSubnets:
-
-        temp = subnet.split("--interface=")
-
-        if len(temp) != 2:
-            mylog('none', [" Skip scan (need subnet in format '192.168.1.0/24 --interface=eth0'), got: ", subnet])
-            return
-
-        mask = temp[0].strip()
-        interface = temp[1].strip()
-
-        # logging & updating app state
-        updateState(db,"Scan: Pholus")
-        mylog('info', ['[', timeNow(), '] Scan: Pholus for ', str(timeoutSec), 's ('+ str(round(int(timeoutSec) / 60, 1)) +'min)'])
-        mylog('verbose', [" Pholus scan on [interface] ", interface, " [mask] " , mask])
-
-        # the scan always lasts 2x as long, so the desired user time from settings needs to be halved
-        adjustedTimeout = str(round(int(timeoutSec) / 2, 0))
-
-        # python3 -m trace --trace /home/pi/pialert/pholus/pholus3.py eth1 -rdns_scanning 192.168.1.0/24 -stimeout 600
-        pholus_args = ['python3', fullPholusPath, interface, "-rdns_scanning", mask, "-stimeout", adjustedTimeout]
-
-        # Execute command
-        output = ""
-
-        try:
-            # try running a subprocess with a forced (timeout + 30 seconds) in case the subprocess hangs
-            output = subprocess.check_output (pholus_args, universal_newlines=True, stderr=subprocess.STDOUT, timeout=(timeoutSec + 30))
-        except subprocess.CalledProcessError as e:
-            # An error occurred, handle it
-            mylog('none', [e.output])
-            mylog('none', [" Error - Pholus Scan - check logs"])
-        except subprocess.TimeoutExpired as timeErr:
-            mylog('none', [' Pholus TIMEOUT - the process was forcefully terminated because the timeout was reached'])
-
-        if output == "": # check if the subprocess failed
-            mylog('none', ['[', timeNow(), '] Scan: Pholus FAIL - check logs'])
-        else:
-            mylog('verbose', ['[', timeNow(), '] Scan: Pholus SUCCESS'])
-
-        # check the last run output
-        f = open(logPath + '/pialert_pholus_lastrun.log', 'r+')
-        newLines = f.read().split('\n')
-        f.close()
-
-        # cleanup - select only lines containing a separator to filter out unnecessary data
-        newLines = list(filter(lambda x: '|' in x, newLines))
-
-        # build SQL query parameters to insert into the DB
-        params = []
-
-        for line in newLines:
-            columns = line.split("|")
-            if len(columns) == 4:
-                params.append(( interface + " " + mask, timeNow() , columns[0].replace(" ", ""), columns[1].replace(" ", ""), columns[2].replace(" ", ""), columns[3], ''))
-
-        if len(params) > 0:
-            sql.executemany ("""INSERT INTO Pholus_Scan ("Info", "Time", "MAC", "IP_v4_or_v6", "Record_Type", "Value", "Extra") VALUES (?, ?, ?, ?, ?, ?, ?)""", params)
-            db.commitDB()
-
-#-------------------------------------------------------------------------------
-def cleanResult(str):
-    # alternative str.split('.')[0]
-    str = str.replace("._airplay", "")
-    str = str.replace("._tcp", "")
-    str = str.replace(".local", "")
-    str = str.replace("._esphomelib", "")
-    str = str.replace("._googlecast", "")
-    str = str.replace(".lan", "")
-    str = str.replace(".home", "")
-    str = re.sub(r'-[a-fA-F0-9]{32}', '', str) # removing last part of e.g. Nest-Audio-ff77ff77ff77ff77ff77ff77ff77ff77
-    # remove trailing dots
-    if str.endswith('.'):
-        str = str[:-1]
-
-    return str
-
-
-# Disclaimer - I'm interfacing with a script I didn't write (pholus3.py) so it's possible I'm missing types of answers
-# it's also possible the pholus3.py script can be adjusted to provide a better output to interface with it
-# Hit me with a PR if you know how! :)
-def resolve_device_name_pholus (pMAC, pIP, allRes):
-
-    pholusMatchesIndexes = []
-
-    index = 0
-    for result in allRes:
-        # limiting entries used for name resolution to the ones containing the current IP (v4 only)
-        if result["MAC"] == pMAC and result["Record_Type"] == "Answer" and result["IP_v4_or_v6"] == pIP and '._googlezone' not in result["Value"]:
-            # found entries with a matching MAC address, let's collect indexes
-            pholusMatchesIndexes.append(index)
-
-        index += 1
-
-    # return if nothing found
-    if len(pholusMatchesIndexes) == 0:
-        return -1
-
-    # we have some entries, let's try to select the most useful one
-
-    # airplay matches contain a lot of information
-    # Matches for example:
-    # Brand Tv (50)._airplay._tcp.local. TXT Class:32769 "acl=0 deviceid=66:66:66:66:66:66 features=0x77777,0x38BCB46 rsf=0x3 fv=p20.T-FFFFFF-03.1 flags=0x204 model=XXXX manufacturer=Brand serialNumber=XXXXXXXXXXX protovers=1.1 srcvers=777.77.77 pi=FF:FF:FF:FF:FF:FF psi=00000000-0000-0000-0000-FFFFFFFFF gid=00000000-0000-0000-0000-FFFFFFFFFF gcgl=0 pk=AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
-    for i in pholusMatchesIndexes:
-        if checkIPV4(allRes[i]['IP_v4_or_v6']) and '._airplay._tcp.local. TXT Class:32769' in str(allRes[i]["Value"]) :
-            return allRes[i]["Value"].split('._airplay._tcp.local. TXT Class:32769')[0]
-
-    # second best - contains airplay
-    # Matches for example:
-    # _airplay._tcp.local. PTR Class:IN "Brand Tv (50)._airplay._tcp.local."
-    for i in pholusMatchesIndexes:
-        if checkIPV4(allRes[i]['IP_v4_or_v6']) and '_airplay._tcp.local. PTR Class:IN' in allRes[i]["Value"] and ('._googlecast') not in allRes[i]["Value"]:
-            return cleanResult(allRes[i]["Value"].split('"')[1])
-
-    # Contains PTR Class:32769
-    # Matches for example:
-    # 3.1.168.192.in-addr.arpa. PTR Class:32769 "MyPc.local."
-    for i in pholusMatchesIndexes:
-        if checkIPV4(allRes[i]['IP_v4_or_v6']) and 'PTR Class:32769' in allRes[i]["Value"]:
-            return cleanResult(allRes[i]["Value"].split('"')[1])
-
-    # Contains AAAA Class:IN
-    # Matches for example:
-    # DESKTOP-SOMEID.local. AAAA Class:IN "fe80::fe80:fe80:fe80:fe80"
-    for i in pholusMatchesIndexes:
-        if checkIPV4(allRes[i]['IP_v4_or_v6']) and 'AAAA Class:IN' in allRes[i]["Value"]:
-            return cleanResult(allRes[i]["Value"].split('.local.')[0])
-
-    # Contains _googlecast._tcp.local. PTR Class:IN
-    # Matches for example:
-    # _googlecast._tcp.local. PTR Class:IN "Nest-Audio-ff77ff77ff77ff77ff77ff77ff77ff77._googlecast._tcp.local."
-    for i in pholusMatchesIndexes:
-        if checkIPV4(allRes[i]['IP_v4_or_v6']) and '_googlecast._tcp.local. PTR Class:IN' in allRes[i]["Value"] and ('Google-Cast-Group') not in allRes[i]["Value"]:
-            return cleanResult(allRes[i]["Value"].split('"')[1])
-
-    # Contains A Class:32769
-    # Matches for example:
-    # Android.local. A Class:32769 "192.168.1.6"
-    for i in pholusMatchesIndexes:
-        if checkIPV4(allRes[i]['IP_v4_or_v6']) and ' A Class:32769' in allRes[i]["Value"]:
-            return cleanResult(allRes[i]["Value"].split(' A Class:32769')[0])
-
-    # Contains PTR Class:IN
-    # Matches for example:
-    # _esphomelib._tcp.local. PTR Class:IN "ceiling-light-1._esphomelib._tcp.local."
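-    # (note: this last 'PTR Class:IN' match is the broadest heuristic, which is
-    #  why it runs after all of the more specific matches above; cleanResult()
-    #  would reduce the example value "ceiling-light-1._esphomelib._tcp.local."
-    #  to just "ceiling-light-1")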
-    for i in pholusMatchesIndexes:
-        if checkIPV4(allRes[i]['IP_v4_or_v6']) and 'PTR Class:IN' in allRes[i]["Value"]:
-            return cleanResult(allRes[i]["Value"].split('"')[1])
-
-    return -1
-
-#-------------------------------------------------------------------------------
-
-def resolve_device_name_dig (pMAC, pIP):
-
-    newName = ""
-
-    try :
-        dig_args = ['dig', '+short', '-x', pIP]
-
-        # Execute command
-        try:
-            # try running a subprocess
-            newName = subprocess.check_output (dig_args, universal_newlines=True)
-        except subprocess.CalledProcessError as e:
-            # An error occurred, handle it
-            mylog('none', [e.output])
-            # newName = "Error - check logs"
-            return -1
-
-        # Check returns
-        newName = newName.strip()
-
-        if len(newName) == 0 :
-            return -1
-
-        # Cleanup
-        newName = cleanResult(newName)
-
-        if newName == "" or len(newName) == 0:
-            return -1
-
-        # Return newName
-        return newName
-
-    # not Found
-    except subprocess.CalledProcessError :
-        return -1
-
-#-------------------------------------------------------------------------------
-def void_ghost_disconnections (db):
-    # Void connect ghost events (disconnect event exists in last X min.)
-    print_log ('Void - 1 Connect ghost events')
-    sql.execute ("""UPDATE Events SET eve_PairEventRowid = Null,
-                        eve_EventType ='VOIDED - ' || eve_EventType
-                    WHERE eve_MAC != 'Internet'
-                      AND eve_EventType = 'Connected'
-                      AND eve_DateTime = ?
-                      AND eve_MAC IN (
-                          SELECT Events.eve_MAC
-                          FROM CurrentScan, Devices, ScanCycles, Events
-                          WHERE cur_ScanCycle = ?
-                            AND dev_MAC = cur_MAC
-                            AND dev_ScanCycle = cic_ID
-                            AND cic_ID = cur_ScanCycle
-                            AND eve_MAC = cur_MAC
-                            AND eve_EventType = 'Disconnected'
-                            AND eve_DateTime >=
-                                DATETIME (?, '-' || cic_EveryXmin ||' minutes')
-                          ) """,
-                 (startTime, cycle, startTime) )
-
-    # Void connect paired events
-    print_log ('Void - 2 Paired events')
-    sql.execute ("""UPDATE Events SET eve_PairEventRowid = Null
-                    WHERE eve_MAC != 'Internet'
-                      AND eve_PairEventRowid IN (
-                          SELECT Events.RowID
-                          FROM CurrentScan, Devices, ScanCycles, Events
-                          WHERE cur_ScanCycle = ?
-                            AND dev_MAC = cur_MAC
-                            AND dev_ScanCycle = cic_ID
-                            AND cic_ID = cur_ScanCycle
-                            AND eve_MAC = cur_MAC
-                            AND eve_EventType = 'Disconnected'
-                            AND eve_DateTime >=
-                                DATETIME (?, '-' || cic_EveryXmin ||' minutes')
-                          ) """,
-                 (cycle, startTime) )
-
-    # Void disconnect ghost events
-    print_log ('Void - 3 Disconnect ghost events')
-    sql.execute ("""UPDATE Events SET eve_PairEventRowid = Null,
-                        eve_EventType = 'VOIDED - '|| eve_EventType
-                    WHERE eve_MAC != 'Internet'
-                      AND ROWID IN (
-                          SELECT Events.RowID
-                          FROM CurrentScan, Devices, ScanCycles, Events
-                          WHERE cur_ScanCycle = ?
-                            AND dev_MAC = cur_MAC
-                            AND dev_ScanCycle = cic_ID
-                            AND cic_ID = cur_ScanCycle
-                            AND eve_MAC = cur_MAC
-                            AND eve_EventType = 'Disconnected'
-                            AND eve_DateTime >=
-                                DATETIME (?, '-' || cic_EveryXmin ||' minutes')
-                          ) """,
-                 (cycle, startTime) )
-    print_log ('Void end')
-    db.commitDB()
-
-#-------------------------------------------------------------------------------
-def pair_sessions_events (db):
-    # NOT NECESSARY FOR INCREMENTAL UPDATE
-    # print_log ('Pair session - 1 Clean')
-    # sql.execute ("""UPDATE Events
-    #                 SET eve_PairEventRowid = NULL
-    #                 WHERE eve_EventType IN ('New Device', 'Connected')
-    #              """ )
-
-
-    # Pair Connection / New Device events
-    print_log ('Pair session - 1 Connections / New Devices')
-    sql.execute ("""UPDATE Events
-                    SET eve_PairEventRowid =
-                        (SELECT ROWID
-                         FROM Events AS EVE2
-                         WHERE EVE2.eve_EventType IN ('New Device', 'Connected',
-                                                      'Device Down', 'Disconnected')
-                           AND EVE2.eve_MAC = Events.eve_MAC
-                           AND EVE2.eve_Datetime > Events.eve_DateTime
-                         ORDER BY EVE2.eve_DateTime ASC LIMIT 1)
-                    WHERE eve_EventType IN ('New Device', 'Connected')
-                      AND eve_PairEventRowid IS NULL
-                 """ )
-
-    # Pair Disconnection / Device Down
-    print_log ('Pair session - 2 Disconnections')
-    sql.execute ("""UPDATE Events
-                    SET eve_PairEventRowid =
-                        (SELECT ROWID
-                         FROM Events AS EVE2
-                         WHERE EVE2.eve_PairEventRowid = Events.ROWID)
-                    WHERE eve_EventType IN ('Device Down', 'Disconnected')
-                      AND eve_PairEventRowid IS NULL
-                 """ )
-    print_log ('Pair session end')
-
-    db.commitDB()
-
-#-------------------------------------------------------------------------------
-def create_sessions_snapshot (db):
-
-    # Clean sessions snapshot
-    print_log ('Sessions Snapshot - 1 Clean')
-    sql.execute ("DELETE FROM SESSIONS" )
-
-    # Insert sessions
-    print_log ('Sessions Snapshot - 2 Insert')
-    sql.execute ("""INSERT INTO Sessions
-                    SELECT * FROM Convert_Events_to_Sessions""" )
-
-    print_log ('Sessions end')
-    db.commitDB()
-
-
-
-#-------------------------------------------------------------------------------
-def skip_repeated_notifications (db):
-
-    # Skip repeated notifications
-    # due to strftime overflow --> use "strftime / 60"
-    print_log ('Skip Repeated')
-    sql.execute ("""UPDATE Events SET eve_PendingAlertEmail = 0
-                    WHERE eve_PendingAlertEmail = 1 AND eve_MAC IN
-                        (
-                        SELECT dev_MAC FROM Devices
-                        WHERE dev_LastNotification IS NOT NULL
-                          AND dev_LastNotification <>""
-                          AND (strftime("%s", dev_LastNotification)/60 +
-                               dev_SkipRepeated * 60) >
-                              (strftime('%s','now','localtime')/60 )
-                        )
-                 """ )
-    print_log ('Skip Repeated end')
-
-    db.commitDB()
-
-
-#===============================================================================
-# REPORTING
-#===============================================================================
-# create a json for webhook and mqtt notifications to provide further integration options
-json_final = []
-
-def send_notifications (db):
-    global mail_text, mail_html, json_final, changedPorts_json_struc, partial_html, partial_txt, partial_json
-
-    deviceUrl = REPORT_DASHBOARD_URL + '/deviceDetails.php?mac='
-    plugins_report = False
-
-    # Reporting section
-    mylog('verbose', [' Check if something to report'])
-
-    # prepare variables for JSON construction
-    json_internet = []
-    json_new_devices = []
-    json_down_devices = []
-    json_events = []
-    json_ports = []
-    json_plugins = []
-
-    # Disable reporting on events for devices where reporting is disabled based on the MAC address
-    sql.execute ("""UPDATE Events SET eve_PendingAlertEmail = 0
-                    WHERE eve_PendingAlertEmail = 1 AND 
eve_EventType != 'Device Down' AND eve_MAC IN - ( - SELECT dev_MAC FROM Devices WHERE dev_AlertEvents = 0 - )""") - sql.execute ("""UPDATE Events SET eve_PendingAlertEmail = 0 - WHERE eve_PendingAlertEmail = 1 AND eve_EventType = 'Device Down' AND eve_MAC IN - ( - SELECT dev_MAC FROM Devices WHERE dev_AlertDeviceDown = 0 - )""") - - # Open text Template - template_file = open(pialertPath + '/back/report_template.txt', 'r') - mail_text = template_file.read() - template_file.close() - - # Open html Template - template_file = open(pialertPath + '/back/report_template.html', 'r') - if isNewVersion(db): - template_file = open(pialertPath + '/back/report_template_new_version.html', 'r') - - mail_html = template_file.read() - template_file.close() - - # Report Header & footer - timeFormated = startTime.strftime ('%Y-%m-%d %H:%M') - mail_text = mail_text.replace ('', timeFormated) - mail_html = mail_html.replace ('', timeFormated) - - mail_text = mail_text.replace ('', socket.gethostname() ) - mail_html = mail_html.replace ('', socket.gethostname() ) - - if 'internet' in INCLUDED_SECTIONS: - # Compose Internet Section - sqlQuery = """SELECT eve_MAC as MAC, eve_IP as IP, eve_DateTime as Datetime, eve_EventType as "Event Type", eve_AdditionalInfo as "More info" FROM Events - WHERE eve_PendingAlertEmail = 1 AND eve_MAC = 'Internet' - ORDER BY eve_DateTime""" - - notiStruc = construct_notifications(sqlQuery, "Internet IP change") - - # collect "internet" (IP changes) for the webhook json - json_internet = notiStruc.json["data"] - - mail_text = mail_text.replace ('', notiStruc.text + '\n') - mail_html = mail_html.replace ('', notiStruc.html) - - if 'new_devices' in INCLUDED_SECTIONS: - # Compose New Devices Section - sqlQuery = """SELECT eve_MAC as MAC, eve_DateTime as Datetime, dev_LastIP as IP, eve_EventType as "Event Type", dev_Name as "Device name", dev_Comments as Comments FROM Events_Devices - WHERE eve_PendingAlertEmail = 1 - AND eve_EventType = 'New Device' - ORDER BY eve_DateTime""" - - notiStruc = construct_notifications(sqlQuery, "New devices") - - # collect "new_devices" for the webhook json - json_new_devices = notiStruc.json["data"] - - mail_text = mail_text.replace ('', notiStruc.text + '\n') - mail_html = mail_html.replace ('', notiStruc.html) - - if 'down_devices' in INCLUDED_SECTIONS: - # Compose Devices Down Section - sqlQuery = """SELECT eve_MAC as MAC, eve_DateTime as Datetime, dev_LastIP as IP, eve_EventType as "Event Type", dev_Name as "Device name", dev_Comments as Comments FROM Events_Devices - WHERE eve_PendingAlertEmail = 1 - AND eve_EventType = 'Device Down' - ORDER BY eve_DateTime""" - - notiStruc = construct_notifications(sqlQuery, "Down devices") - - # collect "new_devices" for the webhook json - json_down_devices = notiStruc.json["data"] - - mail_text = mail_text.replace ('', notiStruc.text + '\n') - mail_html = mail_html.replace ('', notiStruc.html) - - if 'events' in INCLUDED_SECTIONS: - # Compose Events Section - sqlQuery = """SELECT eve_MAC as MAC, eve_DateTime as Datetime, dev_LastIP as IP, eve_EventType as "Event Type", dev_Name as "Device name", dev_Comments as Comments FROM Events_Devices - WHERE eve_PendingAlertEmail = 1 - AND eve_EventType IN ('Connected','Disconnected', - 'IP Changed') - ORDER BY eve_DateTime""" - - notiStruc = construct_notifications(sqlQuery, "Events") - - # collect "events" for the webhook json - json_events = notiStruc.json["data"] - - mail_text = mail_text.replace ('', notiStruc.text + '\n') - mail_html = mail_html.replace ('', 
notiStruc.html) - - if 'ports' in INCLUDED_SECTIONS: - # collect "ports" for the webhook json - if changedPorts_json_struc is not None: - json_ports = changedPorts_json_struc.json["data"] - - notiStruc = construct_notifications("", "Ports", True, changedPorts_json_struc) - - mail_html = mail_html.replace ('', notiStruc.html) - - portsTxt = "" - if changedPorts_json_struc is not None: - portsTxt = "Ports \n---------\n Ports changed! Check PiAlert for details!\n" - - mail_text = mail_text.replace ('', portsTxt ) - - if 'plugins' in INCLUDED_SECTIONS and ENABLE_PLUGINS: - # Compose Plugins Section - sqlQuery = """SELECT Plugin, Object_PrimaryId, Object_SecondaryId, DateTimeChanged, Watched_Value1, Watched_Value2, Watched_Value3, Watched_Value4, Status from Plugins_Events""" - - notiStruc = construct_notifications(sqlQuery, "Plugins") - - # collect "plugins" for the webhook json - json_plugins = notiStruc.json["data"] - - mail_text = mail_text.replace ('', notiStruc.text + '\n') - mail_html = mail_html.replace ('', notiStruc.html) - - # check if we need to report something - plugins_report = len(json_plugins) > 0 - - - json_final = { - "internet": json_internet, - "new_devices": json_new_devices, - "down_devices": json_down_devices, - "events": json_events, - "ports": json_ports, - "plugins": json_plugins, - } - - mail_text = removeDuplicateNewLines(mail_text) - - # Create clickable MAC links - mail_html = generate_mac_links (mail_html, deviceUrl) - - # Write output emails for debug - write_file (logPath + '/report_output.json', json.dumps(json_final)) - write_file (logPath + '/report_output.txt', mail_text) - write_file (logPath + '/report_output.html', mail_html) - - # Send Mail - if json_internet != [] or json_new_devices != [] or json_down_devices != [] or json_events != [] or json_ports != [] or debug_force_notification or plugins_report: - - update_api(True) - - mylog('none', [' Changes detected, sending reports']) - - if REPORT_MAIL and check_config('email'): - updateState(db,"Send: Email") - mylog('info', [' Sending report by Email']) - send_email (mail_text, mail_html) - else : - mylog('verbose', [' Skip email']) - if REPORT_APPRISE and check_config('apprise'): - updateState(db,"Send: Apprise") - mylog('info', [' Sending report by Apprise']) - send_apprise (mail_html, mail_text) - else : - mylog('verbose', [' Skip Apprise']) - if REPORT_WEBHOOK and check_config('webhook'): - updateState(db,"Send: Webhook") - mylog('info', [' Sending report by Webhook']) - send_webhook (json_final, mail_text) - else : - mylog('verbose', [' Skip webhook']) - if REPORT_NTFY and check_config('ntfy'): - updateState(db,"Send: NTFY") - mylog('info', [' Sending report by NTFY']) - send_ntfy (mail_text) - else : - mylog('verbose', [' Skip NTFY']) - if REPORT_PUSHSAFER and check_config('pushsafer'): - updateState(db,"Send: PUSHSAFER") - mylog('info', [' Sending report by PUSHSAFER']) - send_pushsafer (mail_text) - else : - mylog('verbose', [' Skip PUSHSAFER']) - # Update MQTT entities - if REPORT_MQTT and check_config('mqtt'): - updateState(db,"Send: MQTT") - mylog('info', [' Establishing MQTT thread']) - mqtt_start() - else : - mylog('verbose', [' Skip MQTT']) - else : - mylog('verbose', [' No changes to report']) - - # Clean Pending Alert Events - sql.execute ("""UPDATE Devices SET dev_LastNotification = ? 
- WHERE dev_MAC IN (SELECT eve_MAC FROM Events - WHERE eve_PendingAlertEmail = 1) - """, (datetime.datetime.now(),) ) - sql.execute ("""UPDATE Events SET eve_PendingAlertEmail = 0 - WHERE eve_PendingAlertEmail = 1""") - - # clear plugin events - sql.execute ("DELETE FROM Plugins_Events") - - changedPorts_json_struc = None - - # DEBUG - print number of rows updated - mylog('info', ['[', timeNow(), '] Notifications: ', sql.rowcount]) - - # Commit changes - db.commitDB() - -#------------------------------------------------------------------------------- -def construct_notifications(sqlQuery, tableTitle, skipText = False, suppliedJsonStruct = None): - - if suppliedJsonStruct is None and sqlQuery == "": - return noti_struc("", "", "") - - table_attributes = {"style" : "border-collapse: collapse; font-size: 12px; color:#70707", "width" : "100%", "cellspacing" : 0, "cellpadding" : "3px", "bordercolor" : "#C0C0C0", "border":"1"} - headerProps = "width='120px' style='color:blue; font-size: 16px;' bgcolor='#909090' " - thProps = "width='120px' style='color:#F0F0F0' bgcolor='#909090' " - - build_direction = "TOP_TO_BOTTOM" - text_line = '{}\t{}\n' - - if suppliedJsonStruct is None: - json_struc = get_table_as_json(sqlQuery) - else: - json_struc = suppliedJsonStruct - - jsn = json_struc.json - html = "" - text = "" - - if len(jsn["data"]) > 0: - text = tableTitle + "\n---------\n" - - html = convert(jsn, build_direction=build_direction, table_attributes=table_attributes) - html = format_table(html, "data", headerProps, tableTitle).replace('
    ','
      ') - - headers = json_struc.columnNames - - # prepare text-only message - if skipText == False: - - for device in jsn["data"]: - for header in headers: - padding = "" - if len(header) < 4: - padding = "\t" - text += text_line.format ( header + ': ' + padding, device[header]) - text += '\n' - - # Format HTML table headers - for header in headers: - html = format_table(html, header, thProps) - - return noti_struc(jsn, text, html) - -#------------------------------------------------------------------------------- -class noti_struc: - def __init__(self, json, text, html): - self.json = json - self.text = text - self.html = html - -#------------------------------------------------------------------------------- -def check_config(service): - - if service == 'email': - if SMTP_SERVER == '' or REPORT_FROM == '' or REPORT_TO == '': - mylog('none', [' Error: Email service not set up correctly. Check your pialert.conf SMTP_*, REPORT_FROM and REPORT_TO variables.']) - return False - else: - return True - - if service == 'apprise': - if APPRISE_URL == '' or APPRISE_HOST == '': - mylog('none', [' Error: Apprise service not set up correctly. Check your pialert.conf APPRISE_* variables.']) - return False - else: - return True - - if service == 'webhook': - if WEBHOOK_URL == '': - mylog('none', [' Error: Webhook service not set up correctly. Check your pialert.conf WEBHOOK_* variables.']) - return False - else: - return True - - if service == 'ntfy': - if NTFY_HOST == '' or NTFY_TOPIC == '': - mylog('none', [' Error: NTFY service not set up correctly. Check your pialert.conf NTFY_* variables.']) - return False - else: - return True - - if service == 'pushsafer': - if PUSHSAFER_TOKEN == 'ApiKey': - mylog('none', [' Error: Pushsafer service not set up correctly. Check your pialert.conf PUSHSAFER_TOKEN variable.']) - return False - else: - return True - - if service == 'mqtt': - if MQTT_BROKER == '' or MQTT_PORT == '' or MQTT_USER == '' or MQTT_PASSWORD == '': - mylog('none', [' Error: MQTT service not set up correctly. 
Check your pialert.conf MQTT_* variables.'])
-            return False
-        else:
-            return True
-
-
-
-#-------------------------------------------------------------------------------
-def format_table (html, thValue, props, newThValue = ''):
-
-    if newThValue == '':
-        newThValue = thValue
-
-    return html.replace(""+thValue+"", ""+newThValue+"" )
-
-#-------------------------------------------------------------------------------
-def generate_mac_links (html, deviceUrl):
-
-    p = re.compile(r'(?:[0-9a-fA-F]:?){12}')
-
-    MACs = re.findall(p, html)
-
-    for mac in MACs:
-        html = html.replace('' + mac + '','' + mac + '')
-
-    return html
-
-#-------------------------------------------------------------------------------
-def format_report_section (pActive, pSection, pTable, pText, pHTML):
-    global mail_text
-    global mail_html
-
-    # Replace section text
-    if pActive :
-        mail_text = mail_text.replace ('<'+ pTable +'>', pText)
-        mail_html = mail_html.replace ('<'+ pTable +'>', pHTML)
-
-        mail_text = remove_tag (mail_text, pSection)
-        mail_html = remove_tag (mail_html, pSection)
-    else:
-        mail_text = remove_section (mail_text, pSection)
-        mail_html = remove_section (mail_html, pSection)
-
-#-------------------------------------------------------------------------------
-def remove_section (pText, pSection):
-    # Search for the section in the text
-    if pText.find ('<'+ pSection +'>') >=0 \
-    and pText.find ('') >=0 :
-        # return text without the section
-        return pText[:pText.find ('<'+ pSection+'>')] + \
-               pText[pText.find ('') + len (pSection) +3:]
-    else :
-        # return all text
-        return pText
-
-#-------------------------------------------------------------------------------
-def remove_tag (pText, pTag):
-    # return text without the tag
-    return pText.replace ('<'+ pTag +'>','').replace ('','')
-
-
-#-------------------------------------------------------------------------------
-# Reporting
-#-------------------------------------------------------------------------------
-def send_email (pText, pHTML):
-
-    # Print more info for debugging if LOG_LEVEL == 'debug'
-    if LOG_LEVEL == 'debug':
-        print_log ('REPORT_TO: ' + hide_email(str(REPORT_TO)) + ' SMTP_USER: ' + hide_email(str(SMTP_USER)))
-
-    # Compose email
-    msg = MIMEMultipart('alternative')
-    msg['Subject'] = 'Pi.Alert Report'
-    msg['From'] = REPORT_FROM
-    msg['To'] = REPORT_TO
-    msg.attach (MIMEText (pText, 'plain'))
-    msg.attach (MIMEText (pHTML, 'html'))
-
-    failedAt = ''
-
-    failedAt = print_log ('SMTP try')
-
-    try:
-        # Send mail
-        failedAt = print_log('Trying to open connection to ' + str(SMTP_SERVER) + ':' + str(SMTP_PORT))
-
-        if SMTP_FORCE_SSL:
-            failedAt = print_log('SMTP_FORCE_SSL == True so using .SMTP_SSL()')
-            if SMTP_PORT == 0:
-                failedAt = print_log('SMTP_PORT == 0 so sending .SMTP_SSL(SMTP_SERVER)')
-                smtp_connection = smtplib.SMTP_SSL(SMTP_SERVER)
-            else:
-                failedAt = print_log('SMTP_PORT != 0 so sending .SMTP_SSL(SMTP_SERVER, SMTP_PORT)')
-                smtp_connection = smtplib.SMTP_SSL(SMTP_SERVER, SMTP_PORT)
-
-        else:
-            failedAt = print_log('SMTP_FORCE_SSL == False so using .SMTP()')
-            if SMTP_PORT == 0:
-                failedAt = print_log('SMTP_PORT == 0 so sending .SMTP(SMTP_SERVER)')
-                smtp_connection = smtplib.SMTP (SMTP_SERVER)
-            else:
-                failedAt = print_log('SMTP_PORT != 0 so sending .SMTP(SMTP_SERVER, SMTP_PORT)')
-                smtp_connection = smtplib.SMTP (SMTP_SERVER, SMTP_PORT)
-
-        failedAt = print_log('Setting SMTP debug level')
-
-        # Log the communication between the SMTP server and client at debug level
-        if LOG_LEVEL == 'debug':
-            smtp_connection.set_debuglevel(1)
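-        # (note: when SMTP_PORT == 0 no port is passed above, and smtplib then
-        #  falls back to its defaults - port 25 for SMTP() and 465 for SMTP_SSL())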
- - failedAt = print_log( 'Sending .ehlo()') - smtp_connection.ehlo() - - if not SMTP_SKIP_TLS: - failedAt = print_log('SMTP_SKIP_TLS == False so sending .starttls()') - smtp_connection.starttls() - failedAt = print_log('SMTP_SKIP_TLS == False so sending .ehlo()') - smtp_connection.ehlo() - if not SMTP_SKIP_LOGIN: - failedAt = print_log('SMTP_SKIP_LOGIN == False so sending .login()') - smtp_connection.login (SMTP_USER, SMTP_PASS) - - failedAt = print_log('Sending .sendmail()') - smtp_connection.sendmail (REPORT_FROM, REPORT_TO, msg.as_string()) - smtp_connection.quit() - except smtplib.SMTPAuthenticationError as e: - mylog('none', [' ERROR: Failed at - ', failedAt]) - mylog('none', [' ERROR: Couldn\'t connect to the SMTP server (SMTPAuthenticationError), skipping Email (enable LOG_LEVEL=debug for more logging)']) - except smtplib.SMTPServerDisconnected as e: - mylog('none', [' ERROR: Failed at - ', failedAt]) - mylog('none', [' ERROR: Couldn\'t connect to the SMTP server (SMTPServerDisconnected), skipping Email (enable LOG_LEVEL=debug for more logging)']) - - print_log(' DEBUG: Last executed - ' + str(failedAt)) - -#------------------------------------------------------------------------------- -def send_ntfy (_Text): - headers = { - "Title": "Pi.Alert Notification", - "Actions": "view, Open Dashboard, "+ REPORT_DASHBOARD_URL, - "Priority": "urgent", - "Tags": "warning" - } - # if username and password are set generate hash and update header - if NTFY_USER != "" and NTFY_PASSWORD != "": - # Generate hash for basic auth - usernamepassword = "{}:{}".format(NTFY_USER,NTFY_PASSWORD) - basichash = b64encode(bytes(NTFY_USER + ':' + NTFY_PASSWORD, "utf-8")).decode("ascii") - - # add authorization header with hash - headers["Authorization"] = "Basic {}".format(basichash) - - requests.post("{}/{}".format( NTFY_HOST, NTFY_TOPIC), - data=_Text, - headers=headers) - -def send_pushsafer (_Text): - url = 'https://www.pushsafer.com/api' - post_fields = { - "t" : 'Pi.Alert Message', - "m" : _Text, - "s" : 11, - "v" : 3, - "i" : 148, - "c" : '#ef7f7f', - "d" : 'a', - "u" : REPORT_DASHBOARD_URL, - "ut" : 'Open Pi.Alert', - "k" : PUSHSAFER_TOKEN, - } - requests.post(url, data=post_fields) - -#------------------------------------------------------------------------------- -def send_webhook (_json, _html): - - # use data type based on specified payload type - if WEBHOOK_PAYLOAD == 'json': - payloadData = _json - if WEBHOOK_PAYLOAD == 'html': - payloadData = _html - if WEBHOOK_PAYLOAD == 'text': - payloadData = to_text(_json) - - # Define slack-compatible payload - _json_payload = { "text": payloadData } if WEBHOOK_PAYLOAD == 'text' else { - "username": "Pi.Alert", - "text": "There are new notifications", - "attachments": [{ - "title": "Pi.Alert Notifications", - "title_link": REPORT_DASHBOARD_URL, - "text": payloadData - }] - } - - # DEBUG - Write the json payload into a log file for debugging - write_file (logPath + '/webhook_payload.json', json.dumps(_json_payload)) - - # Using the Slack-Compatible Webhook endpoint for Discord so that the same payload can be used for both - if(WEBHOOK_URL.startswith('https://discord.com/api/webhooks/') and not WEBHOOK_URL.endswith("/slack")): - _WEBHOOK_URL = f"{WEBHOOK_URL}/slack" - curlParams = ["curl","-i","-H", "Content-Type:application/json" ,"-d", json.dumps(_json_payload), _WEBHOOK_URL] - else: - _WEBHOOK_URL = WEBHOOK_URL - curlParams = ["curl","-i","-X", WEBHOOK_REQUEST_METHOD ,"-H", "Content-Type:application/json" ,"-d", json.dumps(_json_payload), _WEBHOOK_URL] - - 
# execute CURL call
-    try:
-        # try running a subprocess
-        mylog('debug', curlParams)
-        p = subprocess.Popen(curlParams, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
-
-        stdout, stderr = p.communicate()
-
-        # write stdout and stderr into .log files for debugging if needed
-        logResult (stdout, stderr) # TO-DO should be changed to mylog
-    except subprocess.CalledProcessError as e:
-        # An error occurred, handle it
-        mylog('none', [e.output])
-
-#-------------------------------------------------------------------------------
-def send_apprise (html, text):
-    # Define Apprise-compatible payload (https://github.com/caronc/apprise-api#stateless-solution)
-    payload = html
-
-    if APPRISE_PAYLOAD == 'text':
-        payload = text
-
-    _json_payload={
-        "urls": APPRISE_URL,
-        "title": "Pi.Alert Notifications",
-        "format": APPRISE_PAYLOAD,
-        "body": payload
-    }
-
-    try:
-        # try running a subprocess
-        p = subprocess.Popen(["curl","-i","-X", "POST" ,"-H", "Content-Type:application/json" ,"-d", json.dumps(_json_payload), APPRISE_HOST], stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
-        stdout, stderr = p.communicate()
-        # write stdout and stderr into .log files for debugging if needed
-        logResult (stdout, stderr) # TO-DO should be changed to mylog
-    except subprocess.CalledProcessError as e:
-        # An error occurred, handle it
-        mylog('none', [e.output])
-
-#-------------------------------------------------------------------------------
-# MQTT
-#-------------------------------------------------------------------------------
-mqtt_connected_to_broker = False
-mqtt_sensors = []
-
-def publish_mqtt(client, topic, message):
-    status = 1
-    while status != 0:
-        result = client.publish(
-            topic=topic,
-            payload=message,
-            qos=MQTT_QOS,
-            retain=True,
-        )
-
-        status = result[0]
-
-        if status != 0:
-            mylog('info', ["Waiting to reconnect to MQTT broker"])
-            time.sleep(0.1)
-    return True
-
-#-------------------------------------------------------------------------------
-def create_generic_device(client):
-
-    deviceName = 'PiAlert'
-    deviceId = 'pialert'
-
-    create_sensor(client, deviceId, deviceName, 'sensor', 'online', 'wifi-check')
-    create_sensor(client, deviceId, deviceName, 'sensor', 'down', 'wifi-cancel')
-    create_sensor(client, deviceId, deviceName, 'sensor', 'all', 'wifi')
-    create_sensor(client, deviceId, deviceName, 'sensor', 'archived', 'wifi-lock')
-    create_sensor(client, deviceId, deviceName, 'sensor', 'new', 'wifi-plus')
-    create_sensor(client, deviceId, deviceName, 'sensor', 'unknown', 'wifi-alert')
-
-
-#-------------------------------------------------------------------------------
-def create_sensor(client, deviceId, deviceName, sensorType, sensorName, icon):
-
-    new_sensor_config = sensor_config(deviceId, deviceName, sensorType, sensorName, icon)
-
-    # check if the config is already in the list; if not, add it, otherwise skip
-    global mqtt_sensors, uniqueSensorCount
-
-    is_unique = True
-
-    for sensor in mqtt_sensors:
-        if sensor.hash == new_sensor_config.hash:
-            is_unique = False
-            break
-
-    # save if unique
-    if is_unique:
-        publish_sensor(client, new_sensor_config)
-
-
-#-------------------------------------------------------------------------------
-class sensor_config:
-    def __init__(self, deviceId, deviceName, sensorType, sensorName, icon):
-        self.deviceId = deviceId
-        self.deviceName = deviceName
-        self.sensorType = sensorType
-        self.sensorName = sensorName
-        self.icon = icon
-        self.hash = str(hash(str(deviceId) + str(deviceName)+ str(sensorType)+ str(sensorName)+ str(icon)))
-
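The hash field above is what create_sensor() compares against the global mqtt_sensors list before publishing, so an identical Home Assistant discovery config is only sent to the broker once. A minimal sketch of that dedup property (hypothetical values, not part of the patch):

    a = sensor_config('pialert', 'PiAlert', 'sensor', 'online', 'wifi-check')
    b = sensor_config('pialert', 'PiAlert', 'sensor', 'online', 'wifi-check')
    assert a.hash == b.hash  # same five fields -> same hash, so b would be skipped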
-#-------------------------------------------------------------------------------
-def publish_sensor(client, sensorConf):
-
-    global mqtt_sensors
-
-    message = '{ \
-                "name":"'+ sensorConf.deviceName +' '+sensorConf.sensorName+'", \
-                "state_topic":"system-sensors/'+sensorConf.sensorType+'/'+sensorConf.deviceId+'/state", \
-                "value_template":"{{value_json.'+sensorConf.sensorName+'}}", \
-                "unique_id":"'+sensorConf.deviceId+'_sensor_'+sensorConf.sensorName+'", \
-                "device": \
-                    { \
-                        "identifiers": ["'+sensorConf.deviceId+'_sensor"], \
-                        "manufacturer": "PiAlert", \
-                        "name":"'+sensorConf.deviceName+'" \
-                    }, \
-                "icon":"mdi:'+sensorConf.icon+'" \
-                }'
-
-    topic='homeassistant/'+sensorConf.sensorType+'/'+sensorConf.deviceId+'/'+sensorConf.sensorName+'/config'
-
-    # add the sensor to the global list to keep track of successfully added sensors
-    if publish_mqtt(client, topic, message):
-        # hack - delay adding to the queue in case the process is restarted and
-        # previous publish processes were aborted (it takes ~2s to update a
-        # sensor config on the broker)
-        time.sleep(MQTT_DELAY_SEC)
-        mqtt_sensors.append(sensorConf)
-
-#-------------------------------------------------------------------------------
-def mqtt_create_client():
-    def on_disconnect(client, userdata, rc):
-        global mqtt_connected_to_broker
-        mqtt_connected_to_broker = False
-
-        # not sure if the line below is correct / necessary
-        # client = mqtt_create_client()
-
-    def on_connect(client, userdata, flags, rc):
-        global mqtt_connected_to_broker
-
-        if rc == 0:
-            mylog('verbose', [" Connected to broker"])
-            mqtt_connected_to_broker = True # Signal connection
-        else:
-            mylog('none', [" Connection failed"])
-            mqtt_connected_to_broker = False
-
-
-    client = mqtt_client.Client('PiAlert') # Set Connecting Client ID
-    client.username_pw_set(MQTT_USER, MQTT_PASSWORD)
-    client.on_connect = on_connect
-    client.on_disconnect = on_disconnect
-    client.connect(MQTT_BROKER, MQTT_PORT)
-    client.loop_start()
-
-    return client
-
-#-------------------------------------------------------------------------------
-def mqtt_start():
-
-    global client, mqtt_connected_to_broker
-
-    if mqtt_connected_to_broker == False:
-        mqtt_connected_to_broker = True
-        client = mqtt_create_client()
-
-    # General stats
-
-    # Create a generic device for overall stats
-    create_generic_device(client)
-
-    # Get the data
-    row = get_device_stats()
-
-    columns = ["online","down","all","archived","new","unknown"]
-
-    payload = ""
-
-    # Update the values
-    for column in columns:
-        payload += '"'+column+'": ' + str(row[column]) +','
-
-    # Publish (wrap into {} and remove the last ',' from above)
-    publish_mqtt(client, "system-sensors/sensor/pialert/state",
-                 '{ \
-                    '+ payload[:-1] +'\
-                 }'
-                 )
-
-
-    # Specific devices
-
-    # Get all devices
-    devices = get_all_devices()
-
-    sec_delay = len(devices) * int(MQTT_DELAY_SEC)*5
-
-    mylog('info', [" Estimated delay: ", (sec_delay), 's ', '(', round(sec_delay/60,1) , 'min)' ])
-
-    for device in devices:
-
-        # Create devices in Home Assistant - send config messages
-        deviceId = 'mac_' + device["dev_MAC"].replace(" ", "").replace(":", "_").lower()
-        deviceNameDisplay = re.sub('[^a-zA-Z0-9-_\s]', '', device["dev_Name"])
-
-        create_sensor(client, deviceId, deviceNameDisplay, 'sensor', 'last_ip', 'ip-network')
-        create_sensor(client, deviceId, deviceNameDisplay, 'binary_sensor', 'is_present', 'wifi')
-        create_sensor(client, deviceId, deviceNameDisplay, 'sensor', 'mac_address', 'folder-key-network')
-        create_sensor(client, deviceId, deviceNameDisplay, 
'sensor', 'is_new', 'bell-alert-outline') - create_sensor(client, deviceId, deviceNameDisplay, 'sensor', 'vendor', 'cog') - - # update device sensors in home assistant - - publish_mqtt(client, 'system-sensors/sensor/'+deviceId+'/state', - '{ \ - "last_ip": "' + device["dev_LastIP"] +'", \ - "is_new": "' + str(device["dev_NewDevice"]) +'", \ - "vendor": "' + sanitize_string(device["dev_Vendor"]) +'", \ - "mac_address": "' + str(device["dev_MAC"]) +'" \ - }' - ) - - publish_mqtt(client, 'system-sensors/binary_sensor/'+deviceId+'/state', - '{ \ - "is_present": "' + to_binary_sensor(str(device["dev_PresentLastScan"])) +'"\ - }' - ) - - # delete device / topic - # homeassistant/sensor/mac_44_ef_bf_c4_b1_af/is_present/config - # client.publish( - # topic="homeassistant/sensor/"+deviceId+"/is_present/config", - # payload="", - # qos=1, - # retain=True, - # ) - # time.sleep(10) - - - - -#=============================================================================== -# Home Assistant UTILs -#=============================================================================== -def to_binary_sensor(input): - # In HA a binary sensor returns ON or OFF - result = "OFF" - - # bytestring - if isinstance(input, str): - if input == "1": - result = "ON" - elif isinstance(input, int): - if input == 1: - result = "ON" - elif isinstance(input, bool): - if input == True: - result = "ON" - elif isinstance(input, bytes): - if bytes_to_string(input) == "1": - result = "ON" - return result - - - - - #=============================================================================== # UTIL #=============================================================================== - - - - - - - - -#------------------------------------------------------------------------------- - - - - -#------------------------------------------------------------------------------- - -def sanitize_string(input): - if isinstance(input, bytes): - input = input.decode('utf-8') - value = bytes_to_string(re.sub('[^a-zA-Z0-9-_\s]', '', str(input))) - return value - - - - - -#------------------------------------------------------------------------------- - -def add_json_list (row, list): - new_row = [] - for column in row : - column = bytes_to_string(column) - - new_row.append(column) - - list.append(new_row) - - return list - -#------------------------------------------------------------------------------- - -def to_text(_json): - payloadData = "" - if len(_json['internet']) > 0 and 'internet' in INCLUDED_SECTIONS: - payloadData += "INTERNET\n" - for event in _json['internet']: - payloadData += event[3] + ' on ' + event[2] + '. ' + event[4] + '. 
New address:' + event[1] + '\n' - - if len(_json['new_devices']) > 0 and 'new_devices' in INCLUDED_SECTIONS: - payloadData += "NEW DEVICES:\n" - for event in _json['new_devices']: - if event[4] is None: - event[4] = event[11] - payloadData += event[1] + ' - ' + event[4] + '\n' - - if len(_json['down_devices']) > 0 and 'down_devices' in INCLUDED_SECTIONS: - write_file (logPath + '/down_devices_example.log', _json['down_devices']) - payloadData += 'DOWN DEVICES:\n' - for event in _json['down_devices']: - if event[4] is None: - event[4] = event[11] - payloadData += event[1] + ' - ' + event[4] + '\n' - - if len(_json['events']) > 0 and 'events' in INCLUDED_SECTIONS: - payloadData += "EVENTS:\n" - for event in _json['events']: - if event[8] != "Internet": - payloadData += event[8] + " on " + event[1] + " " + event[3] + " at " + event[2] + "\n" - - return payloadData - -#------------------------------------------------------------------------------- -def get_device_stats(db): - - # columns = ["online","down","all","archived","new","unknown"] - sql.execute(sql_devices_stats) - - row = sql.fetchone() - db.commitDB() - - return row -#------------------------------------------------------------------------------- -def get_all_devices(db): - - sql.execute(sql_devices_all) - - row = sql.fetchall() - - db.commitDB() - return row - -#------------------------------------------------------------------------------- - - - -#------------------------------------------------------------------------------- -def removeDuplicateNewLines(text): - if "\n\n\n" in text: - return removeDuplicateNewLines(text.replace("\n\n\n", "\n\n")) - else: - return text - - -#------------------------------------------------------------------------------- -def hide_email(email): - m = email.split('@') - - if len(m) == 2: - return f'{m[0][0]}{"*"*(len(m[0])-2)}{m[0][-1] if len(m[0]) > 1 else ""}@{m[1]}' - - return email - #------------------------------------------------------------------------------- def check_and_run_event(db): sql.execute(""" select * from Parameters where par_ID = "Front_Event" """) @@ -2588,39 +377,7 @@ def handle_test(testType): -#------------------------------------------------------------------------------- -def isNewVersion(db): - global newVersionAvailable - if newVersionAvailable == False: - - f = open(pialertPath + '/front/buildtimestamp.txt', 'r') - buildTimestamp = int(f.read().strip()) - f.close() - - data = "" - - try: - url = requests.get("https://api.github.com/repos/jokob-sk/Pi.Alert/releases") - text = url.text - data = json.loads(text) - except requests.exceptions.ConnectionError as e: - mylog('info', [" Couldn't check for new release."]) - data = "" - - # make sure we received a valid response and not an API rate limit exceeded message - if data != "" and len(data) > 0 and isinstance(data, list) and "published_at" in data[0]: - - dateTimeStr = data[0]["published_at"] - - realeaseTimestamp = int(datetime.datetime.strptime(dateTimeStr, '%Y-%m-%dT%H:%M:%SZ').strftime('%s')) - - if realeaseTimestamp > buildTimestamp + 600: - mylog('none', [" New version of the container available!"]) - newVersionAvailable = True - updateState(db, 'Back_New_Version_Available', str(newVersionAvailable)) - - return newVersionAvailable #------------------------------------------------------------------------------- @@ -2628,46 +385,6 @@ def isNewVersion(db): # Plugins #------------------------------------------------------------------------------- 
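The run_plugin_scripts function removed below (and re-added to pialert/plugin.py later in this patch) gates each plugin on its RUN setting. A rough sketch of just that gating decision, using a hypothetical plugin dict and an illustrative stand-in for the real get_plugin_setting helper:

    plugin = {"unique_prefix": "DEMO", "settings": [{"function": "RUN", "value": "once"}]}

    def run_setting_value(plugin):
        # stand-in for get_plugin_setting(plugin, "RUN")['value']
        for s in plugin["settings"]:
            if s.get("function") == "RUN":
                return s.get("value")
        return None

    runType = "once"
    shouldRun = (run_setting_value(plugin) == runType)  # True -> execute_plugin(plugin) would run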
-#-------------------------------------------------------------------------------
-def run_plugin_scripts(db, runType):
-
-    global plugins, tz, mySchedules
-
-    # Header
-    updateState(db,"Run: Plugins")
-
-    mylog('debug', [' [Plugins] Check if any plugins need to be executed on run type: ', runType])
-
-    for plugin in plugins:
-
-        shouldRun = False
-
-        set = get_plugin_setting(plugin, "RUN")
-        if set != None and set['value'] == runType:
-            if runType != "schedule":
-                shouldRun = True
-            elif runType == "schedule":
-                # run if overdue scheduled time
-                prefix = plugin["unique_prefix"]
-
-                # check if any schedule contains a unique plugin prefix matching the current plugin
-                for schd in mySchedules:
-                    if schd.service == prefix:
-                        # Check if schedule overdue
-                        shouldRun = schd.runScheduleCheck()
-                        if shouldRun:
-                            # note the last time the scheduled plugin run was executed
-                            schd.last_run = datetime.datetime.now(tz).replace(microsecond=0)
-
-        if shouldRun:
-
-            print_plugin_info(plugin, ['display_name'])
-            mylog('debug', [' [Plugins] CMD: ', get_plugin_setting(plugin, "CMD")["value"]])
-            execute_plugin(plugin)
-
-#-------------------------------------------------------------------------------
-# Cron-like Scheduling
-
 #===============================================================================
 # BEGIN
diff --git a/pialert/pihole.py b/pialert/pihole.py
new file mode 100644
index 00000000..50d25e86
--- /dev/null
+++ b/pialert/pihole.py
@@ -0,0 +1,48 @@
+
+from const import piholeDB, piholeDhcpleases
+
+#-------------------------------------------------------------------------------
+def copy_pihole_network (db):
+    sql = db.sql # TO-DO
+    # Open Pi-hole DB
+    sql.execute ("ATTACH DATABASE '"+ piholeDB +"' AS PH")
+
+    # Copy Pi-hole Network table
+    sql.execute ("DELETE FROM PiHole_Network")
+    sql.execute ("""INSERT INTO PiHole_Network (PH_MAC, PH_Vendor, PH_LastQuery,
+                        PH_Name, PH_IP)
+                    SELECT hwaddr, macVendor, lastQuery,
+                        (SELECT name FROM PH.network_addresses
+                         WHERE network_id = id ORDER BY lastseen DESC, ip),
+                        (SELECT ip FROM PH.network_addresses
+                         WHERE network_id = id ORDER BY lastseen DESC, ip)
+                    FROM PH.network
+                    WHERE hwaddr NOT LIKE 'ip-%'
+                      AND hwaddr <> '00:00:00:00:00:00' """)
+    sql.execute ("""UPDATE PiHole_Network SET PH_Name = '(unknown)'
+                    WHERE PH_Name IS NULL OR PH_Name = '' """)
+    # Close Pi-hole DB
+    sql.execute ("DETACH PH")
+    db.commit()
+
+    return str(sql.rowcount) != "0"
+
+#-------------------------------------------------------------------------------
+def read_DHCP_leases (db):
+    sql = db.sql # TO-DO
+    # Read DHCP Leases
+    # Bugfix #1 - dhcp.leases: lines with different number of columns (5 col)
+    data = []
+    with open(piholeDhcpleases, 'r') as f:
+        for line in f:
+            reporting = True
+            row = line.rstrip().split()
+            if len(row) == 5 :
+                data.append (row)
+
+    # Insert into PiAlert table
+    sql.executemany ("""INSERT INTO DHCP_Leases (DHCP_DateTime, DHCP_MAC,
+                            DHCP_IP, DHCP_Name, DHCP_MAC2)
+                        VALUES (?, ?, ?, ?, ?)
+                     """, data)
+
diff --git a/pialert/plugin.py b/pialert/plugin.py
index eb1c1e6c..df4c4e3f 100644
--- a/pialert/plugin.py
+++ b/pialert/plugin.py
@@ -6,9 +6,10 @@ from collections import namedtuple
 
 # pialert modules
 from const import pluginsPath, logPath
+from conf import mySettings
 from files import get_file_content, write_file
 from logger import mylog
-from conf import mySettings
+from database import updateState
 
 #from api import update_api
 
@@ -18,6 +19,48 @@ from conf import mySettings
 def timeNow():
     return datetime.datetime.now().replace(microsecond=0)
 
+
+#-------------------------------------------------------------------------------
+def run_plugin_scripts(db, runType):
+
+    global plugins, tz, mySchedules
+
+    # Header
+    updateState(db,"Run: Plugins")
+
+    mylog('debug', [' [Plugins] Check if any plugins need to be executed on run type: ', runType])
+
+    for plugin in plugins:
+
+        shouldRun = False
+
+        set = get_plugin_setting(plugin, "RUN")
+        if set != None and set['value'] == runType:
+            if runType != "schedule":
+                shouldRun = True
+            elif runType == "schedule":
+                # run if overdue scheduled time
+                prefix = plugin["unique_prefix"]
+
+                # check if any schedule contains a unique plugin prefix matching the current plugin
+                for schd in mySchedules:
+                    if schd.service == prefix:
+                        # Check if schedule overdue
+                        shouldRun = schd.runScheduleCheck()
+                        if shouldRun:
+                            # note the last time the scheduled plugin run was executed
+                            schd.last_run = datetime.datetime.now(tz).replace(microsecond=0)
+
+        if shouldRun:
+
+            print_plugin_info(plugin, ['display_name'])
+            mylog('debug', [' [Plugins] CMD: ', get_plugin_setting(plugin, "CMD")["value"]])
+            execute_plugin(plugin)
+
+
+
+
+
 #-------------------------------------------------------------------------------
 def get_plugins_configs():
diff --git a/pialert/reporting.py b/pialert/reporting.py
new file mode 100644
index 00000000..2277ed95
--- /dev/null
+++ b/pialert/reporting.py
@@ -0,0 +1,640 @@
+from email.mime.multipart import MIMEMultipart
+from email.mime.text import MIMEText
+
+import datetime
+import json
+import smtplib
+import socket
+from base64 import b64encode
+import subprocess
+import requests
+from json2table import convert
+
+from const import pialertPath, logPath
+# from pialert.api import update_api
+from conf import *
+from database import get_table_as_json, updateState
+from files import write_file
+from helper import generate_mac_links, isNewVersion, removeDuplicateNewLines, timeNow, hide_email
+from logger import logResult, mylog, print_log
+from mqtt import mqtt_start
+
+
+
+
+#===============================================================================
+# REPORTING
+#===============================================================================
+# create a json for webhook and mqtt notifications to provide further integration options
+
+
+json_final = []
+
+#-------------------------------------------------------------------------------
+class noti_struc:
+    def __init__(self, json, text, html):
+        self.json = json
+        self.text = text
+        self.html = html
+
+
+#-------------------------------------------------------------------------------
+def construct_notifications(sqlQuery, tableTitle, skipText = False, suppliedJsonStruct = None):
+
+    if suppliedJsonStruct is None and sqlQuery == "":
+        return noti_struc("", "", "")
+
+    table_attributes = {"style" : "border-collapse: collapse; font-size: 12px; color:#70707", "width" : "100%", "cellspacing" : 0, "cellpadding" : "3px", "bordercolor" : "#C0C0C0", "border":"1"}
+    headerProps = 
"width='120px' style='color:blue; font-size: 16px;' bgcolor='#909090' " + thProps = "width='120px' style='color:#F0F0F0' bgcolor='#909090' " + + build_direction = "TOP_TO_BOTTOM" + text_line = '{}\t{}\n' + + if suppliedJsonStruct is None: + json_struc = get_table_as_json(sqlQuery) + else: + json_struc = suppliedJsonStruct + + jsn = json_struc.json + html = "" + text = "" + + if len(jsn["data"]) > 0: + text = tableTitle + "\n---------\n" + + html = convert(jsn, build_direction=build_direction, table_attributes=table_attributes) + html = format_table(html, "data", headerProps, tableTitle).replace('
','
')
+
+    headers = json_struc.columnNames
+
+    # prepare text-only message
+    if skipText == False:
+
+        for device in jsn["data"]:
+            for header in headers:
+                padding = ""
+                if len(header) < 4:
+                    padding = "\t"
+                text += text_line.format ( header + ': ' + padding, device[header])
+            text += '\n'
+
+    # Format HTML table headers
+    for header in headers:
+        html = format_table(html, header, thProps)
+
+    return noti_struc(jsn, text, html)
+
+
+
+
+def send_notifications (db):
+    sql = db.sql #TO-DO
+    global mail_text, mail_html, json_final, changedPorts_json_struc, partial_html, partial_txt, partial_json
+
+    deviceUrl = REPORT_DASHBOARD_URL + '/deviceDetails.php?mac='
+    plugins_report = False
+
+    # Reporting section
+    mylog('verbose', ['  Check if something to report'])
+
+    # prepare variables for JSON construction
+    json_internet = []
+    json_new_devices = []
+    json_down_devices = []
+    json_events = []
+    json_ports = []
+    json_plugins = []
+
+    # Disable reporting on events for devices where reporting is disabled based on the MAC address
+    sql.execute ("""UPDATE Events SET eve_PendingAlertEmail = 0
+                    WHERE eve_PendingAlertEmail = 1 AND eve_EventType != 'Device Down' AND eve_MAC IN
+                    (
+                        SELECT dev_MAC FROM Devices WHERE dev_AlertEvents = 0
+                    )""")
+    sql.execute ("""UPDATE Events SET eve_PendingAlertEmail = 0
+                    WHERE eve_PendingAlertEmail = 1 AND eve_EventType = 'Device Down' AND eve_MAC IN
+                    (
+                        SELECT dev_MAC FROM Devices WHERE dev_AlertDeviceDown = 0
+                    )""")
+
+    # Open text Template
+    template_file = open(pialertPath + '/back/report_template.txt', 'r')
+    mail_text = template_file.read()
+    template_file.close()
+
+    # Open html Template
+    template_file = open(pialertPath + '/back/report_template.html', 'r')
+    if isNewVersion(db):
+        template_file = open(pialertPath + '/back/report_template_new_version.html', 'r')
+
+    mail_html = template_file.read()
+    template_file.close()
+
+    # Report Header & footer
+    timeFormated = timeNow().strftime ('%Y-%m-%d %H:%M')
+    mail_text = mail_text.replace ('<REPORT_DATE>', timeFormated)
+    mail_html = mail_html.replace ('<REPORT_DATE>', timeFormated)
+
+    mail_text = mail_text.replace ('<SERVER_NAME>', socket.gethostname() )
+    mail_html = mail_html.replace ('<SERVER_NAME>', socket.gethostname() )
+
+    if 'internet' in INCLUDED_SECTIONS:
+        # Compose Internet Section
+        sqlQuery = """SELECT eve_MAC as MAC, eve_IP as IP, eve_DateTime as Datetime, eve_EventType as "Event Type", eve_AdditionalInfo as "More info" FROM Events
+                        WHERE eve_PendingAlertEmail = 1 AND eve_MAC = 'Internet'
+                        ORDER BY eve_DateTime"""
+
+        notiStruc = construct_notifications(sqlQuery, "Internet IP change")
+
+        # collect "internet" (IP changes) for the webhook json
+        json_internet = notiStruc.json["data"]
+
+        mail_text = mail_text.replace ('', notiStruc.text + '\n')
+        mail_html = mail_html.replace ('', notiStruc.html)
+
+    if 'new_devices' in INCLUDED_SECTIONS:
+        # Compose New Devices Section
+        sqlQuery = """SELECT eve_MAC as MAC, eve_DateTime as Datetime, dev_LastIP as IP, eve_EventType as "Event Type", dev_Name as "Device name", dev_Comments as Comments FROM Events_Devices
+                        WHERE eve_PendingAlertEmail = 1
+                        AND eve_EventType = 'New Device'
+                        ORDER BY eve_DateTime"""
+
+        notiStruc = construct_notifications(sqlQuery, "New devices")
+
+        # collect "new_devices" for the webhook json
+        json_new_devices = notiStruc.json["data"]
+
+        mail_text = mail_text.replace ('', notiStruc.text + '\n')
+        mail_html = mail_html.replace ('', notiStruc.html)
+
+    if 'down_devices' in INCLUDED_SECTIONS:
+        # Compose Devices Down Section
+        sqlQuery = """SELECT eve_MAC as MAC, eve_DateTime as Datetime, dev_LastIP as IP, eve_EventType as "Event Type", dev_Name as "Device name", dev_Comments as Comments FROM Events_Devices
+                        WHERE eve_PendingAlertEmail = 1
+                        AND eve_EventType = 'Device Down'
+                        ORDER BY eve_DateTime"""
+
+        notiStruc = construct_notifications(sqlQuery, "Down devices")
+
+        # collect "down_devices" for the webhook json
+        json_down_devices = notiStruc.json["data"]
+
+        mail_text = mail_text.replace ('', notiStruc.text + '\n')
+        mail_html = mail_html.replace ('', notiStruc.html)
+
+    if 'events' in INCLUDED_SECTIONS:
+        # Compose Events Section
+        sqlQuery = """SELECT eve_MAC as MAC, eve_DateTime as Datetime, dev_LastIP as IP, eve_EventType as "Event Type", dev_Name as "Device name", dev_Comments as Comments FROM Events_Devices
+                        WHERE eve_PendingAlertEmail = 1
+                        AND eve_EventType IN ('Connected','Disconnected',
+                            'IP Changed')
+                        ORDER BY eve_DateTime"""
+
+        notiStruc = construct_notifications(sqlQuery, "Events")
+
+        # collect "events" for the webhook json
+        json_events = notiStruc.json["data"]
+
+        mail_text = mail_text.replace ('', notiStruc.text + '\n')
+        mail_html = mail_html.replace ('', notiStruc.html)
+
+    if 'ports' in INCLUDED_SECTIONS:
+        # collect "ports" for the webhook json
+        if changedPorts_json_struc is not None:
+            json_ports = changedPorts_json_struc.json["data"]
+
+        notiStruc = construct_notifications("", "Ports", True, changedPorts_json_struc)
+
+        mail_html = mail_html.replace ('', notiStruc.html)
+
+        portsTxt = ""
+        if changedPorts_json_struc is not None:
+            portsTxt = "Ports \n---------\n Ports changed! Check PiAlert for details!\n"
+
+        mail_text = mail_text.replace ('', portsTxt )
+
+    if 'plugins' in INCLUDED_SECTIONS and ENABLE_PLUGINS:
+        # Compose Plugins Section
+        sqlQuery = """SELECT Plugin, Object_PrimaryId, Object_SecondaryId, DateTimeChanged, Watched_Value1, Watched_Value2, Watched_Value3, Watched_Value4, Status from Plugins_Events"""
+
+        notiStruc = construct_notifications(sqlQuery, "Plugins")
+
+        # collect "plugins" for the webhook json
+        json_plugins = notiStruc.json["data"]
+
+        mail_text = mail_text.replace ('', notiStruc.text + '\n')
+        mail_html = mail_html.replace ('', notiStruc.html)
+
+        # check if we need to report something
+        plugins_report = len(json_plugins) > 0
+
+
+    json_final = {
+                    "internet": json_internet,
+                    "new_devices": json_new_devices,
+                    "down_devices": json_down_devices,
+                    "events": json_events,
+                    "ports": json_ports,
+                    "plugins": json_plugins,
+                 }
+
+    mail_text = removeDuplicateNewLines(mail_text)
+
+    # Create clickable MAC links
+    mail_html = generate_mac_links (mail_html, deviceUrl)
+
+    # Write output emails for debug
+    write_file (logPath + '/report_output.json', json.dumps(json_final))
+    write_file (logPath + '/report_output.txt', mail_text)
+    write_file (logPath + '/report_output.html', mail_html)
+
+    # Send Mail
+    if json_internet != [] or json_new_devices != [] or json_down_devices != [] or json_events != [] or json_ports != [] or debug_force_notification or plugins_report:
+
+        # update_api(True) # TO-DO
+
+        mylog('none', ['    Changes detected, sending reports'])
+
+        if REPORT_MAIL and check_config('email'):
+            updateState(db,"Send: Email")
+            mylog('info', ['    Sending report by Email'])
+            send_email (mail_text, mail_html)
+        else :
+            mylog('verbose', ['    Skip email'])
+        if REPORT_APPRISE and check_config('apprise'):
+            updateState(db,"Send: Apprise")
+            mylog('info', ['    Sending report by Apprise'])
+            send_apprise (mail_html, mail_text)
+        else :
+            mylog('verbose', ['    Skip Apprise'])
+        if REPORT_WEBHOOK and check_config('webhook'):
+            updateState(db,"Send: Webhook")
+            mylog('info', ['    Sending report by Webhook'])
+            send_webhook (json_final, mail_text)
+        else :
+            mylog('verbose', ['    Skip webhook'])
+        if REPORT_NTFY and check_config('ntfy'):
+            updateState(db,"Send: NTFY")
+            mylog('info', ['    Sending report by NTFY'])
+            send_ntfy (mail_text)
+        else :
+            mylog('verbose', ['    Skip NTFY'])
+        if REPORT_PUSHSAFER and check_config('pushsafer'):
+            updateState(db,"Send: PUSHSAFER")
+            mylog('info', ['    Sending report by PUSHSAFER'])
+            send_pushsafer (mail_text)
+        else :
+            mylog('verbose', ['    Skip PUSHSAFER'])
+        # Update MQTT entities
+        if REPORT_MQTT and check_config('mqtt'):
+            updateState(db,"Send: MQTT")
+            mylog('info', ['    Establishing MQTT thread'])
+            mqtt_start()
+        else :
+            mylog('verbose', ['    Skip MQTT'])
+    else :
+        mylog('verbose', ['    No changes to report'])
+
+    # Clean Pending Alert Events
+    sql.execute ("""UPDATE Devices SET dev_LastNotification = ?
+                    WHERE dev_MAC IN (SELECT eve_MAC FROM Events
+                                      WHERE eve_PendingAlertEmail = 1)
+                 """, (datetime.datetime.now(),) )
+    sql.execute ("""UPDATE Events SET eve_PendingAlertEmail = 0
+                    WHERE eve_PendingAlertEmail = 1""")
+
+    # clear plugin events
+    sql.execute ("DELETE FROM Plugins_Events")
+
+    changedPorts_json_struc = None
+
+    # DEBUG - print number of rows updated
+    mylog('info', ['[', timeNow(), '] Notifications: ', sql.rowcount])
+
+    # Commit changes
+    db.commitDB()
+
+
+#-------------------------------------------------------------------------------
+def check_config(service):
+
+    if service == 'email':
+        if SMTP_SERVER == '' or REPORT_FROM == '' or REPORT_TO == '':
+            mylog('none', ['    Error: Email service not set up correctly. Check your pialert.conf SMTP_*, REPORT_FROM and REPORT_TO variables.'])
+            return False
+        else:
+            return True
+
+    if service == 'apprise':
+        if APPRISE_URL == '' or APPRISE_HOST == '':
+            mylog('none', ['    Error: Apprise service not set up correctly. Check your pialert.conf APPRISE_* variables.'])
+            return False
+        else:
+            return True
+
+    if service == 'webhook':
+        if WEBHOOK_URL == '':
+            mylog('none', ['    Error: Webhook service not set up correctly. Check your pialert.conf WEBHOOK_* variables.'])
+            return False
+        else:
+            return True
+
+    if service == 'ntfy':
+        if NTFY_HOST == '' or NTFY_TOPIC == '':
+            mylog('none', ['    Error: NTFY service not set up correctly. Check your pialert.conf NTFY_* variables.'])
+            return False
+        else:
+            return True
+
+    if service == 'pushsafer':
+        if PUSHSAFER_TOKEN == 'ApiKey':
+            mylog('none', ['    Error: Pushsafer service not set up correctly. Check your pialert.conf PUSHSAFER_TOKEN variable.'])
+            return False
+        else:
+            return True
+
+    if service == 'mqtt':
+        if MQTT_BROKER == '' or MQTT_PORT == '' or MQTT_USER == '' or MQTT_PASSWORD == '':
+            mylog('none', ['    Error: MQTT service not set up correctly. Check your pialert.conf MQTT_* variables.'])
+            return False
+        else:
+            return True
+
+#-------------------------------------------------------------------------------
+def format_table (html, thValue, props, newThValue = ''):
+
+    if newThValue == '':
+        newThValue = thValue
+
+    return html.replace("<th>"+thValue+"</th>", "<th "+props+">"+newThValue+"</th>" )
+
+#-------------------------------------------------------------------------------
+def format_report_section (pActive, pSection, pTable, pText, pHTML):
+    global mail_text
+    global mail_html
+
+    # Replace section text
+    if pActive :
+        mail_text = mail_text.replace ('<'+ pTable +'>', pText)
+        mail_html = mail_html.replace ('<'+ pTable +'>', pHTML)
+
+        mail_text = remove_tag (mail_text, pSection)
+        mail_html = remove_tag (mail_html, pSection)
+    else:
+        mail_text = remove_section (mail_text, pSection)
+        mail_html = remove_section (mail_html, pSection)
+
+#-------------------------------------------------------------------------------
+def remove_section (pText, pSection):
+    # Search section into the text
+    if pText.find ('<'+ pSection +'>') >=0 \
+    and pText.find ('</'+ pSection +'>') >=0 :
+        # return text without the section
+        return pText[:pText.find ('<'+ pSection+'>')] + \
+               pText[pText.find ('</'+ pSection +'>') + len (pSection) +3:]
+    else :
+        # return all text
+        return pText
+
+#-------------------------------------------------------------------------------
+def remove_tag (pText, pTag):
+    # return text without the tag
+    return pText.replace ('<'+ pTag +'>','').replace ('</'+ pTag +'>','')
+
+
+#-------------------------------------------------------------------------------
+# Reporting
+#-------------------------------------------------------------------------------
+def send_email (pText, pHTML):
+
+    # Print more info for debugging if LOG_LEVEL == 'debug'
+    if LOG_LEVEL == 'debug':
+        print_log ('REPORT_TO: ' + hide_email(str(REPORT_TO)) + ' SMTP_USER: ' + hide_email(str(SMTP_USER)))
+
+    # Compose email
+    msg = MIMEMultipart('alternative')
+    msg['Subject'] = 'Pi.Alert Report'
+    msg['From'] = REPORT_FROM
+    msg['To'] = REPORT_TO
+    msg.attach (MIMEText (pText, 'plain'))
+    msg.attach (MIMEText (pHTML, 'html'))
+
+    failedAt = ''
+
+    failedAt = print_log ('SMTP try')
+
+    try:
+        # Send mail
+        failedAt = print_log('Trying to open connection to ' + str(SMTP_SERVER) + ':' + str(SMTP_PORT))
+
+        if SMTP_FORCE_SSL:
+            failedAt = print_log('SMTP_FORCE_SSL == True so using .SMTP_SSL()')
+            if SMTP_PORT == 0:
+                failedAt = print_log('SMTP_PORT == 0 so sending .SMTP_SSL(SMTP_SERVER)')
+                smtp_connection = smtplib.SMTP_SSL(SMTP_SERVER)
+            else:
+                failedAt = print_log('SMTP_PORT != 0 so sending .SMTP_SSL(SMTP_SERVER, SMTP_PORT)')
+                smtp_connection = smtplib.SMTP_SSL(SMTP_SERVER, SMTP_PORT)
+
+        else:
+            failedAt = print_log('SMTP_FORCE_SSL == False so using .SMTP()')
+            if SMTP_PORT == 0:
+                failedAt = print_log('SMTP_PORT == 0 so sending .SMTP(SMTP_SERVER)')
+                smtp_connection = smtplib.SMTP (SMTP_SERVER)
+            else:
+                failedAt = print_log('SMTP_PORT != 0 so sending .SMTP(SMTP_SERVER, SMTP_PORT)')
+                smtp_connection = smtplib.SMTP (SMTP_SERVER, SMTP_PORT)
+
+        failedAt = print_log('Setting SMTP debug level')
+
+        # Log level set to debug of the communication between SMTP server and client
+        if LOG_LEVEL == 'debug':
+            smtp_connection.set_debuglevel(1)
+
+        failedAt = print_log( 'Sending .ehlo()')
+        smtp_connection.ehlo()
+
+        if not SMTP_SKIP_TLS:
+            failedAt = print_log('SMTP_SKIP_TLS == False so sending .starttls()')
+            smtp_connection.starttls()
+            failedAt = print_log('SMTP_SKIP_TLS == False so sending .ehlo()')
+            smtp_connection.ehlo()
+        if not SMTP_SKIP_LOGIN:
+            failedAt = print_log('SMTP_SKIP_LOGIN == False so sending .login()')
+            smtp_connection.login (SMTP_USER, SMTP_PASS)
+
+        failedAt = print_log('Sending .sendmail()')
+        smtp_connection.sendmail (REPORT_FROM, REPORT_TO, msg.as_string())
+        smtp_connection.quit()
+    except smtplib.SMTPAuthenticationError as e:
+        mylog('none', ['    ERROR: Failed at - ', failedAt])
+        mylog('none', ['    ERROR: Couldn\'t connect to the SMTP server (SMTPAuthenticationError), skipping Email (enable LOG_LEVEL=debug for more logging)'])
+    except smtplib.SMTPServerDisconnected as e:
+        mylog('none', ['    ERROR: Failed at - ', failedAt])
+        mylog('none', ['    ERROR: Couldn\'t connect to the SMTP server (SMTPServerDisconnected), skipping Email (enable LOG_LEVEL=debug for more logging)'])
+
+    print_log('    DEBUG: Last executed - ' + str(failedAt))
+
+#-------------------------------------------------------------------------------
+def send_ntfy (_Text):
+    headers = {
+        "Title": "Pi.Alert Notification",
+        "Actions": "view, Open Dashboard, "+ REPORT_DASHBOARD_URL,
+        "Priority": "urgent",
+        "Tags": "warning"
+    }
+    # if username and password are set generate hash and update header
+    if NTFY_USER != "" and NTFY_PASSWORD != "":
+        # Generate hash for basic auth
+        usernamepassword = "{}:{}".format(NTFY_USER,NTFY_PASSWORD)
+        basichash = b64encode(bytes(NTFY_USER + ':' + NTFY_PASSWORD, "utf-8")).decode("ascii")
+
+        # add authorization header with hash
+        headers["Authorization"] = "Basic {}".format(basichash)
+
+    requests.post("{}/{}".format( NTFY_HOST, NTFY_TOPIC),
+                  data=_Text,
+                  headers=headers)
+
+def send_pushsafer (_Text):
+    url = 'https://www.pushsafer.com/api'
+    post_fields = {
+        "t" : 'Pi.Alert Message',
+        "m" : _Text,
+        "s" : 11,
+        "v" : 3,
+        "i" : 148,
+        "c" : '#ef7f7f',
+        "d" : 'a',
+        "u" : REPORT_DASHBOARD_URL,
+        "ut" : 'Open Pi.Alert',
+        "k" : PUSHSAFER_TOKEN,
+    }
+    requests.post(url, data=post_fields)
+
+#-------------------------------------------------------------------------------
+def send_webhook (_json, _html):
+
+    # use data type based on specified payload type
+    if WEBHOOK_PAYLOAD == 'json':
+        payloadData = _json
+    if WEBHOOK_PAYLOAD == 'html':
+        payloadData = _html
+    if WEBHOOK_PAYLOAD == 'text':
+        payloadData = to_text(_json)
+
+    # Define slack-compatible payload
+    _json_payload = { "text": payloadData } if WEBHOOK_PAYLOAD == 'text' else {
+        "username": "Pi.Alert",
+        "text": "There are new notifications",
+        "attachments": [{
+            "title": "Pi.Alert Notifications",
+            "title_link": REPORT_DASHBOARD_URL,
+            "text": payloadData
+        }]
+    }
+
+    # DEBUG - Write the json payload into a log file for debugging
+    write_file (logPath + '/webhook_payload.json', json.dumps(_json_payload))
+
+    # Using the Slack-Compatible Webhook endpoint for Discord so that the same payload can be used for both
+    if(WEBHOOK_URL.startswith('https://discord.com/api/webhooks/') and not WEBHOOK_URL.endswith("/slack")):
+        _WEBHOOK_URL = f"{WEBHOOK_URL}/slack"
+        curlParams = ["curl","-i","-H", "Content-Type:application/json" ,"-d", json.dumps(_json_payload), _WEBHOOK_URL]
+    else:
+        _WEBHOOK_URL = WEBHOOK_URL
+        curlParams = ["curl","-i","-X", WEBHOOK_REQUEST_METHOD ,"-H", "Content-Type:application/json" ,"-d", json.dumps(_json_payload), _WEBHOOK_URL]
+
+    # execute CURL call
+    try:
+        # try running a subprocess
+        mylog('debug', curlParams)
+        p = subprocess.Popen(curlParams, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+
+        stdout, stderr = p.communicate()
+
+        # write stdout and stderr into .log files for debugging if needed
+        logResult (stdout, stderr) # TO-DO should be changed to mylog
+    except subprocess.CalledProcessError as e:
+        # An error occurred, handle it
+        mylog('none', [e.output])
+
+#-------------------------------------------------------------------------------
+def send_apprise (html, text):
+    #Define Apprise compatible payload (https://github.com/caronc/apprise-api#stateless-solution)
+    payload = html
+
+    if APPRISE_PAYLOAD == 'text':
+        payload = text
+
+    _json_payload={
+        "urls": APPRISE_URL,
+        "title": "Pi.Alert Notifications",
+        "format": APPRISE_PAYLOAD,
+        "body": payload
+    }
+
+    try:
+        # try running a subprocess
+        p = subprocess.Popen(["curl","-i","-X", "POST" ,"-H", "Content-Type:application/json" ,"-d", json.dumps(_json_payload), APPRISE_HOST], stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+        stdout, stderr = p.communicate()
+        # write stdout and stderr into .log files for debugging if needed
+        logResult (stdout, stderr) # TO-DO should be changed to mylog
+    except subprocess.CalledProcessError as e:
+        # An error occurred, handle it
+        mylog('none', [e.output])
+
+
+def to_text(_json):
+    payloadData = ""
+    if len(_json['internet']) > 0 and 'internet' in INCLUDED_SECTIONS:
+        payloadData += "INTERNET\n"
+        for event in _json['internet']:
+            payloadData += event[3] + ' on ' + event[2] + '. ' + event[4] + '. New address:' + event[1] + '\n'
+
+    if len(_json['new_devices']) > 0 and 'new_devices' in INCLUDED_SECTIONS:
+        payloadData += "NEW DEVICES:\n"
+        for event in _json['new_devices']:
+            if event[4] is None:
+                event[4] = event[11]
+            payloadData += event[1] + ' - ' + event[4] + '\n'
+
+    if len(_json['down_devices']) > 0 and 'down_devices' in INCLUDED_SECTIONS:
+        write_file (logPath + '/down_devices_example.log', _json['down_devices'])
+        payloadData += 'DOWN DEVICES:\n'
+        for event in _json['down_devices']:
+            if event[4] is None:
+                event[4] = event[11]
+            payloadData += event[1] + ' - ' + event[4] + '\n'
+
+    if len(_json['events']) > 0 and 'events' in INCLUDED_SECTIONS:
+        payloadData += "EVENTS:\n"
+        for event in _json['events']:
+            if event[8] != "Internet":
+                payloadData += event[8] + " on " + event[1] + " " + event[3] + " at " + event[2] + "\n"
+
+    return payloadData
+
+
+
+#-------------------------------------------------------------------------------
+def skip_repeated_notifications (db):
+
+    # Skip repeated notifications
+    # due to strftime overflow --> use "strftime / 60"
+    print_log ('Skip Repeated')
+    db.sql.execute ("""UPDATE Events SET eve_PendingAlertEmail = 0
+                       WHERE eve_PendingAlertEmail = 1 AND eve_MAC IN
+                           (
+                               SELECT dev_MAC FROM Devices
+                               WHERE dev_LastNotification IS NOT NULL
+                                 AND dev_LastNotification <>""
+                                 AND (strftime("%s", dev_LastNotification)/60 +
+                                      dev_SkipRepeated * 60) >
+                                     (strftime('%s','now','localtime')/60 )
+                           )
+                    """ )
+    print_log ('Skip Repeated end')
+
+    db.commitDB()
\ No newline at end of file
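A note on the "strftime / 60" workaround in skip_repeated_notifications above: SQLite's strftime("%s", ...) returns seconds since the epoch, and the comparison is done in whole minutes; dev_SkipRepeated appears to be configured in hours, hence the extra * 60. A minimal Python sketch of the same arithmetic, with illustrative epoch values (not taken from a real database):

    # last_notification_s stands in for strftime("%s", dev_LastNotification),
    # now_s for strftime('%s','now','localtime'); both are seconds since epoch.
    last_notification_s = 1_684_900_000
    now_s               = 1_684_950_000
    skip_repeated_hours = 24              # dev_SkipRepeated

    # Work in whole minutes, as the SQL does, and suppress the alert while
    # the pause window is still open.
    suppress = (last_notification_s // 60 + skip_repeated_hours * 60) > (now_s // 60)
    print(suppress)   # True: ~13.9 hours elapsed, the 24 hour pause still applies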
From 3adfa2c268d1133ea2845aa8a0eab1b23f61322e Mon Sep 17 00:00:00 2001
From: Data-Monkey
Date: Tue, 23 May 2023 21:18:44 +1000
Subject: [PATCH 08/20] resolved circular imports

---
 db/pialert.db          | Bin 245760 -> 245760 bytes
 pialert/conf.py        |   3 +-
 pialert/database.py    |  17 +--
 pialert/device.py      |  12 +-
 pialert/helper.py      | 293 ++++------------------------------------
 pialert/initialise.py  | 263 ++++++++++++++++++++++++++++++++++++
 pialert/internet.py    |   4 +-
 pialert/mac_vendor.py  |   8 +-
 pialert/networkscan.py |  19 +--
 pialert/nmapscan.py    |   4 +-
 pialert/pholusscan.py  |   5 +-
 pialert/pialert.py     |  37 +++---
 pialert/pihole.py      |  11 +-
 pialert/plugin.py      |   6 +-
 pialert/reporting.py   |   4 +-
 15 files changed, 360 insertions(+), 326 deletions(-)
 create mode 100644 pialert/initialise.py
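The shape of the fix in this patch, sketched with illustrative module names (not the actual Pi.Alert files): when two modules import each other at load time, Python hits a partially initialised module and the import fails; moving the shared helper into a leaf module that imports neither of them turns the cycle into a one-way dependency, which is what moving updateState out of database.py and importConfigs into its own initialise.py accomplishes.

    # before (broken): a.py runs "from b import shared" while b.py runs
    # "from a import something"; whichever module loads first sees a
    # partially initialised partner and the import fails.

    # after: the shared function lives in a leaf module with no project imports.
    # leaf.py
    def shared(value):
        """Helper that both a.py and b.py can import without creating a cycle."""
        return value

    # a.py and b.py then both start with:
    #     from leaf import shared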
diff --git a/db/pialert.db b/db/pialert.db
index 16f9e46fa7d7de23b58a85777e24cdcfca00ccd2..623697800f95c196d98f45a2a461bd137bf22354 100755
GIT binary patch
delta 223
delta 223

diff --git a/pialert/helper.py b/pialert/helper.py
--- a/pialert/helper.py
+++ b/pialert/helper.py
-    REPORT_FROM = ccd('REPORT_FROM', 'Pi.Alert <user@gmail.com>' , c_d, 'Email Subject', 'text', '', 'Email')
-    SMTP_SKIP_LOGIN = ccd('SMTP_SKIP_LOGIN', False , c_d, 'SMTP skip login', 'boolean', '', 'Email')
-    SMTP_USER = ccd('SMTP_USER', '' , c_d, 'SMTP user', 'text', '', 'Email')
-    SMTP_PASS = ccd('SMTP_PASS', '' , c_d, 'SMTP password', 'password', '', 'Email')
-    SMTP_SKIP_TLS = ccd('SMTP_SKIP_TLS', False , c_d, 'SMTP skip TLS', 'boolean', '', 'Email')
-    SMTP_FORCE_SSL = ccd('SMTP_FORCE_SSL', False , c_d, 'Force SSL', 'boolean', '', 'Email')
-
-    # Webhooks
-    REPORT_WEBHOOK = ccd('REPORT_WEBHOOK', False , c_d, 'Enable Webhooks', 'boolean', '', 'Webhooks', ['test'])
-    WEBHOOK_URL = ccd('WEBHOOK_URL', '' , c_d, 'Target URL', 'text', '', 'Webhooks')
-    WEBHOOK_PAYLOAD = ccd('WEBHOOK_PAYLOAD', 'json' , c_d, 'Payload type', 'selecttext', "['json', 'html', 'text']", 'Webhooks')
-    WEBHOOK_REQUEST_METHOD = ccd('WEBHOOK_REQUEST_METHOD', 'GET' , c_d, 'Req type', 'selecttext', "['GET', 'POST', 'PUT']", 'Webhooks')
-
-    # Apprise
-    REPORT_APPRISE = ccd('REPORT_APPRISE', False , c_d, 'Enable Apprise', 'boolean', '', 'Apprise', ['test'])
-    APPRISE_HOST = ccd('APPRISE_HOST', '' , c_d, 'Apprise host URL', 'text', '', 'Apprise')
-    APPRISE_URL = ccd('APPRISE_URL', '' , c_d, 'Apprise notification URL', 'text', '', 'Apprise')
-    APPRISE_PAYLOAD = ccd('APPRISE_PAYLOAD', 'html' , c_d, 'Payload type', 'selecttext', "['html', 'text']", 'Apprise')
-
-    # NTFY
-    REPORT_NTFY = ccd('REPORT_NTFY', False , c_d, 'Enable NTFY', 'boolean', '', 'NTFY', ['test'])
-    NTFY_HOST = ccd('NTFY_HOST', 'https://ntfy.sh' , c_d, 'NTFY host URL', 'text', '', 'NTFY')
-    NTFY_TOPIC = ccd('NTFY_TOPIC', '' , c_d, 'NTFY topic', 'text', '', 'NTFY')
-    NTFY_USER = ccd('NTFY_USER', '' , c_d, 'NTFY user', 'text', '', 'NTFY')
-    NTFY_PASSWORD = ccd('NTFY_PASSWORD', '' , c_d, 'NTFY password', 'password', '', 'NTFY')
-
-    # PUSHSAFER
-    REPORT_PUSHSAFER = ccd('REPORT_PUSHSAFER', False , c_d, 'Enable PUSHSAFER', 'boolean', '', 'PUSHSAFER', ['test'])
-    PUSHSAFER_TOKEN = ccd('PUSHSAFER_TOKEN', 'ApiKey' , c_d, 'PUSHSAFER token', 'text', '', 'PUSHSAFER')
-
-    # MQTT
-    REPORT_MQTT = ccd('REPORT_MQTT', False , c_d, 'Enable MQTT', 'boolean', '', 'MQTT')
-    MQTT_BROKER = ccd('MQTT_BROKER', '' , c_d, 'MQTT broker', 'text', '', 'MQTT')
-    MQTT_PORT = ccd('MQTT_PORT', 1883 , c_d, 'MQTT broker port', 'integer', '', 'MQTT')
-    MQTT_USER = ccd('MQTT_USER', '' , c_d, 'MQTT user', 'text', '', 'MQTT')
-    MQTT_PASSWORD = ccd('MQTT_PASSWORD', '' , c_d, 'MQTT password', 'password', '', 'MQTT')
-    MQTT_QOS = ccd('MQTT_QOS', 0 , c_d, 'MQTT Quality of Service', 'selectinteger', "['0', '1', '2']", 'MQTT')
-    MQTT_DELAY_SEC = ccd('MQTT_DELAY_SEC', 2 , c_d, 'MQTT delay', 'selectinteger', "['2', '3', '4', '5']", 'MQTT')
-
-    # DynDNS
-    DDNS_ACTIVE = ccd('DDNS_ACTIVE', False , c_d, 'Enable DynDNS', 'boolean', '', 'DynDNS')
-    DDNS_DOMAIN = ccd('DDNS_DOMAIN', 'your_domain.freeddns.org' , c_d, 'DynDNS domain URL', 'text', '', 'DynDNS')
-    DDNS_USER = ccd('DDNS_USER', 'dynu_user' , c_d, 'DynDNS user', 'text', '', 'DynDNS')
-    DDNS_PASSWORD = ccd('DDNS_PASSWORD', 'A0000000B0000000C0000000D0000000' , c_d, 'DynDNS password', 'password', '', 'DynDNS')
-    DDNS_UPDATE_URL = ccd('DDNS_UPDATE_URL', 'https://api.dynu.com/nic/update?' , c_d, 'DynDNS update URL', 'text', '', 'DynDNS')
-
-    # PiHole
-    PIHOLE_ACTIVE = ccd('PIHOLE_ACTIVE', False, c_d, 'Enable PiHole mapping', 'boolean', '', 'PiHole')
-    DHCP_ACTIVE = ccd('DHCP_ACTIVE', False , c_d, 'Enable PiHole DHCP', 'boolean', '', 'PiHole')
-
-    # PHOLUS
-    PHOLUS_ACTIVE = ccd('PHOLUS_ACTIVE', False , c_d, 'Enable Pholus scans', 'boolean', '', 'Pholus')
-    PHOLUS_TIMEOUT = ccd('PHOLUS_TIMEOUT', 20 , c_d, 'Pholus timeout', 'integer', '', 'Pholus')
-    PHOLUS_FORCE = ccd('PHOLUS_FORCE', False , c_d, 'Pholus force check', 'boolean', '', 'Pholus')
-    PHOLUS_RUN = ccd('PHOLUS_RUN', 'once' , c_d, 'Pholus enable schedule', 'selecttext', "['none', 'once', 'schedule']", 'Pholus')
-    PHOLUS_RUN_TIMEOUT = ccd('PHOLUS_RUN_TIMEOUT', 600 , c_d, 'Pholus timeout schedule', 'integer', '', 'Pholus')
-    PHOLUS_RUN_SCHD = ccd('PHOLUS_RUN_SCHD', '0 4 * * *' , c_d, 'Pholus schedule', 'text', '', 'Pholus')
-    PHOLUS_DAYS_DATA = ccd('PHOLUS_DAYS_DATA', 0 , c_d, 'Pholus keep days', 'integer', '', 'Pholus')
-
-    # Nmap
-    NMAP_ACTIVE = ccd('NMAP_ACTIVE', True , c_d, 'Enable Nmap scans', 'boolean', '', 'Nmap')
-    NMAP_TIMEOUT = ccd('NMAP_TIMEOUT', 150 , c_d, 'Nmap timeout', 'integer', '', 'Nmap')
-    NMAP_RUN = ccd('NMAP_RUN', 'none' , c_d, 'Nmap enable schedule', 'selecttext', "['none', 'once', 'schedule']", 'Nmap')
-    NMAP_RUN_SCHD = ccd('NMAP_RUN_SCHD', '0 2 * * *' , c_d, 'Nmap schedule', 'text', '', 'Nmap')
-    NMAP_ARGS = ccd('NMAP_ARGS', '-p -10000' , c_d, 'Nmap custom arguments', 'text', '', 'Nmap')
-
-    # API
-    API_CUSTOM_SQL = ccd('API_CUSTOM_SQL', 'SELECT * FROM Devices WHERE dev_PresentLastScan = 0' , c_d, 'Custom endpoint', 'text', '', 'API')
-
-    # Prepare scheduler
-    global tz, mySchedules, plugins
-
-    # Init timezone in case it changed
-    tz = timezone(TIMEZONE)
-
-    # reset schedules
-    mySchedules = []
-
-    # init pholus schedule
-    pholusSchedule = Cron(PHOLUS_RUN_SCHD).schedule(start_date=datetime.datetime.now(tz))
-    mySchedules.append(schedule_class("pholus", pholusSchedule, pholusSchedule.next(), False))
-
-    # init nmap schedule
-    nmapSchedule = Cron(NMAP_RUN_SCHD).schedule(start_date=datetime.datetime.now(tz))
-    mySchedules.append(schedule_class("nmap", nmapSchedule, nmapSchedule.next(), False))
-
-    # Format and prepare the list of subnets
-    userSubnets = updateSubnets(SCAN_SUBNETS)
-
-    # Plugins START
-    # -----------------
-    if ENABLE_PLUGINS:
-        plugins = get_plugins_configs()
-
-        mylog('none', ['[', timeNow(), '] Plugins: Number of dynamically loaded plugins: ', len(plugins)])
-
-        # handle plugins
-        for plugin in plugins:
-            print_plugin_info(plugin, ['display_name','description'])
-
-            pref = plugin["unique_prefix"]
-
-            # if plugin["enabled"] == 'true':
-
-            # collect plugin level language strings
-            collect_lang_strings(db, plugin, pref)
-
-            for set in plugin["settings"]:
-                setFunction = set["function"]
-                # Setting code name / key
-                key = pref + "_" + setFunction
-
-                v = ccd(key, set["default_value"], c_d, set["name"][0]["string"], set["type"] , str(set["options"]), pref)
-
-                # Save the user defined value into the object
-                set["value"] = v
-
-                # Setup schedules
-                if setFunction == 'RUN_SCHD':
-                    newSchedule = Cron(v).schedule(start_date=datetime.datetime.now(tz))
-                    mySchedules.append(schedule_class(pref, newSchedule, newSchedule.next(), False))
-
-                # Collect settings related language strings
-                collect_lang_strings(db, set, pref + "_" + set["function"])
-
-    plugins_once_run = False
-    # -----------------
-    # Plugins END
-
-
-
-
-
-    # Insert settings into the DB
-    sql.execute ("DELETE FROM Settings")
-    sql.executemany ("""INSERT INTO Settings ("Code_Name", "Display_Name", "Description", "Type", "Options",
-         "RegEx", "Value", "Group", "Events" ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)""", mySettingsSQLsafe)
-
-    # Used to determine the next import
-    lastTimeImported = time.time()
-
-    # Is used to display a message in the UI when old (outdated) settings are loaded
-    initOrSetParam(db, "Back_Settings_Imported",(round(time.time() * 1000),) )
-
-    #commitDB(sql_connection)
-    db.commitDB()
-
-    # update only the settings datasource
-    # update_api(False, ["settings"])
-    # TO DO this creates a circular reference between API and HELPER !
-
-    mylog('info', ['[', timeNow(), '] Config: Imported new config'])
-
-
-#-------------------------------------------------------------------------------
-class json_struc:
-    def __init__(self, jsn, columnNames):
-        self.json = jsn
-        self.columnNames = columnNames
+
 
 #-------------------------------------------------------------------------------
 # Creates a JSON object from a DB row
@@ -570,4 +317,20 @@ def generate_mac_links (html, deviceUrl):
 
     for mac in MACs:
         html = html.replace('<td>' + mac + '</td>','<td><a href="' + deviceUrl + mac + '">' + mac + '</a></td>')
-    return html
\ No newline at end of file
+    return html
+
+
+
+#-------------------------------------------------------------------------------
+def initOrSetParam(db, parID, parValue):
+    sql = db.sql
+
+    sql.execute ("INSERT INTO Parameters(par_ID, par_Value) VALUES('"+str(parID)+"', '"+str(parValue)+"') ON CONFLICT(par_ID) DO UPDATE SET par_Value='"+str(parValue)+"' where par_ID='"+str(parID)+"'")
+
+    db.commitDB()
+
+#-------------------------------------------------------------------------------
+class json_struc:
+    def __init__(self, jsn, columnNames):
+        self.json = jsn
+        self.columnNames = columnNames
\ No newline at end of file
diff --git a/pialert/initialise.py b/pialert/initialise.py
new file mode 100644
index 00000000..6202fed1
--- /dev/null
+++ b/pialert/initialise.py
@@ -0,0 +1,263 @@
+
+import os
+import time
+from pytz import timezone
+from cron_converter import Cron
+from pathlib import Path
+import datetime
+
+from conf import *
+from const import *
+from helper import collect_lang_strings, schedule_class, timeNow, updateSubnets, initOrSetParam
+from logger import mylog
+from plugin import get_plugins_configs, print_plugin_info
+
+#===============================================================================
+# Initialise user defined values
+#===============================================================================
+# We need access to the DB to save new values so need to define DB access methods first
+#-------------------------------------------------------------------------------
+
+#-------------------------------------------------------------------------------
+# Import user values
+# Check config dictionary
+def ccd(key, default, config, name, inputtype, options, group, events=[], desc = "", regex = ""):
+    result = default
+
+    # use existing value if already supplied, otherwise default value is used
+    if key in config:
+        result = config[key]
+
+    if inputtype == 'text':
+        result = result.replace('\'', "{s-quote}")
+
+    mySettingsSQLsafe.append((key, name, desc, inputtype, options, regex, str(result), group, str(events)))
+    mySettings.append((key, name, desc, inputtype, options, regex, result, group, str(events)))
+
+    return result
+#-------------------------------------------------------------------------------
+
+def importConfigs (db):
+
+    sql = db.sql
+
+    # Specify globals so they can be overwritten with the new config
+    global lastTimeImported, mySettings, mySettingsSQLsafe, plugins, plugins_once_run
+    lastTimeImported = 0
+    # General
+    global ENABLE_ARPSCAN, SCAN_SUBNETS, LOG_LEVEL, TIMEZONE, ENABLE_PLUGINS, PIALERT_WEB_PROTECTION, PIALERT_WEB_PASSWORD, INCLUDED_SECTIONS, SCAN_CYCLE_MINUTES, DAYS_TO_KEEP_EVENTS, REPORT_DASHBOARD_URL, DIG_GET_IP_ARG, UI_LANG
+    # Email
+    global REPORT_MAIL, SMTP_SERVER, SMTP_PORT, REPORT_TO, REPORT_FROM, SMTP_SKIP_LOGIN, SMTP_USER, SMTP_PASS, SMTP_SKIP_TLS, SMTP_FORCE_SSL
+    # Webhooks
+    global REPORT_WEBHOOK, WEBHOOK_URL, WEBHOOK_PAYLOAD, WEBHOOK_REQUEST_METHOD
+    # Apprise
+    global REPORT_APPRISE, APPRISE_HOST, APPRISE_URL, APPRISE_PAYLOAD
+    # NTFY
+    global REPORT_NTFY, NTFY_HOST, NTFY_TOPIC, NTFY_USER, NTFY_PASSWORD
+    # PUSHSAFER
+    global REPORT_PUSHSAFER, PUSHSAFER_TOKEN
+    # MQTT
+    global REPORT_MQTT, MQTT_BROKER, MQTT_PORT, MQTT_USER, MQTT_PASSWORD, MQTT_QOS, MQTT_DELAY_SEC
+    # DynDNS
+    global DDNS_ACTIVE, DDNS_DOMAIN, DDNS_USER, DDNS_PASSWORD, DDNS_UPDATE_URL
+    # PiHole
+    global PIHOLE_ACTIVE, DHCP_ACTIVE
+    # Pholus
+    global PHOLUS_ACTIVE, PHOLUS_TIMEOUT, PHOLUS_FORCE, PHOLUS_DAYS_DATA, PHOLUS_RUN, PHOLUS_RUN_SCHD, PHOLUS_RUN_TIMEOUT
+    # Nmap
+    global NMAP_ACTIVE, NMAP_TIMEOUT, NMAP_RUN, NMAP_RUN_SCHD, NMAP_ARGS
+    # API
+    global API_CUSTOM_SQL
+
+    # get config file
+    config_file = Path(fullConfPath)
+
+    # Skip import if last time of import is NEWER than file age
+    if (os.path.getmtime(config_file) < lastTimeImported) :
+        return
+
+    mySettings = [] # reset settings
+    mySettingsSQLsafe = [] # same as above but safe to be passed into a SQL query
+
+    # load the variables from pialert.conf
+    code = compile(config_file.read_text(), config_file.name, "exec")
+    c_d = {} # config dictionary
+    exec(code, {"__builtins__": {}}, c_d)
+
+    # Import setting if found in the dictionary
+    # General
+    ENABLE_ARPSCAN = ccd('ENABLE_ARPSCAN', True , c_d, 'Enable arpscan', 'boolean', '', 'General', ['run'])
+    SCAN_SUBNETS = ccd('SCAN_SUBNETS', ['192.168.1.0/24 --interface=eth1', '192.168.1.0/24 --interface=eth0'] , c_d, 'Subnets to scan', 'subnets', '', 'General')
+    LOG_LEVEL = ccd('LOG_LEVEL', 'verbose' , c_d, 'Log verboseness', 'selecttext', "['none', 'minimal', 'verbose', 'debug']", 'General')
+    TIMEZONE = ccd('TIMEZONE', 'Europe/Berlin' , c_d, 'Time zone', 'text', '', 'General')
+    ENABLE_PLUGINS = ccd('ENABLE_PLUGINS', True , c_d, 'Enable plugins', 'boolean', '', 'General')
+    PIALERT_WEB_PROTECTION = ccd('PIALERT_WEB_PROTECTION', False , c_d, 'Enable logon', 'boolean', '', 'General')
+    PIALERT_WEB_PASSWORD = ccd('PIALERT_WEB_PASSWORD', '8d969eef6ecad3c29a3a629280e686cf0c3f5d5a86aff3ca12020c923adc6c92' , c_d, 'Logon password', 'readonly', '', 'General')
+    INCLUDED_SECTIONS = ccd('INCLUDED_SECTIONS', ['internet', 'new_devices', 'down_devices', 'events', 'ports'] , c_d, 'Notify on', 'multiselect', "['internet', 'new_devices', 'down_devices', 'events', 'ports', 'plugins']", 'General')
+    SCAN_CYCLE_MINUTES = ccd('SCAN_CYCLE_MINUTES', 5 , c_d, 'Scan cycle delay (m)', 'integer', '', 'General')
+    DAYS_TO_KEEP_EVENTS = ccd('DAYS_TO_KEEP_EVENTS', 90 , c_d, 'Delete events days', 'integer', '', 'General')
+    REPORT_DASHBOARD_URL = ccd('REPORT_DASHBOARD_URL', 'http://pi.alert/' , c_d, 'PiAlert URL', 'text', '', 'General')
+    DIG_GET_IP_ARG = ccd('DIG_GET_IP_ARG', '-4 myip.opendns.com @resolver1.opendns.com' , c_d, 'DIG arguments', 'text', '', 'General')
+    UI_LANG = ccd('UI_LANG', 'English' , c_d, 'Language Interface', 'selecttext', "['English', 'German', 'Spanish']", 'General')
+    UI_PRESENCE = ccd('UI_PRESENCE', ['online', 'offline', 'archived'] , c_d, 'Include in presence', 'multiselect', "['online', 'offline', 'archived']", 'General')
+
+    # Email
+    REPORT_MAIL = ccd('REPORT_MAIL', False , c_d, 'Enable email', 'boolean', '', 'Email', ['test'])
+    SMTP_SERVER = ccd('SMTP_SERVER', '' , c_d,'SMTP server URL', 'text', '', 'Email')
+    SMTP_PORT = ccd('SMTP_PORT', 587 , c_d, 'SMTP port', 'integer', '', 'Email')
+    REPORT_TO = ccd('REPORT_TO', 'user@gmail.com' , c_d, 'Email to', 'text', '', 'Email')
+    REPORT_FROM = ccd('REPORT_FROM', 'Pi.Alert <user@gmail.com>' , c_d, 'Email Subject', 'text', '', 'Email')
+    SMTP_SKIP_LOGIN = ccd('SMTP_SKIP_LOGIN', False , c_d, 'SMTP skip login', 'boolean', '', 'Email')
+    SMTP_USER = ccd('SMTP_USER', '' , c_d, 'SMTP user', 'text', '', 'Email')
+    SMTP_PASS = ccd('SMTP_PASS', '' , c_d, 'SMTP password', 'password', '', 'Email')
+    SMTP_SKIP_TLS = ccd('SMTP_SKIP_TLS', False , c_d, 'SMTP skip TLS', 'boolean', '', 'Email')
+    SMTP_FORCE_SSL = ccd('SMTP_FORCE_SSL', False , c_d, 'Force SSL', 'boolean', '', 'Email')
+
+    # Webhooks
+    REPORT_WEBHOOK = ccd('REPORT_WEBHOOK', False , c_d, 'Enable Webhooks', 'boolean', '', 'Webhooks', ['test'])
+    WEBHOOK_URL = ccd('WEBHOOK_URL', '' , c_d, 'Target URL', 'text', '', 'Webhooks')
+    WEBHOOK_PAYLOAD = ccd('WEBHOOK_PAYLOAD', 'json' , c_d, 'Payload type', 'selecttext', "['json', 'html', 'text']", 'Webhooks')
+    WEBHOOK_REQUEST_METHOD = ccd('WEBHOOK_REQUEST_METHOD', 'GET' , c_d, 'Req type', 'selecttext', "['GET', 'POST', 'PUT']", 'Webhooks')
+
+    # Apprise
+    REPORT_APPRISE = ccd('REPORT_APPRISE', False , c_d, 'Enable Apprise', 'boolean', '', 'Apprise', ['test'])
+    APPRISE_HOST = ccd('APPRISE_HOST', '' , c_d, 'Apprise host URL', 'text', '', 'Apprise')
+    APPRISE_URL = ccd('APPRISE_URL', '' , c_d, 'Apprise notification URL', 'text', '', 'Apprise')
+    APPRISE_PAYLOAD = ccd('APPRISE_PAYLOAD', 'html' , c_d, 'Payload type', 'selecttext', "['html', 'text']", 'Apprise')
+
+    # NTFY
+    REPORT_NTFY = ccd('REPORT_NTFY', False , c_d, 'Enable NTFY', 'boolean', '', 'NTFY', ['test'])
+    NTFY_HOST = ccd('NTFY_HOST', 'https://ntfy.sh' , c_d, 'NTFY host URL', 'text', '', 'NTFY')
+    NTFY_TOPIC = ccd('NTFY_TOPIC', '' , c_d, 'NTFY topic', 'text', '', 'NTFY')
+    NTFY_USER = ccd('NTFY_USER', '' , c_d, 'NTFY user', 'text', '', 'NTFY')
+    NTFY_PASSWORD = ccd('NTFY_PASSWORD', '' , c_d, 'NTFY password', 'password', '', 'NTFY')
+
+    # PUSHSAFER
+    REPORT_PUSHSAFER = ccd('REPORT_PUSHSAFER', False , c_d, 'Enable PUSHSAFER', 'boolean', '', 'PUSHSAFER', ['test'])
+    PUSHSAFER_TOKEN = ccd('PUSHSAFER_TOKEN', 'ApiKey' , c_d, 'PUSHSAFER token', 'text', '', 'PUSHSAFER')
+
+    # MQTT
+    REPORT_MQTT = ccd('REPORT_MQTT', False , c_d, 'Enable MQTT', 'boolean', '', 'MQTT')
+    MQTT_BROKER = ccd('MQTT_BROKER', '' , c_d, 'MQTT broker', 'text', '', 'MQTT')
+    MQTT_PORT = ccd('MQTT_PORT', 1883 , c_d, 'MQTT broker port', 'integer', '', 'MQTT')
+    MQTT_USER = ccd('MQTT_USER', '' , c_d, 'MQTT user', 'text', '', 'MQTT')
+    MQTT_PASSWORD = ccd('MQTT_PASSWORD', '' , c_d, 'MQTT password', 'password', '', 'MQTT')
+    MQTT_QOS = ccd('MQTT_QOS', 0 , c_d, 'MQTT Quality of Service', 'selectinteger', "['0', '1', '2']", 'MQTT')
+    MQTT_DELAY_SEC = ccd('MQTT_DELAY_SEC', 2 , c_d, 'MQTT delay', 'selectinteger', "['2', '3', '4', '5']", 'MQTT')
+
+    # DynDNS
+    DDNS_ACTIVE = ccd('DDNS_ACTIVE', False , c_d, 'Enable DynDNS', 'boolean', '', 'DynDNS')
+    DDNS_DOMAIN = ccd('DDNS_DOMAIN', 'your_domain.freeddns.org' , c_d, 'DynDNS domain URL', 'text', '', 'DynDNS')
+    DDNS_USER = ccd('DDNS_USER', 'dynu_user' , c_d, 'DynDNS user', 'text', '', 'DynDNS')
+    DDNS_PASSWORD = ccd('DDNS_PASSWORD', 'A0000000B0000000C0000000D0000000' , c_d, 'DynDNS password', 'password', '', 'DynDNS')
+    DDNS_UPDATE_URL = ccd('DDNS_UPDATE_URL', 'https://api.dynu.com/nic/update?' , c_d, 'DynDNS update URL', 'text', '', 'DynDNS')
+
+    # PiHole
+    PIHOLE_ACTIVE = ccd('PIHOLE_ACTIVE', False, c_d, 'Enable PiHole mapping', 'boolean', '', 'PiHole')
+    DHCP_ACTIVE = ccd('DHCP_ACTIVE', False , c_d, 'Enable PiHole DHCP', 'boolean', '', 'PiHole')
+
+    # PHOLUS
+    PHOLUS_ACTIVE = ccd('PHOLUS_ACTIVE', False , c_d, 'Enable Pholus scans', 'boolean', '', 'Pholus')
+    PHOLUS_TIMEOUT = ccd('PHOLUS_TIMEOUT', 20 , c_d, 'Pholus timeout', 'integer', '', 'Pholus')
+    PHOLUS_FORCE = ccd('PHOLUS_FORCE', False , c_d, 'Pholus force check', 'boolean', '', 'Pholus')
+    PHOLUS_RUN = ccd('PHOLUS_RUN', 'once' , c_d, 'Pholus enable schedule', 'selecttext', "['none', 'once', 'schedule']", 'Pholus')
+    PHOLUS_RUN_TIMEOUT = ccd('PHOLUS_RUN_TIMEOUT', 600 , c_d, 'Pholus timeout schedule', 'integer', '', 'Pholus')
+    PHOLUS_RUN_SCHD = ccd('PHOLUS_RUN_SCHD', '0 4 * * *' , c_d, 'Pholus schedule', 'text', '', 'Pholus')
+    PHOLUS_DAYS_DATA = ccd('PHOLUS_DAYS_DATA', 0 , c_d, 'Pholus keep days', 'integer', '', 'Pholus')
+
+    # Nmap
+    NMAP_ACTIVE = ccd('NMAP_ACTIVE', True , c_d, 'Enable Nmap scans', 'boolean', '', 'Nmap')
+    NMAP_TIMEOUT = ccd('NMAP_TIMEOUT', 150 , c_d, 'Nmap timeout', 'integer', '', 'Nmap')
+    NMAP_RUN = ccd('NMAP_RUN', 'none' , c_d, 'Nmap enable schedule', 'selecttext', "['none', 'once', 'schedule']", 'Nmap')
+    NMAP_RUN_SCHD = ccd('NMAP_RUN_SCHD', '0 2 * * *' , c_d, 'Nmap schedule', 'text', '', 'Nmap')
+    NMAP_ARGS = ccd('NMAP_ARGS', '-p -10000' , c_d, 'Nmap custom arguments', 'text', '', 'Nmap')
+
+    # API
+    API_CUSTOM_SQL = ccd('API_CUSTOM_SQL', 'SELECT * FROM Devices WHERE dev_PresentLastScan = 0' , c_d, 'Custom endpoint', 'text', '', 'API')
+
+    # Prepare scheduler
+    global tz, mySchedules, plugins
+
+    # Init timezone in case it changed
+    tz = timezone(TIMEZONE)
+
+    # reset schedules
+    mySchedules = []
+
+    # init pholus schedule
+    pholusSchedule = Cron(PHOLUS_RUN_SCHD).schedule(start_date=datetime.datetime.now(tz))
+    mySchedules.append(schedule_class("pholus", pholusSchedule, pholusSchedule.next(), False))
+
+    # init nmap schedule
+    nmapSchedule = Cron(NMAP_RUN_SCHD).schedule(start_date=datetime.datetime.now(tz))
+    mySchedules.append(schedule_class("nmap", nmapSchedule, nmapSchedule.next(), False))
+
+    # Format and prepare the list of subnets
+    userSubnets = updateSubnets(SCAN_SUBNETS)
+
+
+
+    # Plugins START
+    # -----------------
+    if ENABLE_PLUGINS:
+        plugins = get_plugins_configs()
+
+        mylog('none', ['[', timeNow(), '] Plugins: Number of dynamically loaded plugins: ', len(plugins)])
+
+        # handle plugins
+        for plugin in plugins:
+            print_plugin_info(plugin, ['display_name','description'])
+
+            pref = plugin["unique_prefix"]
+
+            # if plugin["enabled"] == 'true':
+
+            # collect plugin level language strings
+            collect_lang_strings(db, plugin, pref)
+
+            for set in plugin["settings"]:
+                setFunction = set["function"]
+                # Setting code name / key
+                key = pref + "_" + setFunction
+
+                v = ccd(key, set["default_value"], c_d, set["name"][0]["string"], set["type"] , str(set["options"]), pref)
+
+                # Save the user defined value into the object
+                set["value"] = v
+
+                # Setup schedules
+                if setFunction == 'RUN_SCHD':
+                    newSchedule = Cron(v).schedule(start_date=datetime.datetime.now(tz))
+                    mySchedules.append(schedule_class(pref, newSchedule, newSchedule.next(), False))
+
+                # Collect settings related language strings
+                collect_lang_strings(db, set, pref + "_" + set["function"])
+
+    plugins_once_run = False
+    # -----------------
+    # Plugins END
+
+
+
+
+
+    # Insert settings into the DB
+    sql.execute ("DELETE FROM Settings")
+    sql.executemany ("""INSERT INTO Settings ("Code_Name", "Display_Name", "Description", "Type", "Options",
+         "RegEx", "Value", "Group", "Events" ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)""", mySettingsSQLsafe)
+
+    # Used to determine the next import
+    lastTimeImported = time.time()
+
+    # Is used to display a message in the UI when old (outdated) settings are loaded
+    initOrSetParam(db, "Back_Settings_Imported",(round(time.time() * 1000),) )
+
+    #commitDB(sql_connection)
+    db.commitDB()
+
+    # update only the settings datasource
+    # update_api(False, ["settings"])
+    # TO DO this creates a circular reference between API and HELPER !
+
+    mylog('info', ['[', timeNow(), '] Config: Imported new config'])
+
diff --git a/pialert/internet.py b/pialert/internet.py
index 0f58f6b9..3b13db4f 100644
--- a/pialert/internet.py
+++ b/pialert/internet.py
@@ -4,8 +4,8 @@ import subprocess
 import re
 
 # pialert modules
-from database import updateState
-from helper import timeNow
+
+from helper import timeNow, updateState
 from logger import append_line_to_file, mylog
 from const import logPath
 from conf import DDNS_ACTIVE, DDNS_DOMAIN, DDNS_UPDATE_URL, DDNS_PASSWORD, DDNS_USER
diff --git a/pialert/mac_vendor.py b/pialert/mac_vendor.py
index f7ca6987..3d3fc83e 100644
--- a/pialert/mac_vendor.py
+++ b/pialert/mac_vendor.py
@@ -1,10 +1,10 @@
 import subprocess
 
-from pialert.database import updateState
-from pialert.helper import timeNow
-from pialert.logger import mylog
-from conf import pialertPath, vendorsDB
+from const import pialertPath, vendorsDB
+from helper import timeNow, updateState
+from logger import mylog
+
 
 #===============================================================================
 # UPDATE DEVICE MAC VENDORS
diff --git a/pialert/networkscan.py b/pialert/networkscan.py
index c3962ae3..63a81481 100644
--- a/pialert/networkscan.py
+++ b/pialert/networkscan.py
@@ -1,15 +1,17 @@
+
+from conf import DHCP_ACTIVE, PIHOLE_ACTIVE, cycle, ENABLE_ARPSCAN
 from arpscan import execute_arpscan
-from conf import DHCP_ACTIVE, ENABLE_PLUGINS, PIHOLE_ACTIVE, cycle, ENABLE_ARPSCAN
-from database import insertOnlineHistory, updateState
+from database import insertOnlineHistory
 from device import create_new_devices, print_scan_stats, save_scanned_devices, update_devices_data_from_scan, update_devices_names
 from helper import timeNow
 from logger import mylog, print_log
-from pialert.plugin import run_plugin_scripts
 from pihole import copy_pihole_network, read_DHCP_leases
 from reporting import skip_repeated_notifications
-
+
+
+
 #===============================================================================
 # SCAN NETWORK
 #===============================================================================
@@ -20,7 +22,8 @@ def scan_network (db):
     reporting = False
 
     # Header
-    updateState(db,"Scan: Network")
+    # moved updateState to main loop
+    # updateState(db,"Scan: Network")
     mylog('verbose', ['[', timeNow(), '] Scan Devices:' ])
 
     # Query ScanCycle properties
@@ -105,9 +108,9 @@ def scan_network (db):
     # Commit changes
     db.commitDB()
 
-    # Run splugin scripts which are set to run every timne after a scan finished
-    if ENABLE_PLUGINS:
-        run_plugin_scripts(db,'always_after_scan')
+    # moved plugin execution to main loop
+    # if ENABLE_PLUGINS:
+    #     run_plugin_scripts(db,'always_after_scan')
 
     return reporting
diff --git a/pialert/nmapscan.py b/pialert/nmapscan.py
index 8a6aa9e6..e5f0e1d2 100644
--- a/pialert/nmapscan.py
+++ b/pialert/nmapscan.py
@@ -3,8 +3,8 @@ import subprocess
 
 from const import logPath
 from conf import NMAP_ARGS, NMAP_TIMEOUT
-from database import updateState, sql_nmap_scan_all
-from helper import json_struc, timeNow
+from database import sql_nmap_scan_all
+from helper import json_struc, timeNow, updateState
 from logger import append_line_to_file, mylog
 
 #-------------------------------------------------------------------------------
diff --git a/pialert/pholusscan.py b/pialert/pholusscan.py
index 41847d0f..6c6f79ec 100644
--- a/pialert/pholusscan.py
+++ b/pialert/pholusscan.py
@@ -2,9 +2,8 @@ import subprocess
 import re
 
 from const import fullPholusPath, logPath
-from pialert.database import updateState
-from pialert.helper import checkIPV4, timeNow
-from pialert.logger import mylog
+from helper import checkIPV4, timeNow, updateState
+from logger import mylog
 
 #-------------------------------------------------------------------------------
diff --git a/pialert/pialert.py b/pialert/pialert.py
index f217bc0d..ae75f53a 100755
--- a/pialert/pialert.py
+++ b/pialert/pialert.py
@@ -44,24 +44,21 @@ import multiprocessing
 # pialert modules
 from const import *
 from conf import *
-# from config import DIG_GET_IP_ARG, ENABLE_PLUGINS
-from logger import append_line_to_file, mylog, print_log, logResult
-from helper import checkIPV4, filePermissions, importConfigs, isNewVersion, removeDuplicateNewLines, timeNow, write_file
-from database import *
-from internet import check_IP_format, check_internet_IP, get_internet_IP
+from logger import mylog
+from helper import filePermissions, timeNow, updateState
 from api import update_api
 from files import get_file_content
-from mqtt import mqtt_start
-from pialert.arpscan import execute_arpscan
-from pialert.mac_vendor import query_MAC_vendor, update_devices_MAC_vendors
-from pialert.networkscan import scan_network
-from pialert.nmapscan import performNmapScan
-from pialert.pholusscan import performPholusScan, resolve_device_name_pholus
-from pialert.pihole import copy_pihole_network, read_DHCP_leases
-from pialert.reporting import send_apprise, send_email, send_notifications, send_ntfy, send_pushsafer, send_webhook, skip_repeated_notifications
-from plugin import execute_plugin, get_plugin_setting, print_plugin_info, run_plugin_scripts
-
+from networkscan import scan_network
+from initialise import importConfigs
+from mac_vendor import update_devices_MAC_vendors
+from database import DB, get_all_devices, upgradeDB, sql_new_devices
+from reporting import send_apprise, send_email, send_notifications, send_ntfy, send_pushsafer, send_webhook
+from plugin import run_plugin_scripts
+# different scanners
+from pholusscan import performPholusScan
+from nmapscan import performNmapScan
+from internet import check_internet_IP
 
 # Global variables
@@ -224,13 +221,14 @@ def main ():
             last_network_scan = time_started
             cycle = 1 # network scan
             mylog('verbose', ['[', timeNow(), '] cycle:',cycle])
-
+            updateState(db,"Scan: Network")
+
             # scan_network()
             # DEBUG start ++++++++++++++++++++++++++++++++++++++++++++++++++++++
             # Start scan_network as a process
-            p = multiprocessing.Process(target=scan_network)
+            p = multiprocessing.Process(target=scan_network, args=(db,))
             p.start()
 
             # Wait for 3600 seconds (max 1h) or until process finishes
@@ -249,7 +247,10 @@ def main ():
 
             p.join()
             # DEBUG end ++++++++++++++++++++++++++++++++++++++++++++++++++++++
-
+            # Run plugin scripts which are set to run every time after a scan finished
+            if ENABLE_PLUGINS:
+                run_plugin_scripts(db,'always_after_scan')
+
         # Reporting
         if cycle in check_report:
diff --git a/pialert/pihole.py b/pialert/pihole.py
index 50d25e86..d13a1c04 100644
--- a/pialert/pihole.py
+++ b/pialert/pihole.py
@@ -1,8 +1,11 @@
-
 from const import piholeDB, piholeDhcpleases
 
 #-------------------------------------------------------------------------------
 def copy_pihole_network (db):
+    """
+    attach the PiHole Database and copy the PiHole_Network table across into the PiAlert DB
+    """
+
     sql = db.sql # TO-DO
     # Open Pi-hole DB
     sql.execute ("ATTACH DATABASE '"+ piholeDB +"' AS PH")
@@ -29,6 +32,10 @@ def copy_pihole_network (db):
 
 #-------------------------------------------------------------------------------
 def read_DHCP_leases (db):
+    """
+    read the PiHole DHCP file and insert all records into the DHCP_Leases table.
+    """
+
     sql = db.sql # TO-DO
     # Read DHCP Leases
     # Bugfix #1 - dhcp.leases: lines with different number of columns (5 col)
@@ -45,4 +52,4 @@ def read_DHCP_leases (db):
                         DHCP_IP, DHCP_Name, DHCP_MAC2)
                     VALUES (?, ?, ?, ?, ?)
                     """, data)
-
+    db.commit()
diff --git a/pialert/plugin.py b/pialert/plugin.py
index df4c4e3f..f2ce9569 100644
--- a/pialert/plugin.py
+++ b/pialert/plugin.py
@@ -6,11 +6,11 @@ from collections import namedtuple
 
 # pialert modules
 from const import pluginsPath, logPath
-from conf import mySettings
+from conf import mySettings, plugins
 from files import get_file_content, write_file
 from logger import mylog
-from database import updateState
-#from api import update_api
+from helper import updateState
+
 
 #-------------------------------------------------------------------------------
diff --git a/pialert/reporting.py b/pialert/reporting.py
index 2277ed95..0100fc62 100644
--- a/pialert/reporting.py
+++ b/pialert/reporting.py
@@ -13,9 +13,9 @@ from json2table import convert
 from const import pialertPath, logPath
 # from pialert.api import update_api
 from conf import *
-from database import get_table_as_json, updateState
+from database import get_table_as_json
 from files import write_file
-from helper import generate_mac_links, isNewVersion, removeDuplicateNewLines, timeNow, hide_email
+from helper import generate_mac_links, isNewVersion, removeDuplicateNewLines, timeNow, hide_email, json_struc, updateState
 from logger import logResult, mylog, print_log
 from mqtt import mqtt_start

From 0db7521bee6c1204e91b8095cb49f75f39e15b7c Mon Sep 17 00:00:00 2001
From: Data-Monkey
Date: Wed, 24 May 2023 22:34:09 +1000
Subject: [PATCH 09/20] change to import conf

---
 db/pialert.db          | Bin 245760 -> 262144 bytes
 pialert/conf.py        |  13 ++-
 pialert/database.py    |   2 +-
 pialert/device.py      |  44 ++++----
 pialert/files.py       |  13 ++-
 pialert/helper.py      |  85 +++++-----------
 pialert/initialise.py  | 225 ++++++++++++++++++-----------------------
 pialert/internet.py    |  18 ++--
 pialert/mqtt.py        |  12 +--
 pialert/networkscan.py |  45 ++++-----
 pialert/nmapscan.py    |   6 +-
 pialert/pialert.py     |  67 ++++++------
 pialert/pihole.py      |   2 +
 pialert/plugin.py      |  16 +--
 pialert/reporting.py   | 159 +++++++++++++++--------------
 pialert/scheduler.py   |  41 ++++++++
 16 files changed, 375 insertions(+), 373 deletions(-)
 create mode 100644 pialert/scheduler.py
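The convention this patch adopts throughout, shown as a minimal sketch (the setting name is real, the consumer module is illustrative): "from conf import X" copies the value once at import time, so a setting re-read later by importConfigs() would go stale in the importer, whereas "import conf" plus attribute access looks the value up at call time.

    # conf.py holds settings as plain module attributes, as in this repo.
    # consumer.py - illustrative, not an actual Pi.Alert module:
    import conf

    def seconds_between_scans():
        # Attribute lookup happens on every call, so a fresh value written by
        # importConfigs() (conf.SCAN_CYCLE_MINUTES = ...) is picked up here.
        return conf.SCAN_CYCLE_MINUTES * 60

    # By contrast, "from conf import SCAN_CYCLE_MINUTES" would freeze the
    # value that existed when consumer.py was first imported.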
GIT binary patch delta 2309 zcmZ`(32YSC8J>Cn?C$LD%#JyH;Ty1x4aR%hJAki1NdOZYNQenw8!$&8#wj-8Ft-17 z04Ju##(^IwASf7zI}XK@ibg?csv29(P+>N`Z${O1KRq(2m6AiKgi(`kJ#)pLlH9&;ArCiL%m{e!lTgwGbr}PKa9pa zP|RD0O&^EPuKHm0P$sLrSFewh)84q@ZmT-B{aLH>;`MOs`ng zv}{HFn!yHHiAhk6OCq=uagOno@r7~QxM6&1T;i?I8ST;3L+N@UjAJ;&(jt(~7sgPYDP9=XJQM0=2YUD+z9gzKVYpFW`+VKp+Q!Ap z>z2Hvpl*FR0c?z+M&C?=9ENU6FM?)sTDw=NID80R!GH5u|z zN&kobE4^2{r(JY6xXWCr&d;2k&T?m#TB(kA{Mm8Nk!HVV-)671kFb4eL)#qXzG5j| zP|0=ulf5s0Bp($9!WcB^N-xt!8etfoJm;^BUO1ezG;&?l!LkKq`y zorcF5X4A~mFp^;wy>J>XGR&mOXGG|u>A5p7k>M!nKLb?|#E~@13t<&Ac+V%=*VF4h zNVMi}0$GN!m`*2rqHCIU#Rt_q@Knwg-X8-QI5lj++4jAxic*+F5; z2&*ax4rYpc5~W&6|5krb->pm9755f*l`HJBJ6oJnovDtyj$b?0I?8P`Y#ETqK4-sR z8S*`;ORDX`Y{e4Yv#eHv72MuRy0cphQU%5Aft?JWq@#NvPsf>@;-pb}U43=a z8XDNc9hx2l&8c8HS2$(s6pGmg@$|=bD7RMcg`e_HGswLUav4sy!u#NR{$d)14v6HQ zpaln@oZ;hi;(&;Ds`dK=;FTc@e?)Ldte8S$4?&pWWLk6xHZy#TK0gHc49l#9pZ)O3 zlv3odpj~1uJq#7RcQIK<1nnZac0|ygM2&rd_QZ#2xAy&)_Jjwtuk}N`^<^J?CYK8@ zM@icy&sERso(fN_{#WfSZI3ox%W%Kqu5rt*Ph9A#cK*(J*qN=~S1Z&k$4W=3ZK6$6 zt|?QM4E7Q0liTIl(pQotQE3U3K_;%Hqgw^BCi-A26f$fidk54oTtmw`grpx^)B$OH z243Jy3({zR2e)x!3wU4#Hdwb_hH|cMHTgOrQ^k5QO?*OAw?U$q&_+H?&(jy3!irUt zxDE1ET*=owwzZThyCCjAdAzX=_V6ap(TpxJ_A6*@7re=EIpuE`&&#N4JM=>iE~T1o z5m+5{cEbjSOUT|MF0jRv-viwY7tw_tNM~3}H+x_X!x{?j5O_6J?|=mi7t&ihAVy_^B`!a9cYskv7yeU|!qVFtr_^tWCZ#jwhXdlm9Hxo2qdPGQqrTDcP<44e8=-(j?UdLIe`iq}c{cF~qKZ|#iT7qvpz0QWg+!Uv z*#b>6jEc&%3i>AvBL5qaDrA2d-w`%(9$i0!Up0v%q$i0_MN`O04r3_?Q_O`1|;c z_%8F6^C|Kk;!WeZ$rHkMgLN6pGuBLIpzga&%+1L~+nZ)EK4xTYWGI+EcNSv0&STVP+ cur_IP """, - (cycle,)) + (conf.cycle,)) mylog('verbose', [' IP Changes.........: ' + str ( sql.fetchone()[0]) ]) @@ -176,7 +176,7 @@ def create_new_devices (db): WHERE cur_ScanCycle = ? AND NOT EXISTS (SELECT 1 FROM Devices WHERE dev_MAC = cur_MAC) """, - (startTime, cycle) ) + (startTime, conf.cycle) ) print_log ('New devices - Insert Connection into session table') sql.execute ("""INSERT INTO Sessions (ses_MAC, ses_IP, ses_EventTypeConnection, ses_DateTimeConnection, @@ -186,7 +186,7 @@ def create_new_devices (db): WHERE cur_ScanCycle = ? AND NOT EXISTS (SELECT 1 FROM Sessions WHERE ses_MAC = cur_MAC) """, - (startTime, cycle) ) + (startTime, conf.cycle) ) # arpscan - Create new devices print_log ('New devices - 2 Create devices') @@ -200,7 +200,7 @@ def create_new_devices (db): WHERE cur_ScanCycle = ? 
AND NOT EXISTS (SELECT 1 FROM Devices WHERE dev_MAC = cur_MAC) """, - (startTime, startTime, cycle) ) + (startTime, startTime, conf.cycle) ) # Pi-hole - Insert events for new devices # NOT STRICYLY NECESARY (Devices can be created through Current_Scan) @@ -277,7 +277,7 @@ def create_new_devices (db): # WHERE dev_MAC = DHCP_MAC) """, # (startTime, startTime) ) print_log ('New Devices end') - db.commit() + db.commitDB() #------------------------------------------------------------------------------- @@ -293,7 +293,7 @@ def update_devices_data_from_scan (db): AND EXISTS (SELECT 1 FROM CurrentScan WHERE dev_MAC = cur_MAC AND dev_ScanCycle = cur_ScanCycle) """, - (startTime, cycle)) + (startTime, conf.cycle)) # Clean no active devices print_log ('Update devices - 2 Clean no active devices') @@ -302,7 +302,7 @@ def update_devices_data_from_scan (db): AND NOT EXISTS (SELECT 1 FROM CurrentScan WHERE dev_MAC = cur_MAC AND dev_ScanCycle = cur_ScanCycle) """, - (cycle,)) + (conf.cycle,)) # Update IP & Vendor print_log ('Update devices - 3 LastIP & Vendor') @@ -317,7 +317,7 @@ def update_devices_data_from_scan (db): AND EXISTS (SELECT 1 FROM CurrentScan WHERE dev_MAC = cur_MAC AND dev_ScanCycle = cur_ScanCycle) """, - (cycle,)) + (conf.cycle,)) # Pi-hole Network - Update (unknown) Name print_log ('Update devices - 4 Unknown Name') @@ -379,11 +379,11 @@ def update_devices_names (db): db.commitDB() # perform Pholus scan if (unknown) devices found - if PHOLUS_ACTIVE and (len(unknownDevices) > 0 or PHOLUS_FORCE): - performPholusScan(db, PHOLUS_TIMEOUT, userSubnets) + if conf.PHOLUS_ACTIVE and (len(unknownDevices) > 0 or conf.PHOLUS_FORCE): + performPholusScan(db, conf.PHOLUS_TIMEOUT, conf.userSubnets) # skip checks if no unknown devices - if len(unknownDevices) == 0 and PHOLUS_FORCE == False: + if len(unknownDevices) == 0 and conf.PHOLUS_FORCE == False: return # Devices without name diff --git a/pialert/files.py b/pialert/files.py index e283c59c..55d33faa 100644 --- a/pialert/files.py +++ b/pialert/files.py @@ -23,4 +23,15 @@ def get_file_content(path): content = f.read() f.close() - return content \ No newline at end of file + return content + +#------------------------------------------------------------------------------- +def read_config_file(filename): + """ + retuns dict on the config file key:value pairs + """ + # load the variables from pialert.conf + code = compile(filename.read_text(), filename.name, "exec") + confDict = {} # config dictionary + exec(code, {"__builtins__": {}}, confDict) + return confDict \ No newline at end of file diff --git a/pialert/helper.py b/pialert/helper.py index 18886cd6..d3c54015 100644 --- a/pialert/helper.py +++ b/pialert/helper.py @@ -12,11 +12,8 @@ import time from pathlib import Path import requests - - - +import conf from const import * -from conf import tz from logger import mylog, logResult, print_log # from api import update_api # to avoid circular reference @@ -25,9 +22,15 @@ from logger import mylog, logResult, print_log #------------------------------------------------------------------------------- def timeNow(): return datetime.datetime.now().replace(microsecond=0) +#------------------------------------------------------------------------------- +def timeNowTZ(): + return datetime.datetime.now(conf.tz).replace(microsecond=0) #------------------------------------------------------------------------------- -def updateState(db, newState): +def updateState(db, newState): + + # ?? Why is the state written to the DB? 
+ #sql = db.sql mylog('debug', ' [updateState] changing state to: "' + newState +'"') @@ -35,18 +38,20 @@ def updateState(db, newState): db.commitDB() #------------------------------------------------------------------------------- -def updateSubnets(SCAN_SUBNETS): +def updateSubnets(scan_subnets): # remove old list - userSubnets = [] + subnets = [] # multiple interfaces - if type(SCAN_SUBNETS) is list: - for interface in SCAN_SUBNETS : - userSubnets.append(interface) + if type(scan_subnets) is list: + for interface in scan_subnets : + subnets.append(interface) # one interface only else: - userSubnets.append(SCAN_SUBNETS) + subnets.append(scan_subnets) + + return subnets @@ -129,43 +134,6 @@ def filePermissions(): # last attempt fixPermissions() -#------------------------------------------------------------------------------- -class schedule_class: - def __init__(self, service, scheduleObject, last_next_schedule, was_last_schedule_used, last_run = 0): - self.service = service - self.scheduleObject = scheduleObject - self.last_next_schedule = last_next_schedule - self.last_run = last_run - self.was_last_schedule_used = was_last_schedule_used - def runScheduleCheck(self): - - result = False - - # Initialize the last run time if never run before - if self.last_run == 0: - self.last_run = (datetime.datetime.now(tz) - timedelta(days=365)).replace(microsecond=0) - - # get the current time with the currently specified timezone - nowTime = datetime.datetime.now(tz).replace(microsecond=0) - - # Run the schedule if the current time is past the schedule time we saved last time and - # (maybe the following check is unnecessary:) - # if the last run is past the last time we run a scheduled Pholus scan - if nowTime > self.last_next_schedule and self.last_run < self.last_next_schedule: - print_log(f'Scheduler run for {self.service}: YES') - self.was_last_schedule_used = True - result = True - else: - print_log(f'Scheduler run for {self.service}: NO') - - if self.was_last_schedule_used: - self.was_last_schedule_used = False - self.last_next_schedule = self.scheduleObject.next() - - return result - - - #------------------------------------------------------------------------------- @@ -220,15 +188,11 @@ def import_language_string(db, code, key, value, extra = ""): #------------------------------------------------------------------------------- -# Make a regular expression -# for validating an Ip-address -ipRegex = "^((25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9]?[0-9])\.){3}(25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9]?[0-9])$" - -# Define a function to -# validate an Ip address def checkIPV4(ip): - # pass the regular expression - # and the string in search() method + """ Define a function to validate an Ip address + """ + ipRegex = "^((25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9]?[0-9])\.){3}(25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9]?[0-9])$" + if(re.search(ipRegex, ip)): return True else: @@ -236,10 +200,9 @@ def checkIPV4(ip): #------------------------------------------------------------------------------- -def isNewVersion(db): - global newVersionAvailable +def isNewVersion(newVersion: bool): - if newVersionAvailable == False: + if newVersion == False: f = open(pialertPath + '/front/buildtimestamp.txt', 'r') buildTimestamp = int(f.read().strip()) @@ -264,10 +227,10 @@ def isNewVersion(db): if realeaseTimestamp > buildTimestamp + 600: mylog('none', [" New version of the container available!"]) - newVersionAvailable = True + newVersion = True # updateState(db, 'Back_New_Version_Available', str(newVersionAvailable)) ## TO DO add 
diff --git a/pialert/initialise.py b/pialert/initialise.py
index 6202fed1..f8d91813 100644
--- a/pialert/initialise.py
+++ b/pialert/initialise.py
@@ -6,10 +6,12 @@ from cron_converter import Cron
 from pathlib import Path
 import datetime

-from conf import *
+import conf
 from const import *
-from helper import collect_lang_strings, schedule_class, timeNow, updateSubnets, initOrSetParam
+from helper import collect_lang_strings, timeNow, updateSubnets, initOrSetParam
 from logger import mylog
+from files import read_config_file
+from scheduler import schedule_class
 from plugin import get_plugins_configs, print_plugin_info

 #===============================================================================
@@ -21,18 +23,18 @@ from plugin import get_plugins_configs, print_plugin_info
 #-------------------------------------------------------------------------------
 # Import user values
 # Check config dictionary
-def ccd(key, default, config, name, inputtype, options, group, events=[], desc = "", regex = ""):
+def ccd(key, default, config_dir, name, inputtype, options, group, events=[], desc = "", regex = ""):
     result = default

     # use existing value if already supplied, otherwise default value is used
-    if key in config:
-        result = config[key]
+    if key in config_dir:
+        result = config_dir[key]

     if inputtype == 'text':
         result = result.replace('\'', "{s-quote}")

-    mySettingsSQLsafe.append((key, name, desc, inputtype, options, regex, str(result), group, str(events)))
-    mySettings.append((key, name, desc, inputtype, options, regex, result, group, str(events)))
+    conf.mySettingsSQLsafe.append((key, name, desc, inputtype, options, regex, str(result), group, str(events)))
+    conf.mySettings.append((key, name, desc, inputtype, options, regex, result, group, str(events)))

     return result

 #-------------------------------------------------------------------------------
@@ -41,33 +43,7 @@ def importConfigs (db):

     sql = db.sql

-    # Specify globals so they can be overwritten with the new config
-    global lastTimeImported, mySettings, mySettingsSQLsafe, plugins, plugins_once_run
     lastTimeImported = 0
-    # General
-    global ENABLE_ARPSCAN, SCAN_SUBNETS, LOG_LEVEL, TIMEZONE, ENABLE_PLUGINS, PIALERT_WEB_PROTECTION, PIALERT_WEB_PASSWORD, INCLUDED_SECTIONS, SCAN_CYCLE_MINUTES, DAYS_TO_KEEP_EVENTS, REPORT_DASHBOARD_URL, DIG_GET_IP_ARG, UI_LANG
-    # Email
-    global REPORT_MAIL, SMTP_SERVER, SMTP_PORT, REPORT_TO, REPORT_FROM, SMTP_SKIP_LOGIN, SMTP_USER, SMTP_PASS, SMTP_SKIP_TLS, SMTP_FORCE_SSL
-    # Webhooks
-    global REPORT_WEBHOOK, WEBHOOK_URL, WEBHOOK_PAYLOAD, WEBHOOK_REQUEST_METHOD
-    # Apprise
-    global REPORT_APPRISE, APPRISE_HOST, APPRISE_URL, APPRISE_PAYLOAD
-    # NTFY
-    global REPORT_NTFY, NTFY_HOST, NTFY_TOPIC, NTFY_USER, NTFY_PASSWORD
-    # PUSHSAFER
-    global REPORT_PUSHSAFER, PUSHSAFER_TOKEN
-    # MQTT
-    global REPORT_MQTT, MQTT_BROKER, MQTT_PORT, MQTT_USER, MQTT_PASSWORD, MQTT_QOS, MQTT_DELAY_SEC
-    # DynDNS
-    global DDNS_ACTIVE, DDNS_DOMAIN, DDNS_USER, DDNS_PASSWORD, DDNS_UPDATE_URL
-    # PiHole
-    global PIHOLE_ACTIVE, DHCP_ACTIVE
-    # Pholus
-    global PHOLUS_ACTIVE, PHOLUS_TIMEOUT, PHOLUS_FORCE, PHOLUS_DAYS_DATA, PHOLUS_RUN, PHOLUS_RUN_SCHD, PHOLUS_RUN_TIMEOUT
-    # Nmap
-    global NMAP_ACTIVE, NMAP_TIMEOUT, NMAP_RUN, NMAP_RUN_SCHD, NMAP_ARGS
-    # API
-    global API_CUSTOM_SQL

     # get config file
     config_file = Path(fullConfPath)

@@ -76,136 +52,133
@@ def importConfigs (db): if (os.path.getmtime(config_file) < lastTimeImported) : return - mySettings = [] # reset settings - mySettingsSQLsafe = [] # same as above but safe to be passed into a SQL query - - # load the variables from pialert.conf - code = compile(config_file.read_text(), config_file.name, "exec") - c_d = {} # config dictionary - exec(code, {"__builtins__": {}}, c_d) + conf.mySettings = [] # reset settings + conf.mySettingsSQLsafe = [] # same as above but safe to be passed into a SQL query + + c_d = read_config_file(config_file) # Import setting if found in the dictionary # General - ENABLE_ARPSCAN = ccd('ENABLE_ARPSCAN', True , c_d, 'Enable arpscan', 'boolean', '', 'General', ['run']) - SCAN_SUBNETS = ccd('SCAN_SUBNETS', ['192.168.1.0/24 --interface=eth1', '192.168.1.0/24 --interface=eth0'] , c_d, 'Subnets to scan', 'subnets', '', 'General') - LOG_LEVEL = ccd('LOG_LEVEL', 'verbose' , c_d, 'Log verboseness', 'selecttext', "['none', 'minimal', 'verbose', 'debug']", 'General') - TIMEZONE = ccd('TIMEZONE', 'Europe/Berlin' , c_d, 'Time zone', 'text', '', 'General') - ENABLE_PLUGINS = ccd('ENABLE_PLUGINS', True , c_d, 'Enable plugins', 'boolean', '', 'General') - PIALERT_WEB_PROTECTION = ccd('PIALERT_WEB_PROTECTION', False , c_d, 'Enable logon', 'boolean', '', 'General') - PIALERT_WEB_PASSWORD = ccd('PIALERT_WEB_PASSWORD', '8d969eef6ecad3c29a3a629280e686cf0c3f5d5a86aff3ca12020c923adc6c92' , c_d, 'Logon password', 'readonly', '', 'General') - INCLUDED_SECTIONS = ccd('INCLUDED_SECTIONS', ['internet', 'new_devices', 'down_devices', 'events', 'ports'] , c_d, 'Notify on', 'multiselect', "['internet', 'new_devices', 'down_devices', 'events', 'ports', 'plugins']", 'General') - SCAN_CYCLE_MINUTES = ccd('SCAN_CYCLE_MINUTES', 5 , c_d, 'Scan cycle delay (m)', 'integer', '', 'General') - DAYS_TO_KEEP_EVENTS = ccd('DAYS_TO_KEEP_EVENTS', 90 , c_d, 'Delete events days', 'integer', '', 'General') - REPORT_DASHBOARD_URL = ccd('REPORT_DASHBOARD_URL', 'http://pi.alert/' , c_d, 'PiAlert URL', 'text', '', 'General') - DIG_GET_IP_ARG = ccd('DIG_GET_IP_ARG', '-4 myip.opendns.com @resolver1.opendns.com' , c_d, 'DIG arguments', 'text', '', 'General') - UI_LANG = ccd('UI_LANG', 'English' , c_d, 'Language Interface', 'selecttext', "['English', 'German', 'Spanish']", 'General') - UI_PRESENCE = ccd('UI_PRESENCE', ['online', 'offline', 'archived'] , c_d, 'Include in presence', 'multiselect', "['online', 'offline', 'archived']", 'General') + conf.ENABLE_ARPSCAN = ccd('ENABLE_ARPSCAN', True , c_d, 'Enable arpscan', 'boolean', '', 'General', ['run']) + conf.SCAN_SUBNETS = ccd('SCAN_SUBNETS', ['192.168.1.0/24 --interface=eth1', '192.168.1.0/24 --interface=eth0'] , c_d, 'Subnets to scan', 'subnets', '', 'General') + conf.LOG_LEVEL = ccd('LOG_LEVEL', 'verbose' , c_d, 'Log verboseness', 'selecttext', "['none', 'minimal', 'verbose', 'debug']", 'General') + conf.TIMEZONE = ccd('TIMEZONE', 'Europe/Berlin' , c_d, 'Time zone', 'text', '', 'General') + conf.ENABLE_PLUGINS = ccd('ENABLE_PLUGINS', True , c_d, 'Enable plugins', 'boolean', '', 'General') + conf.PIALERT_WEB_PROTECTION = ccd('PIALERT_WEB_PROTECTION', False , c_d, 'Enable logon', 'boolean', '', 'General') + conf.PIALERT_WEB_PASSWORD = ccd('PIALERT_WEB_PASSWORD', '8d969eef6ecad3c29a3a629280e686cf0c3f5d5a86aff3ca12020c923adc6c92' , c_d, 'Logon password', 'readonly', '', 'General') + conf.INCLUDED_SECTIONS = ccd('INCLUDED_SECTIONS', ['internet', 'new_devices', 'down_devices', 'events', 'ports'] , c_d, 'Notify on', 'multiselect', "['internet', 'new_devices', 
'down_devices', 'events', 'ports', 'plugins']", 'General') + conf.SCAN_CYCLE_MINUTES = ccd('SCAN_CYCLE_MINUTES', 5 , c_d, 'Scan cycle delay (m)', 'integer', '', 'General') + conf.DAYS_TO_KEEP_EVENTS = ccd('DAYS_TO_KEEP_EVENTS', 90 , c_d, 'Delete events days', 'integer', '', 'General') + conf.REPORT_DASHBOARD_URL = ccd('REPORT_DASHBOARD_URL', 'http://pi.alert/' , c_d, 'PiAlert URL', 'text', '', 'General') + conf.DIG_GET_IP_ARG = ccd('DIG_GET_IP_ARG', '-4 myip.opendns.com @resolver1.opendns.com' , c_d, 'DIG arguments', 'text', '', 'General') + conf.UI_LANG = ccd('UI_LANG', 'English' , c_d, 'Language Interface', 'selecttext', "['English', 'German', 'Spanish']", 'General') + conf.UI_PRESENCE = ccd('UI_PRESENCE', ['online', 'offline', 'archived'] , c_d, 'Include in presence', 'multiselect', "['online', 'offline', 'archived']", 'General') # Email - REPORT_MAIL = ccd('REPORT_MAIL', False , c_d, 'Enable email', 'boolean', '', 'Email', ['test']) - SMTP_SERVER = ccd('SMTP_SERVER', '' , c_d,'SMTP server URL', 'text', '', 'Email') - SMTP_PORT = ccd('SMTP_PORT', 587 , c_d, 'SMTP port', 'integer', '', 'Email') - REPORT_TO = ccd('REPORT_TO', 'user@gmail.com' , c_d, 'Email to', 'text', '', 'Email') - REPORT_FROM = ccd('REPORT_FROM', 'Pi.Alert ' , c_d, 'Email Subject', 'text', '', 'Email') - SMTP_SKIP_LOGIN = ccd('SMTP_SKIP_LOGIN', False , c_d, 'SMTP skip login', 'boolean', '', 'Email') - SMTP_USER = ccd('SMTP_USER', '' , c_d, 'SMTP user', 'text', '', 'Email') - SMTP_PASS = ccd('SMTP_PASS', '' , c_d, 'SMTP password', 'password', '', 'Email') - SMTP_SKIP_TLS = ccd('SMTP_SKIP_TLS', False , c_d, 'SMTP skip TLS', 'boolean', '', 'Email') - SMTP_FORCE_SSL = ccd('SMTP_FORCE_SSL', False , c_d, 'Force SSL', 'boolean', '', 'Email') + conf.REPORT_MAIL = ccd('REPORT_MAIL', False , c_d, 'Enable email', 'boolean', '', 'Email', ['test']) + conf.SMTP_SERVER = ccd('SMTP_SERVER', '' , c_d,'SMTP server URL', 'text', '', 'Email') + conf.SMTP_PORT = ccd('SMTP_PORT', 587 , c_d, 'SMTP port', 'integer', '', 'Email') + conf.REPORT_TO = ccd('REPORT_TO', 'user@gmail.com' , c_d, 'Email to', 'text', '', 'Email') + conf.REPORT_FROM = ccd('REPORT_FROM', 'Pi.Alert ' , c_d, 'Email Subject', 'text', '', 'Email') + conf.SMTP_SKIP_LOGIN = ccd('SMTP_SKIP_LOGIN', False , c_d, 'SMTP skip login', 'boolean', '', 'Email') + conf.SMTP_USER = ccd('SMTP_USER', '' , c_d, 'SMTP user', 'text', '', 'Email') + conf.SMTP_PASS = ccd('SMTP_PASS', '' , c_d, 'SMTP password', 'password', '', 'Email') + conf.SMTP_SKIP_TLS = ccd('SMTP_SKIP_TLS', False , c_d, 'SMTP skip TLS', 'boolean', '', 'Email') + conf.SMTP_FORCE_SSL = ccd('SMTP_FORCE_SSL', False , c_d, 'Force SSL', 'boolean', '', 'Email') # Webhooks - REPORT_WEBHOOK = ccd('REPORT_WEBHOOK', False , c_d, 'Enable Webhooks', 'boolean', '', 'Webhooks', ['test']) - WEBHOOK_URL = ccd('WEBHOOK_URL', '' , c_d, 'Target URL', 'text', '', 'Webhooks') - WEBHOOK_PAYLOAD = ccd('WEBHOOK_PAYLOAD', 'json' , c_d, 'Payload type', 'selecttext', "['json', 'html', 'text']", 'Webhooks') - WEBHOOK_REQUEST_METHOD = ccd('WEBHOOK_REQUEST_METHOD', 'GET' , c_d, 'Req type', 'selecttext', "['GET', 'POST', 'PUT']", 'Webhooks') + conf.REPORT_WEBHOOK = ccd('REPORT_WEBHOOK', False , c_d, 'Enable Webhooks', 'boolean', '', 'Webhooks', ['test']) + conf.WEBHOOK_URL = ccd('WEBHOOK_URL', '' , c_d, 'Target URL', 'text', '', 'Webhooks') + conf.WEBHOOK_PAYLOAD = ccd('WEBHOOK_PAYLOAD', 'json' , c_d, 'Payload type', 'selecttext', "['json', 'html', 'text']", 'Webhooks') + conf.WEBHOOK_REQUEST_METHOD = ccd('WEBHOOK_REQUEST_METHOD', 'GET' , c_d, 'Req type', 
'selecttext', "['GET', 'POST', 'PUT']", 'Webhooks') # Apprise - REPORT_APPRISE = ccd('REPORT_APPRISE', False , c_d, 'Enable Apprise', 'boolean', '', 'Apprise', ['test']) - APPRISE_HOST = ccd('APPRISE_HOST', '' , c_d, 'Apprise host URL', 'text', '', 'Apprise') - APPRISE_URL = ccd('APPRISE_URL', '' , c_d, 'Apprise notification URL', 'text', '', 'Apprise') - APPRISE_PAYLOAD = ccd('APPRISE_PAYLOAD', 'html' , c_d, 'Payload type', 'selecttext', "['html', 'text']", 'Apprise') + conf.REPORT_APPRISE = ccd('REPORT_APPRISE', False , c_d, 'Enable Apprise', 'boolean', '', 'Apprise', ['test']) + conf.APPRISE_HOST = ccd('APPRISE_HOST', '' , c_d, 'Apprise host URL', 'text', '', 'Apprise') + conf.APPRISE_URL = ccd('APPRISE_URL', '' , c_d, 'Apprise notification URL', 'text', '', 'Apprise') + conf.APPRISE_PAYLOAD = ccd('APPRISE_PAYLOAD', 'html' , c_d, 'Payload type', 'selecttext', "['html', 'text']", 'Apprise') # NTFY - REPORT_NTFY = ccd('REPORT_NTFY', False , c_d, 'Enable NTFY', 'boolean', '', 'NTFY', ['test']) - NTFY_HOST = ccd('NTFY_HOST', 'https://ntfy.sh' , c_d, 'NTFY host URL', 'text', '', 'NTFY') - NTFY_TOPIC = ccd('NTFY_TOPIC', '' , c_d, 'NTFY topic', 'text', '', 'NTFY') - NTFY_USER = ccd('NTFY_USER', '' , c_d, 'NTFY user', 'text', '', 'NTFY') - NTFY_PASSWORD = ccd('NTFY_PASSWORD', '' , c_d, 'NTFY password', 'password', '', 'NTFY') + conf.REPORT_NTFY = ccd('REPORT_NTFY', False , c_d, 'Enable NTFY', 'boolean', '', 'NTFY', ['test']) + conf.NTFY_HOST = ccd('NTFY_HOST', 'https://ntfy.sh' , c_d, 'NTFY host URL', 'text', '', 'NTFY') + conf.NTFY_TOPIC = ccd('NTFY_TOPIC', '' , c_d, 'NTFY topic', 'text', '', 'NTFY') + conf.NTFY_USER = ccd('NTFY_USER', '' , c_d, 'NTFY user', 'text', '', 'NTFY') + conf.NTFY_PASSWORD = ccd('NTFY_PASSWORD', '' , c_d, 'NTFY password', 'password', '', 'NTFY') # PUSHSAFER - REPORT_PUSHSAFER = ccd('REPORT_PUSHSAFER', False , c_d, 'Enable PUSHSAFER', 'boolean', '', 'PUSHSAFER', ['test']) - PUSHSAFER_TOKEN = ccd('PUSHSAFER_TOKEN', 'ApiKey' , c_d, 'PUSHSAFER token', 'text', '', 'PUSHSAFER') + conf.REPORT_PUSHSAFER = ccd('REPORT_PUSHSAFER', False , c_d, 'Enable PUSHSAFER', 'boolean', '', 'PUSHSAFER', ['test']) + conf.PUSHSAFER_TOKEN = ccd('PUSHSAFER_TOKEN', 'ApiKey' , c_d, 'PUSHSAFER token', 'text', '', 'PUSHSAFER') # MQTT - REPORT_MQTT = ccd('REPORT_MQTT', False , c_d, 'Enable MQTT', 'boolean', '', 'MQTT') - MQTT_BROKER = ccd('MQTT_BROKER', '' , c_d, 'MQTT broker', 'text', '', 'MQTT') - MQTT_PORT = ccd('MQTT_PORT', 1883 , c_d, 'MQTT broker port', 'integer', '', 'MQTT') - MQTT_USER = ccd('MQTT_USER', '' , c_d, 'MQTT user', 'text', '', 'MQTT') - MQTT_PASSWORD = ccd('MQTT_PASSWORD', '' , c_d, 'MQTT password', 'password', '', 'MQTT') - MQTT_QOS = ccd('MQTT_QOS', 0 , c_d, 'MQTT Quality of Service', 'selectinteger', "['0', '1', '2']", 'MQTT') - MQTT_DELAY_SEC = ccd('MQTT_DELAY_SEC', 2 , c_d, 'MQTT delay', 'selectinteger', "['2', '3', '4', '5']", 'MQTT') + conf.REPORT_MQTT = ccd('REPORT_MQTT', False , c_d, 'Enable MQTT', 'boolean', '', 'MQTT') + conf.MQTT_BROKER = ccd('MQTT_BROKER', '' , c_d, 'MQTT broker', 'text', '', 'MQTT') + conf.MQTT_PORT = ccd('MQTT_PORT', 1883 , c_d, 'MQTT broker port', 'integer', '', 'MQTT') + conf.MQTT_USER = ccd('MQTT_USER', '' , c_d, 'MQTT user', 'text', '', 'MQTT') + conf.MQTT_PASSWORD = ccd('MQTT_PASSWORD', '' , c_d, 'MQTT password', 'password', '', 'MQTT') + conf.MQTT_QOS = ccd('MQTT_QOS', 0 , c_d, 'MQTT Quality of Service', 'selectinteger', "['0', '1', '2']", 'MQTT') + conf.MQTT_DELAY_SEC = ccd('MQTT_DELAY_SEC', 2 , c_d, 'MQTT delay', 'selectinteger', "['2', 
'3', '4', '5']", 'MQTT') # DynDNS - DDNS_ACTIVE = ccd('DDNS_ACTIVE', False , c_d, 'Enable DynDNS', 'boolean', '', 'DynDNS') - DDNS_DOMAIN = ccd('DDNS_DOMAIN', 'your_domain.freeddns.org' , c_d, 'DynDNS domain URL', 'text', '', 'DynDNS') - DDNS_USER = ccd('DDNS_USER', 'dynu_user' , c_d, 'DynDNS user', 'text', '', 'DynDNS') - DDNS_PASSWORD = ccd('DDNS_PASSWORD', 'A0000000B0000000C0000000D0000000' , c_d, 'DynDNS password', 'password', '', 'DynDNS') - DDNS_UPDATE_URL = ccd('DDNS_UPDATE_URL', 'https://api.dynu.com/nic/update?' , c_d, 'DynDNS update URL', 'text', '', 'DynDNS') + conf.DDNS_ACTIVE = ccd('DDNS_ACTIVE', False , c_d, 'Enable DynDNS', 'boolean', '', 'DynDNS') + conf.DDNS_DOMAIN = ccd('DDNS_DOMAIN', 'your_domain.freeddns.org' , c_d, 'DynDNS domain URL', 'text', '', 'DynDNS') + conf.DDNS_USER = ccd('DDNS_USER', 'dynu_user' , c_d, 'DynDNS user', 'text', '', 'DynDNS') + conf.DDNS_PASSWORD = ccd('DDNS_PASSWORD', 'A0000000B0000000C0000000D0000000' , c_d, 'DynDNS password', 'password', '', 'DynDNS') + conf.DDNS_UPDATE_URL = ccd('DDNS_UPDATE_URL', 'https://api.dynu.com/nic/update?' , c_d, 'DynDNS update URL', 'text', '', 'DynDNS') # PiHole - PIHOLE_ACTIVE = ccd('PIHOLE_ACTIVE', False, c_d, 'Enable PiHole mapping', 'boolean', '', 'PiHole') - DHCP_ACTIVE = ccd('DHCP_ACTIVE', False , c_d, 'Enable PiHole DHCP', 'boolean', '', 'PiHole') + conf.PIHOLE_ACTIVE = ccd('PIHOLE_ACTIVE', False, c_d, 'Enable PiHole mapping', 'boolean', '', 'PiHole') + conf.DHCP_ACTIVE = ccd('DHCP_ACTIVE', False , c_d, 'Enable PiHole DHCP', 'boolean', '', 'PiHole') # PHOLUS - PHOLUS_ACTIVE = ccd('PHOLUS_ACTIVE', False , c_d, 'Enable Pholus scans', 'boolean', '', 'Pholus') - PHOLUS_TIMEOUT = ccd('PHOLUS_TIMEOUT', 20 , c_d, 'Pholus timeout', 'integer', '', 'Pholus') - PHOLUS_FORCE = ccd('PHOLUS_FORCE', False , c_d, 'Pholus force check', 'boolean', '', 'Pholus') - PHOLUS_RUN = ccd('PHOLUS_RUN', 'once' , c_d, 'Pholus enable schedule', 'selecttext', "['none', 'once', 'schedule']", 'Pholus') - PHOLUS_RUN_TIMEOUT = ccd('PHOLUS_RUN_TIMEOUT', 600 , c_d, 'Pholus timeout schedule', 'integer', '', 'Pholus') - PHOLUS_RUN_SCHD = ccd('PHOLUS_RUN_SCHD', '0 4 * * *' , c_d, 'Pholus schedule', 'text', '', 'Pholus') - PHOLUS_DAYS_DATA = ccd('PHOLUS_DAYS_DATA', 0 , c_d, 'Pholus keep days', 'integer', '', 'Pholus') + conf.PHOLUS_ACTIVE = ccd('PHOLUS_ACTIVE', False , c_d, 'Enable Pholus scans', 'boolean', '', 'Pholus') + conf.PHOLUS_TIMEOUT = ccd('PHOLUS_TIMEOUT', 20 , c_d, 'Pholus timeout', 'integer', '', 'Pholus') + conf.PHOLUS_FORCE = ccd('PHOLUS_FORCE', False , c_d, 'Pholus force check', 'boolean', '', 'Pholus') + conf.PHOLUS_RUN = ccd('PHOLUS_RUN', 'once' , c_d, 'Pholus enable schedule', 'selecttext', "['none', 'once', 'schedule']", 'Pholus') + conf.PHOLUS_RUN_TIMEOUT = ccd('PHOLUS_RUN_TIMEOUT', 600 , c_d, 'Pholus timeout schedule', 'integer', '', 'Pholus') + conf.PHOLUS_RUN_SCHD = ccd('PHOLUS_RUN_SCHD', '0 4 * * *' , c_d, 'Pholus schedule', 'text', '', 'Pholus') + conf.PHOLUS_DAYS_DATA = ccd('PHOLUS_DAYS_DATA', 0 , c_d, 'Pholus keep days', 'integer', '', 'Pholus') # Nmap - NMAP_ACTIVE = ccd('NMAP_ACTIVE', True , c_d, 'Enable Nmap scans', 'boolean', '', 'Nmap') - NMAP_TIMEOUT = ccd('NMAP_TIMEOUT', 150 , c_d, 'Nmap timeout', 'integer', '', 'Nmap') - NMAP_RUN = ccd('NMAP_RUN', 'none' , c_d, 'Nmap enable schedule', 'selecttext', "['none', 'once', 'schedule']", 'Nmap') - NMAP_RUN_SCHD = ccd('NMAP_RUN_SCHD', '0 2 * * *' , c_d, 'Nmap schedule', 'text', '', 'Nmap') - NMAP_ARGS = ccd('NMAP_ARGS', '-p -10000' , c_d, 'Nmap custom arguments', 'text', 
'', 'Nmap') + conf.NMAP_ACTIVE = ccd('NMAP_ACTIVE', True , c_d, 'Enable Nmap scans', 'boolean', '', 'Nmap') + conf.NMAP_TIMEOUT = ccd('NMAP_TIMEOUT', 150 , c_d, 'Nmap timeout', 'integer', '', 'Nmap') + conf.NMAP_RUN = ccd('NMAP_RUN', 'none' , c_d, 'Nmap enable schedule', 'selecttext', "['none', 'once', 'schedule']", 'Nmap') + conf.NMAP_RUN_SCHD = ccd('NMAP_RUN_SCHD', '0 2 * * *' , c_d, 'Nmap schedule', 'text', '', 'Nmap') + conf.NMAP_ARGS = ccd('NMAP_ARGS', '-p -10000' , c_d, 'Nmap custom arguments', 'text', '', 'Nmap') # API - API_CUSTOM_SQL = ccd('API_CUSTOM_SQL', 'SELECT * FROM Devices WHERE dev_PresentLastScan = 0' , c_d, 'Custom endpoint', 'text', '', 'API') + conf.API_CUSTOM_SQL = ccd('API_CUSTOM_SQL', 'SELECT * FROM Devices WHERE dev_PresentLastScan = 0' , c_d, 'Custom endpoint', 'text', '', 'API') # Prepare scheduler - global tz, mySchedules, plugins + #global tz, mySchedules, plugins # Init timezone in case it changed - tz = timezone(TIMEZONE) - + conf.tz = timezone(conf.TIMEZONE) + # global mySchedules # reset schedules - mySchedules = [] + conf.mySchedules = [] # init pholus schedule - pholusSchedule = Cron(PHOLUS_RUN_SCHD).schedule(start_date=datetime.datetime.now(tz)) - mySchedules.append(schedule_class("pholus", pholusSchedule, pholusSchedule.next(), False)) + pholusSchedule = Cron(conf.PHOLUS_RUN_SCHD).schedule(start_date=datetime.datetime.now(conf.tz)) + + conf.mySchedules.append(schedule_class("pholus", pholusSchedule, pholusSchedule.next(), False)) + mylog('debug', "schedules (appended) : " + str(conf.mySchedules)) # init nmap schedule - nmapSchedule = Cron(NMAP_RUN_SCHD).schedule(start_date=datetime.datetime.now(tz)) - mySchedules.append(schedule_class("nmap", nmapSchedule, nmapSchedule.next(), False)) + nmapSchedule = Cron(conf.NMAP_RUN_SCHD).schedule(start_date=datetime.datetime.now(conf.tz)) + conf.mySchedules.append(schedule_class("nmap", nmapSchedule, nmapSchedule.next(), False)) # Format and prepare the list of subnets - userSubnets = updateSubnets(SCAN_SUBNETS) - - + conf.userSubnets = updateSubnets(conf.SCAN_SUBNETS) # Plugins START # ----------------- - if ENABLE_PLUGINS: - plugins = get_plugins_configs() + if conf.ENABLE_PLUGINS: + conf.plugins = get_plugins_configs() - mylog('none', ['[', timeNow(), '] Plugins: Number of dynamically loaded plugins: ', len(plugins)]) + mylog('none', ['[', timeNow(), '] Plugins: Number of dynamically loaded plugins: ', len(conf.plugins)]) # handle plugins - for plugin in plugins: + for plugin in conf.plugins: print_plugin_info(plugin, ['display_name','description']) pref = plugin["unique_prefix"] @@ -227,13 +200,13 @@ def importConfigs (db): # Setup schedules if setFunction == 'RUN_SCHD': - newSchedule = Cron(v).schedule(start_date=datetime.datetime.now(tz)) - mySchedules.append(schedule_class(pref, newSchedule, newSchedule.next(), False)) + newSchedule = Cron(v).schedule(start_date=datetime.datetime.now(conf.tz)) + conf.mySchedules.append(schedule_class(pref, newSchedule, newSchedule.next(), False)) # Collect settings related language strings collect_lang_strings(db, set, pref + "_" + set["function"]) - plugins_once_run = False + conf.plugins_once_run = False # ----------------- # Plugins END @@ -244,10 +217,10 @@ def importConfigs (db): # Insert settings into the DB sql.execute ("DELETE FROM Settings") sql.executemany ("""INSERT INTO Settings ("Code_Name", "Display_Name", "Description", "Type", "Options", - "RegEx", "Value", "Group", "Events" ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)""", mySettingsSQLsafe) + "RegEx", "Value", 
"Group", "Events" ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)""", conf.mySettingsSQLsafe) # Used to determine the next import - lastTimeImported = time.time() + conf.lastTimeImported = time.time() # Is used to display a message in the UI when old (outdated) settings are loaded initOrSetParam(db, "Back_Settings_Imported",(round(time.time() * 1000),) ) diff --git a/pialert/internet.py b/pialert/internet.py index 3b13db4f..cd68f881 100644 --- a/pialert/internet.py +++ b/pialert/internet.py @@ -5,10 +5,10 @@ import re # pialert modules +import conf from helper import timeNow, updateState from logger import append_line_to_file, mylog from const import logPath -from conf import DDNS_ACTIVE, DDNS_DOMAIN, DDNS_UPDATE_URL, DDNS_PASSWORD, DDNS_USER @@ -19,7 +19,7 @@ from conf import DDNS_ACTIVE, DDNS_DOMAIN, DDNS_UPDATE_URL, DDNS_PASSWORD, DDNS_ #=============================================================================== # INTERNET IP CHANGE #=============================================================================== -def check_internet_IP (db, DIG_GET_IP_ARG): +def check_internet_IP ( db ): # Header updateState(db,"Scan: Internet IP") @@ -27,7 +27,7 @@ def check_internet_IP (db, DIG_GET_IP_ARG): # Get Internet IP mylog('verbose', [' Retrieving Internet IP:']) - internet_IP = get_internet_IP(DIG_GET_IP_ARG) + internet_IP = get_internet_IP(conf.DIG_GET_IP_ARG) # TESTING - Force IP # internet_IP = "1.2.3.4" @@ -52,7 +52,7 @@ def check_internet_IP (db, DIG_GET_IP_ARG): mylog('verbose', [' No changes to perform']) # Get Dynamic DNS IP - if DDNS_ACTIVE : + if conf.DDNS_ACTIVE : mylog('verbose', [' Retrieving Dynamic DNS IP']) dns_IP = get_dynamic_DNS_IP() @@ -157,7 +157,7 @@ def get_dynamic_DNS_IP (): # dig_args = ['dig', '+short', DDNS_DOMAIN, '@resolver1.opendns.com'] # Using default DNS server - dig_args = ['dig', '+short', DDNS_DOMAIN] + dig_args = ['dig', '+short', conf.DDNS_DOMAIN] try: # try runnning a subprocess @@ -182,10 +182,10 @@ def set_dynamic_DNS_IP (): # try runnning a subprocess # Update Dynamic IP curl_output = subprocess.check_output (['curl', '-s', - DDNS_UPDATE_URL + - 'username=' + DDNS_USER + - '&password=' + DDNS_PASSWORD + - '&hostname=' + DDNS_DOMAIN], + conf.DDNS_UPDATE_URL + + 'username=' + conf.DDNS_USER + + '&password=' + conf.DDNS_PASSWORD + + '&hostname=' + conf.DDNS_DOMAIN], universal_newlines=True) except subprocess.CalledProcessError as e: # An error occured, handle it diff --git a/pialert/mqtt.py b/pialert/mqtt.py index 0d47e142..8843b1d5 100644 --- a/pialert/mqtt.py +++ b/pialert/mqtt.py @@ -3,8 +3,8 @@ import time import re from paho.mqtt import client as mqtt_client +import conf from logger import mylog -from conf import MQTT_BROKER, MQTT_DELAY_SEC, MQTT_PASSWORD, MQTT_PORT, MQTT_QOS, MQTT_USER from database import get_all_devices, get_device_stats from helper import bytes_to_string, sanitize_string @@ -35,7 +35,7 @@ def publish_mqtt(client, topic, message): result = client.publish( topic=topic, payload=message, - qos=MQTT_QOS, + qos=conf.MQTT_QOS, retain=True, ) @@ -106,7 +106,7 @@ def publish_sensor(client, sensorConf): # add the sensor to the global list to keep track of succesfully added sensors if publish_mqtt(client, topic, message): # hack - delay adding to the queue in case the process is - time.sleep(MQTT_DELAY_SEC) # restarted and previous publish processes aborted + time.sleep(conf.MQTT_DELAY_SEC) # restarted and previous publish processes aborted # (it takes ~2s to update a sensor config on the broker) mqtt_sensors.append(sensorConf) @@ -131,10 +131,10 
@@ def mqtt_create_client(): client = mqtt_client.Client('PiAlert') # Set Connecting Client ID - client.username_pw_set(MQTT_USER, MQTT_PASSWORD) + client.username_pw_set(conf.MQTT_USER, conf.MQTT_PASSWORD) client.on_connect = on_connect client.on_disconnect = on_disconnect - client.connect(MQTT_BROKER, MQTT_PORT) + client.connect(conf.MQTT_BROKER, conf.MQTT_PORT) client.loop_start() return client @@ -177,7 +177,7 @@ def mqtt_start(): # Get all devices devices = get_all_devices() - sec_delay = len(devices) * int(MQTT_DELAY_SEC)*5 + sec_delay = len(devices) * int(conf.MQTT_DELAY_SEC)*5 mylog('info', [" Estimated delay: ", (sec_delay), 's ', '(', round(sec_delay/60,1) , 'min)' ]) diff --git a/pialert/networkscan.py b/pialert/networkscan.py index 63a81481..3471bdb4 100644 --- a/pialert/networkscan.py +++ b/pialert/networkscan.py @@ -1,7 +1,6 @@ - -from conf import DHCP_ACTIVE, PIHOLE_ACTIVE, cycle, ENABLE_ARPSCAN +import conf from arpscan import execute_arpscan from database import insertOnlineHistory from device import create_new_devices, print_scan_stats, save_scanned_devices, update_devices_data_from_scan, update_devices_names @@ -27,10 +26,10 @@ def scan_network (db): mylog('verbose', ['[', timeNow(), '] Scan Devices:' ]) # Query ScanCycle properties - scanCycle_data = query_ScanCycle_Data (True) + scanCycle_data = query_ScanCycle_Data (db, True) if scanCycle_data is None: mylog('none', ['\n*************** ERROR ***************']) - mylog('none', ['ScanCycle %s not found' % cycle ]) + mylog('none', ['ScanCycle %s not found' % conf.cycle ]) mylog('none', [' Exiting...\n']) return False @@ -41,45 +40,45 @@ def scan_network (db): # arp-scan command arpscan_devices = [] - if ENABLE_ARPSCAN: + if conf.ENABLE_ARPSCAN: mylog('verbose', [' arp-scan start']) - arpscan_devices = execute_arpscan () + arpscan_devices = execute_arpscan (conf.userSubnets) print_log ('arp-scan ends') # Pi-hole method - if PIHOLE_ACTIVE : + if conf.PIHOLE_ACTIVE : mylog('verbose', [' Pi-hole start']) copy_pihole_network(db) db.commitDB() # DHCP Leases method - if DHCP_ACTIVE : + if conf.DHCP_ACTIVE : mylog('verbose', [' DHCP Leases start']) read_DHCP_leases (db) db.commitDB() # Load current scan data mylog('verbose', [' Processing scan results']) - save_scanned_devices (arpscan_devices, cycle_interval) + save_scanned_devices (db, arpscan_devices, cycle_interval) # Print stats - print_log ('Print Stats') - print_scan_stats() - print_log ('Stats end') + mylog ('none', 'Print Stats') + print_scan_stats(db) + mylog ('none', 'Stats end') # Create Events mylog('verbose', [' Updating DB Info']) mylog('verbose', [' Sessions Events (connect / discconnect)']) - insert_events() + insert_events(db) # Create New Devices # after create events -> avoid 'connection' event mylog('verbose', [' Creating new devices']) - create_new_devices () + create_new_devices (db) # Update devices info mylog('verbose', [' Updating Devices Info']) - update_devices_data_from_scan () + update_devices_data_from_scan (db) # Resolve devices names print_log (' Resolve devices names') @@ -99,7 +98,7 @@ def scan_network (db): # Sessions snapshot mylog('verbose', [' Inserting scan results into Online_History']) - insertOnlineHistory(db,cycle) + insertOnlineHistory(db,conf.cycle) # Skip repeated notifications mylog('verbose', [' Skipping repeated notifications']) @@ -150,7 +149,7 @@ def void_ghost_disconnections (db): AND eve_DateTime >= DATETIME (?, '-' || cic_EveryXmin ||' minutes') ) """, - (startTime, cycle, startTime) ) + (startTime, conf.cycle, startTime) ) 
# Void connect paired events print_log ('Void - 2 Paired events') @@ -168,7 +167,7 @@ def void_ghost_disconnections (db): AND eve_DateTime >= DATETIME (?, '-' || cic_EveryXmin ||' minutes') ) """, - (cycle, startTime) ) + (conf.cycle, startTime) ) # Void disconnect ghost events print_log ('Void - 3 Disconnect ghost events') @@ -187,7 +186,7 @@ def void_ghost_disconnections (db): AND eve_DateTime >= DATETIME (?, '-' || cic_EveryXmin ||' minutes') ) """, - (cycle, startTime) ) + (conf.cycle, startTime) ) print_log ('Void end') db.commitDB() @@ -267,7 +266,7 @@ def insert_events (db): AND NOT EXISTS (SELECT 1 FROM CurrentScan WHERE dev_MAC = cur_MAC AND dev_ScanCycle = cur_ScanCycle) """, - (startTime, cycle) ) + (startTime, conf.cycle) ) # Check new connections print_log ('Events 2 - New Connections') @@ -279,7 +278,7 @@ def insert_events (db): WHERE dev_MAC = cur_MAC AND dev_ScanCycle = cur_ScanCycle AND dev_PresentLastScan = 0 AND dev_ScanCycle = ? """, - (startTime, cycle) ) + (startTime, conf.cycle) ) # Check disconnections print_log ('Events 3 - Disconnections') @@ -295,7 +294,7 @@ def insert_events (db): AND NOT EXISTS (SELECT 1 FROM CurrentScan WHERE dev_MAC = cur_MAC AND dev_ScanCycle = cur_ScanCycle) """, - (startTime, cycle) ) + (startTime, conf.cycle) ) # Check IP Changed print_log ('Events 4 - IP Changes') @@ -308,7 +307,7 @@ def insert_events (db): WHERE dev_MAC = cur_MAC AND dev_ScanCycle = cur_ScanCycle AND dev_ScanCycle = ? AND dev_LastIP <> cur_IP """, - (startTime, cycle) ) + (startTime, conf.cycle) ) print_log ('Events end') diff --git a/pialert/nmapscan.py b/pialert/nmapscan.py index e5f0e1d2..38a55105 100644 --- a/pialert/nmapscan.py +++ b/pialert/nmapscan.py @@ -1,8 +1,8 @@ import subprocess +import conf from const import logPath -from conf import NMAP_ARGS, NMAP_TIMEOUT from database import sql_nmap_scan_all from helper import json_struc, timeNow, updateState from logger import append_line_to_file, mylog @@ -33,7 +33,7 @@ def performNmapScan(db, devicesToScan): if len(devicesToScan) > 0: - timeoutSec = NMAP_TIMEOUT + timeoutSec = conf.NMAP_TIMEOUT devTotal = len(devicesToScan) @@ -48,7 +48,7 @@ def performNmapScan(db, devicesToScan): # Execute command output = "" # prepare arguments from user supplied ones - nmapArgs = ['nmap'] + NMAP_ARGS.split() + [device["dev_LastIP"]] + nmapArgs = ['nmap'] + conf.NMAP_ARGS.split() + [device["dev_LastIP"]] progress = ' (' + str(devIndex+1) + '/' + str(devTotal) + ')' diff --git a/pialert/pialert.py b/pialert/pialert.py index ae75f53a..141801dd 100755 --- a/pialert/pialert.py +++ b/pialert/pialert.py @@ -42,10 +42,10 @@ import multiprocessing # pialert modules +import conf from const import * -from conf import * from logger import mylog -from helper import filePermissions, timeNow, updateState +from helper import filePermissions, isNewVersion, timeNow, updateState from api import update_api from files import get_file_content from networkscan import scan_network @@ -62,10 +62,6 @@ from internet import check_internet_IP # Global variables - - - -userSubnets = [] changedPorts_json_struc = None time_started = datetime.datetime.now() cron_instance = Cron() @@ -83,7 +79,7 @@ sql_connection = None #=============================================================================== cycle = "" check_report = [1, "internet_IP", "update_vendors_silent"] -plugins_once_run = False +conf.plugins_once_run = False # timestamps of last execution times startTime = time_started @@ -103,9 +99,7 @@ def main (): global time_started, cycle, last_network_scan, 
last_internet_IP_scan, last_run, last_cleanup, last_update_vendors
     # second set of global variables
     global startTime, log_timestamp, plugins_once_run
-
-    # To-Do all these DB Globals need to be removed
-    global db, sql, sql_connection
+

     # check file permissions and fix if required
     filePermissions()
@@ -116,7 +110,7 @@ def main ():
     db.openDB()

     # To-Do replace the following two lines with the db class
-    sql_connection = db.sql_connection
+    # sql_connection = db.sql_connection
     sql = db.sql

     # Upgrade DB if needed
@@ -134,10 +128,15 @@ def main ():
         mylog('debug', ['[', timeNow(), '] [MAIN] Starting loop'])

         # re-load user configuration and plugins
+        mylog('debug', "tz before config : " + str(conf.tz))
         importConfigs(db)
-
+        mylog('debug', "tz after config : " + str(conf.tz))
+
+        # check if new version is available
+        conf.newVersionAvailable = isNewVersion(False)
+
         # Handle plugins executed ONCE
-        if ENABLE_PLUGINS and plugins_once_run == False:
+        if conf.ENABLE_PLUGINS and conf.plugins_once_run == False:
             run_plugin_scripts(db, 'once')
             plugins_once_run = True
@@ -161,7 +160,7 @@ def main ():
         startTime = startTime.replace (microsecond=0)

         # Check if any plugins need to run on schedule
-        if ENABLE_PLUGINS:
+        if conf.ENABLE_PLUGINS:
             run_plugin_scripts(db,'schedule')

         # determine run/scan type based on passed time
@@ -171,7 +170,7 @@ def main ():
         if last_internet_IP_scan + datetime.timedelta(minutes=3) < time_started:
             cycle = 'internet_IP'
             last_internet_IP_scan = time_started
-            check_internet_IP(db,DIG_GET_IP_ARG)
+            check_internet_IP(db)

         # Update vendors once a week
         if last_update_vendors + datetime.timedelta(days = 7) < time_started:
@@ -181,43 +180,48 @@ def main ():
             update_devices_MAC_vendors()

         # Execute scheduled or one-off Pholus scan if enabled and run conditions fulfilled
-        if PHOLUS_RUN == "schedule" or PHOLUS_RUN == "once":
+        if conf.PHOLUS_RUN == "schedule" or conf.PHOLUS_RUN == "once":

-            pholusSchedule = [sch for sch in mySchedules if sch.service == "pholus"][0]
+            mylog('debug', "PHOLUS_RUN_SCHD: " + conf.PHOLUS_RUN_SCHD)
+            mylog('debug', "schedules : " + str(conf.mySchedules))
+
+            pholusSchedule = [sch for sch in conf.mySchedules if sch.service == "pholus"][0]
             run = False

             # run once after application starts
-            if PHOLUS_RUN == "once" and pholusSchedule.last_run == 0:
+
+
+            if conf.PHOLUS_RUN == "once" and pholusSchedule.last_run == 0:
                 run = True

             # run if overdue scheduled time
-            if PHOLUS_RUN == "schedule":
+            if conf.PHOLUS_RUN == "schedule":
                 run = pholusSchedule.runScheduleCheck()

             if run:
-                pholusSchedule.last_run = datetime.datetime.now(tz).replace(microsecond=0)
-                performPholusScan(db, PHOLUS_RUN_TIMEOUT, userSubnets)
+                pholusSchedule.last_run = datetime.datetime.now(conf.tz).replace(microsecond=0)
+                performPholusScan(db, conf.PHOLUS_RUN_TIMEOUT, conf.userSubnets)

         # Execute scheduled or one-off Nmap scan if enabled and run conditions fulfilled
-        if NMAP_RUN == "schedule" or NMAP_RUN == "once":
+        if conf.NMAP_RUN == "schedule" or conf.NMAP_RUN == "once":

-            nmapSchedule = [sch for sch in mySchedules if sch.service == "nmap"][0]
+            nmapSchedule = [sch for sch in conf.mySchedules if sch.service == "nmap"][0]
             run = False

             # run once after application starts
-            if NMAP_RUN == "once" and nmapSchedule.last_run == 0:
+            if conf.NMAP_RUN == "once" and nmapSchedule.last_run == 0:
                 run = True

             # run if overdue scheduled time
-            if NMAP_RUN == "schedule":
+            if conf.NMAP_RUN == "schedule":
                 run = nmapSchedule.runScheduleCheck()

             if run:
-                nmapSchedule.last_run = datetime.datetime.now(tz).replace(microsecond=0)
+                nmapSchedule.last_run = datetime.datetime.now(conf.tz).replace(microsecond=0)
                 performNmapScan(db, get_all_devices(db))

         # Perform a network scan via arp-scan or pihole
-        if last_network_scan + datetime.timedelta(minutes=SCAN_CYCLE_MINUTES) < time_started:
+        if last_network_scan + datetime.timedelta(minutes=conf.SCAN_CYCLE_MINUTES) < time_started:
             last_network_scan = time_started
             cycle = 1 # network scan
             mylog('verbose', ['[', timeNow(), '] cycle:',cycle])
@@ -248,7 +252,7 @@ def main ():
         # DEBUG end ++++++++++++++++++++++++++++++++++++++++++++++++++++++

         # Run plugin scripts which are set to run every time after a scan finished
-        if ENABLE_PLUGINS:
+        if conf.ENABLE_PLUGINS:
             run_plugin_scripts(db,'always_after_scan')

@@ -262,11 +266,11 @@ def main ():
         # new devices were found
         if len(newDevices) > 0:
             # run all plugins registered to be run when new devices are found
-            if ENABLE_PLUGINS:
+            if conf.ENABLE_PLUGINS:
                 run_plugin_scripts(db, 'on_new_device')

             # Scan newly found devices with Nmap if enabled
-            if NMAP_ACTIVE and len(newDevices) > 0:
+            if conf.NMAP_ACTIVE and len(newDevices) > 0:
                 performNmapScan( db, newDevices)

         # send all configured notifications
@@ -277,7 +281,7 @@ def main ():
             last_cleanup = time_started
             cycle = 'cleanup'
             mylog('verbose', ['[', timeNow(), '] cycle:',cycle])
-            db.cleanup_database(startTime, DAYS_TO_KEEP_EVENTS, PHOLUS_DAYS_DATA)
+            db.cleanup_database(startTime, conf.DAYS_TO_KEEP_EVENTS, conf.PHOLUS_DAYS_DATA)

         # Commit SQL
         db.commitDB()
@@ -314,6 +318,7 @@ def main ():

 #-------------------------------------------------------------------------------
 def check_and_run_event(db):
+    sql = db.sql # TO-DO
     sql.execute(""" select * from Parameters where par_ID = "Front_Event" """)
     rows = sql.fetchall()

diff --git a/pialert/pihole.py b/pialert/pihole.py
index d13a1c04..b85efd28 100644
--- a/pialert/pihole.py
+++ b/pialert/pihole.py
@@ -1,3 +1,5 @@
+""" module to import db and leases from PiHole """
+
 from const import piholeDB, piholeDhcpleases

 #-------------------------------------------------------------------------------
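All of the run conditions in the main loop above (internet IP every three minutes, vendor update weekly, network scan every SCAN_CYCLE_MINUTES, plus the cleanup cycle) follow the same "last run plus interval" pattern. A minimal sketch of that pattern; the names are shortened for illustration, and scan_network(db) stands in for whichever task is due:

    import datetime

    def is_due(last_run, interval, now):
        # a task is due once its interval has fully elapsed since the last run
        return last_run + interval < now

    now = datetime.datetime.now().replace(microsecond=0)
    last_network_scan = now - datetime.timedelta(minutes=10)

    if is_due(last_network_scan, datetime.timedelta(minutes=5), now):
        last_network_scan = now  # stamp first, as main() does, then run the task
        # scan_network(db) would be called here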
diff --git a/pialert/plugin.py b/pialert/plugin.py
index f2ce9569..a9c685ee 100644
--- a/pialert/plugin.py
+++ b/pialert/plugin.py
@@ -5,11 +5,11 @@ import datetime
 from collections import namedtuple

 # pialert modules
+import conf
 from const import pluginsPath, logPath
-from conf import mySettings, plugins
 from files import get_file_content, write_file
 from logger import mylog
-from helper import updateState
+from helper import timeNowTZ, updateState

@@ -23,14 +23,14 @@ def timeNow():

 #-------------------------------------------------------------------------------
 def run_plugin_scripts(db, runType):
-    global plugins, tz, mySchedules
+    # global plugins, tz, mySchedules

     # Header
     updateState(db,"Run: Plugins")

     mylog('debug', [' [Plugins] Check if any plugins need to be executed on run type: ', runType])

-    for plugin in plugins:
+    for plugin in conf.plugins:

         shouldRun = False

@@ -43,13 +43,13 @@ def run_plugin_scripts(db, runType):
             prefix = plugin["unique_prefix"]

             # check schedules if any contains a unique plugin prefix matching the current plugin
-            for schd in mySchedules:
+            for schd in conf.mySchedules:
                 if schd.service == prefix:
                     # Check if schedule overdue
                     shouldRun = schd.runScheduleCheck()
                     if shouldRun:
                         # note the last time the scheduled plugin run was executed
-                        schd.last_run = datetime.datetime.now(tz).replace(microsecond=0)
+                        schd.last_run = timeNowTZ()

         if shouldRun:

@@ -107,14 +107,14 @@ def
get_plugin_setting(plugin, function_key): def get_setting(key): result = None # index order: key, name, desc, inputtype, options, regex, result, group, events - for set in mySettings: + for set in conf.mySettings: if set[0] == key: result = set if result is None: mylog('info', [' Error - setting_missing - Setting not found for key: ', key]) mylog('info', [' Error - logging the settings into file: ', logPath + '/setting_missing.json']) - write_file (logPath + '/setting_missing.json', json.dumps({ 'data' : mySettings})) + write_file (logPath + '/setting_missing.json', json.dumps({ 'data' : conf.mySettings})) return result diff --git a/pialert/reporting.py b/pialert/reporting.py index 0100fc62..3df6fa18 100644 --- a/pialert/reporting.py +++ b/pialert/reporting.py @@ -10,9 +10,9 @@ import subprocess import requests from json2table import convert +# pialert modules +import conf from const import pialertPath, logPath -# from pialert.api import update_api -from conf import * from database import get_table_as_json from files import write_file from helper import generate_mac_links, isNewVersion, removeDuplicateNewLines, timeNow, hide_email, json_struc, updateState @@ -39,7 +39,7 @@ class noti_struc: #------------------------------------------------------------------------------- -def construct_notifications(sqlQuery, tableTitle, skipText = False, suppliedJsonStruct = None): +def construct_notifications(db, sqlQuery, tableTitle, skipText = False, suppliedJsonStruct = None): if suppliedJsonStruct is None and sqlQuery == "": return noti_struc("", "", "") @@ -52,7 +52,7 @@ def construct_notifications(sqlQuery, tableTitle, skipText = False, suppliedJson text_line = '{}\t{}\n' if suppliedJsonStruct is None: - json_struc = get_table_as_json(sqlQuery) + json_struc = get_table_as_json(db, sqlQuery) else: json_struc = suppliedJsonStruct @@ -92,7 +92,7 @@ def send_notifications (db): sql = db.sql #TO-DO global mail_text, mail_html, json_final, changedPorts_json_struc, partial_html, partial_txt, partial_json - deviceUrl = REPORT_DASHBOARD_URL + '/deviceDetails.php?mac=' + deviceUrl = conf.REPORT_DASHBOARD_URL + '/deviceDetails.php?mac=' plugins_report = False # Reporting section @@ -125,7 +125,7 @@ def send_notifications (db): # Open html Template template_file = open(pialertPath + '/back/report_template.html', 'r') - if isNewVersion(db): + if conf.newVersionAvailable : template_file = open(pialertPath + '/back/report_template_new_version.html', 'r') mail_html = template_file.read() @@ -139,13 +139,13 @@ def send_notifications (db): mail_text = mail_text.replace ('', socket.gethostname() ) mail_html = mail_html.replace ('', socket.gethostname() ) - if 'internet' in INCLUDED_SECTIONS: + if 'internet' in conf.INCLUDED_SECTIONS: # Compose Internet Section sqlQuery = """SELECT eve_MAC as MAC, eve_IP as IP, eve_DateTime as Datetime, eve_EventType as "Event Type", eve_AdditionalInfo as "More info" FROM Events WHERE eve_PendingAlertEmail = 1 AND eve_MAC = 'Internet' ORDER BY eve_DateTime""" - notiStruc = construct_notifications(sqlQuery, "Internet IP change") + notiStruc = construct_notifications(db, sqlQuery, "Internet IP change") # collect "internet" (IP changes) for the webhook json json_internet = notiStruc.json["data"] @@ -153,14 +153,14 @@ def send_notifications (db): mail_text = mail_text.replace ('', notiStruc.text + '\n') mail_html = mail_html.replace ('', notiStruc.html) - if 'new_devices' in INCLUDED_SECTIONS: + if 'new_devices' in conf.INCLUDED_SECTIONS: # Compose New Devices Section sqlQuery = """SELECT 
eve_MAC as MAC, eve_DateTime as Datetime, dev_LastIP as IP, eve_EventType as "Event Type", dev_Name as "Device name", dev_Comments as Comments FROM Events_Devices WHERE eve_PendingAlertEmail = 1 AND eve_EventType = 'New Device' ORDER BY eve_DateTime""" - notiStruc = construct_notifications(sqlQuery, "New devices") + notiStruc = construct_notifications(db, sqlQuery, "New devices") # collect "new_devices" for the webhook json json_new_devices = notiStruc.json["data"] @@ -168,14 +168,14 @@ def send_notifications (db): mail_text = mail_text.replace ('', notiStruc.text + '\n') mail_html = mail_html.replace ('', notiStruc.html) - if 'down_devices' in INCLUDED_SECTIONS: + if 'down_devices' in conf.INCLUDED_SECTIONS: # Compose Devices Down Section sqlQuery = """SELECT eve_MAC as MAC, eve_DateTime as Datetime, dev_LastIP as IP, eve_EventType as "Event Type", dev_Name as "Device name", dev_Comments as Comments FROM Events_Devices WHERE eve_PendingAlertEmail = 1 AND eve_EventType = 'Device Down' ORDER BY eve_DateTime""" - notiStruc = construct_notifications(sqlQuery, "Down devices") + notiStruc = construct_notifications(db, sqlQuery, "Down devices") # collect "new_devices" for the webhook json json_down_devices = notiStruc.json["data"] @@ -183,7 +183,7 @@ def send_notifications (db): mail_text = mail_text.replace ('', notiStruc.text + '\n') mail_html = mail_html.replace ('', notiStruc.html) - if 'events' in INCLUDED_SECTIONS: + if 'events' in conf.INCLUDED_SECTIONS: # Compose Events Section sqlQuery = """SELECT eve_MAC as MAC, eve_DateTime as Datetime, dev_LastIP as IP, eve_EventType as "Event Type", dev_Name as "Device name", dev_Comments as Comments FROM Events_Devices WHERE eve_PendingAlertEmail = 1 @@ -191,7 +191,7 @@ def send_notifications (db): 'IP Changed') ORDER BY eve_DateTime""" - notiStruc = construct_notifications(sqlQuery, "Events") + notiStruc = construct_notifications(db, sqlQuery, "Events") # collect "events" for the webhook json json_events = notiStruc.json["data"] @@ -199,12 +199,12 @@ def send_notifications (db): mail_text = mail_text.replace ('', notiStruc.text + '\n') mail_html = mail_html.replace ('', notiStruc.html) - if 'ports' in INCLUDED_SECTIONS: + if 'ports' in conf.INCLUDED_SECTIONS: # collect "ports" for the webhook json if changedPorts_json_struc is not None: json_ports = changedPorts_json_struc.json["data"] - notiStruc = construct_notifications("", "Ports", True, changedPorts_json_struc) + notiStruc = construct_notifications(db, "", "Ports", True, changedPorts_json_struc) mail_html = mail_html.replace ('', notiStruc.html) @@ -214,11 +214,11 @@ def send_notifications (db): mail_text = mail_text.replace ('', portsTxt ) - if 'plugins' in INCLUDED_SECTIONS and ENABLE_PLUGINS: + if 'plugins' in conf.INCLUDED_SECTIONS and conf.ENABLE_PLUGINS: # Compose Plugins Section sqlQuery = """SELECT Plugin, Object_PrimaryId, Object_SecondaryId, DateTimeChanged, Watched_Value1, Watched_Value2, Watched_Value3, Watched_Value4, Status from Plugins_Events""" - notiStruc = construct_notifications(sqlQuery, "Plugins") + notiStruc = construct_notifications(db, sqlQuery, "Plugins") # collect "plugins" for the webhook json json_plugins = notiStruc.json["data"] @@ -250,44 +250,44 @@ def send_notifications (db): write_file (logPath + '/report_output.html', mail_html) # Send Mail - if json_internet != [] or json_new_devices != [] or json_down_devices != [] or json_events != [] or json_ports != [] or debug_force_notification or plugins_report: + if json_internet != [] or json_new_devices != [] or 
json_down_devices != [] or json_events != [] or json_ports != [] or conf.debug_force_notification or plugins_report: # update_api(True) # TO-DO mylog('none', [' Changes detected, sending reports']) - if REPORT_MAIL and check_config('email'): + if conf.REPORT_MAIL and check_config('email'): updateState(db,"Send: Email") mylog('info', [' Sending report by Email']) send_email (mail_text, mail_html) else : mylog('verbose', [' Skip email']) - if REPORT_APPRISE and check_config('apprise'): + if conf.REPORT_APPRISE and check_config('apprise'): updateState(db,"Send: Apprise") mylog('info', [' Sending report by Apprise']) send_apprise (mail_html, mail_text) else : mylog('verbose', [' Skip Apprise']) - if REPORT_WEBHOOK and check_config('webhook'): + if conf.REPORT_WEBHOOK and check_config('webhook'): updateState(db,"Send: Webhook") mylog('info', [' Sending report by Webhook']) send_webhook (json_final, mail_text) else : mylog('verbose', [' Skip webhook']) - if REPORT_NTFY and check_config('ntfy'): + if conf.REPORT_NTFY and check_config('ntfy'): updateState(db,"Send: NTFY") mylog('info', [' Sending report by NTFY']) send_ntfy (mail_text) else : mylog('verbose', [' Skip NTFY']) - if REPORT_PUSHSAFER and check_config('pushsafer'): + if conf.REPORT_PUSHSAFER and check_config('pushsafer'): updateState(db,"Send: PUSHSAFER") mylog('info', [' Sending report by PUSHSAFER']) send_pushsafer (mail_text) else : mylog('verbose', [' Skip PUSHSAFER']) # Update MQTT entities - if REPORT_MQTT and check_config('mqtt'): + if conf.REPORT_MQTT and check_config('mqtt'): updateState(db,"Send: MQTT") mylog('info', [' Establishing MQTT thread']) mqtt_start() @@ -320,42 +320,42 @@ def send_notifications (db): def check_config(service): if service == 'email': - if SMTP_SERVER == '' or REPORT_FROM == '' or REPORT_TO == '': + if conf.SMTP_SERVER == '' or conf.REPORT_FROM == '' or conf.REPORT_TO == '': mylog('none', [' Error: Email service not set up correctly. Check your pialert.conf SMTP_*, REPORT_FROM and REPORT_TO variables.']) return False else: return True if service == 'apprise': - if APPRISE_URL == '' or APPRISE_HOST == '': + if conf.APPRISE_URL == '' or conf.APPRISE_HOST == '': mylog('none', [' Error: Apprise service not set up correctly. Check your pialert.conf APPRISE_* variables.']) return False else: return True if service == 'webhook': - if WEBHOOK_URL == '': + if conf.WEBHOOK_URL == '': mylog('none', [' Error: Webhook service not set up correctly. Check your pialert.conf WEBHOOK_* variables.']) return False else: return True if service == 'ntfy': - if NTFY_HOST == '' or NTFY_TOPIC == '': + if conf.NTFY_HOST == '' or conf.NTFY_TOPIC == '': mylog('none', [' Error: NTFY service not set up correctly. Check your pialert.conf NTFY_* variables.']) return False else: return True if service == 'pushsafer': - if PUSHSAFER_TOKEN == 'ApiKey': + if conf.PUSHSAFER_TOKEN == 'ApiKey': mylog('none', [' Error: Pushsafer service not set up correctly. Check your pialert.conf PUSHSAFER_TOKEN variable.']) return False else: return True if service == 'mqtt': - if MQTT_BROKER == '' or MQTT_PORT == '' or MQTT_USER == '' or MQTT_PASSWORD == '': + if conf.MQTT_BROKER == '' or conf.MQTT_PORT == '' or conf.MQTT_USER == '' or conf.MQTT_PASSWORD == '': mylog('none', [' Error: MQTT service not set up correctly. 
Check your pialert.conf MQTT_* variables.']) return False else: @@ -371,19 +371,18 @@ def format_table (html, thValue, props, newThValue = ''): #------------------------------------------------------------------------------- def format_report_section (pActive, pSection, pTable, pText, pHTML): - global mail_text - global mail_html + # Replace section text if pActive : - mail_text = mail_text.replace ('<'+ pTable +'>', pText) - mail_html = mail_html.replace ('<'+ pTable +'>', pHTML) + conf.mail_text = conf.mail_text.replace ('<'+ pTable +'>', pText) + conf.mail_html = conf.mail_html.replace ('<'+ pTable +'>', pHTML) - mail_text = remove_tag (mail_text, pSection) - mail_html = remove_tag (mail_html, pSection) + conf.mail_text = remove_tag (conf.mail_text, pSection) + conf.mail_html = remove_tag (conf.mail_html, pSection) else: - mail_text = remove_section (mail_text, pSection) - mail_html = remove_section (mail_html, pSection) + conf.mail_text = remove_section (conf.mail_text, pSection) + conf.mail_html = remove_section (conf.mail_html, pSection) #------------------------------------------------------------------------------- def remove_section (pText, pSection): @@ -409,14 +408,14 @@ def remove_tag (pText, pTag): def send_email (pText, pHTML): # Print more info for debugging if LOG_LEVEL == 'debug' - if LOG_LEVEL == 'debug': - print_log ('REPORT_TO: ' + hide_email(str(REPORT_TO)) + ' SMTP_USER: ' + hide_email(str(SMTP_USER))) + if conf.LOG_LEVEL == 'debug': + print_log ('REPORT_TO: ' + hide_email(str(conf.REPORT_TO)) + ' SMTP_USER: ' + hide_email(str(conf.SMTP_USER))) # Compose email msg = MIMEMultipart('alternative') msg['Subject'] = 'Pi.Alert Report' - msg['From'] = REPORT_FROM - msg['To'] = REPORT_TO + msg['From'] = conf.REPORT_FROM + msg['To'] = conf.REPORT_TO msg.attach (MIMEText (pText, 'plain')) msg.attach (MIMEText (pHTML, 'html')) @@ -426,46 +425,46 @@ def send_email (pText, pHTML): try: # Send mail - failedAt = print_log('Trying to open connection to ' + str(SMTP_SERVER) + ':' + str(SMTP_PORT)) + failedAt = print_log('Trying to open connection to ' + str(conf.SMTP_SERVER) + ':' + str(conf.SMTP_PORT)) - if SMTP_FORCE_SSL: + if conf.SMTP_FORCE_SSL: failedAt = print_log('SMTP_FORCE_SSL == True so using .SMTP_SSL()') - if SMTP_PORT == 0: + if conf.SMTP_PORT == 0: failedAt = print_log('SMTP_PORT == 0 so sending .SMTP_SSL(SMTP_SERVER)') - smtp_connection = smtplib.SMTP_SSL(SMTP_SERVER) + smtp_connection = smtplib.SMTP_SSL(conf.SMTP_SERVER) else: failedAt = print_log('SMTP_PORT == 0 so sending .SMTP_SSL(SMTP_SERVER, SMTP_PORT)') - smtp_connection = smtplib.SMTP_SSL(SMTP_SERVER, SMTP_PORT) + smtp_connection = smtplib.SMTP_SSL(conf.SMTP_SERVER, conf.SMTP_PORT) else: failedAt = print_log('SMTP_FORCE_SSL == False so using .SMTP()') - if SMTP_PORT == 0: + if conf.SMTP_PORT == 0: failedAt = print_log('SMTP_PORT == 0 so sending .SMTP(SMTP_SERVER)') - smtp_connection = smtplib.SMTP (SMTP_SERVER) + smtp_connection = smtplib.SMTP (conf.SMTP_SERVER) else: failedAt = print_log('SMTP_PORT == 0 so sending .SMTP(SMTP_SERVER, SMTP_PORT)') - smtp_connection = smtplib.SMTP (SMTP_SERVER, SMTP_PORT) + smtp_connection = smtplib.SMTP (conf.SMTP_SERVER, conf.SMTP_PORT) failedAt = print_log('Setting SMTP debug level') # Log level set to debug of the communication between SMTP server and client - if LOG_LEVEL == 'debug': + if conf.LOG_LEVEL == 'debug': smtp_connection.set_debuglevel(1) failedAt = print_log( 'Sending .ehlo()') smtp_connection.ehlo() - if not SMTP_SKIP_TLS: + if not conf.SMTP_SKIP_TLS: failedAt = 
print_log('SMTP_SKIP_TLS == False so sending .starttls()') smtp_connection.starttls() failedAt = print_log('SMTP_SKIP_TLS == False so sending .ehlo()') smtp_connection.ehlo() - if not SMTP_SKIP_LOGIN: + if not conf.SMTP_SKIP_LOGIN: failedAt = print_log('SMTP_SKIP_LOGIN == False so sending .login()') - smtp_connection.login (SMTP_USER, SMTP_PASS) + smtp_connection.login (conf.SMTP_USER, conf.SMTP_PASS) failedAt = print_log('Sending .sendmail()') - smtp_connection.sendmail (REPORT_FROM, REPORT_TO, msg.as_string()) + smtp_connection.sendmail (conf.REPORT_FROM, conf.REPORT_TO, msg.as_string()) smtp_connection.quit() except smtplib.SMTPAuthenticationError as e: mylog('none', [' ERROR: Failed at - ', failedAt]) @@ -480,20 +479,20 @@ def send_email (pText, pHTML): def send_ntfy (_Text): headers = { "Title": "Pi.Alert Notification", - "Actions": "view, Open Dashboard, "+ REPORT_DASHBOARD_URL, + "Actions": "view, Open Dashboard, "+ conf.REPORT_DASHBOARD_URL, "Priority": "urgent", "Tags": "warning" } # if username and password are set generate hash and update header - if NTFY_USER != "" and NTFY_PASSWORD != "": + if conf.NTFY_USER != "" and conf.NTFY_PASSWORD != "": # Generate hash for basic auth - usernamepassword = "{}:{}".format(NTFY_USER,NTFY_PASSWORD) - basichash = b64encode(bytes(NTFY_USER + ':' + NTFY_PASSWORD, "utf-8")).decode("ascii") + usernamepassword = "{}:{}".format(conf.NTFY_USER,conf.NTFY_PASSWORD) + basichash = b64encode(bytes(conf.NTFY_USER + ':' + conf.NTFY_PASSWORD, "utf-8")).decode("ascii") # add authorization header with hash headers["Authorization"] = "Basic {}".format(basichash) - requests.post("{}/{}".format( NTFY_HOST, NTFY_TOPIC), + requests.post("{}/{}".format( conf.NTFY_HOST, conf.NTFY_TOPIC), data=_Text, headers=headers) @@ -507,9 +506,9 @@ def send_pushsafer (_Text): "i" : 148, "c" : '#ef7f7f', "d" : 'a', - "u" : REPORT_DASHBOARD_URL, + "u" : conf.REPORT_DASHBOARD_URL, "ut" : 'Open Pi.Alert', - "k" : PUSHSAFER_TOKEN, + "k" : conf.PUSHSAFER_TOKEN, } requests.post(url, data=post_fields) @@ -517,20 +516,20 @@ def send_pushsafer (_Text): def send_webhook (_json, _html): # use data type based on specified payload type - if WEBHOOK_PAYLOAD == 'json': + if conf.WEBHOOK_PAYLOAD == 'json': payloadData = _json - if WEBHOOK_PAYLOAD == 'html': + if conf.WEBHOOK_PAYLOAD == 'html': payloadData = _html - if WEBHOOK_PAYLOAD == 'text': + if conf.WEBHOOK_PAYLOAD == 'text': payloadData = to_text(_json) # Define slack-compatible payload - _json_payload = { "text": payloadData } if WEBHOOK_PAYLOAD == 'text' else { + _json_payload = { "text": payloadData } if conf.WEBHOOK_PAYLOAD == 'text' else { "username": "Pi.Alert", "text": "There are new notifications", "attachments": [{ "title": "Pi.Alert Notifications", - "title_link": REPORT_DASHBOARD_URL, + "title_link": conf.REPORT_DASHBOARD_URL, "text": payloadData }] } @@ -539,12 +538,12 @@ def send_webhook (_json, _html): write_file (logPath + '/webhook_payload.json', json.dumps(_json_payload)) # Using the Slack-Compatible Webhook endpoint for Discord so that the same payload can be used for both - if(WEBHOOK_URL.startswith('https://discord.com/api/webhooks/') and not WEBHOOK_URL.endswith("/slack")): - _WEBHOOK_URL = f"{WEBHOOK_URL}/slack" + if(conf.WEBHOOK_URL.startswith('https://discord.com/api/webhooks/') and not conf.WEBHOOK_URL.endswith("/slack")): + _WEBHOOK_URL = f"{conf.WEBHOOK_URL}/slack" curlParams = ["curl","-i","-H", "Content-Type:application/json" ,"-d", json.dumps(_json_payload), _WEBHOOK_URL] else: - _WEBHOOK_URL = WEBHOOK_URL - 
curlParams = ["curl","-i","-X", WEBHOOK_REQUEST_METHOD ,"-H", "Content-Type:application/json" ,"-d", json.dumps(_json_payload), _WEBHOOK_URL]
+        _WEBHOOK_URL = conf.WEBHOOK_URL
+        curlParams = ["curl","-i","-X", conf.WEBHOOK_REQUEST_METHOD ,"-H", "Content-Type:application/json" ,"-d", json.dumps(_json_payload), _WEBHOOK_URL]

     # execute CURL call
     try:
@@ -565,19 +564,19 @@ def send_apprise (html, text):

     #Define Apprise compatible payload (https://github.com/caronc/apprise-api#stateless-solution)
     payload = html

-    if APPRISE_PAYLOAD == 'text':
+    if conf.APPRISE_PAYLOAD == 'text':
         payload = text

     _json_payload={
-    "urls": APPRISE_URL,
+    "urls": conf.APPRISE_URL,
     "title": "Pi.Alert Notifications",
-    "format": APPRISE_PAYLOAD,
+    "format": conf.APPRISE_PAYLOAD,
     "body": payload
     }

     try:
         # try running a subprocess
-        p = subprocess.Popen(["curl","-i","-X", "POST" ,"-H", "Content-Type:application/json" ,"-d", json.dumps(_json_payload), APPRISE_HOST], stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+        p = subprocess.Popen(["curl","-i","-X", "POST" ,"-H", "Content-Type:application/json" ,"-d", json.dumps(_json_payload), conf.APPRISE_HOST], stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
         stdout, stderr = p.communicate()
         # write stdout and stderr into .log files for debugging if needed
         logResult (stdout, stderr) # TO-DO should be changed to mylog
@@ -588,19 +587,19 @@ def to_text(_json):
     payloadData = ""

-    if len(_json['internet']) > 0 and 'internet' in INCLUDED_SECTIONS:
+    if len(_json['internet']) > 0 and 'internet' in conf.INCLUDED_SECTIONS:
         payloadData += "INTERNET\n"
         for event in _json['internet']:
             payloadData += event[3] + ' on ' + event[2] + '. ' + event[4] + '. New address:' + event[1] + '\n'

-    if len(_json['new_devices']) > 0 and 'new_devices' in INCLUDED_SECTIONS:
+    if len(_json['new_devices']) > 0 and 'new_devices' in conf.INCLUDED_SECTIONS:
         payloadData += "NEW DEVICES:\n"
         for event in _json['new_devices']:
             if event[4] is None:
                 event[4] = event[11]
             payloadData += event[1] + ' - ' + event[4] + '\n'

-    if len(_json['down_devices']) > 0 and 'down_devices' in INCLUDED_SECTIONS:
+    if len(_json['down_devices']) > 0 and 'down_devices' in conf.INCLUDED_SECTIONS:
         write_file (logPath + '/down_devices_example.log', _json['down_devices'])
         payloadData += 'DOWN DEVICES:\n'
         for event in _json['down_devices']:
             if event[4] is None:
                 event[4] = event[11]
             payloadData += event[1] + ' - ' + event[4] + '\n'

-    if len(_json['events']) > 0 and 'events' in INCLUDED_SECTIONS:
+    if len(_json['events']) > 0 and 'events' in conf.INCLUDED_SECTIONS:
         payloadData += "EVENTS:\n"
         for event in _json['events']:
             if event[8] != "Internet":
diff --git a/pialert/scheduler.py b/pialert/scheduler.py
new file mode 100644
index 00000000..b0cbc2ca
--- /dev/null
+++ b/pialert/scheduler.py
@@ -0,0 +1,41 @@
+""" class to manage schedules """
+import datetime
+
+from logger import print_log
+import conf
+
+#-------------------------------------------------------------------------------
+class schedule_class:
+    def __init__(self, service, scheduleObject, last_next_schedule, was_last_schedule_used, last_run = 0):
+        self.service = service
+        self.scheduleObject = scheduleObject
+        self.last_next_schedule = last_next_schedule
+        self.last_run = last_run
+        self.was_last_schedule_used = was_last_schedule_used
+
+    def runScheduleCheck(self):
+
+        result = False
+
+        # Initialize the last run time if never run before
+        if self.last_run == 0:
+            self.last_run = (datetime.datetime.now(conf.tz) - datetime.timedelta(days=365)).replace(microsecond=0)
+
+        # get the current time with the currently specified timezone
+        nowTime = datetime.datetime.now(conf.tz).replace(microsecond=0)
+
+        # Run the schedule if the current time is past the schedule time we saved last time and
+        # (maybe the following check is unnecessary:)
+        # if the last run is past the last time we run a scheduled Pholus scan
+        if nowTime > self.last_next_schedule and self.last_run < self.last_next_schedule:
+            print_log(f'Scheduler run for {self.service}: YES')
+            self.was_last_schedule_used = True
+            result = True
+        else:
+            print_log(f'Scheduler run for {self.service}: NO')
+
+        if self.was_last_schedule_used:
+            self.was_last_schedule_used = False
+            self.last_next_schedule = self.scheduleObject.next()
+
+        return result
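A usage sketch for the new scheduler module, mirroring how initialise.py wires it up. It assumes conf.tz has already been set the way importConfigs() sets it (a pytz-style timezone object), since runScheduleCheck() reads conf.tz at call time; the cron expression is the Pholus default shown earlier in the settings:

    import datetime
    from pytz import timezone
    from cron_converter import Cron

    import conf
    from scheduler import schedule_class

    conf.tz = timezone('Europe/Berlin')   # normally done by importConfigs()

    cronSchedule = Cron('0 4 * * *').schedule(start_date=datetime.datetime.now(conf.tz))
    job = schedule_class("pholus", cronSchedule, cronSchedule.next(), False)

    # inside the main loop: fires at most once per matching cron slot
    if job.runScheduleCheck():
        job.last_run = datetime.datetime.now(conf.tz).replace(microsecond=0)
        # performPholusScan(db, conf.PHOLUS_RUN_TIMEOUT, conf.userSubnets) would run here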
datetime.timedelta(days=365)).replace(microsecond=0)
+
+        # get the current time with the currently specified timezone
+        nowTime = datetime.datetime.now(conf.tz).replace(microsecond=0)
+
+        # Run the schedule if the current time is past the schedule time we saved last time and
+        # (maybe the following check is unnecessary:)
+        # if the last run is past the last time we ran a scheduled Pholus scan
+        if nowTime > self.last_next_schedule and self.last_run < self.last_next_schedule:
+            print_log(f'Scheduler run for {self.service}: YES')
+            self.was_last_schedule_used = True
+            result = True
+        else:
+            print_log(f'Scheduler run for {self.service}: NO')
+
+        if self.was_last_schedule_used:
+            self.was_last_schedule_used = False
+            self.last_next_schedule = self.scheduleObject.next()
+
+        return result

From fd50ab7debd5421e80c5d0cc78aad042bd0c1833 Mon Sep 17 00:00:00 2001
From: Roland Beck <7224371+Data-Monkey@users.noreply.github.com>
Date: Thu, 25 May 2023 13:53:08 +1000
Subject: [PATCH 10/20] Create README.md

---
 pialert/README.md | 14 ++++++++++++++
 1 file changed, 14 insertions(+)
 create mode 100644 pialert/README.md

diff --git a/pialert/README.md b/pialert/README.md
new file mode 100644
index 00000000..6ef008b2
--- /dev/null
+++ b/pialert/README.md
@@ -0,0 +1,14 @@
+# Pi.Alert all split into modules
+
+I am trying to split this big original file into modules, which gives me some nice challenges to solve.
+Since the original code is all in one file, the original author took quite a few shortcuts by defining lots of variables as global!
+These need to be changed now.
+
+Here is the main structure:
+
+| Module | Description |
+|--------|-----------|
+|pialert.py | The MAIN program of Pi.Alert|
+|const.py | A place to define the constants for Pi.Alert, like the log path or config path.|
+|conf.py| conf.py holds the configuration variables and makes them available for all modules.
It is also the workaround for the global variables until I can work them out| +|api.py| | From c14c762bdec266e7f38b8d636663ade1be55dad2 Mon Sep 17 00:00:00 2001 From: Data-Monkey Date: Thu, 25 May 2023 19:51:03 +1000 Subject: [PATCH 11/20] cleanup --- db/pialert.db | Bin 262144 -> 245760 bytes pialert/conf.py | 4 +- pialert/helper.py | 7 +- pialert/pialert.py | 173 +++++++++++-------------------------------- pialert/reporting.py | 71 +++++++++++++++++- 5 files changed, 119 insertions(+), 136 deletions(-) diff --git a/db/pialert.db b/db/pialert.db index 2e998bb24810d810e43ce4dc1c1ba7efcbb8a2cf..50a882253f85487ce8647ac016428d647dc9f395 100755 GIT binary patch delta 410 zcmZo@5NK%NpCB#xo`HeE28f}6wPK=`#fU-)sWDU%K zvWnPc&8$p}fwBrfSw;?VREsUFOo7rsi#NxesTF2oXqkNXz7nI#rU%>wtY-Yb`M&`j z{gPkUGz#cL6H{LPhYU>o(-`=t@oxi4bnz?5F*0kaJ0&J($2%4j#0QromZSz0H%UKMV1(vfUFgpmba|bf;b@0XVTJv)Ah;rZM Q4g_kj=H4E-nt8nl02@1CD*ylh delta 1880 zcmYk63v3kE6^3Wdnc1CvjtLmsF}Bwj+xUes#s-_0?ckDto!CH1O@Q^r4~z|d#Jk|P z_v``=kD3@4juD1bO0cCmkT_A9RwxQ;Q>9hYszIm`X?ZCXNhp<)KB5$&sQ0#!nvv#8 zckZ2Yzw`g+o^aR)oatqb_v-PjOgDLJ^f+7<{lv@|sy)W~Cf-iA5=x%ta|m<8lwUpo!}Ir&^WW&k<(-FDmvWU$$R znLu`4(Sa2}Hty@dZ9qC->%b*e?F_y?Z=*CTRh?2i|ML9OGwy!qzUJz5RXCF!pE!;= zsvQ|dt+B-Z7yD1`DYl2U&C*7(RzUw7eye_&dC4b!`GhM-1y;uNL%+K}WBA_BG+>50`isgKzPhuDH z$9)oe8E5oM>;fL@#~h%g{1Y*^Q@&8BvV>pn$7D_)z(k-WTs|OG&f}2*DNHWU4&Yv( z98Mm@MFwRHqz&ZfKMxA$z6oLoD2qQF#5|zItPkOnK$-mX5H1AD;I1LO29(b0hoz&7 zxOEs;1EulwFxKI6TF9I0vDlzg5%Z~NFTWAM1Z&4dR8`C&AI}72)D-JR0R6(vWWidP z$hQL+`&~CFmXbKofTw{L@K+7E1SpX+8>K9dTXl_Shf?WJf)Y?XUwPj1On4Oc4cB2; zowL|!a|}5)IFjx6?Z2|`w^!>`dMYNsr|@$~RUay&N??pKF`KsV<`F6PW@~r^_Xv4w z`Lh>gmDF&|5j+L-3{M@wJP$oB5Z9!6n%e!LZk|0N6x!5?ZihzIBE!m&N{%^>ar~!o zthPFi;#m=?ie1Ms8)%bNd>p?KA2xE)sC4cr-Z_fZKu_|_sI+#2^@mYhs$vFxkFiVc zsN|e3ECyQ7&v)S=peOj#F3bn2u;O3%?|mrel5UAzW;J(XjflODt)RqS%eR6OdkqIe z5_|P`v4=zd$6oa<_N@aLXMG;RPtZ*6C z&JUgB^gG^nyyD0-9vL-8hP};RuCLbJ+AVE^mI@!h1vRQ}QNB?iqL}vagR@eKy__(K`3AL#Jx`33bM1MY|35$8oWwICNGn&Jmu0_) z_n*g~0kv@c%W}S(>t4ob%%Wzl4@<+E_*fYGfEw9GvO6_!KH*D1&+}D6A5ehr5^e>m z=i-Q5_p?8OJArob+Y!t%Xs09)Q79@1Fp51uJ9r=}w?4-gqF4pAo&O%iG@v?b{z1$W za6jPnhonr~xa|;@0DYe?9Fm>zS=J8A?6z73hlNbJ(xb<8r{bOP2E7&Dc=ue_b>| 0 and rows[0]['par_Value'] != 'finished': - event = rows[0]['par_Value'].split('|')[0] - param = rows[0]['par_Value'].split('|')[1] - else: - return - - if event == 'test': - handle_test(param) - if event == 'run': - handle_run(param) - - # clear event execution flag - sql.execute ("UPDATE Parameters SET par_Value='finished' WHERE par_ID='Front_Event'") - - # commit to DB - db.commitDB() - -#------------------------------------------------------------------------------- -def handle_run(runType): - global last_network_scan - - mylog('info', ['[', timeNow(), '] START Run: ', runType]) - - if runType == 'ENABLE_ARPSCAN': - last_network_scan = now_minus_24h - - mylog('info', ['[', timeNow(), '] END Run: ', runType]) - -#------------------------------------------------------------------------------- -def handle_test(testType): - - mylog('info', ['[', timeNow(), '] START Test: ', testType]) - - # Open text sample - sample_txt = get_file_content(pialertPath + '/back/report_sample.txt') - - # Open html sample - sample_html = get_file_content(pialertPath + '/back/report_sample.html') - - # Open json sample and get only the payload part - sample_json_payload = json.loads(get_file_content(pialertPath + '/back/webhook_json_sample.json'))[0]["body"]["attachments"][0]["text"] - - if testType == 'REPORT_MAIL': - send_email(sample_txt, sample_html) - if testType == 'REPORT_WEBHOOK': - 
send_webhook (sample_json_payload, sample_txt) - if testType == 'REPORT_APPRISE': - send_apprise (sample_html, sample_txt) - if testType == 'REPORT_NTFY': - send_ntfy (sample_txt) - if testType == 'REPORT_PUSHSAFER': - send_pushsafer (sample_txt) - - mylog('info', ['[', timeNow(), '] END Test: ', testType]) - - - - - - diff --git a/pialert/reporting.py b/pialert/reporting.py index 3df6fa18..4b6de43c 100644 --- a/pialert/reporting.py +++ b/pialert/reporting.py @@ -14,7 +14,7 @@ from json2table import convert import conf from const import pialertPath, logPath from database import get_table_as_json -from files import write_file +from files import get_file_content, write_file from helper import generate_mac_links, isNewVersion, removeDuplicateNewLines, timeNow, hide_email, json_struc, updateState from logger import logResult, mylog, print_log from mqtt import mqtt_start @@ -636,4 +636,71 @@ def skip_repeated_notifications (db): """ ) print_log ('Skip Repeated end') - db.commitDB() \ No newline at end of file + db.commitDB() + + +#=============================================================================== +# UTIL +#=============================================================================== + +#------------------------------------------------------------------------------- +def check_and_run_event(db): + sql = db.sql # TO-DO + sql.execute(""" select * from Parameters where par_ID = "Front_Event" """) + rows = sql.fetchall() + + event, param = ['',''] + if len(rows) > 0 and rows[0]['par_Value'] != 'finished': + event = rows[0]['par_Value'].split('|')[0] + param = rows[0]['par_Value'].split('|')[1] + else: + return + + if event == 'test': + handle_test(param) + if event == 'run': + handle_run(param) + + # clear event execution flag + sql.execute ("UPDATE Parameters SET par_Value='finished' WHERE par_ID='Front_Event'") + + # commit to DB + db.commitDB() + +#------------------------------------------------------------------------------- +def handle_run(runType): + global last_network_scan + + mylog('info', ['[', timeNow(), '] START Run: ', runType]) + + if runType == 'ENABLE_ARPSCAN': + last_network_scan = conf.time_started - datetime.timedelta(hours = 24) + + mylog('info', ['[', timeNow(), '] END Run: ', runType]) + +#------------------------------------------------------------------------------- +def handle_test(testType): + + mylog('info', ['[', timeNow(), '] START Test: ', testType]) + + # Open text sample + sample_txt = get_file_content(pialertPath + '/back/report_sample.txt') + + # Open html sample + sample_html = get_file_content(pialertPath + '/back/report_sample.html') + + # Open json sample and get only the payload part + sample_json_payload = json.loads(get_file_content(pialertPath + '/back/webhook_json_sample.json'))[0]["body"]["attachments"][0]["text"] + + if testType == 'REPORT_MAIL': + send_email(sample_txt, sample_html) + if testType == 'REPORT_WEBHOOK': + send_webhook (sample_json_payload, sample_txt) + if testType == 'REPORT_APPRISE': + send_apprise (sample_html, sample_txt) + if testType == 'REPORT_NTFY': + send_ntfy (sample_txt) + if testType == 'REPORT_PUSHSAFER': + send_pushsafer (sample_txt) + + mylog('info', ['[', timeNow(), '] END Test: ', testType]) \ No newline at end of file From a2f2bce3abb71b87330c8084123fbe1fde7b6588 Mon Sep 17 00:00:00 2001 From: Data-Monkey Date: Fri, 26 May 2023 19:04:11 +1000 Subject: [PATCH 12/20] working docker version --- pialert/initialise.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pialert/initialise.py 
b/pialert/initialise.py index f8d91813..d7417cef 100644 --- a/pialert/initialise.py +++ b/pialert/initialise.py @@ -153,6 +153,7 @@ def importConfigs (db): # Init timezone in case it changed conf.tz = timezone(conf.TIMEZONE) + # global mySchedules # reset schedules conf.mySchedules = [] @@ -161,7 +162,6 @@ def importConfigs (db): pholusSchedule = Cron(conf.PHOLUS_RUN_SCHD).schedule(start_date=datetime.datetime.now(conf.tz)) conf.mySchedules.append(schedule_class("pholus", pholusSchedule, pholusSchedule.next(), False)) - mylog('debug', "schedules (appended) : " + str(conf.mySchedules)) # init nmap schedule nmapSchedule = Cron(conf.NMAP_RUN_SCHD).schedule(start_date=datetime.datetime.now(conf.tz)) From 52027c65b3b1c17bde01f5fa7e754303e8d2b496 Mon Sep 17 00:00:00 2001 From: Data-Monkey Date: Fri, 26 May 2023 19:04:20 +1000 Subject: [PATCH 13/20] working docker version --- dockerfiles/start.sh | 4 +- front/plugins/undiscoverables/config.json | 4 +- pialert/conf.py | 6 +- pialert/const.py | 4 +- pialert/database.py | 240 +++++++++++++++++++++- pialert/device.py | 42 ++-- pialert/helper.py | 1 - pialert/logger.py | 21 +- pialert/networkscan.py | 66 +++--- pialert/pialert.py | 58 ++---- pialert/plugin.py | 24 +-- pialert/reporting.py | 14 +- pialert/scheduler.py | 6 +- 13 files changed, 354 insertions(+), 136 deletions(-) diff --git a/dockerfiles/start.sh b/dockerfiles/start.sh index 44535788..4ff77756 100755 --- a/dockerfiles/start.sh +++ b/dockerfiles/start.sh @@ -28,4 +28,6 @@ chmod -R a+rw /home/pi/pialert/config /etc/init.d/nginx start # cron -f -python /home/pi/pialert/back/pialert.py +#python /home/pi/pialert/back/pialert.py +echo "DATA MONKEY VERSION ..." +python /home/pi/pialert/pialert/pialert.py diff --git a/front/plugins/undiscoverables/config.json b/front/plugins/undiscoverables/config.json index de03f76f..fd316f49 100755 --- a/front/plugins/undiscoverables/config.json +++ b/front/plugins/undiscoverables/config.json @@ -45,7 +45,7 @@ }], "description": [{ "language_code":"en_us", - "string" : "When enabled, \"once\" is the preferred option. It runs at startup and after every save of the config here.
          Changes will only show in the devices after the next scan!" + "string" : "When enabled, ONCE is the preferred option. It runs at startup and after every save of the config here.
          Changes will only show in the devices after the next scan!" }] }, { @@ -214,4 +214,4 @@ }] } ] -} +} \ No newline at end of file diff --git a/pialert/conf.py b/pialert/conf.py index a5e651e1..69157e1d 100644 --- a/pialert/conf.py +++ b/pialert/conf.py @@ -15,6 +15,8 @@ plugins_once_run = False newVersionAvailable = False time_started = '' check_report = [] +log_timestamp = 0 + # ACTUAL CONFIGRATION ITEMS set to defaults @@ -103,9 +105,9 @@ PHOLUS_DAYS_DATA = 0 # Nmap NMAP_ACTIVE = True NMAP_TIMEOUT = 150 -NMAP_RUN = 'none' +NMAP_RUN = 'once' NMAP_RUN_SCHD = '0 2 * * *' -NMAP_ARGS = '-p -10000' +NMAP_ARGS = '-p -10000 --max-parallelism 100' # API API_CUSTOM_SQL = 'SELECT * FROM Devices WHERE dev_PresentLastScan = 0' \ No newline at end of file diff --git a/pialert/const.py b/pialert/const.py index 132a88b8..ab720c0a 100644 --- a/pialert/const.py +++ b/pialert/const.py @@ -3,8 +3,8 @@ #=============================================================================== # PATHS #=============================================================================== -# pialertPath = '/home/pi/pialert' -pialertPath ='/home/roland/repos/Pi.Alert' +pialertPath = '/home/pi/pialert' +#pialertPath ='/home/roland/repos/Pi.Alert' confPath = "/config/pialert.conf" dbPath = '/db/pialert.db' diff --git a/pialert/database.py b/pialert/database.py index 2e0d433a..06857214 100644 --- a/pialert/database.py +++ b/pialert/database.py @@ -56,7 +56,6 @@ class DB(): self.sql_connection.row_factory = sqlite3.Row self.sql = self.sql_connection.cursor() - #------------------------------------------------------------------------------- def commitDB (self): if self.sql_connection == None : @@ -88,7 +87,6 @@ class DB(): return arr - #=============================================================================== # Cleanup / upkeep database #=============================================================================== @@ -142,12 +140,250 @@ class DB(): self.commitDB() + #------------------------------------------------------------------------------- + def upgradeDB(self): + sql = self.sql #TO-DO + # indicates, if Online_History table is available + onlineHistoryAvailable = sql.execute(""" + SELECT name FROM sqlite_master WHERE type='table' + AND name='Online_History'; + """).fetchall() != [] + # Check if it is incompatible (Check if table has all required columns) + isIncompatible = False + + if onlineHistoryAvailable : + isIncompatible = sql.execute (""" + SELECT COUNT(*) AS CNTREC FROM pragma_table_info('Online_History') WHERE name='Archived_Devices' + """).fetchone()[0] == 0 + + # Drop table if available, but incompatible + if onlineHistoryAvailable and isIncompatible: + mylog('none','[upgradeDB] Table is incompatible, Dropping the Online_History table') + sql.execute("DROP TABLE Online_History;") + onlineHistoryAvailable = False + if onlineHistoryAvailable == False : + sql.execute(""" + CREATE TABLE "Online_History" ( + "Index" INTEGER, + "Scan_Date" TEXT, + "Online_Devices" INTEGER, + "Down_Devices" INTEGER, + "All_Devices" INTEGER, + "Archived_Devices" INTEGER, + PRIMARY KEY("Index" AUTOINCREMENT) + ); + """) + # Alter Devices table + # dev_Network_Node_MAC_ADDR column + dev_Network_Node_MAC_ADDR_missing = sql.execute (""" + SELECT COUNT(*) AS CNTREC FROM pragma_table_info('Devices') WHERE name='dev_Network_Node_MAC_ADDR' + """).fetchone()[0] == 0 + if dev_Network_Node_MAC_ADDR_missing : + mylog('verbose', ["[upgradeDB] Adding dev_Network_Node_MAC_ADDR to the Devices table"]) + sql.execute(""" + ALTER TABLE 
"Devices" ADD "dev_Network_Node_MAC_ADDR" TEXT + """) + # dev_Network_Node_port column + dev_Network_Node_port_missing = sql.execute (""" + SELECT COUNT(*) AS CNTREC FROM pragma_table_info('Devices') WHERE name='dev_Network_Node_port' + """).fetchone()[0] == 0 + + if dev_Network_Node_port_missing : + mylog('verbose', ["[upgradeDB] Adding dev_Network_Node_port to the Devices table"]) + sql.execute(""" + ALTER TABLE "Devices" ADD "dev_Network_Node_port" INTEGER + """) + + # dev_Icon column + dev_Icon_missing = sql.execute (""" + SELECT COUNT(*) AS CNTREC FROM pragma_table_info('Devices') WHERE name='dev_Icon' + """).fetchone()[0] == 0 + + if dev_Icon_missing : + mylog('verbose', ["[upgradeDB] Adding dev_Icon to the Devices table"]) + sql.execute(""" + ALTER TABLE "Devices" ADD "dev_Icon" TEXT + """) + + # indicates, if Settings table is available + settingsMissing = sql.execute(""" + SELECT name FROM sqlite_master WHERE type='table' + AND name='Settings'; + """).fetchone() == None + + # Re-creating Settings table + mylog('verbose', ["[upgradeDB] Re-creating Settings table"]) + + if settingsMissing == False: + sql.execute("DROP TABLE Settings;") + + sql.execute(""" + CREATE TABLE "Settings" ( + "Code_Name" TEXT, + "Display_Name" TEXT, + "Description" TEXT, + "Type" TEXT, + "Options" TEXT, + "RegEx" TEXT, + "Value" TEXT, + "Group" TEXT, + "Events" TEXT + ); + """) + + # indicates, if Pholus_Scan table is available + pholusScanMissing = sql.execute(""" + SELECT name FROM sqlite_master WHERE type='table' + AND name='Pholus_Scan'; + """).fetchone() == None + + # if pholusScanMissing == False: + # # Re-creating Pholus_Scan table + # sql.execute("DROP TABLE Pholus_Scan;") + # pholusScanMissing = True + + if pholusScanMissing: + mylog('verbose', ["[upgradeDB] Re-creating Pholus_Scan table"]) + sql.execute(""" + CREATE TABLE "Pholus_Scan" ( + "Index" INTEGER, + "Info" TEXT, + "Time" TEXT, + "MAC" TEXT, + "IP_v4_or_v6" TEXT, + "Record_Type" TEXT, + "Value" TEXT, + "Extra" TEXT, + PRIMARY KEY("Index" AUTOINCREMENT) + ); + """) + + # indicates, if Nmap_Scan table is available + nmapScanMissing = sql.execute(""" + SELECT name FROM sqlite_master WHERE type='table' + AND name='Nmap_Scan'; + """).fetchone() == None + + # Re-creating Parameters table + mylog('verbose', ["[upgradeDB] Re-creating Parameters table"]) + sql.execute("DROP TABLE Parameters;") + + sql.execute(""" + CREATE TABLE "Parameters" ( + "par_ID" TEXT PRIMARY KEY, + "par_Value" TEXT + ); + """) + + # Initialize Parameters if unavailable + initOrSetParam(self, 'Back_App_State','Initializing') + + # if nmapScanMissing == False: + # # Re-creating Nmap_Scan table + # sql.execute("DROP TABLE Nmap_Scan;") + # nmapScanMissing = True + + if nmapScanMissing: + mylog('verbose', ["[upgradeDB] Re-creating Nmap_Scan table"]) + sql.execute(""" + CREATE TABLE "Nmap_Scan" ( + "Index" INTEGER, + "MAC" TEXT, + "Port" TEXT, + "Time" TEXT, + "State" TEXT, + "Service" TEXT, + "Extra" TEXT, + PRIMARY KEY("Index" AUTOINCREMENT) + ); + """) + + # Plugin state + sql_Plugins_Objects = """ CREATE TABLE IF NOT EXISTS Plugins_Objects( + "Index" INTEGER, + Plugin TEXT NOT NULL, + Object_PrimaryID TEXT NOT NULL, + Object_SecondaryID TEXT NOT NULL, + DateTimeCreated TEXT NOT NULL, + DateTimeChanged TEXT NOT NULL, + Watched_Value1 TEXT NOT NULL, + Watched_Value2 TEXT NOT NULL, + Watched_Value3 TEXT NOT NULL, + Watched_Value4 TEXT NOT NULL, + Status TEXT NOT NULL, + Extra TEXT NOT NULL, + UserData TEXT NOT NULL, + ForeignKey TEXT NOT NULL, + PRIMARY KEY("Index" 
AUTOINCREMENT) + ); """ + sql.execute(sql_Plugins_Objects) + + # Plugin execution results + sql_Plugins_Events = """ CREATE TABLE IF NOT EXISTS Plugins_Events( + "Index" INTEGER, + Plugin TEXT NOT NULL, + Object_PrimaryID TEXT NOT NULL, + Object_SecondaryID TEXT NOT NULL, + DateTimeCreated TEXT NOT NULL, + DateTimeChanged TEXT NOT NULL, + Watched_Value1 TEXT NOT NULL, + Watched_Value2 TEXT NOT NULL, + Watched_Value3 TEXT NOT NULL, + Watched_Value4 TEXT NOT NULL, + Status TEXT NOT NULL, + Extra TEXT NOT NULL, + UserData TEXT NOT NULL, + ForeignKey TEXT NOT NULL, + PRIMARY KEY("Index" AUTOINCREMENT) + ); """ + sql.execute(sql_Plugins_Events) + + # Plugin execution history + sql_Plugins_History = """ CREATE TABLE IF NOT EXISTS Plugins_History( + "Index" INTEGER, + Plugin TEXT NOT NULL, + Object_PrimaryID TEXT NOT NULL, + Object_SecondaryID TEXT NOT NULL, + DateTimeCreated TEXT NOT NULL, + DateTimeChanged TEXT NOT NULL, + Watched_Value1 TEXT NOT NULL, + Watched_Value2 TEXT NOT NULL, + Watched_Value3 TEXT NOT NULL, + Watched_Value4 TEXT NOT NULL, + Status TEXT NOT NULL, + Extra TEXT NOT NULL, + UserData TEXT NOT NULL, + ForeignKey TEXT NOT NULL, + PRIMARY KEY("Index" AUTOINCREMENT) + ); """ + sql.execute(sql_Plugins_History) + + # Dynamically generated language strings + # indicates, if Language_Strings table is available + languageStringsMissing = sql.execute(""" + SELECT name FROM sqlite_master WHERE type='table' + AND name='Plugins_Language_Strings'; + """).fetchone() == None + + if languageStringsMissing == False: + sql.execute("DROP TABLE Plugins_Language_Strings;") + + sql.execute(""" CREATE TABLE IF NOT EXISTS Plugins_Language_Strings( + "Index" INTEGER, + Language_Code TEXT NOT NULL, + String_Key TEXT NOT NULL, + String_Value TEXT NOT NULL, + Extra TEXT NOT NULL, + PRIMARY KEY("Index" AUTOINCREMENT) + ); """) + + self.commitDB() #------------------------------------------------------------------------------- diff --git a/pialert/device.py b/pialert/device.py index 6d52ad34..e22ab380 100644 --- a/pialert/device.py +++ b/pialert/device.py @@ -9,7 +9,7 @@ from helper import timeNow from internet import check_IP_format, get_internet_IP from logger import mylog, print_log from mac_vendor import query_MAC_vendor -from pholusscan import performPholusScan, resolve_device_name_pholus +from pholusscan import performPholusScan, resolve_device_name_dig, resolve_device_name_pholus #------------------------------------------------------------------------------- @@ -167,7 +167,7 @@ def create_new_devices (db): startTime = timeNow() # arpscan - Insert events for new devices - print_log ('New devices - 1 Events') + mylog('debug','[New Devices] New devices - 1 Events') sql.execute ("""INSERT INTO Events (eve_MAC, eve_IP, eve_DateTime, eve_EventType, eve_AdditionalInfo, eve_PendingAlertEmail) @@ -178,7 +178,7 @@ def create_new_devices (db): WHERE dev_MAC = cur_MAC) """, (startTime, conf.cycle) ) - print_log ('New devices - Insert Connection into session table') + mylog('debug','[New Devices] Insert Connection into session table') sql.execute ("""INSERT INTO Sessions (ses_MAC, ses_IP, ses_EventTypeConnection, ses_DateTimeConnection, ses_EventTypeDisconnection, ses_DateTimeDisconnection, ses_StillConnected, ses_AdditionalInfo) SELECT cur_MAC, cur_IP,'Connected',?, NULL , NULL ,1, cur_Vendor @@ -189,7 +189,7 @@ def create_new_devices (db): (startTime, conf.cycle) ) # arpscan - Create new devices - print_log ('New devices - 2 Create devices') + mylog('debug','[New Devices] 2 Create devices') sql.execute 
("""INSERT INTO Devices (dev_MAC, dev_name, dev_Vendor, dev_LastIP, dev_FirstConnection, dev_LastConnection, dev_ScanCycle, dev_AlertEvents, dev_AlertDeviceDown, @@ -205,7 +205,7 @@ def create_new_devices (db): # Pi-hole - Insert events for new devices # NOT STRICYLY NECESARY (Devices can be created through Current_Scan) # Bugfix #2 - Pi-hole devices w/o IP - print_log ('New devices - 3 Pi-hole Events') + mylog('debug','[New Devices] 3 Pi-hole Events') sql.execute ("""INSERT INTO Events (eve_MAC, eve_IP, eve_DateTime, eve_EventType, eve_AdditionalInfo, eve_PendingAlertEmail) @@ -218,7 +218,7 @@ def create_new_devices (db): # Pi-hole - Create New Devices # Bugfix #2 - Pi-hole devices w/o IP - print_log ('New devices - 4 Pi-hole Create devices') + mylog('debug','[New Devices] 4 Pi-hole Create devices') sql.execute ("""INSERT INTO Devices (dev_MAC, dev_name, dev_Vendor, dev_LastIP, dev_FirstConnection, dev_LastConnection, dev_ScanCycle, dev_AlertEvents, dev_AlertDeviceDown, @@ -231,7 +231,7 @@ def create_new_devices (db): (startTime, startTime) ) # DHCP Leases - Insert events for new devices - print_log ('New devices - 5 DHCP Leases Events') + mylog('debug','[New Devices] 5 DHCP Leases Events') sql.execute ("""INSERT INTO Events (eve_MAC, eve_IP, eve_DateTime, eve_EventType, eve_AdditionalInfo, eve_PendingAlertEmail) @@ -242,7 +242,7 @@ def create_new_devices (db): (startTime, ) ) # DHCP Leases - Create New Devices - print_log ('New devices - 6 DHCP Leases Create devices') + mylog('debug','[New Devices] 6 DHCP Leases Create devices') # BUGFIX #23 - Duplicated MAC in DHCP.Leases # TEST - Force Duplicated MAC # sql.execute ("""INSERT INTO DHCP_Leases VALUES @@ -276,7 +276,7 @@ def create_new_devices (db): # WHERE NOT EXISTS (SELECT 1 FROM Devices # WHERE dev_MAC = DHCP_MAC) """, # (startTime, startTime) ) - print_log ('New Devices end') + mylog('debug','[New Devices] New Devices end') db.commitDB() @@ -285,7 +285,7 @@ def update_devices_data_from_scan (db): sql = db.sql #TO-DO startTime = timeNow() # Update Last Connection - print_log ('Update devices - 1 Last Connection') + mylog('debug','[Update Devices] 1 Last Connection') sql.execute ("""UPDATE Devices SET dev_LastConnection = ?, dev_PresentLastScan = 1 WHERE dev_ScanCycle = ? @@ -296,7 +296,7 @@ def update_devices_data_from_scan (db): (startTime, conf.cycle)) # Clean no active devices - print_log ('Update devices - 2 Clean no active devices') + mylog('debug','[Update Devices] 2 Clean no active devices') sql.execute ("""UPDATE Devices SET dev_PresentLastScan = 0 WHERE dev_ScanCycle = ? 
AND NOT EXISTS (SELECT 1 FROM CurrentScan @@ -305,7 +305,7 @@ def update_devices_data_from_scan (db): (conf.cycle,)) # Update IP & Vendor - print_log ('Update devices - 3 LastIP & Vendor') + mylog('debug','[Update Devices] - 3 LastIP & Vendor') sql.execute ("""UPDATE Devices SET dev_LastIP = (SELECT cur_IP FROM CurrentScan WHERE dev_MAC = cur_MAC @@ -320,7 +320,7 @@ def update_devices_data_from_scan (db): (conf.cycle,)) # Pi-hole Network - Update (unknown) Name - print_log ('Update devices - 4 Unknown Name') + mylog('debug','[Update Devices] - 4 Unknown Name') sql.execute ("""UPDATE Devices SET dev_NAME = (SELECT PH_Name FROM PiHole_Network WHERE PH_MAC = dev_MAC) @@ -341,7 +341,7 @@ def update_devices_data_from_scan (db): WHERE DHCP_MAC = dev_MAC)""") # DHCP Leases - Vendor - print_log ('Update devices - 5 Vendor') + mylog('debug','[Update Devices] - 5 Vendor') recordsToUpdate = [] query = """SELECT * FROM Devices @@ -358,7 +358,7 @@ def update_devices_data_from_scan (db): # clean-up device leases table sql.execute ("DELETE FROM DHCP_Leases") - print_log ('Update devices end') + mylog('debug','[Update Devices] Update devices end') #------------------------------------------------------------------------------- def update_devices_names (db): @@ -387,7 +387,7 @@ def update_devices_names (db): return # Devices without name - mylog('verbose', [' Trying to resolve devices without name']) + mylog('verbose', '[Update Device Name] Trying to resolve devices without name') # get names from Pholus scan sql.execute ('SELECT * FROM Pholus_Scan where "Record_Type"="Answer"') @@ -395,13 +395,13 @@ def update_devices_names (db): db.commitDB() # Number of entries from previous Pholus scans - mylog('verbose', [" Pholus entries from prev scans: ", len(pholusResults)]) + mylog('verbose', '[Update Device Name] Pholus entries from prev scans: ', len(pholusResults)) for device in unknownDevices: newName = -1 # Resolve device name with DiG - newName = resolve_device_name_pholus (device['dev_MAC'], device['dev_LastIP']) + newName = resolve_device_name_dig (device['dev_MAC'], device['dev_LastIP']) # count if newName != -1: @@ -422,13 +422,11 @@ def update_devices_names (db): recordsToUpdate.append ([newName, device['dev_MAC']]) # Print log - mylog('verbose', [" Names Found (DiG/Pholus): ", len(recordsToUpdate), " (",foundDig,"/",foundPholus ,")" ]) - mylog('verbose', [" Names Not Found : ", len(recordsNotFound) ]) + mylog('verbose', '[Update Device Name] Names Found (DiG/Pholus): ', len(recordsToUpdate), " (",foundDig,"/",foundPholus ,")" ) + mylog('verbose', '[Update Device Name] Names Not Found : ', len(recordsNotFound) ) # update not found devices with (name not found) sql.executemany ("UPDATE Devices SET dev_Name = ? WHERE dev_MAC = ? ", recordsNotFound ) # update names of devices which we were bale to resolve sql.executemany ("UPDATE Devices SET dev_Name = ? WHERE dev_MAC = ? 
", recordsToUpdate ) db.commitDB() - - diff --git a/pialert/helper.py b/pialert/helper.py index 24047a54..85906df7 100644 --- a/pialert/helper.py +++ b/pialert/helper.py @@ -4,7 +4,6 @@ import datetime import os import re import subprocess -from cron_converter import Cron from pytz import timezone from datetime import timedelta import json diff --git a/pialert/logger.py b/pialert/logger.py index d090f79d..8b6cf060 100644 --- a/pialert/logger.py +++ b/pialert/logger.py @@ -3,14 +3,22 @@ import sys import io import datetime +import conf from const import * +#------------------------------------------------------------------------------- +# duplication from helper to avoid circle +#------------------------------------------------------------------------------- +def timeNowTZ(): + if conf.tz == '': + return datetime.datetime.now().replace(microsecond=0) + else: + return datetime.datetime.now(conf.tz).replace(microsecond=0) #------------------------------------------------------------------------------- debugLevels = [ ('none', 0), ('minimal', 1), ('verbose', 2), ('debug', 3) ] -LOG_LEVEL = 'debug' def mylog(requestedDebugLevel, n): @@ -19,7 +27,7 @@ def mylog(requestedDebugLevel, n): # Get debug urgency/relative weight for lvl in debugLevels: - if LOG_LEVEL == lvl[0]: + if conf.LOG_LEVEL == lvl[0]: setLvl = lvl[1] if requestedDebugLevel == lvl[0]: reqLvl = lvl[1] @@ -30,7 +38,7 @@ def mylog(requestedDebugLevel, n): #------------------------------------------------------------------------------- def file_print (*args): - result = '' + result = timeNowTZ().strftime ('%H:%M:%S') + ' ' for arg in args: result += str(arg) @@ -42,14 +50,13 @@ def file_print (*args): #------------------------------------------------------------------------------- def print_log (pText): - global log_timestamp # Check LOG actived - if not LOG_LEVEL == 'debug' : + if not conf.LOG_LEVEL == 'debug' : return # Current Time - log_timestamp2 = datetime.datetime.now().replace(microsecond=0) + log_timestamp2 = datetime.datetime.now(conf.tz).replace(microsecond=0) # Print line + time + elapsed time + text file_print ('[LOG_LEVEL=debug] ', @@ -59,7 +66,7 @@ def print_log (pText): # Save current time to calculate elapsed time until next log - log_timestamp = log_timestamp2 + conf.log_timestamp = log_timestamp2 return pText diff --git a/pialert/networkscan.py b/pialert/networkscan.py index 3471bdb4..ac4dfbac 100644 --- a/pialert/networkscan.py +++ b/pialert/networkscan.py @@ -41,67 +41,67 @@ def scan_network (db): # arp-scan command arpscan_devices = [] if conf.ENABLE_ARPSCAN: - mylog('verbose', [' arp-scan start']) + mylog('verbose','[Network Scan] arp-scan start') arpscan_devices = execute_arpscan (conf.userSubnets) - print_log ('arp-scan ends') + mylog('verbose','[Network Scan] arp-scan ends') # Pi-hole method if conf.PIHOLE_ACTIVE : - mylog('verbose', [' Pi-hole start']) + mylog('verbose','[Network Scan] Pi-hole start') copy_pihole_network(db) db.commitDB() # DHCP Leases method if conf.DHCP_ACTIVE : - mylog('verbose', [' DHCP Leases start']) + mylog('verbose','[Network Scan] DHCP Leases start') read_DHCP_leases (db) db.commitDB() # Load current scan data - mylog('verbose', [' Processing scan results']) + mylog('verbose','[Network Scan] Processing scan results') save_scanned_devices (db, arpscan_devices, cycle_interval) # Print stats - mylog ('none', 'Print Stats') + mylog('none','[Network Scan] Print Stats') print_scan_stats(db) - mylog ('none', 'Stats end') + mylog('none','[Network Scan] Stats end') # Create Events - 
mylog('verbose', ['    Updating DB Info'])
-    mylog('verbose', ['    Sessions Events (connect / discconnect)'])
+    mylog('verbose','[Network Scan] Updating DB Info')
+    mylog('verbose','[Network Scan] Sessions Events (connect / disconnect)')
     insert_events(db)

     # Create New Devices
     # after create events -> avoid 'connection' event
-    mylog('verbose', ['    Creating new devices'])
+    mylog('verbose','[Network Scan] Creating new devices')
     create_new_devices (db)

     # Update devices info
-    mylog('verbose', ['    Updating Devices Info'])
+    mylog('verbose','[Network Scan] Updating Devices Info')
     update_devices_data_from_scan (db)

     # Resolve devices names
-    print_log ('    Resolve devices names')
+    mylog('verbose','[Network Scan] Resolve devices names')
     update_devices_names(db)

     # Void false connection - disconnections
-    mylog('verbose', ['    Voiding false (ghost) disconnections'])
+    mylog('verbose','[Network Scan] Voiding false (ghost) disconnections')
     void_ghost_disconnections (db)

     # Pair session events (Connection / Disconnection)
-    mylog('verbose', ['    Pairing session events (connection / disconnection) '])
+    mylog('verbose','[Network Scan] Pairing session events (connection / disconnection) ')
     pair_sessions_events(db)

     # Sessions snapshot
-    mylog('verbose', ['    Creating sessions snapshot'])
+    mylog('verbose','[Network Scan] Creating sessions snapshot')
     create_sessions_snapshot (db)

     # Sessions snapshot
-    mylog('verbose', ['    Inserting scan results into Online_History'])
+    mylog('verbose','[Network Scan] Inserting scan results into Online_History')
     insertOnlineHistory(db,conf.cycle)

     # Skip repeated notifications
-    mylog('verbose', ['    Skipping repeated notifications'])
+    mylog('verbose','[Network Scan] Skipping repeated notifications')
     skip_repeated_notifications (db)

     # Commit changes
@@ -131,7 +131,7 @@ def void_ghost_disconnections (db):
     sql = db.sql #TO-DO
     startTime = timeNow()
     # Void connect ghost events (disconnect event exists in last X min.)
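+    # A "ghost" disconnection is a false Disconnected/Connected event pair that
+    # appears when a device misses a single scan: a disconnect event exists
+    # within the last cic_EveryXmin minutes even though the device never really
+    # left. The three updates below rename such events to 'VOIDED - ...' and
+    # clear their pairing, so they are not treated as real session breaks.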
- print_log ('Void - 1 Connect ghost events') + mylog('debug','[Void Ghost Con] - 1 Connect ghost events') sql.execute ("""UPDATE Events SET eve_PairEventRowid = Null, eve_EventType ='VOIDED - ' || eve_EventType WHERE eve_MAC != 'Internet' @@ -152,7 +152,7 @@ def void_ghost_disconnections (db): (startTime, conf.cycle, startTime) ) # Void connect paired events - print_log ('Void - 2 Paired events') + mylog('debug','[Void Ghost Con] - 2 Paired events') sql.execute ("""UPDATE Events SET eve_PairEventRowid = Null WHERE eve_MAC != 'Internet' AND eve_PairEventRowid IN ( @@ -170,7 +170,7 @@ def void_ghost_disconnections (db): (conf.cycle, startTime) ) # Void disconnect ghost events - print_log ('Void - 3 Disconnect ghost events') + mylog('debug','[Void Ghost Con] - 3 Disconnect ghost events') sql.execute ("""UPDATE Events SET eve_PairEventRowid = Null, eve_EventType = 'VOIDED - '|| eve_EventType WHERE eve_MAC != 'Internet' @@ -187,7 +187,7 @@ def void_ghost_disconnections (db): DATETIME (?, '-' || cic_EveryXmin ||' minutes') ) """, (conf.cycle, startTime) ) - print_log ('Void end') + mylog('debug','[Void Ghost Con] Void Ghost Connections end') db.commitDB() #------------------------------------------------------------------------------- @@ -203,7 +203,7 @@ def pair_sessions_events (db): # Pair Connection / New Device events - print_log ('Pair session - 1 Connections / New Devices') + mylog('debug','[Pair Session] - 1 Connections / New Devices') sql.execute ("""UPDATE Events SET eve_PairEventRowid = (SELECT ROWID @@ -218,7 +218,7 @@ def pair_sessions_events (db): """ ) # Pair Disconnection / Device Down - print_log ('Pair session - 2 Disconnections') + mylog('debug','[Pair Session] - 2 Disconnections') sql.execute ("""UPDATE Events SET eve_PairEventRowid = (SELECT ROWID @@ -227,7 +227,7 @@ def pair_sessions_events (db): WHERE eve_EventType IN ('Device Down', 'Disconnected') AND eve_PairEventRowid IS NULL """ ) - print_log ('Pair session end') + mylog('debug','[Pair Session] Pair session end') db.commitDB() @@ -236,15 +236,15 @@ def create_sessions_snapshot (db): sql = db.sql #TO-DO # Clean sessions snapshot - print_log ('Sessions Snapshot - 1 Clean') + mylog('debug','[Sessions Snapshot] - 1 Clean') sql.execute ("DELETE FROM SESSIONS" ) # Insert sessions - print_log ('Sessions Snapshot - 2 Insert') + mylog('debug','[Sessions Snapshot] - 2 Insert') sql.execute ("""INSERT INTO Sessions SELECT * FROM Convert_Events_to_Sessions""" ) - print_log ('Sessions end') + mylog('debug','[Sessions Snapshot] Sessions end') db.commitDB() @@ -254,7 +254,7 @@ def insert_events (db): startTime = timeNow() # Check device down - print_log ('Events 1 - Devices down') + mylog('debug','[Events] - 1 - Devices down') sql.execute ("""INSERT INTO Events (eve_MAC, eve_IP, eve_DateTime, eve_EventType, eve_AdditionalInfo, eve_PendingAlertEmail) @@ -269,7 +269,7 @@ def insert_events (db): (startTime, conf.cycle) ) # Check new connections - print_log ('Events 2 - New Connections') + mylog('debug','[Events] - 2 - New Connections') sql.execute ("""INSERT INTO Events (eve_MAC, eve_IP, eve_DateTime, eve_EventType, eve_AdditionalInfo, eve_PendingAlertEmail) @@ -281,7 +281,7 @@ def insert_events (db): (startTime, conf.cycle) ) # Check disconnections - print_log ('Events 3 - Disconnections') + mylog('debug','[Events] - 3 - Disconnections') sql.execute ("""INSERT INTO Events (eve_MAC, eve_IP, eve_DateTime, eve_EventType, eve_AdditionalInfo, eve_PendingAlertEmail) @@ -297,7 +297,7 @@ def insert_events (db): (startTime, conf.cycle) ) # 
Check IP Changed - print_log ('Events 4 - IP Changes') + mylog('debug','[Events] - 4 - IP Changes') sql.execute ("""INSERT INTO Events (eve_MAC, eve_IP, eve_DateTime, eve_EventType, eve_AdditionalInfo, eve_PendingAlertEmail) @@ -308,6 +308,4 @@ def insert_events (db): AND dev_ScanCycle = ? AND dev_LastIP <> cur_IP """, (startTime, conf.cycle) ) - print_log ('Events end') - - + mylog('debug','[Events] - Events end') \ No newline at end of file diff --git a/pialert/pialert.py b/pialert/pialert.py index 5c5916f3..9afaea84 100755 --- a/pialert/pialert.py +++ b/pialert/pialert.py @@ -15,26 +15,18 @@ #=============================================================================== from __future__ import print_function - import sys from collections import namedtuple import time import datetime -from datetime import timedelta -import json -from pathlib import Path -from cron_converter import Cron -from json2table import convert import multiprocessing - # pialert modules import conf from const import * from logger import mylog -from helper import filePermissions, isNewVersion, timeNow, updateState +from helper import filePermissions, isNewVersion, timeNow, timeNowTZ, updateState from api import update_api -from files import get_file_content from networkscan import scan_network from initialise import importConfigs from mac_vendor import update_devices_MAC_vendors @@ -70,8 +62,8 @@ def main (): # to be deleted if not used - log_timestamp = conf.time_started - cron_instance = Cron() + conf.log_timestamp = conf.time_started + #cron_instance = Cron() # timestamps of last execution times startTime = conf.time_started @@ -98,7 +90,7 @@ def main (): sql = db.sql # To-Do replace with the db class # Upgrade DB if needed - upgradeDB(db) + db.upgradeDB() #=============================================================================== @@ -110,7 +102,7 @@ def main (): # update time started time_started = datetime.datetime.now() # not sure why we need this ... 
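+        # Every task in this loop uses the same "overdue" pattern: run when the
+        # task's last-run timestamp plus its interval is older than time_started,
+        # e.g. the internet IP check a few lines below:
+        #   if last_internet_IP_scan + datetime.timedelta(minutes=3) < time_started:
+        #       last_internet_IP_scan = time_started
+        #       check_internet_IP(db)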
loop_start_time = timeNow() - mylog('debug', ['[', timeNow(), '] [MAIN] Stating loop']) + mylog('debug', ['[ +++++++ ', timeNow(), '] [MAIN] Stating loop']) # re-load user configuration and plugins importConfigs(db) @@ -153,15 +145,15 @@ def main (): # check for changes in Internet IP if last_internet_IP_scan + datetime.timedelta(minutes=3) < time_started: - cycle = 'internet_IP' + conf.cycle = 'internet_IP' last_internet_IP_scan = time_started check_internet_IP(db) # Update vendors once a week if last_update_vendors + datetime.timedelta(days = 7) < time_started: last_update_vendors = time_started - cycle = 'update_vendors' - mylog('verbose', ['[', timeNow(), '] cycle:',cycle]) + conf.cycle = 'update_vendors' + mylog('verbose', ['[', timeNow(), '] cycle:',conf.cycle]) update_devices_MAC_vendors() # Execute scheduled or one-off Pholus scan if enabled and run conditions fulfilled @@ -199,14 +191,14 @@ def main (): run = nmapSchedule.runScheduleCheck() if run: - conf.nmapSchedule.last_run = datetime.datetime.now(conf.tz).replace(microsecond=0) + conf.nmapSchedule.last_run = timeNow() performNmapScan(db, get_all_devices(db)) # Perform a network scan via arp-scan or pihole if last_network_scan + datetime.timedelta(minutes=conf.SCAN_CYCLE_MINUTES) < time_started: last_network_scan = time_started - cycle = 1 # network scan - mylog('verbose', ['[', timeNow(), '] cycle:',cycle]) + conf.cycle = 1 # network scan + mylog('verbose', ['[', timeNow(), '] cycle:',conf.cycle]) updateState(db,"Scan: Network") # scan_network() @@ -261,48 +253,34 @@ def main (): # clean up the DB once a day if last_cleanup + datetime.timedelta(hours = 24) < time_started: last_cleanup = time_started - cycle = 'cleanup' - mylog('verbose', ['[', timeNow(), '] cycle:',cycle]) + conf.cycle = 'cleanup' + mylog('verbose', ['[', timeNow(), '] cycle:',conf.cycle]) db.cleanup_database(startTime, conf.DAYS_TO_KEEP_EVENTS, conf.PHOLUS_DAYS_DATA) # Commit SQL db.commitDB() # Final message - if cycle != "": - action = str(cycle) + if conf.cycle != "": + action = str(conf.cycle) if action == "1": action = "network_scan" mylog('verbose', ['[', timeNow(), '] Last action: ', action]) - cycle = "" - mylog('verbose', ['[', timeNow(), '] cycle:',cycle]) + conf.cycle = "" + mylog('verbose', ['[', timeNow(), '] cycle:',conf.cycle]) # Footer updateState(db,"Process: Wait") mylog('verbose', ['[', timeNow(), '] Process: Wait']) else: # do something - cycle = "" + conf.cycle = "" mylog('verbose', ['[', timeNow(), '] [MAIN] waiting to start next loop']) #loop time.sleep(5) # wait for N seconds - - - - - - - - -#------------------------------------------------------------------------------- -#------------------------------------------------------------------------------- -# Plugins -#------------------------------------------------------------------------------- - - #=============================================================================== # BEGIN #=============================================================================== diff --git a/pialert/plugin.py b/pialert/plugin.py index a9c685ee..ec0904aa 100644 --- a/pialert/plugin.py +++ b/pialert/plugin.py @@ -55,7 +55,7 @@ def run_plugin_scripts(db, runType): print_plugin_info(plugin, ['display_name']) mylog('debug', [' [Plugins] CMD: ', get_plugin_setting(plugin, "CMD")["value"]]) - execute_plugin(plugin) + execute_plugin(db, plugin) @@ -70,8 +70,9 @@ def get_plugins_configs(): # for root, dirs, files in os.walk(pluginsPath): dirs = next(os.walk(pluginsPath))[1] - for d in dirs: # Loop over 
directories, not files - pluginsList.append(json.loads(get_file_content(pluginsPath + "/" + d + '/config.json'))) + for d in dirs: # Loop over directories, not files + if not d.startswith( "__" ): # ignore __pycache__ + pluginsList.append(json.loads(get_file_content(pluginsPath + "/" + d + '/config.json'))) return pluginsList @@ -126,9 +127,9 @@ def get_plugin_string(props, el): result = '' if el in props['localized']: - for str in props[el]: - if str['language_code'] == 'en_us': - result = str['string'] + for val in props[el]: + if val['language_code'] == 'en_us': + result = val['string'] if result == '': result = 'en_us string missing' @@ -265,7 +266,7 @@ def execute_plugin(db, plugin): sql.executemany ("""INSERT INTO Plugins_History ("Plugin", "Object_PrimaryID", "Object_SecondaryID", "DateTimeCreated", "DateTimeChanged", "Watched_Value1", "Watched_Value2", "Watched_Value3", "Watched_Value4", "Status" ,"Extra", "UserData", "ForeignKey") VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)""", sqlParams) db.commitDB() - process_plugin_events(plugin) + process_plugin_events(db, plugin) # update API endpoints # update_api(False, ["plugins_events","plugins_objects"]) # TO-DO - remover circular reference @@ -304,7 +305,7 @@ def plugin_param_from_glob_set(globalSetting): # Gets the setting value def get_plugin_setting_value(plugin, function_key): - resultObj = get_plugin_string(plugin, function_key) + resultObj = get_plugin_setting(plugin, function_key) if resultObj != None: return resultObj["value"] @@ -386,7 +387,7 @@ def combine_plugin_objects(old, new): def process_plugin_events(db, plugin): sql = db.sql - global pluginObjects, pluginEvents + ##global pluginObjects, pluginEvents pluginPref = plugin["unique_prefix"] @@ -449,9 +450,8 @@ def process_plugin_events(db, plugin): index += 1 -# Update the DB + # Update the DB # ---------------------------- - # Update the Plugin_Objects for plugObj in pluginObjects: @@ -477,7 +477,7 @@ def process_plugin_events(db, plugin): createdTime = plugObj.changed # insert only events if they are to be reported on - if plugObj.status in get_plugin_setting_value(plugin, "REPORT_ON"): + if plugObj.status in get_plugin_setting_value(plugin, "REPORT_ON"): sql.execute ("INSERT INTO Plugins_Events (Plugin, Object_PrimaryID, Object_SecondaryID, DateTimeCreated, DateTimeChanged, Watched_Value1, Watched_Value2, Watched_Value3, Watched_Value4, Status, Extra, UserData, ForeignKey) VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?)", (plugObj.pluginPref, plugObj.primaryId , plugObj.secondaryId , createdTime, plugObj.changed , plugObj.watched1 , plugObj.watched2 , plugObj.watched3 , plugObj.watched4 , plugObj.status , plugObj.extra, plugObj.userData, plugObj.foreignKey )) diff --git a/pialert/reporting.py b/pialert/reporting.py index 4b6de43c..8ee72af2 100644 --- a/pialert/reporting.py +++ b/pialert/reporting.py @@ -407,9 +407,7 @@ def remove_tag (pText, pTag): #------------------------------------------------------------------------------- def send_email (pText, pHTML): - # Print more info for debugging if LOG_LEVEL == 'debug' - if conf.LOG_LEVEL == 'debug': - print_log ('REPORT_TO: ' + hide_email(str(conf.REPORT_TO)) + ' SMTP_USER: ' + hide_email(str(conf.SMTP_USER))) + mylog('debug', '[Send Email] REPORT_TO: ' + hide_email(str(conf.REPORT_TO)) + ' SMTP_USER: ' + hide_email(str(conf.SMTP_USER))) # Compose email msg = MIMEMultipart('alternative') @@ -473,7 +471,7 @@ def send_email (pText, pHTML): mylog('none', [' ERROR: Failed at - ', failedAt]) mylog('none', [' ERROR: Couldn\'t connect to 
the SMTP server (SMTPServerDisconnected), skipping Email (enable LOG_LEVEL=debug for more logging)'])
-        print_log('     DEBUG: Last executed - ' + str(failedAt))
+        mylog('debug', '[Send Email] Last executed - ' + str(failedAt))

 #-------------------------------------------------------------------------------
 def send_ntfy (_Text):
@@ -548,7 +546,7 @@ def send_webhook (_json, _html):
     # execute CURL call
     try:
         # try runnning a subprocess
-        mylog('debug', curlParams)
+        mylog('debug', '[send_webhook] curlParams: ' + str(curlParams))

         p = subprocess.Popen(curlParams, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
         stdout, stderr = p.communicate()
@@ -557,7 +555,7 @@ def send_webhook (_json, _html):
         logResult (stdout, stderr)    # TO-DO should be changed to mylog
     except subprocess.CalledProcessError as e:
         # An error occured, handle it
-        mylog('none', [e.output])
+        mylog('none', ['[send_webhook]', e.output])

 #-------------------------------------------------------------------------------
 def send_apprise (html, text):
@@ -622,7 +620,7 @@ def skip_repeated_notifications (db):

     # Skip repeated notifications
     # due strfime : Overflow --> use "strftime / 60"
-    print_log ('Skip Repeated')
+    mylog('verbose','[Skip Repeated Notifications] Skip Repeated')
     db.sql.execute ("""UPDATE Events SET eve_PendingAlertEmail = 0
                     WHERE eve_PendingAlertEmail = 1 AND eve_MAC IN
                         (
@@ -634,7 +632,7 @@
                             (strftime('%s','now','localtime')/60 )
                         ) """ )
-    print_log ('Skip Repeated end')
+    mylog('verbose','[Skip Repeated Notifications] Skip Repeated end')

     db.commitDB()

diff --git a/pialert/scheduler.py b/pialert/scheduler.py
index b0cbc2ca..2350ded0 100644
--- a/pialert/scheduler.py
+++ b/pialert/scheduler.py
@@ -1,7 +1,7 @@
 """ class to manage schedules """
 import datetime

-from logger import print_log
+from logger import mylog, print_log
 import conf

 #-------------------------------------------------------------------------------
@@ -28,11 +28,11 @@ class schedule_class:
         # (maybe the following check is unnecessary:)
         # if the last run is past the last time we ran a scheduled Pholus scan
         if nowTime > self.last_next_schedule and self.last_run < self.last_next_schedule:
-            print_log(f'Scheduler run for {self.service}: YES')
+            mylog('debug',f'[Scheduler] - Scheduler run for {self.service}: YES')
             self.was_last_schedule_used = True
             result = True
         else:
-            print_log(f'Scheduler run for {self.service}: NO')
+            mylog('debug',f'[Scheduler] - Scheduler run for {self.service}: NO')

         if self.was_last_schedule_used:
             self.was_last_schedule_used = False

From a71f16ee37a3fb6b68858974e55899ce370c4779 Mon Sep 17 00:00:00 2001
From: Data-Monkey
Date: Sat, 27 May 2023 22:38:12 +1000
Subject: [PATCH 14/20] changed to __main__.py and scanners folder

---
 Dockerfile                           |   8 +-
 dockerfiles/start.sh                 |  22 +-
 pialert/{pialert.py => __main__.py}  |  65 ++++--
 pialert/api.py                       |  16 +-
 pialert/database.py                  | 325 ++++----------------------
 pialert/device.py                    |  32 +--
 pialert/helper.py                    |   8 +-
 pialert/initialise.py                |   8 +-
 pialert/networkscan.py               |  19 +-
 pialert/plugin.py                    |   4 +-
 pialert/reporting.py                 |  48 ++--
 pialert/{ => scanners}/arpscan.py    |   6 +-
 pialert/{ => scanners}/internet.py   |  36 +--
 pialert/{ => scanners}/nmapscan.py   |   0
 pialert/{ => scanners}/pholusscan.py |  18 +-
 pialert/{ => scanners}/pihole.py     |  45 +++-
 16 files changed, 230 insertions(+), 430 deletions(-)
 rename pialert/{pialert.py => __main__.py} (85%)
 rename pialert/{ => scanners}/arpscan.py (84%)
 rename pialert/{ => scanners}/internet.py (81%)
 rename pialert/{ => scanners}/nmapscan.py
(100%) rename pialert/{ => scanners}/pholusscan.py (90%) rename pialert/{ => scanners}/pihole.py (61%) diff --git a/Dockerfile b/Dockerfile index 43259427..0004f2ec 100755 --- a/Dockerfile +++ b/Dockerfile @@ -1,7 +1,8 @@ FROM debian:bullseye-slim # default UID and GID -ENV USER=pi USER_ID=1000 USER_GID=1000 TZ=Europe/London PORT=20211 +ENV USER=pi USER_ID=1000 USER_GID=1000 PORT=20211 +#TZ=Europe/London # Todo, figure out why using a workdir instead of full paths don't work # Todo, do we still need all these packages? I can already see sudo which isn't needed @@ -46,3 +47,8 @@ RUN rm /etc/nginx/sites-available/default \ ENTRYPOINT ["tini", "--"] CMD ["/home/pi/pialert/dockerfiles/start.sh"] + + + + +## command to build docker: DOCKER_BUILDKIT=1 docker build . --iidfile dockerID \ No newline at end of file diff --git a/dockerfiles/start.sh b/dockerfiles/start.sh index 4ff77756..72c09e9b 100755 --- a/dockerfiles/start.sh +++ b/dockerfiles/start.sh @@ -1,15 +1,15 @@ #!/bin/sh /home/pi/pialert/dockerfiles/user-mapping.sh -# if custom variables not set we do not need to do anything -if [ -n "${TZ}" ]; then - FILECONF=/home/pi/pialert/config/pialert.conf - if [ -f "$FILECONF" ]; then - sed -ie "s|Europe/Berlin|${TZ}|g" /home/pi/pialert/config/pialert.conf - else - sed -ie "s|Europe/Berlin|${TZ}|g" /home/pi/pialert/back/pialert.conf_bak - fi -fi +# # if custom variables not set we do not need to do anything +# if [ -n "${TZ}" ]; then +# FILECONF=/home/pi/pialert/config/pialert.conf +# if [ -f "$FILECONF" ]; then +# sed -ie "s|Europe/Berlin|${TZ}|g" /home/pi/pialert/config/pialert.conf +# else +# sed -ie "s|Europe/Berlin|${TZ}|g" /home/pi/pialert/back/pialert.conf_bak +# fi +# fi if [ -n "${PORT}" ]; then sed -ie 's/listen 20211/listen '${PORT}'/g' /etc/nginx/sites-available/default @@ -29,5 +29,5 @@ chmod -R a+rw /home/pi/pialert/config # cron -f #python /home/pi/pialert/back/pialert.py -echo "DATA MONKEY VERSION ..." -python /home/pi/pialert/pialert/pialert.py +echo "[DEBUG] DATA MONKEY VERSION ..." 
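+# 'python <directory>' executes <directory>/__main__.py, so the line below
+# starts the package entry point pialert/__main__.py (the renamed pialert.py)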
+python /home/pi/pialert/pialert/ diff --git a/pialert/pialert.py b/pialert/__main__.py similarity index 85% rename from pialert/pialert.py rename to pialert/__main__.py index 9afaea84..7780d114 100755 --- a/pialert/pialert.py +++ b/pialert/__main__.py @@ -30,29 +30,53 @@ from api import update_api from networkscan import scan_network from initialise import importConfigs from mac_vendor import update_devices_MAC_vendors -from database import DB, get_all_devices, upgradeDB, sql_new_devices +from database import DB, get_all_devices, sql_new_devices from reporting import check_and_run_event, send_notifications from plugin import run_plugin_scripts # different scanners -from pholusscan import performPholusScan -from nmapscan import performNmapScan -from internet import check_internet_IP - +from scanners.pholusscan import performPholusScan +from scanners.nmapscan import performNmapScan +from scanners.internet import check_internet_IP # Global variables changedPorts_json_struc = None - - #=============================================================================== #=============================================================================== # MAIN #=============================================================================== #=============================================================================== +""" +main structure of Pi Alert + + Initialise All + start Loop forever + initialise loop + (re)import config + (re)import plugin config + run plugins (once) + run frontend events + update API + run scans + run plugins (scheduled) + check internet IP + check vendor + run PHOLUS + run NMAP + run "scan_network()" + ARP Scan + PiHole copy db + PiHole DHCP leases + processing scan results + run plugins (after Scan) + reporting + cleanup + end loop +""" def main (): - + conf.time_started = datetime.datetime.now() conf.cycle = "" conf.check_report = [1, "internet_IP", "update_vendors_silent"] @@ -102,7 +126,7 @@ def main (): # update time started time_started = datetime.datetime.now() # not sure why we need this ... 
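+        # The scan schedules checked further down in this loop are registered in
+        # importConfigs() (initialise.py), one schedule_class per scanner, e.g.:
+        #   pholusSchedule = Cron(conf.PHOLUS_RUN_SCHD).schedule(start_date=datetime.datetime.now(conf.tz))
+        #   conf.mySchedules.append(schedule_class("pholus", pholusSchedule, pholusSchedule.next(), False))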
loop_start_time = timeNow() - mylog('debug', ['[ +++++++ ', timeNow(), '] [MAIN] Stating loop']) + mylog('debug', '[MAIN] Stating loop') # re-load user configuration and plugins importConfigs(db) @@ -153,8 +177,8 @@ def main (): if last_update_vendors + datetime.timedelta(days = 7) < time_started: last_update_vendors = time_started conf.cycle = 'update_vendors' - mylog('verbose', ['[', timeNow(), '] cycle:',conf.cycle]) - update_devices_MAC_vendors() + mylog('verbose', ['[MAIN] cycle:',conf.cycle]) + update_devices_MAC_vendors(db) # Execute scheduled or one-off Pholus scan if enabled and run conditions fulfilled if conf.PHOLUS_RUN == "schedule" or conf.PHOLUS_RUN == "once": @@ -183,7 +207,7 @@ def main (): run = False # run once after application starts - if conf.NMAP_RUN == "once" and conf.nmapSchedule.last_run == 0: + if conf.NMAP_RUN == "once" and nmapSchedule.last_run == 0: run = True # run if overdue scheduled time @@ -191,14 +215,14 @@ def main (): run = nmapSchedule.runScheduleCheck() if run: - conf.nmapSchedule.last_run = timeNow() + nmapSchedule.last_run = timeNow() performNmapScan(db, get_all_devices(db)) # Perform a network scan via arp-scan or pihole if last_network_scan + datetime.timedelta(minutes=conf.SCAN_CYCLE_MINUTES) < time_started: last_network_scan = time_started conf.cycle = 1 # network scan - mylog('verbose', ['[', timeNow(), '] cycle:',conf.cycle]) + mylog('verbose', ['[MAIN] cycle:',conf.cycle]) updateState(db,"Scan: Network") # scan_network() @@ -214,8 +238,7 @@ def main (): # If thread is still active if p.is_alive(): - print("DEBUG scan_network running too long - let\'s kill it") - mylog('info', [' DEBUG scan_network running too long - let\'s kill it']) + mylog('none', "[MAIN] scan_network running too long - let\'s kill it") # Terminate - may not work if process is stuck for good p.terminate() @@ -254,7 +277,7 @@ def main (): if last_cleanup + datetime.timedelta(hours = 24) < time_started: last_cleanup = time_started conf.cycle = 'cleanup' - mylog('verbose', ['[', timeNow(), '] cycle:',conf.cycle]) + mylog('verbose', ['[MAIN] cycle:',conf.cycle]) db.cleanup_database(startTime, conf.DAYS_TO_KEEP_EVENTS, conf.PHOLUS_DAYS_DATA) # Commit SQL @@ -265,17 +288,17 @@ def main (): action = str(conf.cycle) if action == "1": action = "network_scan" - mylog('verbose', ['[', timeNow(), '] Last action: ', action]) + mylog('verbose', ['[MAIN] Last action: ', action]) conf.cycle = "" - mylog('verbose', ['[', timeNow(), '] cycle:',conf.cycle]) + mylog('verbose', ['[MAIN] cycle:',conf.cycle]) # Footer updateState(db,"Process: Wait") - mylog('verbose', ['[', timeNow(), '] Process: Wait']) + mylog('verbose', ['[MAIN] Process: Wait']) else: # do something conf.cycle = "" - mylog('verbose', ['[', timeNow(), '] [MAIN] waiting to start next loop']) + mylog('verbose', ['[MAIN] waiting to start next loop']) #loop time.sleep(5) # wait for N seconds diff --git a/pialert/api.py b/pialert/api.py index bb87ffa4..2dd2e966 100644 --- a/pialert/api.py +++ b/pialert/api.py @@ -2,11 +2,11 @@ import json # pialert modules +import conf from const import pialertPath from logger import mylog from files import write_file from database import * -from conf import ENABLE_PLUGINS, API_CUSTOM_SQL apiEndpoints = [] @@ -14,7 +14,7 @@ apiEndpoints = [] # API #=============================================================================== def update_api(isNotification = False, updateOnlyDataSources = []): - mylog('verbose', [' [API] Update API not ding anything for now !']) + mylog('verbose', [' [API] Update 
API not doing anything for now !']) return folder = pialertPath + '/front/api/' @@ -28,7 +28,7 @@ def update_api(isNotification = False, updateOnlyDataSources = []): write_file(folder + 'notification_json_final.json' , json.dumps(json_final)) # Save plugins - if ENABLE_PLUGINS: + if conf.ENABLE_PLUGINS: write_file(folder + 'plugins.json' , json.dumps({"data" : plugins})) # prepare database tables we want to expose @@ -42,7 +42,7 @@ def update_api(isNotification = False, updateOnlyDataSources = []): ["plugins_history", sql_plugins_history], ["plugins_objects", sql_plugins_objects], ["language_strings", sql_language_strings], - ["custom_endpoint", API_CUSTOM_SQL], + ["custom_endpoint", conf.API_CUSTOM_SQL], ] # Save selected database tables @@ -57,12 +57,12 @@ def update_api(isNotification = False, updateOnlyDataSources = []): class api_endpoint_class: - def __init__(self, sql, path): + def __init__(self, db, path): global apiEndpoints - - self.sql = sql - self.jsonData = get_table_as_json(sql).json + self.db = db + self.sql = db.sql + self.jsonData = db.get_table_as_json( self.sql).json self.path = path self.fileName = path.split('/')[-1] self.hash = hash(json.dumps(self.jsonData)) diff --git a/pialert/database.py b/pialert/database.py index 06857214..71b5d64c 100644 --- a/pialert/database.py +++ b/pialert/database.py @@ -48,7 +48,7 @@ class DB(): mylog('debug','openDB: databse already open') return - mylog('none', 'Opening DB' ) + mylog('none', '[Database] Opening DB' ) # Open DB and Cursor self.sql_connection = sqlite3.connect (fullDbPath, isolation_level=None) self.sql_connection.execute('pragma journal_mode=wal') # @@ -60,12 +60,11 @@ class DB(): def commitDB (self): if self.sql_connection == None : mylog('debug','commitDB: databse is not open') - return - - # mylog('debug','commitDB: comiting DB changes') + return False # Commit changes to DB self.sql_connection.commit() + return True #------------------------------------------------------------------------------- def get_sql_array(self, query): @@ -75,7 +74,7 @@ class DB(): self.sql.execute(query) rows = self.sql.fetchall() - self.commitDB() + #self.commitDB() # convert result into list of lists arr = [] @@ -386,293 +385,48 @@ class DB(): self.commitDB() -#------------------------------------------------------------------------------- -def get_table_as_json(db, sqlQuery): + #------------------------------------------------------------------------------- + def get_table_as_json(self, sqlQuery): - db.sql.execute(sqlQuery) + self.sql.execute(sqlQuery) - columnNames = list(map(lambda x: x[0], db.sql.description)) + columnNames = list(map(lambda x: x[0], self.sql.description)) - rows = db.sql.fetchall() + rows = self.sql.fetchall() - result = {"data":[]} + result = {"data":[]} - for row in rows: - tmp = row_to_json(columnNames, row) - result["data"].append(tmp) - return json_struc(result, columnNames) + for row in rows: + tmp = row_to_json(columnNames, row) + result["data"].append(tmp) + return json_struc(result, columnNames) - - - - - - -#------------------------------------------------------------------------------- -def upgradeDB(db: DB()): - sql = db.sql #TO-DO - - # indicates, if Online_History table is available - onlineHistoryAvailable = db.sql.execute(""" - SELECT name FROM sqlite_master WHERE type='table' - AND name='Online_History'; - """).fetchall() != [] - - # Check if it is incompatible (Check if table has all required columns) - isIncompatible = False - - if onlineHistoryAvailable : - isIncompatible = sql.execute (""" - 
SELECT COUNT(*) AS CNTREC FROM pragma_table_info('Online_History') WHERE name='Archived_Devices' - """).fetchone()[0] == 0 - - # Drop table if available, but incompatible - if onlineHistoryAvailable and isIncompatible: - mylog('none','[upgradeDB] Table is incompatible, Dropping the Online_History table') - sql.execute("DROP TABLE Online_History;") - onlineHistoryAvailable = False - - if onlineHistoryAvailable == False : - sql.execute(""" - CREATE TABLE "Online_History" ( - "Index" INTEGER, - "Scan_Date" TEXT, - "Online_Devices" INTEGER, - "Down_Devices" INTEGER, - "All_Devices" INTEGER, - "Archived_Devices" INTEGER, - PRIMARY KEY("Index" AUTOINCREMENT) - ); - """) - - # Alter Devices table - # dev_Network_Node_MAC_ADDR column - dev_Network_Node_MAC_ADDR_missing = sql.execute (""" - SELECT COUNT(*) AS CNTREC FROM pragma_table_info('Devices') WHERE name='dev_Network_Node_MAC_ADDR' - """).fetchone()[0] == 0 - - if dev_Network_Node_MAC_ADDR_missing : - mylog('verbose', ["[upgradeDB] Adding dev_Network_Node_MAC_ADDR to the Devices table"]) - sql.execute(""" - ALTER TABLE "Devices" ADD "dev_Network_Node_MAC_ADDR" TEXT - """) - - # dev_Network_Node_port column - dev_Network_Node_port_missing = sql.execute (""" - SELECT COUNT(*) AS CNTREC FROM pragma_table_info('Devices') WHERE name='dev_Network_Node_port' - """).fetchone()[0] == 0 - - if dev_Network_Node_port_missing : - mylog('verbose', ["[upgradeDB] Adding dev_Network_Node_port to the Devices table"]) - sql.execute(""" - ALTER TABLE "Devices" ADD "dev_Network_Node_port" INTEGER - """) - - # dev_Icon column - dev_Icon_missing = sql.execute (""" - SELECT COUNT(*) AS CNTREC FROM pragma_table_info('Devices') WHERE name='dev_Icon' - """).fetchone()[0] == 0 - - if dev_Icon_missing : - mylog('verbose', ["[upgradeDB] Adding dev_Icon to the Devices table"]) - sql.execute(""" - ALTER TABLE "Devices" ADD "dev_Icon" TEXT - """) - - # indicates, if Settings table is available - settingsMissing = sql.execute(""" - SELECT name FROM sqlite_master WHERE type='table' - AND name='Settings'; - """).fetchone() == None - - # Re-creating Settings table - mylog('verbose', ["[upgradeDB] Re-creating Settings table"]) - - if settingsMissing == False: - sql.execute("DROP TABLE Settings;") - - sql.execute(""" - CREATE TABLE "Settings" ( - "Code_Name" TEXT, - "Display_Name" TEXT, - "Description" TEXT, - "Type" TEXT, - "Options" TEXT, - "RegEx" TEXT, - "Value" TEXT, - "Group" TEXT, - "Events" TEXT - ); - """) - - # indicates, if Pholus_Scan table is available - pholusScanMissing = sql.execute(""" - SELECT name FROM sqlite_master WHERE type='table' - AND name='Pholus_Scan'; - """).fetchone() == None - - # if pholusScanMissing == False: - # # Re-creating Pholus_Scan table - # sql.execute("DROP TABLE Pholus_Scan;") - # pholusScanMissing = True - - if pholusScanMissing: - mylog('verbose', ["[upgradeDB] Re-creating Pholus_Scan table"]) - sql.execute(""" - CREATE TABLE "Pholus_Scan" ( - "Index" INTEGER, - "Info" TEXT, - "Time" TEXT, - "MAC" TEXT, - "IP_v4_or_v6" TEXT, - "Record_Type" TEXT, - "Value" TEXT, - "Extra" TEXT, - PRIMARY KEY("Index" AUTOINCREMENT) - ); - """) - - # indicates, if Nmap_Scan table is available - nmapScanMissing = sql.execute(""" - SELECT name FROM sqlite_master WHERE type='table' - AND name='Nmap_Scan'; - """).fetchone() == None - - # Re-creating Parameters table - mylog('verbose', ["[upgradeDB] Re-creating Parameters table"]) - sql.execute("DROP TABLE Parameters;") - - sql.execute(""" - CREATE TABLE "Parameters" ( - "par_ID" TEXT PRIMARY KEY, - "par_Value" 
TEXT
-        );
-        """)
-
-    # Initialize Parameters if unavailable
-    initOrSetParam(db, 'Back_App_State','Initializing')
-
-    # if nmapScanMissing == False:
-    #     # Re-creating Nmap_Scan table
-    #     sql.execute("DROP TABLE Nmap_Scan;")
-    #     nmapScanMissing = True
-
-    if nmapScanMissing:
-        mylog('verbose', ["[upgradeDB] Re-creating Nmap_Scan table"])
-        sql.execute("""
-        CREATE TABLE "Nmap_Scan" (
-        "Index" INTEGER,
-        "MAC" TEXT,
-        "Port" TEXT,
-        "Time" TEXT,
-        "State" TEXT,
-        "Service" TEXT,
-        "Extra" TEXT,
-        PRIMARY KEY("Index" AUTOINCREMENT)
-        );
-        """)
-
-    # Plugin state
-    sql_Plugins_Objects = """ CREATE TABLE IF NOT EXISTS Plugins_Objects(
-        "Index" INTEGER,
-        Plugin TEXT NOT NULL,
-        Object_PrimaryID TEXT NOT NULL,
-        Object_SecondaryID TEXT NOT NULL,
-        DateTimeCreated TEXT NOT NULL,
-        DateTimeChanged TEXT NOT NULL,
-        Watched_Value1 TEXT NOT NULL,
-        Watched_Value2 TEXT NOT NULL,
-        Watched_Value3 TEXT NOT NULL,
-        Watched_Value4 TEXT NOT NULL,
-        Status TEXT NOT NULL,
-        Extra TEXT NOT NULL,
-        UserData TEXT NOT NULL,
-        ForeignKey TEXT NOT NULL,
-        PRIMARY KEY("Index" AUTOINCREMENT)
-    ); """
-    sql.execute(sql_Plugins_Objects)
-
-    # Plugin execution results
-    sql_Plugins_Events = """ CREATE TABLE IF NOT EXISTS Plugins_Events(
-        "Index" INTEGER,
-        Plugin TEXT NOT NULL,
-        Object_PrimaryID TEXT NOT NULL,
-        Object_SecondaryID TEXT NOT NULL,
-        DateTimeCreated TEXT NOT NULL,
-        DateTimeChanged TEXT NOT NULL,
-        Watched_Value1 TEXT NOT NULL,
-        Watched_Value2 TEXT NOT NULL,
-        Watched_Value3 TEXT NOT NULL,
-        Watched_Value4 TEXT NOT NULL,
-        Status TEXT NOT NULL,
-        Extra TEXT NOT NULL,
-        UserData TEXT NOT NULL,
-        ForeignKey TEXT NOT NULL,
-        PRIMARY KEY("Index" AUTOINCREMENT)
-    ); """
-    sql.execute(sql_Plugins_Events)
-
-    # Plugin execution history
-    sql_Plugins_History = """ CREATE TABLE IF NOT EXISTS Plugins_History(
-        "Index" INTEGER,
-        Plugin TEXT NOT NULL,
-        Object_PrimaryID TEXT NOT NULL,
-        Object_SecondaryID TEXT NOT NULL,
-        DateTimeCreated TEXT NOT NULL,
-        DateTimeChanged TEXT NOT NULL,
-        Watched_Value1 TEXT NOT NULL,
-        Watched_Value2 TEXT NOT NULL,
-        Watched_Value3 TEXT NOT NULL,
-        Watched_Value4 TEXT NOT NULL,
-        Status TEXT NOT NULL,
-        Extra TEXT NOT NULL,
-        UserData TEXT NOT NULL,
-        ForeignKey TEXT NOT NULL,
-        PRIMARY KEY("Index" AUTOINCREMENT)
-    ); """
-    sql.execute(sql_Plugins_History)
-
-    # Dynamically generated language strings
-    # indicates, if Language_Strings table is available
-    languageStringsMissing = sql.execute("""
-    SELECT name FROM sqlite_master WHERE type='table'
-    AND name='Plugins_Language_Strings';
-    """).fetchone() == None
-
-    if languageStringsMissing == False:
-        sql.execute("DROP TABLE Plugins_Language_Strings;")
-
-    sql.execute(""" CREATE TABLE IF NOT EXISTS Plugins_Language_Strings(
-        "Index" INTEGER,
-        Language_Code TEXT NOT NULL,
-        String_Key TEXT NOT NULL,
-        String_Value TEXT NOT NULL,
-        Extra TEXT NOT NULL,
-        PRIMARY KEY("Index" AUTOINCREMENT)
-    ); """)
-
-    db.commitDB()
+    #-------------------------------------------------------------------------------
+    # reference from here: https://codereview.stackexchange.com/questions/241043/interface-class-for-sqlite-databases
+    #-------------------------------------------------------------------------------
+    def read(self, query, *args):
+        """check the query and arguments are aligned and are read only"""
+        mylog('debug',[ '[Database] - SELECT Query: ', query, " params: ", args])
+        try:
+            assert query.count('?') == len(args)
+            assert query.upper().strip().startswith('SELECT')
+            self.sql.execute(query, args)
+            rows = self.sql.fetchall()
+            
return rows + except AssertionError: + mylog('none',[ '[Database] - ERROR: inconsistent query and/or arguments.', query, " params: ", args]) + except sqlite3.Error as e: + mylog('none',[ '[Database] - SQL ERROR: ', e]) + return None #------------------------------------------------------------------------------- def get_device_stats(db): - sql = db.sql #TO-DO # columns = ["online","down","all","archived","new","unknown"] - sql.execute(sql_devices_stats) - - row = sql.fetchone() - db.commitDB() - - return row + return db.read(sql_devices_stats) #------------------------------------------------------------------------------- def get_all_devices(db): - sql = db.sql #TO-DO - sql.execute(sql_devices_all) - - row = sql.fetchall() - - db.commitDB() - return row + return db.read(sql_devices_all) #------------------------------------------------------------------------------- @@ -681,20 +435,17 @@ def insertOnlineHistory(db, cycle): sql = db.sql #TO-DO startTime = timeNow() # Add to History - sql.execute("SELECT * FROM Devices") - History_All = sql.fetchall() + + History_All = db.read("SELECT * FROM Devices") History_All_Devices = len(History_All) - sql.execute("SELECT * FROM Devices WHERE dev_Archived = 1") - History_Archived = sql.fetchall() + History_Archived = db.read("SELECT * FROM Devices WHERE dev_Archived = 1") History_Archived_Devices = len(History_Archived) - sql.execute("""SELECT * FROM CurrentScan WHERE cur_ScanCycle = ? """, (cycle,)) - History_Online = sql.fetchall() + History_Online = db.read("SELECT * FROM CurrentScan WHERE cur_ScanCycle = ? ", cycle) History_Online_Devices = len(History_Online) History_Offline_Devices = History_All_Devices - History_Archived_Devices - History_Online_Devices sql.execute ("INSERT INTO Online_History (Scan_Date, Online_Devices, Down_Devices, All_Devices, Archived_Devices) "+ "VALUES ( ?, ?, ?, ?, ?)", (startTime, History_Online_Devices, History_Offline_Devices, History_All_Devices, History_Archived_Devices ) ) - db.commitDB() - + db.commitDB() \ No newline at end of file diff --git a/pialert/device.py b/pialert/device.py index e22ab380..a29d9ffc 100644 --- a/pialert/device.py +++ b/pialert/device.py @@ -6,10 +6,10 @@ import subprocess import conf from helper import timeNow -from internet import check_IP_format, get_internet_IP +from scanners.internet import check_IP_format, get_internet_IP from logger import mylog, print_log from mac_vendor import query_MAC_vendor -from pholusscan import performPholusScan, resolve_device_name_dig, resolve_device_name_pholus +from scanners.pholusscan import performPholusScan, resolve_device_name_dig, resolve_device_name_pholus #------------------------------------------------------------------------------- @@ -64,7 +64,7 @@ def save_scanned_devices (db, p_arpscan_devices, p_cycle_interval): local_ip_cmd = ["ip -o route get 1 | sed 's/^.*src \\([^ ]*\\).*$/\\1/;q'"] local_ip = subprocess.Popen (local_ip_cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT).communicate()[0].decode().strip() - mylog('debug', [' Saving this IP into the CurrentScan table:', local_ip]) + mylog('debug', ['[Save Devices] Saving this IP into the CurrentScan table:', local_ip]) if check_IP_format(local_ip) == '': local_ip = '0.0.0.0' @@ -82,19 +82,19 @@ def print_scan_stats (db): sql.execute ("""SELECT COUNT(*) FROM CurrentScan WHERE cur_ScanCycle = ? 
""", (conf.cycle,)) - mylog('verbose', [' Devices Detected.......: ', str (sql.fetchone()[0]) ]) + mylog('verbose', ['[Scan Stats] Devices Detected.......: ', str (sql.fetchone()[0]) ]) # Devices arp-scan sql.execute ("""SELECT COUNT(*) FROM CurrentScan WHERE cur_ScanMethod='arp-scan' AND cur_ScanCycle = ? """, (conf.cycle,)) - mylog('verbose', [' arp-scan detected..: ', str (sql.fetchone()[0]) ]) + mylog('verbose', ['[Scan Stats] arp-scan detected..: ', str (sql.fetchone()[0]) ]) # Devices Pi-hole sql.execute ("""SELECT COUNT(*) FROM CurrentScan WHERE cur_ScanMethod='PiHole' AND cur_ScanCycle = ? """, (conf.cycle,)) - mylog('verbose', [' Pi-hole detected...: +' + str (sql.fetchone()[0]) ]) + mylog('verbose', ['[Scan Stats] Pi-hole detected...: +' + str (sql.fetchone()[0]) ]) # New Devices sql.execute ("""SELECT COUNT(*) FROM CurrentScan @@ -102,7 +102,7 @@ def print_scan_stats (db): AND NOT EXISTS (SELECT 1 FROM Devices WHERE dev_MAC = cur_MAC) """, (conf.cycle,)) - mylog('verbose', [' New Devices........: ' + str (sql.fetchone()[0]) ]) + mylog('verbose', ['[Scan Stats] New Devices........: ' + str (sql.fetchone()[0]) ]) # Devices in this ScanCycle sql.execute ("""SELECT COUNT(*) FROM Devices, CurrentScan @@ -110,7 +110,7 @@ def print_scan_stats (db): AND dev_ScanCycle = ? """, (conf.cycle,)) - mylog('verbose', [' Devices in this cycle..: ' + str (sql.fetchone()[0]) ]) + mylog('verbose', ['[Scan Stats] Devices in this cycle..: ' + str (sql.fetchone()[0]) ]) # Down Alerts sql.execute ("""SELECT COUNT(*) FROM Devices @@ -120,7 +120,7 @@ def print_scan_stats (db): WHERE dev_MAC = cur_MAC AND dev_ScanCycle = cur_ScanCycle) """, (conf.cycle,)) - mylog('verbose', [' Down Alerts........: ' + str (sql.fetchone()[0]) ]) + mylog('verbose', ['[Scan Stats] Down Alerts........: ' + str (sql.fetchone()[0]) ]) # New Down Alerts sql.execute ("""SELECT COUNT(*) FROM Devices @@ -131,7 +131,7 @@ def print_scan_stats (db): WHERE dev_MAC = cur_MAC AND dev_ScanCycle = cur_ScanCycle) """, (conf.cycle,)) - mylog('verbose', [' New Down Alerts....: ' + str (sql.fetchone()[0]) ]) + mylog('verbose', ['[Scan Stats] New Down Alerts....: ' + str (sql.fetchone()[0]) ]) # New Connections sql.execute ("""SELECT COUNT(*) FROM Devices, CurrentScan @@ -139,7 +139,7 @@ def print_scan_stats (db): AND dev_PresentLastScan = 0 AND dev_ScanCycle = ? """, (conf.cycle,)) - mylog('verbose', [' New Connections....: ' + str ( sql.fetchone()[0]) ]) + mylog('verbose', ['[Scan Stats] New Connections....: ' + str ( sql.fetchone()[0]) ]) # Disconnections sql.execute ("""SELECT COUNT(*) FROM Devices @@ -149,7 +149,7 @@ def print_scan_stats (db): WHERE dev_MAC = cur_MAC AND dev_ScanCycle = cur_ScanCycle) """, (conf.cycle,)) - mylog('verbose', [' Disconnections.....: ' + str ( sql.fetchone()[0]) ]) + mylog('verbose', ['[Scan Stats] Disconnections.....: ' + str ( sql.fetchone()[0]) ]) # IP Changes sql.execute ("""SELECT COUNT(*) FROM Devices, CurrentScan @@ -157,7 +157,7 @@ def print_scan_stats (db): AND dev_ScanCycle = ? 
AND dev_LastIP <> cur_IP """, (conf.cycle,)) - mylog('verbose', [' IP Changes.........: ' + str ( sql.fetchone()[0]) ]) + mylog('verbose', ['[Scan Stats] IP Changes.........: ' + str ( sql.fetchone()[0]) ]) @@ -395,7 +395,7 @@ def update_devices_names (db): db.commitDB() # Number of entries from previous Pholus scans - mylog('verbose', '[Update Device Name] Pholus entries from prev scans: ', len(pholusResults)) + mylog('verbose', ['[Update Device Name] Pholus entries from prev scans: ', len(pholusResults)]) for device in unknownDevices: newName = -1 @@ -422,8 +422,8 @@ def update_devices_names (db): recordsToUpdate.append ([newName, device['dev_MAC']]) # Print log - mylog('verbose', '[Update Device Name] Names Found (DiG/Pholus): ', len(recordsToUpdate), " (",foundDig,"/",foundPholus ,")" ) - mylog('verbose', '[Update Device Name] Names Not Found : ', len(recordsNotFound) ) + mylog('verbose', ['[Update Device Name] Names Found (DiG/Pholus): ', len(recordsToUpdate), " (",foundDig,"/",foundPholus ,")"] ) + mylog('verbose', ['[Update Device Name] Names Not Found : ', len(recordsNotFound)] ) # update not found devices with (name not found) sql.executemany ("UPDATE Devices SET dev_Name = ? WHERE dev_MAC = ? ", recordsNotFound ) diff --git a/pialert/helper.py b/pialert/helper.py index 85906df7..d33c5c97 100644 --- a/pialert/helper.py +++ b/pialert/helper.py @@ -32,14 +32,12 @@ def updateState(db, newState): #sql = db.sql - mylog('debug', ' [updateState] changing state to: "' + newState +'"') + mylog('debug', '[updateState] changing state to: "' + newState +'"') db.sql.execute ("UPDATE Parameters SET par_Value='"+ newState +"' WHERE par_ID='Back_App_State'") db.commitDB() #------------------------------------------------------------------------------- def updateSubnets(scan_subnets): - - # remove old list subnets = [] # multiple interfaces @@ -64,8 +62,8 @@ def checkPermissionsOK(): dbR_access = (os.access(fullDbPath, os.R_OK)) dbW_access = (os.access(fullDbPath, os.W_OK)) - - mylog('none', ['\n Permissions check (All should be True)']) + mylog('none', ['\n']) + mylog('none', ['Permissions check (All should be True)']) mylog('none', ['------------------------------------------------']) mylog('none', [ " " , confPath , " | " , " READ | " , confR_access]) mylog('none', [ " " , confPath , " | " , " WRITE | " , confW_access]) diff --git a/pialert/initialise.py b/pialert/initialise.py index d7417cef..9e858d35 100644 --- a/pialert/initialise.py +++ b/pialert/initialise.py @@ -175,13 +175,12 @@ def importConfigs (db): if conf.ENABLE_PLUGINS: conf.plugins = get_plugins_configs() - mylog('none', ['[', timeNow(), '] Plugins: Number of dynamically loaded plugins: ', len(conf.plugins)]) + mylog('none', ['[Config] Plugins: Number of dynamically loaded plugins: ', len(conf.plugins)]) # handle plugins for plugin in conf.plugins: + pref = plugin["unique_prefix"] print_plugin_info(plugin, ['display_name','description']) - - pref = plugin["unique_prefix"] # if plugin["enabled"] == 'true': @@ -232,5 +231,4 @@ def importConfigs (db): # update_api(False, ["settings"]) # TO DO this creates a circular reference between API and HELPER ! 
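The TO-DO comment above flags a circular import between the api module and its callers. A minimal sketch of the usual Python workaround (a deferred, call-time import); the function name here is hypothetical and this is not necessarily the project's final wiring:

#-------------------------------------------------------------------------------
# Hypothetical sketch: break a module-level import cycle by importing inside
# the function, so the import only runs at call time, after both modules load.
def refresh_settings_endpoint():
    from api import update_api   # deferred import; avoids the cycle at load time
    update_api(False, ["settings"])
#-------------------------------------------------------------------------------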
- mylog('info', ['[', timeNow(), '] Config: Imported new config']) - + mylog('info', '[Config] Imported new config') diff --git a/pialert/networkscan.py b/pialert/networkscan.py index ac4dfbac..423f88b7 100644 --- a/pialert/networkscan.py +++ b/pialert/networkscan.py @@ -1,12 +1,12 @@ import conf -from arpscan import execute_arpscan +from scanners.arpscan import execute_arpscan +from scanners.pihole import copy_pihole_network, read_DHCP_leases from database import insertOnlineHistory from device import create_new_devices, print_scan_stats, save_scanned_devices, update_devices_data_from_scan, update_devices_names from helper import timeNow -from logger import mylog, print_log -from pihole import copy_pihole_network, read_DHCP_leases +from logger import mylog from reporting import skip_repeated_notifications @@ -18,19 +18,20 @@ from reporting import skip_repeated_notifications def scan_network (db): sql = db.sql #TO-DO - reporting = False + # Header # moved updateState to main loop # updateState(db,"Scan: Network") - mylog('verbose', ['[', timeNow(), '] Scan Devices:' ]) + mylog('verbose', ['[Network Scan] Scan Devices:' ]) # Query ScanCycle properties scanCycle_data = query_ScanCycle_Data (db, True) if scanCycle_data is None: - mylog('none', ['\n*************** ERROR ***************']) - mylog('none', ['ScanCycle %s not found' % conf.cycle ]) - mylog('none', [' Exiting...\n']) + mylog('none', ['\n']) + mylog('none', ['[Network Scan]*************** ERROR ***************']) + mylog('none', ['[Network Scan] ScanCycle %s not found' % conf.cycle ]) + mylog('none', ['[Network Scan] Exiting...\n']) return False db.commitDB() @@ -57,6 +58,7 @@ def scan_network (db): read_DHCP_leases (db) db.commitDB() + # Load current scan data mylog('verbose','[Network Scan] Processing scan results') save_scanned_devices (db, arpscan_devices, cycle_interval) @@ -111,7 +113,6 @@ def scan_network (db): # if ENABLE_PLUGINS: # run_plugin_scripts(db,'always_after_scan') - return reporting #------------------------------------------------------------------------------- def query_ScanCycle_Data (db, pOpenCloseDB = False, cycle = 1): diff --git a/pialert/plugin.py b/pialert/plugin.py index ec0904aa..647464ae 100644 --- a/pialert/plugin.py +++ b/pialert/plugin.py @@ -21,7 +21,7 @@ def timeNow(): #------------------------------------------------------------------------------- -def run_plugin_scripts(db, runType): +def run_plugin_scripts(db, runType, plugins = conf.plugins): # global plugins, tz, mySchedules @@ -30,7 +30,7 @@ def run_plugin_scripts(db, runType): mylog('debug', [' [Plugins] Check if any plugins need to be executed on run type: ', runType]) - for plugin in conf.plugins: + for plugin in plugins: shouldRun = False diff --git a/pialert/reporting.py b/pialert/reporting.py index 8ee72af2..d12ec74c 100644 --- a/pialert/reporting.py +++ b/pialert/reporting.py @@ -13,7 +13,7 @@ from json2table import convert # pialert modules import conf from const import pialertPath, logPath -from database import get_table_as_json +#from database import get_table_as_json from files import get_file_content, write_file from helper import generate_mac_links, isNewVersion, removeDuplicateNewLines, timeNow, hide_email, json_struc, updateState from logger import logResult, mylog, print_log @@ -52,7 +52,7 @@ def construct_notifications(db, sqlQuery, tableTitle, skipText = False, supplied text_line = '{}\t{}\n' if suppliedJsonStruct is None: - json_struc = get_table_as_json(db, sqlQuery) + json_struc = db.get_table_as_json(sqlQuery) else: 
json_struc = suppliedJsonStruct @@ -96,7 +96,7 @@ def send_notifications (db): plugins_report = False # Reporting section - mylog('verbose', [' Check if something to report']) + mylog('verbose', ['[Notification] Check if something to report']) # prepare variables for JSON construction json_internet = [] @@ -254,47 +254,47 @@ def send_notifications (db): # update_api(True) # TO-DO - mylog('none', [' Changes detected, sending reports']) + mylog('none', ['[Notification] Changes detected, sending reports']) if conf.REPORT_MAIL and check_config('email'): updateState(db,"Send: Email") - mylog('info', [' Sending report by Email']) + mylog('info', ['[Notification] Sending report by Email']) send_email (mail_text, mail_html) else : - mylog('verbose', [' Skip email']) + mylog('verbose', ['[Notification] Skip email']) if conf.REPORT_APPRISE and check_config('apprise'): updateState(db,"Send: Apprise") - mylog('info', [' Sending report by Apprise']) + mylog('info', ['[Notification] Sending report by Apprise']) send_apprise (mail_html, mail_text) else : - mylog('verbose', [' Skip Apprise']) + mylog('verbose', ['[Notification] Skip Apprise']) if conf.REPORT_WEBHOOK and check_config('webhook'): updateState(db,"Send: Webhook") - mylog('info', [' Sending report by Webhook']) + mylog('info', ['[Notification] Sending report by Webhook']) send_webhook (json_final, mail_text) else : - mylog('verbose', [' Skip webhook']) + mylog('verbose', ['[Notification] Skip webhook']) if conf.REPORT_NTFY and check_config('ntfy'): updateState(db,"Send: NTFY") - mylog('info', [' Sending report by NTFY']) + mylog('info', ['[Notification] Sending report by NTFY']) send_ntfy (mail_text) else : - mylog('verbose', [' Skip NTFY']) + mylog('verbose', ['[Notification] Skip NTFY']) if conf.REPORT_PUSHSAFER and check_config('pushsafer'): updateState(db,"Send: PUSHSAFER") - mylog('info', [' Sending report by PUSHSAFER']) + mylog('info', ['[Notification] Sending report by PUSHSAFER']) send_pushsafer (mail_text) else : - mylog('verbose', [' Skip PUSHSAFER']) + mylog('verbose', ['[Notification] Skip PUSHSAFER']) # Update MQTT entities if conf.REPORT_MQTT and check_config('mqtt'): updateState(db,"Send: MQTT") - mylog('info', [' Establishing MQTT thread']) + mylog('info', ['[Notification] Establishing MQTT thread']) mqtt_start() else : - mylog('verbose', [' Skip MQTT']) + mylog('verbose', ['[Notification] Skip MQTT']) else : - mylog('verbose', [' No changes to report']) + mylog('verbose', ['[Notification] No changes to report']) # Clean Pending Alert Events sql.execute ("""UPDATE Devices SET dev_LastNotification = ? @@ -310,7 +310,7 @@ def send_notifications (db): changedPorts_json_struc = None # DEBUG - print number of rows updated - mylog('info', ['[', timeNow(), '] Notifications: ', sql.rowcount]) + mylog('info', ['[Notification] Notifications changes: ', sql.rowcount]) # Commit changes db.commitDB() @@ -321,42 +321,42 @@ def check_config(service): if service == 'email': if conf.SMTP_SERVER == '' or conf.REPORT_FROM == '' or conf.REPORT_TO == '': - mylog('none', [' Error: Email service not set up correctly. Check your pialert.conf SMTP_*, REPORT_FROM and REPORT_TO variables.']) + mylog('none', ['[Check Config] Error: Email service not set up correctly. Check your pialert.conf SMTP_*, REPORT_FROM and REPORT_TO variables.']) return False else: return True if service == 'apprise': if conf.APPRISE_URL == '' or conf.APPRISE_HOST == '': - mylog('none', [' Error: Apprise service not set up correctly. 
Check your pialert.conf APPRISE_* variables.']) + mylog('none', ['[Check Config] Error: Apprise service not set up correctly. Check your pialert.conf APPRISE_* variables.']) return False else: return True if service == 'webhook': if conf.WEBHOOK_URL == '': - mylog('none', [' Error: Webhook service not set up correctly. Check your pialert.conf WEBHOOK_* variables.']) + mylog('none', ['[Check Config] Error: Webhook service not set up correctly. Check your pialert.conf WEBHOOK_* variables.']) return False else: return True if service == 'ntfy': if conf.NTFY_HOST == '' or conf.NTFY_TOPIC == '': - mylog('none', [' Error: NTFY service not set up correctly. Check your pialert.conf NTFY_* variables.']) + mylog('none', ['[Check Config] Error: NTFY service not set up correctly. Check your pialert.conf NTFY_* variables.']) return False else: return True if service == 'pushsafer': if conf.PUSHSAFER_TOKEN == 'ApiKey': - mylog('none', [' Error: Pushsafer service not set up correctly. Check your pialert.conf PUSHSAFER_TOKEN variable.']) + mylog('none', ['[Check Config] Error: Pushsafer service not set up correctly. Check your pialert.conf PUSHSAFER_TOKEN variable.']) return False else: return True if service == 'mqtt': if conf.MQTT_BROKER == '' or conf.MQTT_PORT == '' or conf.MQTT_USER == '' or conf.MQTT_PASSWORD == '': - mylog('none', [' Error: MQTT service not set up correctly. Check your pialert.conf MQTT_* variables.']) + mylog('none', ['[Check Config] Error: MQTT service not set up correctly. Check your pialert.conf MQTT_* variables.']) return False else: return True diff --git a/pialert/arpscan.py b/pialert/scanners/arpscan.py similarity index 84% rename from pialert/arpscan.py rename to pialert/scanners/arpscan.py index 54af2213..e3077a24 100644 --- a/pialert/arpscan.py +++ b/pialert/scanners/arpscan.py @@ -3,7 +3,6 @@ import subprocess from logger import mylog - #------------------------------------------------------------------------------- def execute_arpscan (userSubnets): @@ -34,6 +33,7 @@ def execute_arpscan (userSubnets): unique_devices.append(device) # return list + mylog('debug', ['[ARP Scan] Completed found ', len(unique_devices) ,' devices ' ]) return unique_devices #------------------------------------------------------------------------------- @@ -41,6 +41,7 @@ def execute_arpscan_on_interface (interface): # Prepare command arguments subnets = interface.strip().split() # Retry is 6 to avoid false offline devices + mylog('debug', ['[ARP Scan] - arpscan command: sudo arp-scan --ignoredups --retry=6 ', str(subnets)]) arpscan_args = ['sudo', 'arp-scan', '--ignoredups', '--retry=6'] + subnets # Execute command @@ -49,7 +50,8 @@ def execute_arpscan_on_interface (interface): result = subprocess.check_output (arpscan_args, universal_newlines=True) except subprocess.CalledProcessError as e: # An error occured, handle it - mylog('none', [e.output]) + mylog('none', ['[ARP Scan]', e.output]) result = "" + mylog('debug', ['[ARP Scan] on Interface Completed with results: ', result]) return result diff --git a/pialert/internet.py b/pialert/scanners/internet.py similarity index 81% rename from pialert/internet.py rename to pialert/scanners/internet.py index cd68f881..d309a5c7 100644 --- a/pialert/internet.py +++ b/pialert/scanners/internet.py @@ -23,53 +23,53 @@ def check_internet_IP ( db ): # Header updateState(db,"Scan: Internet IP") - mylog('verbose', ['[', timeNow(), '] Check Internet IP:']) + mylog('verbose', ['[Internet IP] Check Internet IP started']) # Get Internet IP - mylog('verbose', [' 
Retrieving Internet IP:'])
+    mylog('verbose', ['[Internet IP] - Retrieving Internet IP'])
     internet_IP = get_internet_IP(conf.DIG_GET_IP_ARG)
 
     # TESTING - Force IP
     # internet_IP = "1.2.3.4"
 
     # Check result = IP
     if internet_IP == "" :
-        mylog('none', ['    Error retrieving Internet IP'])
-        mylog('none', ['    Exiting...'])
+        mylog('none', ['[Internet IP] Error retrieving Internet IP'])
+        mylog('none', ['[Internet IP] Exiting...'])
         return False
-    mylog('verbose', ['   ', internet_IP])
+    mylog('verbose', ['[Internet IP] IP: ', internet_IP])
 
     # Get previous stored IP
-    mylog('verbose', ['    Retrieving previous IP:'])
+    mylog('verbose', ['[Internet IP] Retrieving previous IP:'])
     previous_IP = get_previous_internet_IP (db)
-    mylog('verbose', ['   ', previous_IP])
+    mylog('verbose', ['[Internet IP] ', previous_IP])
 
     # Check IP Change
     if internet_IP != previous_IP :
-        mylog('info', ['    New internet IP: ', internet_IP])
+        mylog('info', ['[Internet IP] New internet IP: ', internet_IP])
         save_new_internet_IP (db, internet_IP)
     else :
-        mylog('verbose', ['    No changes to perform'])
+        mylog('verbose', ['[Internet IP] No changes to perform'])
 
     # Get Dynamic DNS IP
     if conf.DDNS_ACTIVE :
-        mylog('verbose', ['    Retrieving Dynamic DNS IP'])
+        mylog('verbose', ['[DDNS] Retrieving Dynamic DNS IP'])
         dns_IP = get_dynamic_DNS_IP()
 
         # Check Dynamic DNS IP
         if dns_IP == "" or dns_IP == "0.0.0.0" :
-            mylog('info', ['    Error retrieving Dynamic DNS IP'])
-            mylog('info', ['   ', dns_IP])
+            mylog('none', ['[DDNS] Error retrieving Dynamic DNS IP'])
+            mylog('none', ['[DDNS] ', dns_IP])
 
         # Check DNS Change
         if dns_IP != internet_IP :
-            mylog('info', ['    Updating Dynamic DNS IP'])
+            mylog('none', ['[DDNS] Updating Dynamic DNS IP'])
             message = set_dynamic_DNS_IP ()
-            mylog('info', ['   ', message])
+            mylog('none', ['[DDNS] ', message])
         else :
-            mylog('verbose', ['    No changes to perform'])
+            mylog('verbose', ['[DDNS] No changes to perform'])
     else :
-        mylog('verbose', ['    Skipping Dynamic DNS update'])
+        mylog('verbose', ['[DDNS] Skipping Dynamic DNS update'])
 
 
@@ -164,7 +164,7 @@ def get_dynamic_DNS_IP ():
         dig_output = subprocess.check_output (dig_args, universal_newlines=True)
     except subprocess.CalledProcessError as e:
         # An error occured, handle it
-        mylog('none', [e.output])
+        mylog('none', ['[DDNS] ERROR - ', e.output])
         dig_output = '' # probably no internet
 
     # Check result is an IP
@@ -189,7 +189,7 @@ def set_dynamic_DNS_IP ():
                                universal_newlines=True)
     except subprocess.CalledProcessError as e:
         # An error occured, handle it
-        mylog('none', [e.output])
+        mylog('none', ['[DDNS] ERROR - ',e.output])
         curl_output = ""
 
     return curl_output
diff --git a/pialert/nmapscan.py b/pialert/scanners/nmapscan.py
similarity index 100%
rename from pialert/nmapscan.py
rename to pialert/scanners/nmapscan.py
diff --git a/pialert/pholusscan.py b/pialert/scanners/pholusscan.py
similarity index 90%
rename from pialert/pholusscan.py
rename to pialert/scanners/pholusscan.py
index 6c6f79ec..fe4c9dfe 100644
--- a/pialert/pholusscan.py
+++ b/pialert/scanners/pholusscan.py
@@ -15,7 +15,7 @@ def performPholusScan (db, timeoutSec, userSubnets):
         temp = subnet.split("--interface=")
 
         if len(temp) != 2:
-            mylog('none', ["    Skip scan (need subnet in format '192.168.1.0/24 --inteface=eth0'), got: ", subnet])
+            mylog('none', ["[PholusScan] Skip scan (need subnet in format '192.168.1.0/24 --interface=eth0'), got: ", subnet])
             return
 
         mask = temp[0].strip()
@@ -23,8 +23,8 @@ def performPholusScan (db, timeoutSec, userSubnets):
 
     # logging & updating app state
     updateState(db,"Scan: Pholus")
-    mylog('info', 
['[', timeNow(), '] Scan: Pholus for ', str(timeoutSec), 's ('+ str(round(int(timeoutSec) / 60, 1)) +'min)']) - mylog('verbose', [" Pholus scan on [interface] ", interface, " [mask] " , mask]) + mylog('none', ['[PholusScan] Scan: Pholus for ', str(timeoutSec), 's ('+ str(round(int(timeoutSec) / 60, 1)) +'min)']) + mylog('verbose', ["[PholusScan] Pholus scan on [interface] ", interface, " [mask] " , mask]) # the scan always lasts 2x as long, so the desired user time from settings needs to be halved adjustedTimeout = str(round(int(timeoutSec) / 2, 0)) @@ -40,15 +40,15 @@ def performPholusScan (db, timeoutSec, userSubnets): output = subprocess.check_output (pholus_args, universal_newlines=True, stderr=subprocess.STDOUT, timeout=(timeoutSec + 30)) except subprocess.CalledProcessError as e: # An error occured, handle it - mylog('none', [e.output]) - mylog('none', [" Error - Pholus Scan - check logs"]) + mylog('none', [[PholusScan], e.output]) + mylog('none', ["[PholusScan] Error - Pholus Scan - check logs"]) except subprocess.TimeoutExpired as timeErr: - mylog('none', [' Pholus TIMEOUT - the process forcefully terminated as timeout reached']) + mylog('none', ['[PholusScan] Pholus TIMEOUT - the process forcefully terminated as timeout reached']) if output == "": # check if the subprocess failed - mylog('none', ['[', timeNow(), '] Scan: Pholus FAIL - check logs']) + mylog('none', ['[PholusScan] Scan: Pholus FAIL - check logs']) else: - mylog('verbose', ['[', timeNow(), '] Scan: Pholus SUCCESS']) + mylog('verbose', ['[PholusScan] Scan: Pholus SUCCESS']) # check the last run output f = open(logPath + '/pialert_pholus_lastrun.log', 'r+') @@ -176,7 +176,7 @@ def resolve_device_name_dig (pMAC, pIP): newName = subprocess.check_output (dig_args, universal_newlines=True) except subprocess.CalledProcessError as e: # An error occured, handle it - mylog('none', [e.output]) + mylog('none', ['[device_name_dig] ', e.output]) # newName = "Error - check logs" return -1 diff --git a/pialert/pihole.py b/pialert/scanners/pihole.py similarity index 61% rename from pialert/pihole.py rename to pialert/scanners/pihole.py index b85efd28..63ee3ba8 100644 --- a/pialert/pihole.py +++ b/pialert/scanners/pihole.py @@ -1,6 +1,10 @@ """ module to import db and leases from PiHole """ +import sqlite3 + +import conf from const import piholeDB, piholeDhcpleases +from logger import mylog #------------------------------------------------------------------------------- def copy_pihole_network (db): @@ -10,11 +14,20 @@ def copy_pihole_network (db): sql = db.sql # TO-DO # Open Pi-hole DB - sql.execute ("ATTACH DATABASE '"+ piholeDB +"' AS PH") + mylog('debug', '[PiHole Network] - attach PiHole DB') + + try: + sql.execute ("ATTACH DATABASE '"+ piholeDB +"' AS PH") + except sqlite3.Error as e: + mylog('none',[ '[PiHole Network] - SQL ERROR: ', e]) + # Copy Pi-hole Network table - sql.execute ("DELETE FROM PiHole_Network") - sql.execute ("""INSERT INTO PiHole_Network (PH_MAC, PH_Vendor, PH_LastQuery, + + try: + sql.execute ("DELETE FROM PiHole_Network") + + sql.execute ("""INSERT INTO PiHole_Network (PH_MAC, PH_Vendor, PH_LastQuery, PH_Name, PH_IP) SELECT hwaddr, macVendor, lastQuery, (SELECT name FROM PH.network_addresses @@ -24,24 +37,29 @@ def copy_pihole_network (db): FROM PH.network WHERE hwaddr NOT LIKE 'ip-%' AND hwaddr <> '00:00:00:00:00:00' """) - sql.execute ("""UPDATE PiHole_Network SET PH_Name = '(unknown)' + sql.execute ("""UPDATE PiHole_Network SET PH_Name = '(unknown)' WHERE PH_Name IS NULL OR PH_Name = '' """) - # Close 
Pi-hole DB
-    sql.execute ("DETACH PH")
-    db.commit()
+        # Close Pi-hole DB
+        sql.execute ("DETACH PH")
+    except sqlite3.Error as e:
+        mylog('none',[ '[PiHole Network] - SQL ERROR: ', e])
+
+    db.commitDB()
+
+    mylog('debug',[ '[PiHole Network] - completed - found ',sql.rowcount, ' devices'])
 
     return str(sql.rowcount) != "0"
 
 #-------------------------------------------------------------------------------
 def read_DHCP_leases (db):
     """
     read the PiHole DHCP file and insert all records into the DHCP_Leases table.
-    """
-
-    sql = db.sql # TO-DO
+    """
+    mylog('debug', '[PiHole DHCP] - read DHCP_Leases file')
 
     # Read DHCP Leases
     # Bugfix #1 - dhcp.leases: lines with different number of columns (5 col)
     data = []
+    reporting = False
     with open(piholeDhcpleases, 'r') as f:
         for line in f:
             reporting = True
@@ -50,8 +68,11 @@ def read_DHCP_leases (db):
             data.append (row)
 
     # Insert into PiAlert table
-    sql.executemany ("""INSERT INTO DHCP_Leases (DHCP_DateTime, DHCP_MAC,
+    db.sql.executemany ("""INSERT INTO DHCP_Leases (DHCP_DateTime, DHCP_MAC,
             DHCP_IP, DHCP_Name, DHCP_MAC2)
         VALUES (?, ?, ?, ?, ?)
         """, data)
-    db.commit()
+    db.commitDB()
+
+    mylog('debug', ['[PiHole DHCP] - completed - added ',len(data), ' devices.'])
+    return reporting
\ No newline at end of file

From 7177cdd51d2f4bef12fd99165c0aec8b8a8271a8 Mon Sep 17 00:00:00 2001
From: Data-Monkey
Date: Sun, 28 May 2023 16:10:58 +1000
Subject: [PATCH 15/20] more cleanup and removed files.py module again

---
 pialert/__main__.py            | 10 +++++--
 pialert/api.py                 | 38 +++++++++++--------------
 pialert/conf.py                |  2 +-
 pialert/const.py               | 28 ++++++++++++++++++-
 pialert/database.py            | 25 ++---------------
 pialert/files.py               | 37 ------------------------
 pialert/helper.py              | 25 ++++++++++++++++-
 pialert/initialise.py          | 22 +++++++++++----
 pialert/logger.py              |  8 ++----
 pialert/networkscan.py         | 51 ++++++++++++++++++++++------------
 pialert/plugin.py              |  6 ++--
 pialert/reporting.py           | 20 ++++++++-----
 pialert/scanners/nmapscan.py   |  3 +-
 pialert/scanners/pholusscan.py |  2 +-
 pialert/scanners/pihole.py     |  3 ++
 15 files changed, 151 insertions(+), 129 deletions(-)
 delete mode 100644 pialert/files.py

diff --git a/pialert/__main__.py b/pialert/__main__.py
index 7780d114..095b0cce 100755
--- a/pialert/__main__.py
+++ b/pialert/__main__.py
@@ -27,10 +27,10 @@
 from const import *
 from logger import mylog
 from helper import filePermissions, isNewVersion, timeNow, timeNowTZ, updateState
 from api import update_api
-from networkscan import scan_network
+from networkscan import process_scan, scan_network
 from initialise import importConfigs
 from mac_vendor import update_devices_MAC_vendors
-from database import DB, get_all_devices, sql_new_devices
+from database import DB, get_all_devices
 from reporting import check_and_run_event, send_notifications
 from plugin import run_plugin_scripts
 
@@ -145,7 +145,7 @@ def main ():
             check_and_run_event(db)
 
         # Update API endpoints
-        update_api()
+        update_api(db)
 
         # proceed if 1 minute passed
         if last_scan_run + datetime.timedelta(minutes=1) < loop_start_time :
@@ -252,6 +252,10 @@
             if conf.ENABLE_PLUGINS:
                 run_plugin_scripts(db,'always_after_scan')
 
+            # --------------------------------------------------
+            # process all the scanned data into new devices
+            mylog('debug', "[MAIN] start processing scan results")
+            process_scan (db, conf.arpscan_devices )
 
             # Reporting
             if conf.cycle in conf.check_report:
diff --git a/pialert/api.py b/pialert/api.py
index 2dd2e966..91549bf7 100644
--- a/pialert/api.py
+++ b/pialert/api.py
@@ -3,33 +3,27 @@
 import json
 
 # pialert 
modules import conf -from const import pialertPath +from const import (apiPath, sql_devices_all, sql_nmap_scan_all, sql_pholus_scan_all, sql_events_pending_alert, + sql_settings, sql_plugins_events, sql_plugins_history, sql_plugins_objects,sql_language_strings) from logger import mylog -from files import write_file -from database import * +from helper import write_file apiEndpoints = [] #=============================================================================== # API #=============================================================================== -def update_api(isNotification = False, updateOnlyDataSources = []): - mylog('verbose', [' [API] Update API not doing anything for now !']) - return +def update_api(db, isNotification = False, updateOnlyDataSources = []): + mylog('verbose', ['[API] Update API starting']) + # return - folder = pialertPath + '/front/api/' + folder = apiPath - if isNotification: - # Update last notification alert in all formats - mylog('verbose', [' [API] Updating notification_* files in /front/api']) - - write_file(folder + 'notification_text.txt' , mail_text) - write_file(folder + 'notification_text.html' , mail_html) - write_file(folder + 'notification_json_final.json' , json.dumps(json_final)) + # update notifications moved to reporting send_api() # Save plugins if conf.ENABLE_PLUGINS: - write_file(folder + 'plugins.json' , json.dumps({"data" : plugins})) + write_file(folder + 'plugins.json' , json.dumps({"data" : conf.plugins})) # prepare database tables we want to expose dataSourcesSQLs = [ @@ -50,19 +44,19 @@ def update_api(isNotification = False, updateOnlyDataSources = []): if updateOnlyDataSources == [] or dsSQL[0] in updateOnlyDataSources: - api_endpoint_class(dsSQL[1], folder + 'table_' + dsSQL[0] + '.json') + api_endpoint_class(db, dsSQL[1], folder + 'table_' + dsSQL[0] + '.json') #------------------------------------------------------------------------------- class api_endpoint_class: - def __init__(self, db, path): + def __init__(self, db, query, path): global apiEndpoints self.db = db - self.sql = db.sql - self.jsonData = db.get_table_as_json( self.sql).json + self.query = query + self.jsonData = db.get_table_as_json(self.query).json self.path = path self.fileName = path.split('/')[-1] self.hash = hash(json.dumps(self.jsonData)) @@ -76,7 +70,7 @@ class api_endpoint_class: # search previous endpoint states to check if API needs updating for endpoint in apiEndpoints: # match sql and API endpoint path - if endpoint.sql == self.sql and endpoint.path == self.path: + if endpoint.query == self.query and endpoint.path == self.path: found = True if endpoint.hash != self.hash: changed = True @@ -87,7 +81,7 @@ class api_endpoint_class: # cehck if API endpoints have changed or if it's a new one if not found or changed: - mylog('verbose', [f' [API] Updating {self.fileName} file in /front/api']) + mylog('verbose', [f'[API] Updating {self.fileName} file in /front/api']) write_file(self.path, json.dumps(self.jsonData)) @@ -98,5 +92,5 @@ class api_endpoint_class: # update hash apiEndpoints[changedIndex].hash = self.hash else: - mylog('info', [f' [API] ERROR Updating {self.fileName}']) + mylog('info', [f'[API] ERROR Updating {self.fileName}']) diff --git a/pialert/conf.py b/pialert/conf.py index 69157e1d..74991966 100644 --- a/pialert/conf.py +++ b/pialert/conf.py @@ -16,7 +16,7 @@ newVersionAvailable = False time_started = '' check_report = [] log_timestamp = 0 - +arpscan_devices = [] # ACTUAL CONFIGRATION ITEMS set to defaults diff --git a/pialert/const.py 
b/pialert/const.py index ab720c0a..f57aca39 100644 --- a/pialert/const.py +++ b/pialert/const.py @@ -12,10 +12,36 @@ dbPath = '/db/pialert.db' pluginsPath = pialertPath + '/front/plugins' logPath = pialertPath + '/front/log' +apiPath = pialertPath + '/front/api/' fullConfPath = pialertPath + confPath fullDbPath = pialertPath + dbPath fullPholusPath = pialertPath+'/pholus/pholus3.py' + vendorsDB = '/usr/share/arp-scan/ieee-oui.txt' piholeDB = '/etc/pihole/pihole-FTL.db' -piholeDhcpleases = '/etc/pihole/dhcp.leases' \ No newline at end of file +piholeDhcpleases = '/etc/pihole/dhcp.leases' + + +#=============================================================================== +# SQL queries +#=============================================================================== +sql_devices_all = "select dev_MAC, dev_Name, dev_DeviceType, dev_Vendor, dev_Group, dev_FirstConnection, dev_LastConnection, dev_LastIP, dev_StaticIP, dev_PresentLastScan, dev_LastNotification, dev_NewDevice, dev_Network_Node_MAC_ADDR, dev_Network_Node_port, dev_Icon from Devices" +sql_devices_stats = "SELECT Online_Devices as online, Down_Devices as down, All_Devices as 'all', Archived_Devices as archived, (select count(*) from Devices a where dev_NewDevice = 1 ) as new, (select count(*) from Devices a where dev_Name = '(unknown)' or dev_Name = '(name not found)' ) as unknown from Online_History order by Scan_Date desc limit 1" +sql_nmap_scan_all = "SELECT * FROM Nmap_Scan" +sql_pholus_scan_all = "SELECT * FROM Pholus_Scan" +sql_events_pending_alert = "SELECT * FROM Events where eve_PendingAlertEmail is not 0" +sql_settings = "SELECT * FROM Settings" +sql_plugins_objects = "SELECT * FROM Plugins_Objects" +sql_language_strings = "SELECT * FROM Plugins_Language_Strings" +sql_plugins_events = "SELECT * FROM Plugins_Events" +sql_plugins_history = "SELECT * FROM Plugins_History ORDER BY 'Index' DESC" +sql_new_devices = """SELECT * FROM ( SELECT eve_IP as dev_LastIP, eve_MAC as dev_MAC FROM Events_Devices + WHERE eve_PendingAlertEmail = 1 + AND eve_EventType = 'New Device' + ORDER BY eve_DateTime ) t1 + LEFT JOIN + ( + SELECT dev_Name, dev_MAC as dev_MAC_t2 FROM Devices + ) t2 + ON t1.dev_MAC = t2.dev_MAC_t2""" \ No newline at end of file diff --git a/pialert/database.py b/pialert/database.py index 71b5d64c..c40a3732 100644 --- a/pialert/database.py +++ b/pialert/database.py @@ -3,7 +3,7 @@ import sqlite3 # pialert modules -from const import fullDbPath +from const import fullDbPath, sql_devices_stats, sql_devices_all from logger import mylog from helper import json_struc, initOrSetParam, row_to_json, timeNow #, updateState @@ -11,28 +11,7 @@ from helper import json_struc, initOrSetParam, row_to_json, timeNow #, updateSta -#=============================================================================== -# SQL queries -#=============================================================================== -sql_devices_all = "select dev_MAC, dev_Name, dev_DeviceType, dev_Vendor, dev_Group, dev_FirstConnection, dev_LastConnection, dev_LastIP, dev_StaticIP, dev_PresentLastScan, dev_LastNotification, dev_NewDevice, dev_Network_Node_MAC_ADDR, dev_Network_Node_port, dev_Icon from Devices" -sql_devices_stats = "SELECT Online_Devices as online, Down_Devices as down, All_Devices as 'all', Archived_Devices as archived, (select count(*) from Devices a where dev_NewDevice = 1 ) as new, (select count(*) from Devices a where dev_Name = '(unknown)' or dev_Name = '(name not found)' ) as unknown from Online_History order by Scan_Date desc limit 1" 
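These SQL constants are being removed from database.py and re-homed in const.py (added earlier in this patch) so other modules can share them without importing database.py. A minimal usage sketch, assuming an open DB() instance named db and the read() guard introduced on the DB class:

#-------------------------------------------------------------------------------
# Illustrative only: named queries from const.py combined with DB.read().
from const import sql_devices_all

rows = db.read(sql_devices_all)    # parameter-less SELECT
online = db.read("SELECT * FROM CurrentScan WHERE cur_ScanCycle = ?", 1)
# read() logs an error and returns None if the statement is not a SELECT
# or if the number of ? placeholders does not match the supplied arguments.
#-------------------------------------------------------------------------------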
-sql_nmap_scan_all = "SELECT * FROM Nmap_Scan" -sql_pholus_scan_all = "SELECT * FROM Pholus_Scan" -sql_events_pending_alert = "SELECT * FROM Events where eve_PendingAlertEmail is not 0" -sql_settings = "SELECT * FROM Settings" -sql_plugins_objects = "SELECT * FROM Plugins_Objects" -sql_language_strings = "SELECT * FROM Plugins_Language_Strings" -sql_plugins_events = "SELECT * FROM Plugins_Events" -sql_plugins_history = "SELECT * FROM Plugins_History ORDER BY 'Index' DESC" -sql_new_devices = """SELECT * FROM ( SELECT eve_IP as dev_LastIP, eve_MAC as dev_MAC FROM Events_Devices - WHERE eve_PendingAlertEmail = 1 - AND eve_EventType = 'New Device' - ORDER BY eve_DateTime ) t1 - LEFT JOIN - ( - SELECT dev_Name, dev_MAC as dev_MAC_t2 FROM Devices - ) t2 - ON t1.dev_MAC = t2.dev_MAC_t2""" + class DB(): diff --git a/pialert/files.py b/pialert/files.py deleted file mode 100644 index 55d33faa..00000000 --- a/pialert/files.py +++ /dev/null @@ -1,37 +0,0 @@ -import io -import sys - - -#------------------------------------------------------------------------------- -def write_file (pPath, pText): - # Write the text depending using the correct python version - if sys.version_info < (3, 0): - file = io.open (pPath , mode='w', encoding='utf-8') - file.write ( pText.decode('unicode_escape') ) - file.close() - else: - file = open (pPath, 'w', encoding='utf-8') - if pText is None: - pText = "" - file.write (pText) - file.close() - -#------------------------------------------------------------------------------- -def get_file_content(path): - - f = open(path, 'r') - content = f.read() - f.close() - - return content - -#------------------------------------------------------------------------------- -def read_config_file(filename): - """ - retuns dict on the config file key:value pairs - """ - # load the variables from pialert.conf - code = compile(filename.read_text(), filename.name, "exec") - confDict = {} # config dictionary - exec(code, {"__builtins__": {}}, confDict) - return confDict \ No newline at end of file diff --git a/pialert/helper.py b/pialert/helper.py index d33c5c97..e1d5881f 100644 --- a/pialert/helper.py +++ b/pialert/helper.py @@ -1,5 +1,7 @@ """ Colection of generic functions to support Pi.Alert """ +import io +import sys import datetime import os import re @@ -14,7 +16,6 @@ import requests import conf from const import * from logger import mylog, logResult -# from api import update_api # to avoid circular reference @@ -297,3 +298,25 @@ class json_struc: +#------------------------------------------------------------------------------- +def get_file_content(path): + + f = open(path, 'r') + content = f.read() + f.close() + + return content + +#------------------------------------------------------------------------------- +def write_file (pPath, pText): + # Write the text depending using the correct python version + if sys.version_info < (3, 0): + file = io.open (pPath , mode='w', encoding='utf-8') + file.write ( pText.decode('unicode_escape') ) + file.close() + else: + file = open (pPath, 'w', encoding='utf-8') + if pText is None: + pText = "" + file.write (pText) + file.close() \ No newline at end of file diff --git a/pialert/initialise.py b/pialert/initialise.py index 9e858d35..38a6dd5b 100644 --- a/pialert/initialise.py +++ b/pialert/initialise.py @@ -10,7 +10,7 @@ import conf from const import * from helper import collect_lang_strings, timeNow, updateSubnets, initOrSetParam from logger import mylog -from files import read_config_file +from api import update_api from scheduler import 
schedule_class
 from plugin import get_plugins_configs, print_plugin_info
 
@@ -148,9 +148,6 @@ def importConfigs (db):
     # API
     conf.API_CUSTOM_SQL = ccd('API_CUSTOM_SQL', 'SELECT * FROM Devices WHERE dev_PresentLastScan = 0' , c_d, 'Custom endpoint', 'text', '', 'API')
 
-    # Prepare scheduler
-    #global tz, mySchedules, plugins
-
     # Init timezone in case it changed
     conf.tz = timezone(conf.TIMEZONE)
 
@@ -228,7 +225,20 @@ def importConfigs (db):
     db.commitDB()
 
     # update only the settings datasource
-    # update_api(False, ["settings"]) - # TO DO this creates a circular reference between API and HELPER !
+    update_api(db, False, ["settings"])
+    #TO DO this creates a circular reference between API and HELPER !
 
     mylog('info', '[Config] Imported new config')
+
+
+
+#-------------------------------------------------------------------------------
+def read_config_file(filename):
+    """
+    returns a dict of the config file key:value pairs
+    """
+    # load the variables from pialert.conf
+    code = compile(filename.read_text(), filename.name, "exec")
+    confDict = {} # config dictionary
+    exec(code, {"__builtins__": {}}, confDict)
+    return confDict
\ No newline at end of file
diff --git a/pialert/logger.py b/pialert/logger.py
index 8b6cf060..3ae32dc2 100644
--- a/pialert/logger.py
+++ b/pialert/logger.py
@@ -9,11 +9,9 @@
 from const import *
 #-------------------------------------------------------------------------------
 # duplication from helper to avoid circle
 #-------------------------------------------------------------------------------
-def timeNowTZ():
-    if conf.tz == '':
+def timeNow():
         return datetime.datetime.now().replace(microsecond=0)
-    else:
-        return datetime.datetime.now(conf.tz).replace(microsecond=0)
+
 
 #-------------------------------------------------------------------------------
 debugLevels = [
@@ -38,7 +36,7 @@ def mylog(requestedDebugLevel, n):
 #-------------------------------------------------------------------------------
 def file_print (*args):
 
-    result = timeNowTZ().strftime ('%H:%M:%S') + ' '
+    result = timeNow().strftime ('%H:%M:%S') + ' '
 
     for arg in args:
         result += str(arg)
diff --git a/pialert/networkscan.py b/pialert/networkscan.py
index 423f88b7..7fcf719a 100644
--- a/pialert/networkscan.py
+++ b/pialert/networkscan.py
@@ -36,14 +36,13 @@ def scan_network (db):
 
     db.commitDB()
 
-    # ScanCycle data
-    cycle_interval  = scanCycle_data['cic_EveryXmin']
+
 
     # arp-scan command
-    arpscan_devices = []
+    conf.arpscan_devices = []
     if conf.ENABLE_ARPSCAN:
         mylog('verbose','[Network Scan] arp-scan start')
-        arpscan_devices = execute_arpscan (conf.userSubnets)
+        conf.arpscan_devices = execute_arpscan (conf.userSubnets)
         mylog('verbose','[Network Scan] arp-scan ends')
 
     # Pi-hole method
@@ -59,51 +58,69 @@
 
     db.commitDB()
 
+
+def process_scan (db, arpscan_devices = conf.arpscan_devices ):
+
+
+    # Query ScanCycle properties
+    scanCycle_data = query_ScanCycle_Data (db, True)
+    if scanCycle_data is None:
+        mylog('none', ['\n'])
+        mylog('none', ['[Process Scan]*************** ERROR ***************'])
+        mylog('none', ['[Process Scan] ScanCycle %s not found' % conf.cycle ])
+        mylog('none', ['[Process Scan] Exiting...\n'])
+        return False
+
+    db.commitDB()
+
+    # ScanCycle data
+    cycle_interval  = scanCycle_data['cic_EveryXmin']
+
     # Load current scan data
-    mylog('verbose','[Network Scan] Processing scan results')
+    mylog('verbose','[Process Scan] Processing scan results')
     save_scanned_devices (db, arpscan_devices, cycle_interval)
 
     # Print stats
-    mylog('none','[Network Scan] Print Stats')
+    
mylog('none','[Process Scan] Print Stats')
     print_scan_stats(db)
-    mylog('none','[Network Scan] Stats end')
+    mylog('none','[Process Scan] Stats end')
 
     # Create Events
-    mylog('verbose','[Network Scan] Updating DB Info')
-    mylog('verbose','[Network Scan] Sessions Events (connect / discconnect)')
+    mylog('verbose','[Process Scan] Updating DB Info')
+    mylog('verbose','[Process Scan] Sessions Events (connect / disconnect)')
     insert_events(db)
 
     # Create New Devices
     # after create events -> avoid 'connection' event
-    mylog('verbose','[Network Scan] Creating new devices')
+    mylog('verbose','[Process Scan] Creating new devices')
     create_new_devices (db)
 
     # Update devices info
-    mylog('verbose','[Network Scan] Updating Devices Info')
+    mylog('verbose','[Process Scan] Updating Devices Info')
     update_devices_data_from_scan (db)
 
     # Resolve devices names
-    mylog('verbose','[Network Scan] Resolve devices names')
+    mylog('verbose','[Process Scan] Resolve devices names')
     update_devices_names(db)
 
     # Void false connection - disconnections
-    mylog('verbose','[Network Scan] Voiding false (ghost) disconnections')
+    mylog('verbose','[Process Scan] Voiding false (ghost) disconnections')
     void_ghost_disconnections (db)
 
     # Pair session events (Connection / Disconnection)
-    mylog('verbose','[Network Scan] Pairing session events (connection / disconnection) ')
+    mylog('verbose','[Process Scan] Pairing session events (connection / disconnection) ')
     pair_sessions_events(db)
 
     # Sessions snapshot
-    mylog('verbose','[Network Scan] Creating sessions snapshot')
+    mylog('verbose','[Process Scan] Creating sessions snapshot')
     create_sessions_snapshot (db)
 
     # Sessions snapshot
-    mylog('verbose','[Network Scan] Inserting scan results into Online_History')
+    mylog('verbose','[Process Scan] Inserting scan results into Online_History')
     insertOnlineHistory(db,conf.cycle)
 
     # Skip repeated notifications
-    mylog('verbose','[Network Scan] Skipping repeated notifications')
+    mylog('verbose','[Process Scan] Skipping repeated notifications')
     skip_repeated_notifications (db)
 
     # Commit changes
diff --git a/pialert/plugin.py b/pialert/plugin.py
index 647464ae..83714a62 100644
--- a/pialert/plugin.py
+++ b/pialert/plugin.py
@@ -7,9 +7,9 @@ from collections import namedtuple
 # pialert modules
 import conf
 from const import pluginsPath, logPath
-from files import get_file_content, write_file
 from logger import mylog
-from helper import timeNowTZ, updateState
+from helper import timeNowTZ, updateState, get_file_content, write_file
+from api import update_api
 
 
 
@@ -269,7 +269,7 @@ def execute_plugin(db, plugin):
         process_plugin_events(db, plugin)
 
     # update API endpoints
-    # update_api(False, ["plugins_events","plugins_objects"]) # TO-DO - remover circular reference
+    update_api(db, False, ["plugins_events","plugins_objects"])
 
 #-------------------------------------------------------------------------------
 def custom_plugin_decoder(pluginDict):
diff --git a/pialert/reporting.py b/pialert/reporting.py
index d12ec74c..6a8186ea 100644
--- a/pialert/reporting.py
+++ b/pialert/reporting.py
@@ -12,10 +12,8 @@ from json2table import convert
 
 # pialert modules
 import conf
-from const import pialertPath, logPath
-#from database import get_table_as_json
-from files import get_file_content, write_file
-from helper import generate_mac_links, isNewVersion, removeDuplicateNewLines, timeNow, hide_email, json_struc, updateState
+from const import pialertPath, logPath, apiPath
+from helper import generate_mac_links, removeDuplicateNewLines, timeNow, hide_email, json_struc, 
updateState, get_file_content, write_file
 from logger import logResult, mylog, print_log
 from mqtt import mqtt_start
@@ -250,12 +248,13 @@ def send_notifications (db):
     write_file (logPath + '/report_output.html', mail_html)
 
     # Send Mail
-    if json_internet != [] or json_new_devices != [] or json_down_devices != [] or json_events != [] or json_ports != [] or conf.debug_force_notification or plugins_report:
-
-        # update_api(True) # TO-DO
+    if json_internet != [] or json_new_devices != [] or json_down_devices != [] or json_events != [] or json_ports != [] or conf.debug_force_notification or plugins_report:
 
         mylog('none', ['[Notification] Changes detected, sending reports'])
 
+        mylog('info', ['[Notification] Updating API files'])
+        send_api()
+
         if conf.REPORT_MAIL and check_config('email'):
             updateState(db,"Send: Email")
             mylog('info', ['[Notification] Sending report by Email'])
@@ -613,6 +612,13 @@ def to_text(_json):
 
     return payloadData
 
+#-------------------------------------------------------------------------------
+def send_api():
+    mylog('verbose', ['[Send API] Updating notification_* files in ', apiPath])
+
+    write_file(apiPath + 'notification_text.txt'  , mail_text)
+    write_file(apiPath + 'notification_text.html' , mail_html)
+    write_file(apiPath + 'notification_json_final.json' , json.dumps(json_final))
 
 #-------------------------------------------------------------------------------
 
diff --git a/pialert/scanners/nmapscan.py b/pialert/scanners/nmapscan.py
index 38a55105..b13cb5d8 100644
--- a/pialert/scanners/nmapscan.py
+++ b/pialert/scanners/nmapscan.py
@@ -2,8 +2,7 @@
 import subprocess
 
 import conf
-from const import logPath
-from database import sql_nmap_scan_all
+from const import logPath, sql_nmap_scan_all
 from helper import json_struc, timeNow, updateState
 from logger import append_line_to_file, mylog
 #-------------------------------------------------------------------------------
diff --git a/pialert/scanners/pholusscan.py b/pialert/scanners/pholusscan.py
index fe4c9dfe..0f9e9fc1 100644
--- a/pialert/scanners/pholusscan.py
+++ b/pialert/scanners/pholusscan.py
@@ -40,7 +40,7 @@ def performPholusScan (db, timeoutSec, userSubnets):
         output = subprocess.check_output (pholus_args, universal_newlines=True,  stderr=subprocess.STDOUT, timeout=(timeoutSec + 30))
     except subprocess.CalledProcessError as e:
         # An error occured, handle it
-        mylog('none', [[PholusScan], e.output])
+        mylog('none', ['[PholusScan]', e.output])
         mylog('none', ["[PholusScan] Error - Pholus Scan - check logs"])
     except subprocess.TimeoutExpired as timeErr:
         mylog('none', ['[PholusScan] Pholus TIMEOUT - the process forcefully terminated as timeout reached'])
diff --git a/pialert/scanners/pihole.py b/pialert/scanners/pihole.py
index 63ee3ba8..31ce2bdc 100644
--- a/pialert/scanners/pihole.py
+++ b/pialert/scanners/pihole.py
@@ -50,6 +50,9 @@ def copy_pihole_network (db):
     mylog('debug',[ '[PiHole Network] - completed - found ',sql.rowcount, ' devices'])
 
     return str(sql.rowcount) != "0"
+
+#-------------------------------------------------------------------------------
+
 #-------------------------------------------------------------------------------
 def read_DHCP_leases (db):
     """

From f50e3d4e926f32b80f775b9fbb08a82c5759b6ec Mon Sep 17 00:00:00 2001
From: Data-Monkey
Date: Mon, 29 May 2023 16:35:09 +1000
Subject: [PATCH 16/20] split publishers

---
 pialert/publishers/mqtt.py | 254 +++++++++++++++++++++++++++++++++++++
 1 file changed, 254 insertions(+)
 create mode 100644 pialert/publishers/mqtt.py

diff --git a/pialert/publishers/mqtt.py 
diff --git a/pialert/publishers/mqtt.py b/pialert/publishers/mqtt.py
new file mode 100644
index 00000000..826a8ede
--- /dev/null
+++ b/pialert/publishers/mqtt.py
@@ -0,0 +1,254 @@
+
+import time
+import re
+from paho.mqtt import client as mqtt_client
+
+import conf
+from logger import mylog
+from database import get_all_devices, get_device_stats
+from helper import bytes_to_string, sanitize_string
+
+
+
+#-------------------------------------------------------------------------------
+# MQTT
+#-------------------------------------------------------------------------------
+
+mqtt_connected_to_broker = False
+mqtt_sensors = []
+
+
+#-------------------------------------------------------------------------------
+def check_config():
+    if conf.MQTT_BROKER == '' or conf.MQTT_PORT == '' or conf.MQTT_USER == '' or conf.MQTT_PASSWORD == '':
+        mylog('none', ['[Check Config] Error: MQTT service not set up correctly. Check your pialert.conf MQTT_* variables.'])
+        return False
+    else:
+        return True
+
+
+#-------------------------------------------------------------------------------
+class sensor_config:
+    def __init__(self, deviceId, deviceName, sensorType, sensorName, icon):
+        self.deviceId = deviceId
+        self.deviceName = deviceName
+        self.sensorType = sensorType
+        self.sensorName = sensorName
+        self.icon = icon
+        self.hash = str(hash(str(deviceId) + str(deviceName)+ str(sensorType)+ str(sensorName)+ str(icon)))
+
+#-------------------------------------------------------------------------------
+
+def publish_mqtt(client, topic, message):
+    status = 1
+    while status != 0:
+        result = client.publish(
+            topic=topic,
+            payload=message,
+            qos=conf.MQTT_QOS,
+            retain=True,
+        )
+
+        status = result[0]
+
+        if status != 0:
+            mylog('info', ["Waiting to reconnect to MQTT broker"])
+            time.sleep(0.1)
+    return True
+
+#-------------------------------------------------------------------------------
+def create_generic_device(client):
+
+    deviceName = 'PiAlert'
+    deviceId = 'pialert'
+
+    create_sensor(client, deviceId, deviceName, 'sensor', 'online', 'wifi-check')
+    create_sensor(client, deviceId, deviceName, 'sensor', 'down', 'wifi-cancel')
+    create_sensor(client, deviceId, deviceName, 'sensor', 'all', 'wifi')
+    create_sensor(client, deviceId, deviceName, 'sensor', 'archived', 'wifi-lock')
+    create_sensor(client, deviceId, deviceName, 'sensor', 'new', 'wifi-plus')
+    create_sensor(client, deviceId, deviceName, 'sensor', 'unknown', 'wifi-alert')
+
+
+#-------------------------------------------------------------------------------
+def create_sensor(client, deviceId, deviceName, sensorType, sensorName, icon):
+
+    new_sensor_config = sensor_config(deviceId, deviceName, sensorType, sensorName, icon)
+
+    # check if config already in list and if not, add it, otherwise skip
+    global mqtt_sensors, uniqueSensorCount
+
+    is_unique = True
+
+    for sensor in mqtt_sensors:
+        if sensor.hash == new_sensor_config.hash:
+            is_unique = False
+            break
+
+    # save if unique
+    if is_unique:
+        publish_sensor(client, new_sensor_config)
+
+
+
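+# NOTE on publish_sensor below: the Home Assistant discovery message is assembled
+# by hand-concatenated JSON. A json.dumps sketch of the same config message
+# (hypothetical, not wired in; field values mirror the concatenation below):
+#
+#     import json
+#     message = json.dumps({
+#         "name": sensorConf.deviceName + ' ' + sensorConf.sensorName,
+#         "state_topic": "system-sensors/" + sensorConf.sensorType + "/" + sensorConf.deviceId + "/state",
+#         "value_template": "{{value_json." + sensorConf.sensorName + "}}",
+#         "unique_id": sensorConf.deviceId + "_sensor_" + sensorConf.sensorName,
+#         "device": {
+#             "identifiers": [sensorConf.deviceId + "_sensor"],
+#             "manufacturer": "PiAlert",
+#             "name": sensorConf.deviceName,
+#         },
+#         "icon": "mdi:" + sensorConf.icon,
+#     })
+#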
"manufacturer": "PiAlert", \ + "name":"'+sensorConf.deviceName+'" \ + }, \ + "icon":"mdi:'+sensorConf.icon+'" \ + }' + + topic='homeassistant/'+sensorConf.sensorType+'/'+sensorConf.deviceId+'/'+sensorConf.sensorName+'/config' + + # add the sensor to the global list to keep track of succesfully added sensors + if publish_mqtt(client, topic, message): + # hack - delay adding to the queue in case the process is + time.sleep(conf.MQTT_DELAY_SEC) # restarted and previous publish processes aborted + # (it takes ~2s to update a sensor config on the broker) + mqtt_sensors.append(sensorConf) + +#------------------------------------------------------------------------------- +def mqtt_create_client(): + def on_disconnect(client, userdata, rc): + global mqtt_connected_to_broker + mqtt_connected_to_broker = False + + # not sure is below line is correct / necessary + # client = mqtt_create_client() + + def on_connect(client, userdata, flags, rc): + global mqtt_connected_to_broker + + if rc == 0: + mylog('verbose', [" Connected to broker"]) + mqtt_connected_to_broker = True # Signal connection + else: + mylog('none', [" Connection failed"]) + mqtt_connected_to_broker = False + + + client = mqtt_client.Client('PiAlert') # Set Connecting Client ID + client.username_pw_set(conf.MQTT_USER, conf.MQTT_PASSWORD) + client.on_connect = on_connect + client.on_disconnect = on_disconnect + client.connect(conf.MQTT_BROKER, conf.MQTT_PORT) + client.loop_start() + + return client + +#------------------------------------------------------------------------------- +def mqtt_start(): + + global client, mqtt_connected_to_broker + + if mqtt_connected_to_broker == False: + mqtt_connected_to_broker = True + client = mqtt_create_client() + + # General stats + + # Create a generic device for overal stats + create_generic_device(client) + + # Get the data + row = get_device_stats() + + columns = ["online","down","all","archived","new","unknown"] + + payload = "" + + # Update the values + for column in columns: + payload += '"'+column+'": ' + str(row[column]) +',' + + # Publish (warap into {} and remove last ',' from above) + publish_mqtt(client, "system-sensors/sensor/pialert/state", + '{ \ + '+ payload[:-1] +'\ + }' + ) + + + # Specific devices + + # Get all devices + devices = get_all_devices() + + sec_delay = len(devices) * int(conf.MQTT_DELAY_SEC)*5 + + mylog('info', [" Estimated delay: ", (sec_delay), 's ', '(', round(sec_delay/60,1) , 'min)' ]) + + for device in devices: + + # Create devices in Home Assistant - send config messages + deviceId = 'mac_' + device["dev_MAC"].replace(" ", "").replace(":", "_").lower() + deviceNameDisplay = re.sub('[^a-zA-Z0-9-_\s]', '', device["dev_Name"]) + + create_sensor(client, deviceId, deviceNameDisplay, 'sensor', 'last_ip', 'ip-network') + create_sensor(client, deviceId, deviceNameDisplay, 'binary_sensor', 'is_present', 'wifi') + create_sensor(client, deviceId, deviceNameDisplay, 'sensor', 'mac_address', 'folder-key-network') + create_sensor(client, deviceId, deviceNameDisplay, 'sensor', 'is_new', 'bell-alert-outline') + create_sensor(client, deviceId, deviceNameDisplay, 'sensor', 'vendor', 'cog') + + # update device sensors in home assistant + + publish_mqtt(client, 'system-sensors/sensor/'+deviceId+'/state', + '{ \ + "last_ip": "' + device["dev_LastIP"] +'", \ + "is_new": "' + str(device["dev_NewDevice"]) +'", \ + "vendor": "' + sanitize_string(device["dev_Vendor"]) +'", \ + "mac_address": "' + str(device["dev_MAC"]) +'" \ + }' + ) + + publish_mqtt(client, 
'system-sensors/binary_sensor/'+deviceId+'/state', + '{ \ + "is_present": "' + to_binary_sensor(str(device["dev_PresentLastScan"])) +'"\ + }' + ) + + # delete device / topic + # homeassistant/sensor/mac_44_ef_bf_c4_b1_af/is_present/config + # client.publish( + # topic="homeassistant/sensor/"+deviceId+"/is_present/config", + # payload="", + # qos=1, + # retain=True, + # ) + # time.sleep(10) + + +#=============================================================================== +# Home Assistant UTILs +#=============================================================================== +def to_binary_sensor(input): + # In HA a binary sensor returns ON or OFF + result = "OFF" + + # bytestring + if isinstance(input, str): + if input == "1": + result = "ON" + elif isinstance(input, int): + if input == 1: + result = "ON" + elif isinstance(input, bool): + if input == True: + result = "ON" + elif isinstance(input, bytes): + if bytes_to_string(input) == "1": + result = "ON" + return result \ No newline at end of file From 5b05be24add7e8a403ec43bb983f71676f2e6f67 Mon Sep 17 00:00:00 2001 From: Data-Monkey Date: Mon, 29 May 2023 16:35:22 +1000 Subject: [PATCH 17/20] split publishers --- pialert/helper.py | 121 ++++---- pialert/mqtt.py | 244 ---------------- pialert/publishers/__init__.py | 8 + pialert/publishers/apprise.py | 42 +++ pialert/publishers/email.py | 88 ++++++ pialert/publishers/ntfy.py | 36 +++ pialert/publishers/pushsafer.py | 33 +++ pialert/publishers/webhook.py | 98 +++++++ pialert/reporting.py | 477 +++++++++----------------------- test/__init__.py | 1 + test/test_helper.py | 29 ++ 11 files changed, 534 insertions(+), 643 deletions(-) delete mode 100644 pialert/mqtt.py create mode 100644 pialert/publishers/__init__.py create mode 100644 pialert/publishers/apprise.py create mode 100644 pialert/publishers/email.py create mode 100644 pialert/publishers/ntfy.py create mode 100644 pialert/publishers/pushsafer.py create mode 100644 pialert/publishers/webhook.py create mode 100644 test/__init__.py create mode 100644 test/test_helper.py diff --git a/pialert/helper.py b/pialert/helper.py index e1d5881f..58006d15 100644 --- a/pialert/helper.py +++ b/pialert/helper.py @@ -13,7 +13,7 @@ import time from pathlib import Path import requests -import conf +import conf from const import * from logger import mylog, logResult @@ -27,29 +27,29 @@ def timeNowTZ(): return datetime.datetime.now(conf.tz).replace(microsecond=0) #------------------------------------------------------------------------------- -def updateState(db, newState): +def updateState(db, newState): # ?? Why is the state written to the DB? 
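+    # (the answer appears to be that the frontend polls Back_App_State from the
+    #  Parameters table; purely as a hypothetical hardening, the concatenated UPDATE
+    #  below could also be written as a parameterized query:
+    #      db.sql.execute("UPDATE Parameters SET par_Value=? WHERE par_ID='Back_App_State'", (newState,))
+    #  )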
- + #sql = db.sql mylog('debug', '[updateState] changing state to: "' + newState +'"') - db.sql.execute ("UPDATE Parameters SET par_Value='"+ newState +"' WHERE par_ID='Back_App_State'") + db.sql.execute ("UPDATE Parameters SET par_Value='"+ newState +"' WHERE par_ID='Back_App_State'") db.commitDB() #------------------------------------------------------------------------------- def updateSubnets(scan_subnets): - subnets = [] + subnets = [] # multiple interfaces - if type(scan_subnets) is list: - for interface in scan_subnets : + if type(scan_subnets) is list: + for interface in scan_subnets : subnets.append(interface) # one interface only - else: - subnets.append(scan_subnets) + else: + subnets.append(scan_subnets) - return subnets + return subnets @@ -57,7 +57,7 @@ def updateSubnets(scan_subnets): # check RW access of DB and config file def checkPermissionsOK(): #global confR_access, confW_access, dbR_access, dbW_access - + confR_access = (os.access(fullConfPath, os.R_OK)) confW_access = (os.access(fullConfPath, os.W_OK)) dbR_access = (os.access(fullDbPath, os.R_OK)) @@ -72,14 +72,14 @@ def checkPermissionsOK(): mylog('none', [ " " , dbPath , " | " , " WRITE | " , dbW_access]) mylog('none', ['------------------------------------------------']) - #return dbR_access and dbW_access and confR_access and confW_access - return (confR_access, dbR_access) + #return dbR_access and dbW_access and confR_access and confW_access + return (confR_access, dbR_access) #------------------------------------------------------------------------------- def fixPermissions(): # Try fixing access rights if needed chmodCommands = [] - - chmodCommands.append(['sudo', 'chmod', 'a+rw', '-R', fullDbPath]) + + chmodCommands.append(['sudo', 'chmod', 'a+rw', '-R', fullDbPath]) chmodCommands.append(['sudo', 'chmod', 'a+rw', '-R', fullConfPath]) for com in chmodCommands: @@ -90,7 +90,7 @@ def fixPermissions(): result = subprocess.check_output (com, universal_newlines=True) except subprocess.CalledProcessError as e: # An error occured, handle it - mylog('none', ["[Setup] Fix Failed. Execute this command manually inside of the container: ", ' '.join(com)]) + mylog('none', ["[Setup] Fix Failed. Execute this command manually inside of the container: ", ' '.join(com)]) mylog('none', [e.output]) @@ -111,7 +111,7 @@ def initialiseFile(pathToCheck, defaultFile): # write stdout and stderr into .log files for debugging if needed logResult (stdout, stderr) # TO-DO should be changed to mylog - + except subprocess.CalledProcessError as e: # An error occured, handle it mylog('none', ["[Setup] Error copying ("+defaultFile+"). 
Make sure the app has Read & Write access to " + pathToCheck]) @@ -130,7 +130,7 @@ def filePermissions(): initialiseFile(fullDbPath, "/home/pi/pialert/back/pialert.db_bak") # last attempt - fixPermissions() + fixPermissions() #------------------------------------------------------------------------------- @@ -139,7 +139,7 @@ def bytes_to_string(value): # if value is of type bytes, convert to string if isinstance(value, bytes): value = value.decode('utf-8') - return value + return value #------------------------------------------------------------------------------- @@ -152,21 +152,15 @@ def if_byte_then_to_str(input): #------------------------------------------------------------------------------- def collect_lang_strings(db, json, pref): - for prop in json["localized"]: + for prop in json["localized"]: for language_string in json[prop]: - import_language_string(db, language_string["language_code"], pref + "_" + prop, language_string["string"]) + import_language_string(db, language_string["language_code"], pref + "_" + prop, language_string["string"]) - - - - - - #------------------------------------------------------------------------------- # Creates a JSON object from a DB row -def row_to_json(names, row): - +def row_to_json(names, row): + rowEntry = {} index = 0 @@ -179,7 +173,7 @@ def row_to_json(names, row): #------------------------------------------------------------------------------- def import_language_string(db, code, key, value, extra = ""): - db.sql.execute ("""INSERT INTO Plugins_Language_Strings ("Language_Code", "String_Key", "String_Value", "Extra") VALUES (?, ?, ?, ?)""", (str(code), str(key), str(value), str(extra))) + db.sql.execute ("""INSERT INTO Plugins_Language_Strings ("Language_Code", "String_Key", "String_Value", "Extra") VALUES (?, ?, ?, ?)""", (str(code), str(key), str(value), str(extra))) db.commitDB() @@ -198,13 +192,13 @@ def checkIPV4(ip): #------------------------------------------------------------------------------- -def isNewVersion(newVersion: bool): +def isNewVersion(newVersion: bool): - if newVersion == False: + if newVersion == False: - f = open(pialertPath + '/front/buildtimestamp.txt', 'r') + f = open(pialertPath + '/front/buildtimestamp.txt', 'r') buildTimestamp = int(f.read().strip()) - f.close() + f.close() data = "" @@ -213,19 +207,19 @@ def isNewVersion(newVersion: bool): text = url.text data = json.loads(text) except requests.exceptions.ConnectionError as e: - mylog('info', [" Couldn't check for new release."]) + mylog('info', [" Couldn't check for new release."]) data = "" - + # make sure we received a valid response and not an API rate limit exceeded message - if data != "" and len(data) > 0 and isinstance(data, list) and "published_at" in data[0]: + if data != "" and len(data) > 0 and isinstance(data, list) and "published_at" in data[0]: - dateTimeStr = data[0]["published_at"] + dateTimeStr = data[0]["published_at"] - realeaseTimestamp = int(datetime.datetime.strptime(dateTimeStr, '%Y-%m-%dT%H:%M:%SZ').strftime('%s')) + realeaseTimestamp = int(datetime.datetime.strptime(dateTimeStr, '%Y-%m-%dT%H:%M:%SZ').strftime('%s')) - if realeaseTimestamp > buildTimestamp + 600: + if realeaseTimestamp > buildTimestamp + 600: mylog('none', [" New version of the container available!"]) - newVersion = True + newVersion = True # updateState(db, 'Back_New_Version_Available', str(newVersionAvailable)) ## TO DO add this back in but avoid circular ref with database return newVersion @@ -237,7 +231,7 @@ def hide_email(email): if len(m) == 2: return 
f'{m[0][0]}{"*"*(len(m[0])-2)}{m[0][-1] if len(m[0]) > 1 else ""}@{m[1]}' - return email + return email #------------------------------------------------------------------------------- def removeDuplicateNewLines(text): @@ -250,14 +244,14 @@ def removeDuplicateNewLines(text): def add_json_list (row, list): new_row = [] - for column in row : + for column in row : column = bytes_to_string(column) new_row.append(column) - list.append(new_row) + list.append(new_row) - return list + return list #------------------------------------------------------------------------------- @@ -275,7 +269,7 @@ def generate_mac_links (html, deviceUrl): MACs = re.findall(p, html) - for mac in MACs: + for mac in MACs: html = html.replace('' + mac + '','' + mac + '') return html @@ -283,40 +277,47 @@ def generate_mac_links (html, deviceUrl): #------------------------------------------------------------------------------- -def initOrSetParam(db, parID, parValue): +def initOrSetParam(db, parID, parValue): sql = db.sql - sql.execute ("INSERT INTO Parameters(par_ID, par_Value) VALUES('"+str(parID)+"', '"+str(parValue)+"') ON CONFLICT(par_ID) DO UPDATE SET par_Value='"+str(parValue)+"' where par_ID='"+str(parID)+"'") + sql.execute ("INSERT INTO Parameters(par_ID, par_Value) VALUES('"+str(parID)+"', '"+str(parValue)+"') ON CONFLICT(par_ID) DO UPDATE SET par_Value='"+str(parValue)+"' where par_ID='"+str(parID)+"'") - db.commitDB() + db.commitDB() #------------------------------------------------------------------------------- class json_struc: - def __init__(self, jsn, columnNames): + def __init__(self, jsn, columnNames): self.json = jsn - self.columnNames = columnNames + self.columnNames = columnNames #------------------------------------------------------------------------------- def get_file_content(path): - f = open(path, 'r') - content = f.read() - f.close() + f = open(path, 'r') + content = f.read() + f.close() - return content + return content #------------------------------------------------------------------------------- def write_file (pPath, pText): # Write the text depending using the correct python version if sys.version_info < (3, 0): file = io.open (pPath , mode='w', encoding='utf-8') - file.write ( pText.decode('unicode_escape') ) - file.close() + file.write ( pText.decode('unicode_escape') ) + file.close() else: - file = open (pPath, 'w', encoding='utf-8') + file = open (pPath, 'w', encoding='utf-8') if pText is None: pText = "" - file.write (pText) - file.close() \ No newline at end of file + file.write (pText) + file.close() + +#------------------------------------------------------------------------------- +class noti_struc: + def __init__(self, json, text, html): + self.json = json + self.text = text + self.html = html \ No newline at end of file diff --git a/pialert/mqtt.py b/pialert/mqtt.py deleted file mode 100644 index 8843b1d5..00000000 --- a/pialert/mqtt.py +++ /dev/null @@ -1,244 +0,0 @@ - -import time -import re -from paho.mqtt import client as mqtt_client - -import conf -from logger import mylog -from database import get_all_devices, get_device_stats -from helper import bytes_to_string, sanitize_string - - - -#------------------------------------------------------------------------------- -# MQTT -#------------------------------------------------------------------------------- - -mqtt_connected_to_broker = False -mqtt_sensors = [] - -#------------------------------------------------------------------------------- -class sensor_config: - def __init__(self, deviceId, deviceName, sensorType, 
sensorName, icon): - self.deviceId = deviceId - self.deviceName = deviceName - self.sensorType = sensorType - self.sensorName = sensorName - self.icon = icon - self.hash = str(hash(str(deviceId) + str(deviceName)+ str(sensorType)+ str(sensorName)+ str(icon))) - -#------------------------------------------------------------------------------- - -def publish_mqtt(client, topic, message): - status = 1 - while status != 0: - result = client.publish( - topic=topic, - payload=message, - qos=conf.MQTT_QOS, - retain=True, - ) - - status = result[0] - - if status != 0: - mylog('info', ["Waiting to reconnect to MQTT broker"]) - time.sleep(0.1) - return True - -#------------------------------------------------------------------------------- -def create_generic_device(client): - - deviceName = 'PiAlert' - deviceId = 'pialert' - - create_sensor(client, deviceId, deviceName, 'sensor', 'online', 'wifi-check') - create_sensor(client, deviceId, deviceName, 'sensor', 'down', 'wifi-cancel') - create_sensor(client, deviceId, deviceName, 'sensor', 'all', 'wifi') - create_sensor(client, deviceId, deviceName, 'sensor', 'archived', 'wifi-lock') - create_sensor(client, deviceId, deviceName, 'sensor', 'new', 'wifi-plus') - create_sensor(client, deviceId, deviceName, 'sensor', 'unknown', 'wifi-alert') - - -#------------------------------------------------------------------------------- -def create_sensor(client, deviceId, deviceName, sensorType, sensorName, icon): - - new_sensor_config = sensor_config(deviceId, deviceName, sensorType, sensorName, icon) - - # check if config already in list and if not, add it, otherwise skip - global mqtt_sensors, uniqueSensorCount - - is_unique = True - - for sensor in mqtt_sensors: - if sensor.hash == new_sensor_config.hash: - is_unique = False - break - - # save if unique - if is_unique: - publish_sensor(client, new_sensor_config) - - - - -#------------------------------------------------------------------------------- -def publish_sensor(client, sensorConf): - - global mqtt_sensors - - message = '{ \ - "name":"'+ sensorConf.deviceName +' '+sensorConf.sensorName+'", \ - "state_topic":"system-sensors/'+sensorConf.sensorType+'/'+sensorConf.deviceId+'/state", \ - "value_template":"{{value_json.'+sensorConf.sensorName+'}}", \ - "unique_id":"'+sensorConf.deviceId+'_sensor_'+sensorConf.sensorName+'", \ - "device": \ - { \ - "identifiers": ["'+sensorConf.deviceId+'_sensor"], \ - "manufacturer": "PiAlert", \ - "name":"'+sensorConf.deviceName+'" \ - }, \ - "icon":"mdi:'+sensorConf.icon+'" \ - }' - - topic='homeassistant/'+sensorConf.sensorType+'/'+sensorConf.deviceId+'/'+sensorConf.sensorName+'/config' - - # add the sensor to the global list to keep track of succesfully added sensors - if publish_mqtt(client, topic, message): - # hack - delay adding to the queue in case the process is - time.sleep(conf.MQTT_DELAY_SEC) # restarted and previous publish processes aborted - # (it takes ~2s to update a sensor config on the broker) - mqtt_sensors.append(sensorConf) - -#------------------------------------------------------------------------------- -def mqtt_create_client(): - def on_disconnect(client, userdata, rc): - global mqtt_connected_to_broker - mqtt_connected_to_broker = False - - # not sure is below line is correct / necessary - # client = mqtt_create_client() - - def on_connect(client, userdata, flags, rc): - global mqtt_connected_to_broker - - if rc == 0: - mylog('verbose', [" Connected to broker"]) - mqtt_connected_to_broker = True # Signal connection - else: - mylog('none', [" 
Connection failed"]) - mqtt_connected_to_broker = False - - - client = mqtt_client.Client('PiAlert') # Set Connecting Client ID - client.username_pw_set(conf.MQTT_USER, conf.MQTT_PASSWORD) - client.on_connect = on_connect - client.on_disconnect = on_disconnect - client.connect(conf.MQTT_BROKER, conf.MQTT_PORT) - client.loop_start() - - return client - -#------------------------------------------------------------------------------- -def mqtt_start(): - - global client, mqtt_connected_to_broker - - if mqtt_connected_to_broker == False: - mqtt_connected_to_broker = True - client = mqtt_create_client() - - # General stats - - # Create a generic device for overal stats - create_generic_device(client) - - # Get the data - row = get_device_stats() - - columns = ["online","down","all","archived","new","unknown"] - - payload = "" - - # Update the values - for column in columns: - payload += '"'+column+'": ' + str(row[column]) +',' - - # Publish (warap into {} and remove last ',' from above) - publish_mqtt(client, "system-sensors/sensor/pialert/state", - '{ \ - '+ payload[:-1] +'\ - }' - ) - - - # Specific devices - - # Get all devices - devices = get_all_devices() - - sec_delay = len(devices) * int(conf.MQTT_DELAY_SEC)*5 - - mylog('info', [" Estimated delay: ", (sec_delay), 's ', '(', round(sec_delay/60,1) , 'min)' ]) - - for device in devices: - - # Create devices in Home Assistant - send config messages - deviceId = 'mac_' + device["dev_MAC"].replace(" ", "").replace(":", "_").lower() - deviceNameDisplay = re.sub('[^a-zA-Z0-9-_\s]', '', device["dev_Name"]) - - create_sensor(client, deviceId, deviceNameDisplay, 'sensor', 'last_ip', 'ip-network') - create_sensor(client, deviceId, deviceNameDisplay, 'binary_sensor', 'is_present', 'wifi') - create_sensor(client, deviceId, deviceNameDisplay, 'sensor', 'mac_address', 'folder-key-network') - create_sensor(client, deviceId, deviceNameDisplay, 'sensor', 'is_new', 'bell-alert-outline') - create_sensor(client, deviceId, deviceNameDisplay, 'sensor', 'vendor', 'cog') - - # update device sensors in home assistant - - publish_mqtt(client, 'system-sensors/sensor/'+deviceId+'/state', - '{ \ - "last_ip": "' + device["dev_LastIP"] +'", \ - "is_new": "' + str(device["dev_NewDevice"]) +'", \ - "vendor": "' + sanitize_string(device["dev_Vendor"]) +'", \ - "mac_address": "' + str(device["dev_MAC"]) +'" \ - }' - ) - - publish_mqtt(client, 'system-sensors/binary_sensor/'+deviceId+'/state', - '{ \ - "is_present": "' + to_binary_sensor(str(device["dev_PresentLastScan"])) +'"\ - }' - ) - - # delete device / topic - # homeassistant/sensor/mac_44_ef_bf_c4_b1_af/is_present/config - # client.publish( - # topic="homeassistant/sensor/"+deviceId+"/is_present/config", - # payload="", - # qos=1, - # retain=True, - # ) - # time.sleep(10) - - -#=============================================================================== -# Home Assistant UTILs -#=============================================================================== -def to_binary_sensor(input): - # In HA a binary sensor returns ON or OFF - result = "OFF" - - # bytestring - if isinstance(input, str): - if input == "1": - result = "ON" - elif isinstance(input, int): - if input == 1: - result = "ON" - elif isinstance(input, bool): - if input == True: - result = "ON" - elif isinstance(input, bytes): - if bytes_to_string(input) == "1": - result = "ON" - return result \ No newline at end of file diff --git a/pialert/publishers/__init__.py b/pialert/publishers/__init__.py new file mode 100644 index 00000000..52c36748 --- 
/dev/null
+++ b/pialert/publishers/__init__.py
@@ -0,0 +1,8 @@
+""" Publishers for Pi.Alert """
+
+"""
+each publisher exposes:
+
+- check_config () returning True / False
+- send (message) returning True / False
+"""
\ No newline at end of file
diff --git a/pialert/publishers/apprise.py b/pialert/publishers/apprise.py
new file mode 100644
index 00000000..9d065a3f
--- /dev/null
+++ b/pialert/publishers/apprise.py
@@ -0,0 +1,42 @@
+
+import json
+import subprocess
+import conf
+from helper import noti_struc
+from logger import logResult, mylog
+
+#-------------------------------------------------------------------------------
+def check_config():
+    if conf.APPRISE_URL == '' or conf.APPRISE_HOST == '':
+        mylog('none', ['[Check Config] Error: Apprise service not set up correctly. Check your pialert.conf APPRISE_* variables.'])
+        return False
+    else:
+        return True
+
+#-------------------------------------------------------------------------------
+def send (msg: noti_struc):
+    html = msg.html
+    text = msg.text
+
+    #Define Apprise compatible payload (https://github.com/caronc/apprise-api#stateless-solution)
+    payload = html
+
+    if conf.APPRISE_PAYLOAD == 'text':
+        payload = text
+
+    _json_payload={
+        "urls": conf.APPRISE_URL,
+        "title": "Pi.Alert Notifications",
+        "format": conf.APPRISE_PAYLOAD,
+        "body": payload
+    }
+
+    try:
+        # try running a subprocess
+        p = subprocess.Popen(["curl","-i","-X", "POST" ,"-H", "Content-Type:application/json" ,"-d", json.dumps(_json_payload), conf.APPRISE_HOST], stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+        stdout, stderr = p.communicate()
+        # write stdout and stderr into .log files for debugging if needed
+        logResult (stdout, stderr) # TO-DO should be changed to mylog
+    except subprocess.CalledProcessError as e:
+        # An error occurred, handle it
+        mylog('none', [e.output])
\ No newline at end of file
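The apprise publisher posts a JSON body to the stateless apprise-api endpoint in conf.APPRISE_HOST by shelling out to curl. The same call can be made with the requests library that ntfy.py and pushsafer.py already use; a minimal sketch under that assumption (the function name is illustrative):

    import requests
    import conf

    def post_to_apprise_api(json_payload):
        # same JSON body the curl invocation above sends to conf.APPRISE_HOST
        requests.post(conf.APPRISE_HOST, json=json_payload, timeout=30)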
diff --git a/pialert/publishers/email.py b/pialert/publishers/email.py
new file mode 100644
index 00000000..bae0ca1b
--- /dev/null
+++ b/pialert/publishers/email.py
@@ -0,0 +1,88 @@
+from email.mime.multipart import MIMEMultipart
+from email.mime.text import MIMEText
+import smtplib
+
+
+import conf
+from helper import hide_email, noti_struc
+from logger import mylog, print_log
+
+#-------------------------------------------------------------------------------
+def check_config ():
+    if conf.SMTP_SERVER == '' or conf.REPORT_FROM == '' or conf.REPORT_TO == '':
+        mylog('none', ['[Email Check Config] Error: Email service not set up correctly. Check your pialert.conf SMTP_*, REPORT_FROM and REPORT_TO variables.'])
+        return False
+    else:
+        return True
+
+#-------------------------------------------------------------------------------
+def send (msg: noti_struc):
+
+    pText = msg.text
+    pHTML = msg.html
+
+    mylog('debug', '[Send Email] REPORT_TO: ' + hide_email(str(conf.REPORT_TO)) + ' SMTP_USER: ' + hide_email(str(conf.SMTP_USER)))
+
+    # Compose email
+    msg = MIMEMultipart('alternative')
+    msg['Subject'] = 'Pi.Alert Report'
+    msg['From'] = conf.REPORT_FROM
+    msg['To'] = conf.REPORT_TO
+    msg.attach (MIMEText (pText, 'plain'))
+    msg.attach (MIMEText (pHTML, 'html'))
+
+    failedAt = ''
+
+    failedAt = print_log ('SMTP try')
+
+    try:
+        # Send mail
+        failedAt = print_log('Trying to open connection to ' + str(conf.SMTP_SERVER) + ':' + str(conf.SMTP_PORT))
+
+        if conf.SMTP_FORCE_SSL:
+            failedAt = print_log('SMTP_FORCE_SSL == True so using .SMTP_SSL()')
+            if conf.SMTP_PORT == 0:
+                failedAt = print_log('SMTP_PORT == 0 so sending .SMTP_SSL(SMTP_SERVER)')
+                smtp_connection = smtplib.SMTP_SSL(conf.SMTP_SERVER)
+            else:
+                failedAt = print_log('SMTP_PORT != 0 so sending .SMTP_SSL(SMTP_SERVER, SMTP_PORT)')
+                smtp_connection = smtplib.SMTP_SSL(conf.SMTP_SERVER, conf.SMTP_PORT)
+
+        else:
+            failedAt = print_log('SMTP_FORCE_SSL == False so using .SMTP()')
+            if conf.SMTP_PORT == 0:
+                failedAt = print_log('SMTP_PORT == 0 so sending .SMTP(SMTP_SERVER)')
+                smtp_connection = smtplib.SMTP (conf.SMTP_SERVER)
+            else:
+                failedAt = print_log('SMTP_PORT != 0 so sending .SMTP(SMTP_SERVER, SMTP_PORT)')
+                smtp_connection = smtplib.SMTP (conf.SMTP_SERVER, conf.SMTP_PORT)
+
+        failedAt = print_log('Setting SMTP debug level')
+
+        # Log level set to debug of the communication between SMTP server and client
+        if conf.LOG_LEVEL == 'debug':
+            smtp_connection.set_debuglevel(1)
+
+        failedAt = print_log( 'Sending .ehlo()')
+        smtp_connection.ehlo()
+
+        if not conf.SMTP_SKIP_TLS:
+            failedAt = print_log('SMTP_SKIP_TLS == False so sending .starttls()')
+            smtp_connection.starttls()
+            failedAt = print_log('SMTP_SKIP_TLS == False so sending .ehlo()')
+            smtp_connection.ehlo()
+        if not conf.SMTP_SKIP_LOGIN:
+            failedAt = print_log('SMTP_SKIP_LOGIN == False so sending .login()')
+            smtp_connection.login (conf.SMTP_USER, conf.SMTP_PASS)
+
+        failedAt = print_log('Sending .sendmail()')
+        smtp_connection.sendmail (conf.REPORT_FROM, conf.REPORT_TO, msg.as_string())
+        smtp_connection.quit()
+    except smtplib.SMTPAuthenticationError as e:
+        mylog('none', ['  ERROR: Failed at - ', failedAt])
+        mylog('none', ['  ERROR: Couldn\'t connect to the SMTP server (SMTPAuthenticationError), skipping Email (enable LOG_LEVEL=debug for more logging)'])
+    except smtplib.SMTPServerDisconnected as e:
+        mylog('none', ['  ERROR: Failed at - ', failedAt])
+        mylog('none', ['  ERROR: Couldn\'t connect to the SMTP server (SMTPServerDisconnected), skipping Email (enable LOG_LEVEL=debug for more logging)'])
+
+    mylog('debug', '[Send Email] Last executed - ' + str(failedAt))
\ No newline at end of file
diff --git a/pialert/publishers/ntfy.py b/pialert/publishers/ntfy.py
new file mode 100644
index 00000000..2dc5318f
--- /dev/null
+++ b/pialert/publishers/ntfy.py
@@ -0,0 +1,36 @@
+
+import conf
+import requests
+from base64 import b64encode
+
+from helper import noti_struc
+from logger import mylog
+
+#-------------------------------------------------------------------------------
+def check_config():
+    if conf.NTFY_HOST == '' or conf.NTFY_TOPIC == '':
+        mylog('none', ['[Check Config] Error: NTFY service not set up correctly. 
Check your pialert.conf NTFY_* variables.']) + return False + else: + return True + +#------------------------------------------------------------------------------- +def send (msg: noti_struc): + _Text = msg.html + headers = { + "Title": "Pi.Alert Notification", + "Actions": "view, Open Dashboard, "+ conf.REPORT_DASHBOARD_URL, + "Priority": "urgent", + "Tags": "warning" + } + # if username and password are set generate hash and update header + if conf.NTFY_USER != "" and conf.NTFY_PASSWORD != "": + # Generate hash for basic auth + # usernamepassword = "{}:{}".format(conf.NTFY_USER,conf.NTFY_PASSWORD) + basichash = b64encode(bytes(conf.NTFY_USER + ':' + conf.NTFY_PASSWORD, "utf-8")).decode("ascii") + + # add authorization header with hash + headers["Authorization"] = "Basic {}".format(basichash) + + requests.post("{}/{}".format( conf.NTFY_HOST, conf.NTFY_TOPIC), + data=_Text, + headers=headers) diff --git a/pialert/publishers/pushsafer.py b/pialert/publishers/pushsafer.py new file mode 100644 index 00000000..b8252209 --- /dev/null +++ b/pialert/publishers/pushsafer.py @@ -0,0 +1,33 @@ + +import requests + + +import conf +from helper import noti_struc +from logger import mylog + +#------------------------------------------------------------------------------- +def check_config(): + if conf.PUSHSAFER_TOKEN == 'ApiKey': + mylog('none', ['[Check Config] Error: Pushsafer service not set up correctly. Check your pialert.conf PUSHSAFER_TOKEN variable.']) + return False + else: + return True + +#------------------------------------------------------------------------------- +def send ( msg:noti_struc ): + _Text = msg.text + url = 'https://www.pushsafer.com/api' + post_fields = { + "t" : 'Pi.Alert Message', + "m" : _Text, + "s" : 11, + "v" : 3, + "i" : 148, + "c" : '#ef7f7f', + "d" : 'a', + "u" : conf.REPORT_DASHBOARD_URL, + "ut" : 'Open Pi.Alert', + "k" : conf.PUSHSAFER_TOKEN, + } + requests.post(url, data=post_fields) \ No newline at end of file diff --git a/pialert/publishers/webhook.py b/pialert/publishers/webhook.py new file mode 100644 index 00000000..850f5cc0 --- /dev/null +++ b/pialert/publishers/webhook.py @@ -0,0 +1,98 @@ +import json +import subprocess + +import conf +from const import logPath +from helper import noti_struc, write_file +from logger import logResult, mylog + +#------------------------------------------------------------------------------- +def check_config(): + if conf.WEBHOOK_URL == '': + mylog('none', ['[Check Config] Error: Webhook service not set up correctly. Check your pialert.conf WEBHOOK_* variables.']) + return False + else: + return True + +#------------------------------------------------------------------------------- + +def send_webhook (msg: noti_struc): + + # use data type based on specified payload type + if conf.WEBHOOK_PAYLOAD == 'json': + payloadData = msg.json + if conf.WEBHOOK_PAYLOAD == 'html': + payloadData = msg.html + if conf.WEBHOOK_PAYLOAD == 'text': + payloadData = to_text(msg.json) # TO DO can we just send msg.text? 
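+    # NOTE: payloadData stays unbound when WEBHOOK_PAYLOAD is none of 'json', 'html'
+    # or 'text', which would raise a NameError further down; a hypothetical guard:
+    #     else:
+    #         payloadData = msg.text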
+
+    # Define slack-compatible payload
+    _json_payload = { "text": payloadData } if conf.WEBHOOK_PAYLOAD == 'text' else {
+        "username": "Pi.Alert",
+        "text": "There are new notifications",
+        "attachments": [{
+            "title": "Pi.Alert Notifications",
+            "title_link": conf.REPORT_DASHBOARD_URL,
+            "text": payloadData
+        }]
+    }
+
+    # DEBUG - Write the json payload into a log file for debugging
+    write_file (logPath + '/webhook_payload.json', json.dumps(_json_payload))
+
+    # Using the Slack-Compatible Webhook endpoint for Discord so that the same payload can be used for both
+    if(conf.WEBHOOK_URL.startswith('https://discord.com/api/webhooks/') and not conf.WEBHOOK_URL.endswith("/slack")):
+        _WEBHOOK_URL = f"{conf.WEBHOOK_URL}/slack"
+        curlParams = ["curl","-i","-H", "Content-Type:application/json" ,"-d", json.dumps(_json_payload), _WEBHOOK_URL]
+    else:
+        _WEBHOOK_URL = conf.WEBHOOK_URL
+        curlParams = ["curl","-i","-X", conf.WEBHOOK_REQUEST_METHOD ,"-H", "Content-Type:application/json" ,"-d", json.dumps(_json_payload), _WEBHOOK_URL]
+
+    # execute CURL call
+    try:
+        # try running a subprocess
+        mylog('debug', '[send_webhook] curlParams: ' + str(curlParams))
+        p = subprocess.Popen(curlParams, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+
+        stdout, stderr = p.communicate()
+
+        # write stdout and stderr into .log files for debugging if needed
+        logResult (stdout, stderr) # TO-DO should be changed to mylog
+    except subprocess.CalledProcessError as e:
+        # An error occurred, handle it
+        mylog('none', ['[send_webhook]', e.output])
+
+
+
+
+
+#-------------------------------------------------------------------------------
+def to_text(_json):
+    payloadData = ""
+    if len(_json['internet']) > 0 and 'internet' in conf.INCLUDED_SECTIONS:
+        payloadData += "INTERNET\n"
+        for event in _json['internet']:
+            payloadData += event[3] + ' on ' + event[2] + '. ' + event[4] + '. 
New address:' + event[1] + '\n' + + if len(_json['new_devices']) > 0 and 'new_devices' in conf.INCLUDED_SECTIONS: + payloadData += "NEW DEVICES:\n" + for event in _json['new_devices']: + if event[4] is None: + event[4] = event[11] + payloadData += event[1] + ' - ' + event[4] + '\n' + + if len(_json['down_devices']) > 0 and 'down_devices' in conf.INCLUDED_SECTIONS: + write_file (logPath + '/down_devices_example.log', _json['down_devices']) + payloadData += 'DOWN DEVICES:\n' + for event in _json['down_devices']: + if event[4] is None: + event[4] = event[11] + payloadData += event[1] + ' - ' + event[4] + '\n' + + if len(_json['events']) > 0 and 'events' in conf.INCLUDED_SECTIONS: + payloadData += "EVENTS:\n" + for event in _json['events']: + if event[8] != "Internet": + payloadData += event[8] + " on " + event[1] + " " + event[3] + " at " + event[2] + "\n" + + return payloadData \ No newline at end of file diff --git a/pialert/reporting.py b/pialert/reporting.py index 6a8186ea..0bb5e57a 100644 --- a/pialert/reporting.py +++ b/pialert/reporting.py @@ -1,40 +1,41 @@ -from email.mime.multipart import MIMEMultipart -from email.mime.text import MIMEText + import datetime import json -import smtplib + import socket -from base64 import b64encode + import subprocess import requests from json2table import convert # pialert modules -import conf +import conf from const import pialertPath, logPath, apiPath -from helper import generate_mac_links, removeDuplicateNewLines, timeNow, hide_email, json_struc, updateState, get_file_content, write_file +from helper import noti_struc, generate_mac_links, removeDuplicateNewLines, timeNow, hide_email, updateState, get_file_content, write_file from logger import logResult, mylog, print_log -from mqtt import mqtt_start - - +from publishers.email import (check_config as email_check_config, + send as send_email ) +from publishers.ntfy import (check_config as ntfy_check_config, + send as send_ntfy ) +from publishers.apprise import (check_config as apprise_check_config, + send as send_apprise) +from publishers.webhook import (check_config as webhook_check_config, + send as send_webhook) +from publishers.pushsafer import (check_config as pushsafer_check_config, + send as send_pushsafer) +from publishers.mqtt import (check_config as mqtt_check_config, + mqtt_start ) #=============================================================================== # REPORTING #=============================================================================== -# create a json for webhook and mqtt notifications to provide further integration options +# create a json for webhook and mqtt notifications to provide further integration options json_final = [] -#------------------------------------------------------------------------------- -class noti_struc: - def __init__(self, json, text, html): - self.json = json - self.text = text - self.html = html - #------------------------------------------------------------------------------- def construct_notifications(db, sqlQuery, tableTitle, skipText = False, suppliedJsonStruct = None): @@ -55,7 +56,7 @@ def construct_notifications(db, sqlQuery, tableTitle, skipText = False, supplied json_struc = suppliedJsonStruct jsn = json_struc.json - html = "" + html = "" text = "" if len(jsn["data"]) > 0: @@ -68,13 +69,13 @@ def construct_notifications(db, sqlQuery, tableTitle, skipText = False, supplied # prepare text-only message if skipText == False: - + for device in jsn["data"]: for header in headers: padding = "" if len(header) < 4: padding = "\t" - text += 
text_line.format ( header + ': ' + padding, device[header]) + text += text_line.format ( header + ': ' + padding, device[header]) text += '\n' # Format HTML table headers @@ -86,7 +87,8 @@ def construct_notifications(db, sqlQuery, tableTitle, skipText = False, supplied -def send_notifications (db): +def send_notifications (db, INCLUDED_SECTIONS = conf.INCLUDED_SECTIONS): + sql = db.sql #TO-DO global mail_text, mail_html, json_final, changedPorts_json_struc, partial_html, partial_txt, partial_json @@ -94,7 +96,7 @@ def send_notifications (db): plugins_report = False # Reporting section - mylog('verbose', ['[Notification] Check if something to report']) + mylog('verbose', ['[Notification] Check if something to report']) # prepare variables for JSON construction json_internet = [] @@ -108,26 +110,26 @@ def send_notifications (db): sql.execute ("""UPDATE Events SET eve_PendingAlertEmail = 0 WHERE eve_PendingAlertEmail = 1 AND eve_EventType != 'Device Down' AND eve_MAC IN ( - SELECT dev_MAC FROM Devices WHERE dev_AlertEvents = 0 + SELECT dev_MAC FROM Devices WHERE dev_AlertEvents = 0 )""") sql.execute ("""UPDATE Events SET eve_PendingAlertEmail = 0 WHERE eve_PendingAlertEmail = 1 AND eve_EventType = 'Device Down' AND eve_MAC IN ( - SELECT dev_MAC FROM Devices WHERE dev_AlertDeviceDown = 0 + SELECT dev_MAC FROM Devices WHERE dev_AlertDeviceDown = 0 )""") # Open text Template - template_file = open(pialertPath + '/back/report_template.txt', 'r') - mail_text = template_file.read() - template_file.close() + template_file = open(pialertPath + '/back/report_template.txt', 'r') + mail_text = template_file.read() + template_file.close() # Open html Template - template_file = open(pialertPath + '/back/report_template.html', 'r') + template_file = open(pialertPath + '/back/report_template.html', 'r') if conf.newVersionAvailable : - template_file = open(pialertPath + '/back/report_template_new_version.html', 'r') + template_file = open(pialertPath + '/back/report_template_new_version.html', 'r') - mail_html = template_file.read() - template_file.close() + mail_html = template_file.read() + template_file.close() # Report Header & footer timeFormated = timeNow().strftime ('%Y-%m-%d %H:%M') @@ -137,7 +139,7 @@ def send_notifications (db): mail_text = mail_text.replace ('', socket.gethostname() ) mail_html = mail_html.replace ('', socket.gethostname() ) - if 'internet' in conf.INCLUDED_SECTIONS: + if 'internet' in INCLUDED_SECTIONS: # Compose Internet Section sqlQuery = """SELECT eve_MAC as MAC, eve_IP as IP, eve_DateTime as Datetime, eve_EventType as "Event Type", eve_AdditionalInfo as "More info" FROM Events WHERE eve_PendingAlertEmail = 1 AND eve_MAC = 'Internet' @@ -145,14 +147,14 @@ def send_notifications (db): notiStruc = construct_notifications(db, sqlQuery, "Internet IP change") - # collect "internet" (IP changes) for the webhook json + # collect "internet" (IP changes) for the webhook json json_internet = notiStruc.json["data"] mail_text = mail_text.replace ('', notiStruc.text + '\n') mail_html = mail_html.replace ('', notiStruc.html) - if 'new_devices' in conf.INCLUDED_SECTIONS: - # Compose New Devices Section + if 'new_devices' in INCLUDED_SECTIONS: + # Compose New Devices Section sqlQuery = """SELECT eve_MAC as MAC, eve_DateTime as Datetime, dev_LastIP as IP, eve_EventType as "Event Type", dev_Name as "Device name", dev_Comments as Comments FROM Events_Devices WHERE eve_PendingAlertEmail = 1 AND eve_EventType = 'New Device' @@ -160,14 +162,14 @@ def send_notifications (db): notiStruc = 
construct_notifications(db, sqlQuery, "New devices")
 
-        # collect "new_devices" for the webhook json 
+        # collect "new_devices" for the webhook json
         json_new_devices = notiStruc.json["data"]
 
         mail_text = mail_text.replace ('', notiStruc.text + '\n')
         mail_html = mail_html.replace ('', notiStruc.html)
 
-    if 'down_devices' in conf.INCLUDED_SECTIONS:
-        # Compose Devices Down Section
+    if 'down_devices' in INCLUDED_SECTIONS:
+        # Compose Devices Down Section
         sqlQuery = """SELECT eve_MAC as MAC, eve_DateTime as Datetime, dev_LastIP as IP, eve_EventType as "Event Type", dev_Name as "Device name", dev_Comments as Comments FROM Events_Devices
                         WHERE eve_PendingAlertEmail = 1
                         AND eve_EventType = 'Device Down'
@@ -175,14 +177,14 @@ def send_notifications (db):
 
         notiStruc = construct_notifications(db, sqlQuery, "Down devices")
 
-        # collect "new_devices" for the webhook json 
+        # collect "down_devices" for the webhook json
         json_down_devices = notiStruc.json["data"]
 
         mail_text = mail_text.replace ('', notiStruc.text + '\n')
         mail_html = mail_html.replace ('', notiStruc.html)
 
-    if 'events' in conf.INCLUDED_SECTIONS:
-        # Compose Events Section
+    if 'events' in INCLUDED_SECTIONS:
+        # Compose Events Section
         sqlQuery = """SELECT eve_MAC as MAC, eve_DateTime as Datetime, dev_LastIP as IP, eve_EventType as "Event Type", dev_Name as "Device name", dev_Comments as Comments FROM Events_Devices
                         WHERE eve_PendingAlertEmail = 1
                         AND eve_EventType IN ('Connected','Disconnected',
@@ -191,16 +193,16 @@ def send_notifications (db):
 
         notiStruc = construct_notifications(db, sqlQuery, "Events")
 
-        # collect "events" for the webhook json 
+        # collect "events" for the webhook json
         json_events = notiStruc.json["data"]
 
         mail_text = mail_text.replace ('', notiStruc.text + '\n')
         mail_html = mail_html.replace ('', notiStruc.html)
 
-
-    if 'ports' in conf.INCLUDED_SECTIONS:
-        # collect "ports" for the webhook json 
-        if changedPorts_json_struc is not None:
-            json_ports = changedPorts_json_struc.json["data"]
+
+    if 'ports' in INCLUDED_SECTIONS:
+        # collect "ports" for the webhook json
+        if changedPorts_json_struc is not None:
+            json_ports = changedPorts_json_struc.json["data"]
 
         notiStruc = construct_notifications(db, "", "Ports", True, changedPorts_json_struc)
@@ -208,17 +210,17 @@ def send_notifications (db):
 
         portsTxt = ""
         if changedPorts_json_struc is not None:
-            portsTxt = "Ports \n---------\n Ports changed! Check PiAlert for details!\n"
+            portsTxt = "Ports \n---------\n Ports changed! 
Check PiAlert for details!\n"
 
         mail_text = mail_text.replace ('', portsTxt )
 
-    if 'plugins' in conf.INCLUDED_SECTIONS and conf.ENABLE_PLUGINS:
-        # Compose Plugins Section
+    if 'plugins' in INCLUDED_SECTIONS and conf.ENABLE_PLUGINS:
+        # Compose Plugins Section
         sqlQuery = """SELECT Plugin, Object_PrimaryId, Object_SecondaryId, DateTimeChanged, Watched_Value1, Watched_Value2, Watched_Value3, Watched_Value4, Status from Plugins_Events"""
 
         notiStruc = construct_notifications(db, sqlQuery, "Plugins")
 
-        # collect "plugins" for the webhook json 
+        # collect "plugins" for the webhook json
         json_plugins = notiStruc.json["data"]
 
         mail_text = mail_text.replace ('', notiStruc.text + '\n')
@@ -229,42 +231,44 @@ def send_notifications (db):
 
     json_final = {
-                    "internet": json_internet, 
+                    "internet": json_internet,
                     "new_devices": json_new_devices,
-                    "down_devices": json_down_devices, 
+                    "down_devices": json_down_devices,
                     "events": json_events,
                     "ports": json_ports,
                     "plugins": json_plugins,
-                }   
+                }
 
     mail_text = removeDuplicateNewLines(mail_text)
-    
-    # Create clickable MAC links 
+
+    # Create clickable MAC links
     mail_html = generate_mac_links (mail_html, deviceUrl)
 
-    # Write output emails for debug 
-    write_file (logPath + '/report_output.json', json.dumps(json_final)) 
-    write_file (logPath + '/report_output.txt', mail_text) 
-    write_file (logPath + '/report_output.html', mail_html) 
+    # Write output emails for debug
+    write_file (logPath + '/report_output.json', json.dumps(json_final))
+    write_file (logPath + '/report_output.txt', mail_text)
+    write_file (logPath + '/report_output.html', mail_html)
 
     # Send Mail
-    if json_internet != [] or json_new_devices != [] or json_down_devices != [] or json_events != [] or json_ports != [] or conf.debug_force_notification or plugins_report: 
+    if json_internet != [] or json_new_devices != [] or json_down_devices != [] or json_events != [] or json_ports != [] or conf.debug_force_notification or plugins_report:
 
         mylog('none', ['[Notification] Changes detected, sending reports'])
 
+        msg = noti_struc(json_final, mail_text, mail_html)
+
         mylog('info', ['[Notification] Updating API files'])
         send_api()
 
-        if conf.REPORT_MAIL and check_config('email'): 
+        if conf.REPORT_MAIL and check_config('email'):
            updateState(db,"Send: Email")
            mylog('info', ['[Notification] Sending report by Email'])
-           send_email (mail_text, mail_html)
+           send_email (msg)
        else :
            mylog('verbose', ['[Notification] Skip email'])
        if conf.REPORT_APPRISE and check_config('apprise'):
            updateState(db,"Send: Apprise")
            mylog('info', ['[Notification] Sending report by Apprise'])
-           send_apprise (mail_html, mail_text)
+           send_apprise (msg)
        else :
            mylog('verbose', ['[Notification] Skip Apprise'])
        if conf.REPORT_WEBHOOK and check_config('webhook'):
@@ -276,20 +280,20 @@ def send_notifications (db):
        if conf.REPORT_NTFY and check_config('ntfy'):
            updateState(db,"Send: NTFY")
            mylog('info', ['[Notification] Sending report by NTFY'])
-           send_ntfy (mail_text)
+           send_ntfy (msg)
        else :
            mylog('verbose', ['[Notification] Skip NTFY'])
        if conf.REPORT_PUSHSAFER and check_config('pushsafer'):
            updateState(db,"Send: PUSHSAFER")
            mylog('info', ['[Notification] Sending report by PUSHSAFER'])
-           send_pushsafer (mail_text)
+           send_pushsafer (msg)
        else :
            mylog('verbose', ['[Notification] Skip PUSHSAFER'])
        # Update MQTT entities
        if conf.REPORT_MQTT and check_config('mqtt'):
            updateState(db,"Send: MQTT")
-           mylog('info', ['[Notification] Establishing MQTT thread']) 
-           mqtt_start() 
+           mylog('info', ['[Notification] Establishing MQTT thread'])
+           mqtt_start()
        else : 
mylog('verbose', ['[Notification] Skip MQTT']) else : @@ -305,13 +309,13 @@ def send_notifications (db): # clear plugin events sql.execute ("DELETE FROM Plugins_Events") - + changedPorts_json_struc = None - # DEBUG - print number of rows updated + # DEBUG - print number of rows updated mylog('info', ['[Notification] Notifications changes: ', sql.rowcount]) - # Commit changes + # Commit changes db.commitDB() @@ -319,53 +323,53 @@ def send_notifications (db): def check_config(service): if service == 'email': - if conf.SMTP_SERVER == '' or conf.REPORT_FROM == '' or conf.REPORT_TO == '': - mylog('none', ['[Check Config] Error: Email service not set up correctly. Check your pialert.conf SMTP_*, REPORT_FROM and REPORT_TO variables.']) - return False - else: - return True + return email_check_config() + + # if conf.SMTP_SERVER == '' or conf.REPORT_FROM == '' or conf.REPORT_TO == '': + # mylog('none', ['[Check Config] Error: Email service not set up correctly. Check your pialert.conf SMTP_*, REPORT_FROM and REPORT_TO variables.']) + # return False + # else: + # return True if service == 'apprise': - if conf.APPRISE_URL == '' or conf.APPRISE_HOST == '': - mylog('none', ['[Check Config] Error: Apprise service not set up correctly. Check your pialert.conf APPRISE_* variables.']) - return False - else: - return True + return apprise_check_config() + + # if conf.APPRISE_URL == '' or conf.APPRISE_HOST == '': + # mylog('none', ['[Check Config] Error: Apprise service not set up correctly. Check your pialert.conf APPRISE_* variables.']) + # return False + # else: + # return True if service == 'webhook': - if conf.WEBHOOK_URL == '': - mylog('none', ['[Check Config] Error: Webhook service not set up correctly. Check your pialert.conf WEBHOOK_* variables.']) - return False - else: - return True + return webhook_check_config() + + # if conf.WEBHOOK_URL == '': + # mylog('none', ['[Check Config] Error: Webhook service not set up correctly. Check your pialert.conf WEBHOOK_* variables.']) + # return False + # else: + # return True if service == 'ntfy': - if conf.NTFY_HOST == '' or conf.NTFY_TOPIC == '': - mylog('none', ['[Check Config] Error: NTFY service not set up correctly. Check your pialert.conf NTFY_* variables.']) - return False - else: - return True + return ntfy_check_config () + # + # if conf.NTFY_HOST == '' or conf.NTFY_TOPIC == '': + # mylog('none', ['[Check Config] Error: NTFY service not set up correctly. Check your pialert.conf NTFY_* variables.']) + # return False + # else: + # return True if service == 'pushsafer': - if conf.PUSHSAFER_TOKEN == 'ApiKey': - mylog('none', ['[Check Config] Error: Pushsafer service not set up correctly. Check your pialert.conf PUSHSAFER_TOKEN variable.']) - return False - else: - return True + return pushsafer_check_config() if service == 'mqtt': - if conf.MQTT_BROKER == '' or conf.MQTT_PORT == '' or conf.MQTT_USER == '' or conf.MQTT_PASSWORD == '': - mylog('none', ['[Check Config] Error: MQTT service not set up correctly. 
Check your pialert.conf MQTT_* variables.']) - return False - else: - return True + return mqtt_check_config() #------------------------------------------------------------------------------- def format_table (html, thValue, props, newThValue = ''): if newThValue == '': newThValue = thValue - + return html.replace(""+thValue+"", ""+newThValue+"" ) #------------------------------------------------------------------------------- @@ -375,9 +379,9 @@ def format_report_section (pActive, pSection, pTable, pText, pHTML): # Replace section text if pActive : conf.mail_text = conf.mail_text.replace ('<'+ pTable +'>', pText) - conf.mail_html = conf.mail_html.replace ('<'+ pTable +'>', pHTML) + conf.mail_html = conf.mail_html.replace ('<'+ pTable +'>', pHTML) - conf.mail_text = remove_tag (conf.mail_text, pSection) + conf.mail_text = remove_tag (conf.mail_text, pSection) conf.mail_html = remove_tag (conf.mail_html, pSection) else: conf.mail_text = remove_section (conf.mail_text, pSection) @@ -387,7 +391,7 @@ def format_report_section (pActive, pSection, pTable, pText, pHTML): def remove_section (pText, pSection): # Search section into the text if pText.find ('<'+ pSection +'>') >=0 \ - and pText.find ('') >=0 : + and pText.find ('') >=0 : # return text without the section return pText[:pText.find ('<'+ pSection+'>')] + \ pText[pText.find ('') + len (pSection) +3:] @@ -402,215 +406,8 @@ def remove_tag (pText, pTag): #------------------------------------------------------------------------------- -# Reporting +# Reporting #------------------------------------------------------------------------------- -def send_email (pText, pHTML): - - mylog('debug', '[Send Email] REPORT_TO: ' + hide_email(str(conf.REPORT_TO)) + ' SMTP_USER: ' + hide_email(str(conf.SMTP_USER))) - - # Compose email - msg = MIMEMultipart('alternative') - msg['Subject'] = 'Pi.Alert Report' - msg['From'] = conf.REPORT_FROM - msg['To'] = conf.REPORT_TO - msg.attach (MIMEText (pText, 'plain')) - msg.attach (MIMEText (pHTML, 'html')) - - failedAt = '' - - failedAt = print_log ('SMTP try') - - try: - # Send mail - failedAt = print_log('Trying to open connection to ' + str(conf.SMTP_SERVER) + ':' + str(conf.SMTP_PORT)) - - if conf.SMTP_FORCE_SSL: - failedAt = print_log('SMTP_FORCE_SSL == True so using .SMTP_SSL()') - if conf.SMTP_PORT == 0: - failedAt = print_log('SMTP_PORT == 0 so sending .SMTP_SSL(SMTP_SERVER)') - smtp_connection = smtplib.SMTP_SSL(conf.SMTP_SERVER) - else: - failedAt = print_log('SMTP_PORT == 0 so sending .SMTP_SSL(SMTP_SERVER, SMTP_PORT)') - smtp_connection = smtplib.SMTP_SSL(conf.SMTP_SERVER, conf.SMTP_PORT) - - else: - failedAt = print_log('SMTP_FORCE_SSL == False so using .SMTP()') - if conf.SMTP_PORT == 0: - failedAt = print_log('SMTP_PORT == 0 so sending .SMTP(SMTP_SERVER)') - smtp_connection = smtplib.SMTP (conf.SMTP_SERVER) - else: - failedAt = print_log('SMTP_PORT == 0 so sending .SMTP(SMTP_SERVER, SMTP_PORT)') - smtp_connection = smtplib.SMTP (conf.SMTP_SERVER, conf.SMTP_PORT) - - failedAt = print_log('Setting SMTP debug level') - - # Log level set to debug of the communication between SMTP server and client - if conf.LOG_LEVEL == 'debug': - smtp_connection.set_debuglevel(1) - - failedAt = print_log( 'Sending .ehlo()') - smtp_connection.ehlo() - - if not conf.SMTP_SKIP_TLS: - failedAt = print_log('SMTP_SKIP_TLS == False so sending .starttls()') - smtp_connection.starttls() - failedAt = print_log('SMTP_SKIP_TLS == False so sending .ehlo()') - smtp_connection.ehlo() - if not conf.SMTP_SKIP_LOGIN: - failedAt = 
print_log('SMTP_SKIP_LOGIN == False so sending .login()') - smtp_connection.login (conf.SMTP_USER, conf.SMTP_PASS) - - failedAt = print_log('Sending .sendmail()') - smtp_connection.sendmail (conf.REPORT_FROM, conf.REPORT_TO, msg.as_string()) - smtp_connection.quit() - except smtplib.SMTPAuthenticationError as e: - mylog('none', [' ERROR: Failed at - ', failedAt]) - mylog('none', [' ERROR: Couldn\'t connect to the SMTP server (SMTPAuthenticationError), skipping Email (enable LOG_LEVEL=debug for more logging)']) - except smtplib.SMTPServerDisconnected as e: - mylog('none', [' ERROR: Failed at - ', failedAt]) - mylog('none', [' ERROR: Couldn\'t connect to the SMTP server (SMTPServerDisconnected), skipping Email (enable LOG_LEVEL=debug for more logging)']) - - mylog('debug', '[Send Email] Last executed - ' + str(failedAt)) - -#------------------------------------------------------------------------------- -def send_ntfy (_Text): - headers = { - "Title": "Pi.Alert Notification", - "Actions": "view, Open Dashboard, "+ conf.REPORT_DASHBOARD_URL, - "Priority": "urgent", - "Tags": "warning" - } - # if username and password are set generate hash and update header - if conf.NTFY_USER != "" and conf.NTFY_PASSWORD != "": - # Generate hash for basic auth - usernamepassword = "{}:{}".format(conf.NTFY_USER,conf.NTFY_PASSWORD) - basichash = b64encode(bytes(conf.NTFY_USER + ':' + conf.NTFY_PASSWORD, "utf-8")).decode("ascii") - - # add authorization header with hash - headers["Authorization"] = "Basic {}".format(basichash) - - requests.post("{}/{}".format( conf.NTFY_HOST, conf.NTFY_TOPIC), - data=_Text, - headers=headers) - -def send_pushsafer (_Text): - url = 'https://www.pushsafer.com/api' - post_fields = { - "t" : 'Pi.Alert Message', - "m" : _Text, - "s" : 11, - "v" : 3, - "i" : 148, - "c" : '#ef7f7f', - "d" : 'a', - "u" : conf.REPORT_DASHBOARD_URL, - "ut" : 'Open Pi.Alert', - "k" : conf.PUSHSAFER_TOKEN, - } - requests.post(url, data=post_fields) - -#------------------------------------------------------------------------------- -def send_webhook (_json, _html): - - # use data type based on specified payload type - if conf.WEBHOOK_PAYLOAD == 'json': - payloadData = _json - if conf.WEBHOOK_PAYLOAD == 'html': - payloadData = _html - if conf.WEBHOOK_PAYLOAD == 'text': - payloadData = to_text(_json) - - # Define slack-compatible payload - _json_payload = { "text": payloadData } if conf.WEBHOOK_PAYLOAD == 'text' else { - "username": "Pi.Alert", - "text": "There are new notifications", - "attachments": [{ - "title": "Pi.Alert Notifications", - "title_link": conf.REPORT_DASHBOARD_URL, - "text": payloadData - }] - } - - # DEBUG - Write the json payload into a log file for debugging - write_file (logPath + '/webhook_payload.json', json.dumps(_json_payload)) - - # Using the Slack-Compatible Webhook endpoint for Discord so that the same payload can be used for both - if(conf.WEBHOOK_URL.startswith('https://discord.com/api/webhooks/') and not conf.WEBHOOK_URL.endswith("/slack")): - _WEBHOOK_URL = f"{conf.WEBHOOK_URL}/slack" - curlParams = ["curl","-i","-H", "Content-Type:application/json" ,"-d", json.dumps(_json_payload), _WEBHOOK_URL] - else: - _WEBHOOK_URL = conf.WEBHOOK_URL - curlParams = ["curl","-i","-X", conf.WEBHOOK_REQUEST_METHOD ,"-H", "Content-Type:application/json" ,"-d", json.dumps(_json_payload), _WEBHOOK_URL] - - # execute CURL call - try: - # try runnning a subprocess - mylog('debug', '[send_webhook] curlParams: '+ curlParams) - p = subprocess.Popen(curlParams, stdout=subprocess.PIPE, 
stderr=subprocess.STDOUT) - - stdout, stderr = p.communicate() - - # write stdout and stderr into .log files for debugging if needed - logResult (stdout, stderr) # TO-DO should be changed to mylog - except subprocess.CalledProcessError as e: - # An error occured, handle it - mylog('none', ['[send_webhook]', e.output]) - -#------------------------------------------------------------------------------- -def send_apprise (html, text): - #Define Apprise compatible payload (https://github.com/caronc/apprise-api#stateless-solution) - payload = html - - if conf.APPRISE_PAYLOAD == 'text': - payload = text - - _json_payload={ - "urls": conf.APPRISE_URL, - "title": "Pi.Alert Notifications", - "format": conf.APPRISE_PAYLOAD, - "body": payload - } - - try: - # try runnning a subprocess - p = subprocess.Popen(["curl","-i","-X", "POST" ,"-H", "Content-Type:application/json" ,"-d", json.dumps(_json_payload), conf.APPRISE_HOST], stdout=subprocess.PIPE, stderr=subprocess.STDOUT) - stdout, stderr = p.communicate() - # write stdout and stderr into .log files for debugging if needed - logResult (stdout, stderr) # TO-DO should be changed to mylog - except subprocess.CalledProcessError as e: - # An error occured, handle it - mylog('none', [e.output]) - - -def to_text(_json): - payloadData = "" - if len(_json['internet']) > 0 and 'internet' in conf.INCLUDED_SECTIONS: - payloadData += "INTERNET\n" - for event in _json['internet']: - payloadData += event[3] + ' on ' + event[2] + '. ' + event[4] + '. New address:' + event[1] + '\n' - - if len(_json['new_devices']) > 0 and 'new_devices' in conf.INCLUDED_SECTIONS: - payloadData += "NEW DEVICES:\n" - for event in _json['new_devices']: - if event[4] is None: - event[4] = event[11] - payloadData += event[1] + ' - ' + event[4] + '\n' - - if len(_json['down_devices']) > 0 and 'down_devices' in conf.INCLUDED_SECTIONS: - write_file (logPath + '/down_devices_example.log', _json['down_devices']) - payloadData += 'DOWN DEVICES:\n' - for event in _json['down_devices']: - if event[4] is None: - event[4] = event[11] - payloadData += event[1] + ' - ' + event[4] + '\n' - - if len(_json['events']) > 0 and 'events' in conf.INCLUDED_SECTIONS: - payloadData += "EVENTS:\n" - for event in _json['events']: - if event[8] != "Internet": - payloadData += event[8] + " on " + event[1] + " " + event[3] + " at " + event[2] + "\n" - - return payloadData #------------------------------------------------------------------------------- def send_api(): @@ -618,11 +415,11 @@ def send_api(): write_file(apiPath + 'notification_text.txt' , mail_text) write_file(apiPath + 'notification_text.html' , mail_html) - write_file(apiPath + 'notification_json_final.json' , json.dumps(json_final)) + write_file(apiPath + 'notification_json_final.json' , json.dumps(json_final)) #------------------------------------------------------------------------------- -def skip_repeated_notifications (db): +def skip_repeated_notifications (db): # Skip repeated notifications # due strfime : Overflow --> use "strftime / 60" @@ -640,7 +437,7 @@ def skip_repeated_notifications (db): """ ) mylog('verbose','[Skip Repeated Notifications] Skip Repeated end') - db.commitDB() + db.commitDB() #=============================================================================== @@ -651,10 +448,10 @@ def skip_repeated_notifications (db): def check_and_run_event(db): sql = db.sql # TO-DO sql.execute(""" select * from Parameters where par_ID = "Front_Event" """) - rows = sql.fetchall() + rows = sql.fetchall() event, param = ['',''] - if len(rows) 
> 0 and rows[0]['par_Value'] != 'finished': + if len(rows) > 0 and rows[0]['par_Value'] != 'finished': event = rows[0]['par_Value'].split('|')[0] param = rows[0]['par_Value'].split('|')[1] else: @@ -666,45 +463,47 @@ def check_and_run_event(db): handle_run(param) # clear event execution flag - sql.execute ("UPDATE Parameters SET par_Value='finished' WHERE par_ID='Front_Event'") + sql.execute ("UPDATE Parameters SET par_Value='finished' WHERE par_ID='Front_Event'") - # commit to DB + # commit to DB db.commitDB() #------------------------------------------------------------------------------- def handle_run(runType): global last_network_scan - mylog('info', ['[', timeNow(), '] START Run: ', runType]) + mylog('info', ['[', timeNow(), '] START Run: ', runType]) if runType == 'ENABLE_ARPSCAN': - last_network_scan = conf.time_started - datetime.timedelta(hours = 24) + last_network_scan = conf.time_started - datetime.timedelta(hours = 24) mylog('info', ['[', timeNow(), '] END Run: ', runType]) #------------------------------------------------------------------------------- def handle_test(testType): - mylog('info', ['[', timeNow(), '] START Test: ', testType]) + mylog('info', ['[', timeNow(), '] START Test: ', testType]) - # Open text sample + # Open text sample sample_txt = get_file_content(pialertPath + '/back/report_sample.txt') - # Open html sample + # Open html sample sample_html = get_file_content(pialertPath + '/back/report_sample.html') - # Open json sample and get only the payload part - sample_json_payload = json.loads(get_file_content(pialertPath + '/back/webhook_json_sample.json'))[0]["body"]["attachments"][0]["text"] - - if testType == 'REPORT_MAIL': - send_email(sample_txt, sample_html) - if testType == 'REPORT_WEBHOOK': - send_webhook (sample_json_payload, sample_txt) - if testType == 'REPORT_APPRISE': - send_apprise (sample_html, sample_txt) - if testType == 'REPORT_NTFY': - send_ntfy (sample_txt) - if testType == 'REPORT_PUSHSAFER': - send_pushsafer (sample_txt) + # Open json sample and get only the payload part + sample_json_payload = json.loads(get_file_content(pialertPath + '/back/webhook_json_sample.json'))[0]["body"]["attachments"][0]["text"] - mylog('info', ['[', timeNow(), '] END Test: ', testType]) \ No newline at end of file + sample_msg = noti_struc(sample_json_payload, sample_txt, sample_html ) + + if testType == 'REPORT_MAIL': + send_email(sample_msg) + if testType == 'REPORT_WEBHOOK': + send_webhook (sample_msg) + if testType == 'REPORT_APPRISE': + send_apprise (sample_msg) + if testType == 'REPORT_NTFY': + send_ntfy (sample_msg) + if testType == 'REPORT_PUSHSAFER': + send_pushsafer (sample_msg) + + mylog('info', ['[Test Publishers] END Test: ', testType]) \ No newline at end of file diff --git a/test/__init__.py b/test/__init__.py new file mode 100644 index 00000000..89c53da0 --- /dev/null +++ b/test/__init__.py @@ -0,0 +1 @@ +""" tests for Pi.Alert """ \ No newline at end of file diff --git a/test/test_helper.py b/test/test_helper.py new file mode 100644 index 00000000..ac31ee77 --- /dev/null +++ b/test/test_helper.py @@ -0,0 +1,29 @@ +import sys +import pathlib + +sys.path.append(str(pathlib.Path(__file__).parent.parent.resolve()) + "/pialert/") + + +import datetime + +from helper import timeNow, updateSubnets + + +# ------------------------------------------------------------------------------- +def test_helper(): + assert timeNow() == datetime.datetime.now().replace(microsecond=0) + + +# 
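# Editor's note: a minimal sketch (an assumption, not part of this patch) of the
# contract that test_updateSubnets below relies on: updateSubnets() accepting a
# single subnet string or a list of them, and returning a list in both cases.
# The name _updateSubnets_sketch is hypothetical and only used for illustration.
def _updateSubnets_sketch(SCAN_SUBNETS):
    # a list of interfaces is passed through as-is
    if type(SCAN_SUBNETS) is list:
        return list(SCAN_SUBNETS)
    # a single subnet string is wrapped into a one-element list
    return [SCAN_SUBNETS]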
-------------------------------------------------------------------------------
def test_updateSubnets():
    # test single subnet
    subnet = "192.168.1.0/24 --interface=eth0"
    result = updateSubnets(subnet)
    assert type(result) is list
    assert len(result) == 1

    # test multiple subnets
    subnet = ["192.168.1.0/24 --interface=eth0", "192.168.2.0/24 --interface=eth1"]
    result = updateSubnets(subnet)
    assert type(result) is list
    assert len(result) == 2

From 12bf4c7bccf1cbbd0e7a7d203623152536283fc8 Mon Sep 17 00:00:00 2001
From: Data-Monkey
Date: Tue, 30 May 2023 18:47:28 +1000
Subject: [PATCH 18/20] more scanning

---
 pialert/__main__.py            |   3 +-
 pialert/conf.py                |   6 +
 pialert/database.py            |  32 +++--
 pialert/plugin.py              |  65 ++++------
 pialert/publishers/__init__.py |   2 +-
 pialert/publishers/email.py    |   4 +-
 pialert/publishers/mqtt.py     |  32 ++---
 pialert/publishers/ntfy.py     |   7 +-
 pialert/publishers/webhook.py  |   2 +-
 pialert/reporting.py           |  29 +++--
 pialert/scanners/nmapscan.py   | 228 +++++++++++++++++----------------
 11 files changed, 210 insertions(+), 200 deletions(-)

diff --git a/pialert/__main__.py b/pialert/__main__.py
index 095b0cce..341233a6 100755
--- a/pialert/__main__.py
+++ b/pialert/__main__.py
@@ -39,8 +39,7 @@ from scanners.pholusscan import performPholusScan
 from scanners.nmapscan import performNmapScan
 from scanners.internet import check_internet_IP
 
-# Global variables
-changedPorts_json_struc = None
+
 
 #===============================================================================
 #===============================================================================
diff --git a/pialert/conf.py b/pialert/conf.py
index 74991966..d4062d91 100644
--- a/pialert/conf.py
+++ b/pialert/conf.py
@@ -17,6 +17,12 @@ time_started = ''
 check_report = []
 log_timestamp = 0
 arpscan_devices = []
+# for MQTT
+mqtt_connected_to_broker = False
+mqtt_sensors = []
+# for notifications
+changedPorts_json_struc = None
+
 
 # ACTUAL CONFIGRATION ITEMS set to defaults
diff --git a/pialert/database.py b/pialert/database.py
index c40a3732..e31be40c 100644
--- a/pialert/database.py
+++ b/pialert/database.py
@@ -367,17 +367,21 @@ class DB():
     #-------------------------------------------------------------------------------
     def get_table_as_json(self, sqlQuery):
 
-        self.sql.execute(sqlQuery)
-
-        columnNames = list(map(lambda x: x[0], self.sql.description))
-
-        rows = self.sql.fetchall()
-
+        mylog('debug',[ '[Database] - get_table_as_json - Query: ', sqlQuery])
+        try:
+            self.sql.execute(sqlQuery)
+            columnNames = list(map(lambda x: x[0], self.sql.description))
+            rows = self.sql.fetchall()
+        except sqlite3.Error as e:
+            mylog('none',[ '[Database] - SQL ERROR: ', e])
+            return None
+
         result = {"data":[]}
-
         for row in rows:
             tmp = row_to_json(columnNames, row)
             result["data"].append(tmp)
+
+        mylog('debug',[ '[Database] - get_table_as_json - returning ', len(rows), " rows with columns: ", columnNames])
         return json_struc(result, columnNames)
 
     #-------------------------------------------------------------------------------
@@ -387,15 +391,15 @@ class DB():
         """check the query and arguments are aligned and are read only"""
         mylog('debug',[ '[Database] - SELECT Query: ', query, " params: ", args])
         try:
-            assert query.count('?') == len(args)
-            assert query.upper().strip().startswith('SELECT')
-            self.sql.execute(query, args)
-            rows = self.sql.fetchall()
-            return rows
+            assert query.count('?') == len(args)
+            assert query.upper().strip().startswith('SELECT')
+            self.sql.execute(query, args)
+            rows = self.sql.fetchall()
+            return rows
        except 
AssertionError: - mylog('none',[ '[Database] - ERROR: inconsistent query and/or arguments.', query, " params: ", args]) + mylog('none',[ '[Database] - ERROR: inconsistent query and/or arguments.', query, " params: ", args]) except sqlite3.Error as e: - mylog('none',[ '[Database] - SQL ERROR: ', e]) + mylog('none',[ '[Database] - SQL ERROR: ', e]) return None diff --git a/pialert/plugin.py b/pialert/plugin.py index 83714a62..dd68f4a6 100644 --- a/pialert/plugin.py +++ b/pialert/plugin.py @@ -8,27 +8,16 @@ from collections import namedtuple import conf from const import pluginsPath, logPath from logger import mylog -from helper import timeNowTZ, updateState, get_file_content, write_file +from helper import timeNow, updateState, get_file_content, write_file from api import update_api - - -#------------------------------------------------------------------------------- -# this is duplicated from helper to avoid circular reference !! TO-DO -#------------------------------------------------------------------------------- -def timeNow(): - return datetime.datetime.now().replace(microsecond=0) - - #------------------------------------------------------------------------------- def run_plugin_scripts(db, runType, plugins = conf.plugins): - - # global plugins, tz, mySchedules # Header updateState(db,"Run: Plugins") - mylog('debug', [' [Plugins] Check if any plugins need to be executed on run type: ', runType]) + mylog('debug', ['[Plugins] Check if any plugins need to be executed on run type: ', runType]) for plugin in plugins: @@ -49,12 +38,12 @@ def run_plugin_scripts(db, runType, plugins = conf.plugins): shouldRun = schd.runScheduleCheck() if shouldRun: # note the last time the scheduled plugin run was executed - schd.last_run = timeNowTZ() + schd.last_run = timeNow() if shouldRun: print_plugin_info(plugin, ['display_name']) - mylog('debug', [' [Plugins] CMD: ', get_plugin_setting(plugin, "CMD")["value"]]) + mylog('debug', ['[Plugins] CMD: ', get_plugin_setting(plugin, "CMD")["value"]]) execute_plugin(db, plugin) @@ -81,11 +70,11 @@ def get_plugins_configs(): #------------------------------------------------------------------------------- def print_plugin_info(plugin, elements = ['display_name']): - mylog('verbose', [' [Plugins] ---------------------------------------------']) + mylog('verbose', ['[Plugins] ---------------------------------------------']) for el in elements: res = get_plugin_string(plugin, el) - mylog('verbose', [' [Plugins] ', el ,': ', res]) + mylog('verbose', ['[Plugins] ', el ,': ', res]) #------------------------------------------------------------------------------- @@ -99,7 +88,7 @@ def get_plugin_setting(plugin, function_key): result = set if result == None: - mylog('none', [' [Plugins] Setting with "function":"', function_key, '" is missing in plugin: ', get_plugin_string(plugin, 'display_name')]) + mylog('none', ['[Plugins] Setting with "function":"', function_key, '" is missing in plugin: ', get_plugin_string(plugin, 'display_name')]) return result @@ -162,7 +151,7 @@ def execute_plugin(db, plugin): else: set_RUN_TIMEOUT = set["value"] - mylog('debug', [' [Plugins] Timeout: ', set_RUN_TIMEOUT]) + mylog('debug', ['[Plugins] Timeout: ', set_RUN_TIMEOUT]) # Prepare custom params params = [] @@ -183,7 +172,7 @@ def execute_plugin(db, plugin): resolved = flatten_array(db.get_sql_array(param["value"])) if resolved == None: - mylog('none', [' [Plugins] The parameter "name":"', param["name"], '" was resolved as None']) + mylog('none', ['[Plugins] The parameter "name":"', 
param["name"], '" was resolved as None']) else: params.append( [param["name"], resolved] ) @@ -199,8 +188,8 @@ def execute_plugin(db, plugin): command = resolve_wildcards_arr(set_CMD.split(), params) # Execute command - mylog('verbose', [' [Plugins] Executing: ', set_CMD]) - mylog('debug', [' [Plugins] Resolved : ', command]) + mylog('verbose', ['[Plugins] Executing: ', set_CMD]) + mylog('debug', ['[Plugins] Resolved : ', command]) try: # try runnning a subprocess with a forced timeout in case the subprocess hangs @@ -208,9 +197,9 @@ def execute_plugin(db, plugin): except subprocess.CalledProcessError as e: # An error occured, handle it mylog('none', [e.output]) - mylog('none', [' [Plugins] Error - enable LOG_LEVEL=debug and check logs']) + mylog('none', ['[Plugins] Error - enable LOG_LEVEL=debug and check logs']) except subprocess.TimeoutExpired as timeErr: - mylog('none', [' [Plugins] TIMEOUT - the process forcefully terminated as timeout reached']) + mylog('none', ['[Plugins] TIMEOUT - the process forcefully terminated as timeout reached']) # check the last run output @@ -231,7 +220,7 @@ def execute_plugin(db, plugin): if len(columns) == 9: sqlParams.append((plugin["unique_prefix"], columns[0], columns[1], 'null', columns[2], columns[3], columns[4], columns[5], columns[6], 0, columns[7], 'null', columns[8])) else: - mylog('none', [' [Plugins]: Skipped invalid line in the output: ', line]) + mylog('none', ['[Plugins]: Skipped invalid line in the output: ', line]) # pialert-db-query if plugin['data_source'] == 'pialert-db-query': @@ -239,7 +228,7 @@ def execute_plugin(db, plugin): q = set_CMD.replace("{s-quote}", '\'') # Execute command - mylog('verbose', [' [Plugins] Executing: ', q]) + mylog('verbose', ['[Plugins] Executing: ', q]) # set_CMD should contain a SQL query arr = db.get_sql_array (q) @@ -249,15 +238,15 @@ def execute_plugin(db, plugin): if len(row) == 9 and (row[0] in ['','null']) == False : sqlParams.append((plugin["unique_prefix"], row[0], handle_empty(row[1]), 'null', row[2], row[3], row[4], handle_empty(row[5]), handle_empty(row[6]), 0, row[7], 'null', row[8])) else: - mylog('none', [' [Plugins]: Skipped invalid sql result']) + mylog('none', ['[Plugins]: Skipped invalid sql result']) # check if the subprocess / SQL query failed / there was no valid output if len(sqlParams) == 0: - mylog('none', [' [Plugins] No output received from the plugin ', plugin["unique_prefix"], ' - enable LOG_LEVEL=debug and check logs']) + mylog('none', ['[Plugins] No output received from the plugin ', plugin["unique_prefix"], ' - enable LOG_LEVEL=debug and check logs']) return else: - mylog('verbose', ['[', timeNow(), '] [Plugins]: SUCCESS, received ', len(sqlParams), ' entries']) + mylog('verbose', ['[Plugins]: SUCCESS, received ', len(sqlParams), ' entries']) # process results if any if len(sqlParams) > 0: @@ -350,11 +339,11 @@ def flatten_array(arr): # Replace {wildcars} with parameters def resolve_wildcards_arr(commandArr, params): - mylog('debug', [' [Plugins]: Pre-Resolved CMD: '] + commandArr) + mylog('debug', ['[Plugins]: Pre-Resolved CMD: '] + commandArr) for param in params: - # mylog('debug', [' [Plugins]: key : {', param[0], '}']) - # mylog('debug', [' [Plugins]: resolved: ', param[1]]) + # mylog('debug', ['[Plugins]: key : {', param[0], '}']) + # mylog('debug', ['[Plugins]: resolved: ', param[1]]) i = 0 @@ -391,7 +380,7 @@ def process_plugin_events(db, plugin): pluginPref = plugin["unique_prefix"] - mylog('debug', [' [Plugins] Processing : ', pluginPref]) + mylog('debug', 
['[Plugins] Processing : ', pluginPref]) plugObjectsArr = db.get_sql_array ("SELECT * FROM Plugins_Objects where Plugin = '" + str(pluginPref)+"'") plugEventsArr = db.get_sql_array ("SELECT * FROM Plugins_Events where Plugin = '" + str(pluginPref)+"'") @@ -404,8 +393,8 @@ def process_plugin_events(db, plugin): existingPluginObjectsCount = len(pluginObjects) - mylog('debug', [' [Plugins] Existing objects : ', existingPluginObjectsCount]) - mylog('debug', [' [Plugins] New and existing events : ', len(plugEventsArr)]) + mylog('debug', ['[Plugins] Existing objects : ', existingPluginObjectsCount]) + mylog('debug', ['[Plugins] New and existing events : ', len(plugEventsArr)]) # set status as new - will be changed later if conditions are fulfilled, e.g. entry found for eve in plugEventsArr: @@ -420,7 +409,7 @@ def process_plugin_events(db, plugin): # compare hash of the IDs for uniqueness if any(x.idsHash == tmpObject.idsHash for x in pluginObjects): - mylog('debug', [' [Plugins] Found existing object']) + mylog('debug', ['[Plugins] Found existing object']) pluginEvents[index].status = "exists" index += 1 @@ -488,7 +477,7 @@ def process_plugin_events(db, plugin): dbTable = plugin['mapped_to_table'] - mylog('debug', [' [Plugins] Mapping objects to database table: ', dbTable]) + mylog('debug', ['[Plugins] Mapping objects to database table: ', dbTable]) # collect all columns to be mapped mappedCols = [] @@ -542,7 +531,7 @@ def process_plugin_events(db, plugin): q = f'INSERT into {dbTable} ({columnsStr}) VALUES ({valuesStr})' - mylog('debug', [' [Plugins] SQL query for mapping: ', q ]) + mylog('debug', ['[Plugins] SQL query for mapping: ', q ]) sql.executemany (q, sqlParams) diff --git a/pialert/publishers/__init__.py b/pialert/publishers/__init__.py index 52c36748..f1d18ebd 100644 --- a/pialert/publishers/__init__.py +++ b/pialert/publishers/__init__.py @@ -5,4 +5,4 @@ each publisher exposes: - check_config () returning True / False - send (message) returning True / Fasle -""" \ No newline at end of file +""" diff --git a/pialert/publishers/email.py b/pialert/publishers/email.py index bae0ca1b..3d7bc3e7 100644 --- a/pialert/publishers/email.py +++ b/pialert/publishers/email.py @@ -1,3 +1,5 @@ +""" Pi.Alert module to send notification emails """ + from email.mime.multipart import MIMEMultipart from email.mime.text import MIMEText import smtplib @@ -20,7 +22,7 @@ def send (msg: noti_struc): pText = msg.text pHTML = msg.html - + mylog('debug', '[Send Email] REPORT_TO: ' + hide_email(str(conf.REPORT_TO)) + ' SMTP_USER: ' + hide_email(str(conf.SMTP_USER))) # Compose email diff --git a/pialert/publishers/mqtt.py b/pialert/publishers/mqtt.py index 826a8ede..8e59d205 100644 --- a/pialert/publishers/mqtt.py +++ b/pialert/publishers/mqtt.py @@ -14,10 +14,6 @@ from helper import bytes_to_string, sanitize_string # MQTT #------------------------------------------------------------------------------- -mqtt_connected_to_broker = False -mqtt_sensors = [] - - #------------------------------------------------------------------------------- def check_config(): if conf.MQTT_BROKER == '' or conf.MQTT_PORT == '' or conf.MQTT_USER == '' or conf.MQTT_PASSWORD == '': @@ -76,11 +72,9 @@ def create_sensor(client, deviceId, deviceName, sensorType, sensorName, icon): new_sensor_config = sensor_config(deviceId, deviceName, sensorType, sensorName, icon) # check if config already in list and if not, add it, otherwise skip - global mqtt_sensors, uniqueSensorCount - is_unique = True - for sensor in mqtt_sensors: + for sensor in 
conf.mqtt_sensors: if sensor.hash == new_sensor_config.hash: is_unique = False break @@ -93,9 +87,7 @@ def create_sensor(client, deviceId, deviceName, sensorType, sensorName, icon): #------------------------------------------------------------------------------- -def publish_sensor(client, sensorConf): - - global mqtt_sensors +def publish_sensor(client, sensorConf): message = '{ \ "name":"'+ sensorConf.deviceName +' '+sensorConf.sensorName+'", \ @@ -118,26 +110,24 @@ def publish_sensor(client, sensorConf): # hack - delay adding to the queue in case the process is time.sleep(conf.MQTT_DELAY_SEC) # restarted and previous publish processes aborted # (it takes ~2s to update a sensor config on the broker) - mqtt_sensors.append(sensorConf) + conf.mqtt_sensors.append(sensorConf) #------------------------------------------------------------------------------- def mqtt_create_client(): def on_disconnect(client, userdata, rc): - global mqtt_connected_to_broker - mqtt_connected_to_broker = False + conf.mqtt_connected_to_broker = False # not sure is below line is correct / necessary # client = mqtt_create_client() def on_connect(client, userdata, flags, rc): - global mqtt_connected_to_broker if rc == 0: mylog('verbose', [" Connected to broker"]) - mqtt_connected_to_broker = True # Signal connection + conf.mqtt_connected_to_broker = True # Signal connection else: mylog('none', [" Connection failed"]) - mqtt_connected_to_broker = False + conf.mqtt_connected_to_broker = False client = mqtt_client.Client('PiAlert') # Set Connecting Client ID @@ -150,12 +140,12 @@ def mqtt_create_client(): return client #------------------------------------------------------------------------------- -def mqtt_start(): +def mqtt_start(db): - global client, mqtt_connected_to_broker + #global client - if mqtt_connected_to_broker == False: - mqtt_connected_to_broker = True + if conf.mqtt_connected_to_broker == False: + conf.mqtt_connected_to_broker = True client = mqtt_create_client() # General stats @@ -164,7 +154,7 @@ def mqtt_start(): create_generic_device(client) # Get the data - row = get_device_stats() + row = get_device_stats(db) columns = ["online","down","all","archived","new","unknown"] diff --git a/pialert/publishers/ntfy.py b/pialert/publishers/ntfy.py index 2dc5318f..363da466 100644 --- a/pialert/publishers/ntfy.py +++ b/pialert/publishers/ntfy.py @@ -3,7 +3,8 @@ import conf import requests from base64 import b64encode -from logger import mylog, noti_struc +from logger import mylog +from helper import noti_struc #------------------------------------------------------------------------------- def check_config(): @@ -15,7 +16,7 @@ def check_config(): #------------------------------------------------------------------------------- def send (msg: noti_struc): - _Text = msg.html + headers = { "Title": "Pi.Alert Notification", "Actions": "view, Open Dashboard, "+ conf.REPORT_DASHBOARD_URL, @@ -32,5 +33,5 @@ def send (msg: noti_struc): headers["Authorization"] = "Basic {}".format(basichash) requests.post("{}/{}".format( conf.NTFY_HOST, conf.NTFY_TOPIC), - data=_Text, + data=msg.html, headers=headers) diff --git a/pialert/publishers/webhook.py b/pialert/publishers/webhook.py index 850f5cc0..fc16a9c0 100644 --- a/pialert/publishers/webhook.py +++ b/pialert/publishers/webhook.py @@ -16,7 +16,7 @@ def check_config(): #------------------------------------------------------------------------------- -def send_webhook (msg: noti_struc): +def send (msg: noti_struc): # use data type based on specified payload type if 
conf.WEBHOOK_PAYLOAD == 'json': diff --git a/pialert/reporting.py b/pialert/reporting.py index 0bb5e57a..933ce65f 100644 --- a/pialert/reporting.py +++ b/pialert/reporting.py @@ -28,6 +28,8 @@ from publishers.pushsafer import (check_config as pushsafer_check_config, send as send_pushsafer) from publishers.mqtt import (check_config as mqtt_check_config, mqtt_start ) + + #=============================================================================== # REPORTING #=============================================================================== @@ -119,11 +121,13 @@ def send_notifications (db, INCLUDED_SECTIONS = conf.INCLUDED_SECTIONS): )""") # Open text Template + mylog('verbose', ['[Notification] Open text Template']) template_file = open(pialertPath + '/back/report_template.txt', 'r') mail_text = template_file.read() template_file.close() # Open html Template + mylog('verbose', ['[Notification] Open html Template']) template_file = open(pialertPath + '/back/report_template.html', 'r') if conf.newVersionAvailable : template_file = open(pialertPath + '/back/report_template_new_version.html', 'r') @@ -139,6 +143,7 @@ def send_notifications (db, INCLUDED_SECTIONS = conf.INCLUDED_SECTIONS): mail_text = mail_text.replace ('', socket.gethostname() ) mail_html = mail_html.replace ('', socket.gethostname() ) + mylog('verbose', ['[Notification] included sections: ',INCLUDED_SECTIONS]) if 'internet' in INCLUDED_SECTIONS: # Compose Internet Section sqlQuery = """SELECT eve_MAC as MAC, eve_IP as IP, eve_DateTime as Datetime, eve_EventType as "Event Type", eve_AdditionalInfo as "More info" FROM Events @@ -152,6 +157,7 @@ def send_notifications (db, INCLUDED_SECTIONS = conf.INCLUDED_SECTIONS): mail_text = mail_text.replace ('', notiStruc.text + '\n') mail_html = mail_html.replace ('', notiStruc.html) + mylog('verbose', ['[Notification] Internet sections done.']) if 'new_devices' in INCLUDED_SECTIONS: # Compose New Devices Section @@ -167,6 +173,7 @@ def send_notifications (db, INCLUDED_SECTIONS = conf.INCLUDED_SECTIONS): mail_text = mail_text.replace ('', notiStruc.text + '\n') mail_html = mail_html.replace ('', notiStruc.html) + mylog('verbose', ['[Notification] New Devices sections done.']) if 'down_devices' in INCLUDED_SECTIONS: # Compose Devices Down Section @@ -182,6 +189,7 @@ def send_notifications (db, INCLUDED_SECTIONS = conf.INCLUDED_SECTIONS): mail_text = mail_text.replace ('', notiStruc.text + '\n') mail_html = mail_html.replace ('', notiStruc.html) + mylog('verbose', ['[Notification] Down Devices sections done.']) if 'events' in INCLUDED_SECTIONS: # Compose Events Section @@ -198,21 +206,24 @@ def send_notifications (db, INCLUDED_SECTIONS = conf.INCLUDED_SECTIONS): mail_text = mail_text.replace ('', notiStruc.text + '\n') mail_html = mail_html.replace ('', notiStruc.html) + mylog('verbose', ['[Notification] Events sections done.']) if 'ports' in INCLUDED_SECTIONS: # collect "ports" for the webhook json - if changedPorts_json_struc is not None: - json_ports = changedPorts_json_struc.json["data"] - - notiStruc = construct_notifications(db, "", "Ports", True, changedPorts_json_struc) + mylog('verbose', ['[Notification] Ports: conf.changedPorts_json_struc:', conf.changedPorts_json_struc]) + if conf.changedPorts_json_struc is not None: + json_ports = conf.changedPorts_json_struc.json["data"] + notiStruc = construct_notifications(db, "", "Ports", True, conf.changedPorts_json_struc) + mylog('verbose', ['[Notification] Ports: notiStruc:', notiStruc ]) mail_html = mail_html.replace ('', notiStruc.html) 
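# Editor's note: an illustrative sketch (an assumption, not project code) of the
# structure conf.changedPorts_json_struc is expected to hold at this point, based
# on the fields built up in scanners/nmapscan.py; all values below are made up.
changedPorts_example = {
    "data": [
        {
            "Name"     : "New device",        # or the stored device name
            "MAC"      : "00:11:22:33:44:55", # hypothetical MAC address
            "Port"     : "80",                # hypothetical port
            "State"    : "open",
            "Service"  : "http",
            "Extra"    : "",
            "NewOrOld" : "New device",        # also "New values" / "Old values"
        }
    ]
}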
portsTxt = "" - if changedPorts_json_struc is not None: + if conf.changedPorts_json_struc is not None: portsTxt = "Ports \n---------\n Ports changed! Check PiAlert for details!\n" mail_text = mail_text.replace ('', portsTxt ) + mylog('verbose', ['[Notification] Ports sections done.']) if 'plugins' in INCLUDED_SECTIONS and conf.ENABLE_PLUGINS: # Compose Plugins Section @@ -228,7 +239,7 @@ def send_notifications (db, INCLUDED_SECTIONS = conf.INCLUDED_SECTIONS): # check if we need to report something plugins_report = len(json_plugins) > 0 - + mylog('verbose', ['[Notification] Plugins sections done.']) json_final = { "internet": json_internet, @@ -293,7 +304,7 @@ def send_notifications (db, INCLUDED_SECTIONS = conf.INCLUDED_SECTIONS): if conf.REPORT_MQTT and check_config('mqtt'): updateState(db,"Send: MQTT") mylog('info', ['[Notification] Establishing MQTT thread']) - mqtt_start() + mqtt_start(db) else : mylog('verbose', ['[Notification] Skip MQTT']) else : @@ -310,7 +321,7 @@ def send_notifications (db, INCLUDED_SECTIONS = conf.INCLUDED_SECTIONS): # clear plugin events sql.execute ("DELETE FROM Plugins_Events") - changedPorts_json_struc = None + conf.changedPorts_json_struc = None # DEBUG - print number of rows updated mylog('info', ['[Notification] Notifications changes: ', sql.rowcount]) @@ -423,7 +434,7 @@ def skip_repeated_notifications (db): # Skip repeated notifications # due strfime : Overflow --> use "strftime / 60" - mylog('verbose','[Skip Repeated Notifications] Skip Repeated') + mylog('verbose','[Skip Repeated Notifications] Skip Repeated start') db.sql.execute ("""UPDATE Events SET eve_PendingAlertEmail = 0 WHERE eve_PendingAlertEmail = 1 AND eve_MAC IN ( diff --git a/pialert/scanners/nmapscan.py b/pialert/scanners/nmapscan.py index b13cb5d8..58f0d3bf 100644 --- a/pialert/scanners/nmapscan.py +++ b/pialert/scanners/nmapscan.py @@ -24,12 +24,10 @@ class nmap_entry: #------------------------------------------------------------------------------- def performNmapScan(db, devicesToScan): - sql = db.sql # TO-DO - - global changedPorts_json_struc - - changedPortsTmp = [] - + """ + run nmap scan on a list of devices + discovers open ports and keeps track existing and new open ports + """ if len(devicesToScan) > 0: timeoutSec = conf.NMAP_TIMEOUT @@ -38,9 +36,8 @@ def performNmapScan(db, devicesToScan): updateState(db,"Scan: Nmap") - mylog('verbose', ['[', timeNow(), '] Scan: Nmap for max ', str(timeoutSec), 's ('+ str(round(int(timeoutSec) / 60, 1)) +'min) per device']) - - mylog('verbose', [" Estimated max delay: ", (devTotal * int(timeoutSec)), 's ', '(', round((devTotal * int(timeoutSec))/60,1) , 'min)' ]) + mylog('verbose', ['[NMAP Scan] Scan: Nmap for max ', str(timeoutSec), 's ('+ str(round(int(timeoutSec) / 60, 1)) +'min) per device']) + mylog('verbose', ["[NMAP Scan] Estimated max delay: ", (devTotal * int(timeoutSec)), 's ', '(', round((devTotal * int(timeoutSec))/60,1) , 'min)' ]) devIndex = 0 for device in devicesToScan: @@ -56,15 +53,15 @@ def performNmapScan(db, devicesToScan): output = subprocess.check_output (nmapArgs, universal_newlines=True, stderr=subprocess.STDOUT, timeout=(timeoutSec + 30)) except subprocess.CalledProcessError as e: # An error occured, handle it - mylog('none', [e.output]) - mylog('none', [" Error - Nmap Scan - check logs", progress]) + mylog('none', ["[NMAP Scan] " ,e.output]) + mylog('none', ["[NMAP Scan] Error - Nmap Scan - check logs", progress]) except subprocess.TimeoutExpired as timeErr: - mylog('verbose', [' Nmap TIMEOUT - the process forcefully 
terminated as timeout reached for ', device["dev_LastIP"], progress])
 
         if output == "": # check if the subprocess failed
-            mylog('info', ['[', timeNow(), '] Scan: Nmap FAIL for ', device["dev_LastIP"], progress ,' check logs for details'])
+            mylog('info', ['[NMAP Scan] Nmap FAIL for ', device["dev_LastIP"], progress ,' check logs for details'])
         else:
-            mylog('verbose', ['[', timeNow(), '] Scan: Nmap SUCCESS for ', device["dev_LastIP"], progress])
+            mylog('verbose', ['[NMAP Scan] Nmap SUCCESS for ', device["dev_LastIP"], progress])
 
         devIndex += 1
 
@@ -94,110 +91,121 @@
         elif 'Nmap done' in line:
             duration = line.split('scanned in ')[1]
         index += 1
+        mylog('verbose', ['[NMAP Scan] Ports found by NMAP: ', len(newEntriesTmp)])
+        process_discovered_ports(db, device, newEntriesTmp)
+    #end for loop
 
-        # previous Nmap Entries
-        oldEntries = []
 
-        mylog('verbose', ['[', timeNow(), '] Scan: Ports found by NMAP: ', len(newEntriesTmp)])
+
+def process_discovered_ports(db, device, discoveredPorts):
+    """
+    process ports discovered by nmap
+    compare to previous ports
+    update the DB
+    raise notifications
+    """
+    sql = db.sql # TO-DO
+    # previous Nmap Entries
+    oldEntries = []
+    changedPortsTmp = []
+
+    mylog('verbose', ['[NMAP Scan] Process ports found by NMAP: ', len(discoveredPorts)])
+
+    if len(discoveredPorts) > 0:
+
+        # get all current NMAP ports from the DB
+        rows = db.read(sql_nmap_scan_all)
+
+        for row in rows:
+            # only collect entries matching the current MAC address
+            if row["MAC"] == device["dev_MAC"]:
+                oldEntries.append(nmap_entry(row["MAC"], row["Time"], row["Port"], row["State"], row["Service"], device["dev_Name"], row["Extra"], row["Index"]))
+
+        newEntries = []
+
+        # Collect all entries that don't match the ones in the DB
+        for discoveredPort in discoveredPorts:
+
+            found = False
-        if len(newEntriesTmp) > 0:
+            # Check the new entry is already available in oldEntries and remove from processing if yes
+            for oldEntry in oldEntries:
+                if discoveredPort.hash == oldEntry.hash:
+                    found = True
 
-            # get all current NMAP ports from the DB
-            sql.execute(sql_nmap_scan_all)
-
-            rows = sql.fetchall()
-
-            for row in rows:
-                # only collect entries matching the current MAC address
-                if row["MAC"] == device["dev_MAC"]:
-                    oldEntries.append(nmap_entry(row["MAC"], row["Time"], row["Port"], row["State"], row["Service"], device["dev_Name"], row["Extra"], row["Index"]))
+            if not found:
+                newEntries.append(discoveredPort)
 
-            newEntries = []
+        mylog('verbose', ['[NMAP Scan] Nmap newly discovered or changed ports: ', len(newEntries)])
 
-            # Collect all entries that don't match the ones in the DB
-            for newTmpEntry in newEntriesTmp:
+        # collect new ports, find the 
corresponding old entry and return for notification purposes - # also update the DB with the new values after deleting the old ones - if len(newEntries) > 0: - - # params to build the SQL query - params = [] - indexesToDelete = "" - - # Find old entry matching the new entry hash - for newEntry in newEntries: - - foundEntry = None - - for oldEntry in oldEntries: - if oldEntry.hash == newEntry.hash: - indexesToDelete = indexesToDelete + str(oldEntry.index) + ',' - foundEntry = oldEntry - - columnNames = ["Name", "MAC", "Port", "State", "Service", "Extra", "NewOrOld" ] - - # Old entry found - if foundEntry is not None: - # Build params for sql query - params.append((newEntry.mac, newEntry.time, newEntry.port, newEntry.state, newEntry.service, oldEntry.extra)) - # Build JSON for API and notifications - changedPortsTmp.append({ - "Name" : foundEntry.name, - "MAC" : newEntry.mac, - "Port" : newEntry.port, - "State" : newEntry.state, - "Service" : newEntry.service, - "Extra" : foundEntry.extra, - "NewOrOld" : "New values" - }) - changedPortsTmp.append({ - "Name" : foundEntry.name, - "MAC" : foundEntry.mac, - "Port" : foundEntry.port, - "State" : foundEntry.state, - "Service" : foundEntry.service, - "Extra" : foundEntry.extra, - "NewOrOld" : "Old values" - }) - # New entry - no matching Old entry found - else: - # Build params for sql query - params.append((newEntry.mac, newEntry.time, newEntry.port, newEntry.state, newEntry.service, '')) - # Build JSON for API and notifications - changedPortsTmp.append({ - "Name" : "New device", - "MAC" : newEntry.mac, - "Port" : newEntry.port, - "State" : newEntry.state, - "Service" : newEntry.service, - "Extra" : "", - "NewOrOld" : "New device" - }) - - changedPorts_json_struc = json_struc({ "data" : changedPortsTmp}, columnNames) - - # Delete old entries if available - if len(indexesToDelete) > 0: - sql.execute ("DELETE FROM Nmap_Scan where \"Index\" in (" + indexesToDelete[:-1] +")") - db.commitDB() - - # Insert new values into the DB - sql.executemany ("""INSERT INTO Nmap_Scan ("MAC", "Time", "Port", "State", "Service", "Extra") VALUES (?, ?, ?, ?, ?, ?)""", params) - db.commitDB() + foundEntry = None + + for oldEntry in oldEntries: + if oldEntry.hash == newEntry.hash: + indexesToDelete = indexesToDelete + str(oldEntry.index) + ',' + foundEntry = oldEntry + + columnNames = ["Name", "MAC", "Port", "State", "Service", "Extra", "NewOrOld" ] + + # Old entry found + if foundEntry is not None: + # Build params for sql query + params.append((newEntry.mac, newEntry.time, newEntry.port, newEntry.state, newEntry.service, oldEntry.extra)) + # Build JSON for API and notifications + changedPortsTmp.append({ + "Name" : foundEntry.name, + "MAC" : newEntry.mac, + "Port" : newEntry.port, + "State" : newEntry.state, + "Service" : newEntry.service, + "Extra" : foundEntry.extra, + "NewOrOld" : "New values" + }) + changedPortsTmp.append({ + "Name" : foundEntry.name, + "MAC" : foundEntry.mac, + "Port" : foundEntry.port, + "State" : foundEntry.state, + "Service" : foundEntry.service, + "Extra" : foundEntry.extra, + "NewOrOld" : "Old values" + }) + # New entry - no matching Old entry found + else: + # Build params for sql query + params.append((newEntry.mac, newEntry.time, newEntry.port, newEntry.state, newEntry.service, '')) + # Build JSON for API and notifications + changedPortsTmp.append({ + "Name" : "New device", + "MAC" : newEntry.mac, + "Port" : newEntry.port, + "State" : newEntry.state, + "Service" : newEntry.service, + "Extra" : "", + "NewOrOld" : "New device" + }) + + 
conf.changedPorts_json_struc = json_struc({ "data" : changedPortsTmp}, columnNames)
+
+        # Delete old entries if available
+        if len(indexesToDelete) > 0:
+            sql.execute ("DELETE FROM Nmap_Scan where \"Index\" in (" + indexesToDelete[:-1] +")")
+            db.commitDB()
+
+        # Insert new values into the DB
+        sql.executemany ("""INSERT INTO Nmap_Scan ("MAC", "Time", "Port", "State", "Service", "Extra") VALUES (?, ?, ?, ?, ?, ?)""", params)
+        db.commitDB()

From 007611c4297707a3429d5cdf805d1f92352edfdf Mon Sep 17 00:00:00 2001
From: Data-Monkey
Date: Tue, 30 May 2023 18:57:56 +1000
Subject: [PATCH 19/20] manually added es translations from master

---
 front/php/templates/language/es_es.php | 448 +++++++++++++++----------
 1 file changed, 267 insertions(+), 181 deletions(-)

diff --git a/front/php/templates/language/es_es.php b/front/php/templates/language/es_es.php
index e37379c1..118e57ed 100755
--- a/front/php/templates/language/es_es.php
+++ b/front/php/templates/language/es_es.php
@@ -5,6 +5,7 @@ $lang['es_es'] = array(
 //////////////////////////////////////////////////////////////////
 // About - Update by @TeroRERO 07ago2022
 //////////////////////////////////////////////////////////////////
+
 'About_Title' => 'Guardián de Red
          (Código Abierto)', 'About_Design' => 'Diseñado para:', 'About_Exit' => 'Salir', @@ -12,20 +13,31 @@ $lang['es_es'] = array( ////////////////////////////////////////////////////////////////// // General - Update by @TeroRERO 01ago2022 ////////////////////////////////////////////////////////////////// + 'Gen_Delete' => 'Eliminar', +'Gen_DeleteAll' => 'Eliminar todo', 'Gen_Cancel' => 'Cancelar', -'Gen_Okay' => 'Ok', +'Gen_Okay' => 'Aceptar', +'Gen_Save' => 'Guardar', +'Gen_Saved' => 'Guardado', +'Gen_Run' => 'Ejecutar', +'Gen_Action' => 'Acción', 'Gen_Purge' => 'Purgar', 'Gen_Backup' => 'Ejecutar copia de seguridad', 'Gen_Restore' => 'Ejecutar restauración', 'Gen_Switch' => 'Cambiar', 'Gen_AreYouSure' => '¿Estás seguro de', +'Gen_Upd' => 'Actualizado correctamente', +'Gen_Upd_Fail' => 'Fallo al actualizar', +'Gen_Help' => 'Ayuda', +'Gen_DataUpdatedUITakesTime' => 'Correcto - La interfaz puede tardar en actualizarse si se está ejecutando un escaneo.', +'Gen_LockedDB' => 'Fallo - La base de datos puede estar bloqueada - Pulsa F1 -> Ajustes de desarrolladores -> Consola o prueba más tarde.', ////////////////////////////////////////////////////////////////// // Login Page - Update by @TeroRERO 03ago2022 ////////////////////////////////////////////////////////////////// - // TeroRERO Off 'Login_Box' => 'Inicie su sesión', + 'Login_Box' => 'Ingrese su contraseña', 'Login_Remember' => 'Recordar', 'Login_Remember_small' => '(válido por 7 días)', @@ -38,6 +50,7 @@ $lang['es_es'] = array( 'Login_Toggle_Info' => 'Información sobre la contraseña', 'Login_Toggle_Info_headline' => 'Información sobre la contraseña', 'Login_Toggle_Alert_headline' => 'Alerta de Contraseña!', +'Login_Default_PWD' => 'La contraseña por defecto "123456" sigue activa.', ////////////////////////////////////////////////////////////////// // Device Page - Update by @TeroRERO 03ago2022 @@ -46,10 +59,11 @@ $lang['es_es'] = array( 'Navigation_Devices' => 'Dispositivos', 'Navigation_Presence' => 'Historial', 'Navigation_Events' => 'Eventos', +'Navigation_Network' => 'Red', +'Navigation_Plugins' => 'Plugins', 'Navigation_Maintenance' => 'Mantenimiento', 'Navigation_Settings' => 'Configuración', -'Navigation_Network' => 'Red', -'Navigation_HelpFAQ' => 'Ayuda / FAQ', +'Navigation_HelpFAQ' => 'Ayuda / Preguntas frecuentes', 'Device_Title' => 'Dispositivos', 'Device_Shortcut_AllDevices' => 'Todos', 'Device_Shortcut_Connected' => 'Conectado(s)', @@ -62,14 +76,21 @@ $lang['es_es'] = array( 'Device_TableHead_Name' => 'Nombre', 'Device_TableHead_Owner' => 'Propietario', 'Device_TableHead_Type' => 'Tipo', +'Device_TableHead_Icon' => 'Icon', +'Device_TableHead_RowID' => 'Row ID', +'Device_TableHead_Rowid' => 'Row ID', +'Device_TableHead_Parent_MAC' => 'Nodo principal de la MAC', +'Device_TableHead_Connected_Devices' => 'Dispositivos conectados', +'Device_TableHead_Location' => 'Ubicación', +'Device_TableHead_Vendor' => 'Fabricante', 'Device_TableHead_Favorite' => 'Favorito', 'Device_TableHead_Group' => 'Grupo', 'Device_TableHead_FirstSession' => '1ra. 
sesión', 'Device_TableHead_LastSession' => 'Última sesión', 'Device_TableHead_LastIP' => 'Última IP', 'Device_TableHead_MAC' => 'MAC', +'Device_TableHead_MAC_full' => 'MAC completa', 'Device_TableHead_LastIPOrder' => 'Última orden de IP', -'Device_TableHead_Rowid' => 'Rowid', 'Device_TableHead_Status' => 'Estado', 'Device_Searchbox' => 'Búsqueda', 'Device_Tablelenght' => 'Mostrar _MENU_ entradas', @@ -153,17 +174,26 @@ $lang['es_es'] = array( 'DevDetail_Tab_Sessions' => 'Sesiones', 'DevDetail_Tab_Presence' => 'Historial', 'DevDetail_Tab_Events' => 'Eventos', +'DevDetail_Tab_Pholus' => ' Pholus', +'DevDetail_Tab_PholusEmpty' => 'No se ha encontrado nada para este dispositivo con Pholus.', +'DevDetail_Tab_NmapTableHeader' => 'Resultados del escaneo programado', +'DevDetail_Tab_NmapTableText' => 'Establece la programación en los Ajustes', +'DevDetail_Tab_NmapEmpty' => 'Ningún puerto detectado en este dispositivo con Nmap.', 'DevDetail_MainInfo_Title' => 'Información principal', 'DevDetail_MainInfo_mac' => 'MAC', 'DevDetail_MainInfo_Name' => 'Nombre', 'DevDetail_MainInfo_Owner' => 'Propietario', 'DevDetail_MainInfo_Type' => 'Tipo', +'DevDetail_Icon' => 'Icono', +'DevDetail_Icon_Descr' => 'Enter a font awesome icon name without the fa- prefix or with complete class, e.g.: fa fa-brands fa-apple.', 'DevDetail_MainInfo_Vendor' => 'Proveedor', 'DevDetail_MainInfo_Favorite' => 'Favorito', 'DevDetail_MainInfo_Group' => 'Grupo', 'DevDetail_MainInfo_Location' => 'Ubicación', 'DevDetail_MainInfo_Comments' => 'Comentario', 'DevDetail_MainInfo_Network' => 'Hardware de Red (ID)', +'DevDetail_MainInfo_Network' => ' Nodo (MAC)', +'DevDetail_GoToNetworkNode' => 'Navegar a la página de Internet del nodo seleccionado.', 'DevDetail_MainInfo_Network_Port' => 'Puerto de Red HW', 'DevDetail_SessionInfo_Title' => 'Información de sesión', 'DevDetail_SessionInfo_Status' => 'Estado', @@ -183,7 +213,12 @@ $lang['es_es'] = array( 'DevDetail_EveandAl_ScanCycle_z' => 'No Escanear Dispositivo', 'DevDetail_button_Delete' => 'Eliminar dispositivo', 'DevDetail_button_Reset' => 'Restablecer cambios', +'DevDetail_button_DeleteEvents_Warning' => '¿Desea eliminar todos los eventos de este dispositivo?

(se eliminarán el Historial de eventos y las Sesiones, y puede ayudar en el caso de notificaciones constantes)',
+'DevDetail_button_Reset' => 'Restablecer cambios',
 'DevDetail_button_Save' => 'Guardar',
+'DevDetail_button_OverwriteIcons' => 'Sobreescribir iconos',
+'DevDetail_button_OverwriteIcons_Tooltip' => 'Sobreescribir los iconos de todos los dispositivos con el mismo tipo',
+'DevDetail_button_OverwriteIcons_Warning' => '¿Sobreescribir todos los iconos de todos los dispositivos con el mismo tipo que el dispositivo actual?',
 'DevDetail_SessionTable_Order' => 'Ordenar',
 'DevDetail_SessionTable_Connection' => 'Conexión',
 'DevDetail_SessionTable_Disconnection' => 'Desconexión',
@@ -199,12 +234,27 @@
 'DevDetail_Nmap_buttonDetail_text' => 'Escaneo detallado: escaneo predeterminado con detección de sistema operativo habilitado, detección de versiones, escaneo de script y traceroute (hasta 30 segundos o más)',
 'DevDetail_Nmap_buttonSkipDiscovery' => 'Omitir detección de host',
 'DevDetail_Nmap_buttonSkipDiscovery_text' => 'Omitir detección de host (-Pn opción): Escaneo predeterminado sin detección de host',
+'DevDetail_Nmap_resultsLink' => 'Puedes abandonar esta página después de empezar un escaneo. Los resultados también estarán disponibles en el archivo pialert_front.log.',
+'BackDevDetail_Actions_Title_Run' => 'Ejecutar acción',
+'BackDevDetail_Actions_Not_Registered' => 'Acción no registrada: ',
+'BackDevDetail_Actions_Ask_Run' => '¿Desea ejecutar la acción?',
+'BackDevDetail_Tools_WOL_okay' => 'El comando se ha ejecutado correctamente.',
+'BackDevDetail_Tools_WOL_error' => 'Ha ocurrido un error al ejecutar el comando.',
+'DevDetail_Tools_WOL_noti' => 'Wake-on-LAN',
+'DevDetail_Tools_WOL_noti_text' => 'El comando de Wake-on-LAN es enviado a la dirección de escucha. Si el dispositivo no está en la misma subred/vlan que Pi.Alert, el dispositivo no responderá.',
+'DevDetail_Tools_WOL' => 'Enviar comando WOL a ',
+'DevDetail_WOL_Title' => ' Wake-on-LAN',
+'DevDetail_Run_Actions_Title' => ' Ejecutar acción en el dispositivo',
+'DevDetail_Run_Actions_Tooltip' => 'Ejecutar la acción del desplegable sobre el dispositivo actual.',
 
 //////////////////////////////////////////////////////////////////
 // Maintenance Page - Update by @TeroRERO 07ago2022
 //////////////////////////////////////////////////////////////////
 'Maintenance_Title' => 'Herramientas de mantenimiento',
+'Maintenance_version' => 'Actualizaciones de la aplicación',
+'Maintenance_new_version' => '🆕 Una nueva versión está disponible. Comprueba las notas de lanzamiento.',
+'Maintenance_current_version' => 'No hay actualizaciones disponibles. 
Comprueba en que se está trabajando.', 'Maintenance_database_path' => 'Ruta de la base de datos:', 'Maintenance_database_size' => 'Tamaño de base de datos:', 'Maintenance_database_lastmod' => 'Última modificación:', @@ -214,10 +264,10 @@ $lang['es_es'] = array( 'Maintenance_arp_status' => 'Estado de escaneo:', 'Maintenance_arp_status_off' => 'está actualmente deshabilitado', 'Maintenance_arp_status_on' => 'escaneo(s) actualmente en ejecución', -'Maintenance_themeselector_lable' => 'Seleccionar Skin', -'Maintenance_themeselector_empty' => 'Elija un Skin', -'Maintenance_themeselector_text' => 'El cambio tiene lugar en el lado del servidor, por lo que afecta todos los dispositivos en uso.', +'Maintenance_themeselector_lable' => 'Seleccionar tema', +'Maintenance_themeselector_empty' => 'Elige un tema', 'Maintenance_themeselector_apply' => 'Aplicar', +'Maintenance_themeselector_text' => 'El cambio tiene lugar en el lado del servidor, por lo que afecta todos los dispositivos en uso.', 'Maintenance_lang_selector_lable' => 'Seleccione su idioma', 'Maintenance_lang_selector_empty' => 'Elija un idioma', 'Maintenance_lang_en_us' => 'English (US)', @@ -226,10 +276,15 @@ $lang['es_es'] = array( 'Maintenance_lang_selector_text' => 'El cambio tiene lugar en el lado del servidor, por lo que afecta todos los dispositivos en uso.', 'Maintenance_lang_selector_apply' => 'Aplicar', 'Maintenance_Tools_Tab_Settings' => 'Ajustes', -'Maintenance_Tools_Tab_Tools' => 'Tools', +'Maintenance_Tools_Tab_UISettings' => 'Ajustes de interfaz', +'Maintenance_Tools_Tab_Tools' => 'Herramientas', 'Maintenance_Tools_Tab_BackupRestore' => 'Respaldo / Restaurar', -'Maintenance_Tools_Tab_Logging' => 'Logs', +'Maintenance_Tools_Tab_Logging' => 'Registros', +'Maintenance_Tool_displayed_columns_text' => 'Cambia la visibilidad y el orden de las columnas en la página Dispositivos . (La función de coger y arrastrar funciona un poco mal, pero funciona. (Se intentó arreglar como por 3 horas - se agradecerían PRs para arreglarlo)).', +'Maintenance_Tool_order_columns_text' => '', 'Maintenance_Tool_darkmode' => 'Cambiar Modo (Dark/Light)', +'Maintenance_Tool_drag_me' => 'Coger para rearrastrar columnas.', +'Maintenance_Tool_check_visible' => 'Desactivar para ocultar columna.', 'Maintenance_Tool_darkmode_text' => 'Alternar entre el modo oscuro y el modo de luz. Si el interruptor no funciona correctamente, intente borrar el caché del navegador. El cambio tiene lugar en el lado del servidor, por lo que afecta todos los dispositivos en uso.', 'Maintenance_Tool_darkmode_noti' => 'Cambiar Modo', 'Maintenance_Tool_darkmode_noti_text' => 'Después del cambio de tema, la página intenta volver a cargar para activar el cambio. Si es necesario, el caché debe ser eliminado.', @@ -284,8 +339,6 @@ $lang['es_es'] = array( 'Maintenance_Tool_ImportCSV_text' => 'Antes de usar esta función, haga una copia de seguridad. Importe un archivo CSV (valor separado por comas) que contiene la lista de dispositivos, incluidas las relaciones de red entre nodos de red y dispositivos conectados. Para hacer eso, coloque el archivo CSV llamado devices.csv en su carpeta /config .', 'Maintenance_Tool_ImportCSV_noti' => 'Importación CSV', 'Maintenance_Tool_ImportCSV_noti_text' => '¿Está seguro de que quiere importar el archivo CSV? 
Esto sobrescribirá completamente los dispositivos de su base de datos.', -'Maintenance_Github_package_a' => 'La última versión de Pi.Alert (Fork leiweibau) se publicó en ', -'Maintenance_Github_package_b' => '', ////////////////////////////////////////////////////////////////// // Maintenance Page - Update by @TeroRERO 25jul2022 @@ -312,6 +365,10 @@ $lang['es_es'] = array( 'BackDevices_DBTools_Upgrade' => 'Base de datos actualizada correctamente', 'BackDevices_DBTools_UpgradeError' => 'Falló la actualización de la base de datos', 'BackDevices_DBTools_Purge' => 'Las copias de seguridad más antiguas fueron eliminadas', +'BackDevices_DBTools_ImportCSV' => 'Los dispositivos del archivo CSV han sido importados correctamente.', +'BackDevices_DBTools_ImportCSVError' => 'El archivo CSV no pudo ser importado. Asegúrate de que el formato es correcto.', +'BackDevices_DBTools_ImportCSVMissing' => 'El archivo CSV no se pudo encontrar en /config/devices.csv.', +'BackDevices_Device_UpdDevError' => 'Fallo al actualizar dispositivos, pruebe de nuevo más tarde. La base de datos probablemente esté bloqueada por una tarea en curso.', ////////////////////////////////////////////////////////////////// // Network Page - Update by @TeroRERO 01ago2022 @@ -320,6 +377,8 @@ $lang['es_es'] = array( 'Network_Title' => 'Descripción general de la red', 'Network_ManageDevices' => 'Administrar dispositivos', 'Network_ManageAdd' => 'Añadir dispositivo', +'Network_ManageAssign' => 'Asignar', +'Network_ManageUnassign' => 'Desasignar', 'Network_ManageEdit' => 'Actualizar dispositivo', 'Network_ManageDel' => 'Eliminar dispositivo', 'Network_ManageAdd_Name' => 'Nombre del dispositivo', @@ -344,6 +403,14 @@ $lang['es_es'] = array( 'Network_Table_State' => 'Estado', 'Network_Table_Hostname' => 'Nombre de host', 'Network_Table_IP' => 'Dirección IP', +'Network_UnassignedDevices' => 'Dispositivos sin asignar', +'Network_Assign' => 'Conectar al nodo de red', +'Network_Connected' => 'Dispositivos conectados', +'Network_ManageLeaf' => 'Gestionar asignación', +'Network_Node' => 'Nodo de red', +'Network_Node_Name' => 'Nombre de nodo', +'Network_Parent' => 'Dispositivo primario de la red', +'Network_NoAssignedDevices' => 'Este nodo de red no tiene asignado ningún dispositivo (nodo externo). Asigna uno de la lista o ve a la pestaña Detalles de cualquier dispositivo en la página Dispositivos, y asígnalo a un Nodo (MAC) Puerto de la red ahí.', ////////////////////////////////////////////////////////////////// // Help Page - Update by @TeroRERO 07ago2022 @@ -369,13 +436,13 @@ $lang['es_es'] = array( chmod -R 770 ~/pialert/db
          Si la base de datos sigue siendo de sólo lectura, intente reinstalar o restaurar una copia de seguridad de la base de datos desde la página de mantenimiento.', -'HelpFAQ_Cat_General_102docker_head' => '(🐳 Docker only) Database issues (AJAX errors, read-only, not found)', -'HelpFAQ_Cat_General_102docker_text' => 'Double-check you\'ve followed the dockerfile readme (most up-to-date info).

            -
          • Download the original DB from GitHub.
          • -
          • Map the pialert.db file ( not folder) from above to /home/pi/pialert/db/pialert.db (see Examples for details).
          • -
          • If facing issues (AJAX errors, can\'t write to DB, etc,) make sure permissions are set correctly, alternatively check the logs under /home/pi/pialert/front/log.
          • -
          • To solve permission issues you can also try to create a DB backup and then run a DB Restore via the Maintenance > Backup/Restore section.
          • -
          • If the database is in read-only mode you can solve this by setting the owner and group by executing the following command on the host system: docker exec pialert chown -R www-data:www-data /home/pi/pialert/db/pialert.db.
          • +'HelpFAQ_Cat_General_102docker_head' => '(🐳 Solo Docker) Problemas con la base de datos (errores de AJAX, solo lectura, no encontrado)', +'HelpFAQ_Cat_General_102docker_text' => 'Comprueba que has seguido las instrucciones del dockerfile (la información más actualizada).

              +
            • Descarga la base de datos original desde GitHub.
            • +
            • Mapea el archivo pialert.db ( no carpeta) de arriba a /home/pi/pialert/db/pialert.db (puedes comprobar los ejemplos para más detalles).
            • +
            • Si aparecen problemas (errores de AJAX, no se puede escribir a la base de datos, etc.), asegúrate de que los permisos estén establecidos correctamente. También puedes comprobar los registros en /home/pi/pialert/front/log.
            • +
            • Para arreglar los problemas de los permisos, puedes probar a crear una copia de seguridad de la base de datos y después restaurarla desde la sección Mantenimiento > Copia de seguridad/Restaurar.
            • +
            • Si la base de datos está en modo solo lectura, lo puedes arreglar ejecutando el siguiente comando para establecer el propietario y grupo en el sistema host: docker exec pialert chown -R www-data:www-data /home/pi/pialert/db/pialert.db.
            ', 'HelpFAQ_Cat_General_103_head' => 'La página de inicio de sesión no aparece, incluso después de cambiar la contraseña.', 'HelpFAQ_Cat_General_103_text' => 'Además de la contraseña, el archivo de configuración debe contener ~/pialert/config/pialert.conf @@ -412,196 +479,215 @@ $lang['es_es'] = array( puertos (agrupación de puertos), así como múltiples dispositivos a un puerto (máquinas virtuales).', ////////////////////////////////////////////////////////////////// -// Settings (based on work of https://github.com/mariorodriguezlopez/Pi.Alert/) +// Front end events ////////////////////////////////////////////////////////////////// -'API_settings_group' => ' API', +'test_event_tooltip' => 'Guarda tus cambios antes de probar nuevos ajustes.', +'test_event_icon' => 'fa-vial-circle-check', +'run_event_tooltip' => 'Activa el ajuste y guarda tus cambios antes de ejecutarlo.', +'run_event_icon' => 'fa-play', +'general_event_title' => 'Ejecutar un evento ad-hoc', +'general_event_description' => 'El evento que has ejecutado puede tardar un rato mientras finalizan procesos en segundo plano. La ejecución ha terminado cuando ves finalizado abajo. Comprueba el registro de error si no has obtenido el resultado esperado.

            Estado: ',
+
+//////////////////////////////////////////////////////////////////
+// Plugins
+//////////////////////////////////////////////////////////////////
+
+'Plugins_Unprocessed_Events' => 'Eventos sin procesar',
+'Plugins_Objects' => 'Objetos del Plugin',
+'Plugins_History' => 'Historial de eventos',
+
+//////////////////////////////////////////////////////////////////
+// Settings
+//////////////////////////////////////////////////////////////////
+
+'settings_missing' => 'Actualiza la página, no todos los ajustes se han cargado. Probablemente sea por una sobrecarga de la base de datos.',
+'settings_missing_block' => 'No puedes guardar los ajustes sin establecer todas las claves. Actualiza la página. Probablemente esté causado por una sobrecarga de la base de datos.',
+'settings_old' => 'Los ajustes mostrados en esta página están desactualizados. Probablemente sea por un escaneo en proceso. Los ajustes se guardan en el archivo pialert.conf, pero el proceso en segundo plano no los ha importado todavía a la base de datos. Puedes esperar a que los ajustes se actualicen para evitar sobrescribirlos con los ajustes antiguos. Si te da igual perder los ajustes desde la última vez que guardaste y ahora, siéntete libre de guardarlos de nuevo. También hay copias de seguridad creadas si necesitas comparar tus ajustes más tarde.',
+'settings_imported' => 'Última vez que los ajustes fueron importados desde el archivo pialert.conf:',
+'settings_expand_all' => 'Expandir todo',

// General
-
-'DAYS_TO_KEEP_EVENTS_description' => 'Esta es una configuración de mantenimiento. Esto especifica el número de días de entradas de eventos que se guardarán. Todos los eventos anteriores se eliminarán periódicamente.',
-'DAYS_TO_KEEP_EVENTS_name' => 'Eliminar eventos anteriores a',
-'PIALERT_WEB_PASSWORD_description' => 'La contraseña predeterminada es 123456. Para cambiar la contraseña, ejecute /home/pi/pialert/back/pialert-cli en el contenedor',
-'PIALERT_WEB_PASSWORD_name' => 'Contraseña de inicio de sesión',
-'PIALERT_WEB_PROTECTION_description' => 'Cuando está habilitado, se muestra un cuadro de diálogo de inicio de sesión. Lea detenidamente a continuación si se le bloquea el acceso a su instancia.',
-'PIALERT_WEB_PROTECTION_name' => 'Habilitar inicio de sesión',
-'REPORT_DASHBOARD_URL_description' => 'Esta URL se utiliza como base para generar enlaces en los correos electrónicos. Ingrese la URL completa que comienza con http://, incluido el número de puerto (sin barra inclinada al final /).',
-'REPORT_DASHBOARD_URL_name' => 'Pi.Alert URL',
-'REPORT_FROM_description' => 'Asunto del correo electrónico de notificación.',
-'REPORT_FROM_name' => 'Asunto del email',
-'REPORT_MAIL_description' => 'Si está habilitado, se envía un correo electrónico con una lista de cambios a los que se ha suscrito. Complete también todas las configuraciones restantes relacionadas con la configuración de SMTP a continuación',
-'REPORT_MAIL_name' => 'Habilitar email',
-'REPORT_TO_description' => 'Dirección de correo electrónico a la que se enviará la notificación.',
- 'REPORT_TO_name' => 'Enviar el email a',
- 'SCAN_CYCLE_MINUTES_description' => 'El retraso entre escaneos. Si usa arp-scan, el tiempo de escaneo en sí depende de la cantidad de direcciones IP para verificar. Esto está influenciado por la máscara de red configurada en la configuración SCAN_SUBNETS en la parte superior.
Cada IP toma un par de segundos para escanear.', - 'SCAN_CYCLE_MINUTES_name' => 'Retraso del ciclo de escaneo', - 'SCAN_SUBNETS_description' => 'El tiempo de escaneo arp en sí depende de la cantidad de direcciones IP para verificar. -El número de direcciones IP para comprobar depende de la máscara de red que establezca aquí. -Por ejemplo, una máscara /24 da como resultado 256 IP para verificar, mientras que /16 -controles de máscara alrededor de 65,536. Cada IP toma un par de segundos. Esto significa que con una configuración incorrecta -el arp-scan tardará horas en completarse en lugar de segundos. -
              -
            1. Especifique la máscara de red. Por ejemplo, el filtro 192.168.1.0/24 cubre los rangos de IP 192.168.1.0 a 192.168.1.255.
            2. -
            3. Ejecute ifconfig en su contenedor para encontrar los nombres de su interfaz (por ejemplo: eth0, eth1)
            4. -
- ',
+'General_display_name' => 'General',
+'General_icon' => '',
+'ENABLE_ARPSCAN_name' => 'Activar escaneo ARP',
+'ENABLE_ARPSCAN_description' => 'El escaneo ARP es una herramienta de la línea de comandos que usa el protocolo ARP para encontrar e identificar la IP de los dispositivos. Una alternativa a este escaneo sería activar los ajustes de integración con PiHole (PIHOLE_ACTIVE).',
'SCAN_SUBNETS_name' => 'Subredes para escanear',
-'TIMEZONE_description' => 'Zona horaria para mostrar las estadísticas correctamente. Encuentra tu zona horariaaquí.',
+'SCAN_SUBNETS_description' => 'El tiempo del escaneo ARP depende del número de IPs a comprobar, así que es importante establecer correctamente la máscara y la interfaz de red. Comprueba la documentación sobre subredes para obtener ayuda sobre cómo establecer VLANs, cuáles son soportadas o cómo averiguar la máscara y la interfaz de red.',
+'LOG_LEVEL_name' => 'Imprimir registros adicionales',
+'LOG_LEVEL_description' => 'Esto hará que el registro tenga más información. Útil para depurar qué eventos se van guardando en la base de datos.',
'TIMEZONE_name' => 'Zona horaria',
-'UI_LANG_description' => 'Seleccione el idioma de interfaz de usuario preferido.',
+'TIMEZONE_description' => 'La zona horaria para mostrar las estadísticas correctamente. Encuentra tu zona horaria aquí.',
+'ENABLE_PLUGINS_name' => 'Habilitar complementos',
+'ENABLE_PLUGINS_description' => 'Habilita la funcionalidad de los complementos. Cargar los complementos requiere más recursos de hardware, así que quizás quieras desactivarlo en hardware poco potente.',
+'PIALERT_WEB_PROTECTION_name' => 'Habilitar inicio de sesión',
+'PIALERT_WEB_PROTECTION_description' => 'Cuando está habilitado, se muestra un cuadro de diálogo de inicio de sesión. Lea detenidamente a continuación si se le bloquea el acceso a su instancia.',
+'PIALERT_WEB_PASSWORD_name' => 'Contraseña de inicio de sesión',
+'PIALERT_WEB_PASSWORD_description' => 'La contraseña predeterminada es 123456. Para cambiar la contraseña, ejecute /home/pi/pialert/back/pialert-cli en el contenedor',
+'INCLUDED_SECTIONS_name' => 'Notificar en',
+'INCLUDED_SECTIONS_description' => 'Especifica qué eventos envían notificaciones. Elimina los tipos de eventos de los que no quieras recibir notificaciones. Este ajuste sobrescribe los ajustes específicos de los dispositivos en la interfaz. (CTRL + Clic para seleccionar / deseleccionar).',
+'SCAN_CYCLE_MINUTES_name' => 'Retraso del ciclo de escaneo',
+'SCAN_CYCLE_MINUTES_description' => 'El retraso entre escaneos. Si usa arp-scan, el tiempo de escaneo en sí depende de la cantidad de direcciones IP para verificar. Esto está influenciado por la máscara de red configurada en la configuración SCAN_SUBNETS en la parte superior. Cada IP toma un par de segundos para escanear.',
+'DAYS_TO_KEEP_EVENTS_name' => 'Eliminar eventos anteriores a',
+'DAYS_TO_KEEP_EVENTS_description' => 'Esta es una configuración de mantenimiento. Esto especifica el número de días de entradas de eventos que se guardarán. Todos los eventos anteriores se eliminarán periódicamente.',
+'REPORT_DASHBOARD_URL_name' => 'URL de Pi.Alert',
+'REPORT_DASHBOARD_URL_description' => 'Esta URL se utiliza como base para generar enlaces en los correos electrónicos.
Ingrese la URL completa que comienza con http://, incluido el número de puerto (sin barra inclinada al final /).',
+'DIG_GET_IP_ARG_name' => 'Descubrir la IP de Internet',
+'DIG_GET_IP_ARG_description' => 'Cambie los argumentos de la utilidad de dig si tiene problemas para resolver su IP de Internet. Los argumentos se agregan al final del siguiente comando: dig +short .',
'UI_LANG_name' => 'Idioma de interfaz',
+'UI_LANG_description' => 'Seleccione el idioma de interfaz de usuario preferido.',
+'UI_PRESENCE_name' => 'Mostrar en el gráfico de presencia',
+'UI_PRESENCE_description' => 'Elige qué estados del dispositivo deben mostrarse en la gráfica de Presencia del dispositivo a lo largo del tiempo de la página de Dispositivos. (CTRL + Clic para seleccionar / deseleccionar)',

// Email
-
-'SMTP_FORCE_SSL_description' => 'Forzar SSL al conectarse a su servidor SMTP',
-'SMTP_FORCE_SSL_name' => 'Forzar SSL',
-'SMTP_PASS_description' => 'La contraseña del servidor SMTP.',
-'SMTP_PASS_name' => 'SMTP password',
-'SMTP_PORT_description' => 'Número de puerto utilizado para la conexión SMTP. Establézcalo en 0 si no desea utilizar un puerto al conectarse al servidor SMTP.',
-'SMTP_PORT_name' => 'SMTP server PORT',
+'Email_display_name' => 'Email',
+'Email_icon' => '',
+'REPORT_MAIL_name' => 'Habilitar email',
+'REPORT_MAIL_description' => 'Si está habilitado, se envía un correo electrónico con una lista de cambios a los que se ha suscrito. Complete también todas las configuraciones restantes relacionadas con la configuración de SMTP a continuación',
+'SMTP_SERVER_name' => 'URL del servidor SMTP',
'SMTP_SERVER_description' => 'La URL del host del servidor SMTP. Por ejemplo, smtp-relay.sendinblue.com. Para utilizar Gmail como servidor SMTP siga esta guía',
-'SMTP_SERVER_name' => 'SMTP server URL',
-'SMTP_SKIP_LOGIN_description' => 'No utilice la autenticación cuando se conecte al servidor SMTP.',
+'SMTP_PORT_name' => 'Puerto del servidor SMTP',
+'SMTP_PORT_description' => 'Número de puerto utilizado para la conexión SMTP.
Establézcalo en 0 si no desea utilizar un puerto al conectarse al servidor SMTP.',
'SMTP_SKIP_LOGIN_name' => 'Omitir autenticación',
-'SMTP_SKIP_TLS_description' => 'Deshabilite TLS cuando se conecte a su servidor SMTP.',
-'SMTP_SKIP_TLS_name' => 'No usar TLS',
+'SMTP_SKIP_LOGIN_description' => 'No utilice la autenticación cuando se conecte al servidor SMTP.',
+'SMTP_USER_name' => 'Nombre de usuario SMTP',
'SMTP_USER_description' => 'El nombre de usuario utilizado para iniciar sesión en el servidor SMTP (a veces, una dirección de correo electrónico completa).',
-'SMTP_USER_name' => 'SMTP user',
-
-//API
-
-'API_CUSTOM_SQL_description' => 'Puede especificar una consulta SQL personalizada que generará un archivo JSON y luego lo expondrá a través del archivo table_custom_endpoint.json.',
-'API_CUSTOM_SQL_name' => 'Endpoint personalizado',
-
-
+'SMTP_PASS_name' => 'Contraseña de SMTP',
+'SMTP_PASS_description' => 'La contraseña del servidor SMTP.',
+'SMTP_SKIP_TLS_name' => 'No usar TLS',
+'SMTP_SKIP_TLS_description' => 'Deshabilite TLS cuando se conecte a su servidor SMTP.',
+'SMTP_FORCE_SSL_name' => 'Forzar SSL',
+'SMTP_FORCE_SSL_description' => 'Forzar SSL al conectarse a su servidor SMTP',
+'REPORT_TO_name' => 'Enviar el email a',
+'REPORT_TO_description' => 'Dirección de correo electrónico a la que se enviará la notificación.',
+'REPORT_FROM_name' => 'Asunto del email',
+'REPORT_FROM_description' => 'Asunto del correo electrónico de notificación.',
+
+// Webhooks
+'Webhooks_display_name' => 'Webhooks',
+'Webhooks_icon' => '',
+'REPORT_WEBHOOK_name' => 'Habilitar webhooks',
+'REPORT_WEBHOOK_description' => 'Habilite webhooks para notificaciones. Los webhooks lo ayudan a conectarse a muchas herramientas de terceros, como IFTTT, Zapier o n8n, por nombrar algunas. Consulte esta sencilla guía de n8n aquí para comenzar. Si está habilitado, configure los ajustes relacionados a continuación.',
+'WEBHOOK_URL_name' => 'URL de destino',
+'WEBHOOK_URL_description' => 'La URL de destino comienza con http:// o https://.',
+'WEBHOOK_PAYLOAD_name' => 'Tipo de carga',
+'WEBHOOK_PAYLOAD_description' => 'El formato de datos de carga de Webhook para el atributo body > attachments > text en el json de carga. Vea un ejemplo de la carga aquí. (por ejemplo: para discord use html)',
+'WEBHOOK_REQUEST_METHOD_name' => 'Método de solicitud',
+'WEBHOOK_REQUEST_METHOD_description' => 'El método de solicitud HTTP que se utilizará para la llamada de webhook.',
+'Webhooks_settings_group' => ' Webhooks',
+
// Apprise
+'Apprise_display_name' => 'Apprise',
+'Apprise_icon' => '',
+'REPORT_APPRISE_name' => 'Habilitar Apprise',
+'REPORT_APPRISE_description' => 'Habilitar el envío de notificaciones a través de Apprise.',
'APPRISE_HOST_description' => 'Apprise host URL que comienza con http:// o https://. (no olvide incluir /notify al final)',
'APPRISE_HOST_name' => 'Apprise host URL',
-'APPRISE_PAYLOAD_description' => 'Seleccione el tipo de carga útil enviada a Apprise. Por ejemplo, html funciona bien con correos electrónicos, text con aplicaciones de chat, como Telegram.',
-'APPRISE_PAYLOAD_name' => 'Tipo de carga',
-'APPRISE_URL_description' => 'Informar de la URL de destino de la notificación.
Por ejemplo, para Telegram sería tgram://{bot_token}/{chat_id}.', 'APPRISE_URL_name' => 'URL de notificación de Apprise', - -// Pushsafer -'REPORT_PUSHSAFER_description' => 'Habilitar el envío de notificaciones a través de Pushsafer.', -'REPORT_PUSHSAFER_name' => 'Habilitar Pushsafer', - - -//DYNDNS - - -'DDNS_ACTIVE_name' => 'Habilitar DynDNS', -'DDNS_DOMAIN_name' => 'URL del dominio DynDNS', -'DDNS_PASSWORD_name' => 'DynDNS password', -'DDNS_UPDATE_URL_description' => 'Actualice la URL que comienza con http:// o https://.', -'DDNS_UPDATE_URL_name' => 'DynDNS update URL', -'DDNS_USER_name' => 'DynDNS user', -'DHCP_ACTIVE_description' => 'Debe asignar :/etc/pihole/dhcp.leases en el archivo docker-compose.yml si habilita esta configuración.', -'DHCP_ACTIVE_name' => 'Habilitar PiHole DHCP', -'DIG_GET_IP_ARG_description' => 'Cambie los argumentos de la utilidad de dig si tiene problemas para resolver su IP de Internet. Los argumentos se agregan al final del siguiente comando: dig +short .', -'DIG_GET_IP_ARG_name' => 'Descubrir de IP de Internet', - -// MQTT -'REPORT_MQTT_description' => 'Habilitar el envío de notificaciones a través de MQTT a su Home Assistance.', -'REPORT_MQTT_name' => 'Habilitar MQTT', -'MQTT_BROKER_description' => 'URL del host MQTT (no incluya http:// o https://).', -'MQTT_BROKER_name' => 'MQTT broker URL', -'MQTT_DELAY_SEC_description' => 'Un pequeño truco: retrase la adición a la cola en caso de que el proceso se reinicie y los procesos de publicación anteriores se anulen (se necesitan ~2s para actualizar la configuración de un sensor en el intermediario). Probado con 2-3 segundos de retraso. Este retraso solo se aplica cuando se crean dispositivos (durante el primer bucle de notificación). No afecta los escaneos o notificaciones posteriores.', -'MQTT_DELAY_SEC_name' => 'Retraso de MQTT por dispositivo', -'MQTT_PASSWORD_description' => 'Contraseña utilizada para iniciar sesión en su instancia de agente de MQTT.', -'MQTT_PASSWORD_name' => 'MQTT password', -'MQTT_PORT_description' => 'Puerto donde escucha el broker MQTT. Normalmente 1883.', -'MQTT_PORT_name' => 'MQTT broker puerto', -'MQTT_QOS_description' => 'Configuración de calidad de servicio para el envío de mensajes MQTT. 0: baja calidad a 2: alta calidad. Cuanto mayor sea la calidad, mayor será el retraso.', -'MQTT_QOS_name' => 'Calidad de servicio MQTT', -'MQTT_USER_description' => 'Nombre de usuario utilizado para iniciar sesión en su instancia de agente de MQTT.', -'MQTT_USER_name' => 'MQTT user', -'MQTT_settings_group' => ' MQTT', - -// NMAP - -'NMAP_ACTIVE_description' => 'Si está habilitado, ejecutará un escaneo en un dispositivo recién encontrado. Para un análisis programado o único, verifique la configuración de NMAP_RUN.', -'NMAP_ACTIVE_name' => 'Ejecución del ciclo', -'NMAP_ARGS_description' => 'Argumentos utilizados para ejecutar el análisis de Nmap. Tenga cuidado de especificar los argumentos correctamente. Por ejemplo, -p -10000 escanea los puertos del 1 al 10000.', -'NMAP_ARGS_name' => 'Argumentos', -'NMAP_RUN_SCHD_description' => 'Solo está habilitado si selecciona programar en la configuración de NMAP_RUN. Asegúrese de ingresar el cronograma en el formato tipo cron correcto.', -'NMAP_RUN_SCHD_name' => 'Programar', -'NMAP_RUN_description' => 'Habilite un escaneo regular de Nmap en su red en todos los dispositivos. Los ajustes de programación se pueden encontrar a continuación. 
Si selecciona una vez, Nmap se ejecuta solo una vez al inicio durante el tiempo especificado en la configuración de NMAP_TIMEOUT.',
-'NMAP_RUN_name' => 'Ejecución programada',
-'NMAP_TIMEOUT_description' => 'Tiempo máximo en segundos para esperar a que finalice un escaneo de Nmap en cualquier dispositivo.',

// NTFY
-'REPORT_NTFY_description' => 'Habilitar el envío de notificaciones a través de NTFY.',
+'NTFY_display_name' => 'NTFY',
+'NTFY_icon' => '',
'REPORT_NTFY_name' => 'Habilitar NTFY',
+'REPORT_NTFY_description' => 'Habilitar el envío de notificaciones a través de NTFY.',
-'NTFY_HOST_description' => 'URL de host NTFY que comienza con http:// o https://. Puede usar la instancia alojada en https://ntfy.sh simplemente ingresando https://ntfy. sh.',
'NTFY_HOST_name' => 'NTFY host URL',
-'NTFY_PASSWORD_description' => 'Ingrese la contraseña si necesita (host) una instancia con autenticación habilitada.',
-'NTFY_PASSWORD_name' => 'NTFY password',
-'NTFY_TOPIC_name' => 'NTFY topic',
+'NTFY_HOST_description' => 'URL de host NTFY que comienza con http:// o https://. Puede usar la instancia alojada en https://ntfy.sh simplemente ingresando https://ntfy.sh.',
+'NTFY_TOPIC_name' => 'Tema de NTFY',
+'NTFY_TOPIC_description' => 'Tu tema secreto.',
+'NTFY_USER_name' => 'Usuario de NTFY',
'NTFY_USER_description' => 'Ingrese usuario si necesita (alojar) una instancia con autenticación habilitada.',
-'NTFY_USER_name' => 'NTFY user',
-'NTFY_settings_group' => ' NTFY',
+'NTFY_PASSWORD_name' => 'Contraseña de NTFY',
+'NTFY_PASSWORD_description' => 'Ingrese la contraseña si necesita (host) una instancia con autenticación habilitada.',

// Pholus
-'Pholus_settings_group' => ' Pholus',
-'PHOLUS_ACTIVE_description' => 'Pholus es una herramienta de rastreo para descubrir información adicional sobre los dispositivos en la red, incluido el nombre del dispositivo. Si está habilitado, ejecutará el escaneo antes de cada ciclo de escaneo de red hasta que no haya dispositivos (unknown) o (name not found). Tenga en cuenta que puede enviar spam a la red con tráfico innecesario. Depende de la configuración de SCAN_SUBNETS. Para un análisis programado o único, verifique la configuración de PHOLUS_RUN.',
-'PHOLUS_ACTIVE_name' => 'Ejecución del ciclo',
-'PHOLUS_DAYS_DATA_description' => 'Cuántos días de entradas de escaneo de Pholus deben conservarse (globalmente, ¡no específico del dispositivo!). El archivo pialert_pholus.log no se modifica. Introduzca 0 para desactivar.',
-'PHOLUS_DAYS_DATA_name' => 'Retención de datos',
-'PHOLUS_FORCE_description' => 'Fuerce el escaneo de cada escaneo de red, incluso si no hay dispositivos (unknown) o (name not found).
Tenga cuidado al habilitar esto, ya que la detección puede inundar fácilmente su red.',
-'PHOLUS_FORCE_name' => 'Escaneo de fuerza de ciclo',
-'PHOLUS_RUN_SCHD_description' => 'Solo está habilitado si selecciona programar en la configuración de PHOLUS_RUN. Asegúrese de ingresar el horario en el formato similar a cron correcto
-(por ejemplo, validar en crontab.guru). Por ejemplo, ingresar 0 4 * * * ejecutará el escaneo después de las 4 am en el TIMEZONE que configuró arriba. Se ejecutará la PRÓXIMA vez que pase el tiempo.',
-'PHOLUS_RUN_SCHD_name' => 'Programar',
-'PHOLUS_RUN_TIMEOUT_description' => 'El tiempo de espera en segundos para el escaneo Pholus programado. Se aplican las mismas notas con respecto a la duración que en la configuración de PHOLUS_TIMEOUT. Un escaneo programado no verifica si hay dispositivos (unknown) o (name not found), el escaneo se ejecuta de cualquier manera.',
-'PHOLUS_RUN_TIMEOUT_name' => 'Tiempo de espera de ejecución programado',
-'PHOLUS_RUN_description' => 'Habilite un escaneo regular de Pholus en su red. Los ajustes de programación se pueden encontrar a continuación. Si selecciona una vez, Pholus se ejecuta solo una vez al inicio durante el tiempo especificado en la configuración de PHOLUS_RUN_TIMEOUT.',
-'PHOLUS_RUN_name' => 'Ejecución programada',
-'PHOLUS_TIMEOUT_description' => '¿Cuánto tiempo en segundos debe rastrear Pholus en cada interfaz si se cumple la condición anterior? Cuanto más tiempo lo deje encendido, es más probable que los dispositivos transmitan más información. Este tiempo de espera se suma al tiempo que lleva realizar un escaneo arp en su red.',
-'PHOLUS_TIMEOUT_name' => 'Tiempo de espera de ciclo',

// Pushsafer
+'PUSHSAFER_display_name' => 'Pushsafer',
+'PUSHSAFER_icon' => '',
+'REPORT_PUSHSAFER_name' => 'Habilitar Pushsafer',
+'REPORT_PUSHSAFER_description' => 'Habilitar el envío de notificaciones a través de Pushsafer.',
+'PUSHSAFER_TOKEN_name' => 'Token de Pushsafer',
+'PUSHSAFER_TOKEN_description' => 'Su clave secreta de la API de Pushsafer (token).',
+'APPRISE_PAYLOAD_name' => 'Tipo de carga',
+'APPRISE_PAYLOAD_description' => 'Seleccione el tipo de carga útil enviada a Apprise. Por ejemplo, html funciona bien con correos electrónicos, text con aplicaciones de chat, como Telegram.',

// MQTT
+'MQTT_display_name' => 'MQTT',
+'MQTT_icon' => '',
+'REPORT_MQTT_name' => 'Habilitar MQTT',
+'REPORT_MQTT_description' => 'Habilitar el envío de notificaciones a través de MQTT a su Home Assistant.',
+'MQTT_BROKER_name' => 'URL del broker MQTT',
+'MQTT_BROKER_description' => 'URL del host MQTT (no incluya http:// o https://).',
+'MQTT_PORT_name' => 'Puerto del broker MQTT',
+'MQTT_PORT_description' => 'Puerto donde escucha el broker MQTT. Normalmente 1883.',
+'MQTT_USER_name' => 'Usuario de MQTT',
+'MQTT_USER_description' => 'Nombre de usuario utilizado para iniciar sesión en su instancia de agente de MQTT.',
+'MQTT_PASSWORD_name' => 'Contraseña de MQTT',
+'MQTT_PASSWORD_description' => 'Contraseña utilizada para iniciar sesión en su instancia de agente de MQTT.',
+'MQTT_QOS_name' => 'Calidad de servicio MQTT',
+'MQTT_QOS_description' => 'Configuración de calidad de servicio para el envío de mensajes MQTT. 0: baja calidad a 2: alta calidad. Cuanto mayor sea la calidad, mayor será el retraso.',
+'MQTT_DELAY_SEC_name' => 'Retraso de MQTT por dispositivo',
+'MQTT_DELAY_SEC_description' => 'Un pequeño truco: retrase la adición a la cola en caso de que el proceso se reinicie y los procesos de publicación anteriores se anulen (se necesitan ~2s para actualizar la configuración de un sensor en el intermediario). Probado con 2-3 segundos de retraso. Este retraso solo se aplica cuando se crean dispositivos (durante el primer bucle de notificación).
No afecta los escaneos o notificaciones posteriores.', + +//DYNDNS +'DynDNS_display_name' => 'DynDNS', +'DynDNS_icon' => '', +'DDNS_ACTIVE_name' => 'Habilitar DynDNS', +'DDNS_ACTIVE_description' => '', +'DDNS_DOMAIN_name' => 'URL del dominio DynDNS', +'DDNS_DOMAIN_description' => '', +'DDNS_USER_name' => 'Usuario de DynDNS', +'DDNS_USER_description' => '', +'DDNS_PASSWORD_name' => 'Contraseña de DynDNS', +'DDNS_PASSWORD_description' => '', +'DDNS_UPDATE_URL_name' => 'URL de actualización de DynDNS', +'DDNS_UPDATE_URL_description' => 'Actualice la URL que comienza con http:// o https://.', // PiHole - -'PiHole_settings_group' => ' PiHole', -'PIHOLE_ACTIVE_description' => 'Debe mapear :/etc/pihole/pihole-FTL.db en el archivo docker-compose.yml si habilita esta configuración.', +'PiHole_display_name' => 'PiHole', +'PiHole_icon' => '', 'PIHOLE_ACTIVE_name' => 'Habilitar el mapeo de PiHole', -'PRINT_LOG_description' => 'Esta configuración habilitará un registro más detallado. Útil para depurar eventos que se escriben en la base de datos.', -'PRINT_LOG_name' => 'Imprimir registro adicional', -'PUSHSAFER_TOKEN_description' => 'Su clave secreta de la API de Pushsafer (token).', -'PUSHSAFER_TOKEN_name' => 'Pushsafer token', -'PUSHSAFER_settings_group' => ' Pushsafer', +'PIHOLE_ACTIVE_description' => 'Debe mapear :/etc/pihole/pihole-FTL.db en el archivo docker-compose.yml si habilita esta configuración.', +'DHCP_ACTIVE_name' => 'Habilitar PiHole DHCP', +'DHCP_ACTIVE_description' => 'Debe asignar :/etc/pihole/dhcp.leases en el archivo docker-compose.yml si habilita esta configuración.', -//Apprise +// Pholus +'Pholus_display_name' => 'Pholus', +'Pholus_icon' => '', +'PHOLUS_ACTIVE_name' => 'Ejecución del ciclo', +'PHOLUS_ACTIVE_description' => 'Pholus es una herramienta de rastreo para descubrir información adicional sobre los dispositivos en la red, incluido el nombre del dispositivo. Si está habilitado, ejecutará el escaneo antes de cada ciclo de escaneo de red hasta que no haya dispositivos (unknown) o (name not found). Tenga en cuenta que puede enviar spam a la red con tráfico innecesario. Depende de la configuración de SCAN_SUBNETS. Para un análisis programado o único, verifique la configuración de PHOLUS_RUN.', +'PHOLUS_TIMEOUT_name' => 'Tiempo de espera de ciclo', +'PHOLUS_TIMEOUT_description' => '¿Cuánto tiempo en segundos debe rastrear Pholus en cada interfaz si se cumple la condición anterior? Cuanto más tiempo lo deje encendido, es más probable que los dispositivos transmitan más información. Este tiempo de espera se suma al tiempo que lleva realizar un escaneo arp en su red.', +'PHOLUS_FORCE_name' => 'Escaneo de fuerza de ciclo', +'PHOLUS_FORCE_description' => 'Fuerce el escaneo de cada escaneo de red, incluso si no hay dispositivos (unknown) o (name not found). Tenga cuidado al habilitar esto, ya que la detección puede inundar fácilmente su red.', +'PHOLUS_RUN_name' => 'Ejecución programada', +'PHOLUS_RUN_description' => 'Habilite un escaneo regular de Pholus en su red. Los ajustes de programación se pueden encontrar a continuación. Si selecciona una vez, Pholus se ejecuta solo una vez al inicio durante el tiempo especificado en la configuración de PHOLUS_RUN_TIMEOUT.', +'PHOLUS_RUN_TIMEOUT_name' => 'Tiempo de espera de ejecución programado', +'PHOLUS_RUN_TIMEOUT_description' => 'El tiempo de espera en segundos para el escaneo Pholus programado. Se aplican las mismas notas con respecto a la duración que en la configuración de PHOLUS_TIMEOUT. 
Un escaneo programado no verifica si hay dispositivos (unknown) o (name not found), el escaneo se ejecuta de cualquier manera.',
+'PHOLUS_RUN_SCHD_name' => 'Programar',
+'PHOLUS_RUN_SCHD_description' => 'Solo está habilitado si selecciona programar en la configuración de PHOLUS_RUN. Asegúrese de ingresar el horario en el formato similar a cron correcto
(por ejemplo, validar en crontab.guru). Por ejemplo, ingresar 0 4 * * * ejecutará el escaneo después de las 4 am en el TIMEZONE que configuró arriba. Se ejecutará la PRÓXIMA vez que pase el tiempo.',
+'PHOLUS_DAYS_DATA_name' => 'Retención de datos',
+'PHOLUS_DAYS_DATA_description' => 'Cuántos días de entradas de escaneo de Pholus deben conservarse (globalmente, ¡no específico del dispositivo!). El archivo pialert_pholus.log no se modifica. Introduzca 0 para desactivar.',

// NMAP
+'Nmap_display_name' => 'Nmap',
+'Nmap_icon' => '',
+'NMAP_ACTIVE_name' => 'Ejecución del ciclo',
+'NMAP_ACTIVE_description' => 'Si está habilitado, ejecutará un escaneo en un dispositivo recién encontrado. Para un análisis programado o único, verifique la configuración de NMAP_RUN.',
+'NMAP_TIMEOUT_name' => 'Tiempo de espera de ciclo',
+'NMAP_TIMEOUT_description' => 'Tiempo máximo en segundos para esperar a que finalice un escaneo de Nmap en cualquier dispositivo.',
+'NMAP_RUN_name' => 'Ejecución programada',
+'NMAP_RUN_description' => 'Habilite un escaneo regular de Nmap en su red en todos los dispositivos. Los ajustes de programación se pueden encontrar a continuación. Si selecciona una vez, Nmap se ejecuta solo una vez al inicio durante el tiempo especificado en la configuración de NMAP_TIMEOUT.',
+'NMAP_RUN_SCHD_name' => 'Programar',
+'NMAP_RUN_SCHD_description' => 'Solo está habilitado si selecciona programar en la configuración de NMAP_RUN. Asegúrese de ingresar el cronograma en el formato tipo cron correcto.',
+'NMAP_ARGS_name' => 'Argumentos',
+'NMAP_ARGS_description' => 'Argumentos utilizados para ejecutar el análisis de Nmap. Tenga cuidado de especificar los argumentos correctamente. Por ejemplo, -p -10000 escanea los puertos del 1 al 10000.',

// Webhooks
-'REPORT_WEBHOOK_description' => 'Habilite webhooks para notificaciones. Los webhooks lo ayudan a conectarse a muchas herramientas de terceros, como IFTTT, Zapier o n8n, por nombrar algunas. Consulte esta sencilla guía de n8n aquí para obtener comenzó. Si está habilitado, configure los ajustes relacionados a continuación.',
-'REPORT_WEBHOOK_name' => 'Habilitar webhooks',
-'WEBHOOK_PAYLOAD_description' => 'El formato de datos de carga de Webhook para el atributo body > attachments > text en el json de carga. Vea un ejemplo de la carga aquí.
(por ejemplo: para discord use html)', -'WEBHOOK_PAYLOAD_name' => 'Tipo de carga', -'WEBHOOK_REQUEST_METHOD_description' => 'El método de solicitud HTTP que se utilizará para la llamada de webhook.', -'WEBHOOK_REQUEST_METHOD_name' => 'Método de solicitud', -'WEBHOOK_URL_description' => 'URL de destino comienza con http:// o https://.', -'WEBHOOK_URL_name' => 'URL de destino', -'Webhooks_settings_group' => ' Webhooks', +//API +'API_display_name' => 'API', +'API_icon' => '', +'API_CUSTOM_SQL_name' => 'Endpoint personalizado', +'API_CUSTOM_SQL_description' => 'Puede especificar una consulta SQL personalizada que generará un archivo JSON y luego lo expondrá a través del archivo table_custom_endpoint.json.', -// Other - -'general_event_description' => 'El evento que ha activado puede tardar un tiempo hasta que finalicen los procesos en segundo plano. La ejecución terminó una vez que vea finished a continuación. Consulte el registro de errores si no obtuvo el resultado esperado.

            Estado:', -'general_event_title' => 'Ejecución de un evento ad-hoc', -'run_event_icon' => 'fa-play', -'run_event_tooltip' => 'Habilite la configuración y guarde sus cambios al principio antes de ejecutarlo.', -'settings_expand_all' => 'Expandir todo', -'settings_imported' => 'La última vez que se importó la configuración desde el archivo pialert.conf:', -'settings_missing' => 'No se han cargado todos los ajustes, actualice la página. Esto probablemente se deba a una gran carga en la base de datos.', -'settings_missing_block' => 'No puede guardar su configuración sin especificar todas las claves de configuración. Recarga la página. Esto probablemente se deba a una gran carga en la base de datos.', -'settings_old' => 'La configuración en la base de datos (que se muestra en esta página) está desactualizada. Esto probablemente se deba a un análisis en ejecución. La configuración se guardó en el archivo pialert.conf, pero el proceso en segundo plano aún no tuvo tiempo de importarlo a la base de datos. Puede esperar hasta que la configuración se actualice para no sobrescribir sus valores anteriores. Siéntase libre de guardar su configuración de cualquier manera si no le importa perder la configuración entre la última vez que guardó y ahora. También se crean archivos de respaldo si necesita comparar su configuración más adelante.', -'test_event_icon' => 'fa-vial-circle-check', -'test_event_tooltip' => 'Guarde sus cambios antes de probar su configuración.', - ); -?> +?> \ No newline at end of file From df499ea33c1c53607058241af4090bfb649e7c6c Mon Sep 17 00:00:00 2001 From: jokob-sk <96159884+jokob-sk@users.noreply.github.com> Date: Sat, 3 Jun 2023 09:04:09 +1000 Subject: [PATCH 20/20] PR work --- .gitignore | 3 +-- dockerfiles/start.sh | 2 +- front/buildtimestamp.txt | 1 - 3 files changed, 2 insertions(+), 4 deletions(-) diff --git a/.gitignore b/.gitignore index a785a3b4..f908738e 100755 --- a/.gitignore +++ b/.gitignore @@ -9,5 +9,4 @@ front/plugins/**/*.log __pycache__/ *.py[cod] -*$py.class -db/pialert.db \ No newline at end of file +*$py.class \ No newline at end of file diff --git a/dockerfiles/start.sh b/dockerfiles/start.sh index 72c09e9b..70600fce 100755 --- a/dockerfiles/start.sh +++ b/dockerfiles/start.sh @@ -29,5 +29,5 @@ chmod -R a+rw /home/pi/pialert/config # cron -f #python /home/pi/pialert/back/pialert.py -echo "[DEBUG] DATA MONKEY VERSION ..." +# echo "[DEBUG] DATA MONKEY VERSION ..." python /home/pi/pialert/pialert/ diff --git a/front/buildtimestamp.txt b/front/buildtimestamp.txt index 00445f81..e69de29b 100644 --- a/front/buildtimestamp.txt +++ b/front/buildtimestamp.txt @@ -1 +0,0 @@ -1684563510
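Note: the dockerfiles/start.sh change in PATCH 20/20 launches the application by passing a directory to the interpreter (python /home/pi/pialert/pialert/). Python only accepts a directory as a script argument when that directory contains a __main__.py, which it executes with the directory prepended to sys.path. A minimal sketch of such a file follows; the main() entry function is an illustrative assumption, not something these patches confirm:

    """ Hypothetical pialert/__main__.py so `python /home/pi/pialert/pialert/` starts the app. """
    # Running `python <dir>/` executes <dir>/__main__.py with <dir> on sys.path,
    # so sibling modules inside the package directory are importable directly.
    from pialert import main  # assumed entry function; adjust to the real name

    if __name__ == '__main__':
        main()  # start the scan-and-notify loop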