diff --git a/db/pialert.db b/db/pialert.db index 16f9e46f..62369780 100755 Binary files a/db/pialert.db and b/db/pialert.db differ diff --git a/pialert/conf.py b/pialert/conf.py index 3b3b7f3c..46cdcfec 100644 --- a/pialert/conf.py +++ b/pialert/conf.py @@ -4,7 +4,8 @@ mySettings = [] debug_force_notification = False cycle = 1 userSubnets = [] -mySchedules = [] +mySchedules = [] # bad solution for global - TO-DO +plugins = [] # bad solution for global - TO-DO # General ENABLE_ARPSCAN = True diff --git a/pialert/database.py b/pialert/database.py index 0ba19b89..a70e134a 100644 --- a/pialert/database.py +++ b/pialert/database.py @@ -4,8 +4,11 @@ import sqlite3 # pialert modules from const import fullDbPath + from logger import mylog -from helper import initOrSetParam, json_struc, row_to_json, timeNow +from helper import json_struc, initOrSetParam, row_to_json, timeNow #, updateState + + #=============================================================================== @@ -91,7 +94,7 @@ class DB(): #=============================================================================== def cleanup_database (self, startTime, DAYS_TO_KEEP_EVENTS, PHOLUS_DAYS_DATA): # Header - updateState(self,"Upkeep: Clean DB") + #updateState(self,"Upkeep: Clean DB") mylog('verbose', ['[', startTime, '] Upkeep Database:' ]) # Cleanup Online History @@ -145,14 +148,7 @@ class DB(): -#------------------------------------------------------------------------------- -def updateState(db, newState): - #sql = db.sql - mylog('debug', ' [updateState] changing state to: "' + newState +'"') - db.sql.execute ("UPDATE Parameters SET par_Value='"+ newState +"' WHERE par_ID='Back_App_State'") - - db.commitDB() #------------------------------------------------------------------------------- def get_table_as_json(db, sqlQuery): @@ -464,4 +460,5 @@ def insertOnlineHistory(db, cycle): sql.execute ("INSERT INTO Online_History (Scan_Date, Online_Devices, Down_Devices, All_Devices, Archived_Devices) "+ "VALUES ( ?, ?, 
?, ?, ?)", (startTime, History_Online_Devices, History_Offline_Devices, History_All_Devices, History_Archived_Devices ) ) - db.commit() \ No newline at end of file + db.commit() + diff --git a/pialert/device.py b/pialert/device.py index 5888ed46..2a55a3fa 100644 --- a/pialert/device.py +++ b/pialert/device.py @@ -4,12 +4,12 @@ import subprocess -from pialert.conf import PHOLUS_ACTIVE, PHOLUS_FORCE, PHOLUS_TIMEOUT, cycle, DIG_GET_IP_ARG, userSubnets -from pialert.helper import timeNow -from pialert.internet import check_IP_format, get_internet_IP -from pialert.logger import mylog, print_log -from pialert.mac_vendor import query_MAC_vendor -from pialert.pholusscan import performPholusScan, resolve_device_name_pholus +from conf import PHOLUS_ACTIVE, PHOLUS_FORCE, PHOLUS_TIMEOUT, cycle, DIG_GET_IP_ARG, userSubnets +from helper import timeNow +from internet import check_IP_format, get_internet_IP +from logger import mylog, print_log +from mac_vendor import query_MAC_vendor +from pholusscan import performPholusScan, resolve_device_name_pholus #------------------------------------------------------------------------------- diff --git a/pialert/helper.py b/pialert/helper.py index ba5bffa8..18886cd6 100644 --- a/pialert/helper.py +++ b/pialert/helper.py @@ -16,11 +16,10 @@ import requests from const import * -from logger import mylog, logResult, print_log from conf import tz -from files import write_file +from logger import mylog, logResult, print_log # from api import update_api # to avoid circular reference -from plugin import get_plugins_configs, get_setting, print_plugin_info + #------------------------------------------------------------------------------- @@ -28,6 +27,14 @@ def timeNow(): return datetime.datetime.now().replace(microsecond=0) #------------------------------------------------------------------------------- +def updateState(db, newState): + #sql = db.sql + + mylog('debug', ' [updateState] changing state to: "' + newState +'"') + db.sql.execute ("UPDATE 
Parameters SET par_Value='"+ newState +"' WHERE par_ID='Back_App_State'") + + db.commitDB() +#------------------------------------------------------------------------------- def updateSubnets(SCAN_SUBNETS): # remove old list @@ -184,271 +191,11 @@ def collect_lang_strings(db, json, pref): import_language_string(db, language_string["language_code"], pref + "_" + prop, language_string["string"]) -#------------------------------------------------------------------------------- -def initOrSetParam(db, parID, parValue): - sql_connection = db.sql_connection - sql = db.sql - - sql.execute ("INSERT INTO Parameters(par_ID, par_Value) VALUES('"+str(parID)+"', '"+str(parValue)+"') ON CONFLICT(par_ID) DO UPDATE SET par_Value='"+str(parValue)+"' where par_ID='"+str(parID)+"'") - - db.commitDB() - - -#=============================================================================== -# Initialise user defined values -#=============================================================================== -# We need access to the DB to save new values so need to define DB access methods first -#------------------------------------------------------------------------------- - -#------------------------------------------------------------------------------- -# Import user values -# Check config dictionary -def ccd(key, default, config, name, inputtype, options, group, events=[], desc = "", regex = ""): - result = default - - # use existing value if already supplied, otherwise default value is used - if key in config: - result = config[key] - - if inputtype == 'text': - result = result.replace('\'', "{s-quote}") - - mySettingsSQLsafe.append((key, name, desc, inputtype, options, regex, str(result), group, str(events))) - mySettings.append((key, name, desc, inputtype, options, regex, result, group, str(events))) - - return result -#------------------------------------------------------------------------------- - -def importConfigs (db): - - sql = db.sql - - # Specify globals so they can be 
overwritten with the new config - global lastTimeImported, mySettings, mySettingsSQLsafe, plugins, plugins_once_run - lastTimeImported = 0 - # General - global ENABLE_ARPSCAN, SCAN_SUBNETS, LOG_LEVEL, TIMEZONE, ENABLE_PLUGINS, PIALERT_WEB_PROTECTION, PIALERT_WEB_PASSWORD, INCLUDED_SECTIONS, SCAN_CYCLE_MINUTES, DAYS_TO_KEEP_EVENTS, REPORT_DASHBOARD_URL, DIG_GET_IP_ARG, UI_LANG - # Email - global REPORT_MAIL, SMTP_SERVER, SMTP_PORT, REPORT_TO, REPORT_FROM, SMTP_SKIP_LOGIN, SMTP_USER, SMTP_PASS, SMTP_SKIP_TLS, SMTP_FORCE_SSL - # Webhooks - global REPORT_WEBHOOK, WEBHOOK_URL, WEBHOOK_PAYLOAD, WEBHOOK_REQUEST_METHOD - # Apprise - global REPORT_APPRISE, APPRISE_HOST, APPRISE_URL, APPRISE_PAYLOAD - # NTFY - global REPORT_NTFY, NTFY_HOST, NTFY_TOPIC, NTFY_USER, NTFY_PASSWORD - # PUSHSAFER - global REPORT_PUSHSAFER, PUSHSAFER_TOKEN - # MQTT - global REPORT_MQTT, MQTT_BROKER, MQTT_PORT, MQTT_USER, MQTT_PASSWORD, MQTT_QOS, MQTT_DELAY_SEC - # DynDNS - global DDNS_ACTIVE, DDNS_DOMAIN, DDNS_USER, DDNS_PASSWORD, DDNS_UPDATE_URL - # PiHole - global PIHOLE_ACTIVE, DHCP_ACTIVE - # Pholus - global PHOLUS_ACTIVE, PHOLUS_TIMEOUT, PHOLUS_FORCE, PHOLUS_DAYS_DATA, PHOLUS_RUN, PHOLUS_RUN_SCHD, PHOLUS_RUN_TIMEOUT - # Nmap - global NMAP_ACTIVE, NMAP_TIMEOUT, NMAP_RUN, NMAP_RUN_SCHD, NMAP_ARGS - # API - global API_CUSTOM_SQL - - # get config file - config_file = Path(fullConfPath) - - # Skip import if last time of import is NEWER than file age - if (os.path.getmtime(config_file) < lastTimeImported) : - return - - mySettings = [] # reset settings - mySettingsSQLsafe = [] # same as above but safe to be passed into a SQL query - - # load the variables from pialert.conf - code = compile(config_file.read_text(), config_file.name, "exec") - c_d = {} # config dictionary - exec(code, {"__builtins__": {}}, c_d) - - # Import setting if found in the dictionary - # General - ENABLE_ARPSCAN = ccd('ENABLE_ARPSCAN', True , c_d, 'Enable arpscan', 'boolean', '', 'General', ['run']) - SCAN_SUBNETS = 
ccd('SCAN_SUBNETS', ['192.168.1.0/24 --interface=eth1', '192.168.1.0/24 --interface=eth0'] , c_d, 'Subnets to scan', 'subnets', '', 'General') - LOG_LEVEL = ccd('LOG_LEVEL', 'verbose' , c_d, 'Log verboseness', 'selecttext', "['none', 'minimal', 'verbose', 'debug']", 'General') - TIMEZONE = ccd('TIMEZONE', 'Europe/Berlin' , c_d, 'Time zone', 'text', '', 'General') - ENABLE_PLUGINS = ccd('ENABLE_PLUGINS', True , c_d, 'Enable plugins', 'boolean', '', 'General') - PIALERT_WEB_PROTECTION = ccd('PIALERT_WEB_PROTECTION', False , c_d, 'Enable logon', 'boolean', '', 'General') - PIALERT_WEB_PASSWORD = ccd('PIALERT_WEB_PASSWORD', '8d969eef6ecad3c29a3a629280e686cf0c3f5d5a86aff3ca12020c923adc6c92' , c_d, 'Logon password', 'readonly', '', 'General') - INCLUDED_SECTIONS = ccd('INCLUDED_SECTIONS', ['internet', 'new_devices', 'down_devices', 'events', 'ports'] , c_d, 'Notify on', 'multiselect', "['internet', 'new_devices', 'down_devices', 'events', 'ports', 'plugins']", 'General') - SCAN_CYCLE_MINUTES = ccd('SCAN_CYCLE_MINUTES', 5 , c_d, 'Scan cycle delay (m)', 'integer', '', 'General') - DAYS_TO_KEEP_EVENTS = ccd('DAYS_TO_KEEP_EVENTS', 90 , c_d, 'Delete events days', 'integer', '', 'General') - REPORT_DASHBOARD_URL = ccd('REPORT_DASHBOARD_URL', 'http://pi.alert/' , c_d, 'PiAlert URL', 'text', '', 'General') - DIG_GET_IP_ARG = ccd('DIG_GET_IP_ARG', '-4 myip.opendns.com @resolver1.opendns.com' , c_d, 'DIG arguments', 'text', '', 'General') - UI_LANG = ccd('UI_LANG', 'English' , c_d, 'Language Interface', 'selecttext', "['English', 'German', 'Spanish']", 'General') - UI_PRESENCE = ccd('UI_PRESENCE', ['online', 'offline', 'archived'] , c_d, 'Include in presence', 'multiselect', "['online', 'offline', 'archived']", 'General') - - # Email - REPORT_MAIL = ccd('REPORT_MAIL', False , c_d, 'Enable email', 'boolean', '', 'Email', ['test']) - SMTP_SERVER = ccd('SMTP_SERVER', '' , c_d,'SMTP server URL', 'text', '', 'Email') - SMTP_PORT = ccd('SMTP_PORT', 587 , c_d, 'SMTP port', 'integer', '', 
'Email') - REPORT_TO = ccd('REPORT_TO', 'user@gmail.com' , c_d, 'Email to', 'text', '', 'Email') - REPORT_FROM = ccd('REPORT_FROM', 'Pi.Alert ' , c_d, 'Email Subject', 'text', '', 'Email') - SMTP_SKIP_LOGIN = ccd('SMTP_SKIP_LOGIN', False , c_d, 'SMTP skip login', 'boolean', '', 'Email') - SMTP_USER = ccd('SMTP_USER', '' , c_d, 'SMTP user', 'text', '', 'Email') - SMTP_PASS = ccd('SMTP_PASS', '' , c_d, 'SMTP password', 'password', '', 'Email') - SMTP_SKIP_TLS = ccd('SMTP_SKIP_TLS', False , c_d, 'SMTP skip TLS', 'boolean', '', 'Email') - SMTP_FORCE_SSL = ccd('SMTP_FORCE_SSL', False , c_d, 'Force SSL', 'boolean', '', 'Email') - - # Webhooks - REPORT_WEBHOOK = ccd('REPORT_WEBHOOK', False , c_d, 'Enable Webhooks', 'boolean', '', 'Webhooks', ['test']) - WEBHOOK_URL = ccd('WEBHOOK_URL', '' , c_d, 'Target URL', 'text', '', 'Webhooks') - WEBHOOK_PAYLOAD = ccd('WEBHOOK_PAYLOAD', 'json' , c_d, 'Payload type', 'selecttext', "['json', 'html', 'text']", 'Webhooks') - WEBHOOK_REQUEST_METHOD = ccd('WEBHOOK_REQUEST_METHOD', 'GET' , c_d, 'Req type', 'selecttext', "['GET', 'POST', 'PUT']", 'Webhooks') - - # Apprise - REPORT_APPRISE = ccd('REPORT_APPRISE', False , c_d, 'Enable Apprise', 'boolean', '', 'Apprise', ['test']) - APPRISE_HOST = ccd('APPRISE_HOST', '' , c_d, 'Apprise host URL', 'text', '', 'Apprise') - APPRISE_URL = ccd('APPRISE_URL', '' , c_d, 'Apprise notification URL', 'text', '', 'Apprise') - APPRISE_PAYLOAD = ccd('APPRISE_PAYLOAD', 'html' , c_d, 'Payload type', 'selecttext', "['html', 'text']", 'Apprise') - - # NTFY - REPORT_NTFY = ccd('REPORT_NTFY', False , c_d, 'Enable NTFY', 'boolean', '', 'NTFY', ['test']) - NTFY_HOST = ccd('NTFY_HOST', 'https://ntfy.sh' , c_d, 'NTFY host URL', 'text', '', 'NTFY') - NTFY_TOPIC = ccd('NTFY_TOPIC', '' , c_d, 'NTFY topic', 'text', '', 'NTFY') - NTFY_USER = ccd('NTFY_USER', '' , c_d, 'NTFY user', 'text', '', 'NTFY') - NTFY_PASSWORD = ccd('NTFY_PASSWORD', '' , c_d, 'NTFY password', 'password', '', 'NTFY') - - # PUSHSAFER - 
REPORT_PUSHSAFER = ccd('REPORT_PUSHSAFER', False , c_d, 'Enable PUSHSAFER', 'boolean', '', 'PUSHSAFER', ['test']) - PUSHSAFER_TOKEN = ccd('PUSHSAFER_TOKEN', 'ApiKey' , c_d, 'PUSHSAFER token', 'text', '', 'PUSHSAFER') - - # MQTT - REPORT_MQTT = ccd('REPORT_MQTT', False , c_d, 'Enable MQTT', 'boolean', '', 'MQTT') - MQTT_BROKER = ccd('MQTT_BROKER', '' , c_d, 'MQTT broker', 'text', '', 'MQTT') - MQTT_PORT = ccd('MQTT_PORT', 1883 , c_d, 'MQTT broker port', 'integer', '', 'MQTT') - MQTT_USER = ccd('MQTT_USER', '' , c_d, 'MQTT user', 'text', '', 'MQTT') - MQTT_PASSWORD = ccd('MQTT_PASSWORD', '' , c_d, 'MQTT password', 'password', '', 'MQTT') - MQTT_QOS = ccd('MQTT_QOS', 0 , c_d, 'MQTT Quality of Service', 'selectinteger', "['0', '1', '2']", 'MQTT') - MQTT_DELAY_SEC = ccd('MQTT_DELAY_SEC', 2 , c_d, 'MQTT delay', 'selectinteger', "['2', '3', '4', '5']", 'MQTT') - - # DynDNS - DDNS_ACTIVE = ccd('DDNS_ACTIVE', False , c_d, 'Enable DynDNS', 'boolean', '', 'DynDNS') - DDNS_DOMAIN = ccd('DDNS_DOMAIN', 'your_domain.freeddns.org' , c_d, 'DynDNS domain URL', 'text', '', 'DynDNS') - DDNS_USER = ccd('DDNS_USER', 'dynu_user' , c_d, 'DynDNS user', 'text', '', 'DynDNS') - DDNS_PASSWORD = ccd('DDNS_PASSWORD', 'A0000000B0000000C0000000D0000000' , c_d, 'DynDNS password', 'password', '', 'DynDNS') - DDNS_UPDATE_URL = ccd('DDNS_UPDATE_URL', 'https://api.dynu.com/nic/update?' 
, c_d, 'DynDNS update URL', 'text', '', 'DynDNS') - - # PiHole - PIHOLE_ACTIVE = ccd('PIHOLE_ACTIVE', False, c_d, 'Enable PiHole mapping', 'boolean', '', 'PiHole') - DHCP_ACTIVE = ccd('DHCP_ACTIVE', False , c_d, 'Enable PiHole DHCP', 'boolean', '', 'PiHole') - - # PHOLUS - PHOLUS_ACTIVE = ccd('PHOLUS_ACTIVE', False , c_d, 'Enable Pholus scans', 'boolean', '', 'Pholus') - PHOLUS_TIMEOUT = ccd('PHOLUS_TIMEOUT', 20 , c_d, 'Pholus timeout', 'integer', '', 'Pholus') - PHOLUS_FORCE = ccd('PHOLUS_FORCE', False , c_d, 'Pholus force check', 'boolean', '', 'Pholus') - PHOLUS_RUN = ccd('PHOLUS_RUN', 'once' , c_d, 'Pholus enable schedule', 'selecttext', "['none', 'once', 'schedule']", 'Pholus') - PHOLUS_RUN_TIMEOUT = ccd('PHOLUS_RUN_TIMEOUT', 600 , c_d, 'Pholus timeout schedule', 'integer', '', 'Pholus') - PHOLUS_RUN_SCHD = ccd('PHOLUS_RUN_SCHD', '0 4 * * *' , c_d, 'Pholus schedule', 'text', '', 'Pholus') - PHOLUS_DAYS_DATA = ccd('PHOLUS_DAYS_DATA', 0 , c_d, 'Pholus keep days', 'integer', '', 'Pholus') - - # Nmap - NMAP_ACTIVE = ccd('NMAP_ACTIVE', True , c_d, 'Enable Nmap scans', 'boolean', '', 'Nmap') - NMAP_TIMEOUT = ccd('NMAP_TIMEOUT', 150 , c_d, 'Nmap timeout', 'integer', '', 'Nmap') - NMAP_RUN = ccd('NMAP_RUN', 'none' , c_d, 'Nmap enable schedule', 'selecttext', "['none', 'once', 'schedule']", 'Nmap') - NMAP_RUN_SCHD = ccd('NMAP_RUN_SCHD', '0 2 * * *' , c_d, 'Nmap schedule', 'text', '', 'Nmap') - NMAP_ARGS = ccd('NMAP_ARGS', '-p -10000' , c_d, 'Nmap custom arguments', 'text', '', 'Nmap') - - # API - API_CUSTOM_SQL = ccd('API_CUSTOM_SQL', 'SELECT * FROM Devices WHERE dev_PresentLastScan = 0' , c_d, 'Custom endpoint', 'text', '', 'API') - - # Prepare scheduler - global tz, mySchedules, plugins - - # Init timezone in case it changed - tz = timezone(TIMEZONE) - - # reset schedules - mySchedules = [] - - # init pholus schedule - pholusSchedule = Cron(PHOLUS_RUN_SCHD).schedule(start_date=datetime.datetime.now(tz)) - mySchedules.append(schedule_class("pholus", pholusSchedule, 
pholusSchedule.next(), False)) - - # init nmap schedule - nmapSchedule = Cron(NMAP_RUN_SCHD).schedule(start_date=datetime.datetime.now(tz)) - mySchedules.append(schedule_class("nmap", nmapSchedule, nmapSchedule.next(), False)) - - # Format and prepare the list of subnets - userSubnets = updateSubnets(SCAN_SUBNETS) - # Plugins START - # ----------------- - if ENABLE_PLUGINS: - plugins = get_plugins_configs() - mylog('none', ['[', timeNow(), '] Plugins: Number of dynamically loaded plugins: ', len(plugins)]) - - # handle plugins - for plugin in plugins: - print_plugin_info(plugin, ['display_name','description']) - - pref = plugin["unique_prefix"] - - # if plugin["enabled"] == 'true': - - # collect plugin level language strings - collect_lang_strings(db, plugin, pref) - - for set in plugin["settings"]: - setFunction = set["function"] - # Setting code name / key - key = pref + "_" + setFunction - - v = ccd(key, set["default_value"], c_d, set["name"][0]["string"], set["type"] , str(set["options"]), pref) - - # Save the user defined value into the object - set["value"] = v - - # Setup schedules - if setFunction == 'RUN_SCHD': - newSchedule = Cron(v).schedule(start_date=datetime.datetime.now(tz)) - mySchedules.append(schedule_class(pref, newSchedule, newSchedule.next(), False)) - - # Collect settings related language strings - collect_lang_strings(db, set, pref + "_" + set["function"]) - - plugins_once_run = False - # ----------------- - # Plugins END - - - - - - # Insert settings into the DB - sql.execute ("DELETE FROM Settings") - sql.executemany ("""INSERT INTO Settings ("Code_Name", "Display_Name", "Description", "Type", "Options", - "RegEx", "Value", "Group", "Events" ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)""", mySettingsSQLsafe) - - # Used to determine the next import - lastTimeImported = time.time() - - # Is used to display a message in the UI when old (outdated) settings are loaded - initOrSetParam(db, "Back_Settings_Imported",(round(time.time() * 1000),) ) - - 
#commitDB(sql_connection) - db.commitDB() - - # update only the settings datasource - # update_api(False, ["settings"]) - # TO DO this creates a circular reference between API and HELPER ! - - mylog('info', ['[', timeNow(), '] Config: Imported new config']) - - -#------------------------------------------------------------------------------- -class json_struc: - def __init__(self, jsn, columnNames): - self.json = jsn - self.columnNames = columnNames + #------------------------------------------------------------------------------- # Creates a JSON object from a DB row @@ -570,4 +317,20 @@ def generate_mac_links (html, deviceUrl): for mac in MACs: html = html.replace('' + mac + '','' + mac + '') - return html \ No newline at end of file + return html + + + +#------------------------------------------------------------------------------- +def initOrSetParam(db, parID, parValue): + sql = db.sql + + sql.execute ("INSERT INTO Parameters(par_ID, par_Value) VALUES('"+str(parID)+"', '"+str(parValue)+"') ON CONFLICT(par_ID) DO UPDATE SET par_Value='"+str(parValue)+"' where par_ID='"+str(parID)+"'") + + db.commitDB() + +#------------------------------------------------------------------------------- +class json_struc: + def __init__(self, jsn, columnNames): + self.json = jsn + self.columnNames = columnNames \ No newline at end of file diff --git a/pialert/initialise.py b/pialert/initialise.py new file mode 100644 index 00000000..6202fed1 --- /dev/null +++ b/pialert/initialise.py @@ -0,0 +1,263 @@ + +import os +import time +from pytz import timezone +from cron_converter import Cron +from pathlib import Path +import datetime + +from conf import * +from const import * +from helper import collect_lang_strings, schedule_class, timeNow, updateSubnets, initOrSetParam +from logger import mylog +from plugin import get_plugins_configs, print_plugin_info + +#=============================================================================== +# Initialise user defined values 
+#=============================================================================== +# We need access to the DB to save new values so need to define DB access methods first +#------------------------------------------------------------------------------- + +#------------------------------------------------------------------------------- +# Import user values +# Check config dictionary +def ccd(key, default, config, name, inputtype, options, group, events=[], desc = "", regex = ""): + result = default + + # use existing value if already supplied, otherwise default value is used + if key in config: + result = config[key] + + if inputtype == 'text': + result = result.replace('\'', "{s-quote}") + + mySettingsSQLsafe.append((key, name, desc, inputtype, options, regex, str(result), group, str(events))) + mySettings.append((key, name, desc, inputtype, options, regex, result, group, str(events))) + + return result +#------------------------------------------------------------------------------- + +def importConfigs (db): + + sql = db.sql + + # Specify globals so they can be overwritten with the new config + global lastTimeImported, mySettings, mySettingsSQLsafe, plugins, plugins_once_run + lastTimeImported = 0 + # General + global ENABLE_ARPSCAN, SCAN_SUBNETS, LOG_LEVEL, TIMEZONE, ENABLE_PLUGINS, PIALERT_WEB_PROTECTION, PIALERT_WEB_PASSWORD, INCLUDED_SECTIONS, SCAN_CYCLE_MINUTES, DAYS_TO_KEEP_EVENTS, REPORT_DASHBOARD_URL, DIG_GET_IP_ARG, UI_LANG + # Email + global REPORT_MAIL, SMTP_SERVER, SMTP_PORT, REPORT_TO, REPORT_FROM, SMTP_SKIP_LOGIN, SMTP_USER, SMTP_PASS, SMTP_SKIP_TLS, SMTP_FORCE_SSL + # Webhooks + global REPORT_WEBHOOK, WEBHOOK_URL, WEBHOOK_PAYLOAD, WEBHOOK_REQUEST_METHOD + # Apprise + global REPORT_APPRISE, APPRISE_HOST, APPRISE_URL, APPRISE_PAYLOAD + # NTFY + global REPORT_NTFY, NTFY_HOST, NTFY_TOPIC, NTFY_USER, NTFY_PASSWORD + # PUSHSAFER + global REPORT_PUSHSAFER, PUSHSAFER_TOKEN + # MQTT + global REPORT_MQTT, MQTT_BROKER, MQTT_PORT, MQTT_USER, MQTT_PASSWORD, 
MQTT_QOS, MQTT_DELAY_SEC + # DynDNS + global DDNS_ACTIVE, DDNS_DOMAIN, DDNS_USER, DDNS_PASSWORD, DDNS_UPDATE_URL + # PiHole + global PIHOLE_ACTIVE, DHCP_ACTIVE + # Pholus + global PHOLUS_ACTIVE, PHOLUS_TIMEOUT, PHOLUS_FORCE, PHOLUS_DAYS_DATA, PHOLUS_RUN, PHOLUS_RUN_SCHD, PHOLUS_RUN_TIMEOUT + # Nmap + global NMAP_ACTIVE, NMAP_TIMEOUT, NMAP_RUN, NMAP_RUN_SCHD, NMAP_ARGS + # API + global API_CUSTOM_SQL + + # get config file + config_file = Path(fullConfPath) + + # Skip import if last time of import is NEWER than file age + if (os.path.getmtime(config_file) < lastTimeImported) : + return + + mySettings = [] # reset settings + mySettingsSQLsafe = [] # same as above but safe to be passed into a SQL query + + # load the variables from pialert.conf + code = compile(config_file.read_text(), config_file.name, "exec") + c_d = {} # config dictionary + exec(code, {"__builtins__": {}}, c_d) + + # Import setting if found in the dictionary + # General + ENABLE_ARPSCAN = ccd('ENABLE_ARPSCAN', True , c_d, 'Enable arpscan', 'boolean', '', 'General', ['run']) + SCAN_SUBNETS = ccd('SCAN_SUBNETS', ['192.168.1.0/24 --interface=eth1', '192.168.1.0/24 --interface=eth0'] , c_d, 'Subnets to scan', 'subnets', '', 'General') + LOG_LEVEL = ccd('LOG_LEVEL', 'verbose' , c_d, 'Log verboseness', 'selecttext', "['none', 'minimal', 'verbose', 'debug']", 'General') + TIMEZONE = ccd('TIMEZONE', 'Europe/Berlin' , c_d, 'Time zone', 'text', '', 'General') + ENABLE_PLUGINS = ccd('ENABLE_PLUGINS', True , c_d, 'Enable plugins', 'boolean', '', 'General') + PIALERT_WEB_PROTECTION = ccd('PIALERT_WEB_PROTECTION', False , c_d, 'Enable logon', 'boolean', '', 'General') + PIALERT_WEB_PASSWORD = ccd('PIALERT_WEB_PASSWORD', '8d969eef6ecad3c29a3a629280e686cf0c3f5d5a86aff3ca12020c923adc6c92' , c_d, 'Logon password', 'readonly', '', 'General') + INCLUDED_SECTIONS = ccd('INCLUDED_SECTIONS', ['internet', 'new_devices', 'down_devices', 'events', 'ports'] , c_d, 'Notify on', 'multiselect', "['internet', 'new_devices', 
'down_devices', 'events', 'ports', 'plugins']", 'General') + SCAN_CYCLE_MINUTES = ccd('SCAN_CYCLE_MINUTES', 5 , c_d, 'Scan cycle delay (m)', 'integer', '', 'General') + DAYS_TO_KEEP_EVENTS = ccd('DAYS_TO_KEEP_EVENTS', 90 , c_d, 'Delete events days', 'integer', '', 'General') + REPORT_DASHBOARD_URL = ccd('REPORT_DASHBOARD_URL', 'http://pi.alert/' , c_d, 'PiAlert URL', 'text', '', 'General') + DIG_GET_IP_ARG = ccd('DIG_GET_IP_ARG', '-4 myip.opendns.com @resolver1.opendns.com' , c_d, 'DIG arguments', 'text', '', 'General') + UI_LANG = ccd('UI_LANG', 'English' , c_d, 'Language Interface', 'selecttext', "['English', 'German', 'Spanish']", 'General') + UI_PRESENCE = ccd('UI_PRESENCE', ['online', 'offline', 'archived'] , c_d, 'Include in presence', 'multiselect', "['online', 'offline', 'archived']", 'General') + + # Email + REPORT_MAIL = ccd('REPORT_MAIL', False , c_d, 'Enable email', 'boolean', '', 'Email', ['test']) + SMTP_SERVER = ccd('SMTP_SERVER', '' , c_d,'SMTP server URL', 'text', '', 'Email') + SMTP_PORT = ccd('SMTP_PORT', 587 , c_d, 'SMTP port', 'integer', '', 'Email') + REPORT_TO = ccd('REPORT_TO', 'user@gmail.com' , c_d, 'Email to', 'text', '', 'Email') + REPORT_FROM = ccd('REPORT_FROM', 'Pi.Alert ' , c_d, 'Email Subject', 'text', '', 'Email') + SMTP_SKIP_LOGIN = ccd('SMTP_SKIP_LOGIN', False , c_d, 'SMTP skip login', 'boolean', '', 'Email') + SMTP_USER = ccd('SMTP_USER', '' , c_d, 'SMTP user', 'text', '', 'Email') + SMTP_PASS = ccd('SMTP_PASS', '' , c_d, 'SMTP password', 'password', '', 'Email') + SMTP_SKIP_TLS = ccd('SMTP_SKIP_TLS', False , c_d, 'SMTP skip TLS', 'boolean', '', 'Email') + SMTP_FORCE_SSL = ccd('SMTP_FORCE_SSL', False , c_d, 'Force SSL', 'boolean', '', 'Email') + + # Webhooks + REPORT_WEBHOOK = ccd('REPORT_WEBHOOK', False , c_d, 'Enable Webhooks', 'boolean', '', 'Webhooks', ['test']) + WEBHOOK_URL = ccd('WEBHOOK_URL', '' , c_d, 'Target URL', 'text', '', 'Webhooks') + WEBHOOK_PAYLOAD = ccd('WEBHOOK_PAYLOAD', 'json' , c_d, 'Payload type', 
'selecttext', "['json', 'html', 'text']", 'Webhooks') + WEBHOOK_REQUEST_METHOD = ccd('WEBHOOK_REQUEST_METHOD', 'GET' , c_d, 'Req type', 'selecttext', "['GET', 'POST', 'PUT']", 'Webhooks') + + # Apprise + REPORT_APPRISE = ccd('REPORT_APPRISE', False , c_d, 'Enable Apprise', 'boolean', '', 'Apprise', ['test']) + APPRISE_HOST = ccd('APPRISE_HOST', '' , c_d, 'Apprise host URL', 'text', '', 'Apprise') + APPRISE_URL = ccd('APPRISE_URL', '' , c_d, 'Apprise notification URL', 'text', '', 'Apprise') + APPRISE_PAYLOAD = ccd('APPRISE_PAYLOAD', 'html' , c_d, 'Payload type', 'selecttext', "['html', 'text']", 'Apprise') + + # NTFY + REPORT_NTFY = ccd('REPORT_NTFY', False , c_d, 'Enable NTFY', 'boolean', '', 'NTFY', ['test']) + NTFY_HOST = ccd('NTFY_HOST', 'https://ntfy.sh' , c_d, 'NTFY host URL', 'text', '', 'NTFY') + NTFY_TOPIC = ccd('NTFY_TOPIC', '' , c_d, 'NTFY topic', 'text', '', 'NTFY') + NTFY_USER = ccd('NTFY_USER', '' , c_d, 'NTFY user', 'text', '', 'NTFY') + NTFY_PASSWORD = ccd('NTFY_PASSWORD', '' , c_d, 'NTFY password', 'password', '', 'NTFY') + + # PUSHSAFER + REPORT_PUSHSAFER = ccd('REPORT_PUSHSAFER', False , c_d, 'Enable PUSHSAFER', 'boolean', '', 'PUSHSAFER', ['test']) + PUSHSAFER_TOKEN = ccd('PUSHSAFER_TOKEN', 'ApiKey' , c_d, 'PUSHSAFER token', 'text', '', 'PUSHSAFER') + + # MQTT + REPORT_MQTT = ccd('REPORT_MQTT', False , c_d, 'Enable MQTT', 'boolean', '', 'MQTT') + MQTT_BROKER = ccd('MQTT_BROKER', '' , c_d, 'MQTT broker', 'text', '', 'MQTT') + MQTT_PORT = ccd('MQTT_PORT', 1883 , c_d, 'MQTT broker port', 'integer', '', 'MQTT') + MQTT_USER = ccd('MQTT_USER', '' , c_d, 'MQTT user', 'text', '', 'MQTT') + MQTT_PASSWORD = ccd('MQTT_PASSWORD', '' , c_d, 'MQTT password', 'password', '', 'MQTT') + MQTT_QOS = ccd('MQTT_QOS', 0 , c_d, 'MQTT Quality of Service', 'selectinteger', "['0', '1', '2']", 'MQTT') + MQTT_DELAY_SEC = ccd('MQTT_DELAY_SEC', 2 , c_d, 'MQTT delay', 'selectinteger', "['2', '3', '4', '5']", 'MQTT') + + # DynDNS + DDNS_ACTIVE = ccd('DDNS_ACTIVE', False , c_d, 
'Enable DynDNS', 'boolean', '', 'DynDNS') + DDNS_DOMAIN = ccd('DDNS_DOMAIN', 'your_domain.freeddns.org' , c_d, 'DynDNS domain URL', 'text', '', 'DynDNS') + DDNS_USER = ccd('DDNS_USER', 'dynu_user' , c_d, 'DynDNS user', 'text', '', 'DynDNS') + DDNS_PASSWORD = ccd('DDNS_PASSWORD', 'A0000000B0000000C0000000D0000000' , c_d, 'DynDNS password', 'password', '', 'DynDNS') + DDNS_UPDATE_URL = ccd('DDNS_UPDATE_URL', 'https://api.dynu.com/nic/update?' , c_d, 'DynDNS update URL', 'text', '', 'DynDNS') + + # PiHole + PIHOLE_ACTIVE = ccd('PIHOLE_ACTIVE', False, c_d, 'Enable PiHole mapping', 'boolean', '', 'PiHole') + DHCP_ACTIVE = ccd('DHCP_ACTIVE', False , c_d, 'Enable PiHole DHCP', 'boolean', '', 'PiHole') + + # PHOLUS + PHOLUS_ACTIVE = ccd('PHOLUS_ACTIVE', False , c_d, 'Enable Pholus scans', 'boolean', '', 'Pholus') + PHOLUS_TIMEOUT = ccd('PHOLUS_TIMEOUT', 20 , c_d, 'Pholus timeout', 'integer', '', 'Pholus') + PHOLUS_FORCE = ccd('PHOLUS_FORCE', False , c_d, 'Pholus force check', 'boolean', '', 'Pholus') + PHOLUS_RUN = ccd('PHOLUS_RUN', 'once' , c_d, 'Pholus enable schedule', 'selecttext', "['none', 'once', 'schedule']", 'Pholus') + PHOLUS_RUN_TIMEOUT = ccd('PHOLUS_RUN_TIMEOUT', 600 , c_d, 'Pholus timeout schedule', 'integer', '', 'Pholus') + PHOLUS_RUN_SCHD = ccd('PHOLUS_RUN_SCHD', '0 4 * * *' , c_d, 'Pholus schedule', 'text', '', 'Pholus') + PHOLUS_DAYS_DATA = ccd('PHOLUS_DAYS_DATA', 0 , c_d, 'Pholus keep days', 'integer', '', 'Pholus') + + # Nmap + NMAP_ACTIVE = ccd('NMAP_ACTIVE', True , c_d, 'Enable Nmap scans', 'boolean', '', 'Nmap') + NMAP_TIMEOUT = ccd('NMAP_TIMEOUT', 150 , c_d, 'Nmap timeout', 'integer', '', 'Nmap') + NMAP_RUN = ccd('NMAP_RUN', 'none' , c_d, 'Nmap enable schedule', 'selecttext', "['none', 'once', 'schedule']", 'Nmap') + NMAP_RUN_SCHD = ccd('NMAP_RUN_SCHD', '0 2 * * *' , c_d, 'Nmap schedule', 'text', '', 'Nmap') + NMAP_ARGS = ccd('NMAP_ARGS', '-p -10000' , c_d, 'Nmap custom arguments', 'text', '', 'Nmap') + + # API + API_CUSTOM_SQL = 
ccd('API_CUSTOM_SQL', 'SELECT * FROM Devices WHERE dev_PresentLastScan = 0' , c_d, 'Custom endpoint', 'text', '', 'API') + + # Prepare scheduler + global tz, mySchedules, plugins + + # Init timezone in case it changed + tz = timezone(TIMEZONE) + + # reset schedules + mySchedules = [] + + # init pholus schedule + pholusSchedule = Cron(PHOLUS_RUN_SCHD).schedule(start_date=datetime.datetime.now(tz)) + mySchedules.append(schedule_class("pholus", pholusSchedule, pholusSchedule.next(), False)) + + # init nmap schedule + nmapSchedule = Cron(NMAP_RUN_SCHD).schedule(start_date=datetime.datetime.now(tz)) + mySchedules.append(schedule_class("nmap", nmapSchedule, nmapSchedule.next(), False)) + + # Format and prepare the list of subnets + userSubnets = updateSubnets(SCAN_SUBNETS) + + + + # Plugins START + # ----------------- + if ENABLE_PLUGINS: + plugins = get_plugins_configs() + + mylog('none', ['[', timeNow(), '] Plugins: Number of dynamically loaded plugins: ', len(plugins)]) + + # handle plugins + for plugin in plugins: + print_plugin_info(plugin, ['display_name','description']) + + pref = plugin["unique_prefix"] + + # if plugin["enabled"] == 'true': + + # collect plugin level language strings + collect_lang_strings(db, plugin, pref) + + for set in plugin["settings"]: + setFunction = set["function"] + # Setting code name / key + key = pref + "_" + setFunction + + v = ccd(key, set["default_value"], c_d, set["name"][0]["string"], set["type"] , str(set["options"]), pref) + + # Save the user defined value into the object + set["value"] = v + + # Setup schedules + if setFunction == 'RUN_SCHD': + newSchedule = Cron(v).schedule(start_date=datetime.datetime.now(tz)) + mySchedules.append(schedule_class(pref, newSchedule, newSchedule.next(), False)) + + # Collect settings related language strings + collect_lang_strings(db, set, pref + "_" + set["function"]) + + plugins_once_run = False + # ----------------- + # Plugins END + + + + + + # Insert settings into the DB + sql.execute 
("DELETE FROM Settings") + sql.executemany ("""INSERT INTO Settings ("Code_Name", "Display_Name", "Description", "Type", "Options", + "RegEx", "Value", "Group", "Events" ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)""", mySettingsSQLsafe) + + # Used to determine the next import + lastTimeImported = time.time() + + # Is used to display a message in the UI when old (outdated) settings are loaded + initOrSetParam(db, "Back_Settings_Imported",(round(time.time() * 1000),) ) + + #commitDB(sql_connection) + db.commitDB() + + # update only the settings datasource + # update_api(False, ["settings"]) + # TO DO this creates a circular reference between API and HELPER ! + + mylog('info', ['[', timeNow(), '] Config: Imported new config']) + diff --git a/pialert/internet.py b/pialert/internet.py index 0f58f6b9..3b13db4f 100644 --- a/pialert/internet.py +++ b/pialert/internet.py @@ -4,8 +4,8 @@ import subprocess import re # pialert modules -from database import updateState -from helper import timeNow + +from helper import timeNow, updateState from logger import append_line_to_file, mylog from const import logPath from conf import DDNS_ACTIVE, DDNS_DOMAIN, DDNS_UPDATE_URL, DDNS_PASSWORD, DDNS_USER diff --git a/pialert/mac_vendor.py b/pialert/mac_vendor.py index f7ca6987..3d3fc83e 100644 --- a/pialert/mac_vendor.py +++ b/pialert/mac_vendor.py @@ -1,10 +1,10 @@ import subprocess -from pialert.database import updateState -from pialert.helper import timeNow -from pialert.logger import mylog -from conf import pialertPath, vendorsDB +from const import pialertPath, vendorsDB +from helper import timeNow, updateState +from logger import mylog + #=============================================================================== # UPDATE DEVICE MAC VENDORS diff --git a/pialert/networkscan.py b/pialert/networkscan.py index c3962ae3..63a81481 100644 --- a/pialert/networkscan.py +++ b/pialert/networkscan.py @@ -1,15 +1,17 @@ + +from conf import DHCP_ACTIVE, PIHOLE_ACTIVE, cycle, ENABLE_ARPSCAN from arpscan 
import execute_arpscan -from conf import DHCP_ACTIVE, ENABLE_PLUGINS, PIHOLE_ACTIVE, cycle, ENABLE_ARPSCAN -from database import insertOnlineHistory, updateState +from database import insertOnlineHistory from device import create_new_devices, print_scan_stats, save_scanned_devices, update_devices_data_from_scan, update_devices_names from helper import timeNow from logger import mylog, print_log -from pialert.plugin import run_plugin_scripts from pihole import copy_pihole_network, read_DHCP_leases from reporting import skip_repeated_notifications - + + + #=============================================================================== # SCAN NETWORK #=============================================================================== @@ -20,7 +22,8 @@ def scan_network (db): reporting = False # Header - updateState(db,"Scan: Network") + # moved updateState to main loop + # updateState(db,"Scan: Network") mylog('verbose', ['[', timeNow(), '] Scan Devices:' ]) # Query ScanCycle properties @@ -105,9 +108,9 @@ def scan_network (db): # Commit changes db.commitDB() - # Run splugin scripts which are set to run every timne after a scan finished - if ENABLE_PLUGINS: - run_plugin_scripts(db,'always_after_scan') + # moved plugin execution to main loop + # if ENABLE_PLUGINS: + # run_plugin_scripts(db,'always_after_scan') return reporting diff --git a/pialert/nmapscan.py b/pialert/nmapscan.py index 8a6aa9e6..e5f0e1d2 100644 --- a/pialert/nmapscan.py +++ b/pialert/nmapscan.py @@ -3,8 +3,8 @@ import subprocess from const import logPath from conf import NMAP_ARGS, NMAP_TIMEOUT -from database import updateState, sql_nmap_scan_all -from helper import json_struc, timeNow +from database import sql_nmap_scan_all +from helper import json_struc, timeNow, updateState from logger import append_line_to_file, mylog #------------------------------------------------------------------------------- diff --git a/pialert/pholusscan.py b/pialert/pholusscan.py index 41847d0f..6c6f79ec 100644 --- 
a/pialert/pholusscan.py +++ b/pialert/pholusscan.py @@ -2,9 +2,8 @@ import subprocess import re from const import fullPholusPath, logPath -from pialert.database import updateState -from pialert.helper import checkIPV4, timeNow -from pialert.logger import mylog +from helper import checkIPV4, timeNow, updateState +from logger import mylog #------------------------------------------------------------------------------- diff --git a/pialert/pialert.py b/pialert/pialert.py index f217bc0d..ae75f53a 100755 --- a/pialert/pialert.py +++ b/pialert/pialert.py @@ -44,24 +44,21 @@ import multiprocessing # pialert modules from const import * from conf import * -# from config import DIG_GET_IP_ARG, ENABLE_PLUGINS -from logger import append_line_to_file, mylog, print_log, logResult -from helper import checkIPV4, filePermissions, importConfigs, isNewVersion, removeDuplicateNewLines, timeNow, write_file -from database import * -from internet import check_IP_format, check_internet_IP, get_internet_IP +from logger import mylog +from helper import filePermissions, timeNow, updateState from api import update_api from files import get_file_content -from mqtt import mqtt_start -from pialert.arpscan import execute_arpscan -from pialert.mac_vendor import query_MAC_vendor, update_devices_MAC_vendors -from pialert.networkscan import scan_network -from pialert.nmapscan import performNmapScan -from pialert.pholusscan import performPholusScan, resolve_device_name_pholus -from pialert.pihole import copy_pihole_network, read_DHCP_leases -from pialert.reporting import send_apprise, send_email, send_notifications, send_ntfy, send_pushsafer, send_webhook, skip_repeated_notifications -from plugin import execute_plugin, get_plugin_setting, print_plugin_info, run_plugin_scripts - +from networkscan import scan_network +from initialise import importConfigs +from mac_vendor import update_devices_MAC_vendors +from database import DB, get_all_devices, upgradeDB, sql_new_devices +from reporting import 
send_apprise, send_email, send_notifications, send_ntfy, send_pushsafer, send_webhook +from plugin import run_plugin_scripts +# different scanners +from pholusscan import performPholusScan +from nmapscan import performNmapScan +from internet import check_internet_IP # Global variables @@ -224,13 +221,14 @@ def main (): last_network_scan = time_started cycle = 1 # network scan mylog('verbose', ['[', timeNow(), '] cycle:',cycle]) - + updateState(db,"Scan: Network") + # scan_network() # DEBUG start ++++++++++++++++++++++++++++++++++++++++++++++++++++++ # Start scan_network as a process - p = multiprocessing.Process(target=scan_network) + p = multiprocessing.Process(target=scan_network(db)) p.start() # Wait for 3600 seconds (max 1h) or until process finishes @@ -249,7 +247,10 @@ p.join() # DEBUG end ++++++++++++++++++++++++++++++++++++++++++++++++++++++ - + # Run plugin scripts which are set to run every time after a scan finished + if ENABLE_PLUGINS: + run_plugin_scripts(db,'always_after_scan') + # Reporting if cycle in check_report: diff --git a/pialert/pihole.py b/pialert/pihole.py index 50d25e86..d13a1c04 100644 --- a/pialert/pihole.py +++ b/pialert/pihole.py @@ -1,8 +1,11 @@ - from const import piholeDB, piholeDhcpleases #------------------------------------------------------------------------------- def copy_pihole_network (db): + """ + attach the PiHole Database and copy the PiHole_Network table across into the PiAlert DB + """ + sql = db.sql # TO-DO # Open Pi-hole DB sql.execute ("ATTACH DATABASE '"+ piholeDB +"' AS PH") @@ -29,6 +32,10 @@ def copy_pihole_network (db): #------------------------------------------------------------------------------- def read_DHCP_leases (db): + """ + read the PiHole DHCP file and insert all records into the DHCP_Leases table. 
+ """ + sql = db.sql # TO-DO # Read DHCP Leases # Bugfix #1 - dhcp.leases: lines with different number of columns (5 col) @@ -45,4 +52,4 @@ def read_DHCP_leases (db): DHCP_IP, DHCP_Name, DHCP_MAC2) VALUES (?, ?, ?, ?, ?) """, data) - + db.commit() diff --git a/pialert/plugin.py b/pialert/plugin.py index df4c4e3f..f2ce9569 100644 --- a/pialert/plugin.py +++ b/pialert/plugin.py @@ -6,11 +6,11 @@ from collections import namedtuple # pialert modules from const import pluginsPath, logPath -from conf import mySettings +from conf import mySettings, plugins from files import get_file_content, write_file from logger import mylog -from database import updateState -#from api import update_api +from helper import updateState + #------------------------------------------------------------------------------- diff --git a/pialert/reporting.py b/pialert/reporting.py index 2277ed95..0100fc62 100644 --- a/pialert/reporting.py +++ b/pialert/reporting.py @@ -13,9 +13,9 @@ from json2table import convert from const import pialertPath, logPath # from pialert.api import update_api from conf import * -from database import get_table_as_json, updateState +from database import get_table_as_json from files import write_file -from helper import generate_mac_links, isNewVersion, removeDuplicateNewLines, timeNow, hide_email +from helper import generate_mac_links, isNewVersion, removeDuplicateNewLines, timeNow, hide_email, json_struc, updateState from logger import logResult, mylog, print_log from mqtt import mqtt_start