Mirror of https://github.com/jokob-sk/NetAlertX.git, synced 2025-12-07 09:36:05 -08:00

Commit: change to import conf
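
The theme of this commit is switching modules from `from conf import X` to `import conf` plus attribute access (`conf.X`). The difference matters because importConfigs() rewrites settings at runtime: a `from`-import copies the value into the importing module's namespace once, at import time, while `conf.X` is resolved on the live module at every use, so re-imported settings propagate everywhere. A self-contained sketch of the distinction (the module here is a stand-in, not the real conf.py):

    import types

    conf = types.ModuleType("conf")   # stand-in for the real conf.py module
    conf.LOG_LEVEL = "verbose"

    snapshot = conf.LOG_LEVEL         # what `from conf import LOG_LEVEL` would bind

    conf.LOG_LEVEL = "debug"          # what importConfigs() effectively does

    print(conf.LOG_LEVEL)             # -> debug   (attribute lookup stays fresh)
    print(snapshot)                   # -> verbose (the import-time copy went stale)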
@@ -1,17 +1,26 @@
""" config related functions for Pi.Alert """

# These are global variables, not config items and should not exist !
mySettings = []
mySettingsSQLsafe = []
debug_force_notification = False
cycle = 1
userSubnets = []
mySchedules = [] # bad solution for global - TO-DO
plugins = [] # bad solution for global - TO-DO
tz = ''
lastTimeImported = '' # Time the config was last imported
plugins_once_run = False
newVersionAvailable = False

# ACTUAL CONFIGRATION ITEMS set to defaults

# General
ENABLE_ARPSCAN = True
SCAN_SUBNETS = ['192.168.1.0/24 --interface=eth1', '192.168.1.0/24 --interface=eth0']
LOG_LEVEL = 'verbose'
TIMEZONE = 'Europe/Berlin'
ENABLE_PLUGINS = True
PIALERT_WEB_PROTECTION = False
PIALERT_WEB_PASSWORD = '8d969eef6ecad3c29a3a629280e686cf0c3f5d5a86aff3ca12020c923adc6c92'
@@ -23,7 +32,7 @@ DIG_GET_IP_ARG = '-4 myip.opendns.com @resolver1.opendns.com'
UI_LANG = 'English'
UI_PRESENCE = ['online', 'offline', 'archived']

tz = ''

# Email
REPORT_MAIL = False
@@ -460,5 +460,5 @@ def insertOnlineHistory(db, cycle):
    sql.execute ("INSERT INTO Online_History (Scan_Date, Online_Devices, Down_Devices, All_Devices, Archived_Devices) "+
                 "VALUES ( ?, ?, ?, ?, ?)", (startTime, History_Online_Devices, History_Offline_Devices, History_All_Devices, History_Archived_Devices ) )
-   db.commit()
+   db.commitDB()
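
The db.commit() to db.commitDB() change reflects routing all commits through the project's database wrapper object. A minimal sketch of what such a wrapper looks like; only openDB/commitDB and the sql/sql_connection attributes appear in this diff, the rest is an assumption for illustration:

    import sqlite3

    class DB:
        """Minimal stand-in for the project's database wrapper (illustrative)."""
        def __init__(self, path):
            self.path = path
            self.sql_connection = None
            self.sql = None

        def openDB(self):
            self.sql_connection = sqlite3.connect(self.path)
            self.sql = self.sql_connection.cursor()

        def commitDB(self):
            # one choke point for commits instead of raw connection.commit() calls
            self.sql_connection.commit()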
@@ -4,7 +4,7 @@
import subprocess

-from conf import PHOLUS_ACTIVE, PHOLUS_FORCE, PHOLUS_TIMEOUT, cycle, DIG_GET_IP_ARG, userSubnets
+import conf
from helper import timeNow
from internet import check_IP_format, get_internet_IP
from logger import mylog, print_log
@@ -43,7 +43,7 @@ def save_scanned_devices (db, p_arpscan_devices, p_cycle_interval):
            cycle) )

    # Check Internet connectivity
-   internet_IP = get_internet_IP(DIG_GET_IP_ARG)
+   internet_IP = get_internet_IP( conf.DIG_GET_IP_ARG )
    # TESTING - Force IP
    # internet_IP = ""
    if internet_IP != "" :
@@ -81,19 +81,19 @@ def print_scan_stats (db):
    # Devices Detected
    sql.execute ("""SELECT COUNT(*) FROM CurrentScan
                    WHERE cur_ScanCycle = ? """,
-                   (cycle,))
+                   (conf.cycle,))
    mylog('verbose', [' Devices Detected.......: ', str (sql.fetchone()[0]) ])

    # Devices arp-scan
    sql.execute ("""SELECT COUNT(*) FROM CurrentScan
                    WHERE cur_ScanMethod='arp-scan' AND cur_ScanCycle = ? """,
-                   (cycle,))
+                   (conf.cycle,))
    mylog('verbose', [' arp-scan detected..: ', str (sql.fetchone()[0]) ])

    # Devices Pi-hole
    sql.execute ("""SELECT COUNT(*) FROM CurrentScan
                    WHERE cur_ScanMethod='PiHole' AND cur_ScanCycle = ? """,
-                   (cycle,))
+                   (conf.cycle,))
    mylog('verbose', [' Pi-hole detected...: +' + str (sql.fetchone()[0]) ])

    # New Devices
@@ -101,14 +101,14 @@ def print_scan_stats (db):
                    WHERE cur_ScanCycle = ?
                    AND NOT EXISTS (SELECT 1 FROM Devices
                                    WHERE dev_MAC = cur_MAC) """,
-                   (cycle,))
+                   (conf.cycle,))
    mylog('verbose', [' New Devices........: ' + str (sql.fetchone()[0]) ])

    # Devices in this ScanCycle
    sql.execute ("""SELECT COUNT(*) FROM Devices, CurrentScan
                    WHERE dev_MAC = cur_MAC AND dev_ScanCycle = cur_ScanCycle
                    AND dev_ScanCycle = ? """,
-                   (cycle,))
+                   (conf.cycle,))

    mylog('verbose', [' Devices in this cycle..: ' + str (sql.fetchone()[0]) ])

@@ -119,7 +119,7 @@ def print_scan_stats (db):
                    AND NOT EXISTS (SELECT 1 FROM CurrentScan
                                    WHERE dev_MAC = cur_MAC
                                    AND dev_ScanCycle = cur_ScanCycle) """,
-                   (cycle,))
+                   (conf.cycle,))
    mylog('verbose', [' Down Alerts........: ' + str (sql.fetchone()[0]) ])

    # New Down Alerts
@@ -130,7 +130,7 @@ def print_scan_stats (db):
                    AND NOT EXISTS (SELECT 1 FROM CurrentScan
                                    WHERE dev_MAC = cur_MAC
                                    AND dev_ScanCycle = cur_ScanCycle) """,
-                   (cycle,))
+                   (conf.cycle,))
    mylog('verbose', [' New Down Alerts....: ' + str (sql.fetchone()[0]) ])

    # New Connections
@@ -138,7 +138,7 @@ def print_scan_stats (db):
                    WHERE dev_MAC = cur_MAC AND dev_ScanCycle = cur_ScanCycle
                    AND dev_PresentLastScan = 0
                    AND dev_ScanCycle = ? """,
-                   (cycle,))
+                   (conf.cycle,))
    mylog('verbose', [' New Connections....: ' + str ( sql.fetchone()[0]) ])

    # Disconnections
@@ -148,7 +148,7 @@ def print_scan_stats (db):
                    AND NOT EXISTS (SELECT 1 FROM CurrentScan
                                    WHERE dev_MAC = cur_MAC
                                    AND dev_ScanCycle = cur_ScanCycle) """,
-                   (cycle,))
+                   (conf.cycle,))
    mylog('verbose', [' Disconnections.....: ' + str ( sql.fetchone()[0]) ])

    # IP Changes
@@ -156,7 +156,7 @@ def print_scan_stats (db):
                    WHERE dev_MAC = cur_MAC AND dev_ScanCycle = cur_ScanCycle
                    AND dev_ScanCycle = ?
                    AND dev_LastIP <> cur_IP """,
-                   (cycle,))
+                   (conf.cycle,))
    mylog('verbose', [' IP Changes.........: ' + str ( sql.fetchone()[0]) ])

@@ -176,7 +176,7 @@ def create_new_devices (db):
                    WHERE cur_ScanCycle = ?
                    AND NOT EXISTS (SELECT 1 FROM Devices
                                    WHERE dev_MAC = cur_MAC) """,
-                   (startTime, cycle) )
+                   (startTime, conf.cycle) )

    print_log ('New devices - Insert Connection into session table')
    sql.execute ("""INSERT INTO Sessions (ses_MAC, ses_IP, ses_EventTypeConnection, ses_DateTimeConnection,
@@ -186,7 +186,7 @@ def create_new_devices (db):
                    WHERE cur_ScanCycle = ?
                    AND NOT EXISTS (SELECT 1 FROM Sessions
                                    WHERE ses_MAC = cur_MAC) """,
-                   (startTime, cycle) )
+                   (startTime, conf.cycle) )

    # arpscan - Create new devices
    print_log ('New devices - 2 Create devices')
@@ -200,7 +200,7 @@ def create_new_devices (db):
                    WHERE cur_ScanCycle = ?
                    AND NOT EXISTS (SELECT 1 FROM Devices
                                    WHERE dev_MAC = cur_MAC) """,
-                   (startTime, startTime, cycle) )
+                   (startTime, startTime, conf.cycle) )

    # Pi-hole - Insert events for new devices
    # NOT STRICYLY NECESARY (Devices can be created through Current_Scan)
@@ -277,7 +277,7 @@ def create_new_devices (db):
    #                 WHERE dev_MAC = DHCP_MAC) """,
    #                 (startTime, startTime) )
    print_log ('New Devices end')
-   db.commit()
+   db.commitDB()

#-------------------------------------------------------------------------------
@@ -293,7 +293,7 @@ def update_devices_data_from_scan (db):
                    AND EXISTS (SELECT 1 FROM CurrentScan
                                WHERE dev_MAC = cur_MAC
                                AND dev_ScanCycle = cur_ScanCycle) """,
-                   (startTime, cycle))
+                   (startTime, conf.cycle))

    # Clean no active devices
    print_log ('Update devices - 2 Clean no active devices')
@@ -302,7 +302,7 @@ def update_devices_data_from_scan (db):
                    AND NOT EXISTS (SELECT 1 FROM CurrentScan
                                    WHERE dev_MAC = cur_MAC
                                    AND dev_ScanCycle = cur_ScanCycle) """,
-                   (cycle,))
+                   (conf.cycle,))

    # Update IP & Vendor
    print_log ('Update devices - 3 LastIP & Vendor')
@@ -317,7 +317,7 @@ def update_devices_data_from_scan (db):
                    AND EXISTS (SELECT 1 FROM CurrentScan
                                WHERE dev_MAC = cur_MAC
                                AND dev_ScanCycle = cur_ScanCycle) """,
-                   (cycle,))
+                   (conf.cycle,))

    # Pi-hole Network - Update (unknown) Name
    print_log ('Update devices - 4 Unknown Name')
@@ -379,11 +379,11 @@ def update_devices_names (db):
    db.commitDB()

    # perform Pholus scan if (unknown) devices found
-   if PHOLUS_ACTIVE and (len(unknownDevices) > 0 or PHOLUS_FORCE):
-       performPholusScan(db, PHOLUS_TIMEOUT, userSubnets)
+   if conf.PHOLUS_ACTIVE and (len(unknownDevices) > 0 or conf.PHOLUS_FORCE):
+       performPholusScan(db, conf.PHOLUS_TIMEOUT, conf.userSubnets)

    # skip checks if no unknown devices
-   if len(unknownDevices) == 0 and PHOLUS_FORCE == False:
+   if len(unknownDevices) == 0 and conf.PHOLUS_FORCE == False:
        return

    # Devices without name
@@ -23,4 +23,15 @@ def get_file_content(path):
    content = f.read()
    f.close()

    return content

#-------------------------------------------------------------------------------
def read_config_file(filename):
    """
    retuns dict on the config file key:value pairs
    """
    # load the variables from pialert.conf
    code = compile(filename.read_text(), filename.name, "exec")
    confDict = {} # config dictionary
    exec(code, {"__builtins__": {}}, confDict)
    return confDict
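
read_config_file executes pialert.conf as restricted Python: compile() turns the file into a code object, and exec() runs it with an empty __builtins__, so plain KEY = value assignments land in the dictionary while built-in functions are unavailable (a restriction, though not a true sandbox). A self-contained sketch of the same idea, using an illustrative file name:

    from pathlib import Path

    demo = Path("demo.conf")                     # hypothetical config file
    demo.write_text("SCAN_CYCLE_MINUTES = 5\nUI_LANG = 'English'\n")

    code = compile(demo.read_text(), demo.name, "exec")
    confDict = {}
    exec(code, {"__builtins__": {}}, confDict)   # assignments populate confDict

    print(confDict["SCAN_CYCLE_MINUTES"])        # -> 5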
@@ -12,11 +12,8 @@ import time
from pathlib import Path
import requests

+import conf
from const import *
-from conf import tz
from logger import mylog, logResult, print_log
# from api import update_api # to avoid circular reference
@@ -25,9 +22,15 @@ from logger import mylog, logResult, print_log
#-------------------------------------------------------------------------------
def timeNow():
    return datetime.datetime.now().replace(microsecond=0)

+#-------------------------------------------------------------------------------
+def timeNowTZ():
+    return datetime.datetime.now(conf.tz).replace(microsecond=0)

#-------------------------------------------------------------------------------
def updateState(db, newState):

    # ?? Why is the state written to the DB?

    #sql = db.sql

    mylog('debug', ' [updateState] changing state to: "' + newState +'"')
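
timeNowTZ() is the timezone-aware counterpart of timeNow(): conf.tz holds a timezone object (set from the TIMEZONE setting in importConfigs), and passing it to datetime.now() yields a localized timestamp. A stand-alone sketch, assuming conf.tz is built with pytz.timezone as the `tz = timezone(TIMEZONE)` line elsewhere in this diff suggests:

    import datetime
    from pytz import timezone

    tz = timezone('Europe/Berlin')   # what importConfigs() stores in conf.tz

    def timeNowTZ():
        # timezone-aware "now", truncated to whole seconds like the original
        return datetime.datetime.now(tz).replace(microsecond=0)

    print(timeNowTZ())               # e.g. 2023-06-01 12:34:56+02:00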
@@ -35,18 +38,20 @@ def updateState(db, newState):
    db.commitDB()

#-------------------------------------------------------------------------------
-def updateSubnets(SCAN_SUBNETS):
+def updateSubnets(scan_subnets):

    # remove old list
-   userSubnets = []
+   subnets = []

    # multiple interfaces
-   if type(SCAN_SUBNETS) is list:
-       for interface in SCAN_SUBNETS :
-           userSubnets.append(interface)
+   if type(scan_subnets) is list:
+       for interface in scan_subnets :
+           subnets.append(interface)
    # one interface only
    else:
-       userSubnets.append(SCAN_SUBNETS)
+       subnets.append(scan_subnets)

+   return subnets
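
updateSubnets now normalizes the SCAN_SUBNETS setting, which may be a single string or a list of strings, into a plain list and returns it instead of mutating a global; the caller assigns the result to conf.userSubnets. A condensed equivalent for illustration (isinstance in place of the type comparison):

    def updateSubnets(scan_subnets):
        # accept either one subnet string or a list of them
        if isinstance(scan_subnets, list):
            return list(scan_subnets)
        return [scan_subnets]

    print(updateSubnets('192.168.1.0/24 --interface=eth0'))
    # -> ['192.168.1.0/24 --interface=eth0']
    print(updateSubnets(['10.0.0.0/24', '10.0.1.0/24']))
    # -> ['10.0.0.0/24', '10.0.1.0/24']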
@@ -129,43 +134,6 @@ def filePermissions():
        # last attempt
        fixPermissions()

-#-------------------------------------------------------------------------------
-class schedule_class:
-    def __init__(self, service, scheduleObject, last_next_schedule, was_last_schedule_used, last_run = 0):
-        self.service = service
-        self.scheduleObject = scheduleObject
-        self.last_next_schedule = last_next_schedule
-        self.last_run = last_run
-        self.was_last_schedule_used = was_last_schedule_used
-    def runScheduleCheck(self):
-
-        result = False
-
-        # Initialize the last run time if never run before
-        if self.last_run == 0:
-            self.last_run = (datetime.datetime.now(tz) - timedelta(days=365)).replace(microsecond=0)
-
-        # get the current time with the currently specified timezone
-        nowTime = datetime.datetime.now(tz).replace(microsecond=0)
-
-        # Run the schedule if the current time is past the schedule time we saved last time and
-        # (maybe the following check is unnecessary:)
-        # if the last run is past the last time we run a scheduled Pholus scan
-        if nowTime > self.last_next_schedule and self.last_run < self.last_next_schedule:
-            print_log(f'Scheduler run for {self.service}: YES')
-            self.was_last_schedule_used = True
-            result = True
-        else:
-            print_log(f'Scheduler run for {self.service}: NO')
-
-        if self.was_last_schedule_used:
-            self.was_last_schedule_used = False
-            self.last_next_schedule = self.scheduleObject.next()
-
-        return result

#-------------------------------------------------------------------------------
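
schedule_class is removed from helper.py here; the new `from scheduler import schedule_class` import later in the diff indicates it moved to scheduler.py. The class is driven by a cron_converter schedule object: Cron(expr).schedule(start_date=...) yields successive datetimes via .next(), and runScheduleCheck() fires once when the saved next-run time has passed. A hedged usage sketch, mirroring the constructor calls visible in this diff:

    import datetime
    from pytz import timezone
    from cron_converter import Cron
    from scheduler import schedule_class   # new home of the class after this commit

    tz = timezone('Europe/Berlin')

    # build a schedule that fires daily at 04:00, starting from "now"
    pholusSchedule = Cron('0 4 * * *').schedule(start_date=datetime.datetime.now(tz))

    # service name, schedule object, first target time, was_last_schedule_used flag
    sched = schedule_class('pholus', pholusSchedule, pholusSchedule.next(), False)

    if sched.runScheduleCheck():            # True once the target time has passed
        sched.last_run = datetime.datetime.now(tz).replace(microsecond=0)
        # ... perform the scheduled scan here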
@@ -220,15 +188,11 @@ def import_language_string(db, code, key, value, extra = ""):
#-------------------------------------------------------------------------------
-# Make a regular expression
-# for validating an Ip-address
-ipRegex = "^((25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9]?[0-9])\.){3}(25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9]?[0-9])$"
-
-# Define a function to
-# validate an Ip address
def checkIPV4(ip):
-   # pass the regular expression
-   # and the string in search() method
+   """ Define a function to validate an Ip address
+   """
+   ipRegex = "^((25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9]?[0-9])\.){3}(25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9]?[0-9])$"

    if(re.search(ipRegex, ip)):
        return True
    else:
@@ -236,10 +200,9 @@ def checkIPV4(ip):
#-------------------------------------------------------------------------------
-def isNewVersion(db):
-    global newVersionAvailable
+def isNewVersion(newVersion: bool):

-   if newVersionAvailable == False:
+   if newVersion == False:

        f = open(pialertPath + '/front/buildtimestamp.txt', 'r')
        buildTimestamp = int(f.read().strip())
@@ -264,10 +227,10 @@

        if realeaseTimestamp > buildTimestamp + 600:
            mylog('none', [" New version of the container available!"])
-           newVersionAvailable = True
+           newVersion = True
            # updateState(db, 'Back_New_Version_Available', str(newVersionAvailable)) ## TO DO add this back in but avoid circular ref with database

-   return newVersionAvailable
+   return newVersion

#-------------------------------------------------------------------------------
def hide_email(email):
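
isNewVersion is now a pure function: the caller passes the current flag in and stores the result (conf.newVersionAvailable = isNewVersion(False) in the main loop) instead of the function mutating a global. A condensed sketch of the comparison with the timestamps passed in explicitly; the real function reads them from buildtimestamp.txt and the GitHub release:

    def isNewVersion(newVersion: bool, buildTimestamp: int, releaseTimestamp: int) -> bool:
        # once flagged, stay flagged; otherwise compare timestamps with 600 s slack
        if newVersion == False:
            if releaseTimestamp > buildTimestamp + 600:
                newVersion = True
        return newVersion

    # caller side, mirroring the main loop after this commit:
    # conf.newVersionAvailable = isNewVersion(conf.newVersionAvailable, ...)
    print(isNewVersion(False, 1_600_000_000, 1_600_000_700))  # -> True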
@@ -6,10 +6,12 @@ from cron_converter import Cron
from pathlib import Path
import datetime

-from conf import *
+import conf
from const import *
-from helper import collect_lang_strings, schedule_class, timeNow, updateSubnets, initOrSetParam
+from helper import collect_lang_strings, timeNow, updateSubnets, initOrSetParam
from logger import mylog
+from files import read_config_file
+from scheduler import schedule_class
from plugin import get_plugins_configs, print_plugin_info

#===============================================================================
@@ -21,18 +23,18 @@ from plugin import get_plugins_configs, print_plugin_info
#-------------------------------------------------------------------------------
# Import user values
# Check config dictionary
-def ccd(key, default, config, name, inputtype, options, group, events=[], desc = "", regex = ""):
+def ccd(key, default, config_dir, name, inputtype, options, group, events=[], desc = "", regex = ""):
    result = default

    # use existing value if already supplied, otherwise default value is used
-   if key in config:
-       result = config[key]
+   if key in config_dir:
+       result = config_dir[key]

    if inputtype == 'text':
        result = result.replace('\'', "{s-quote}")

-   mySettingsSQLsafe.append((key, name, desc, inputtype, options, regex, str(result), group, str(events)))
-   mySettings.append((key, name, desc, inputtype, options, regex, result, group, str(events)))
+   conf.mySettingsSQLsafe.append((key, name, desc, inputtype, options, regex, str(result), group, str(events)))
+   conf.mySettings.append((key, name, desc, inputtype, options, regex, result, group, str(events)))

    return result
#-------------------------------------------------------------------------------
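
ccd ("check config dictionary") is the single funnel every setting flows through: it returns the user-supplied value when the key exists in the parsed config dictionary, falls back to the default otherwise, and records the setting's metadata in conf.mySettings / conf.mySettingsSQLsafe so it can be rendered in the UI and written to the Settings table. An annotated call, taken from the hunk below:

    c_d = read_config_file(config_file)       # dict parsed from pialert.conf

    # value from c_d if present, otherwise the default 5
    conf.SCAN_CYCLE_MINUTES = ccd(
        'SCAN_CYCLE_MINUTES',   # key in pialert.conf
        5,                      # default
        c_d,                    # parsed config dictionary
        'Scan cycle delay (m)', # display name
        'integer',              # input type rendered in the settings UI
        '',                     # options (unused for integers)
        'General')              # settings group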
@@ -41,33 +43,7 @@ def importConfigs (db):
    sql = db.sql

-   # Specify globals so they can be overwritten with the new config
-   global lastTimeImported, mySettings, mySettingsSQLsafe, plugins, plugins_once_run
-   lastTimeImported = 0
-   # General
-   global ENABLE_ARPSCAN, SCAN_SUBNETS, LOG_LEVEL, TIMEZONE, ENABLE_PLUGINS, PIALERT_WEB_PROTECTION, PIALERT_WEB_PASSWORD, INCLUDED_SECTIONS, SCAN_CYCLE_MINUTES, DAYS_TO_KEEP_EVENTS, REPORT_DASHBOARD_URL, DIG_GET_IP_ARG, UI_LANG
-   # Email
-   global REPORT_MAIL, SMTP_SERVER, SMTP_PORT, REPORT_TO, REPORT_FROM, SMTP_SKIP_LOGIN, SMTP_USER, SMTP_PASS, SMTP_SKIP_TLS, SMTP_FORCE_SSL
-   # Webhooks
-   global REPORT_WEBHOOK, WEBHOOK_URL, WEBHOOK_PAYLOAD, WEBHOOK_REQUEST_METHOD
-   # Apprise
-   global REPORT_APPRISE, APPRISE_HOST, APPRISE_URL, APPRISE_PAYLOAD
-   # NTFY
-   global REPORT_NTFY, NTFY_HOST, NTFY_TOPIC, NTFY_USER, NTFY_PASSWORD
-   # PUSHSAFER
-   global REPORT_PUSHSAFER, PUSHSAFER_TOKEN
-   # MQTT
-   global REPORT_MQTT, MQTT_BROKER, MQTT_PORT, MQTT_USER, MQTT_PASSWORD, MQTT_QOS, MQTT_DELAY_SEC
-   # DynDNS
-   global DDNS_ACTIVE, DDNS_DOMAIN, DDNS_USER, DDNS_PASSWORD, DDNS_UPDATE_URL
-   # PiHole
-   global PIHOLE_ACTIVE, DHCP_ACTIVE
-   # Pholus
-   global PHOLUS_ACTIVE, PHOLUS_TIMEOUT, PHOLUS_FORCE, PHOLUS_DAYS_DATA, PHOLUS_RUN, PHOLUS_RUN_SCHD, PHOLUS_RUN_TIMEOUT
-   # Nmap
-   global NMAP_ACTIVE, NMAP_TIMEOUT, NMAP_RUN, NMAP_RUN_SCHD, NMAP_ARGS
-   # API
-   global API_CUSTOM_SQL

    # get config file
    config_file = Path(fullConfPath)
@@ -76,136 +52,133 @@ def importConfigs (db):
    if (os.path.getmtime(config_file) < lastTimeImported) :
        return

-   mySettings = [] # reset settings
-   mySettingsSQLsafe = [] # same as above but safe to be passed into a SQL query
-
-   # load the variables from pialert.conf
-   code = compile(config_file.read_text(), config_file.name, "exec")
-   c_d = {} # config dictionary
-   exec(code, {"__builtins__": {}}, c_d)
+   conf.mySettings = [] # reset settings
+   conf.mySettingsSQLsafe = [] # same as above but safe to be passed into a SQL query
+
+   c_d = read_config_file(config_file)

    # Import setting if found in the dictionary
    # General
-   ENABLE_ARPSCAN = ccd('ENABLE_ARPSCAN', True , c_d, 'Enable arpscan', 'boolean', '', 'General', ['run'])
-   SCAN_SUBNETS = ccd('SCAN_SUBNETS', ['192.168.1.0/24 --interface=eth1', '192.168.1.0/24 --interface=eth0'] , c_d, 'Subnets to scan', 'subnets', '', 'General')
-   LOG_LEVEL = ccd('LOG_LEVEL', 'verbose' , c_d, 'Log verboseness', 'selecttext', "['none', 'minimal', 'verbose', 'debug']", 'General')
-   TIMEZONE = ccd('TIMEZONE', 'Europe/Berlin' , c_d, 'Time zone', 'text', '', 'General')
-   ENABLE_PLUGINS = ccd('ENABLE_PLUGINS', True , c_d, 'Enable plugins', 'boolean', '', 'General')
-   PIALERT_WEB_PROTECTION = ccd('PIALERT_WEB_PROTECTION', False , c_d, 'Enable logon', 'boolean', '', 'General')
-   PIALERT_WEB_PASSWORD = ccd('PIALERT_WEB_PASSWORD', '8d969eef6ecad3c29a3a629280e686cf0c3f5d5a86aff3ca12020c923adc6c92' , c_d, 'Logon password', 'readonly', '', 'General')
-   INCLUDED_SECTIONS = ccd('INCLUDED_SECTIONS', ['internet', 'new_devices', 'down_devices', 'events', 'ports'] , c_d, 'Notify on', 'multiselect', "['internet', 'new_devices', 'down_devices', 'events', 'ports', 'plugins']", 'General')
-   SCAN_CYCLE_MINUTES = ccd('SCAN_CYCLE_MINUTES', 5 , c_d, 'Scan cycle delay (m)', 'integer', '', 'General')
-   DAYS_TO_KEEP_EVENTS = ccd('DAYS_TO_KEEP_EVENTS', 90 , c_d, 'Delete events days', 'integer', '', 'General')
-   REPORT_DASHBOARD_URL = ccd('REPORT_DASHBOARD_URL', 'http://pi.alert/' , c_d, 'PiAlert URL', 'text', '', 'General')
-   DIG_GET_IP_ARG = ccd('DIG_GET_IP_ARG', '-4 myip.opendns.com @resolver1.opendns.com' , c_d, 'DIG arguments', 'text', '', 'General')
-   UI_LANG = ccd('UI_LANG', 'English' , c_d, 'Language Interface', 'selecttext', "['English', 'German', 'Spanish']", 'General')
-   UI_PRESENCE = ccd('UI_PRESENCE', ['online', 'offline', 'archived'] , c_d, 'Include in presence', 'multiselect', "['online', 'offline', 'archived']", 'General')
+   conf.ENABLE_ARPSCAN = ccd('ENABLE_ARPSCAN', True , c_d, 'Enable arpscan', 'boolean', '', 'General', ['run'])
+   conf.SCAN_SUBNETS = ccd('SCAN_SUBNETS', ['192.168.1.0/24 --interface=eth1', '192.168.1.0/24 --interface=eth0'] , c_d, 'Subnets to scan', 'subnets', '', 'General')
+   conf.LOG_LEVEL = ccd('LOG_LEVEL', 'verbose' , c_d, 'Log verboseness', 'selecttext', "['none', 'minimal', 'verbose', 'debug']", 'General')
+   conf.TIMEZONE = ccd('TIMEZONE', 'Europe/Berlin' , c_d, 'Time zone', 'text', '', 'General')
+   conf.ENABLE_PLUGINS = ccd('ENABLE_PLUGINS', True , c_d, 'Enable plugins', 'boolean', '', 'General')
+   conf.PIALERT_WEB_PROTECTION = ccd('PIALERT_WEB_PROTECTION', False , c_d, 'Enable logon', 'boolean', '', 'General')
+   conf.PIALERT_WEB_PASSWORD = ccd('PIALERT_WEB_PASSWORD', '8d969eef6ecad3c29a3a629280e686cf0c3f5d5a86aff3ca12020c923adc6c92' , c_d, 'Logon password', 'readonly', '', 'General')
+   conf.INCLUDED_SECTIONS = ccd('INCLUDED_SECTIONS', ['internet', 'new_devices', 'down_devices', 'events', 'ports'] , c_d, 'Notify on', 'multiselect', "['internet', 'new_devices', 'down_devices', 'events', 'ports', 'plugins']", 'General')
+   conf.SCAN_CYCLE_MINUTES = ccd('SCAN_CYCLE_MINUTES', 5 , c_d, 'Scan cycle delay (m)', 'integer', '', 'General')
+   conf.DAYS_TO_KEEP_EVENTS = ccd('DAYS_TO_KEEP_EVENTS', 90 , c_d, 'Delete events days', 'integer', '', 'General')
+   conf.REPORT_DASHBOARD_URL = ccd('REPORT_DASHBOARD_URL', 'http://pi.alert/' , c_d, 'PiAlert URL', 'text', '', 'General')
+   conf.DIG_GET_IP_ARG = ccd('DIG_GET_IP_ARG', '-4 myip.opendns.com @resolver1.opendns.com' , c_d, 'DIG arguments', 'text', '', 'General')
+   conf.UI_LANG = ccd('UI_LANG', 'English' , c_d, 'Language Interface', 'selecttext', "['English', 'German', 'Spanish']", 'General')
+   conf.UI_PRESENCE = ccd('UI_PRESENCE', ['online', 'offline', 'archived'] , c_d, 'Include in presence', 'multiselect', "['online', 'offline', 'archived']", 'General')

    # Email
-   REPORT_MAIL = ccd('REPORT_MAIL', False , c_d, 'Enable email', 'boolean', '', 'Email', ['test'])
-   SMTP_SERVER = ccd('SMTP_SERVER', '' , c_d,'SMTP server URL', 'text', '', 'Email')
-   SMTP_PORT = ccd('SMTP_PORT', 587 , c_d, 'SMTP port', 'integer', '', 'Email')
-   REPORT_TO = ccd('REPORT_TO', 'user@gmail.com' , c_d, 'Email to', 'text', '', 'Email')
-   REPORT_FROM = ccd('REPORT_FROM', 'Pi.Alert <user@gmail.com>' , c_d, 'Email Subject', 'text', '', 'Email')
-   SMTP_SKIP_LOGIN = ccd('SMTP_SKIP_LOGIN', False , c_d, 'SMTP skip login', 'boolean', '', 'Email')
-   SMTP_USER = ccd('SMTP_USER', '' , c_d, 'SMTP user', 'text', '', 'Email')
-   SMTP_PASS = ccd('SMTP_PASS', '' , c_d, 'SMTP password', 'password', '', 'Email')
-   SMTP_SKIP_TLS = ccd('SMTP_SKIP_TLS', False , c_d, 'SMTP skip TLS', 'boolean', '', 'Email')
-   SMTP_FORCE_SSL = ccd('SMTP_FORCE_SSL', False , c_d, 'Force SSL', 'boolean', '', 'Email')
+   conf.REPORT_MAIL = ccd('REPORT_MAIL', False , c_d, 'Enable email', 'boolean', '', 'Email', ['test'])
+   conf.SMTP_SERVER = ccd('SMTP_SERVER', '' , c_d,'SMTP server URL', 'text', '', 'Email')
+   conf.SMTP_PORT = ccd('SMTP_PORT', 587 , c_d, 'SMTP port', 'integer', '', 'Email')
+   conf.REPORT_TO = ccd('REPORT_TO', 'user@gmail.com' , c_d, 'Email to', 'text', '', 'Email')
+   conf.REPORT_FROM = ccd('REPORT_FROM', 'Pi.Alert <user@gmail.com>' , c_d, 'Email Subject', 'text', '', 'Email')
+   conf.SMTP_SKIP_LOGIN = ccd('SMTP_SKIP_LOGIN', False , c_d, 'SMTP skip login', 'boolean', '', 'Email')
+   conf.SMTP_USER = ccd('SMTP_USER', '' , c_d, 'SMTP user', 'text', '', 'Email')
+   conf.SMTP_PASS = ccd('SMTP_PASS', '' , c_d, 'SMTP password', 'password', '', 'Email')
+   conf.SMTP_SKIP_TLS = ccd('SMTP_SKIP_TLS', False , c_d, 'SMTP skip TLS', 'boolean', '', 'Email')
+   conf.SMTP_FORCE_SSL = ccd('SMTP_FORCE_SSL', False , c_d, 'Force SSL', 'boolean', '', 'Email')

    # Webhooks
-   REPORT_WEBHOOK = ccd('REPORT_WEBHOOK', False , c_d, 'Enable Webhooks', 'boolean', '', 'Webhooks', ['test'])
-   WEBHOOK_URL = ccd('WEBHOOK_URL', '' , c_d, 'Target URL', 'text', '', 'Webhooks')
-   WEBHOOK_PAYLOAD = ccd('WEBHOOK_PAYLOAD', 'json' , c_d, 'Payload type', 'selecttext', "['json', 'html', 'text']", 'Webhooks')
-   WEBHOOK_REQUEST_METHOD = ccd('WEBHOOK_REQUEST_METHOD', 'GET' , c_d, 'Req type', 'selecttext', "['GET', 'POST', 'PUT']", 'Webhooks')
+   conf.REPORT_WEBHOOK = ccd('REPORT_WEBHOOK', False , c_d, 'Enable Webhooks', 'boolean', '', 'Webhooks', ['test'])
+   conf.WEBHOOK_URL = ccd('WEBHOOK_URL', '' , c_d, 'Target URL', 'text', '', 'Webhooks')
+   conf.WEBHOOK_PAYLOAD = ccd('WEBHOOK_PAYLOAD', 'json' , c_d, 'Payload type', 'selecttext', "['json', 'html', 'text']", 'Webhooks')
+   conf.WEBHOOK_REQUEST_METHOD = ccd('WEBHOOK_REQUEST_METHOD', 'GET' , c_d, 'Req type', 'selecttext', "['GET', 'POST', 'PUT']", 'Webhooks')

    # Apprise
-   REPORT_APPRISE = ccd('REPORT_APPRISE', False , c_d, 'Enable Apprise', 'boolean', '', 'Apprise', ['test'])
-   APPRISE_HOST = ccd('APPRISE_HOST', '' , c_d, 'Apprise host URL', 'text', '', 'Apprise')
-   APPRISE_URL = ccd('APPRISE_URL', '' , c_d, 'Apprise notification URL', 'text', '', 'Apprise')
-   APPRISE_PAYLOAD = ccd('APPRISE_PAYLOAD', 'html' , c_d, 'Payload type', 'selecttext', "['html', 'text']", 'Apprise')
+   conf.REPORT_APPRISE = ccd('REPORT_APPRISE', False , c_d, 'Enable Apprise', 'boolean', '', 'Apprise', ['test'])
+   conf.APPRISE_HOST = ccd('APPRISE_HOST', '' , c_d, 'Apprise host URL', 'text', '', 'Apprise')
+   conf.APPRISE_URL = ccd('APPRISE_URL', '' , c_d, 'Apprise notification URL', 'text', '', 'Apprise')
+   conf.APPRISE_PAYLOAD = ccd('APPRISE_PAYLOAD', 'html' , c_d, 'Payload type', 'selecttext', "['html', 'text']", 'Apprise')

    # NTFY
-   REPORT_NTFY = ccd('REPORT_NTFY', False , c_d, 'Enable NTFY', 'boolean', '', 'NTFY', ['test'])
-   NTFY_HOST = ccd('NTFY_HOST', 'https://ntfy.sh' , c_d, 'NTFY host URL', 'text', '', 'NTFY')
-   NTFY_TOPIC = ccd('NTFY_TOPIC', '' , c_d, 'NTFY topic', 'text', '', 'NTFY')
-   NTFY_USER = ccd('NTFY_USER', '' , c_d, 'NTFY user', 'text', '', 'NTFY')
-   NTFY_PASSWORD = ccd('NTFY_PASSWORD', '' , c_d, 'NTFY password', 'password', '', 'NTFY')
+   conf.REPORT_NTFY = ccd('REPORT_NTFY', False , c_d, 'Enable NTFY', 'boolean', '', 'NTFY', ['test'])
+   conf.NTFY_HOST = ccd('NTFY_HOST', 'https://ntfy.sh' , c_d, 'NTFY host URL', 'text', '', 'NTFY')
+   conf.NTFY_TOPIC = ccd('NTFY_TOPIC', '' , c_d, 'NTFY topic', 'text', '', 'NTFY')
+   conf.NTFY_USER = ccd('NTFY_USER', '' , c_d, 'NTFY user', 'text', '', 'NTFY')
+   conf.NTFY_PASSWORD = ccd('NTFY_PASSWORD', '' , c_d, 'NTFY password', 'password', '', 'NTFY')

    # PUSHSAFER
-   REPORT_PUSHSAFER = ccd('REPORT_PUSHSAFER', False , c_d, 'Enable PUSHSAFER', 'boolean', '', 'PUSHSAFER', ['test'])
-   PUSHSAFER_TOKEN = ccd('PUSHSAFER_TOKEN', 'ApiKey' , c_d, 'PUSHSAFER token', 'text', '', 'PUSHSAFER')
+   conf.REPORT_PUSHSAFER = ccd('REPORT_PUSHSAFER', False , c_d, 'Enable PUSHSAFER', 'boolean', '', 'PUSHSAFER', ['test'])
+   conf.PUSHSAFER_TOKEN = ccd('PUSHSAFER_TOKEN', 'ApiKey' , c_d, 'PUSHSAFER token', 'text', '', 'PUSHSAFER')

    # MQTT
-   REPORT_MQTT = ccd('REPORT_MQTT', False , c_d, 'Enable MQTT', 'boolean', '', 'MQTT')
-   MQTT_BROKER = ccd('MQTT_BROKER', '' , c_d, 'MQTT broker', 'text', '', 'MQTT')
-   MQTT_PORT = ccd('MQTT_PORT', 1883 , c_d, 'MQTT broker port', 'integer', '', 'MQTT')
-   MQTT_USER = ccd('MQTT_USER', '' , c_d, 'MQTT user', 'text', '', 'MQTT')
-   MQTT_PASSWORD = ccd('MQTT_PASSWORD', '' , c_d, 'MQTT password', 'password', '', 'MQTT')
-   MQTT_QOS = ccd('MQTT_QOS', 0 , c_d, 'MQTT Quality of Service', 'selectinteger', "['0', '1', '2']", 'MQTT')
-   MQTT_DELAY_SEC = ccd('MQTT_DELAY_SEC', 2 , c_d, 'MQTT delay', 'selectinteger', "['2', '3', '4', '5']", 'MQTT')
+   conf.REPORT_MQTT = ccd('REPORT_MQTT', False , c_d, 'Enable MQTT', 'boolean', '', 'MQTT')
+   conf.MQTT_BROKER = ccd('MQTT_BROKER', '' , c_d, 'MQTT broker', 'text', '', 'MQTT')
+   conf.MQTT_PORT = ccd('MQTT_PORT', 1883 , c_d, 'MQTT broker port', 'integer', '', 'MQTT')
+   conf.MQTT_USER = ccd('MQTT_USER', '' , c_d, 'MQTT user', 'text', '', 'MQTT')
+   conf.MQTT_PASSWORD = ccd('MQTT_PASSWORD', '' , c_d, 'MQTT password', 'password', '', 'MQTT')
+   conf.MQTT_QOS = ccd('MQTT_QOS', 0 , c_d, 'MQTT Quality of Service', 'selectinteger', "['0', '1', '2']", 'MQTT')
+   conf.MQTT_DELAY_SEC = ccd('MQTT_DELAY_SEC', 2 , c_d, 'MQTT delay', 'selectinteger', "['2', '3', '4', '5']", 'MQTT')

    # DynDNS
-   DDNS_ACTIVE = ccd('DDNS_ACTIVE', False , c_d, 'Enable DynDNS', 'boolean', '', 'DynDNS')
-   DDNS_DOMAIN = ccd('DDNS_DOMAIN', 'your_domain.freeddns.org' , c_d, 'DynDNS domain URL', 'text', '', 'DynDNS')
-   DDNS_USER = ccd('DDNS_USER', 'dynu_user' , c_d, 'DynDNS user', 'text', '', 'DynDNS')
-   DDNS_PASSWORD = ccd('DDNS_PASSWORD', 'A0000000B0000000C0000000D0000000' , c_d, 'DynDNS password', 'password', '', 'DynDNS')
-   DDNS_UPDATE_URL = ccd('DDNS_UPDATE_URL', 'https://api.dynu.com/nic/update?' , c_d, 'DynDNS update URL', 'text', '', 'DynDNS')
+   conf.DDNS_ACTIVE = ccd('DDNS_ACTIVE', False , c_d, 'Enable DynDNS', 'boolean', '', 'DynDNS')
+   conf.DDNS_DOMAIN = ccd('DDNS_DOMAIN', 'your_domain.freeddns.org' , c_d, 'DynDNS domain URL', 'text', '', 'DynDNS')
+   conf.DDNS_USER = ccd('DDNS_USER', 'dynu_user' , c_d, 'DynDNS user', 'text', '', 'DynDNS')
+   conf.DDNS_PASSWORD = ccd('DDNS_PASSWORD', 'A0000000B0000000C0000000D0000000' , c_d, 'DynDNS password', 'password', '', 'DynDNS')
+   conf.DDNS_UPDATE_URL = ccd('DDNS_UPDATE_URL', 'https://api.dynu.com/nic/update?' , c_d, 'DynDNS update URL', 'text', '', 'DynDNS')

    # PiHole
-   PIHOLE_ACTIVE = ccd('PIHOLE_ACTIVE', False, c_d, 'Enable PiHole mapping', 'boolean', '', 'PiHole')
-   DHCP_ACTIVE = ccd('DHCP_ACTIVE', False , c_d, 'Enable PiHole DHCP', 'boolean', '', 'PiHole')
+   conf.PIHOLE_ACTIVE = ccd('PIHOLE_ACTIVE', False, c_d, 'Enable PiHole mapping', 'boolean', '', 'PiHole')
+   conf.DHCP_ACTIVE = ccd('DHCP_ACTIVE', False , c_d, 'Enable PiHole DHCP', 'boolean', '', 'PiHole')

    # PHOLUS
-   PHOLUS_ACTIVE = ccd('PHOLUS_ACTIVE', False , c_d, 'Enable Pholus scans', 'boolean', '', 'Pholus')
-   PHOLUS_TIMEOUT = ccd('PHOLUS_TIMEOUT', 20 , c_d, 'Pholus timeout', 'integer', '', 'Pholus')
-   PHOLUS_FORCE = ccd('PHOLUS_FORCE', False , c_d, 'Pholus force check', 'boolean', '', 'Pholus')
-   PHOLUS_RUN = ccd('PHOLUS_RUN', 'once' , c_d, 'Pholus enable schedule', 'selecttext', "['none', 'once', 'schedule']", 'Pholus')
-   PHOLUS_RUN_TIMEOUT = ccd('PHOLUS_RUN_TIMEOUT', 600 , c_d, 'Pholus timeout schedule', 'integer', '', 'Pholus')
-   PHOLUS_RUN_SCHD = ccd('PHOLUS_RUN_SCHD', '0 4 * * *' , c_d, 'Pholus schedule', 'text', '', 'Pholus')
-   PHOLUS_DAYS_DATA = ccd('PHOLUS_DAYS_DATA', 0 , c_d, 'Pholus keep days', 'integer', '', 'Pholus')
+   conf.PHOLUS_ACTIVE = ccd('PHOLUS_ACTIVE', False , c_d, 'Enable Pholus scans', 'boolean', '', 'Pholus')
+   conf.PHOLUS_TIMEOUT = ccd('PHOLUS_TIMEOUT', 20 , c_d, 'Pholus timeout', 'integer', '', 'Pholus')
+   conf.PHOLUS_FORCE = ccd('PHOLUS_FORCE', False , c_d, 'Pholus force check', 'boolean', '', 'Pholus')
+   conf.PHOLUS_RUN = ccd('PHOLUS_RUN', 'once' , c_d, 'Pholus enable schedule', 'selecttext', "['none', 'once', 'schedule']", 'Pholus')
+   conf.PHOLUS_RUN_TIMEOUT = ccd('PHOLUS_RUN_TIMEOUT', 600 , c_d, 'Pholus timeout schedule', 'integer', '', 'Pholus')
+   conf.PHOLUS_RUN_SCHD = ccd('PHOLUS_RUN_SCHD', '0 4 * * *' , c_d, 'Pholus schedule', 'text', '', 'Pholus')
+   conf.PHOLUS_DAYS_DATA = ccd('PHOLUS_DAYS_DATA', 0 , c_d, 'Pholus keep days', 'integer', '', 'Pholus')

    # Nmap
-   NMAP_ACTIVE = ccd('NMAP_ACTIVE', True , c_d, 'Enable Nmap scans', 'boolean', '', 'Nmap')
-   NMAP_TIMEOUT = ccd('NMAP_TIMEOUT', 150 , c_d, 'Nmap timeout', 'integer', '', 'Nmap')
-   NMAP_RUN = ccd('NMAP_RUN', 'none' , c_d, 'Nmap enable schedule', 'selecttext', "['none', 'once', 'schedule']", 'Nmap')
-   NMAP_RUN_SCHD = ccd('NMAP_RUN_SCHD', '0 2 * * *' , c_d, 'Nmap schedule', 'text', '', 'Nmap')
-   NMAP_ARGS = ccd('NMAP_ARGS', '-p -10000' , c_d, 'Nmap custom arguments', 'text', '', 'Nmap')
+   conf.NMAP_ACTIVE = ccd('NMAP_ACTIVE', True , c_d, 'Enable Nmap scans', 'boolean', '', 'Nmap')
+   conf.NMAP_TIMEOUT = ccd('NMAP_TIMEOUT', 150 , c_d, 'Nmap timeout', 'integer', '', 'Nmap')
+   conf.NMAP_RUN = ccd('NMAP_RUN', 'none' , c_d, 'Nmap enable schedule', 'selecttext', "['none', 'once', 'schedule']", 'Nmap')
+   conf.NMAP_RUN_SCHD = ccd('NMAP_RUN_SCHD', '0 2 * * *' , c_d, 'Nmap schedule', 'text', '', 'Nmap')
+   conf.NMAP_ARGS = ccd('NMAP_ARGS', '-p -10000' , c_d, 'Nmap custom arguments', 'text', '', 'Nmap')

    # API
-   API_CUSTOM_SQL = ccd('API_CUSTOM_SQL', 'SELECT * FROM Devices WHERE dev_PresentLastScan = 0' , c_d, 'Custom endpoint', 'text', '', 'API')
+   conf.API_CUSTOM_SQL = ccd('API_CUSTOM_SQL', 'SELECT * FROM Devices WHERE dev_PresentLastScan = 0' , c_d, 'Custom endpoint', 'text', '', 'API')

    # Prepare scheduler
-   global tz, mySchedules, plugins
+   #global tz, mySchedules, plugins

    # Init timezone in case it changed
-   tz = timezone(TIMEZONE)
+   conf.tz = timezone(conf.TIMEZONE)

    # global mySchedules
    # reset schedules
-   mySchedules = []
+   conf.mySchedules = []

    # init pholus schedule
-   pholusSchedule = Cron(PHOLUS_RUN_SCHD).schedule(start_date=datetime.datetime.now(tz))
-   mySchedules.append(schedule_class("pholus", pholusSchedule, pholusSchedule.next(), False))
+   pholusSchedule = Cron(conf.PHOLUS_RUN_SCHD).schedule(start_date=datetime.datetime.now(conf.tz))
+
+   conf.mySchedules.append(schedule_class("pholus", pholusSchedule, pholusSchedule.next(), False))
+   mylog('debug', "schedules (appended) : " + str(conf.mySchedules))

    # init nmap schedule
-   nmapSchedule = Cron(NMAP_RUN_SCHD).schedule(start_date=datetime.datetime.now(tz))
-   mySchedules.append(schedule_class("nmap", nmapSchedule, nmapSchedule.next(), False))
+   nmapSchedule = Cron(conf.NMAP_RUN_SCHD).schedule(start_date=datetime.datetime.now(conf.tz))
+   conf.mySchedules.append(schedule_class("nmap", nmapSchedule, nmapSchedule.next(), False))

    # Format and prepare the list of subnets
-   userSubnets = updateSubnets(SCAN_SUBNETS)
+   conf.userSubnets = updateSubnets(conf.SCAN_SUBNETS)

    # Plugins START
    # -----------------
-   if ENABLE_PLUGINS:
-       plugins = get_plugins_configs()
+   if conf.ENABLE_PLUGINS:
+       conf.plugins = get_plugins_configs()

-       mylog('none', ['[', timeNow(), '] Plugins: Number of dynamically loaded plugins: ', len(plugins)])
+       mylog('none', ['[', timeNow(), '] Plugins: Number of dynamically loaded plugins: ', len(conf.plugins)])

        # handle plugins
-       for plugin in plugins:
+       for plugin in conf.plugins:
            print_plugin_info(plugin, ['display_name','description'])

            pref = plugin["unique_prefix"]
@@ -227,13 +200,13 @@ def importConfigs (db):
                # Setup schedules
                if setFunction == 'RUN_SCHD':
-                   newSchedule = Cron(v).schedule(start_date=datetime.datetime.now(tz))
-                   mySchedules.append(schedule_class(pref, newSchedule, newSchedule.next(), False))
+                   newSchedule = Cron(v).schedule(start_date=datetime.datetime.now(conf.tz))
+                   conf.mySchedules.append(schedule_class(pref, newSchedule, newSchedule.next(), False))

            # Collect settings related language strings
            collect_lang_strings(db, set, pref + "_" + set["function"])

-       plugins_once_run = False
+       conf.plugins_once_run = False
    # -----------------
    # Plugins END
@@ -244,10 +217,10 @@ def importConfigs (db):
    # Insert settings into the DB
    sql.execute ("DELETE FROM Settings")
    sql.executemany ("""INSERT INTO Settings ("Code_Name", "Display_Name", "Description", "Type", "Options",
-       "RegEx", "Value", "Group", "Events" ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)""", mySettingsSQLsafe)
+       "RegEx", "Value", "Group", "Events" ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)""", conf.mySettingsSQLsafe)

    # Used to determine the next import
-   lastTimeImported = time.time()
+   conf.lastTimeImported = time.time()

    # Is used to display a message in the UI when old (outdated) settings are loaded
    initOrSetParam(db, "Back_Settings_Imported",(round(time.time() * 1000),) )
@@ -5,10 +5,10 @@ import re
# pialert modules
+import conf
from helper import timeNow, updateState
from logger import append_line_to_file, mylog
from const import logPath
-from conf import DDNS_ACTIVE, DDNS_DOMAIN, DDNS_UPDATE_URL, DDNS_PASSWORD, DDNS_USER

@@ -19,7 +19,7 @@ from conf import DDNS_ACTIVE, DDNS_DOMAIN, DDNS_UPDATE_URL, DDNS_PASSWORD, DDNS_
#===============================================================================
# INTERNET IP CHANGE
#===============================================================================
-def check_internet_IP (db, DIG_GET_IP_ARG):
+def check_internet_IP ( db ):

    # Header
    updateState(db,"Scan: Internet IP")
@@ -27,7 +27,7 @@ def check_internet_IP (db, DIG_GET_IP_ARG):

    # Get Internet IP
    mylog('verbose', [' Retrieving Internet IP:'])
-   internet_IP = get_internet_IP(DIG_GET_IP_ARG)
+   internet_IP = get_internet_IP(conf.DIG_GET_IP_ARG)
    # TESTING - Force IP
    # internet_IP = "1.2.3.4"
@@ -52,7 +52,7 @@ def check_internet_IP (db, DIG_GET_IP_ARG):
        mylog('verbose', [' No changes to perform'])

    # Get Dynamic DNS IP
-   if DDNS_ACTIVE :
+   if conf.DDNS_ACTIVE :
        mylog('verbose', [' Retrieving Dynamic DNS IP'])
        dns_IP = get_dynamic_DNS_IP()
@@ -157,7 +157,7 @@ def get_dynamic_DNS_IP ():
    # dig_args = ['dig', '+short', DDNS_DOMAIN, '@resolver1.opendns.com']

    # Using default DNS server
-   dig_args = ['dig', '+short', DDNS_DOMAIN]
+   dig_args = ['dig', '+short', conf.DDNS_DOMAIN]

    try:
        # try runnning a subprocess
@@ -182,10 +182,10 @@ def set_dynamic_DNS_IP ():
        # try runnning a subprocess
        # Update Dynamic IP
        curl_output = subprocess.check_output (['curl', '-s',
-                       DDNS_UPDATE_URL +
-                       'username=' + DDNS_USER +
-                       '&password=' + DDNS_PASSWORD +
-                       '&hostname=' + DDNS_DOMAIN],
+                       conf.DDNS_UPDATE_URL +
+                       'username=' + conf.DDNS_USER +
+                       '&password=' + conf.DDNS_PASSWORD +
+                       '&hostname=' + conf.DDNS_DOMAIN],
                       universal_newlines=True)
    except subprocess.CalledProcessError as e:
        # An error occured, handle it
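
The DDNS update still shells out to curl with the credentials concatenated into the query string. For comparison, a hedged sketch of the same call through the requests library (a substitution for illustration, not what the code does; note DDNS_UPDATE_URL defaults to a URL ending in '?', which requests would duplicate):

    import requests

    # same query the curl call assembles, with requests handling the URL encoding
    resp = requests.get(conf.DDNS_UPDATE_URL.rstrip('?'), params={
        'username': conf.DDNS_USER,
        'password': conf.DDNS_PASSWORD,
        'hostname': conf.DDNS_DOMAIN,
    }, timeout=10)
    print(resp.text)   # provider's answer, e.g. "good <ip>" for dynu-style APIs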
@@ -3,8 +3,8 @@ import time
import re
from paho.mqtt import client as mqtt_client

+import conf
from logger import mylog
-from conf import MQTT_BROKER, MQTT_DELAY_SEC, MQTT_PASSWORD, MQTT_PORT, MQTT_QOS, MQTT_USER
from database import get_all_devices, get_device_stats
from helper import bytes_to_string, sanitize_string

@@ -35,7 +35,7 @@ def publish_mqtt(client, topic, message):
    result = client.publish(
            topic=topic,
            payload=message,
-           qos=MQTT_QOS,
+           qos=conf.MQTT_QOS,
            retain=True,
        )
@@ -106,7 +106,7 @@ def publish_sensor(client, sensorConf):
    # add the sensor to the global list to keep track of succesfully added sensors
    if publish_mqtt(client, topic, message):
        # hack - delay adding to the queue in case the process is
-       time.sleep(MQTT_DELAY_SEC) # restarted and previous publish processes aborted
+       time.sleep(conf.MQTT_DELAY_SEC) # restarted and previous publish processes aborted
        # (it takes ~2s to update a sensor config on the broker)
        mqtt_sensors.append(sensorConf)
@@ -131,10 +131,10 @@ def mqtt_create_client():

    client = mqtt_client.Client('PiAlert') # Set Connecting Client ID
-   client.username_pw_set(MQTT_USER, MQTT_PASSWORD)
+   client.username_pw_set(conf.MQTT_USER, conf.MQTT_PASSWORD)
    client.on_connect = on_connect
    client.on_disconnect = on_disconnect
-   client.connect(MQTT_BROKER, MQTT_PORT)
+   client.connect(conf.MQTT_BROKER, conf.MQTT_PORT)
    client.loop_start()

    return client
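
For context, the paho-mqtt (1.x API) connection pattern used here in self-contained form; broker address and credentials are placeholders:

    from paho.mqtt import client as mqtt_client

    def mqtt_create_client():
        def on_connect(client, userdata, flags, rc):
            # rc == 0 means the broker accepted the connection
            print('connected' if rc == 0 else 'connect failed, rc=%s' % rc)

        client = mqtt_client.Client('PiAlert')       # client ID on the broker
        client.username_pw_set('user', 'password')   # placeholder credentials
        client.on_connect = on_connect
        client.connect('broker.example', 1883)       # placeholder broker:port
        client.loop_start()                          # network loop in a thread
        return client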
@@ -177,7 +177,7 @@ def mqtt_start():
    # Get all devices
    devices = get_all_devices()

-   sec_delay = len(devices) * int(MQTT_DELAY_SEC)*5
+   sec_delay = len(devices) * int(conf.MQTT_DELAY_SEC)*5

    mylog('info', [" Estimated delay: ", (sec_delay), 's ', '(', round(sec_delay/60,1) , 'min)' ])
@@ -1,7 +1,6 @@
-from conf import DHCP_ACTIVE, PIHOLE_ACTIVE, cycle, ENABLE_ARPSCAN
+import conf
from arpscan import execute_arpscan
from database import insertOnlineHistory
from device import create_new_devices, print_scan_stats, save_scanned_devices, update_devices_data_from_scan, update_devices_names
@@ -27,10 +26,10 @@ def scan_network (db):
    mylog('verbose', ['[', timeNow(), '] Scan Devices:' ])

    # Query ScanCycle properties
-   scanCycle_data = query_ScanCycle_Data (True)
+   scanCycle_data = query_ScanCycle_Data (db, True)
    if scanCycle_data is None:
        mylog('none', ['\n*************** ERROR ***************'])
-       mylog('none', ['ScanCycle %s not found' % cycle ])
+       mylog('none', ['ScanCycle %s not found' % conf.cycle ])
        mylog('none', [' Exiting...\n'])
        return False
@@ -41,45 +40,45 @@ def scan_network (db):
    # arp-scan command
    arpscan_devices = []
-   if ENABLE_ARPSCAN:
+   if conf.ENABLE_ARPSCAN:
        mylog('verbose', [' arp-scan start'])
-       arpscan_devices = execute_arpscan ()
+       arpscan_devices = execute_arpscan (conf.userSubnets)
        print_log ('arp-scan ends')

    # Pi-hole method
-   if PIHOLE_ACTIVE :
+   if conf.PIHOLE_ACTIVE :
        mylog('verbose', [' Pi-hole start'])
        copy_pihole_network(db)
        db.commitDB()

    # DHCP Leases method
-   if DHCP_ACTIVE :
+   if conf.DHCP_ACTIVE :
        mylog('verbose', [' DHCP Leases start'])
        read_DHCP_leases (db)
        db.commitDB()

    # Load current scan data
    mylog('verbose', [' Processing scan results'])
-   save_scanned_devices (arpscan_devices, cycle_interval)
+   save_scanned_devices (db, arpscan_devices, cycle_interval)

    # Print stats
-   print_log ('Print Stats')
-   print_scan_stats()
-   print_log ('Stats end')
+   mylog ('none', 'Print Stats')
+   print_scan_stats(db)
+   mylog ('none', 'Stats end')

    # Create Events
    mylog('verbose', [' Updating DB Info'])
    mylog('verbose', [' Sessions Events (connect / discconnect)'])
-   insert_events()
+   insert_events(db)

    # Create New Devices
    # after create events -> avoid 'connection' event
    mylog('verbose', [' Creating new devices'])
-   create_new_devices ()
+   create_new_devices (db)

    # Update devices info
    mylog('verbose', [' Updating Devices Info'])
-   update_devices_data_from_scan ()
+   update_devices_data_from_scan (db)

    # Resolve devices names
    print_log (' Resolve devices names')
@@ -99,7 +98,7 @@ def scan_network (db):
    # Sessions snapshot
    mylog('verbose', [' Inserting scan results into Online_History'])
-   insertOnlineHistory(db,cycle)
+   insertOnlineHistory(db,conf.cycle)

    # Skip repeated notifications
    mylog('verbose', [' Skipping repeated notifications'])
@@ -150,7 +149,7 @@ def void_ghost_disconnections (db):
                        AND eve_DateTime >=
                            DATETIME (?, '-' || cic_EveryXmin ||' minutes')
                    ) """,
-                   (startTime, cycle, startTime) )
+                   (startTime, conf.cycle, startTime) )

    # Void connect paired events
    print_log ('Void - 2 Paired events')
@@ -168,7 +167,7 @@ def void_ghost_disconnections (db):
                        AND eve_DateTime >=
                            DATETIME (?, '-' || cic_EveryXmin ||' minutes')
                    ) """,
-                   (cycle, startTime) )
+                   (conf.cycle, startTime) )

    # Void disconnect ghost events
    print_log ('Void - 3 Disconnect ghost events')
@@ -187,7 +186,7 @@ def void_ghost_disconnections (db):
                        AND eve_DateTime >=
                            DATETIME (?, '-' || cic_EveryXmin ||' minutes')
                    ) """,
-                   (cycle, startTime) )
+                   (conf.cycle, startTime) )
    print_log ('Void end')
    db.commitDB()
@@ -267,7 +266,7 @@ def insert_events (db):
                    AND NOT EXISTS (SELECT 1 FROM CurrentScan
                                    WHERE dev_MAC = cur_MAC
                                    AND dev_ScanCycle = cur_ScanCycle) """,
-                   (startTime, cycle) )
+                   (startTime, conf.cycle) )

    # Check new connections
    print_log ('Events 2 - New Connections')
@@ -279,7 +278,7 @@ def insert_events (db):
                    WHERE dev_MAC = cur_MAC AND dev_ScanCycle = cur_ScanCycle
                    AND dev_PresentLastScan = 0
                    AND dev_ScanCycle = ? """,
-                   (startTime, cycle) )
+                   (startTime, conf.cycle) )

    # Check disconnections
    print_log ('Events 3 - Disconnections')
@@ -295,7 +294,7 @@ def insert_events (db):
                    AND NOT EXISTS (SELECT 1 FROM CurrentScan
                                    WHERE dev_MAC = cur_MAC
                                    AND dev_ScanCycle = cur_ScanCycle) """,
-                   (startTime, cycle) )
+                   (startTime, conf.cycle) )

    # Check IP Changed
    print_log ('Events 4 - IP Changes')
@@ -308,7 +307,7 @@ def insert_events (db):
                    WHERE dev_MAC = cur_MAC AND dev_ScanCycle = cur_ScanCycle
                    AND dev_ScanCycle = ?
                    AND dev_LastIP <> cur_IP """,
-                   (startTime, cycle) )
+                   (startTime, conf.cycle) )
    print_log ('Events end')
@@ -1,8 +1,8 @@
import subprocess

+import conf
from const import logPath
-from conf import NMAP_ARGS, NMAP_TIMEOUT
from database import sql_nmap_scan_all
from helper import json_struc, timeNow, updateState
from logger import append_line_to_file, mylog
@@ -33,7 +33,7 @@ def performNmapScan(db, devicesToScan):

    if len(devicesToScan) > 0:

-       timeoutSec = NMAP_TIMEOUT
+       timeoutSec = conf.NMAP_TIMEOUT

        devTotal = len(devicesToScan)
@@ -48,7 +48,7 @@ def performNmapScan(db, devicesToScan):
        # Execute command
        output = ""
        # prepare arguments from user supplied ones
-       nmapArgs = ['nmap'] + NMAP_ARGS.split() + [device["dev_LastIP"]]
+       nmapArgs = ['nmap'] + conf.NMAP_ARGS.split() + [device["dev_LastIP"]]

        progress = ' (' + str(devIndex+1) + '/' + str(devTotal) + ')'
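
The argument list is built by splitting the user-supplied NMAP_ARGS string and appending the target IP. A hedged sketch of how such a list is typically executed with the timeout from conf; the subprocess call itself is not shown in the hunk above, so this is an illustration rather than the project's exact code:

    import subprocess

    nmapArgs = ['nmap'] + conf.NMAP_ARGS.split() + ['192.168.1.10']  # sample target

    try:
        # capture stdout as text; abort the scan after conf.NMAP_TIMEOUT seconds
        output = subprocess.check_output(nmapArgs,
                                         universal_newlines=True,
                                         timeout=conf.NMAP_TIMEOUT)
    except subprocess.TimeoutExpired:
        output = ''   # device too slow or unreachable; treated as an empty result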
@@ -42,10 +42,10 @@ import multiprocessing
# pialert modules
+import conf
from const import *
-from conf import *
from logger import mylog
-from helper import filePermissions, timeNow, updateState
+from helper import filePermissions, isNewVersion, timeNow, updateState
from api import update_api
from files import get_file_content
from networkscan import scan_network
@@ -62,10 +62,6 @@ from internet import check_internet_IP

# Global variables

-userSubnets = []
changedPorts_json_struc = None
time_started = datetime.datetime.now()
cron_instance = Cron()
@@ -83,7 +79,7 @@ sql_connection = None
#===============================================================================
cycle = ""
check_report = [1, "internet_IP", "update_vendors_silent"]
-plugins_once_run = False
+conf.plugins_once_run = False

# timestamps of last execution times
startTime = time_started
@@ -103,9 +99,7 @@ def main ():
    global time_started, cycle, last_network_scan, last_internet_IP_scan, last_run, last_cleanup, last_update_vendors
    # second set of global variables
-   global startTime, log_timestamp, plugins_once_run

    # To-Do all these DB Globals need to be removed
    global db, sql, sql_connection

    # check file permissions and fix if required
    filePermissions()
@@ -116,7 +110,7 @@ def main ():
    db.openDB()

    # To-Do replace the following to lines with the db class
-   sql_connection = db.sql_connection
+   # sql_connection = db.sql_connection
    sql = db.sql

    # Upgrade DB if needed
@@ -134,10 +128,15 @@ def main ():
        mylog('debug', ['[', timeNow(), '] [MAIN] Stating loop'])

        # re-load user configuration and plugins
+       mylog('debug', "tz before config : " + str(conf.tz))
        importConfigs(db)
+       mylog('debug', "tz after config : " + str(conf.tz))

+       # check if new version is available
+       conf.newVersionAvailable = isNewVersion(False)

        # Handle plugins executed ONCE
-       if ENABLE_PLUGINS and plugins_once_run == False:
+       if conf.ENABLE_PLUGINS and conf.plugins_once_run == False:
            run_plugin_scripts(db, 'once')
            plugins_once_run = True
@@ -161,7 +160,7 @@ def main ():
        startTime = startTime.replace (microsecond=0)

        # Check if any plugins need to run on schedule
-       if ENABLE_PLUGINS:
+       if conf.ENABLE_PLUGINS:
            run_plugin_scripts(db,'schedule')

        # determine run/scan type based on passed time
@@ -171,7 +170,7 @@ def main ():
        if last_internet_IP_scan + datetime.timedelta(minutes=3) < time_started:
            cycle = 'internet_IP'
            last_internet_IP_scan = time_started
-           check_internet_IP(db,DIG_GET_IP_ARG)
+           check_internet_IP(db)

        # Update vendors once a week
        if last_update_vendors + datetime.timedelta(days = 7) < time_started:
@@ -181,43 +180,48 @@ def main ():
            update_devices_MAC_vendors()

        # Execute scheduled or one-off Pholus scan if enabled and run conditions fulfilled
-       if PHOLUS_RUN == "schedule" or PHOLUS_RUN == "once":
+       if conf.PHOLUS_RUN == "schedule" or conf.PHOLUS_RUN == "once":

-           pholusSchedule = [sch for sch in mySchedules if sch.service == "pholus"][0]
+           mylog('debug', "PHOLUS_RUN_SCHD: " + conf.PHOLUS_RUN_SCHD)
+           mylog('debug', "schedules : " + str(conf.mySchedules))
+
+           pholusSchedule = [sch for sch in conf.mySchedules if sch.service == "pholus"][0]
            run = False

            # run once after application starts
-           if PHOLUS_RUN == "once" and pholusSchedule.last_run == 0:
+           if conf.PHOLUS_RUN == "once" and pholusSchedule.last_run == 0:
                run = True

            # run if overdue scheduled time
-           if PHOLUS_RUN == "schedule":
+           if conf.PHOLUS_RUN == "schedule":
                run = pholusSchedule.runScheduleCheck()

            if run:
-               pholusSchedule.last_run = datetime.datetime.now(tz).replace(microsecond=0)
-               performPholusScan(db, PHOLUS_RUN_TIMEOUT, userSubnets)
+               pholusSchedule.last_run = datetime.datetime.now(conf.tz).replace(microsecond=0)
+               performPholusScan(db, conf.PHOLUS_RUN_TIMEOUT, conf.userSubnets)

        # Execute scheduled or one-off Nmap scan if enabled and run conditions fulfilled
-       if NMAP_RUN == "schedule" or NMAP_RUN == "once":
+       if conf.NMAP_RUN == "schedule" or conf.NMAP_RUN == "once":

-           nmapSchedule = [sch for sch in mySchedules if sch.service == "nmap"][0]
+           nmapSchedule = [sch for sch in conf.mySchedules if sch.service == "nmap"][0]
            run = False

            # run once after application starts
-           if NMAP_RUN == "once" and nmapSchedule.last_run == 0:
+           if conf.NMAP_RUN == "once" and conf.nmapSchedule.last_run == 0:
                run = True

            # run if overdue scheduled time
-           if NMAP_RUN == "schedule":
+           if conf.NMAP_RUN == "schedule":
                run = nmapSchedule.runScheduleCheck()

            if run:
-               nmapSchedule.last_run = datetime.datetime.now(tz).replace(microsecond=0)
+               conf.nmapSchedule.last_run = datetime.datetime.now(conf.tz).replace(microsecond=0)
                performNmapScan(db, get_all_devices(db))

        # Perform a network scan via arp-scan or pihole
-       if last_network_scan + datetime.timedelta(minutes=SCAN_CYCLE_MINUTES) < time_started:
+       if last_network_scan + datetime.timedelta(minutes=conf.SCAN_CYCLE_MINUTES) < time_started:
            last_network_scan = time_started
            cycle = 1 # network scan
            mylog('verbose', ['[', timeNow(), '] cycle:',cycle])
@@ -248,7 +252,7 @@ def main ():

        # DEBUG end ++++++++++++++++++++++++++++++++++++++++++++++++++++++
        # Run splugin scripts which are set to run every timne after a scan finished
-       if ENABLE_PLUGINS:
+       if conf.ENABLE_PLUGINS:
            run_plugin_scripts(db,'always_after_scan')

@@ -262,11 +266,11 @@ def main ():
            # new devices were found
            if len(newDevices) > 0:
                # run all plugins registered to be run when new devices are found
-               if ENABLE_PLUGINS:
+               if conf.ENABLE_PLUGINS:
                    run_plugin_scripts(db, 'on_new_device')

                # Scan newly found devices with Nmap if enabled
-               if NMAP_ACTIVE and len(newDevices) > 0:
+               if conf.NMAP_ACTIVE and len(newDevices) > 0:
                    performNmapScan( db, newDevices)

        # send all configured notifications
@@ -277,7 +281,7 @@ def main ():
            last_cleanup = time_started
            cycle = 'cleanup'
            mylog('verbose', ['[', timeNow(), '] cycle:',cycle])
-           db.cleanup_database(startTime, DAYS_TO_KEEP_EVENTS, PHOLUS_DAYS_DATA)
+           db.cleanup_database(startTime, conf.DAYS_TO_KEEP_EVENTS, conf.PHOLUS_DAYS_DATA)

        # Commit SQL
        db.commitDB()
@@ -314,6 +318,7 @@ def main ():

#-------------------------------------------------------------------------------
def check_and_run_event(db):
+   sql = db.sql # TO-DO
    sql.execute(""" select * from Parameters where par_ID = "Front_Event" """)
    rows = sql.fetchall()
@@ -1,3 +1,5 @@
|
||||
""" module to import db and leases from PiHole """
|
||||
|
||||
from const import piholeDB, piholeDhcpleases
|
||||
|
||||
#-------------------------------------------------------------------------------
|
||||
|
||||
@@ -5,11 +5,11 @@ import datetime
|
||||
from collections import namedtuple
|
||||
|
||||
# pialert modules
|
||||
import conf
|
||||
from const import pluginsPath, logPath
|
||||
from conf import mySettings, plugins
|
||||
from files import get_file_content, write_file
|
||||
from logger import mylog
|
||||
from helper import updateState
|
||||
from helper import timeNowTZ, updateState
|
||||
|
||||
|
||||
|
||||
@@ -23,14 +23,14 @@ def timeNow():
#-------------------------------------------------------------------------------
def run_plugin_scripts(db, runType):

global plugins, tz, mySchedules
# global plugins, tz, mySchedules

# Header
updateState(db,"Run: Plugins")

mylog('debug', [' [Plugins] Check if any plugins need to be executed on run type: ', runType])

for plugin in plugins:
for plugin in conf.plugins:

shouldRun = False

@@ -43,13 +43,13 @@ def run_plugin_scripts(db, runType):
prefix = plugin["unique_prefix"]

# check if any schedule contains a unique plugin prefix matching the current plugin
for schd in mySchedules:
for schd in conf.mySchedules:
if schd.service == prefix:
# Check if schedule overdue
shouldRun = schd.runScheduleCheck()
if shouldRun:
# note the last time the scheduled plugin run was executed
schd.last_run = datetime.datetime.now(tz).replace(microsecond=0)
schd.last_run = timeNowTZ()

if shouldRun:
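Replacing datetime.datetime.now(tz).replace(microsecond=0) with timeNowTZ() moves the timezone lookup behind one helper. A sketch of what such a helper presumably looks like (an assumption based on the call sites, not verified repo code):

    import datetime
    import conf

    def timeNowTZ():
        # current time in the configured timezone, truncated to whole seconds
        return datetime.datetime.now(conf.tz).replace(microsecond=0)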
@@ -107,14 +107,14 @@ def get_plugin_setting(plugin, function_key):
def get_setting(key):
result = None
# index order: key, name, desc, inputtype, options, regex, result, group, events
for set in mySettings:
for set in conf.mySettings:
if set[0] == key:
result = set

if result is None:
mylog('info', [' Error - setting_missing - Setting not found for key: ', key])
mylog('info', [' Error - logging the settings into file: ', logPath + '/setting_missing.json'])
write_file (logPath + '/setting_missing.json', json.dumps({ 'data' : mySettings}))
write_file (logPath + '/setting_missing.json', json.dumps({ 'data' : conf.mySettings}))

return result

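Per the index-order comment, each entry in conf.mySettings is a tuple whose seventh field (index 6) holds the stored result. A hedged usage sketch; the key is illustrative:

    setting = get_setting('SCAN_SUBNETS')
    if setting is not None:
        value = setting[6]    # position 6 is 'result' per the index-order comment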
@@ -10,9 +10,9 @@ import subprocess
import requests
from json2table import convert

# pialert modules
import conf
from const import pialertPath, logPath
# from pialert.api import update_api
from conf import *
from database import get_table_as_json
from files import write_file
from helper import generate_mac_links, isNewVersion, removeDuplicateNewLines, timeNow, hide_email, json_struc, updateState
@@ -39,7 +39,7 @@ class noti_struc:


#-------------------------------------------------------------------------------
def construct_notifications(sqlQuery, tableTitle, skipText = False, suppliedJsonStruct = None):
def construct_notifications(db, sqlQuery, tableTitle, skipText = False, suppliedJsonStruct = None):

if suppliedJsonStruct is None and sqlQuery == "":
return noti_struc("", "", "")
@@ -52,7 +52,7 @@ def construct_notifications(sqlQuery, tableTitle, skipText = False, suppliedJson
text_line = '{}\t{}\n'

if suppliedJsonStruct is None:
json_struc = get_table_as_json(sqlQuery)
json_struc = get_table_as_json(db, sqlQuery)
else:
json_struc = suppliedJsonStruct

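Threading db through the signature replaces an implicit global: construct_notifications hands the same handle to get_table_as_json, and every call site below gains a db argument. A typical call, sketched from the hunks that follow:

    notiStruc = construct_notifications(db, sqlQuery, "Events")
    json_events = notiStruc.json["data"]    # the struct the webhook payload consumes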
@@ -92,7 +92,7 @@ def send_notifications (db):
sql = db.sql #TO-DO
global mail_text, mail_html, json_final, changedPorts_json_struc, partial_html, partial_txt, partial_json

deviceUrl = REPORT_DASHBOARD_URL + '/deviceDetails.php?mac='
deviceUrl = conf.REPORT_DASHBOARD_URL + '/deviceDetails.php?mac='
plugins_report = False

# Reporting section
@@ -125,7 +125,7 @@ def send_notifications (db):

# Open html Template
template_file = open(pialertPath + '/back/report_template.html', 'r')
if isNewVersion(db):
if conf.newVersionAvailable :
template_file = open(pialertPath + '/back/report_template_new_version.html', 'r')

mail_html = template_file.read()
@@ -139,13 +139,13 @@ def send_notifications (db):
mail_text = mail_text.replace ('<SERVER_NAME>', socket.gethostname() )
mail_html = mail_html.replace ('<SERVER_NAME>', socket.gethostname() )

if 'internet' in INCLUDED_SECTIONS:
if 'internet' in conf.INCLUDED_SECTIONS:
# Compose Internet Section
sqlQuery = """SELECT eve_MAC as MAC, eve_IP as IP, eve_DateTime as Datetime, eve_EventType as "Event Type", eve_AdditionalInfo as "More info" FROM Events
WHERE eve_PendingAlertEmail = 1 AND eve_MAC = 'Internet'
ORDER BY eve_DateTime"""

notiStruc = construct_notifications(sqlQuery, "Internet IP change")
notiStruc = construct_notifications(db, sqlQuery, "Internet IP change")

# collect "internet" (IP changes) for the webhook json
json_internet = notiStruc.json["data"]
@@ -153,14 +153,14 @@ def send_notifications (db):
mail_text = mail_text.replace ('<SECTION_INTERNET>', notiStruc.text + '\n')
mail_html = mail_html.replace ('<INTERNET_TABLE>', notiStruc.html)

if 'new_devices' in INCLUDED_SECTIONS:
if 'new_devices' in conf.INCLUDED_SECTIONS:
# Compose New Devices Section
sqlQuery = """SELECT eve_MAC as MAC, eve_DateTime as Datetime, dev_LastIP as IP, eve_EventType as "Event Type", dev_Name as "Device name", dev_Comments as Comments FROM Events_Devices
WHERE eve_PendingAlertEmail = 1
AND eve_EventType = 'New Device'
ORDER BY eve_DateTime"""

notiStruc = construct_notifications(sqlQuery, "New devices")
notiStruc = construct_notifications(db, sqlQuery, "New devices")

# collect "new_devices" for the webhook json
json_new_devices = notiStruc.json["data"]
@@ -168,14 +168,14 @@ def send_notifications (db):
mail_text = mail_text.replace ('<SECTION_NEW_DEVICES>', notiStruc.text + '\n')
mail_html = mail_html.replace ('<NEW_DEVICES_TABLE>', notiStruc.html)

if 'down_devices' in INCLUDED_SECTIONS:
if 'down_devices' in conf.INCLUDED_SECTIONS:
# Compose Devices Down Section
sqlQuery = """SELECT eve_MAC as MAC, eve_DateTime as Datetime, dev_LastIP as IP, eve_EventType as "Event Type", dev_Name as "Device name", dev_Comments as Comments FROM Events_Devices
WHERE eve_PendingAlertEmail = 1
AND eve_EventType = 'Device Down'
ORDER BY eve_DateTime"""

notiStruc = construct_notifications(sqlQuery, "Down devices")
notiStruc = construct_notifications(db, sqlQuery, "Down devices")

# collect "down_devices" for the webhook json
json_down_devices = notiStruc.json["data"]
@@ -183,7 +183,7 @@ def send_notifications (db):
mail_text = mail_text.replace ('<SECTION_DEVICES_DOWN>', notiStruc.text + '\n')
mail_html = mail_html.replace ('<DOWN_DEVICES_TABLE>', notiStruc.html)

if 'events' in INCLUDED_SECTIONS:
if 'events' in conf.INCLUDED_SECTIONS:
# Compose Events Section
sqlQuery = """SELECT eve_MAC as MAC, eve_DateTime as Datetime, dev_LastIP as IP, eve_EventType as "Event Type", dev_Name as "Device name", dev_Comments as Comments FROM Events_Devices
WHERE eve_PendingAlertEmail = 1
@@ -191,7 +191,7 @@ def send_notifications (db):
'IP Changed')
ORDER BY eve_DateTime"""

notiStruc = construct_notifications(sqlQuery, "Events")
notiStruc = construct_notifications(db, sqlQuery, "Events")

# collect "events" for the webhook json
json_events = notiStruc.json["data"]
@@ -199,12 +199,12 @@ def send_notifications (db):
mail_text = mail_text.replace ('<SECTION_EVENTS>', notiStruc.text + '\n')
mail_html = mail_html.replace ('<EVENTS_TABLE>', notiStruc.html)

if 'ports' in INCLUDED_SECTIONS:
if 'ports' in conf.INCLUDED_SECTIONS:
# collect "ports" for the webhook json
if changedPorts_json_struc is not None:
json_ports = changedPorts_json_struc.json["data"]

notiStruc = construct_notifications("", "Ports", True, changedPorts_json_struc)
notiStruc = construct_notifications(db, "", "Ports", True, changedPorts_json_struc)

mail_html = mail_html.replace ('<PORTS_TABLE>', notiStruc.html)

@@ -214,11 +214,11 @@ def send_notifications (db):

mail_text = mail_text.replace ('<PORTS_TABLE>', portsTxt )

if 'plugins' in INCLUDED_SECTIONS and ENABLE_PLUGINS:
if 'plugins' in conf.INCLUDED_SECTIONS and conf.ENABLE_PLUGINS:
# Compose Plugins Section
sqlQuery = """SELECT Plugin, Object_PrimaryId, Object_SecondaryId, DateTimeChanged, Watched_Value1, Watched_Value2, Watched_Value3, Watched_Value4, Status from Plugins_Events"""

notiStruc = construct_notifications(sqlQuery, "Plugins")
notiStruc = construct_notifications(db, sqlQuery, "Plugins")

# collect "plugins" for the webhook json
json_plugins = notiStruc.json["data"]
@@ -250,44 +250,44 @@ def send_notifications (db):
write_file (logPath + '/report_output.html', mail_html)

# Send Mail
if json_internet != [] or json_new_devices != [] or json_down_devices != [] or json_events != [] or json_ports != [] or debug_force_notification or plugins_report:
if json_internet != [] or json_new_devices != [] or json_down_devices != [] or json_events != [] or json_ports != [] or conf.debug_force_notification or plugins_report:

# update_api(True) # TO-DO

mylog('none', [' Changes detected, sending reports'])

if REPORT_MAIL and check_config('email'):
if conf.REPORT_MAIL and check_config('email'):
updateState(db,"Send: Email")
mylog('info', [' Sending report by Email'])
send_email (mail_text, mail_html)
else :
mylog('verbose', [' Skip email'])
if REPORT_APPRISE and check_config('apprise'):
if conf.REPORT_APPRISE and check_config('apprise'):
updateState(db,"Send: Apprise")
mylog('info', [' Sending report by Apprise'])
send_apprise (mail_html, mail_text)
else :
mylog('verbose', [' Skip Apprise'])
if REPORT_WEBHOOK and check_config('webhook'):
if conf.REPORT_WEBHOOK and check_config('webhook'):
updateState(db,"Send: Webhook")
mylog('info', [' Sending report by Webhook'])
send_webhook (json_final, mail_text)
else :
mylog('verbose', [' Skip webhook'])
if REPORT_NTFY and check_config('ntfy'):
if conf.REPORT_NTFY and check_config('ntfy'):
updateState(db,"Send: NTFY")
mylog('info', [' Sending report by NTFY'])
send_ntfy (mail_text)
else :
mylog('verbose', [' Skip NTFY'])
if REPORT_PUSHSAFER and check_config('pushsafer'):
if conf.REPORT_PUSHSAFER and check_config('pushsafer'):
updateState(db,"Send: PUSHSAFER")
mylog('info', [' Sending report by PUSHSAFER'])
send_pushsafer (mail_text)
else :
mylog('verbose', [' Skip PUSHSAFER'])
# Update MQTT entities
if REPORT_MQTT and check_config('mqtt'):
if conf.REPORT_MQTT and check_config('mqtt'):
updateState(db,"Send: MQTT")
mylog('info', [' Establishing MQTT thread'])
mqtt_start()
@@ -320,42 +320,42 @@ def send_notifications (db):
def check_config(service):

if service == 'email':
if SMTP_SERVER == '' or REPORT_FROM == '' or REPORT_TO == '':
if conf.SMTP_SERVER == '' or conf.REPORT_FROM == '' or conf.REPORT_TO == '':
mylog('none', [' Error: Email service not set up correctly. Check your pialert.conf SMTP_*, REPORT_FROM and REPORT_TO variables.'])
return False
else:
return True

if service == 'apprise':
if APPRISE_URL == '' or APPRISE_HOST == '':
if conf.APPRISE_URL == '' or conf.APPRISE_HOST == '':
mylog('none', [' Error: Apprise service not set up correctly. Check your pialert.conf APPRISE_* variables.'])
return False
else:
return True

if service == 'webhook':
if WEBHOOK_URL == '':
if conf.WEBHOOK_URL == '':
mylog('none', [' Error: Webhook service not set up correctly. Check your pialert.conf WEBHOOK_* variables.'])
return False
else:
return True

if service == 'ntfy':
if NTFY_HOST == '' or NTFY_TOPIC == '':
if conf.NTFY_HOST == '' or conf.NTFY_TOPIC == '':
mylog('none', [' Error: NTFY service not set up correctly. Check your pialert.conf NTFY_* variables.'])
return False
else:
return True

if service == 'pushsafer':
if PUSHSAFER_TOKEN == 'ApiKey':
if conf.PUSHSAFER_TOKEN == 'ApiKey':
mylog('none', [' Error: Pushsafer service not set up correctly. Check your pialert.conf PUSHSAFER_TOKEN variable.'])
return False
else:
return True

if service == 'mqtt':
if MQTT_BROKER == '' or MQTT_PORT == '' or MQTT_USER == '' or MQTT_PASSWORD == '':
if conf.MQTT_BROKER == '' or conf.MQTT_PORT == '' or conf.MQTT_USER == '' or conf.MQTT_PASSWORD == '':
mylog('none', [' Error: MQTT service not set up correctly. Check your pialert.conf MQTT_* variables.'])
return False
else:
@@ -371,19 +371,18 @@ def format_table (html, thValue, props, newThValue = ''):

#-------------------------------------------------------------------------------
def format_report_section (pActive, pSection, pTable, pText, pHTML):
global mail_text
global mail_html


# Replace section text
if pActive :
mail_text = mail_text.replace ('<'+ pTable +'>', pText)
mail_html = mail_html.replace ('<'+ pTable +'>', pHTML)
conf.mail_text = conf.mail_text.replace ('<'+ pTable +'>', pText)
conf.mail_html = conf.mail_html.replace ('<'+ pTable +'>', pHTML)

mail_text = remove_tag (mail_text, pSection)
mail_html = remove_tag (mail_html, pSection)
conf.mail_text = remove_tag (conf.mail_text, pSection)
conf.mail_html = remove_tag (conf.mail_html, pSection)
else:
mail_text = remove_section (mail_text, pSection)
mail_html = remove_section (mail_html, pSection)
conf.mail_text = remove_section (conf.mail_text, pSection)
conf.mail_html = remove_section (conf.mail_html, pSection)

#-------------------------------------------------------------------------------
def remove_section (pText, pSection):
@@ -409,14 +408,14 @@ def remove_tag (pText, pTag):
def send_email (pText, pHTML):

# Print more info for debugging if LOG_LEVEL == 'debug'
if LOG_LEVEL == 'debug':
print_log ('REPORT_TO: ' + hide_email(str(REPORT_TO)) + ' SMTP_USER: ' + hide_email(str(SMTP_USER)))
if conf.LOG_LEVEL == 'debug':
print_log ('REPORT_TO: ' + hide_email(str(conf.REPORT_TO)) + ' SMTP_USER: ' + hide_email(str(conf.SMTP_USER)))

# Compose email
msg = MIMEMultipart('alternative')
msg['Subject'] = 'Pi.Alert Report'
msg['From'] = REPORT_FROM
msg['To'] = REPORT_TO
msg['From'] = conf.REPORT_FROM
msg['To'] = conf.REPORT_TO
msg.attach (MIMEText (pText, 'plain'))
msg.attach (MIMEText (pHTML, 'html'))

@@ -426,46 +425,46 @@ def send_email (pText, pHTML):

try:
# Send mail
failedAt = print_log('Trying to open connection to ' + str(SMTP_SERVER) + ':' + str(SMTP_PORT))
failedAt = print_log('Trying to open connection to ' + str(conf.SMTP_SERVER) + ':' + str(conf.SMTP_PORT))

if SMTP_FORCE_SSL:
if conf.SMTP_FORCE_SSL:
failedAt = print_log('SMTP_FORCE_SSL == True so using .SMTP_SSL()')
if SMTP_PORT == 0:
if conf.SMTP_PORT == 0:
failedAt = print_log('SMTP_PORT == 0 so sending .SMTP_SSL(SMTP_SERVER)')
smtp_connection = smtplib.SMTP_SSL(SMTP_SERVER)
smtp_connection = smtplib.SMTP_SSL(conf.SMTP_SERVER)
else:
failedAt = print_log('SMTP_PORT != 0 so sending .SMTP_SSL(SMTP_SERVER, SMTP_PORT)')
smtp_connection = smtplib.SMTP_SSL(SMTP_SERVER, SMTP_PORT)
smtp_connection = smtplib.SMTP_SSL(conf.SMTP_SERVER, conf.SMTP_PORT)

else:
failedAt = print_log('SMTP_FORCE_SSL == False so using .SMTP()')
if SMTP_PORT == 0:
if conf.SMTP_PORT == 0:
failedAt = print_log('SMTP_PORT == 0 so sending .SMTP(SMTP_SERVER)')
smtp_connection = smtplib.SMTP (SMTP_SERVER)
smtp_connection = smtplib.SMTP (conf.SMTP_SERVER)
else:
failedAt = print_log('SMTP_PORT != 0 so sending .SMTP(SMTP_SERVER, SMTP_PORT)')
smtp_connection = smtplib.SMTP (SMTP_SERVER, SMTP_PORT)
smtp_connection = smtplib.SMTP (conf.SMTP_SERVER, conf.SMTP_PORT)

failedAt = print_log('Setting SMTP debug level')

# Set the log level to debug for the communication between SMTP server and client
if LOG_LEVEL == 'debug':
if conf.LOG_LEVEL == 'debug':
smtp_connection.set_debuglevel(1)

failedAt = print_log( 'Sending .ehlo()')
smtp_connection.ehlo()

if not SMTP_SKIP_TLS:
if not conf.SMTP_SKIP_TLS:
failedAt = print_log('SMTP_SKIP_TLS == False so sending .starttls()')
smtp_connection.starttls()
failedAt = print_log('SMTP_SKIP_TLS == False so sending .ehlo()')
smtp_connection.ehlo()
if not SMTP_SKIP_LOGIN:
if not conf.SMTP_SKIP_LOGIN:
failedAt = print_log('SMTP_SKIP_LOGIN == False so sending .login()')
smtp_connection.login (SMTP_USER, SMTP_PASS)
smtp_connection.login (conf.SMTP_USER, conf.SMTP_PASS)

failedAt = print_log('Sending .sendmail()')
smtp_connection.sendmail (REPORT_FROM, REPORT_TO, msg.as_string())
smtp_connection.sendmail (conf.REPORT_FROM, conf.REPORT_TO, msg.as_string())
smtp_connection.quit()
except smtplib.SMTPAuthenticationError as e:
mylog('none', [' ERROR: Failed at - ', failedAt])
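The four connection branches differ only in the class used (SMTP_SSL vs SMTP) and in whether a port argument is passed. A hypothetical condensation of the same selection logic, for orientation only (open_smtp_connection is not a function in this codebase):

    import smtplib

    def open_smtp_connection(server, port=0, force_ssl=False):
        # SMTP_SSL when forcing SSL, plain SMTP otherwise;
        # omit the port argument entirely when it is 0
        cls = smtplib.SMTP_SSL if force_ssl else smtplib.SMTP
        return cls(server) if port == 0 else cls(server, port)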
@@ -480,20 +479,20 @@ def send_email (pText, pHTML):
def send_ntfy (_Text):
headers = {
"Title": "Pi.Alert Notification",
"Actions": "view, Open Dashboard, "+ REPORT_DASHBOARD_URL,
"Actions": "view, Open Dashboard, "+ conf.REPORT_DASHBOARD_URL,
"Priority": "urgent",
"Tags": "warning"
}
# if username and password are set generate hash and update header
if NTFY_USER != "" and NTFY_PASSWORD != "":
if conf.NTFY_USER != "" and conf.NTFY_PASSWORD != "":
# Generate hash for basic auth
usernamepassword = "{}:{}".format(NTFY_USER,NTFY_PASSWORD)
basichash = b64encode(bytes(NTFY_USER + ':' + NTFY_PASSWORD, "utf-8")).decode("ascii")
usernamepassword = "{}:{}".format(conf.NTFY_USER,conf.NTFY_PASSWORD)
basichash = b64encode(bytes(conf.NTFY_USER + ':' + conf.NTFY_PASSWORD, "utf-8")).decode("ascii")

# add authorization header with hash
headers["Authorization"] = "Basic {}".format(basichash)

requests.post("{}/{}".format( NTFY_HOST, NTFY_TOPIC),
requests.post("{}/{}".format( conf.NTFY_HOST, conf.NTFY_TOPIC),
data=_Text,
headers=headers)

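The Authorization header is standard HTTP Basic auth: base64 of user:password. A self-contained check of the encoding, with made-up credentials:

    from base64 import b64encode

    basichash = b64encode("user:pass".encode("utf-8")).decode("ascii")
    print(basichash)                      # dXNlcjpwYXNz
    print("Basic {}".format(basichash))   # header value sent to the ntfy host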
@@ -507,9 +506,9 @@ def send_pushsafer (_Text):
"i" : 148,
"c" : '#ef7f7f',
"d" : 'a',
"u" : REPORT_DASHBOARD_URL,
"u" : conf.REPORT_DASHBOARD_URL,
"ut" : 'Open Pi.Alert',
"k" : PUSHSAFER_TOKEN,
"k" : conf.PUSHSAFER_TOKEN,
}
requests.post(url, data=post_fields)

@@ -517,20 +516,20 @@ def send_pushsafer (_Text):
def send_webhook (_json, _html):

# use data type based on specified payload type
if WEBHOOK_PAYLOAD == 'json':
if conf.WEBHOOK_PAYLOAD == 'json':
payloadData = _json
if WEBHOOK_PAYLOAD == 'html':
if conf.WEBHOOK_PAYLOAD == 'html':
payloadData = _html
if WEBHOOK_PAYLOAD == 'text':
if conf.WEBHOOK_PAYLOAD == 'text':
payloadData = to_text(_json)

# Define slack-compatible payload
_json_payload = { "text": payloadData } if WEBHOOK_PAYLOAD == 'text' else {
_json_payload = { "text": payloadData } if conf.WEBHOOK_PAYLOAD == 'text' else {
"username": "Pi.Alert",
"text": "There are new notifications",
"attachments": [{
"title": "Pi.Alert Notifications",
"title_link": REPORT_DASHBOARD_URL,
"title_link": conf.REPORT_DASHBOARD_URL,
"text": payloadData
}]
}
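For reference, the payload shapes this branch produces (all values illustrative): with WEBHOOK_PAYLOAD == 'text' the body collapses to {"text": payloadData}; otherwise it is the Slack-style attachment object:

    _json_payload = {
        "username": "Pi.Alert",
        "text": "There are new notifications",
        "attachments": [{
            "title": "Pi.Alert Notifications",
            "title_link": "http://pi.alert/",     # conf.REPORT_DASHBOARD_URL
            "text": "NEW DEVICES:\n..."           # payloadData
        }]
    }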
@@ -539,12 +538,12 @@ def send_webhook (_json, _html):
write_file (logPath + '/webhook_payload.json', json.dumps(_json_payload))

# Using the Slack-Compatible Webhook endpoint for Discord so that the same payload can be used for both
if(WEBHOOK_URL.startswith('https://discord.com/api/webhooks/') and not WEBHOOK_URL.endswith("/slack")):
_WEBHOOK_URL = f"{WEBHOOK_URL}/slack"
if(conf.WEBHOOK_URL.startswith('https://discord.com/api/webhooks/') and not conf.WEBHOOK_URL.endswith("/slack")):
_WEBHOOK_URL = f"{conf.WEBHOOK_URL}/slack"
curlParams = ["curl","-i","-H", "Content-Type:application/json" ,"-d", json.dumps(_json_payload), _WEBHOOK_URL]
else:
_WEBHOOK_URL = WEBHOOK_URL
curlParams = ["curl","-i","-X", WEBHOOK_REQUEST_METHOD ,"-H", "Content-Type:application/json" ,"-d", json.dumps(_json_payload), _WEBHOOK_URL]
_WEBHOOK_URL = conf.WEBHOOK_URL
curlParams = ["curl","-i","-X", conf.WEBHOOK_REQUEST_METHOD ,"-H", "Content-Type:application/json" ,"-d", json.dumps(_json_payload), _WEBHOOK_URL]

# execute CURL call
try:
@@ -565,19 +564,19 @@ def send_apprise (html, text):
#Define Apprise compatible payload (https://github.com/caronc/apprise-api#stateless-solution)
payload = html

if APPRISE_PAYLOAD == 'text':
if conf.APPRISE_PAYLOAD == 'text':
payload = text

_json_payload={
"urls": APPRISE_URL,
"urls": conf.APPRISE_URL,
"title": "Pi.Alert Notifications",
"format": APPRISE_PAYLOAD,
"format": conf.APPRISE_PAYLOAD,
"body": payload
}

try:
# try running a subprocess
p = subprocess.Popen(["curl","-i","-X", "POST" ,"-H", "Content-Type:application/json" ,"-d", json.dumps(_json_payload), APPRISE_HOST], stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
p = subprocess.Popen(["curl","-i","-X", "POST" ,"-H", "Content-Type:application/json" ,"-d", json.dumps(_json_payload), conf.APPRISE_HOST], stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
stdout, stderr = p.communicate()
# write stdout and stderr into .log files for debugging if needed
logResult (stdout, stderr) # TO-DO should be changed to mylog
@@ -588,19 +587,19 @@ def send_apprise (html, text):

def to_text(_json):
payloadData = ""
if len(_json['internet']) > 0 and 'internet' in INCLUDED_SECTIONS:
if len(_json['internet']) > 0 and 'internet' in conf.INCLUDED_SECTIONS:
payloadData += "INTERNET\n"
for event in _json['internet']:
payloadData += event[3] + ' on ' + event[2] + '. ' + event[4] + '. New address:' + event[1] + '\n'

if len(_json['new_devices']) > 0 and 'new_devices' in INCLUDED_SECTIONS:
if len(_json['new_devices']) > 0 and 'new_devices' in conf.INCLUDED_SECTIONS:
payloadData += "NEW DEVICES:\n"
for event in _json['new_devices']:
if event[4] is None:
event[4] = event[11]
payloadData += event[1] + ' - ' + event[4] + '\n'

if len(_json['down_devices']) > 0 and 'down_devices' in INCLUDED_SECTIONS:
if len(_json['down_devices']) > 0 and 'down_devices' in conf.INCLUDED_SECTIONS:
write_file (logPath + '/down_devices_example.log', _json['down_devices'])
payloadData += 'DOWN DEVICES:\n'
for event in _json['down_devices']:
@@ -608,7 +607,7 @@ def to_text(_json):
event[4] = event[11]
payloadData += event[1] + ' - ' + event[4] + '\n'

if len(_json['events']) > 0 and 'events' in INCLUDED_SECTIONS:
if len(_json['events']) > 0 and 'events' in conf.INCLUDED_SECTIONS:
payloadData += "EVENTS:\n"
for event in _json['events']:
if event[8] != "Internet":

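For orientation, the plain-text digest to_text assembles looks roughly like this, assuming one new and one down device in the webhook json (names and addresses invented):

    NEW DEVICES:
    192.168.1.23 - Chromecast
    DOWN DEVICES:
    192.168.1.11 - NAS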
pialert/scheduler.py (new file, 41 additions)
@@ -0,0 +1,41 @@
""" class to manage schedules """
|
||||
import datetime
|
||||
|
||||
from logger import print_log
|
||||
import conf
|
||||
|
||||
#-------------------------------------------------------------------------------
|
||||
class schedule_class:
|
||||
def __init__(self, service, scheduleObject, last_next_schedule, was_last_schedule_used, last_run = 0):
|
||||
self.service = service
|
||||
self.scheduleObject = scheduleObject
|
||||
self.last_next_schedule = last_next_schedule
|
||||
self.last_run = last_run
|
||||
self.was_last_schedule_used = was_last_schedule_used
|
||||
|
||||
def runScheduleCheck(self):
|
||||
|
||||
result = False
|
||||
|
||||
# Initialize the last run time if never run before
|
||||
if self.last_run == 0:
|
||||
self.last_run = (datetime.datetime.now(conf.tz) - datetime.timedelta(days=365)).replace(microsecond=0)
|
||||
|
||||
# get the current time with the currently specified timezone
|
||||
nowTime = datetime.datetime.now(conf.tz).replace(microsecond=0)
|
||||
|
||||
# Run the schedule if the current time is past the schedule time we saved last time and
|
||||
# (maybe the following check is unnecessary:)
|
||||
# if the last run is past the last time we run a scheduled Pholus scan
|
||||
if nowTime > self.last_next_schedule and self.last_run < self.last_next_schedule:
|
||||
print_log(f'Scheduler run for {self.service}: YES')
|
||||
self.was_last_schedule_used = True
|
||||
result = True
|
||||
else:
|
||||
print_log(f'Scheduler run for {self.service}: NO')
|
||||
|
||||
if self.was_last_schedule_used:
|
||||
self.was_last_schedule_used = False
|
||||
self.last_next_schedule = self.scheduleObject.next()
|
||||
|
||||
return result
|
||||
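A minimal usage sketch for the new class. The interval object here is a hypothetical stand-in; the real scheduleObject is whatever cron-style object the config wires in, the only contract being a next() method that returns the following run time as a datetime. It also assumes conf.tz has already been initialised to a tzinfo value by the config import:

    import datetime
    import conf
    from scheduler import schedule_class

    class EveryNMinutes:
        # hypothetical stand-in exposing next() -> datetime
        def __init__(self, minutes):
            self.minutes = minutes
        def next(self):
            return datetime.datetime.now(conf.tz) + datetime.timedelta(minutes=self.minutes)

    sch = schedule_class("nmap", EveryNMinutes(90),
                         last_next_schedule=datetime.datetime.now(conf.tz),
                         was_last_schedule_used=False)
    conf.mySchedules.append(sch)

    if sch.runScheduleCheck():    # True once the saved next-run time has passed
        sch.last_run = datetime.datetime.now(conf.tz).replace(microsecond=0)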