Merge branch 'main' into Pi.Alert-New-Style-Reports

This commit is contained in:
Carlos V
2023-08-07 12:41:45 +02:00
committed by GitHub
25 changed files with 304 additions and 247 deletions

View File

@@ -28,7 +28,7 @@
"Navigation_Events" : "Ereignisse",
"Navigation_Maintenance" : "Wartung",
"Navigation_Settings" : "Einstellung",
"Navigation_SystemInfo" : "Systeminformationen",
"Navigation_SystemInfo" : "System info",
"Navigation_Network" : "Netzwerk",
"Navigation_HelpFAQ" : "Hilfe / FAQ",
"Device_Title" : "Geräte",

View File

@@ -42,7 +42,7 @@
"Navigation_Plugins" : "Plugins",
"Navigation_Maintenance" : "Maintenance",
"Navigation_Settings" : "Settings",
"Navigation_SystemInfo" : "System Information",
"Navigation_SystemInfo" : "System info",
"Navigation_Flows" : "Flows",
"Navigation_HelpFAQ" : "Help / FAQ",
"Device_Title" : "Devices",

View File

@@ -41,8 +41,9 @@
"Navigation_Plugins" : "Plugins",
"Navigation_Maintenance" : "Mantenimiento",
"Navigation_Settings" : "Configuración",
"Navigation_SystemInfo" : "Información del sistema",
"Navigation_HelpFAQ" : "Ayuda / Preguntas frecuentes",
"Navigation_Flows" : "Flows",
"Navigation_SystemInfo" : "Info del sistema",
"Navigation_HelpFAQ" : "Ayuda / FAQ",
"Device_Title" : "Dispositivos",
"Device_Shortcut_AllDevices" : "Todos",
"Device_Shortcut_Connected" : "Conectado(s)",
@@ -138,6 +139,7 @@
"DevDetail_Tab_Events" : "Eventos",
"DevDetail_Tab_Pholus" : "<i class=\"fa fa-search\"></i> Pholus",
"DevDetail_Tab_PholusEmpty" : "No se ha encontrado nada para este dispositivo con Pholus.",
"DevDetail_Tab_Plugins" : "<i class=\"fa fa-plug\"></i> Plugins",
"DevDetail_Tab_NmapTableHeader" : "Resultados del escaneo programado",
"DevDetail_Tab_NmapTableText" : "Establece la programación en los <a href=\"/settings.php#NMAP_ACTIVE\">Ajustes</a>",
"DevDetail_Tab_NmapEmpty" : "Ningún puerto detectado en este dispositivo con Nmap.",
@@ -210,6 +212,7 @@
"Maintenance_version" : "Actualizaciones de la aplicación",
"Maintenance_new_version" : "🆕 Una nueva versión está disponible. Comprueba las <a href=\"https://github.com/jokob-sk/Pi.Alert/releases\" target=\"_blank\">notas de lanzamiento</a>.",
"Maintenance_current_version" : "No hay actualizaciones disponibles. Comprueba en que <a href=\"https://github.com/jokob-sk/Pi.Alert/issues/138\" target=\"_blank\">se está trabajando</a>.",
"Maintenance_built_on" : "Construido el",
"Maintenance_database_path" : "Ruta de la base de datos:",
"Maintenance_database_size" : "Tamaño de base de datos:",
"Maintenance_database_lastmod" : "Última modificación:",
@@ -390,6 +393,8 @@
"HelpFAQ_Cat_Presence_401_text" : "Si esto sucede, tiene la posibilidad de eliminar los eventos del dispositivo en cuestión (vista de detalles). Otra posibilidad sería encender el dispositivo y esperar hasta que Pi.Alert reconozca el dispositivo como \"online\" con el siguiente escaneo y luego simplemente apagar el dispositivo nuevamente. Ahora Pi.Alert debería anotar correctamente el estado del dispositivo en la base de datos con el próximo escaneo.",
"HelpFAQ_Cat_Network_600_head" : "¿Para qué sirve esta sección?",
"HelpFAQ_Cat_Network_600_text" : "Esta sección debería ofrecerle la posibilidad de mapear la asignación de sus dispositivos de red. Para ello, puede crear uno o más conmutadores, WLAN, enrutadores, etc., proporcionarles un número de puerto si es necesario y asignarles dispositivos ya detectados. Esta asignación se realiza en la vista detallada del dispositivo a asignar. Por lo tanto, es posible determinar rápidamente a qué puerto está conectado un host y si está en línea. Es posible asignar un dispositivo a múltiples puertos (agrupación de puertos), así como múltiples dispositivos a un puerto (máquinas virtuales).",
"HelpFAQ_Cat_Network_601_head" : "¿Hay otros documentos?",
"HelpFAQ_Cat_Network_601_text" : "¡Sí, los hay! Marque <a target=\"_blank\" href=\"https://github.com/jokob-sk/Pi.Alert/blob/main/docs/\">todos los documentos</a> para más información.",
"test_event_tooltip" : "Guarda tus cambios antes de probar nuevos ajustes.",
"test_event_icon" : "fa-vial-circle-check",
"run_event_tooltip" : "Activa el ajuste y guarda tus cambios antes de ejecutarlo.",
@@ -398,7 +403,9 @@
"general_event_description" : "El evento que has ejecutado puede tardar un rato mientras finalizan procesos en segundo plano. La ejecución ha terminado cuando ves <code>finalizado</code> abajo. Comprueba el <a href='/maintenance.php#tab_Logging'>registro de error</a> si no has obtenido el resultado esperado. <br/> <br/> Estado: ",
"Plugins_Unprocessed_Events" : "Eventos sin procesar",
"Plugins_Objects" : "Objetos del Plugin",
"Plugins_DeleteAll" : "Eliminar todo (se ignoran los filtros)",
"Plugins_History" : "Historial de eventos",
"Plugins_Filters_Mac" : "Filtro MAC",
"settings_missing" : "Actualiza la página, no todos los ajustes se han cargado. Probablemente sea por una sobrecarga de la base de datos.",
"settings_missing_block" : "No puedes guardar los ajustes sin establecer todas las claves. Actualiza la página. Problabmente esté causado por una sobrecarga de la base de datos.",
"settings_old" : "Los ajustes mostrados en esta página están desactualizados. Probablemente sea por un escaneo en proceso. Los ajustes se guardan en el archivo <code>pialert.conf</code>, pero el proceso en segundo plano no las ha importado todavía a la base de datos. Puedes esperar a que los ajustes se actualicen para evitar sobreescribirlos con los ajustes antiguos. Si te da igual perder los ajustes desde la última vez que guardaste y ahora, siéntete libre de guardarlos de nuevo. También hay copias de seguridad creadas si necesitas comparar tus ajustes más tarde.",

View File

@@ -63,7 +63,7 @@ UI displays outdated values until the API endpoints get refreshed.
## Plugin file structure overview
> Folder name must be the same as the code name value in: `"code_name": "<value>"`
> ⚠️Folder name must be the same as the code name value in: `"code_name": "<value>"`
> Unique prefix needs to be unique compared to the other settings prefixes, e.g.: the prefix `APPRISE` is already in use.
| File | Required (plugin type) | Description |

View File

@@ -1,10 +1,18 @@
{
"code_name": "arpscan",
"code_name": "arp_scan",
"unique_prefix": "ARPSCAN",
"enabled": true,
"data_source": "script",
"mapped_to_table": "CurrentScan",
"data_filters": [
{
"compare_column" : "Object_PrimaryID",
"compare_operator" : "==",
"compare_field_id": "txtMacFilter",
"compare_js_template": "'{value}'.toString()",
"compare_use_quotes": true
}
],
"localized": ["display_name", "description", "icon"],
"display_name": [
@@ -36,8 +44,8 @@
{
"function": "RUN",
"type": "text.select",
"default_value":"disabled",
"options": ["disabled", "once", "schedule", "scan_cycle", "always_after_scan", "on_new_device"],
"default_value":"schedule",
"options": ["disabled", "once", "schedule", "always_after_scan", "on_new_device"],
"localized": ["name", "description"],
"name" :[{
"language_code":"en_us",
@@ -120,7 +128,7 @@
{
"function": "REPORT_ON",
"type": "text.multiselect",
"default_value": ["new", "watched-changed"],
"default_value": ["new"],
"options": ["new", "watched-changed", "watched-not-changed"],
"localized": ["name", "description"],
"name": [

View File

@@ -5,6 +5,7 @@ import pathlib
import argparse
import sys
import re
import base64
import subprocess
from time import strftime
@@ -18,22 +19,54 @@ RESULT_FILE = os.path.join(CUR_PATH, 'last_result.log')
def main():
# sample
# /home/pi/pialert/front/plugins/arp_scan/script.py userSubnets=b'MTkyLjE2OC4xLjAvMjQgLS1pbnRlcmZhY2U9ZXRoMQ=='
# the script expects a parameter in the format of userSubnets=subnet1,subnet2,...
parser = argparse.ArgumentParser(description='Import devices from settings')
parser.add_argument('userSubnets', nargs='+', help="list of subnets with options")
values = parser.parse_args()
import base64
# Assuming Plugin_Objects is a class or function that reads data from the RESULT_FILE
# and returns a list of objects called 'devices'.
devices = Plugin_Objects(RESULT_FILE)
subnets_list = []
# Print a message to indicate that the script is starting.
print('In script:')
if isinstance(values.userSubnets, list):
subnets_list = values.userSubnets
# Assuming 'values' is a dictionary or object that contains a key 'userSubnets'
# which holds a list of user-submitted subnets.
# Printing the userSubnets list to check its content.
print(values.userSubnets)
# Extract the base64-encoded subnet information from the first element of the userSubnets list.
# The format of the element is assumed to be like 'userSubnets=b<base64-encoded-data>'.
userSubnetsParamBase64 = values.userSubnets[0].split('userSubnets=b')[1]
# Printing the extracted base64-encoded subnet information.
print(userSubnetsParamBase64)
# Decode the base64-encoded subnet information to get the actual subnet information in ASCII format.
userSubnetsParam = base64.b64decode(userSubnetsParamBase64).decode('ascii')
# Print the decoded subnet information.
print('userSubnetsParam:')
print(userSubnetsParam)
# Check if the decoded subnet information contains multiple subnets separated by commas.
# If it does, split the string into a list of individual subnets.
# Otherwise, create a list with a single element containing the subnet information.
if ',' in userSubnetsParam:
subnets_list = userSubnetsParam.split(',')
else:
subnets_list = [values.userSubnets]
subnets_list = [userSubnetsParam]
# Execute the ARP scanning process on the list of subnets (whether it's one or multiple subnets).
# The function 'execute_arpscan' is assumed to be defined elsewhere in the code.
unique_devices = execute_arpscan(subnets_list)
for device in unique_devices:
devices.add_object(
primaryId=device['mac'], # MAC (Device Name)

View File

@@ -2,7 +2,7 @@
"code_name": "snmp_discovery",
"unique_prefix": "SNMPDSC",
"enabled": true,
"data_source": "pyton-script",
"data_source": "script",
"data_filters": [
{
"compare_column" : "Object_PrimaryID",

View File

@@ -12,7 +12,6 @@
// cvc90 2023 https://github.com/cvc90 GNU GPLv3
//------------------------------------------------------------------------------
error_reporting(0);// Turn off php errors
require 'php/templates/header.php';
?>
<!-- Page ------------------------------------------------------------------ -->

View File

@@ -24,7 +24,7 @@ import multiprocessing
import conf
from const import *
from logger import mylog
from helper import filePermissions, isNewVersion, timeNow, updateState
from helper import filePermissions, isNewVersion, timeNowTZ, updateState
from api import update_api
from networkscan import process_scan, scan_network
from initialise import importConfigs
@@ -74,28 +74,10 @@ main structure of Pi Alert
"""
def main ():
mylog('debug', ['[MAIN] Setting up ...'])
mylog('none', ['[MAIN] Setting up ...']) # has to be level 'none' as user config not loaded yet
conf.time_started = datetime.datetime.now()
conf.cycle = ""
conf.check_report = [1, "internet_IP", "update_vendors_silent"]
conf.plugins_once_run = False
mylog('none', [f'[conf.tz] Setting up ...{conf.tz}'])
# to be deleted if not used
conf.log_timestamp = conf.time_started
#cron_instance = Cron()
# timestamps of last execution times
startTime = conf.time_started
now_minus_24h = conf.time_started - datetime.timedelta(hours = 24)
# set these times to the past to force the first run
last_network_scan = now_minus_24h
last_internet_IP_scan = now_minus_24h
last_scan_run = now_minus_24h
last_cleanup = now_minus_24h
last_update_vendors = conf.time_started - datetime.timedelta(days = 6) # update vendors 24h after first run and then once a week
last_version_check = now_minus_24h
# indicates, if a new version is available
conf.newVersionAvailable = False
@@ -120,17 +102,25 @@ def main ():
while True:
# update time started
loop_start_time = timeNow()
# re-load user configuration and plugins
importConfigs(db)
# update time started
conf.loop_start_time = timeNowTZ()
# TODO fix these
loop_start_time = conf.loop_start_time # TODO fix
last_update_vendors = conf.last_update_vendors
last_network_scan = conf.last_network_scan
last_cleanup = conf.last_cleanup
last_version_check = conf.last_version_check
# check if new version is available / only check once an hour
if last_version_check + datetime.timedelta(hours=1) < loop_start_time :
if conf.last_version_check + datetime.timedelta(hours=1) < loop_start_time :
# if newVersionAvailable is already true the function does nothing and returns true again
mylog('debug', [f"[Version check] Last version check timestamp: {last_version_check}"])
last_version_check = loop_start_time
mylog('debug', [f"[Version check] Last version check timestamp: {conf.last_version_check}"])
conf.last_version_check = loop_start_time
conf.newVersionAvailable = isNewVersion(conf.newVersionAvailable)
# Handle plugins executed ONCE
@@ -145,10 +135,11 @@ def main ():
update_api(db)
# proceed if 1 minute passed
if last_scan_run + datetime.timedelta(minutes=1) < loop_start_time :
if conf.last_scan_run + datetime.timedelta(minutes=1) < conf.loop_start_time :
# last time any scan or maintenance/upkeep was run
last_scan_run = loop_start_time
conf.last_scan_run = loop_start_time
last_internet_IP_scan = conf.last_internet_IP_scan
# Header
updateState(db,"Process: Start")
@@ -210,7 +201,7 @@ def main ():
run = nmapSchedule.runScheduleCheck()
if run:
nmapSchedule.last_run = timeNow()
nmapSchedule.last_run = timeNowTZ()
performNmapScan(db, get_all_devices(db))
# todo replace the scans with plugins

View File

@@ -92,5 +92,5 @@ class api_endpoint_class:
# update hash
apiEndpoints[changedIndex].hash = self.hash
else:
mylog('info', [f'[API] ERROR Updating {self.fileName}'])
mylog('minimal', [f'[API] ERROR Updating {self.fileName}'])

View File

@@ -18,9 +18,17 @@ lastImportedConfFile = 1.1
plugins_once_run = False
newVersionAvailable = False
time_started = ''
startTime = ''
last_network_scan = ''
last_internet_IP_scan = ''
last_scan_run = ''
last_cleanup = ''
last_update_vendors = ''
last_version_check = ''
check_report = []
log_timestamp = 0
arpscan_devices = []
# for MQTT
mqtt_connected_to_broker = False
mqtt_sensors = []
@@ -28,8 +36,6 @@ client = None # mqtt client
# for notifications
changedPorts_json_struc = None
# ACTUAL CONFIGURATION ITEMS set to defaults
# General

View File

@@ -6,7 +6,7 @@ import sqlite3
from const import fullDbPath, sql_devices_stats, sql_devices_all
from logger import mylog
from helper import json_struc, initOrSetParam, row_to_json, timeNow #, updateState
from helper import json_struc, initOrSetParam, row_to_json, timeNowTZ #, updateState
@@ -397,7 +397,7 @@ class DB():
self.sql.execute("DROP TABLE CurrentScan;")
self.sql.execute(""" CREATE TABLE CurrentScan (
cur_ScanCycle INTEGER NOT NULL,
cur_ScanCycle INTEGER,
cur_MAC STRING(50) NOT NULL COLLATE NOCASE,
cur_IP STRING(50) NOT NULL COLLATE NOCASE,
cur_Vendor STRING(250),
@@ -478,7 +478,7 @@ def get_all_devices(db):
#-------------------------------------------------------------------------------
def insertOnlineHistory(db):
sql = db.sql #TO-DO
startTime = timeNow()
startTime = timeNowTZ()
# Add to History
# only run this if the scans have run

View File

@@ -5,7 +5,7 @@
import subprocess
import conf
from helper import timeNow
from helper import timeNowTZ
from plugin import get_setting_value
from scanners.internet import check_IP_format, get_internet_IP
from logger import mylog, print_log
@@ -21,20 +21,10 @@ def save_scanned_devices (db):
# mylog('debug', ['[ARP Scan] Detected devices:', len(p_arpscan_devices)])
# handled by the ARPSCAN plugin
# # Delete previous scan data
# sql.execute ("DELETE FROM CurrentScan")
# if len(p_arpscan_devices) > 0:
# # Insert new arp-scan devices
# sql.executemany ("INSERT INTO CurrentScan (cur_ScanCycle, cur_MAC, "+
# " cur_IP, cur_Vendor, cur_ScanMethod) "+
# "VALUES (1, :mac, :ip, :hw, 'arp-scan')",
# p_arpscan_devices)
# ------------------------ TO CONVERT INTO PLUGIN
# # Insert Pi-hole devices
# startTime = timeNow()
# startTime = timeNowTZ()
# sql.execute ("""INSERT INTO CurrentScan (cur_ScanCycle, cur_MAC,
# cur_IP, cur_Vendor, cur_ScanMethod)
# SELECT ?, PH_MAC, PH_IP, PH_Vendor, 'Pi-hole'
@@ -144,7 +134,7 @@ def print_scan_stats (db):
#-------------------------------------------------------------------------------
def create_new_devices (db):
sql = db.sql # TO-DO
startTime = timeNow()
startTime = timeNowTZ()
# arpscan - Insert events for new devices
mylog('debug','[New Devices] New devices - 1 Events')
@@ -289,7 +279,7 @@ def create_new_devices (db):
#-------------------------------------------------------------------------------
def update_devices_data_from_scan (db):
sql = db.sql #TO-DO
startTime = timeNow()
startTime = timeNowTZ()
# Update Last Connection
mylog('debug','[Update Devices] 1 Last Connection')
sql.execute (f"""UPDATE Devices SET dev_LastConnection = '{startTime}',

View File

@@ -18,14 +18,13 @@ from const import *
from logger import mylog, logResult
#-------------------------------------------------------------------------------
def timeNow():
return datetime.datetime.now().replace(microsecond=0)
#-------------------------------------------------------------------------------
def timeNowTZ():
return datetime.datetime.now(conf.tz).replace(microsecond=0)
def timeNow():
return datetime.datetime.now().replace(microsecond=0)
#-------------------------------------------------------------------------------
def updateState(db, newState):
@@ -213,7 +212,7 @@ def isNewVersion(newVersion: bool):
text = url.text
data = json.loads(text)
except requests.exceptions.ConnectionError as e:
mylog('info', [" Couldn't check for new release."])
mylog('minimal', [" Couldn't check for new release."])
data = ""
# make sure we received a valid response and not an API rate limit exceeded message

View File

@@ -77,12 +77,13 @@ def importConfigs (db):
conf.mySettings = [] # reset settings
conf.mySettingsSQLsafe = [] # same as above but safe to be passed into a SQL query
# User values loaded from now
c_d = read_config_file(config_file)
# Import setting if found in the dictionary
# General
conf.ENABLE_ARPSCAN = ccd('ENABLE_ARPSCAN', True , c_d, 'Enable arpscan', 'boolean', '', 'General', ['run'])
conf.SCAN_SUBNETS = ccd('SCAN_SUBNETS', ['192.168.1.0/24 --interface=eth1', '192.168.1.0/24 --interface=eth0'] , c_d, 'Subnets to scan', 'subnets', '', 'General')
conf.LOG_LEVEL = ccd('LOG_LEVEL', 'verbose' , c_d, 'Log verboseness', 'text.select', "['none', 'minimal', 'verbose', 'debug']", 'General')
conf.TIMEZONE = ccd('TIMEZONE', 'Europe/Berlin' , c_d, 'Time zone', 'text', '', 'General')
conf.ENABLE_PLUGINS = ccd('ENABLE_PLUGINS', True , c_d, 'Enable plugins', 'boolean', '', 'General')
@@ -98,6 +99,10 @@ def importConfigs (db):
conf.DAYS_TO_KEEP_EVENTS = ccd('DAYS_TO_KEEP_EVENTS', 90 , c_d, 'Delete events days', 'integer', '', 'General')
conf.HRS_TO_KEEP_NEWDEV = ccd('HRS_TO_KEEP_NEWDEV', 0 , c_d, 'Keep new devices for', 'integer', "0", 'General')
# ARPSCAN (+ other settings provided by the ARPSCAN plugin)
conf.ENABLE_ARPSCAN = ccd('ENABLE_ARPSCAN', True , c_d, 'Enable arpscan', 'boolean', '', 'ARPSCAN', ['run'])
conf.SCAN_SUBNETS = ccd('SCAN_SUBNETS', ['192.168.1.0/24 --interface=eth1', '192.168.1.0/24 --interface=eth0'] , c_d, 'Subnets to scan', 'subnets', '', 'ARPSCAN')
# Email
conf.REPORT_MAIL = ccd('REPORT_MAIL', False , c_d, 'Enable email', 'boolean', '', 'Email', ['test'])
conf.SMTP_SERVER = ccd('SMTP_SERVER', '' , c_d,'SMTP server URL', 'text', '', 'Email')
@@ -176,6 +181,31 @@ def importConfigs (db):
# Init timezone in case it changed
conf.tz = timezone(conf.TIMEZONE)
# TODO cleanup later ----------------------------------------------------------------------------------
# init all time values as we have timezone - all this should be moved into plugin/plugin settings
conf.time_started = datetime.datetime.now(conf.tz)
conf.cycle = ""
conf.check_report = [1, "internet_IP", "update_vendors_silent"]
conf.plugins_once_run = False
# to be deleted if not used
conf.log_timestamp = conf.time_started
#cron_instance = Cron()
# timestamps of last execution times
conf.startTime = conf.time_started
now_minus_24h = conf.time_started - datetime.timedelta(hours = 24)
# set these times to the past to force the first run
conf.last_network_scan = now_minus_24h
conf.last_internet_IP_scan = now_minus_24h
conf.last_scan_run = now_minus_24h
conf.last_cleanup = now_minus_24h
conf.last_update_vendors = conf.time_started - datetime.timedelta(days = 6) # update vendors 24h after first run and then once a week
conf.last_version_check = now_minus_24h
# TODO cleanup later ----------------------------------------------------------------------------------
# global mySchedules
# reset schedules
conf.mySchedules = []
@@ -265,7 +295,7 @@ def importConfigs (db):
#TO DO this creates a circular reference between API and HELPER !
mylog('info', '[Config] Imported new config')
mylog('minimal', '[Config] Imported new config')
@@ -274,7 +304,7 @@ def read_config_file(filename):
"""
returns dict of the config file key:value pairs
"""
mylog('info', '[Config] reading config file')
mylog('minimal', '[Config] reading config file')
# load the variables from pialert.conf
code = compile(filename.read_text(), filename.name, "exec")
confDict = {} # config dictionary

View File

@@ -9,7 +9,10 @@ from const import *
#-------------------------------------------------------------------------------
# duplication from helper to avoid circle
#-------------------------------------------------------------------------------
def timeNow():
def timeNowTZ():
if conf.tz:
return datetime.datetime.now(conf.tz).replace(microsecond=0)
else:
return datetime.datetime.now().replace(microsecond=0)
@@ -36,7 +39,7 @@ def mylog(requestedDebugLevel, n):
#-------------------------------------------------------------------------------
def file_print (*args):
result = timeNow().strftime ('%H:%M:%S') + ' '
result = timeNowTZ().strftime ('%H:%M:%S') + ' '
for arg in args:
result += str(arg)

View File

@@ -3,7 +3,7 @@ import subprocess
import conf
from const import pialertPath, vendorsDB
from helper import timeNow, updateState
from helper import timeNowTZ, updateState
from logger import mylog
@@ -17,7 +17,7 @@ def update_devices_MAC_vendors (db, pArg = ''):
sql = db.sql # TO-DO
# Header
updateState(db,"Upkeep: Vendors")
mylog('verbose', ['[', timeNow(), '] Upkeep - Update HW Vendors:' ])
mylog('verbose', ['[', timeNowTZ(), '] Upkeep - Update HW Vendors:' ])
# Update vendors DB (iab oui)
mylog('verbose', [' Updating vendors DB (iab & oui)'])

View File

@@ -5,7 +5,7 @@ import conf
from scanners.pihole import copy_pihole_network, read_DHCP_leases
from database import insertOnlineHistory
from device import create_new_devices, print_scan_stats, save_scanned_devices, update_devices_data_from_scan, update_devices_names
from helper import timeNow
from helper import timeNowTZ
from logger import mylog
from reporting import skip_repeated_notifications
@@ -25,15 +25,6 @@ def scan_network (db):
# updateState(db,"Scan: Network")
mylog('verbose', ['[Network Scan] Scan Devices:' ])
# Query ScanCycle properties
scanCycle_data = query_ScanCycle_Data (db, True)
if scanCycle_data is None:
mylog('none', ['\n'])
mylog('none', ['[Network Scan]*************** ERROR ***************'])
mylog('none', ['[Network Scan] ScanCycle %s not found' % conf.cycle ])
mylog('none', ['[Network Scan] Exiting...\n'])
return False
db.commitDB()
# Pi-hole method
@@ -52,20 +43,6 @@ def scan_network (db):
def process_scan (db):
# Query ScanCycle properties
scanCycle_data = query_ScanCycle_Data (db, True)
if scanCycle_data is None:
mylog('none', ['\n'])
mylog('none', ['[Process Scan]*************** ERROR ***************'])
mylog('none', ['[Process Scan] ScanCycle %s not found' % conf.cycle ])
mylog('none', ['[Process Scan] Exiting...\n'])
return False
db.commitDB()
# ScanCycle data
cycle_interval = scanCycle_data['cic_EveryXmin']
# Load current scan data
mylog('verbose','[Process Scan] Processing scan results')
save_scanned_devices (db)
@@ -120,28 +97,10 @@ def process_scan (db):
# Commit changes
db.commitDB()
# moved plugin execution to main loop
# if ENABLE_PLUGINS:
# run_plugin_scripts(db,'always_after_scan')
#-------------------------------------------------------------------------------
def query_ScanCycle_Data (db, pOpenCloseDB = False, cycle = 1):
# Query Data
db.sql.execute ("""SELECT cic_arpscanCycles, cic_EveryXmin
FROM ScanCycles
WHERE cic_ID = ? """, (cycle,))
sqlRow = db.sql.fetchone()
# Return Row
return sqlRow
#-------------------------------------------------------------------------------
def void_ghost_disconnections (db):
sql = db.sql #TO-DO
startTime = timeNow()
startTime = timeNowTZ()
# Void connect ghost events (disconnect event exists in last X min.)
mylog('debug','[Void Ghost Con] - 1 Connect ghost events')
sql.execute("""UPDATE Events SET eve_PairEventRowid = Null,
@@ -151,17 +110,13 @@ def void_ghost_disconnections (db):
AND eve_DateTime = ?
AND eve_MAC IN (
SELECT Events.eve_MAC
FROM CurrentScan, Devices, ScanCycles, Events
WHERE cur_ScanCycle = ?
AND dev_MAC = cur_MAC
AND dev_ScanCycle = cic_ID
AND cic_ID = cur_ScanCycle
FROM CurrentScan, Devices, Events
WHERE dev_MAC = cur_MAC
AND eve_MAC = cur_MAC
AND eve_EventType = 'Disconnected'
AND eve_DateTime >=
DATETIME (?, '-' || cic_EveryXmin ||' minutes')
AND eve_DateTime >= DATETIME(?, '-3 minutes')
) """,
(startTime, conf.cycle, startTime) )
(startTime, startTime))
# Void connect paired events
mylog('debug','[Void Ghost Con] - 2 Paired events')
@@ -169,17 +124,13 @@ def void_ghost_disconnections (db):
WHERE eve_MAC != 'Internet'
AND eve_PairEventRowid IN (
SELECT Events.RowID
FROM CurrentScan, Devices, ScanCycles, Events
WHERE cur_ScanCycle = ?
AND dev_MAC = cur_MAC
AND dev_ScanCycle = cic_ID
AND cic_ID = cur_ScanCycle
FROM CurrentScan, Devices, Events
WHERE dev_MAC = cur_MAC
AND eve_MAC = cur_MAC
AND eve_EventType = 'Disconnected'
AND eve_DateTime >=
DATETIME (?, '-' || cic_EveryXmin ||' minutes')
AND eve_DateTime >= DATETIME(?, '-3 minutes')
) """,
(conf.cycle, startTime) )
(startTime,))
# Void disconnect ghost events
mylog('debug','[Void Ghost Con] - 3 Disconnect ghost events')
@@ -188,18 +139,16 @@ def void_ghost_disconnections (db):
WHERE eve_MAC != 'Internet'
AND ROWID IN (
SELECT Events.RowID
FROM CurrentScan, Devices, ScanCycles, Events
WHERE cur_ScanCycle = ?
AND dev_MAC = cur_MAC
AND dev_ScanCycle = cic_ID
AND cic_ID = cur_ScanCycle
FROM CurrentScan, Devices, Events
WHERE dev_MAC = cur_MAC
AND eve_MAC = cur_MAC
AND eve_EventType = 'Disconnected'
AND eve_DateTime >=
DATETIME (?, '-' || cic_EveryXmin ||' minutes')
AND eve_DateTime >= DATETIME(?, '-3 minutes')
) """,
(conf.cycle, startTime) )
(startTime,))
mylog('debug','[Void Ghost Con] Void Ghost Connections end')
db.commitDB()
#-------------------------------------------------------------------------------
@@ -256,7 +205,7 @@ def create_sessions_snapshot (db):
#-------------------------------------------------------------------------------
def insert_events (db):
sql = db.sql #TO-DO
startTime = timeNow()
startTime = timeNowTZ()
# Check device down
mylog('debug','[Events] - 1 - Devices down')

View File

@@ -2,13 +2,14 @@ import os
import json
import subprocess
import datetime
import base64
from collections import namedtuple
# pialert modules
import conf
from const import pluginsPath, logPath
from logger import mylog
from helper import timeNow, updateState, get_file_content, write_file
from helper import timeNowTZ, updateState, get_file_content, write_file
from api import update_api
#-------------------------------------------------------------------------------
@@ -38,7 +39,7 @@ def run_plugin_scripts(db, runType):
shouldRun = schd.runScheduleCheck()
if shouldRun:
# note the last time the scheduled plugin run was executed
schd.last_run = timeNow()
schd.last_run = timeNowTZ()
if shouldRun:
@@ -102,8 +103,8 @@ def get_setting(key):
result = set
if result is None:
mylog('info', [' Error - setting_missing - Setting not found for key: ', key])
mylog('info', [' Error - logging the settings into file: ', logPath + '/setting_missing.json'])
mylog('minimal', [' Error - setting_missing - Setting not found for key: ', key])
mylog('minimal', [' Error - logging the settings into file: ', logPath + '/setting_missing.json'])
write_file (logPath + '/setting_missing.json', json.dumps({ 'data' : conf.mySettings}))
return result
@@ -165,14 +166,14 @@ def execute_plugin(db, plugin):
resolved = get_setting(param["value"])
if resolved != None:
resolved = plugin_param_from_glob_set(resolved)
resolved = passable_string_from_setting(resolved)
# Get Sql result
if param["type"] == "sql":
resolved = flatten_array(db.get_sql_array(param["value"]))
if resolved == None:
mylog('none', ['[Plugins] The parameter "name":"', param["name"], '" was resolved as None'])
mylog('none', [f'[Plugins] The parameter "name":"{param["name"]}" for "value": {param["value"]} was resolved as None'])
else:
params.append( [param["name"], resolved] )
@@ -229,7 +230,7 @@ def execute_plugin(db, plugin):
if len(columns) == 9:
sqlParams.append((plugin["unique_prefix"], columns[0], columns[1], 'null', columns[2], columns[3], columns[4], columns[5], columns[6], 0, columns[7], 'null', columns[8]))
else:
mylog('none', ['[Plugins]: Skipped invalid line in the output: ', line])
mylog('none', ['[Plugins] Skipped invalid line in the output: ', line])
else:
mylog('debug', [f'[Plugins] The file {file_path} does not exist'])
@@ -249,7 +250,7 @@ def execute_plugin(db, plugin):
if len(row) == 9 and (row[0] in ['','null']) == False :
sqlParams.append((plugin["unique_prefix"], row[0], handle_empty(row[1]), 'null', row[2], row[3], row[4], handle_empty(row[5]), handle_empty(row[6]), 0, row[7], 'null', row[8]))
else:
mylog('none', ['[Plugins]: Skipped invalid sql result'])
mylog('none', ['[Plugins] Skipped invalid sql result'])
# check if the subprocess / SQL query failed / there was no valid output
@@ -257,7 +258,7 @@ def execute_plugin(db, plugin):
mylog('none', ['[Plugins] No output received from the plugin ', plugin["unique_prefix"], ' - enable LOG_LEVEL=debug and check logs'])
return
else:
mylog('verbose', ['[Plugins]: SUCCESS, received ', len(sqlParams), ' entries'])
mylog('verbose', ['[Plugins] SUCCESS, received ', len(sqlParams), ' entries'])
# process results if any
if len(sqlParams) > 0:
@@ -286,26 +287,34 @@ def handle_empty(value):
#-------------------------------------------------------------------------------
# Flattens a setting to make it passable to a script
def plugin_param_from_glob_set(globalSetting):
def passable_string_from_setting(globalSetting):
setVal = globalSetting[6] # setting value
setTyp = globalSetting[3] # setting type
noConversion = ['text', 'string', 'integer', 'boolean', 'password', 'readonly', 'integer.select', 'text.select', 'integer.checkbox' ]
arrayConversion = ['text.multiselect', 'list']
arrayConversionBase64 = ['subnets']
jsonConversion = ['.template']
mylog('debug', f'[Plugins] setTyp: {setTyp}')
if setTyp in noConversion:
return setVal
if setTyp in arrayConversion:
return flatten_array(setVal)
if setTyp in arrayConversionBase64:
return flatten_array(setVal, encodeBase64 = True)
for item in jsonConversion:
if setTyp.endswith(item):
return json.dumps(setVal)
mylog('none', ['[Plugins] ERROR: Parameter not converted.'])
#-------------------------------------------------------------------------------
@@ -335,33 +344,47 @@ def get_setting_value(key):
return ''
#-------------------------------------------------------------------------------
def flatten_array(arr):
def flatten_array(arr, encodeBase64=False):
tmp = ''
arrayItemStr = ''
mylog('debug', '[Plugins] Flattening the below array')
mylog('debug', f'[Plugins] Convert to Base64: {encodeBase64}')
mylog('debug', arr)
for arrayItem in arr:
# only one column flattening is supported
if isinstance(arrayItem, list):
arrayItem = str(arrayItem[0])
arrayItemStr = str(arrayItem[0]).replace("'", '') # removing single quotes - not allowed
else:
# is string already
arrayItemStr = arrayItem
tmp += arrayItem + ','
# tmp = tmp.replace("'","").replace(' ','') # No single quotes or empty spaces allowed
tmp = tmp.replace("'","") # No single quotes allowed
return tmp[:-1] # Remove last comma ','
tmp += f'{arrayItemStr},'
tmp = tmp[:-1] # Remove last comma ','
mylog('debug', f'[Plugins] Flattened array: {tmp}')
if encodeBase64:
tmp = str(base64.b64encode(tmp.encode('ascii')))
mylog('debug', f'[Plugins] Flattened array (base64): {tmp}')
return tmp
#-------------------------------------------------------------------------------
# Replace {wildcars} with parameters
def resolve_wildcards_arr(commandArr, params):
mylog('debug', ['[Plugins]: Pre-Resolved CMD: '] + commandArr)
mylog('debug', ['[Plugins] Pre-Resolved CMD: '] + commandArr)
for param in params:
# mylog('debug', ['[Plugins]: key : {', param[0], '}'])
# mylog('debug', ['[Plugins]: resolved: ', param[1]])
# mylog('debug', ['[Plugins] key : {', param[0], '}'])
# mylog('debug', ['[Plugins] resolved: ', param[1]])
i = 0
@@ -491,30 +514,35 @@ def process_plugin_events(db, plugin):
# Perform databse table mapping if enabled for the plugin
if len(pluginEvents) > 0 and "mapped_to_table" in plugin:
# Initialize an empty list to store SQL parameters.
sqlParams = []
# Get the database table name from the 'mapped_to_table' key in the 'plugin' dictionary.
dbTable = plugin['mapped_to_table']
# Log a debug message indicating the mapping of objects to the database table.
mylog('debug', ['[Plugins] Mapping objects to database table: ', dbTable])
# collect all columns to be mapped
# Initialize lists to hold mapped column names, columnsStr, and valuesStr for SQL query.
mappedCols = []
columnsStr = ''
valuesStr = ''
# Loop through the 'database_column_definitions' in the 'plugin' dictionary to collect mapped columns.
# Build the columnsStr and valuesStr for the SQL query.
for clmn in plugin['database_column_definitions']:
if 'mapped_to_column' in clmn:
mappedCols.append(clmn)
columnsStr = f'{columnsStr}, "{clmn["mapped_to_column"]}"'
valuesStr = f'{valuesStr}, ?'
# Remove the first ',' from columnsStr and valuesStr.
if len(columnsStr) > 0:
columnsStr = columnsStr[1:] # remove first ','
valuesStr = valuesStr[1:] # remove first ','
columnsStr = columnsStr[1:]
valuesStr = valuesStr[1:]
# map the column names to plugin object event values
# Map the column names to plugin object event values and create a list of tuples 'sqlParams'.
for plgEv in pluginEvents:
tmpList = []
for col in mappedCols:
@@ -545,14 +573,20 @@ def process_plugin_events(db, plugin):
elif col['column'] == 'Status':
tmpList.append(plgEv.status)
# Append the mapped values to the list 'sqlParams' as a tuple.
sqlParams.append(tuple(tmpList))
# Generate the SQL INSERT query using the collected information.
q = f'INSERT into {dbTable} ({columnsStr}) VALUES ({valuesStr})'
# Log a debug message showing the generated SQL query for mapping.
mylog('debug', ['[Plugins] SQL query for mapping: ', q])
# Execute the SQL query using 'sql.executemany()' and the 'sqlParams' list of tuples.
# This will insert multiple rows into the database in one go.
sql.executemany(q, sqlParams)
db.commitDB()

View File

@@ -48,7 +48,7 @@ def publish_mqtt(client, topic, message):
status = result[0]
if status != 0:
mylog('info', ["Waiting to reconnect to MQTT broker"])
mylog('minimal', ["Waiting to reconnect to MQTT broker"])
time.sleep(0.1)
return True
@@ -180,7 +180,7 @@ def mqtt_start(db):
sec_delay = len(devices) * int(conf.MQTT_DELAY_SEC)*5
mylog('info', [" Estimated delay: ", (sec_delay), 's ', '(', round(sec_delay/60,1) , 'min)' ])
mylog('minimal', [" Estimated delay: ", (sec_delay), 's ', '(', round(sec_delay/60,1) , 'min)' ])
for device in devices:

View File

@@ -23,8 +23,9 @@ from json2table import convert
import conf
import const
from const import pialertPath, logPath, apiPath
from helper import noti_struc, generate_mac_links, removeDuplicateNewLines, timeNow, hide_email, updateState, get_file_content, write_file
from helper import noti_struc, generate_mac_links, removeDuplicateNewLines, timeNowTZ, hide_email, updateState, get_file_content, write_file
from logger import logResult, mylog, print_log
from plugin import execute_plugin
from publishers.email import (check_config as email_check_config,
@@ -150,7 +151,7 @@ def send_notifications (db):
template_file.close()
# Report Header & footer
timeFormated = timeNow().strftime ('%Y-%m-%d %H:%M')
timeFormated = timeNowTZ().strftime ('%Y-%m-%d %H:%M')
mail_text = mail_text.replace ('<REPORT_DATE>', timeFormated)
mail_html = mail_html.replace ('<REPORT_DATE>', timeFormated)
@@ -282,43 +283,43 @@ def send_notifications (db):
msg = noti_struc(json_final, mail_text, mail_html)
mylog('info', ['[Notification] Udating API files'])
mylog('minimal', ['[Notification] Udating API files'])
send_api()
if conf.REPORT_MAIL and check_config('email'):
updateState(db,"Send: Email")
mylog('info', ['[Notification] Sending report by Email'])
mylog('minimal', ['[Notification] Sending report by Email'])
send_email (msg )
else :
mylog('verbose', ['[Notification] Skip email'])
if conf.REPORT_APPRISE and check_config('apprise'):
updateState(db,"Send: Apprise")
mylog('info', ['[Notification] Sending report by Apprise'])
mylog('minimal', ['[Notification] Sending report by Apprise'])
send_apprise (msg)
else :
mylog('verbose', ['[Notification] Skip Apprise'])
if conf.REPORT_WEBHOOK and check_config('webhook'):
updateState(db,"Send: Webhook")
mylog('info', ['[Notification] Sending report by Webhook'])
mylog('minimal', ['[Notification] Sending report by Webhook'])
send_webhook (msg)
else :
mylog('verbose', ['[Notification] Skip webhook'])
if conf.REPORT_NTFY and check_config('ntfy'):
updateState(db,"Send: NTFY")
mylog('info', ['[Notification] Sending report by NTFY'])
mylog('minimal', ['[Notification] Sending report by NTFY'])
send_ntfy (msg)
else :
mylog('verbose', ['[Notification] Skip NTFY'])
if conf.REPORT_PUSHSAFER and check_config('pushsafer'):
updateState(db,"Send: PUSHSAFER")
mylog('info', ['[Notification] Sending report by PUSHSAFER'])
mylog('minimal', ['[Notification] Sending report by PUSHSAFER'])
send_pushsafer (msg)
else :
mylog('verbose', ['[Notification] Skip PUSHSAFER'])
# Update MQTT entities
if conf.REPORT_MQTT and check_config('mqtt'):
updateState(db,"Send: MQTT")
mylog('info', ['[Notification] Establishing MQTT thread'])
mylog('minimal', ['[Notification] Establishing MQTT thread'])
mqtt_start(db)
else :
mylog('verbose', ['[Notification] Skip MQTT'])
@@ -329,7 +330,7 @@ def send_notifications (db):
sql.execute ("""UPDATE Devices SET dev_LastNotification = ?
WHERE dev_MAC IN (SELECT eve_MAC FROM Events
WHERE eve_PendingAlertEmail = 1)
""", (datetime.datetime.now(),) )
""", (datetime.datetime.now(conf.tz),) )
sql.execute ("""UPDATE Events SET eve_PendingAlertEmail = 0
WHERE eve_PendingAlertEmail = 1""")
@@ -339,7 +340,7 @@ def send_notifications (db):
conf.changedPorts_json_struc = None
# DEBUG - print number of rows updated
mylog('info', ['[Notification] Notifications changes: ', sql.rowcount])
mylog('minimal', ['[Notification] Notifications changes: ', sql.rowcount])
# Commit changes
db.commitDB()
@@ -479,15 +480,18 @@ def check_and_run_event(db):
event, param = ['','']
if len(rows) > 0 and rows[0]['par_Value'] != 'finished':
event = rows[0]['par_Value'].split('|')[0]
param = rows[0]['par_Value'].split('|')[1]
keyValue = rows[0]['par_Value'].split('|')
if len(keyValue) == 2:
event = keyValue[0]
param = keyValue[1]
else:
return
if event == 'test':
handle_test(param)
if event == 'run':
handle_run(param)
handle_run(param, db)
# clear event execution flag
sql.execute ("UPDATE Parameters SET par_Value='finished' WHERE par_ID='Front_Event'")
@@ -496,20 +500,24 @@ def check_and_run_event(db):
db.commitDB()
#-------------------------------------------------------------------------------
def handle_run(runType):
global last_network_scan
def handle_run(runType, db):
mylog('info', ['[', timeNow(), '] START Run: ', runType])
mylog('minimal', ['[', timeNowTZ(), '] START Run: ', runType])
if runType == 'ENABLE_ARPSCAN':
last_network_scan = conf.time_started - datetime.timedelta(hours = 24)
# run the plugin to run
for plugin in conf.plugins:
if plugin["unique_prefix"] == 'ARPSCAN':
execute_plugin(db, plugin)
mylog('minimal', ['[', timeNowTZ(), '] END Run: ', runType])
mylog('info', ['[', timeNow(), '] END Run: ', runType])
#-------------------------------------------------------------------------------
def handle_test(testType):
mylog('info', ['[', timeNow(), '] START Test: ', testType])
mylog('minimal', ['[', timeNowTZ(), '] START Test: ', testType])
# Open text sample
sample_txt = get_file_content(pialertPath + '/back/report_sample.txt')
@@ -533,4 +541,4 @@ def handle_test(testType):
if testType == 'REPORT_PUSHSAFER':
send_pushsafer (sample_msg)
mylog('info', ['[Test Publishers] END Test: ', testType])
mylog('minimal', ['[Test Publishers] END Test: ', testType])

View File

@@ -6,7 +6,7 @@ import re
# pialert modules
import conf
from helper import timeNow, updateState
from helper import timeNowTZ, updateState
from logger import append_line_to_file, mylog
from const import logPath
@@ -45,7 +45,7 @@ def check_internet_IP ( db ):
# Check IP Change
if internet_IP != previous_IP :
mylog('info', ['[Internet IP] New internet IP: ', internet_IP])
mylog('minimal', ['[Internet IP] New internet IP: ', internet_IP])
save_new_internet_IP (db, internet_IP)
else :
@@ -116,7 +116,7 @@ def get_previous_internet_IP (db):
def save_new_internet_IP (db, pNewIP):
# Log new IP into logfile
append_line_to_file (logPath + '/IP_changes.log',
'['+str(timeNow()) +']\t'+ pNewIP +'\n')
'['+str(timeNowTZ()) +']\t'+ pNewIP +'\n')
prevIp = get_previous_internet_IP(db)
# Save event
@@ -125,7 +125,7 @@ def save_new_internet_IP (db, pNewIP):
eve_PendingAlertEmail)
VALUES ('Internet', ?, ?, 'Internet IP Changed',
'Previous Internet IP: '|| ?, 1) """,
(pNewIP, timeNow(), prevIp) )
(pNewIP, timeNowTZ(), prevIp) )
# Save new IP
db.sql.execute ("""UPDATE Devices SET dev_LastIP = ?

View File

@@ -3,7 +3,7 @@ import subprocess
import conf
from const import logPath, sql_nmap_scan_all
from helper import json_struc, timeNow, updateState
from helper import json_struc, timeNowTZ, updateState
from logger import append_line_to_file, mylog
#-------------------------------------------------------------------------------
@@ -59,7 +59,7 @@ def performNmapScan(db, devicesToScan):
mylog('verbose', ['[NMAP Scan] Nmap TIMEOUT - the process forcefully terminated as timeout reached for ', device["dev_LastIP"], progress])
if output == "": # check if the subprocess failed
mylog('info', ['[NMAP Scan] Nmap FAIL for ', device["dev_LastIP"], progress ,' check logs for details'])
mylog('minimal', ['[NMAP Scan] Nmap FAIL for ', device["dev_LastIP"], progress ,' check logs for details'])
else:
mylog('verbose', ['[NMAP Scan] Nmap SUCCESS for ', device["dev_LastIP"], progress])
@@ -87,7 +87,7 @@ def performNmapScan(db, devicesToScan):
elif 'PORT' in line and 'STATE' in line and 'SERVICE' in line:
startCollecting = False # end reached
elif startCollecting and len(line.split()) == 3:
newEntriesTmp.append(nmap_entry(device["dev_MAC"], timeNow(), line.split()[0], line.split()[1], line.split()[2], device["dev_Name"]))
newEntriesTmp.append(nmap_entry(device["dev_MAC"], timeNowTZ(), line.split()[0], line.split()[1], line.split()[2], device["dev_Name"]))
elif 'Nmap done' in line:
duration = line.split('scanned in ')[1]
index += 1

View File

@@ -2,7 +2,7 @@ import subprocess
import re
from const import fullPholusPath, logPath
from helper import checkIPV4, timeNow, updateState
from helper import checkIPV4, timeNowTZ, updateState
from logger import mylog
#-------------------------------------------------------------------------------
@@ -64,7 +64,7 @@ def performPholusScan (db, timeoutSec, userSubnets):
for line in newLines:
columns = line.split("|")
if len(columns) == 4:
params.append(( interface + " " + mask, timeNow() , columns[0].replace(" ", ""), columns[1].replace(" ", ""), columns[2].replace(" ", ""), columns[3], ''))
params.append(( interface + " " + mask, timeNowTZ() , columns[0].replace(" ", ""), columns[1].replace(" ", ""), columns[2].replace(" ", ""), columns[3], ''))
if len(params) > 0:
sql.executemany ("""INSERT INTO Pholus_Scan ("Info", "Time", "MAC", "IP_v4_or_v6", "Record_Type", "Value", "Extra") VALUES (?, ?, ?, ?, ?, ?, ?)""", params)

View File

@@ -6,7 +6,7 @@ sys.path.append(str(pathlib.Path(__file__).parent.parent.resolve()) + "/pialert/
import datetime
from helper import timeNow, updateSubnets
from helper import timeNowTZ, updateSubnets
# -------------------------------------------------------------------------------