From 2946e4832147e001f84cc1e14142f5f14581f764 Mon Sep 17 00:00:00 2001
From: Carlos V <76731844+cvc90@users.noreply.github.com>
Date: Sun, 6 Aug 2023 08:37:28 +0200
Subject: [PATCH 01/14] Update en_us.json
Changed the content of the "System info" menu variable ("Navigation_SystemInfo" : "System Information" --> "Navigation_SystemInfo" : "System info")
---
front/php/templates/language/en_us.json | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/front/php/templates/language/en_us.json b/front/php/templates/language/en_us.json
index cd9b8fbb..f871a8a2 100755
--- a/front/php/templates/language/en_us.json
+++ b/front/php/templates/language/en_us.json
@@ -42,7 +42,7 @@
"Navigation_Plugins" : "Plugins",
"Navigation_Maintenance" : "Maintenance",
"Navigation_Settings" : "Settings",
- "Navigation_SystemInfo" : "System Information",
+ "Navigation_SystemInfo" : "System info",
"Navigation_Flows" : "Flows",
"Navigation_HelpFAQ" : "Help / FAQ",
"Device_Title" : "Devices",
From 6009098c9cb147d285630a46020afd14b21474ad Mon Sep 17 00:00:00 2001
From: Carlos V <76731844+cvc90@users.noreply.github.com>
Date: Sun, 6 Aug 2023 08:51:26 +0200
Subject: [PATCH 02/14] Update es_es.json
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
Changed the content of the "System info" menu variable ("Navigation_SystemInfo" : "Información del sistema" --> "Navigation_SystemInfo" : "Info del sistema")
---
front/php/templates/language/es_es.json | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/front/php/templates/language/es_es.json b/front/php/templates/language/es_es.json
index fe0cb40d..a9bede2f 100755
--- a/front/php/templates/language/es_es.json
+++ b/front/php/templates/language/es_es.json
@@ -41,7 +41,7 @@
"Navigation_Plugins" : "Plugins",
"Navigation_Maintenance" : "Mantenimiento",
"Navigation_Settings" : "Configuración",
- "Navigation_SystemInfo" : "Información del sistema",
+ "Navigation_SystemInfo" : "Info del sistema",
"Navigation_HelpFAQ" : "Ayuda / Preguntas frecuentes",
"Device_Title" : "Dispositivos",
"Device_Shortcut_AllDevices" : "Todos",
From 2575b79c8bd35f634325b7d39994d81dd016a6bf Mon Sep 17 00:00:00 2001
From: Carlos V <76731844+cvc90@users.noreply.github.com>
Date: Sun, 6 Aug 2023 08:54:41 +0200
Subject: [PATCH 03/14] Update de_de.json
Changed the content of the "System info" menu variable ("Navigation_SystemInfo" : "Systeminformationen" --> "Navigation_SystemInfo" : "System info")
---
front/php/templates/language/de_de.json | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/front/php/templates/language/de_de.json b/front/php/templates/language/de_de.json
index fbb308a2..b5bc24fd 100755
--- a/front/php/templates/language/de_de.json
+++ b/front/php/templates/language/de_de.json
@@ -28,7 +28,7 @@
"Navigation_Events" : "Ereignisse",
"Navigation_Maintenance" : "Wartung",
"Navigation_Settings" : "Einstellung",
- "Navigation_SystemInfo" : "Systeminformationen",
+ "Navigation_SystemInfo" : "System info",
"Navigation_Network" : "Netzwerk",
"Navigation_HelpFAQ" : "Hilfe / FAQ",
"Device_Title" : "Geräte",
From 634bc0744d92232eb5a42ac4aeaf9a130bfd01da Mon Sep 17 00:00:00 2001
From: Carlos V <76731844+cvc90@users.noreply.github.com>
Date: Sun, 6 Aug 2023 11:16:42 +0200
Subject: [PATCH 04/14] Update es_es.json
Shortened the too-long Navigation_HelpFAQ menu variable ("Navigation_HelpFAQ" : "Ayuda / Preguntas frecuentes" --> "Navigation_HelpFAQ" : "Ayuda / FAQ")
Added variable Navigation_Flows ("Navigation_Flows" : "Flows")
---
front/php/templates/language/es_es.json | 3 ++-
1 file changed, 2 insertions(+), 1 deletion(-)
diff --git a/front/php/templates/language/es_es.json b/front/php/templates/language/es_es.json
index a9bede2f..7c9509d9 100755
--- a/front/php/templates/language/es_es.json
+++ b/front/php/templates/language/es_es.json
@@ -41,8 +41,9 @@
"Navigation_Plugins" : "Plugins",
"Navigation_Maintenance" : "Mantenimiento",
"Navigation_Settings" : "Configuración",
+ "Navigation_Flows" : "Flows",
"Navigation_SystemInfo" : "Info del sistema",
- "Navigation_HelpFAQ" : "Ayuda / Preguntas frecuentes",
+ "Navigation_HelpFAQ" : "Ayuda / FAQ",
"Device_Title" : "Dispositivos",
"Device_Shortcut_AllDevices" : "Todos",
"Device_Shortcut_Connected" : "Conectado(s)",
From 2e137b4e070f254e4df1132e29f09ef8d5482568 Mon Sep 17 00:00:00 2001
From: Carlos V <76731844+cvc90@users.noreply.github.com>
Date: Sun, 6 Aug 2023 11:27:59 +0200
Subject: [PATCH 05/14] Update es_es.json
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
Added variable HelpFAQ_Cat_Network_601_head ("HelpFAQ_Cat_Network_601_head": "¿Hay otros documentos?")
Added variable HelpFAQ_Cat_Network_601_text ("HelpFAQ_Cat_Network_601_text": "¡Sí, los hay! Marque all docs para más información.")
---
front/php/templates/language/es_es.json | 2 ++
1 file changed, 2 insertions(+)
diff --git a/front/php/templates/language/es_es.json b/front/php/templates/language/es_es.json
index 7c9509d9..17064609 100755
--- a/front/php/templates/language/es_es.json
+++ b/front/php/templates/language/es_es.json
@@ -391,6 +391,8 @@
"HelpFAQ_Cat_Presence_401_text" : "Si esto sucede, tiene la posibilidad de eliminar los eventos del dispositivo en cuestión (vista de detalles). Otra posibilidad sería encender el dispositivo y esperar hasta que Pi.Alert reconozca el dispositivo como \"online\" con el siguiente escaneo y luego simplemente apagar el dispositivo nuevamente. Ahora Pi.Alert debería anotar correctamente el estado del dispositivo en la base de datos con el próximo escaneo.",
"HelpFAQ_Cat_Network_600_head" : "¿Para qué sirve esta sección?",
"HelpFAQ_Cat_Network_600_text" : "Esta sección debería ofrecerle la posibilidad de mapear la asignación de sus dispositivos de red. Para ello, puede crear uno o más conmutadores, WLAN, enrutadores, etc., proporcionarles un número de puerto si es necesario y asignarles dispositivos ya detectados. Esta asignación se realiza en la vista detallada del dispositivo a asignar. Por lo tanto, es posible determinar rápidamente a qué puerto está conectado un host y si está en línea. Es posible asignar un dispositivo a múltiples puertos (agrupación de puertos), así como múltiples dispositivos a un puerto (máquinas virtuales).",
+ "HelpFAQ_Cat_Network_601_head" : "¿Hay otros documentos?",
+ "HelpFAQ_Cat_Network_601_text" : "¡Sí, los hay! Marque all docs para más información.",
"test_event_tooltip" : "Guarda tus cambios antes de probar nuevos ajustes.",
"test_event_icon" : "fa-vial-circle-check",
"run_event_tooltip" : "Activa el ajuste y guarda tus cambios antes de ejecutarlo.",
From f33262d2624f5f126cdcd950e4980bfd70590f91 Mon Sep 17 00:00:00 2001
From: Carlos V <76731844+cvc90@users.noreply.github.com>
Date: Sun, 6 Aug 2023 11:35:23 +0200
Subject: [PATCH 06/14] Update es_es.json
Added variable "Maintenance_built_on" ("Maintenance_built_on" : "Construido el")
---
front/php/templates/language/es_es.json | 1 +
1 file changed, 1 insertion(+)
diff --git a/front/php/templates/language/es_es.json b/front/php/templates/language/es_es.json
index 17064609..cf752b6d 100755
--- a/front/php/templates/language/es_es.json
+++ b/front/php/templates/language/es_es.json
@@ -211,6 +211,7 @@
"Maintenance_version" : "Actualizaciones de la aplicación",
"Maintenance_new_version" : "🆕 Una nueva versión está disponible. Comprueba las notas de lanzamiento.",
"Maintenance_current_version" : "No hay actualizaciones disponibles. Comprueba en que se está trabajando.",
+ "Maintenance_built_on" : "Construido el",
"Maintenance_database_path" : "Ruta de la base de datos:",
"Maintenance_database_size" : "Tamaño de base de datos:",
"Maintenance_database_lastmod" : "Última modificación:",
From 90e94a14fb4c49dbead678920733c0609fd2ec1e Mon Sep 17 00:00:00 2001
From: Carlos V <76731844+cvc90@users.noreply.github.com>
Date: Sun, 6 Aug 2023 11:45:03 +0200
Subject: [PATCH 07/14] Update es_es.json
Added variable "Plugins_History" ("Plugins_History" : "Historial de eventos")
Added variable "Plugins_Filters_Mac" ("Plugins_Filters_Mac" : "Filtro MAC")
---
front/php/templates/language/es_es.json | 4 +++-
1 file changed, 3 insertions(+), 1 deletion(-)
diff --git a/front/php/templates/language/es_es.json b/front/php/templates/language/es_es.json
index cf752b6d..e25ce6e0 100755
--- a/front/php/templates/language/es_es.json
+++ b/front/php/templates/language/es_es.json
@@ -402,7 +402,9 @@
"general_event_description" : "El evento que has ejecutado puede tardar un rato mientras finalizan procesos en segundo plano. La ejecución ha terminado cuando ves finalizado abajo. Comprueba el registro de error si no has obtenido el resultado esperado.
Estado: ",
"Plugins_Unprocessed_Events" : "Eventos sin procesar",
"Plugins_Objects" : "Objetos del Plugin",
- "Plugins_History" : "Historial de eventos",
+ "Plugins_DeleteAll" : "Eliminar todo (se ignoran los filtros)",
+ "Plugins_History" : "Historial de eventos",
+ "Plugins_Filters_Mac" : "Filtro MAC",
"settings_missing" : "Actualiza la página, no todos los ajustes se han cargado. Probablemente sea por una sobrecarga de la base de datos.",
"settings_missing_block" : "No puedes guardar los ajustes sin establecer todas las claves. Actualiza la página. Problabmente esté causado por una sobrecarga de la base de datos.",
"settings_old" : "Los ajustes mostrados en esta página están desactualizados. Probablemente sea por un escaneo en proceso. Los ajustes se guardan en el archivo pialert.conf, pero el proceso en segundo plano no las ha importado todavía a la base de datos. Puedes esperar a que los ajustes se actualicen para evitar sobreescribirlos con los ajustes antiguos. Si te da igual perder los ajustes desde la última vez que guardaste y ahora, siéntete libre de guardarlos de nuevo. También hay copias de seguridad creadas si necesitas comparar tus ajustes más tarde.",
From 8da84e8d64bda16aeb3f7b1f41753254cfa86ee0 Mon Sep 17 00:00:00 2001
From: Carlos V <76731844+cvc90@users.noreply.github.com>
Date: Sun, 6 Aug 2023 11:52:16 +0200
Subject: [PATCH 08/14] Update es_es.json
Added variable "DevDetail_Tab_Plugins" ("DevDetail_Tab_Plugins" : " Plugins")
---
front/php/templates/language/es_es.json | 1 +
1 file changed, 1 insertion(+)
diff --git a/front/php/templates/language/es_es.json b/front/php/templates/language/es_es.json
index e25ce6e0..93189d3c 100755
--- a/front/php/templates/language/es_es.json
+++ b/front/php/templates/language/es_es.json
@@ -139,6 +139,7 @@
"DevDetail_Tab_Events" : "Eventos",
"DevDetail_Tab_Pholus" : " Pholus",
"DevDetail_Tab_PholusEmpty" : "No se ha encontrado nada para este dispositivo con Pholus.",
+ "DevDetail_Tab_Plugins" : " Plugins",
"DevDetail_Tab_NmapTableHeader" : "Resultados del escaneo programado",
"DevDetail_Tab_NmapTableText" : "Establece la programación en los Ajustes",
"DevDetail_Tab_NmapEmpty" : "Ningún puerto detectado en este dispositivo con Nmap.",
From ec669a701e69e625e40259b17e2a69bb1e88c47b Mon Sep 17 00:00:00 2001
From: Carlos V <76731844+cvc90@users.noreply.github.com>
Date: Sun, 6 Aug 2023 12:27:15 +0200
Subject: [PATCH 09/14] Update es_es.json
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
Updated variable HelpFAQ_Cat_Network_601_text ("HelpFAQ_Cat_Network_601_text": "¡Sí, los hay! Marque all docs para más información." --> "HelpFAQ_Cat_Network_601_text": "¡Sí, los hay! Marque todos los documentos para más información.")
---
front/php/templates/language/es_es.json | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/front/php/templates/language/es_es.json b/front/php/templates/language/es_es.json
index 93189d3c..6c0edc68 100755
--- a/front/php/templates/language/es_es.json
+++ b/front/php/templates/language/es_es.json
@@ -394,7 +394,7 @@
"HelpFAQ_Cat_Network_600_head" : "¿Para qué sirve esta sección?",
"HelpFAQ_Cat_Network_600_text" : "Esta sección debería ofrecerle la posibilidad de mapear la asignación de sus dispositivos de red. Para ello, puede crear uno o más conmutadores, WLAN, enrutadores, etc., proporcionarles un número de puerto si es necesario y asignarles dispositivos ya detectados. Esta asignación se realiza en la vista detallada del dispositivo a asignar. Por lo tanto, es posible determinar rápidamente a qué puerto está conectado un host y si está en línea. Es posible asignar un dispositivo a múltiples puertos (agrupación de puertos), así como múltiples dispositivos a un puerto (máquinas virtuales).",
"HelpFAQ_Cat_Network_601_head" : "¿Hay otros documentos?",
- "HelpFAQ_Cat_Network_601_text" : "¡Sí, los hay! Marque all docs para más información.",
+ "HelpFAQ_Cat_Network_601_text" : "¡Sí, los hay! Marque todos los documentos para más información.",
"test_event_tooltip" : "Guarda tus cambios antes de probar nuevos ajustes.",
"test_event_icon" : "fa-vial-circle-check",
"run_event_tooltip" : "Activa el ajuste y guarda tus cambios antes de ejecutarlo.",
From 9a13133a5f0008379d191d544798f20ce5b428a7 Mon Sep 17 00:00:00 2001
From: Jokob-sk
Date: Mon, 7 Aug 2023 08:23:39 +1000
Subject: [PATCH 10/14] ARPSCAN to plugin rewrite
---
front/plugins/arp_scan/config.json | 4 +--
pialert/__main__.py | 41 +++++++----------------
pialert/api.py | 2 +-
pialert/conf.py | 10 ++++--
pialert/database.py | 4 +--
pialert/device.py | 18 +++-------
pialert/helper.py | 9 +++--
pialert/initialise.py | 40 +++++++++++++++++++---
pialert/logger.py | 9 +++--
pialert/mac_vendor.py | 4 +--
pialert/networkscan.py | 53 ++++--------------------------
pialert/plugin.py | 18 +++++-----
pialert/publishers/mqtt.py | 4 +--
pialert/reporting.py | 30 ++++++++---------
pialert/scanners/internet.py | 8 ++---
pialert/scanners/nmapscan.py | 6 ++--
pialert/scanners/pholusscan.py | 4 +--
test/test_helper.py | 2 +-
18 files changed, 119 insertions(+), 147 deletions(-)
diff --git a/front/plugins/arp_scan/config.json b/front/plugins/arp_scan/config.json
index 4be96348..0c9b53ab 100755
--- a/front/plugins/arp_scan/config.json
+++ b/front/plugins/arp_scan/config.json
@@ -36,8 +36,8 @@
{
"function": "RUN",
"type": "text.select",
- "default_value":"disabled",
- "options": ["disabled", "once", "schedule", "scan_cycle", "always_after_scan", "on_new_device"],
+ "default_value":"schedule",
+ "options": ["disabled", "once", "schedule", "always_after_scan", "on_new_device"],
"localized": ["name", "description"],
"name" :[{
"language_code":"en_us",
diff --git a/pialert/__main__.py b/pialert/__main__.py
index 1b7fb2db..5be82db0 100755
--- a/pialert/__main__.py
+++ b/pialert/__main__.py
@@ -24,7 +24,7 @@ import multiprocessing
import conf
from const import *
from logger import mylog
-from helper import filePermissions, isNewVersion, timeNow, updateState
+from helper import filePermissions, isNewVersion, timeNowTZ, updateState
from api import update_api
from networkscan import process_scan, scan_network
from initialise import importConfigs
@@ -74,29 +74,11 @@ main structure of Pi Alert
"""
def main ():
- mylog('debug', ['[MAIN] Setting up ...'])
+ mylog('none', ['[MAIN] Setting up ...']) # has to be level 'none' as user config not loaded yet
+
+ mylog('none', [f'[conf.tz] Setting up ...{conf.tz}'])
- conf.time_started = datetime.datetime.now()
- conf.cycle = ""
- conf.check_report = [1, "internet_IP", "update_vendors_silent"]
- conf.plugins_once_run = False
- # to be deleted if not used
- conf.log_timestamp = conf.time_started
- #cron_instance = Cron()
-
- # timestamps of last execution times
- startTime = conf.time_started
- now_minus_24h = conf.time_started - datetime.timedelta(hours = 24)
-
- # set these times to the past to force the first run
- last_network_scan = now_minus_24h
- last_internet_IP_scan = now_minus_24h
- last_scan_run = now_minus_24h
- last_cleanup = now_minus_24h
- last_update_vendors = conf.time_started - datetime.timedelta(days = 6) # update vendors 24h after first run and then once a week
- last_version_check = now_minus_24h
-
# indicates, if a new version is available
conf.newVersionAvailable = False
@@ -120,17 +102,18 @@ def main ():
while True:
- # update time started
- loop_start_time = timeNow()
-
# re-load user configuration and plugins
importConfigs(db)
+ # update time started
+ conf.loop_start_time = timeNowTZ()
+ loop_start_time = conf.loop_start_time # TODO fix
+
# check if new version is available / only check once an hour
- if last_version_check + datetime.timedelta(hours=1) < loop_start_time :
+ if conf.last_version_check + datetime.timedelta(hours=1) < loop_start_time :
# if newVersionAvailable is already true the function does nothing and returns true again
- mylog('debug', [f"[Version check] Last version check timestamp: {last_version_check}"])
- last_version_check = loop_start_time
+ mylog('debug', [f"[Version check] Last version check timestamp: {conf.last_version_check}"])
+ conf.last_version_check = loop_start_time
conf.newVersionAvailable = isNewVersion(conf.newVersionAvailable)
# Handle plugins executed ONCE
@@ -210,7 +193,7 @@ def main ():
run = nmapSchedule.runScheduleCheck()
if run:
- nmapSchedule.last_run = timeNow()
+ nmapSchedule.last_run = timeNowTZ()
performNmapScan(db, get_all_devices(db))
# todo replace the scans with plugins
diff --git a/pialert/api.py b/pialert/api.py
index 40d902b5..b9efc5e8 100755
--- a/pialert/api.py
+++ b/pialert/api.py
@@ -92,5 +92,5 @@ class api_endpoint_class:
# update hash
apiEndpoints[changedIndex].hash = self.hash
else:
- mylog('info', [f'[API] ERROR Updating {self.fileName}'])
+ mylog('minimal', [f'[API] ERROR Updating {self.fileName}'])
diff --git a/pialert/conf.py b/pialert/conf.py
index fdb918d3..2c4e30d4 100755
--- a/pialert/conf.py
+++ b/pialert/conf.py
@@ -18,9 +18,17 @@ lastImportedConfFile = 1.1
plugins_once_run = False
newVersionAvailable = False
time_started = ''
+startTime = ''
+last_network_scan = ''
+last_internet_IP_scan = ''
+last_scan_run = ''
+last_cleanup = ''
+last_update_vendors = ''
+last_version_check = ''
check_report = []
log_timestamp = 0
arpscan_devices = []
+
# for MQTT
mqtt_connected_to_broker = False
mqtt_sensors = []
@@ -28,8 +36,6 @@ client = None # mqtt client
# for notifications
changedPorts_json_struc = None
-
-
# ACTUAL CONFIGRATION ITEMS set to defaults
# General
diff --git a/pialert/database.py b/pialert/database.py
index f33c59ef..5c4073c5 100755
--- a/pialert/database.py
+++ b/pialert/database.py
@@ -6,7 +6,7 @@ import sqlite3
from const import fullDbPath, sql_devices_stats, sql_devices_all
from logger import mylog
-from helper import json_struc, initOrSetParam, row_to_json, timeNow #, updateState
+from helper import json_struc, initOrSetParam, row_to_json, timeNowTZ #, updateState
@@ -478,7 +478,7 @@ def get_all_devices(db):
#-------------------------------------------------------------------------------
def insertOnlineHistory(db):
sql = db.sql #TO-DO
- startTime = timeNow()
+ startTime = timeNowTZ()
# Add to History
# only run this if the scans have run
diff --git a/pialert/device.py b/pialert/device.py
index a6d766e3..c747bc9c 100755
--- a/pialert/device.py
+++ b/pialert/device.py
@@ -5,7 +5,7 @@
import subprocess
import conf
-from helper import timeNow
+from helper import timeNowTZ
from plugin import get_setting_value
from scanners.internet import check_IP_format, get_internet_IP
from logger import mylog, print_log
@@ -21,20 +21,10 @@ def save_scanned_devices (db):
# mylog('debug', ['[ARP Scan] Detected devices:', len(p_arpscan_devices)])
# handled by the ARPSCAN plugin
- # # Delete previous scan data
- # sql.execute ("DELETE FROM CurrentScan")
-
- # if len(p_arpscan_devices) > 0:
- # # Insert new arp-scan devices
- # sql.executemany ("INSERT INTO CurrentScan (cur_ScanCycle, cur_MAC, "+
- # " cur_IP, cur_Vendor, cur_ScanMethod) "+
- # "VALUES (1, :mac, :ip, :hw, 'arp-scan')",
- # p_arpscan_devices)
-
# ------------------------ TO CONVERT INTO PLUGIN
# # Insert Pi-hole devices
- # startTime = timeNow()
+ # startTime = timeNowTZ()
# sql.execute ("""INSERT INTO CurrentScan (cur_ScanCycle, cur_MAC,
# cur_IP, cur_Vendor, cur_ScanMethod)
# SELECT ?, PH_MAC, PH_IP, PH_Vendor, 'Pi-hole'
@@ -144,7 +134,7 @@ def print_scan_stats (db):
#-------------------------------------------------------------------------------
def create_new_devices (db):
sql = db.sql # TO-DO
- startTime = timeNow()
+ startTime = timeNowTZ()
# arpscan - Insert events for new devices
mylog('debug','[New Devices] New devices - 1 Events')
@@ -289,7 +279,7 @@ def create_new_devices (db):
#-------------------------------------------------------------------------------
def update_devices_data_from_scan (db):
sql = db.sql #TO-DO
- startTime = timeNow()
+ startTime = timeNowTZ()
# Update Last Connection
mylog('debug','[Update Devices] 1 Last Connection')
sql.execute (f"""UPDATE Devices SET dev_LastConnection = '{startTime}',
diff --git a/pialert/helper.py b/pialert/helper.py
index 4cac9713..cb6faa0e 100755
--- a/pialert/helper.py
+++ b/pialert/helper.py
@@ -18,14 +18,13 @@ from const import *
from logger import mylog, logResult
-
-#-------------------------------------------------------------------------------
-def timeNow():
- return datetime.datetime.now().replace(microsecond=0)
#-------------------------------------------------------------------------------
def timeNowTZ():
return datetime.datetime.now(conf.tz).replace(microsecond=0)
+def timeNow():
+ return datetime.datetime.now().replace(microsecond=0)
+
#-------------------------------------------------------------------------------
def updateState(db, newState):
@@ -213,7 +212,7 @@ def isNewVersion(newVersion: bool):
text = url.text
data = json.loads(text)
except requests.exceptions.ConnectionError as e:
- mylog('info', [" Couldn't check for new release."])
+ mylog('minimal', [" Couldn't check for new release."])
data = ""
# make sure we received a valid response and not an API rate limit exceeded message
diff --git a/pialert/initialise.py b/pialert/initialise.py
index 735a6cd3..2eb57716 100755
--- a/pialert/initialise.py
+++ b/pialert/initialise.py
@@ -77,12 +77,13 @@ def importConfigs (db):
conf.mySettings = [] # reset settings
conf.mySettingsSQLsafe = [] # same as above but safe to be passed into a SQL query
+ # User values loaded from now
c_d = read_config_file(config_file)
+
# Import setting if found in the dictionary
- # General
- conf.ENABLE_ARPSCAN = ccd('ENABLE_ARPSCAN', True , c_d, 'Enable arpscan', 'boolean', '', 'General', ['run'])
- conf.SCAN_SUBNETS = ccd('SCAN_SUBNETS', ['192.168.1.0/24 --interface=eth1', '192.168.1.0/24 --interface=eth0'] , c_d, 'Subnets to scan', 'subnets', '', 'General')
+
+ # General
conf.LOG_LEVEL = ccd('LOG_LEVEL', 'verbose' , c_d, 'Log verboseness', 'text.select', "['none', 'minimal', 'verbose', 'debug']", 'General')
conf.TIMEZONE = ccd('TIMEZONE', 'Europe/Berlin' , c_d, 'Time zone', 'text', '', 'General')
conf.ENABLE_PLUGINS = ccd('ENABLE_PLUGINS', True , c_d, 'Enable plugins', 'boolean', '', 'General')
@@ -98,6 +99,10 @@ def importConfigs (db):
conf.DAYS_TO_KEEP_EVENTS = ccd('DAYS_TO_KEEP_EVENTS', 90 , c_d, 'Delete events days', 'integer', '', 'General')
conf.HRS_TO_KEEP_NEWDEV = ccd('HRS_TO_KEEP_NEWDEV', 0 , c_d, 'Keep new devices for', 'integer', "0", 'General')
+ # ARPSCAN (+ other settings provided by the ARPSCAN plugin)
+ conf.ENABLE_ARPSCAN = ccd('ENABLE_ARPSCAN', True , c_d, 'Enable arpscan', 'boolean', '', 'ARPSCAN', ['run'])
+ conf.SCAN_SUBNETS = ccd('SCAN_SUBNETS', ['192.168.1.0/24 --interface=eth1', '192.168.1.0/24 --interface=eth0'] , c_d, 'Subnets to scan', 'subnets', '', 'ARPSCAN')
+
# Email
conf.REPORT_MAIL = ccd('REPORT_MAIL', False , c_d, 'Enable email', 'boolean', '', 'Email', ['test'])
conf.SMTP_SERVER = ccd('SMTP_SERVER', '' , c_d,'SMTP server URL', 'text', '', 'Email')
@@ -175,6 +180,31 @@ def importConfigs (db):
# Init timezone in case it changed
conf.tz = timezone(conf.TIMEZONE)
+
+ # TODO cleanup later ----------------------------------------------------------------------------------
+ # init all time values as we have timezone - all this shoudl be moved into plugin/plugin settings
+ conf.time_started = datetime.datetime.now(conf.tz)
+ conf.cycle = ""
+ conf.check_report = [1, "internet_IP", "update_vendors_silent"]
+ conf.plugins_once_run = False
+
+ # to be deleted if not used
+ conf.log_timestamp = conf.time_started
+ #cron_instance = Cron()
+
+ # timestamps of last execution times
+ conf.startTime = conf.time_started
+ now_minus_24h = conf.time_started - datetime.timedelta(hours = 24)
+
+ # set these times to the past to force the first run
+ conf.last_network_scan = now_minus_24h
+ conf.last_internet_IP_scan = now_minus_24h
+ conf.last_scan_run = now_minus_24h
+ conf.last_cleanup = now_minus_24h
+ conf.last_update_vendors = conf.time_started - datetime.timedelta(days = 6) # update vendors 24h after first run and then once a week
+ conf.last_version_check = now_minus_24h
+
+ # TODO cleanup later ----------------------------------------------------------------------------------
# global mySchedules
# reset schedules
@@ -265,7 +295,7 @@ def importConfigs (db):
#TO DO this creates a circular reference between API and HELPER !
- mylog('info', '[Config] Imported new config')
+ mylog('minimal', '[Config] Imported new config')
@@ -274,7 +304,7 @@ def read_config_file(filename):
"""
retuns dict on the config file key:value pairs
"""
- mylog('info', '[Config] reading config file')
+ mylog('minimal', '[Config] reading config file')
# load the variables from pialert.conf
code = compile(filename.read_text(), filename.name, "exec")
confDict = {} # config dictionary
diff --git a/pialert/logger.py b/pialert/logger.py
index a2d61bb5..fd0da4cb 100755
--- a/pialert/logger.py
+++ b/pialert/logger.py
@@ -9,7 +9,10 @@ from const import *
#-------------------------------------------------------------------------------
# duplication from helper to avoid circle
#-------------------------------------------------------------------------------
-def timeNow():
+def timeNowTZ():
+ if conf.tz:
+ return datetime.datetime.now(conf.tz).replace(microsecond=0)
+ else:
return datetime.datetime.now().replace(microsecond=0)
@@ -31,12 +34,12 @@ def mylog(requestedDebugLevel, n):
reqLvl = lvl[1]
if reqLvl <= setLvl:
- file_print (*n)
+ file_print (*n)
#-------------------------------------------------------------------------------
def file_print (*args):
- result = timeNow().strftime ('%H:%M:%S') + ' '
+ result = timeNowTZ().strftime ('%H:%M:%S') + ' '
for arg in args:
result += str(arg)
diff --git a/pialert/mac_vendor.py b/pialert/mac_vendor.py
index 056b1223..0fda67e7 100755
--- a/pialert/mac_vendor.py
+++ b/pialert/mac_vendor.py
@@ -3,7 +3,7 @@ import subprocess
import conf
from const import pialertPath, vendorsDB
-from helper import timeNow, updateState
+from helper import timeNowTZ, updateState
from logger import mylog
@@ -17,7 +17,7 @@ def update_devices_MAC_vendors (db, pArg = ''):
sql = db.sql # TO-DO
# Header
updateState(db,"Upkeep: Vendors")
- mylog('verbose', ['[', timeNow(), '] Upkeep - Update HW Vendors:' ])
+ mylog('verbose', ['[', timeNowTZ(), '] Upkeep - Update HW Vendors:' ])
# Update vendors DB (iab oui)
mylog('verbose', [' Updating vendors DB (iab & oui)'])
diff --git a/pialert/networkscan.py b/pialert/networkscan.py
index d210f922..c731a16e 100755
--- a/pialert/networkscan.py
+++ b/pialert/networkscan.py
@@ -5,7 +5,7 @@ import conf
from scanners.pihole import copy_pihole_network, read_DHCP_leases
from database import insertOnlineHistory
from device import create_new_devices, print_scan_stats, save_scanned_devices, update_devices_data_from_scan, update_devices_names
-from helper import timeNow
+from helper import timeNowTZ
from logger import mylog
from reporting import skip_repeated_notifications
@@ -25,15 +25,6 @@ def scan_network (db):
# updateState(db,"Scan: Network")
mylog('verbose', ['[Network Scan] Scan Devices:' ])
- # Query ScanCycle properties
- scanCycle_data = query_ScanCycle_Data (db, True)
- if scanCycle_data is None:
- mylog('none', ['\n'])
- mylog('none', ['[Network Scan]*************** ERROR ***************'])
- mylog('none', ['[Network Scan] ScanCycle %s not found' % conf.cycle ])
- mylog('none', ['[Network Scan] Exiting...\n'])
- return False
-
db.commitDB()
# Pi-hole method
@@ -52,21 +43,7 @@ def scan_network (db):
def process_scan (db):
- # Query ScanCycle properties
- scanCycle_data = query_ScanCycle_Data (db, True)
- if scanCycle_data is None:
- mylog('none', ['\n'])
- mylog('none', ['[Process Scan]*************** ERROR ***************'])
- mylog('none', ['[Process Scan] ScanCycle %s not found' % conf.cycle ])
- mylog('none', ['[Process Scan] Exiting...\n'])
- return False
-
- db.commitDB()
-
- # ScanCycle data
- cycle_interval = scanCycle_data['cic_EveryXmin']
-
- # Load current scan data
+ # Load current scan data
mylog('verbose','[Process Scan] Processing scan results')
save_scanned_devices (db)
@@ -114,34 +91,16 @@ def process_scan (db):
mylog('verbose','[Process Scan] Skipping repeated notifications')
skip_repeated_notifications (db)
- # Clear current scan as processed
- db.sql.execute ("DELETE FROM CurrentScan")
+ # Clear current scan as processed TODO uncomment
+ # db.sql.execute ("DELETE FROM CurrentScan")
# Commit changes
db.commitDB()
- # moved plugin execution to main loop
- # if ENABLE_PLUGINS:
- # run_plugin_scripts(db,'always_after_scan')
-
-
-#-------------------------------------------------------------------------------
-def query_ScanCycle_Data (db, pOpenCloseDB = False, cycle = 1):
- # Query Data
- db.sql.execute ("""SELECT cic_arpscanCycles, cic_EveryXmin
- FROM ScanCycles
- WHERE cic_ID = ? """, (cycle,))
- sqlRow = db.sql.fetchone()
-
- # Return Row
- return sqlRow
-
-
-
#-------------------------------------------------------------------------------
def void_ghost_disconnections (db):
sql = db.sql #TO-DO
- startTime = timeNow()
+ startTime = timeNowTZ()
# Void connect ghost events (disconnect event exists in last X min.)
mylog('debug','[Void Ghost Con] - 1 Connect ghost events')
sql.execute ("""UPDATE Events SET eve_PairEventRowid = Null,
@@ -256,7 +215,7 @@ def create_sessions_snapshot (db):
#-------------------------------------------------------------------------------
def insert_events (db):
sql = db.sql #TO-DO
- startTime = timeNow()
+ startTime = timeNowTZ()
# Check device down
mylog('debug','[Events] - 1 - Devices down')
diff --git a/pialert/plugin.py b/pialert/plugin.py
index 87846678..1caaac9b 100755
--- a/pialert/plugin.py
+++ b/pialert/plugin.py
@@ -8,7 +8,7 @@ from collections import namedtuple
import conf
from const import pluginsPath, logPath
from logger import mylog
-from helper import timeNow, updateState, get_file_content, write_file
+from helper import timeNowTZ, updateState, get_file_content, write_file
from api import update_api
#-------------------------------------------------------------------------------
@@ -38,7 +38,7 @@ def run_plugin_scripts(db, runType):
shouldRun = schd.runScheduleCheck()
if shouldRun:
# note the last time the scheduled plugin run was executed
- schd.last_run = timeNow()
+ schd.last_run = timeNowTZ()
if shouldRun:
@@ -102,8 +102,8 @@ def get_setting(key):
result = set
if result is None:
- mylog('info', [' Error - setting_missing - Setting not found for key: ', key])
- mylog('info', [' Error - logging the settings into file: ', logPath + '/setting_missing.json'])
+ mylog('minimal', [' Error - setting_missing - Setting not found for key: ', key])
+ mylog('minimal', [' Error - logging the settings into file: ', logPath + '/setting_missing.json'])
write_file (logPath + '/setting_missing.json', json.dumps({ 'data' : conf.mySettings}))
return result
@@ -165,14 +165,14 @@ def execute_plugin(db, plugin):
resolved = get_setting(param["value"])
if resolved != None:
- resolved = plugin_param_from_glob_set(resolved)
+ resolved = passable_string_from_setting(resolved)
# Get Sql result
if param["type"] == "sql":
resolved = flatten_array(db.get_sql_array(param["value"]))
if resolved == None:
- mylog('none', ['[Plugins] The parameter "name":"', param["name"], '" was resolved as None'])
+ mylog('none', [f'[Plugins] The parameter "name":"{param["name"]}" for "value": {param["value"]} was resolved as None'])
else:
params.append( [param["name"], resolved] )
@@ -286,14 +286,14 @@ def handle_empty(value):
#-------------------------------------------------------------------------------
# Flattens a setting to make it passable to a script
-def plugin_param_from_glob_set(globalSetting):
+def passable_string_from_setting(globalSetting):
setVal = globalSetting[6] # setting value
setTyp = globalSetting[3] # setting type
noConversion = ['text', 'string', 'integer', 'boolean', 'password', 'readonly', 'integer.select', 'text.select', 'integer.checkbox' ]
- arrayConversion = ['text.multiselect', 'list']
+ arrayConversion = ['text.multiselect', 'list', 'subnets']
jsonConversion = ['.template']
if setTyp in noConversion:
@@ -306,6 +306,8 @@ def plugin_param_from_glob_set(globalSetting):
if setTyp.endswith(item):
return json.dumps(setVal)
+ mylog('none', ['[Plugins]: ERROR: Parameter not converted.'])
+
#-------------------------------------------------------------------------------
diff --git a/pialert/publishers/mqtt.py b/pialert/publishers/mqtt.py
index b955f22d..472e9b1b 100755
--- a/pialert/publishers/mqtt.py
+++ b/pialert/publishers/mqtt.py
@@ -48,7 +48,7 @@ def publish_mqtt(client, topic, message):
status = result[0]
if status != 0:
- mylog('info', ["Waiting to reconnect to MQTT broker"])
+ mylog('minimal', ["Waiting to reconnect to MQTT broker"])
time.sleep(0.1)
return True
@@ -180,7 +180,7 @@ def mqtt_start(db):
sec_delay = len(devices) * int(conf.MQTT_DELAY_SEC)*5
- mylog('info', [" Estimated delay: ", (sec_delay), 's ', '(', round(sec_delay/60,1) , 'min)' ])
+ mylog('minimal', [" Estimated delay: ", (sec_delay), 's ', '(', round(sec_delay/60,1) , 'min)' ])
for device in devices:
diff --git a/pialert/reporting.py b/pialert/reporting.py
index c3f6abaa..f63204ab 100755
--- a/pialert/reporting.py
+++ b/pialert/reporting.py
@@ -12,7 +12,7 @@ from json2table import convert
# pialert modules
import conf
from const import pialertPath, logPath, apiPath
-from helper import noti_struc, generate_mac_links, removeDuplicateNewLines, timeNow, hide_email, updateState, get_file_content, write_file
+from helper import noti_struc, generate_mac_links, removeDuplicateNewLines, timeNowTZ, hide_email, updateState, get_file_content, write_file
from logger import logResult, mylog, print_log
@@ -139,7 +139,7 @@ def send_notifications (db):
template_file.close()
# Report Header & footer
- timeFormated = timeNow().strftime ('%Y-%m-%d %H:%M')
+ timeFormated = timeNowTZ().strftime ('%Y-%m-%d %H:%M')
mail_text = mail_text.replace ('', timeFormated)
mail_html = mail_html.replace ('', timeFormated)
@@ -271,43 +271,43 @@ def send_notifications (db):
msg = noti_struc(json_final, mail_text, mail_html)
- mylog('info', ['[Notification] Udating API files'])
+    mylog('minimal', ['[Notification] Updating API files'])
send_api()
if conf.REPORT_MAIL and check_config('email'):
updateState(db,"Send: Email")
- mylog('info', ['[Notification] Sending report by Email'])
+ mylog('minimal', ['[Notification] Sending report by Email'])
send_email (msg )
else :
mylog('verbose', ['[Notification] Skip email'])
if conf.REPORT_APPRISE and check_config('apprise'):
updateState(db,"Send: Apprise")
- mylog('info', ['[Notification] Sending report by Apprise'])
+ mylog('minimal', ['[Notification] Sending report by Apprise'])
send_apprise (msg)
else :
mylog('verbose', ['[Notification] Skip Apprise'])
if conf.REPORT_WEBHOOK and check_config('webhook'):
updateState(db,"Send: Webhook")
- mylog('info', ['[Notification] Sending report by Webhook'])
+ mylog('minimal', ['[Notification] Sending report by Webhook'])
send_webhook (msg)
else :
mylog('verbose', ['[Notification] Skip webhook'])
if conf.REPORT_NTFY and check_config('ntfy'):
updateState(db,"Send: NTFY")
- mylog('info', ['[Notification] Sending report by NTFY'])
+ mylog('minimal', ['[Notification] Sending report by NTFY'])
send_ntfy (msg)
else :
mylog('verbose', ['[Notification] Skip NTFY'])
if conf.REPORT_PUSHSAFER and check_config('pushsafer'):
updateState(db,"Send: PUSHSAFER")
- mylog('info', ['[Notification] Sending report by PUSHSAFER'])
+ mylog('minimal', ['[Notification] Sending report by PUSHSAFER'])
send_pushsafer (msg)
else :
mylog('verbose', ['[Notification] Skip PUSHSAFER'])
# Update MQTT entities
if conf.REPORT_MQTT and check_config('mqtt'):
updateState(db,"Send: MQTT")
- mylog('info', ['[Notification] Establishing MQTT thread'])
+ mylog('minimal', ['[Notification] Establishing MQTT thread'])
mqtt_start(db)
else :
mylog('verbose', ['[Notification] Skip MQTT'])
@@ -318,7 +318,7 @@ def send_notifications (db):
sql.execute ("""UPDATE Devices SET dev_LastNotification = ?
WHERE dev_MAC IN (SELECT eve_MAC FROM Events
WHERE eve_PendingAlertEmail = 1)
- """, (datetime.datetime.now(),) )
+ """, (datetime.datetime.now(conf.tz),) )
sql.execute ("""UPDATE Events SET eve_PendingAlertEmail = 0
WHERE eve_PendingAlertEmail = 1""")
@@ -328,7 +328,7 @@ def send_notifications (db):
conf.changedPorts_json_struc = None
# DEBUG - print number of rows updated
- mylog('info', ['[Notification] Notifications changes: ', sql.rowcount])
+ mylog('minimal', ['[Notification] Notifications changes: ', sql.rowcount])
# Commit changes
db.commitDB()
@@ -488,17 +488,17 @@ def check_and_run_event(db):
def handle_run(runType):
global last_network_scan
- mylog('info', ['[', timeNow(), '] START Run: ', runType])
+ mylog('minimal', ['[', timeNowTZ(), '] START Run: ', runType])
if runType == 'ENABLE_ARPSCAN':
last_network_scan = conf.time_started - datetime.timedelta(hours = 24)
- mylog('info', ['[', timeNow(), '] END Run: ', runType])
+ mylog('minimal', ['[', timeNowTZ(), '] END Run: ', runType])
#-------------------------------------------------------------------------------
def handle_test(testType):
- mylog('info', ['[', timeNow(), '] START Test: ', testType])
+ mylog('minimal', ['[', timeNowTZ(), '] START Test: ', testType])
# Open text sample
sample_txt = get_file_content(pialertPath + '/back/report_sample.txt')
@@ -522,4 +522,4 @@ def handle_test(testType):
if testType == 'REPORT_PUSHSAFER':
send_pushsafer (sample_msg)
- mylog('info', ['[Test Publishers] END Test: ', testType])
\ No newline at end of file
+ mylog('minimal', ['[Test Publishers] END Test: ', testType])
\ No newline at end of file
diff --git a/pialert/scanners/internet.py b/pialert/scanners/internet.py
index d309a5c7..fe5b977d 100755
--- a/pialert/scanners/internet.py
+++ b/pialert/scanners/internet.py
@@ -6,7 +6,7 @@ import re
# pialert modules
import conf
-from helper import timeNow, updateState
+from helper import timeNowTZ, updateState
from logger import append_line_to_file, mylog
from const import logPath
@@ -45,7 +45,7 @@ def check_internet_IP ( db ):
# Check IP Change
if internet_IP != previous_IP :
- mylog('info', ['[Internet IP] New internet IP: ', internet_IP])
+ mylog('minimal', ['[Internet IP] New internet IP: ', internet_IP])
save_new_internet_IP (db, internet_IP)
else :
@@ -116,7 +116,7 @@ def get_previous_internet_IP (db):
def save_new_internet_IP (db, pNewIP):
# Log new IP into logfile
append_line_to_file (logPath + '/IP_changes.log',
- '['+str(timeNow()) +']\t'+ pNewIP +'\n')
+ '['+str(timeNowTZ()) +']\t'+ pNewIP +'\n')
prevIp = get_previous_internet_IP(db)
# Save event
@@ -125,7 +125,7 @@ def save_new_internet_IP (db, pNewIP):
eve_PendingAlertEmail)
VALUES ('Internet', ?, ?, 'Internet IP Changed',
'Previous Internet IP: '|| ?, 1) """,
- (pNewIP, timeNow(), prevIp) )
+ (pNewIP, timeNowTZ(), prevIp) )
# Save new IP
db.sql.execute ("""UPDATE Devices SET dev_LastIP = ?
diff --git a/pialert/scanners/nmapscan.py b/pialert/scanners/nmapscan.py
index 58f0d3bf..3f64ef09 100755
--- a/pialert/scanners/nmapscan.py
+++ b/pialert/scanners/nmapscan.py
@@ -3,7 +3,7 @@ import subprocess
import conf
from const import logPath, sql_nmap_scan_all
-from helper import json_struc, timeNow, updateState
+from helper import json_struc, timeNowTZ, updateState
from logger import append_line_to_file, mylog
#-------------------------------------------------------------------------------
@@ -59,7 +59,7 @@ def performNmapScan(db, devicesToScan):
mylog('verbose', ['[NMAP Scan] Nmap TIMEOUT - the process forcefully terminated as timeout reached for ', device["dev_LastIP"], progress])
if output == "": # check if the subprocess failed
- mylog('info', ['[NMAP Scan] Nmap FAIL for ', device["dev_LastIP"], progress ,' check logs for details'])
+ mylog('minimal', ['[NMAP Scan] Nmap FAIL for ', device["dev_LastIP"], progress ,' check logs for details'])
else:
mylog('verbose', ['[NMAP Scan] Nmap SUCCESS for ', device["dev_LastIP"], progress])
@@ -87,7 +87,7 @@ def performNmapScan(db, devicesToScan):
elif 'PORT' in line and 'STATE' in line and 'SERVICE' in line:
startCollecting = False # end reached
elif startCollecting and len(line.split()) == 3:
- newEntriesTmp.append(nmap_entry(device["dev_MAC"], timeNow(), line.split()[0], line.split()[1], line.split()[2], device["dev_Name"]))
+ newEntriesTmp.append(nmap_entry(device["dev_MAC"], timeNowTZ(), line.split()[0], line.split()[1], line.split()[2], device["dev_Name"]))
elif 'Nmap done' in line:
duration = line.split('scanned in ')[1]
index += 1
diff --git a/pialert/scanners/pholusscan.py b/pialert/scanners/pholusscan.py
index 9dce94aa..837d116f 100755
--- a/pialert/scanners/pholusscan.py
+++ b/pialert/scanners/pholusscan.py
@@ -2,7 +2,7 @@ import subprocess
import re
from const import fullPholusPath, logPath
-from helper import checkIPV4, timeNow, updateState
+from helper import checkIPV4, timeNowTZ, updateState
from logger import mylog
#-------------------------------------------------------------------------------
@@ -64,7 +64,7 @@ def performPholusScan (db, timeoutSec, userSubnets):
for line in newLines:
columns = line.split("|")
if len(columns) == 4:
- params.append(( interface + " " + mask, timeNow() , columns[0].replace(" ", ""), columns[1].replace(" ", ""), columns[2].replace(" ", ""), columns[3], ''))
+ params.append(( interface + " " + mask, timeNowTZ() , columns[0].replace(" ", ""), columns[1].replace(" ", ""), columns[2].replace(" ", ""), columns[3], ''))
if len(params) > 0:
sql.executemany ("""INSERT INTO Pholus_Scan ("Info", "Time", "MAC", "IP_v4_or_v6", "Record_Type", "Value", "Extra") VALUES (?, ?, ?, ?, ?, ?, ?)""", params)
diff --git a/test/test_helper.py b/test/test_helper.py
index ac31ee77..1e678321 100755
--- a/test/test_helper.py
+++ b/test/test_helper.py
@@ -6,7 +6,7 @@ sys.path.append(str(pathlib.Path(__file__).parent.parent.resolve()) + "/pialert/
import datetime
-from helper import timeNow, updateSubnets
+from helper import timeNowTZ, updateSubnets
# -------------------------------------------------------------------------------
From 085e7c80e7bf1c303f3de90dfb720e3c92ac07ba Mon Sep 17 00:00:00 2001
From: jokob-sk <96159884+jokob-sk@users.noreply.github.com>
Date: Mon, 7 Aug 2023 08:40:27 +1000
Subject: [PATCH 11/14] Update systeminfo.php
Remove php error muting
---
front/systeminfo.php | 1 -
1 file changed, 1 deletion(-)
diff --git a/front/systeminfo.php b/front/systeminfo.php
index d62ece1e..c0f58eed 100644
--- a/front/systeminfo.php
+++ b/front/systeminfo.php
@@ -12,7 +12,6 @@
// cvc90 2023 https://github.com/cvc90 GNU GPLv3
//------------------------------------------------------------------------------
- error_reporting(0);// Turn off php errors
require 'php/templates/header.php';
?>
From ff9245c31dc5a139b3582923c82685d73a36fdcd Mon Sep 17 00:00:00 2001
From: Jokob-sk
Date: Mon, 7 Aug 2023 15:33:41 +1000
Subject: [PATCH 12/14] ARPSCAN to plugin rewrite
---
front/plugins/README.md | 2 +-
front/plugins/arp_scan/config.json | 12 +-
front/plugins/arp_scan/script.py | 41 +++++-
front/plugins/snmp_discovery/config.json | 2 +-
pialert/__main__.py | 12 +-
pialert/database.py | 2 +-
pialert/networkscan.py | 4 +-
pialert/plugin.py | 165 ++++++++++++++---------
8 files changed, 161 insertions(+), 79 deletions(-)
diff --git a/front/plugins/README.md b/front/plugins/README.md
index 9a33e305..a45a7a99 100755
--- a/front/plugins/README.md
+++ b/front/plugins/README.md
@@ -63,7 +63,7 @@ UI displays outdated values until the API endpoints get refreshed.
## Plugin file structure overview
-> Folder name must be the same as the code name value in: `"code_name": ""`
+> ⚠️ Folder name must be the same as the code name value in: `"code_name": ""`
> Unique prefix needs to be unique compared to the other settings prefixes, e.g.: the prefix `APPRISE` is already in use.
| File | Required (plugin type) | Description |
diff --git a/front/plugins/arp_scan/config.json b/front/plugins/arp_scan/config.json
index 0c9b53ab..e45dacca 100755
--- a/front/plugins/arp_scan/config.json
+++ b/front/plugins/arp_scan/config.json
@@ -1,10 +1,18 @@
{
- "code_name": "arpscan",
+ "code_name": "arp_scan",
"unique_prefix": "ARPSCAN",
"enabled": true,
"data_source": "script",
"mapped_to_table": "CurrentScan",
-
+ "data_filters": [
+ {
+ "compare_column" : "Object_PrimaryID",
+ "compare_operator" : "==",
+ "compare_field_id": "txtMacFilter",
+ "compare_js_template": "'{value}'.toString()",
+ "compare_use_quotes": true
+ }
+ ],
"localized": ["display_name", "description", "icon"],
"display_name": [
diff --git a/front/plugins/arp_scan/script.py b/front/plugins/arp_scan/script.py
index 5e16ee96..7ebb4a92 100755
--- a/front/plugins/arp_scan/script.py
+++ b/front/plugins/arp_scan/script.py
@@ -5,6 +5,7 @@ import pathlib
import argparse
import sys
import re
+import base64
import subprocess
from time import strftime
@@ -18,22 +19,54 @@ RESULT_FILE = os.path.join(CUR_PATH, 'last_result.log')
def main():
+ # sample
+ # /home/pi/pialert/front/plugins/arp_scan/script.py userSubnets=b'MTkyLjE2OC4xLjAvMjQgLS1pbnRlcmZhY2U9ZXRoMQ=='
# the script expects a parameter in the format of userSubnets=subnet1,subnet2,...
parser = argparse.ArgumentParser(description='Import devices from settings')
parser.add_argument('userSubnets', nargs='+', help="list of subnets with options")
values = parser.parse_args()
+ import base64
+
+ # Assuming Plugin_Objects is a class or function that reads data from the RESULT_FILE
+ # and returns a list of objects called 'devices'.
devices = Plugin_Objects(RESULT_FILE)
- subnets_list = []
+ # Print a message to indicate that the script is starting.
+ print('In script:')
- if isinstance(values.userSubnets, list):
- subnets_list = values.userSubnets
+ # Assuming 'values' is a dictionary or object that contains a key 'userSubnets'
+ # which holds a list of user-submitted subnets.
+ # Printing the userSubnets list to check its content.
+ print(values.userSubnets)
+
+ # Extract the base64-encoded subnet information from the first element of the userSubnets list.
+ # The format of the element is assumed to be like 'userSubnets=b'.
+ userSubnetsParamBase64 = values.userSubnets[0].split('userSubnets=b')[1]
+
+ # Printing the extracted base64-encoded subnet information.
+ print(userSubnetsParamBase64)
+
+ # Decode the base64-encoded subnet information to get the actual subnet information in ASCII format.
+ userSubnetsParam = base64.b64decode(userSubnetsParamBase64).decode('ascii')
+
+ # Print the decoded subnet information.
+ print('userSubnetsParam:')
+ print(userSubnetsParam)
+
+ # Check if the decoded subnet information contains multiple subnets separated by commas.
+ # If it does, split the string into a list of individual subnets.
+ # Otherwise, create a list with a single element containing the subnet information.
+ if ',' in userSubnetsParam:
+ subnets_list = userSubnetsParam.split(',')
else:
- subnets_list = [values.userSubnets]
+ subnets_list = [userSubnetsParam]
+ # Execute the ARP scanning process on the list of subnets (whether it's one or multiple subnets).
+ # The function 'execute_arpscan' is assumed to be defined elsewhere in the code.
unique_devices = execute_arpscan(subnets_list)
+
for device in unique_devices:
devices.add_object(
primaryId=device['mac'], # MAC (Device Name)
diff --git a/front/plugins/snmp_discovery/config.json b/front/plugins/snmp_discovery/config.json
index 6b99c0bc..9a3b4f09 100755
--- a/front/plugins/snmp_discovery/config.json
+++ b/front/plugins/snmp_discovery/config.json
@@ -2,7 +2,7 @@
"code_name": "snmp_discovery",
"unique_prefix": "SNMPDSC",
"enabled": true,
- "data_source": "pyton-script",
+ "data_source": "script",
"data_filters": [
{
"compare_column" : "Object_PrimaryID",
diff --git a/pialert/__main__.py b/pialert/__main__.py
index 5be82db0..c24bf19d 100755
--- a/pialert/__main__.py
+++ b/pialert/__main__.py
@@ -107,7 +107,14 @@ def main ():
# update time started
conf.loop_start_time = timeNowTZ()
+
+ # TODO fix these
loop_start_time = conf.loop_start_time # TODO fix
+ last_update_vendors = conf.last_update_vendors
+ last_network_scan = conf.last_network_scan
+ last_cleanup = conf.last_cleanup
+ last_version_check = conf.last_version_check
+
# check if new version is available / only check once an hour
if conf.last_version_check + datetime.timedelta(hours=1) < loop_start_time :
@@ -128,10 +135,11 @@ def main ():
update_api(db)
# proceed if 1 minute passed
- if last_scan_run + datetime.timedelta(minutes=1) < loop_start_time :
+ if conf.last_scan_run + datetime.timedelta(minutes=1) < conf.loop_start_time :
# last time any scan or maintenance/upkeep was run
- last_scan_run = loop_start_time
+ conf.last_scan_run = loop_start_time
+ last_internet_IP_scan = conf.last_internet_IP_scan
# Header
updateState(db,"Process: Start")
diff --git a/pialert/database.py b/pialert/database.py
index 5c4073c5..437b9ea2 100755
--- a/pialert/database.py
+++ b/pialert/database.py
@@ -397,7 +397,7 @@ class DB():
self.sql.execute("DROP TABLE CurrentScan;")
self.sql.execute(""" CREATE TABLE CurrentScan (
- cur_ScanCycle INTEGER NOT NULL,
+ cur_ScanCycle INTEGER,
cur_MAC STRING(50) NOT NULL COLLATE NOCASE,
cur_IP STRING(50) NOT NULL COLLATE NOCASE,
cur_Vendor STRING(250),
diff --git a/pialert/networkscan.py b/pialert/networkscan.py
index c731a16e..d18c0179 100755
--- a/pialert/networkscan.py
+++ b/pialert/networkscan.py
@@ -91,8 +91,8 @@ def process_scan (db):
mylog('verbose','[Process Scan] Skipping repeated notifications')
skip_repeated_notifications (db)
- # Clear current scan as processed TODO uncomment
- # db.sql.execute ("DELETE FROM CurrentScan")
+ # Clear current scan as processed
+ db.sql.execute ("DELETE FROM CurrentScan")
# Commit changes
db.commitDB()
diff --git a/pialert/plugin.py b/pialert/plugin.py
index 1caaac9b..e2b9c69d 100755
--- a/pialert/plugin.py
+++ b/pialert/plugin.py
@@ -2,6 +2,7 @@ import os
import json
import subprocess
import datetime
+import base64
from collections import namedtuple
# pialert modules
@@ -229,7 +230,7 @@ def execute_plugin(db, plugin):
if len(columns) == 9:
sqlParams.append((plugin["unique_prefix"], columns[0], columns[1], 'null', columns[2], columns[3], columns[4], columns[5], columns[6], 0, columns[7], 'null', columns[8]))
else:
- mylog('none', ['[Plugins]: Skipped invalid line in the output: ', line])
+ mylog('none', ['[Plugins] Skipped invalid line in the output: ', line])
else:
mylog('debug', [f'[Plugins] The file {file_path} does not exist'])
@@ -249,7 +250,7 @@ def execute_plugin(db, plugin):
if len(row) == 9 and (row[0] in ['','null']) == False :
sqlParams.append((plugin["unique_prefix"], row[0], handle_empty(row[1]), 'null', row[2], row[3], row[4], handle_empty(row[5]), handle_empty(row[6]), 0, row[7], 'null', row[8]))
else:
- mylog('none', ['[Plugins]: Skipped invalid sql result'])
+ mylog('none', ['[Plugins] Skipped invalid sql result'])
# check if the subprocess / SQL query failed / there was no valid output
@@ -257,7 +258,7 @@ def execute_plugin(db, plugin):
mylog('none', ['[Plugins] No output received from the plugin ', plugin["unique_prefix"], ' - enable LOG_LEVEL=debug and check logs'])
return
else:
- mylog('verbose', ['[Plugins]: SUCCESS, received ', len(sqlParams), ' entries'])
+ mylog('verbose', ['[Plugins] SUCCESS, received ', len(sqlParams), ' entries'])
# process results if any
if len(sqlParams) > 0:
@@ -293,20 +294,27 @@ def passable_string_from_setting(globalSetting):
noConversion = ['text', 'string', 'integer', 'boolean', 'password', 'readonly', 'integer.select', 'text.select', 'integer.checkbox' ]
- arrayConversion = ['text.multiselect', 'list', 'subnets']
+ arrayConversion = ['text.multiselect', 'list']
+ arrayConversionBase64 = ['subnets']
jsonConversion = ['.template']
+ mylog('debug', f'[Plugins] setTyp: {setTyp}')
+
if setTyp in noConversion:
return setVal
if setTyp in arrayConversion:
return flatten_array(setVal)
+ if setTyp in arrayConversionBase64:
+
+ return flatten_array(setVal, encodeBase64 = True)
+
for item in jsonConversion:
if setTyp.endswith(item):
return json.dumps(setVal)
- mylog('none', ['[Plugins]: ERROR: Parameter not converted.'])
+ mylog('none', ['[Plugins] ERROR: Parameter not converted.'])
@@ -337,33 +345,47 @@ def get_setting_value(key):
return ''
#-------------------------------------------------------------------------------
-def flatten_array(arr):
-
+def flatten_array(arr, encodeBase64=False):
tmp = ''
+ arrayItemStr = ''
+ mylog('debug', '[Plugins] Flattening the below array')
+ mylog('debug', f'[Plugins] Convert to Base64: {encodeBase64}')
mylog('debug', arr)
- for arrayItem in arr:
+ for arrayItem in arr:
# only one column flattening is supported
if isinstance(arrayItem, list):
- arrayItem = str(arrayItem[0])
+ arrayItemStr = str(arrayItem[0]).replace("'", '') # removing single quotes - not allowed
+ else:
+ # is string already
+ arrayItemStr = arrayItem
- tmp += arrayItem + ','
- # tmp = tmp.replace("'","").replace(' ','') # No single quotes or empty spaces allowed
- tmp = tmp.replace("'","") # No single quotes allowed
- return tmp[:-1] # Remove last comma ','
+ tmp += f'{arrayItemStr},'
+
+ tmp = tmp[:-1] # Remove last comma ','
+
+ mylog('debug', f'[Plugins] Flattened array: {tmp}')
+
+ if encodeBase64:
+ tmp = str(base64.b64encode(tmp.encode('ascii')))
+ mylog('debug', f'[Plugins] Flattened array (base64): {tmp}')
+
+
+ return tmp
+
#-------------------------------------------------------------------------------
# Replace {wildcars} with parameters
def resolve_wildcards_arr(commandArr, params):
- mylog('debug', ['[Plugins]: Pre-Resolved CMD: '] + commandArr)
+ mylog('debug', ['[Plugins] Pre-Resolved CMD: '] + commandArr)
for param in params:
- # mylog('debug', ['[Plugins]: key : {', param[0], '}'])
- # mylog('debug', ['[Plugins]: resolved: ', param[1]])
+ # mylog('debug', ['[Plugins] key : {', param[0], '}'])
+ # mylog('debug', ['[Plugins] resolved: ', param[1]])
i = 0
@@ -493,67 +515,78 @@ def process_plugin_events(db, plugin):
# Perform databse table mapping if enabled for the plugin
if len(pluginEvents) > 0 and "mapped_to_table" in plugin:
- sqlParams = []
+ # Initialize an empty list to store SQL parameters.
+ sqlParams = []
- dbTable = plugin['mapped_to_table']
+ # Get the database table name from the 'mapped_to_table' key in the 'plugin' dictionary.
+ dbTable = plugin['mapped_to_table']
- mylog('debug', ['[Plugins] Mapping objects to database table: ', dbTable])
+ # Log a debug message indicating the mapping of objects to the database table.
+ mylog('debug', ['[Plugins] Mapping objects to database table: ', dbTable])
- # collect all columns to be mapped
- mappedCols = []
- columnsStr = ''
- valuesStr = ''
+ # Initialize lists to hold mapped column names, columnsStr, and valuesStr for SQL query.
+ mappedCols = []
+ columnsStr = ''
+ valuesStr = ''
- for clmn in plugin['database_column_definitions']:
- if 'mapped_to_column' in clmn:
- mappedCols.append(clmn)
- columnsStr = f'{columnsStr}, "{clmn["mapped_to_column"]}"'
- valuesStr = f'{valuesStr}, ?'
+ # Loop through the 'database_column_definitions' in the 'plugin' dictionary to collect mapped columns.
+ # Build the columnsStr and valuesStr for the SQL query.
+ for clmn in plugin['database_column_definitions']:
+ if 'mapped_to_column' in clmn:
+ mappedCols.append(clmn)
+ columnsStr = f'{columnsStr}, "{clmn["mapped_to_column"]}"'
+ valuesStr = f'{valuesStr}, ?'
- if len(columnsStr) > 0:
- columnsStr = columnsStr[1:] # remove first ','
- valuesStr = valuesStr[1:] # remove first ','
+ # Remove the first ',' from columnsStr and valuesStr.
+ if len(columnsStr) > 0:
+ columnsStr = columnsStr[1:]
+ valuesStr = valuesStr[1:]
- # map the column names to plugin object event values
- for plgEv in pluginEvents:
+ # Map the column names to plugin object event values and create a list of tuples 'sqlParams'.
+ for plgEv in pluginEvents:
+ tmpList = []
- tmpList = []
+ for col in mappedCols:
+ if col['column'] == 'Index':
+ tmpList.append(plgEv.index)
+ elif col['column'] == 'Plugin':
+ tmpList.append(plgEv.pluginPref)
+ elif col['column'] == 'Object_PrimaryID':
+ tmpList.append(plgEv.primaryId)
+ elif col['column'] == 'Object_SecondaryID':
+ tmpList.append(plgEv.secondaryId)
+ elif col['column'] == 'DateTimeCreated':
+ tmpList.append(plgEv.created)
+ elif col['column'] == 'DateTimeChanged':
+ tmpList.append(plgEv.changed)
+ elif col['column'] == 'Watched_Value1':
+ tmpList.append(plgEv.watched1)
+ elif col['column'] == 'Watched_Value2':
+ tmpList.append(plgEv.watched2)
+ elif col['column'] == 'Watched_Value3':
+ tmpList.append(plgEv.watched3)
+ elif col['column'] == 'Watched_Value4':
+ tmpList.append(plgEv.watched4)
+ elif col['column'] == 'UserData':
+ tmpList.append(plgEv.userData)
+ elif col['column'] == 'Extra':
+ tmpList.append(plgEv.extra)
+ elif col['column'] == 'Status':
+ tmpList.append(plgEv.status)
- for col in mappedCols:
- if col['column'] == 'Index':
- tmpList.append(plgEv.index)
- elif col['column'] == 'Plugin':
- tmpList.append(plgEv.pluginPref)
- elif col['column'] == 'Object_PrimaryID':
- tmpList.append(plgEv.primaryId)
- elif col['column'] == 'Object_SecondaryID':
- tmpList.append(plgEv.secondaryId)
- elif col['column'] == 'DateTimeCreated':
- tmpList.append(plgEv.created)
- elif col['column'] == 'DateTimeChanged':
- tmpList.append(plgEv.changed)
- elif col['column'] == 'Watched_Value1':
- tmpList.append(plgEv.watched1)
- elif col['column'] == 'Watched_Value2':
- tmpList.append(plgEv.watched2)
- elif col['column'] == 'Watched_Value3':
- tmpList.append(plgEv.watched3)
- elif col['column'] == 'Watched_Value4':
- tmpList.append(plgEv.watched4)
- elif col['column'] == 'UserData':
- tmpList.append(plgEv.userData)
- elif col['column'] == 'Extra':
- tmpList.append(plgEv.extra)
- elif col['column'] == 'Status':
- tmpList.append(plgEv.status)
-
- sqlParams.append(tuple(tmpList))
+ # Append the mapped values to the list 'sqlParams' as a tuple.
+ sqlParams.append(tuple(tmpList))
- q = f'INSERT into {dbTable} ({columnsStr}) VALUES ({valuesStr})'
+ # Generate the SQL INSERT query using the collected information.
+ q = f'INSERT into {dbTable} ({columnsStr}) VALUES ({valuesStr})'
- mylog('debug', ['[Plugins] SQL query for mapping: ', q ])
+ # Log a debug message showing the generated SQL query for mapping.
+ mylog('debug', ['[Plugins] SQL query for mapping: ', q])
+
+ # Execute the SQL query using 'sql.executemany()' and the 'sqlParams' list of tuples.
+ # This will insert multiple rows into the database in one go.
+ sql.executemany(q, sqlParams)
- sql.executemany (q, sqlParams)
db.commitDB()
From 5f3f4c1a73edb7f86c47431c16ad71d8acc3a61f Mon Sep 17 00:00:00 2001
From: Jokob-sk
Date: Mon, 7 Aug 2023 16:22:27 +1000
Subject: [PATCH 13/14] ARPSCAN to plugin rewrite
---
pialert/plugin.py | 121 +++++++++++++++++++++----------------------
pialert/reporting.py | 22 +++++---
2 files changed, 75 insertions(+), 68 deletions(-)
diff --git a/pialert/plugin.py b/pialert/plugin.py
index e2b9c69d..3fcf1cc4 100755
--- a/pialert/plugin.py
+++ b/pialert/plugin.py
@@ -292,7 +292,6 @@ def passable_string_from_setting(globalSetting):
setVal = globalSetting[6] # setting value
setTyp = globalSetting[3] # setting type
-
noConversion = ['text', 'string', 'integer', 'boolean', 'password', 'readonly', 'integer.select', 'text.select', 'integer.checkbox' ]
arrayConversion = ['text.multiselect', 'list']
arrayConversionBase64 = ['subnets']
@@ -515,77 +514,77 @@ def process_plugin_events(db, plugin):
# Perform databse table mapping if enabled for the plugin
if len(pluginEvents) > 0 and "mapped_to_table" in plugin:
- # Initialize an empty list to store SQL parameters.
- sqlParams = []
+ # Initialize an empty list to store SQL parameters.
+ sqlParams = []
- # Get the database table name from the 'mapped_to_table' key in the 'plugin' dictionary.
- dbTable = plugin['mapped_to_table']
+ # Get the database table name from the 'mapped_to_table' key in the 'plugin' dictionary.
+ dbTable = plugin['mapped_to_table']
- # Log a debug message indicating the mapping of objects to the database table.
- mylog('debug', ['[Plugins] Mapping objects to database table: ', dbTable])
+ # Log a debug message indicating the mapping of objects to the database table.
+ mylog('debug', ['[Plugins] Mapping objects to database table: ', dbTable])
- # Initialize lists to hold mapped column names, columnsStr, and valuesStr for SQL query.
- mappedCols = []
- columnsStr = ''
- valuesStr = ''
+ # Initialize lists to hold mapped column names, columnsStr, and valuesStr for SQL query.
+ mappedCols = []
+ columnsStr = ''
+ valuesStr = ''
- # Loop through the 'database_column_definitions' in the 'plugin' dictionary to collect mapped columns.
- # Build the columnsStr and valuesStr for the SQL query.
- for clmn in plugin['database_column_definitions']:
- if 'mapped_to_column' in clmn:
- mappedCols.append(clmn)
- columnsStr = f'{columnsStr}, "{clmn["mapped_to_column"]}"'
- valuesStr = f'{valuesStr}, ?'
+ # Loop through the 'database_column_definitions' in the 'plugin' dictionary to collect mapped columns.
+ # Build the columnsStr and valuesStr for the SQL query.
+ for clmn in plugin['database_column_definitions']:
+ if 'mapped_to_column' in clmn:
+ mappedCols.append(clmn)
+ columnsStr = f'{columnsStr}, "{clmn["mapped_to_column"]}"'
+ valuesStr = f'{valuesStr}, ?'
- # Remove the first ',' from columnsStr and valuesStr.
- if len(columnsStr) > 0:
- columnsStr = columnsStr[1:]
- valuesStr = valuesStr[1:]
+ # Remove the first ',' from columnsStr and valuesStr.
+ if len(columnsStr) > 0:
+ columnsStr = columnsStr[1:]
+ valuesStr = valuesStr[1:]
- # Map the column names to plugin object event values and create a list of tuples 'sqlParams'.
- for plgEv in pluginEvents:
- tmpList = []
+ # Map the column names to plugin object event values and create a list of tuples 'sqlParams'.
+ for plgEv in pluginEvents:
+ tmpList = []
- for col in mappedCols:
- if col['column'] == 'Index':
- tmpList.append(plgEv.index)
- elif col['column'] == 'Plugin':
- tmpList.append(plgEv.pluginPref)
- elif col['column'] == 'Object_PrimaryID':
- tmpList.append(plgEv.primaryId)
- elif col['column'] == 'Object_SecondaryID':
- tmpList.append(plgEv.secondaryId)
- elif col['column'] == 'DateTimeCreated':
- tmpList.append(plgEv.created)
- elif col['column'] == 'DateTimeChanged':
- tmpList.append(plgEv.changed)
- elif col['column'] == 'Watched_Value1':
- tmpList.append(plgEv.watched1)
- elif col['column'] == 'Watched_Value2':
- tmpList.append(plgEv.watched2)
- elif col['column'] == 'Watched_Value3':
- tmpList.append(plgEv.watched3)
- elif col['column'] == 'Watched_Value4':
- tmpList.append(plgEv.watched4)
- elif col['column'] == 'UserData':
- tmpList.append(plgEv.userData)
- elif col['column'] == 'Extra':
- tmpList.append(plgEv.extra)
- elif col['column'] == 'Status':
- tmpList.append(plgEv.status)
+ for col in mappedCols:
+ if col['column'] == 'Index':
+ tmpList.append(plgEv.index)
+ elif col['column'] == 'Plugin':
+ tmpList.append(plgEv.pluginPref)
+ elif col['column'] == 'Object_PrimaryID':
+ tmpList.append(plgEv.primaryId)
+ elif col['column'] == 'Object_SecondaryID':
+ tmpList.append(plgEv.secondaryId)
+ elif col['column'] == 'DateTimeCreated':
+ tmpList.append(plgEv.created)
+ elif col['column'] == 'DateTimeChanged':
+ tmpList.append(plgEv.changed)
+ elif col['column'] == 'Watched_Value1':
+ tmpList.append(plgEv.watched1)
+ elif col['column'] == 'Watched_Value2':
+ tmpList.append(plgEv.watched2)
+ elif col['column'] == 'Watched_Value3':
+ tmpList.append(plgEv.watched3)
+ elif col['column'] == 'Watched_Value4':
+ tmpList.append(plgEv.watched4)
+ elif col['column'] == 'UserData':
+ tmpList.append(plgEv.userData)
+ elif col['column'] == 'Extra':
+ tmpList.append(plgEv.extra)
+ elif col['column'] == 'Status':
+ tmpList.append(plgEv.status)
- # Append the mapped values to the list 'sqlParams' as a tuple.
- sqlParams.append(tuple(tmpList))
+ # Append the mapped values to the list 'sqlParams' as a tuple.
+ sqlParams.append(tuple(tmpList))
- # Generate the SQL INSERT query using the collected information.
- q = f'INSERT into {dbTable} ({columnsStr}) VALUES ({valuesStr})'
+ # Generate the SQL INSERT query using the collected information.
+ q = f'INSERT into {dbTable} ({columnsStr}) VALUES ({valuesStr})'
- # Log a debug message showing the generated SQL query for mapping.
- mylog('debug', ['[Plugins] SQL query for mapping: ', q])
+ # Log a debug message showing the generated SQL query for mapping.
+ mylog('debug', ['[Plugins] SQL query for mapping: ', q])
- # Execute the SQL query using 'sql.executemany()' and the 'sqlParams' list of tuples.
- # This will insert multiple rows into the database in one go.
- sql.executemany(q, sqlParams)
+ # Execute the SQL query using 'sql.executemany()' and the 'sqlParams' list of tuples.
+ # This will insert multiple rows into the database in one go.
+ sql.executemany(q, sqlParams)
db.commitDB()
diff --git a/pialert/reporting.py b/pialert/reporting.py
index f63204ab..f702ccb5 100755
--- a/pialert/reporting.py
+++ b/pialert/reporting.py
@@ -14,6 +14,7 @@ import conf
from const import pialertPath, logPath, apiPath
from helper import noti_struc, generate_mac_links, removeDuplicateNewLines, timeNowTZ, hide_email, updateState, get_file_content, write_file
from logger import logResult, mylog, print_log
+from plugin import execute_plugin
from publishers.email import (check_config as email_check_config,
@@ -468,15 +469,18 @@ def check_and_run_event(db):
event, param = ['','']
if len(rows) > 0 and rows[0]['par_Value'] != 'finished':
- event = rows[0]['par_Value'].split('|')[0]
- param = rows[0]['par_Value'].split('|')[1]
+ keyValue = rows[0]['par_Value'].split('|')
+
+ if len(keyValue) == 2:
+ event = keyValue[0]
+ param = keyValue[1]
else:
return
if event == 'test':
handle_test(param)
if event == 'run':
- handle_run(param)
+ handle_run(param, db)
# clear event execution flag
sql.execute ("UPDATE Parameters SET par_Value='finished' WHERE par_ID='Front_Event'")
@@ -485,16 +489,20 @@ def check_and_run_event(db):
db.commitDB()
#-------------------------------------------------------------------------------
-def handle_run(runType):
- global last_network_scan
-
+def handle_run(runType, db):
+
mylog('minimal', ['[', timeNowTZ(), '] START Run: ', runType])
if runType == 'ENABLE_ARPSCAN':
- last_network_scan = conf.time_started - datetime.timedelta(hours = 24)
+ # find and execute the ARPSCAN plugin
+ for plugin in conf.plugins:
+ if plugin["unique_prefix"] == 'ARPSCAN':
+ execute_plugin(db, plugin)
mylog('minimal', ['[', timeNowTZ(), '] END Run: ', runType])
+
+
#-------------------------------------------------------------------------------
def handle_test(testType):
From 80bda24b5f552ae6a723c8aaee922a5715648bcd Mon Sep 17 00:00:00 2001
From: Jokob-sk
Date: Mon, 7 Aug 2023 17:42:46 +1000
Subject: [PATCH 14/14] ARPSCAN to plugin rewrite
---
front/plugins/arp_scan/config.json | 2 +-
pialert/networkscan.py | 88 +++++++++++++-----------------
2 files changed, 40 insertions(+), 50 deletions(-)
diff --git a/front/plugins/arp_scan/config.json b/front/plugins/arp_scan/config.json
index e45dacca..9d3ab850 100755
--- a/front/plugins/arp_scan/config.json
+++ b/front/plugins/arp_scan/config.json
@@ -128,7 +128,7 @@
{
"function": "REPORT_ON",
"type": "text.multiselect",
- "default_value": ["new", "watched-changed"],
+ "default_value": ["new"],
"options": ["new", "watched-changed", "watched-not-changed"],
"localized": ["name", "description"],
"name": [
diff --git a/pialert/networkscan.py b/pialert/networkscan.py
index d18c0179..9509a0fb 100755
--- a/pialert/networkscan.py
+++ b/pialert/networkscan.py
@@ -103,62 +103,52 @@ def void_ghost_disconnections (db):
startTime = timeNowTZ()
# Void connect ghost events (disconnect event exists in last X min.)
mylog('debug','[Void Ghost Con] - 1 Connect ghost events')
- sql.execute ("""UPDATE Events SET eve_PairEventRowid = Null,
- eve_EventType ='VOIDED - ' || eve_EventType
- WHERE eve_MAC != 'Internet'
- AND eve_EventType = 'Connected'
- AND eve_DateTime = ?
- AND eve_MAC IN (
- SELECT Events.eve_MAC
- FROM CurrentScan, Devices, ScanCycles, Events
- WHERE cur_ScanCycle = ?
- AND dev_MAC = cur_MAC
- AND dev_ScanCycle = cic_ID
- AND cic_ID = cur_ScanCycle
- AND eve_MAC = cur_MAC
- AND eve_EventType = 'Disconnected'
- AND eve_DateTime >=
- DATETIME (?, '-' || cic_EveryXmin ||' minutes')
- ) """,
- (startTime, conf.cycle, startTime) )
+ sql.execute("""UPDATE Events SET eve_PairEventRowid = Null,
+ eve_EventType ='VOIDED - ' || eve_EventType
+ WHERE eve_MAC != 'Internet'
+ AND eve_EventType = 'Connected'
+ AND eve_DateTime = ?
+ AND eve_MAC IN (
+ SELECT Events.eve_MAC
+ FROM CurrentScan, Devices, Events
+ WHERE dev_MAC = cur_MAC
+ AND eve_MAC = cur_MAC
+ AND eve_EventType = 'Disconnected'
+ AND eve_DateTime >= DATETIME(?, '-3 minutes')
+ ) """,
+ (startTime, startTime))
# Void connect paired events
mylog('debug','[Void Ghost Con] - 2 Paired events')
- sql.execute ("""UPDATE Events SET eve_PairEventRowid = Null
- WHERE eve_MAC != 'Internet'
- AND eve_PairEventRowid IN (
- SELECT Events.RowID
- FROM CurrentScan, Devices, ScanCycles, Events
- WHERE cur_ScanCycle = ?
- AND dev_MAC = cur_MAC
- AND dev_ScanCycle = cic_ID
- AND cic_ID = cur_ScanCycle
- AND eve_MAC = cur_MAC
- AND eve_EventType = 'Disconnected'
- AND eve_DateTime >=
- DATETIME (?, '-' || cic_EveryXmin ||' minutes')
- ) """,
- (conf.cycle, startTime) )
+ sql.execute("""UPDATE Events SET eve_PairEventRowid = Null
+ WHERE eve_MAC != 'Internet'
+ AND eve_PairEventRowid IN (
+ SELECT Events.RowID
+ FROM CurrentScan, Devices, Events
+ WHERE dev_MAC = cur_MAC
+ AND eve_MAC = cur_MAC
+ AND eve_EventType = 'Disconnected'
+ AND eve_DateTime >= DATETIME(?, '-3 minutes')
+ ) """,
+ (startTime,))
# Void disconnect ghost events
mylog('debug','[Void Ghost Con] - 3 Disconnect ghost events')
- sql.execute ("""UPDATE Events SET eve_PairEventRowid = Null,
- eve_EventType = 'VOIDED - '|| eve_EventType
- WHERE eve_MAC != 'Internet'
- AND ROWID IN (
- SELECT Events.RowID
- FROM CurrentScan, Devices, ScanCycles, Events
- WHERE cur_ScanCycle = ?
- AND dev_MAC = cur_MAC
- AND dev_ScanCycle = cic_ID
- AND cic_ID = cur_ScanCycle
- AND eve_MAC = cur_MAC
- AND eve_EventType = 'Disconnected'
- AND eve_DateTime >=
- DATETIME (?, '-' || cic_EveryXmin ||' minutes')
- ) """,
- (conf.cycle, startTime) )
+ sql.execute("""UPDATE Events SET eve_PairEventRowid = Null,
+ eve_EventType = 'VOIDED - '|| eve_EventType
+ WHERE eve_MAC != 'Internet'
+ AND ROWID IN (
+ SELECT Events.RowID
+ FROM CurrentScan, Devices, Events
+ WHERE dev_MAC = cur_MAC
+ AND eve_MAC = cur_MAC
+ AND eve_EventType = 'Disconnected'
+ AND eve_DateTime >= DATETIME(?, '-3 minutes')
+ ) """,
+ (startTime,))
+
mylog('debug','[Void Ghost Con] Void Ghost Connections end')
+
db.commitDB()
#-------------------------------------------------------------------------------