From e6644ad3ec5338d80753cf8d22c9687d46a26e23 Mon Sep 17 00:00:00 2001 From: Jokob-sk Date: Sun, 27 Aug 2023 18:45:07 +1000 Subject: [PATCH] NMAP plugin conversion v0.1 --- docs/API.md | 3 +- docs/DATABASE.md | 3 +- front/network.php | 18 +- front/php/templates/language/en_us.json | 12 - front/php/templates/language/es_es.json | 11 - front/plugins/README.md | 5 +- front/plugins/arp_scan/config.json | 3 +- front/plugins/arp_scan/script.py | 4 +- front/plugins/nmap_scan/README.md | 11 + .../{nmap_services => nmap_scan}/README_ES.md | 0 .../{nmap_services => nmap_scan}/config.json | 243 +++++++++------ front/plugins/nmap_scan/script.py | 280 ++++++++++++++++++ front/plugins/nmap_services/README.md | 11 - front/plugins/plugin_helper.py | 30 +- pialert/__main__.py | 29 +- pialert/api.py | 6 +- pialert/conf.py | 10 +- pialert/const.py | 2 - pialert/initialise.py | 23 +- pialert/plugin.py | 152 ++++++---- pialert/plugin_utils.py | 12 +- pialert/reporting.py | 32 +- pialert/scanners/nmapscan.py | 211 ------------- 23 files changed, 612 insertions(+), 499 deletions(-) create mode 100755 front/plugins/nmap_scan/README.md rename front/plugins/{nmap_services => nmap_scan}/README_ES.md (100%) rename front/plugins/{nmap_services => nmap_scan}/config.json (66%) create mode 100755 front/plugins/nmap_scan/script.py delete mode 100755 front/plugins/nmap_services/README.md delete mode 100755 pialert/scanners/nmapscan.py diff --git a/docs/API.md b/docs/API.md index 3b321f6d..3abf9396 100755 --- a/docs/API.md +++ b/docs/API.md @@ -20,8 +20,7 @@ You can access the following files: | `notification_text.txt` | The plain text version of the last notification. | | `notification_text.html` | The full HTML of the last email notification. | | `notification_json_final.json` | The json version of the last notification (e.g. used for webhooks - [sample JSON](https://github.com/jokob-sk/Pi.Alert/blob/main/back/webhook_json_sample.json)). | - | `table_devices.json` | The current (at the time of the last update as mentioned above on this page) state of all of the available Devices detected by the app. | - | `table_nmap_scan.json` | The current state of the discovered ports by the regular NMAP scans. | + | `table_devices.json` | The current (at the time of the last update as mentioned above on this page) state of all of the available Devices detected by the app. | | `table_pholus_scan.json` | The latest state of the [pholus](https://github.com/jokob-sk/Pi.Alert/tree/main/pholus) (A multicast DNS and DNS Service Discovery Security Assessment Tool) scan results. | | `table_plugins_events.json` | The list of the unprocessed (pending) notification events (plugins_events DB table). | | `table_plugins_history.json` | The list of notification events history. | diff --git a/docs/DATABASE.md b/docs/DATABASE.md index ce0e3600..6184cdb7 100755 --- a/docs/DATABASE.md +++ b/docs/DATABASE.md @@ -11,8 +11,7 @@ |----------------------|----------------------| ----------------------| | CurrentScan | Result of the current scan | ![Screen1][screen1] | | Devices | The main devices database that also contains the Network tree mappings. If `ScanCycle` is set to `0` device is not scanned. | ![Screen2][screen2] | - | Events | Used to collect connection/disconnection events. | ![Screen4][screen4] | - | Nmap_Scan | Contains results of the scheduled Nmap scan, that is also displayed in the Nmap tab on each device. | ![Screen5][screen5] | + | Events | Used to collect connection/disconnection events. 
| ![Screen4][screen4] | | Online_History | Used to display the `Device presence over time` chart | ![Screen6][screen6] | | Parameters | Used to pass values between the frontend and backend. | ![Screen7][screen7] | | Pholus_Scan | Scan results of the Pholus python network penetration script. | ![Screen8][screen8] | diff --git a/front/network.php b/front/network.php index 7089acf4..7f065de0 100755 --- a/front/network.php +++ b/front/network.php @@ -659,16 +659,16 @@ highlightedCss = nodeData.data.mac == selectedNodeMac ? " highlightedNode" : ""; - return result = "
\ - " + devicePort + deviceIcon + - ""+nodeData.data.name+"\ - " - +collapseExpandHtml+ - "
"; + ${devicePort} ${deviceIcon} + ${nodeData.data.name}\ + + ${collapseExpandHtml} + `; }, onNodeClick: nodeData => { diff --git a/front/php/templates/language/en_us.json b/front/php/templates/language/en_us.json index d4e02165..066fa34a 100755 --- a/front/php/templates/language/en_us.json +++ b/front/php/templates/language/en_us.json @@ -572,18 +572,6 @@ "DDNS_PASSWORD_description" : "", "DDNS_UPDATE_URL_name" : "DynDNS update URL", "DDNS_UPDATE_URL_description" : "Update URL starting with http:// or https://.", - "Nmap_display_name" : "Nmap", - "Nmap_icon" : "", - "NMAP_ACTIVE_name" : "Cycle run", - "NMAP_ACTIVE_description" : "If enabled this will execute a scan on a newly found device. For a scheduled or one-off scan, check the NMAP_RUN setting.", - "NMAP_TIMEOUT_name" : "Run timeout", - "NMAP_TIMEOUT_description" : "Maximum time in seconds to wait for an Nmap scan to finish on any device.", - "NMAP_RUN_name" : "Scheduled run", - "NMAP_RUN_description" : "Enable a regular Nmap scan on your network on all devices. The scheduling settings can be found below. If you select once Nmap is run only once on start for the time specified in NMAP_TIMEOUT setting.", - "NMAP_RUN_SCHD_name" : "Schedule", - "NMAP_RUN_SCHD_description" : "Only enabled if you select schedule in the NMAP_RUN setting. Make sure you enter the schedule in the correct cron-like format.", - "NMAP_ARGS_name" : "Arguments", - "NMAP_ARGS_description" : "Arguments used to run the Nmap scan. Be careful to specify the arguments correctly. For example -p -10000 scans ports from 1 to 10000.", "API_display_name" : "API", "API_icon" : "", "API_CUSTOM_SQL_name" : "Custom endpoint", diff --git a/front/php/templates/language/es_es.json b/front/php/templates/language/es_es.json index de591a65..4720d9c9 100755 --- a/front/php/templates/language/es_es.json +++ b/front/php/templates/language/es_es.json @@ -565,17 +565,6 @@ "DDNS_PASSWORD_description" : "", "DDNS_UPDATE_URL_name" : "URL de actualización de DynDNS", "DDNS_UPDATE_URL_description" : "Actualice la URL que comienza con http:// o https://.", - "Nmap_display_name" : "Nmap", - "Nmap_icon" : "", - "NMAP_ACTIVE_name" : "Ejecución del ciclo", - "NMAP_ACTIVE_description" : "Si está habilitado, ejecutará un escaneo en un dispositivo recién encontrado. Para un análisis programado o único, verifique la configuración de NMAP_RUN.", - "NMAP_TIMEOUT_description" : "Tiempo máximo en segundos para esperar a que finalice un escaneo de Nmap en cualquier dispositivo.", - "NMAP_RUN_name" : "Ejecución programada", - "NMAP_RUN_description" : "Habilite un escaneo regular de Nmap en su red en todos los dispositivos. Los ajustes de programación se pueden encontrar a continuación. Si selecciona una vez, Nmap se ejecuta solo una vez al inicio durante el tiempo especificado en la configuración de NMAP_TIMEOUT.", - "NMAP_RUN_SCHD_name" : "Programar", - "NMAP_RUN_SCHD_description" : "Solo está habilitado si selecciona programar en la configuración de NMAP_RUN. Asegúrese de ingresar el cronograma en el formato tipo cron correcto.", - "NMAP_ARGS_name" : "Argumentos", - "NMAP_ARGS_description" : "Argumentos utilizados para ejecutar el análisis de Nmap. Tenga cuidado de especificar los argumentos correctamente. 
Por ejemplo, -p -10000 escanea los puertos del 1 al 10000.", "API_display_name" : "API", "API_icon" : "", "API_CUSTOM_SQL_name" : "Endpoint personalizado", diff --git a/front/plugins/README.md b/front/plugins/README.md index e2e17eb2..16e35e6e 100755 --- a/front/plugins/README.md +++ b/front/plugins/README.md @@ -15,9 +15,12 @@ - [undiscoverables (UNDIS)](/front/plugins/undiscoverables/) - [pholus_scan (ARPSCAN)](/front/plugins/pholus_scan/) - [set_password (SETPWD)](/front/plugins/set_password/) +- [nmap_scan (NMAP)](/front/plugins/nmap_scan/) ### SQL query based plugins -- [nmap_services (NMAPSERV)](/front/plugins/nmap_services/) + +- N/A, but the External SQLite based plugins work very similar + ### template based plugins - [newdev_template (NEWDEV)](/front/plugins/newdev_template/) diff --git a/front/plugins/arp_scan/config.json b/front/plugins/arp_scan/config.json index dc235295..7720659a 100755 --- a/front/plugins/arp_scan/config.json +++ b/front/plugins/arp_scan/config.json @@ -50,7 +50,8 @@ { "name" : "subnets", "type" : "setting", - "value" : "SCAN_SUBNETS" + "value" : "SCAN_SUBNETS", + "base64": true }], "settings": [ diff --git a/front/plugins/arp_scan/script.py b/front/plugins/arp_scan/script.py index 7e1d3230..fcebf83a 100755 --- a/front/plugins/arp_scan/script.py +++ b/front/plugins/arp_scan/script.py @@ -28,9 +28,7 @@ def main(): # the script expects a parameter in the format of userSubnets=subnet1,subnet2,... parser = argparse.ArgumentParser(description='Import devices from settings') parser.add_argument('userSubnets', nargs='+', help="list of subnets with options") - values = parser.parse_args() - - import base64 + values = parser.parse_args() # Assuming Plugin_Objects is a class or function that reads data from the RESULT_FILE # and returns a list of objects called 'devices'. diff --git a/front/plugins/nmap_scan/README.md b/front/plugins/nmap_scan/README.md new file mode 100755 index 00000000..2be7eaad --- /dev/null +++ b/front/plugins/nmap_scan/README.md @@ -0,0 +1,11 @@ +## Overview + +This plugin scans your network for open ports. Only IPs are scanned that are accessible by the app container. + +### Usage + +- TBD + +### Notes + +- N/A \ No newline at end of file diff --git a/front/plugins/nmap_services/README_ES.md b/front/plugins/nmap_scan/README_ES.md similarity index 100% rename from front/plugins/nmap_services/README_ES.md rename to front/plugins/nmap_scan/README_ES.md diff --git a/front/plugins/nmap_services/config.json b/front/plugins/nmap_scan/config.json similarity index 66% rename from front/plugins/nmap_services/config.json rename to front/plugins/nmap_scan/config.json index a3891201..77c2c301 100755 --- a/front/plugins/nmap_services/config.json +++ b/front/plugins/nmap_scan/config.json @@ -1,8 +1,8 @@ { - "code_name": "nmap_services", - "unique_prefix": "NMAPSRV", + "code_name": "nmap_scan", + "unique_prefix": "NMAP", "enabled": true, - "data_source": "pialert-db-query", + "data_source": "script", "data_filters": [ { "compare_column" : "ForeignKey", @@ -38,7 +38,30 @@ "language_code":"es_es", "string" : "Este complemento muestra todos los servicios descubiertos por escaneos NMAP." 
}], - "params" : [], + "params" : [ + { + "name" : "ips", + "type" : "sql", + "value" : "SELECT dev_LastIP from DEVICES", + "timeoutMultiplier" : true + }, + { + "name" : "macs", + "type" : "sql", + "value" : "SELECT dev_MAC from DEVICES" + }, + { + "name" : "timeout", + "type" : "setting", + "value" : "NMAP_RUN_TIMEOUT" + }, + { + "name" : "args", + "type" : "setting", + "value" : "NMAP_ARGS", + "base64" : true + } + ], "database_column_definitions": [ { @@ -71,7 +94,7 @@ "column": "Object_PrimaryID", "css_classes": "col-sm-2", "show": true, - "type": "label", + "type": "device_name_mac", "default_value":"", "options": [], "localized": ["name"], @@ -94,17 +117,17 @@ "localized": ["name"], "name":[{ "language_code":"en_us", - "string" : "Ip and Port" + "string" : "Port" }, { "language_code":"es_es", - "string" : "IP y puerto" + "string" : "Puerto" }] } , { "column": "DateTimeCreated", "css_classes": "col-sm-2", - "show": false, + "show": true, "type": "label", "default_value":"", "options": [], @@ -141,22 +164,6 @@ "show": true, "type": "label", "default_value":"", - "localized": ["name"], - "name":[{ - "language_code":"en_us", - "string" : "Service" - }, - { - "language_code":"es_es", - "string" : "Servicio" - }] - }, - { - "column": "Watched_Value2", - "css_classes": "col-sm-1", - "show": true, - "type": "label", - "default_value":"", "options": [], "localized": ["name"], "name":[{ @@ -168,6 +175,22 @@ "string" : "Estado" }] }, + { + "column": "Watched_Value2", + "css_classes": "col-sm-1", + "show": true, + "type": "label", + "default_value":"", + "localized": ["name"], + "name":[{ + "language_code":"en_us", + "string" : "Service" + }, + { + "language_code":"es_es", + "string" : "Servicio" + }] + }, { "column": "Watched_Value3", "css_classes": "col-sm-2", @@ -272,75 +295,119 @@ } ], "settings":[ - { - "function": "RUN", - "type": "text.select", - "default_value":"disabled", - "options": ["disabled", "once", "schedule", "always_after_scan", "on_new_device"], - "localized": ["name", "description"], - "name" :[{ - "language_code":"en_us", - "string" : "When to run" - }, - { - "language_code":"es_es", - "string" : "Cuando ejecutar" - }], - "description": [{ - "language_code":"en_us", - "string" : "Specify when the SQL query is executed." - }, - { - "language_code":"es_es", - "string" : "Especificar cuándo se ejecuta la consulta SQL." - }] + { + "function": "RUN", + "type": "text.select", + "default_value":"disabled", + "options": ["disabled", "once", "schedule", "always_after_scan", "on_new_device"], + "localized": ["name", "description"], + "name" :[{ + "language_code":"en_us", + "string" : "When to run" }, { - "function": "CMD", - "type": "text", - "default_value":"SELECT ns.MAC as Object_PrimaryID, cast('http://' || dv.dev_LastIP as VARCHAR(100)) || ':' || cast( SUBSTR(ns.Port ,0, INSTR(ns.Port , '/')) as VARCHAR(100)) as Object_SecondaryID, datetime() as DateTime, ns.Service as Watched_Value1, ns.State as Watched_Value2, dv.dev_Name as Watched_Value3, 'null' as Watched_Value4, ns.Extra as Extra, ns.MAC as ForeignKey FROM (SELECT * FROM Nmap_Scan) ns left JOIN (SELECT dev_Name, dev_MAC, dev_LastIP FROM Devices) dv ON ns.MAC = dv.dev_MAC", - "options": [], - "localized": ["name", "description"], - "name" : [{ + "language_code":"es_es", + "string" : "Cuando ejecutar" + }], + "description": [{ + "language_code":"en_us", + "string" : "Enable a regular Nmap scan on your network on all devices. The scheduling settings can be found below. 
If you select once Nmap is run only once on start for the time specified in NMAP_TIMEOUT setting." + }, + { + "language_code":"es_es", + "string" : "Habilite un escaneo regular de Nmap en su red en todos los dispositivos. Los ajustes de programación se pueden encontrar a continuación. Si selecciona una vez, Nmap se ejecuta solo una vez al inicio durante el tiempo especificado en la configuración de NMAP_TIMEOUT" + }] + }, + { + "function": "CMD", + "type": "text", + "default_value":"python3 /home/pi/pialert/front/plugins/nmap_scan/script.py ips={ips} macs={macs} timeout={timeout} args={args}", + "options": [], + "localized": ["name", "description"], + "name" : [{ + "language_code":"en_us", + "string" : "SQL to run" + }, + { + "language_code":"es_es", + "string" : "Consulta SQL" + }], + "description": [{ + "language_code":"en_us", + "string" : "This calls the script responsible for executing the NMAP scan." + } + ] + }, + { + "function": "ARGS", + "type": "text", + "default_value":"-p -10000", + "options": [], + "localized": ["name", "description"], + "name" : [{ + "language_code":"en_us", + "string" : "Arguments" + }, + { + "language_code":"es_es", + "string" : "Argumentos" + }], + "description": [ + { "language_code":"en_us", - "string" : "SQL to run" + "string" : "Arguments used to run the Nmap scan. Be careful to specify the arguments correctly. For example -p -10000 scans ports from 1 to 10000." }, - { + { "language_code":"es_es", - "string" : "Consulta SQL" - }], - "description": [{ - "language_code":"en_us", - "string" : "This SQL query is used to populate the coresponding UI tables under the Plugins section." - }, - { - "language_code":"es_es", - "string" : "Esta consulta SQL se usa para completar las tablas de IU correspondientes en la sección Complementos." + "string" : "Argumentos utilizados para ejecutar el análisis de Nmap. Tenga cuidado de especificar los argumentos correctamente. Por ejemplo, -p -10000 escanea los puertos del 1 al 10000." } - ] + ] + }, + { + "function": "RUN_SCHD", + "type": "text", + "default_value":"0 2 * * *", + "options": [], + "localized": ["name", "description"], + "name" : [{ + "language_code":"en_us", + "string" : "Schedule" }, { - "function": "RUN_SCHD", - "type": "text", - "default_value":"0 2 * * *", - "options": [], - "localized": ["name", "description"], - "name" : [{ - "language_code":"en_us", - "string" : "Schedule" + "language_code":"es_es", + "string" : "Schedule" + }], + "description": [{ + "language_code":"en_us", + "string" : "Only enabled if you select schedule in the NMAP_RUN setting. Make sure you enter the schedule in the correct cron-like format." + }, + { + "language_code":"es_es", + "string" : "Solo está habilitado si selecciona programar en la configuración de NMAP_RUN. Asegúrese de ingresar el cronograma en el formato tipo cron correcto." + }] + }, + { + "function": "RUN_TIMEOUT", + "type": "integer", + "default_value": 300, + "options": [], + "localized": ["name", "description"], + "name": [ + { + "language_code": "en_us", + "string": "Run timeout" }, - { - "language_code":"es_es", - "string" : "Schedule" - }], - "description": [{ - "language_code":"en_us", - "string" : "Only enabled if you select schedule in the NMAPSRV_RUN setting. Make sure you enter the schedule in the correct cron-like format (e.g. validate at crontab.guru). For example entering 0 4 * * * will run the scan after 4 am in the TIMEZONE you set above. Will be run NEXT time the time passes." 
- }, - { - "language_code":"es_es", - "string" : "Solo está habilitado si selecciona schedule en la configuración NMAPSRV_RUN setting. Asegúrese de ingresar la programación en el formato similar a cron correcto (por ejemplo, valide en crontab.guru). Por ejemplo, ingresar 0 4 * * * ejecutará el escaneo después de las 4 a.m. en el TIMEZONE que configuró arriba. Se ejecutará la PRÓXIMA vez que pase el tiempo." - }] + { + "language_code": "es_es", + "string": "Tiempo límite de ejecución" + } + ], + "description": [ + { + "language_code": "en_us", + "string": "Max run time per device in seconds." + } + ] }, { "function": "WATCH", @@ -352,7 +419,7 @@ "language_code":"en_us", "string" : "Watched" }, - { + { "language_code":"es_es", "string" : "Visto" }] , @@ -360,7 +427,7 @@ "language_code":"en_us", "string" : "Send a notification if selected values change. Use CTRL + Click to select/deselect. " }, - { + { "language_code":"es_es", "string" : "Envíe una notificación si los valores seleccionados cambian. Utilice CTRL + clic para seleccionar/deseleccionar. " }] @@ -375,7 +442,7 @@ "language_code":"en_us", "string" : "Report on" }, - { + { "language_code":"es_es", "string" : "Informar sobre" }] , @@ -383,7 +450,7 @@ "language_code":"en_us", "string" : "Send a notification only on these statuses. new means a new unique (unique combination of PrimaryId and SecondaryId) object was discovered. watched-changed means that selected Watched_ValueN columns changed." }, - { + { "language_code":"es_es", "string" : "Envíe una notificación solo en estos estados. new significa que se descubrió un nuevo objeto único (combinación única de PrimaryId y SecondaryId). watched-changed significa que seleccionó Watched_ValueN Las columnas cambiaron." }] diff --git a/front/plugins/nmap_scan/script.py b/front/plugins/nmap_scan/script.py new file mode 100755 index 00000000..4f462686 --- /dev/null +++ b/front/plugins/nmap_scan/script.py @@ -0,0 +1,280 @@ + +#!/usr/bin/env python + +import os +import pathlib +import argparse +import sys +import re +import base64 +import subprocess +from time import strftime + +sys.path.append("/home/pi/pialert/front/plugins") +sys.path.append('/home/pi/pialert/pialert') + +from plugin_helper import Plugin_Object, Plugin_Objects, decodeBase64 +from logger import mylog, append_line_to_file +from helper import timeNowTZ +from const import logPath, pialertPath + +CUR_PATH = str(pathlib.Path(__file__).parent.resolve()) +LOG_FILE = os.path.join(CUR_PATH, 'script.log') +RESULT_FILE = os.path.join(CUR_PATH, 'last_result.log') + +#------------------------------------------------------------------------------- +def main(): + # sample + # /home/pi/pialert/front/plugins/nmap_scan/script.py ips=192.168.1.66,192.168.1.9' + parser = argparse.ArgumentParser(description='Scan ports of devices specified by IP addresses') + parser.add_argument('ips', nargs='+', help="list of IPs to scan") + parser.add_argument('macs', nargs='+', help="list of MACs related to the supplied IPs in the same order") + parser.add_argument('timeout', nargs='+', help="timeout") + parser.add_argument('args', nargs='+', help="args") + values = parser.parse_args() + + # Plugin_Objects is a class that reads data from the RESULT_FILE + # and returns a list of results. + results = Plugin_Objects(RESULT_FILE) + + # Print a message to indicate that the script is starting. + mylog('debug', ['[NMAP Scan] In script ']) + + # Printing the params list to check its content. 
+ mylog('debug', ['[NMAP Scan] values.ips: ', values.ips]) + mylog('debug', ['[NMAP Scan] values.macs: ', values.macs]) + mylog('debug', ['[NMAP Scan] values.timeout: ', values.timeout]) + mylog('debug', ['[NMAP Scan] values.args: ', values.args]) + + argsDecoded = decodeBase64(values.args) + + mylog('debug', ['[NMAP Scan] argsDecoded: ', argsDecoded]) + + entries = performNmapScan(values.ips.split('=')[1].split(','), values.macs.split('=')[1].split(',') , values.timeout.split('=')[1], argsDecoded) + + for entry in entries: + + results.add_object( + primaryId = entry.mac, # MAC (Device Name) + secondaryId = entry.port, # IP Address (always 0.0.0.0) + watched1 = entry.state, # Device Name + watched2 = entry.service, + watched3 = entry.ip + ":" + entry.port, + watched4 = "", + extra = "", + foreignKey = entry.extra + ) + + entries.write_result_file() + +#------------------------------------------------------------------------------- + +class nmap_entry: + def __init__(self, ip, mac, time, port, state, service, name = '', extra = '', index = 0): + self.ip = ip + self.mac = mac + self.time = time + self.port = port + self.state = state + self.service = service + self.extra = extra + self.index = index + self.hash = str(mac) + str(port)+ str(state)+ str(service) + + +#------------------------------------------------------------------------------- +def performNmapScan(deviceIPs, deviceMACs, timeoutSec, args): + """ + run nmap scan on a list of devices + discovers open ports and keeps track existing and new open ports + """ + if len(deviceIPs) > 0: + + devTotal = len(deviceIPs) + + updateState(db,"Scan: Nmap") + + mylog('verbose', ['[NMAP Scan] Scan: Nmap for max ', str(timeoutSec), 's ('+ str(round(int(timeoutSec) / 60, 1)) +'min) per device']) + mylog('verbose', ["[NMAP Scan] Estimated max delay: ", (devTotal * int(timeoutSec)), 's ', '(', round((devTotal * int(timeoutSec))/60,1) , 'min)' ]) + + # collect ports / new Nmap Entries + newEntriesTmp = [] + + devIndex = 0 + for ip in deviceIPs: + # Execute command + output = "" + # prepare arguments from user supplied ones + nmapArgs = ['nmap'] + args.split() + [ip] + + progress = ' (' + str(devIndex+1) + '/' + str(devTotal) + ')' + + try: + # try runnning a subprocess with a forced (timeout) in case the subprocess hangs + output = subprocess.check_output (nmapArgs, universal_newlines=True, stderr=subprocess.STDOUT, timeout=(timeoutSec)) + except subprocess.CalledProcessError as e: + # An error occured, handle it + mylog('none', ["[NMAP Scan] " ,e.output]) + mylog('none', ["[NMAP Scan] Error - Nmap Scan - check logs", progress]) + except subprocess.TimeoutExpired as timeErr: + mylog('verbose', ['[NMAP Scan] Nmap TIMEOUT - the process forcefully terminated as timeout reached for ', ip, progress]) + + if output == "": # check if the subprocess failed + mylog('minimal', ['[NMAP Scan] Nmap FAIL for ', ip, progress ,' check logs for details']) + else: + mylog('verbose', ['[NMAP Scan] Nmap SUCCESS for ', ip, progress]) + + + + # check the last run output + newLines = output.split('\n') + + # regular logging + for line in newLines: + append_line_to_file (logPath + '/pialert_nmap.log', line +'\n') + + + + index = 0 + startCollecting = False + duration = "" + for line in newLines: + if 'Starting Nmap' in line: + if len(newLines) > index+1 and 'Note: Host seems down' in newLines[index+1]: + break # this entry is empty + elif 'PORT' in line and 'STATE' in line and 'SERVICE' in line: + startCollecting = True + elif 'PORT' in line and 'STATE' in line and 
'SERVICE' in line: + startCollecting = False # end reached + elif startCollecting and len(line.split()) == 3: + newEntriesTmp.append(nmap_entry(ip, deviceMACs[devIndex], timeNowTZ(), line.split()[0], line.split()[1], line.split()[2])) + elif 'Nmap done' in line: + duration = line.split('scanned in ')[1] + + index += 1 + devIndex += 1 + + mylog('verbose', ['[NMAP Scan] Ports found by NMAP: ', len(newEntriesTmp)]) + + #end for loop + + return newEntriesTmp + +#=============================================================================== +# BEGIN +#=============================================================================== +if __name__ == '__main__': + main() + +# def process_discovered_ports(db, device, discoveredPorts): +# """ +# process ports discovered by nmap +# compare to previosu ports +# update DB +# raise notifications +# """ +# sql = db.sql # TO-DO +# # previous Nmap Entries +# oldEntries = [] +# changedPortsTmp = [] + +# mylog('verbose', ['[NMAP Scan] Process ports found by NMAP: ', len(discoveredPorts)]) + +# if len(discoveredPorts) > 0: + +# # get all current NMAP ports from the DB +# rows = db.read(sql_nmap_scan_all) + +# for row in rows: +# # only collect entries matching the current MAC address +# if row["MAC"] == device["dev_MAC"]: +# oldEntries.append(nmap_entry(row["MAC"], row["Time"], row["Port"], row["State"], row["Service"], device["dev_Name"], row["Extra"], row["Index"])) + +# newEntries = [] + +# # Collect all entries that don't match the ones in the DB +# for discoveredPort in discoveredPorts: + +# found = False + +# # Check the new entry is already available in oldEntries and remove from processing if yes +# for oldEntry in oldEntries: +# if discoveredPort.hash == oldEntry.hash: +# found = True + +# if not found: +# newEntries.append(discoveredPort) + + +# mylog('verbose', ['[NMAP Scan] Nmap newly discovered or changed ports: ', len(newEntries)]) + +# # collect new ports, find the corresponding old entry and return for notification purposes +# # also update the DB with the new values after deleting the old ones +# if len(newEntries) > 0: + +# # params to build the SQL query +# params = [] +# indexesToDelete = "" + +# # Find old entry matching the new entry hash +# for newEntry in newEntries: + +# foundEntry = None + +# for oldEntry in oldEntries: +# if oldEntry.hash == newEntry.hash: +# indexesToDelete = indexesToDelete + str(oldEntry.index) + ',' +# foundEntry = oldEntry + +# columnNames = ["Name", "MAC", "Port", "State", "Service", "Extra", "NewOrOld" ] + +# # Old entry found +# if foundEntry is not None: +# # Build params for sql query +# params.append((newEntry.mac, newEntry.time, newEntry.port, newEntry.state, newEntry.service, oldEntry.extra)) +# # Build JSON for API and notifications +# changedPortsTmp.append({ +# "Name" : foundEntry.name, +# "MAC" : newEntry.mac, +# "Port" : newEntry.port, +# "State" : newEntry.state, +# "Service" : newEntry.service, +# "Extra" : foundEntry.extra, +# "NewOrOld" : "New values" +# }) +# changedPortsTmp.append({ +# "Name" : foundEntry.name, +# "MAC" : foundEntry.mac, +# "Port" : foundEntry.port, +# "State" : foundEntry.state, +# "Service" : foundEntry.service, +# "Extra" : foundEntry.extra, +# "NewOrOld" : "Old values" +# }) +# # New entry - no matching Old entry found +# else: +# # Build params for sql query +# params.append((newEntry.mac, newEntry.time, newEntry.port, newEntry.state, newEntry.service, '')) +# # Build JSON for API and notifications +# changedPortsTmp.append({ +# "Name" : "New device", +# "MAC" : 
newEntry.mac, +# "Port" : newEntry.port, +# "State" : newEntry.state, +# "Service" : newEntry.service, +# "Extra" : "", +# "NewOrOld" : "New device" +# }) + +# conf.changedPorts_json_struc = json_struc({ "data" : changedPortsTmp}, columnNames) + +# # Delete old entries if available +# if len(indexesToDelete) > 0: +# sql.execute ("DELETE FROM Nmap_Scan where \"Index\" in (" + indexesToDelete[:-1] +")") +# db.commitDB() + +# # Insert new values into the DB +# sql.executemany ("""INSERT INTO Nmap_Scan ("MAC", "Time", "Port", "State", "Service", "Extra") VALUES (?, ?, ?, ?, ?, ?)""", params) +# db.commitDB() + + diff --git a/front/plugins/nmap_services/README.md b/front/plugins/nmap_services/README.md deleted file mode 100755 index 1f7b52f0..00000000 --- a/front/plugins/nmap_services/README.md +++ /dev/null @@ -1,11 +0,0 @@ -## Overview - -This plugin shows all Services discovered by regular NMAP scans. It's also a sample plugin showcasing how to use a SQL Query to show existing data from the PiAlert database. - -### Usage - -- The sql query from the `NMAPSRV_CMD` setting is used to create source data for this plugin. Column order and values need to adhere to the ones specified in the [documentation](https://github.com/jokob-sk/Pi.Alert/tree/main/front/plugins). - -### Notes - -- N/A \ No newline at end of file diff --git a/front/plugins/plugin_helper.py b/front/plugins/plugin_helper.py index 0643391c..3f1d4d60 100755 --- a/front/plugins/plugin_helper.py +++ b/front/plugins/plugin_helper.py @@ -2,6 +2,11 @@ from time import strftime import pytz from datetime import datetime +sys.path.append("/home/pi/pialert/front/plugins") +sys.path.append('/home/pi/pialert/pialert') + +from logger import mylog + #------------------------------------------------------------------------------- def read_config_file(): """ @@ -20,14 +25,35 @@ def read_config_file(): confDict = {} # config dictionary exec(code, {"__builtins__": {}}, confDict) return confDict -# ------------------------------------------------------------------- - pialertConfigFile = read_config_file() timeZoneSetting = pialertConfigFile['TIMEZONE'] timeZone = pytz.timezone(timeZoneSetting) +# ------------------------------------------------------------------- +def decodeBase64(input): + + # Printing the input list to check its content. + mylog('debug', ['[Plugins] Helper base64 input: ', input]) + + + # Extract the base64-encoded subnet information from the first element + # The format of the element is assumed to be like 'param=b'. + inputParamBase64 = input.split('=b')[1] + + # Printing the extracted base64-encoded information. + mylog('debug', ['[Plugins] Helper base64 inputParamBase64: ', inputParamBase64]) + + + # Decode the base64-encoded subnet information to get the actual subnet information in ASCII format. + result = base64.b64decode(inputParamBase64).decode('ascii') + + # Print the decoded subnet information. 
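+    # Worked example, assuming NMAP_ARGS = '-p -10000' stored with "base64": true:
+    #   input            -> "args=b'LXAgLTEwMDAw'"
+    #   inputParamBase64 -> "'LXAgLTEwMDAw'" (b64decode skips the stray quote characters)
+    #   result           -> '-p -10000'
+    # This assumes base64 (and sys, for the sys.path.append calls above) is imported
+    # at the top of this file, which is outside the visible hunk.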
+ mylog('debug', ['[Plugins] Helper base64 result: ', result]) + + return result + # ------------------------------------------------------------------- class Plugin_Object: diff --git a/pialert/__main__.py b/pialert/__main__.py index 6c7551e3..274e766f 100755 --- a/pialert/__main__.py +++ b/pialert/__main__.py @@ -34,7 +34,6 @@ from reporting import check_and_run_event, send_notifications from plugin import run_plugin_scripts # different scanners -from scanners.nmapscan import performNmapScan from scanners.internet import check_internet_IP @@ -58,8 +57,7 @@ main structure of Pi Alert run scans run plugins (scheduled) check internet IP - check vendor - run NMAP + check vendor run "scan_network()" processing scan results run plugins (after Scan) @@ -160,25 +158,6 @@ def main (): conf.cycle = 'update_vendors' mylog('verbose', ['[MAIN] cycle:',conf.cycle]) update_devices_MAC_vendors(db) - - # Execute scheduled or one-off Nmap scan if enabled and run conditions fulfilled - if conf.NMAP_RUN == "schedule" or conf.NMAP_RUN == "once": - - nmapSchedule = [sch for sch in conf.mySchedules if sch.service == "nmap"][0] - run = False - - # run once after application starts - if conf.NMAP_RUN == "once" and nmapSchedule.last_run == 0: - run = True - - # run if overdue scheduled time - if conf.NMAP_RUN == "schedule": - run = nmapSchedule.runScheduleCheck() - - if run: - nmapSchedule.last_run = timeNowTZ() - performNmapScan(db, get_all_devices(db)) - # Run splugin scripts which are set to run every timne after a scans finished pluginsState = run_plugin_scripts(db,'always_after_scan', pluginsState) @@ -202,11 +181,7 @@ def main (): # new devices were found if len(newDevices) > 0: # run all plugins registered to be run when new devices are found - pluginsState = run_plugin_scripts(db, 'on_new_device', pluginsState) - - # Scan newly found devices with Nmap if enabled - if conf.NMAP_ACTIVE and len(newDevices) > 0: - performNmapScan( db, newDevices) + pluginsState = run_plugin_scripts(db, 'on_new_device', pluginsState) # send all configured notifications send_notifications(db) diff --git a/pialert/api.py b/pialert/api.py index 16332095..a3cd34b9 100755 --- a/pialert/api.py +++ b/pialert/api.py @@ -3,7 +3,7 @@ import json # pialert modules import conf -from const import (apiPath, sql_devices_all, sql_nmap_scan_all, sql_pholus_scan_all, sql_events_pending_alert, +from const import (apiPath, sql_devices_all, sql_events_pending_alert, sql_settings, sql_plugins_events, sql_plugins_history, sql_plugins_objects,sql_language_strings) from logger import mylog from helper import write_file @@ -26,9 +26,7 @@ def update_api(db, isNotification = False, updateOnlyDataSources = []): # prepare database tables we want to expose dataSourcesSQLs = [ - ["devices", sql_devices_all], - ["nmap_scan", sql_nmap_scan_all], - ["pholus_scan", sql_pholus_scan_all], + ["devices", sql_devices_all], ["events_pending_alert", sql_events_pending_alert], ["settings", sql_settings], ["plugins_events", sql_plugins_events], diff --git a/pialert/conf.py b/pialert/conf.py index a139db6e..36b3b11b 100755 --- a/pialert/conf.py +++ b/pialert/conf.py @@ -33,7 +33,6 @@ mqtt_connected_to_broker = False mqtt_sensors = [] client = None # mqtt client # for notifications -changedPorts_json_struc = None # ACTUAL CONFIGRATION ITEMS set to defaults @@ -43,7 +42,7 @@ LOG_LEVEL = 'verbose' TIMEZONE = 'Europe/Berlin' PIALERT_WEB_PROTECTION = False PIALERT_WEB_PASSWORD = '8d969eef6ecad3c29a3a629280e686cf0c3f5d5a86aff3ca12020c923adc6c92' -INCLUDED_SECTIONS = 
['internet', 'new_devices', 'down_devices', 'events', 'ports'] +INCLUDED_SECTIONS = ['internet', 'new_devices', 'down_devices', 'events'] DAYS_TO_KEEP_EVENTS = 90 REPORT_DASHBOARD_URL = 'http://pi.alert/' DIG_GET_IP_ARG = '-4 myip.opendns.com @resolver1.opendns.com' @@ -103,12 +102,5 @@ DDNS_USER = 'dynu_user' DDNS_PASSWORD = 'A0000000B0000000C0000000D0000000' DDNS_UPDATE_URL = 'https://api.dynu.com/nic/update?' -# Nmap -NMAP_ACTIVE = True -NMAP_TIMEOUT = 150 -NMAP_RUN = 'once' -NMAP_RUN_SCHD = '0 2 * * *' -NMAP_ARGS = '-p -10000 --max-parallelism 100' - # API API_CUSTOM_SQL = 'SELECT * FROM Devices WHERE dev_PresentLastScan = 0' \ No newline at end of file diff --git a/pialert/const.py b/pialert/const.py index 3a63a43f..d2a01d01 100755 --- a/pialert/const.py +++ b/pialert/const.py @@ -35,8 +35,6 @@ sql_devices_stats = """SELECT Online_Devices as online, Down_Devices as down, A (select count(*) from Devices a where dev_NewDevice = 1 ) as new, (select count(*) from Devices a where dev_Name = '(unknown)' or dev_Name = '(name not found)' ) as unknown from Online_History order by Scan_Date desc limit 1""" -sql_nmap_scan_all = "SELECT * FROM Nmap_Scan" -sql_pholus_scan_all = "SELECT * FROM Pholus_Scan" sql_events_pending_alert = "SELECT * FROM Events where eve_PendingAlertEmail is not 0" sql_settings = "SELECT * FROM Settings" sql_plugins_objects = "SELECT * FROM Plugins_Objects" diff --git a/pialert/initialise.py b/pialert/initialise.py index 9d726839..03e78424 100755 --- a/pialert/initialise.py +++ b/pialert/initialise.py @@ -89,7 +89,7 @@ def importConfigs (db): conf.PLUGINS_KEEP_HIST = ccd('PLUGINS_KEEP_HIST', 250 , c_d, 'Keep history entries', 'integer', '', 'General') conf.PIALERT_WEB_PROTECTION = ccd('PIALERT_WEB_PROTECTION', False , c_d, 'Enable logon', 'boolean', '', 'General') conf.PIALERT_WEB_PASSWORD = ccd('PIALERT_WEB_PASSWORD', '8d969eef6ecad3c29a3a629280e686cf0c3f5d5a86aff3ca12020c923adc6c92' , c_d, 'Logon password', 'readonly', '', 'General') - conf.INCLUDED_SECTIONS = ccd('INCLUDED_SECTIONS', ['internet', 'new_devices', 'down_devices', 'events', 'ports'] , c_d, 'Notify on', 'text.multiselect', "['internet', 'new_devices', 'down_devices', 'events', 'ports', 'plugins']", 'General') + conf.INCLUDED_SECTIONS = ccd('INCLUDED_SECTIONS', ['internet', 'new_devices', 'down_devices', 'events'] , c_d, 'Notify on', 'text.multiselect', "['internet', 'new_devices', 'down_devices', 'events', 'plugins']", 'General') conf.REPORT_DASHBOARD_URL = ccd('REPORT_DASHBOARD_URL', 'http://pi.alert/' , c_d, 'PiAlert URL', 'text', '', 'General') conf.DIG_GET_IP_ARG = ccd('DIG_GET_IP_ARG', '-4 myip.opendns.com @resolver1.opendns.com' , c_d, 'DIG arguments', 'text', '', 'General') conf.UI_LANG = ccd('UI_LANG', 'English' , c_d, 'Language Interface', 'text.select', "['English', 'German', 'Spanish']", 'General') @@ -154,13 +154,7 @@ def importConfigs (db): conf.DDNS_PASSWORD = ccd('DDNS_PASSWORD', 'A0000000B0000000C0000000D0000000' , c_d, 'DynDNS password', 'password', '', 'DynDNS') conf.DDNS_UPDATE_URL = ccd('DDNS_UPDATE_URL', 'https://api.dynu.com/nic/update?' 
, c_d, 'DynDNS update URL', 'text', '', 'DynDNS') - # Nmap - conf.NMAP_ACTIVE = ccd('NMAP_ACTIVE', True , c_d, 'Enable Nmap scans', 'boolean', '', 'Nmap') - conf.NMAP_TIMEOUT = ccd('NMAP_TIMEOUT', 150 , c_d, 'Nmap timeout', 'integer', '', 'Nmap') - conf.NMAP_RUN = ccd('NMAP_RUN', 'disabled' , c_d, 'Nmap enable schedule', 'text.select', "['disabled', 'once', 'schedule']", 'Nmap') - conf.NMAP_RUN_SCHD = ccd('NMAP_RUN_SCHD', '0 2 * * *' , c_d, 'Nmap schedule', 'text', '', 'Nmap') - conf.NMAP_ARGS = ccd('NMAP_ARGS', '-p -10000' , c_d, 'Nmap custom arguments', 'text', '', 'Nmap') - + # Init timezone in case it changed conf.tz = timezone(conf.TIMEZONE) @@ -188,10 +182,6 @@ def importConfigs (db): # reset schedules conf.mySchedules = [] - # init nmap schedule - nmapSchedule = Cron(conf.NMAP_RUN_SCHD).schedule(start_date=datetime.datetime.now(conf.tz)) - conf.mySchedules.append(schedule_class("nmap", nmapSchedule, nmapSchedule.next(), False)) - # Format and prepare the list of subnets conf.userSubnets = updateSubnets(conf.SCAN_SUBNETS) @@ -252,10 +242,7 @@ def importConfigs (db): conf.plugins_once_run = False # ----------------- # Plugins END - - # write_file(self.path, json.dumps(self.jsonData)) - - + # Insert settings into the DB sql.execute ("DELETE FROM Settings") @@ -270,9 +257,9 @@ def importConfigs (db): # update only the settings datasource update_api(db, False, ["settings"]) - + # run plugins that are modifying the config - pluginsState = run_plugin_scripts(db, 'before_config_save') + run_plugin_scripts(db, 'before_config_save' ) # Used to determine the next import conf.lastImportedConfFile = os.path.getmtime(config_file) diff --git a/pialert/plugin.py b/pialert/plugin.py index 9d5c3fc4..8f276611 100755 --- a/pialert/plugin.py +++ b/pialert/plugin.py @@ -3,6 +3,7 @@ import sqlite3 import json import subprocess import datetime +import base64 from collections import namedtuple @@ -16,16 +17,88 @@ from plugin_utils import logEventStatusCounts, get_plugin_string, get_plugin_set #------------------------------------------------------------------------------- -class plugins_state: - def __init__(self, processScan = False): - self.processScan = processScan +class plugin_param: + def __init__(self, param, plugin, db): + + paramValuesCount = 1 + + # Get setting value + if param["type"] == "setting": + inputValue = get_setting(param["value"]) + + if inputValue != None: + setVal = inputValue[6] # setting value + setTyp = inputValue[3] # setting type + + noConversion = ['text', 'string', 'integer', 'boolean', 'password', 'readonly', 'integer.select', 'text.select', 'integer.checkbox' ] + arrayConversion = ['text.multiselect', 'list', 'subnets'] + jsonConversion = ['.template'] + + mylog('debug', f'[Plugins] setTyp: {setTyp}') + + if '.select' in setTyp or setTyp in arrayConversion: + paramValuesCount = len(setVal) + + if setTyp in noConversion: + resolved = setVal + + elif setTyp in arrayConversion: + resolved = flatten_array(setVal) + + elif setTyp in arrayConversionBase64: + + + resolved = flatten_array(setVal) + else: + for item in jsonConversion: + if setTyp.endswith(item): + return json.dumps(setVal) + else: + mylog('none', ['[Plugins] ERROR: Parameter not converted.']) + + + # Get SQL result + if param["type"] == "sql": + inputValue = db.get_sql_array(param["value"]) + + resolved = flatten_array(inputValue) + + + mylog('debug', f'[Plugins] Resolved value: {resolved}') + + # Handle timeout multiplier if script executes multiple time + multiplyTimeout = False + if 'timeoutMultiplier' in param and 
param['timeoutMultiplier']: + multiplyTimeout = True + + # Handle base64 encoding + encodeToBase64 = False + if 'base64' in param and param['base64']: + encodeToBase64 = True + + + mylog('debug', f'[Plugins] Convert to Base64: {encodeToBase64}') + if encodeToBase64: + resolved = str(base64.b64encode(resolved.encode('ascii'))) + mylog('debug', f'[Plugins] base64 value: {resolved}') + + + self.resolved = resolved + self.inputValue = inputValue + self.base64 = encodeToBase64 + self.name = param["name"] + self.type = param["type"] + self.value = param["value"] + self.paramValuesCount = paramValuesCount + self.multiplyTimeout = multiplyTimeout #------------------------------------------------------------------------------- -def run_plugin_scripts(db, runType, pluginsState = None): +class plugins_state: + def __init__(self, processScan = False): + self.processScan = processScan - if pluginsState == None: - mylog('debug', ['[Plugins] pluginsState initialized ']) - pluginsState = plugins_state() +#------------------------------------------------------------------------------- +def run_plugin_scripts(db, runType, pluginsState = plugins_state()): # Header updateState(db,"Run: Plugins") @@ -73,6 +146,11 @@ def run_plugin_scripts(db, runType, pluginsState = None): def execute_plugin(db, plugin, pluginsState = plugins_state() ): sql = db.sql + + if pluginsState is None: + mylog('debug', ['[Plugins] pluginsState is None']) + pluginsState = plugins_state() + # ------- necessary settings check -------- set = get_plugin_setting(plugin, "CMD") @@ -92,29 +170,26 @@ def execute_plugin(db, plugin, pluginsState = plugins_state() ): mylog('debug', ['[Plugins] Timeout: ', set_RUN_TIMEOUT]) - # Prepare custom params + # Prepare custom params params = [] if "params" in plugin: - for param in plugin["params"]: - resolved = "" + for param in plugin["params"]: - # Get setting value - if param["type"] == "setting": - resolved = get_setting(param["value"]) + tempParam = plugin_param(param, plugin, db) - if resolved != None: - resolved = passable_string_from_setting(resolved) - - # Get Sql result - if param["type"] == "sql": - resolved = flatten_array(db.get_sql_array(param["value"])) - - if resolved == None: - mylog('none', [f'[Plugins] The parameter "name":"{param["name"]}" for "value": {param["value"]} was resolved as None']) + if tempParam.resolved == None: + mylog('none', [f'[Plugins] The parameter "name":"{tempParam.name}" for "value": {tempParam.value} was resolved as None']) else: - params.append( [param["name"], resolved] ) + # params.append( [param["name"], resolved] ) + params.append( [tempParam.name, tempParam.resolved] ) + + if tempParam.multiplyTimeout: + + set_RUN_TIMEOUT = set_RUN_TIMEOUT*tempParam.paramValuesCount + + mylog('debug', [f'[Plugins] The parameter "name":"{param["name"]}" will multiply the timeout {tempParam.paramValuesCount} times. 
Total timeout: {set_RUN_TIMEOUT}s']) # build SQL query parameters to insert into the DB @@ -299,37 +374,6 @@ def execute_plugin(db, plugin, pluginsState = plugins_state() ): -#------------------------------------------------------------------------------- -# Flattens a setting to make it passable to a script -def passable_string_from_setting(globalSetting): - - setVal = globalSetting[6] # setting value - setTyp = globalSetting[3] # setting type - - noConversion = ['text', 'string', 'integer', 'boolean', 'password', 'readonly', 'integer.select', 'text.select', 'integer.checkbox' ] - arrayConversion = ['text.multiselect', 'list'] - arrayConversionBase64 = ['subnets'] - jsonConversion = ['.template'] - - mylog('debug', f'[Plugins] setTyp: {setTyp}') - - if setTyp in noConversion: - return setVal - - if setTyp in arrayConversion: - return flatten_array(setVal) - - if setTyp in arrayConversionBase64: - - return flatten_array(setVal, encodeBase64 = True) - - for item in jsonConversion: - if setTyp.endswith(item): - return json.dumps(setVal) - - mylog('none', ['[Plugins] ERROR: Parameter not converted.']) - - #------------------------------------------------------------------------------- # Check if watched values changed for the given plugin diff --git a/pialert/plugin_utils.py b/pialert/plugin_utils.py index 4ac687a9..bfe1b14f 100755 --- a/pialert/plugin_utils.py +++ b/pialert/plugin_utils.py @@ -1,5 +1,4 @@ import os -import base64 import json from logger import mylog @@ -72,12 +71,12 @@ def get_plugin_string(props, el): #------------------------------------------------------------------------------- -def flatten_array(arr, encodeBase64=False): +def flatten_array(arr): tmp = '' arrayItemStr = '' mylog('debug', '[Plugins] Flattening the below array') - mylog('debug', f'[Plugins] Convert to Base64: {encodeBase64}') + mylog('debug', arr) for arrayItem in arr: @@ -93,12 +92,7 @@ def flatten_array(arr, encodeBase64=False): tmp = tmp[:-1] # Remove last comma ',' - mylog('debug', f'[Plugins] Flattened array: {tmp}') - - if encodeBase64: - tmp = str(base64.b64encode(tmp.encode('ascii'))) - mylog('debug', f'[Plugins] Flattened array (base64): {tmp}') - + mylog('debug', f'[Plugins] Flattened array: {tmp}') return tmp diff --git a/pialert/reporting.py b/pialert/reporting.py index 21e6e84c..396307b8 100755 --- a/pialert/reporting.py +++ b/pialert/reporting.py @@ -108,7 +108,7 @@ def construct_notifications(db, sqlQuery, tableTitle, skipText = False, supplied def send_notifications (db): sql = db.sql #TO-DO - global mail_text, mail_html, json_final, changedPorts_json_struc, partial_html, partial_txt, partial_json + global mail_text, mail_html, json_final, partial_html, partial_txt, partial_json deviceUrl = conf.REPORT_DASHBOARD_URL + '/deviceDetails.php?mac=' plugins_report = False @@ -234,24 +234,7 @@ def send_notifications (db): mail_text = mail_text.replace ('', notiStruc.text + '\n') mail_html = mail_html.replace ('', notiStruc.html) - mylog('verbose', ['[Notification] Events sections done.']) - - if 'ports' in conf.INCLUDED_SECTIONS : - # collect "ports" for the webhook json - mylog('verbose', ['[Notification] Ports: conf.changedPorts_json_struc:', conf.changedPorts_json_struc]) - if conf.changedPorts_json_struc is not None: - json_ports = conf.changedPorts_json_struc.json["data"] - - notiStruc = construct_notifications(db, "", "Ports", True, conf.changedPorts_json_struc) - - mail_html = mail_html.replace ('', notiStruc.html) - - portsTxt = "" - if conf.changedPorts_json_struc is not None: - portsTxt 
= "Ports \n---------\n Ports changed! Check PiAlert for details!\n" - - mail_text = mail_text.replace ('', portsTxt ) - mylog('verbose', ['[Notification] Ports sections done.']) + mylog('verbose', ['[Notification] Events sections done.']) if 'plugins' in conf.INCLUDED_SECTIONS: # Compose Plugins Section @@ -347,9 +330,7 @@ def send_notifications (db): WHERE eve_PendingAlertEmail = 1""") # clear plugin events - sql.execute ("DELETE FROM Plugins_Events") - - conf.changedPorts_json_struc = None + sql.execute ("DELETE FROM Plugins_Events") # DEBUG - print number of rows updated mylog('minimal', ['[Notification] Notifications changes: ', sql.rowcount]) @@ -486,10 +467,13 @@ def skip_repeated_notifications (db): #------------------------------------------------------------------------------- def check_and_run_event(db, pluginsState): + mylog('debug', [f'[MAIN] processScan1: {pluginsState.processScan}']) sql = db.sql # TO-DO sql.execute(""" select * from Parameters where par_ID = "Front_Event" """) rows = sql.fetchall() + mylog('debug', [f'[MAIN] processScan2: {pluginsState.processScan}']) + event, param = ['',''] if len(rows) > 0 and rows[0]['par_Value'] != 'finished': keyValue = rows[0]['par_Value'].split('|') @@ -498,7 +482,7 @@ def check_and_run_event(db, pluginsState): event = keyValue[0] param = keyValue[1] else: - return + return pluginsState if event == 'test': handle_test(param) @@ -511,6 +495,8 @@ def check_and_run_event(db, pluginsState): # commit to DB db.commitDB() + mylog('debug', [f'[MAIN] processScan3: {pluginsState.processScan}']) + return pluginsState #------------------------------------------------------------------------------- diff --git a/pialert/scanners/nmapscan.py b/pialert/scanners/nmapscan.py deleted file mode 100755 index 3f64ef09..00000000 --- a/pialert/scanners/nmapscan.py +++ /dev/null @@ -1,211 +0,0 @@ - -import subprocess - -import conf -from const import logPath, sql_nmap_scan_all -from helper import json_struc, timeNowTZ, updateState -from logger import append_line_to_file, mylog -#------------------------------------------------------------------------------- - - - -class nmap_entry: - def __init__(self, mac, time, port, state, service, name = '', extra = '', index = 0): - self.mac = mac - self.time = time - self.port = port - self.state = state - self.service = service - self.name = name - self.extra = extra - self.index = index - self.hash = str(mac) + str(port)+ str(state)+ str(service) - - -#------------------------------------------------------------------------------- -def performNmapScan(db, devicesToScan): - """ - run nmap scan on a list of devices - discovers open ports and keeps track existing and new open ports - """ - if len(devicesToScan) > 0: - - timeoutSec = conf.NMAP_TIMEOUT - - devTotal = len(devicesToScan) - - updateState(db,"Scan: Nmap") - - mylog('verbose', ['[NMAP Scan] Scan: Nmap for max ', str(timeoutSec), 's ('+ str(round(int(timeoutSec) / 60, 1)) +'min) per device']) - mylog('verbose', ["[NMAP Scan] Estimated max delay: ", (devTotal * int(timeoutSec)), 's ', '(', round((devTotal * int(timeoutSec))/60,1) , 'min)' ]) - - devIndex = 0 - for device in devicesToScan: - # Execute command - output = "" - # prepare arguments from user supplied ones - nmapArgs = ['nmap'] + conf.NMAP_ARGS.split() + [device["dev_LastIP"]] - - progress = ' (' + str(devIndex+1) + '/' + str(devTotal) + ')' - - try: - # try runnning a subprocess with a forced (timeout + 30 seconds) in case the subprocess hangs - output = subprocess.check_output (nmapArgs, 
universal_newlines=True, stderr=subprocess.STDOUT, timeout=(timeoutSec + 30)) - except subprocess.CalledProcessError as e: - # An error occured, handle it - mylog('none', ["[NMAP Scan] " ,e.output]) - mylog('none', ["[NMAP Scan] Error - Nmap Scan - check logs", progress]) - except subprocess.TimeoutExpired as timeErr: - mylog('verbose', ['[NMAP Scan] Nmap TIMEOUT - the process forcefully terminated as timeout reached for ', device["dev_LastIP"], progress]) - - if output == "": # check if the subprocess failed - mylog('minimal', ['[NMAP Scan] Nmap FAIL for ', device["dev_LastIP"], progress ,' check logs for details']) - else: - mylog('verbose', ['[NMAP Scan] Nmap SUCCESS for ', device["dev_LastIP"], progress]) - - devIndex += 1 - - # check the last run output - newLines = output.split('\n') - - # regular logging - for line in newLines: - append_line_to_file (logPath + '/pialert_nmap.log', line +'\n') - - # collect ports / new Nmap Entries - newEntriesTmp = [] - - index = 0 - startCollecting = False - duration = "" - for line in newLines: - if 'Starting Nmap' in line: - if len(newLines) > index+1 and 'Note: Host seems down' in newLines[index+1]: - break # this entry is empty - elif 'PORT' in line and 'STATE' in line and 'SERVICE' in line: - startCollecting = True - elif 'PORT' in line and 'STATE' in line and 'SERVICE' in line: - startCollecting = False # end reached - elif startCollecting and len(line.split()) == 3: - newEntriesTmp.append(nmap_entry(device["dev_MAC"], timeNowTZ(), line.split()[0], line.split()[1], line.split()[2], device["dev_Name"])) - elif 'Nmap done' in line: - duration = line.split('scanned in ')[1] - index += 1 - mylog('verbose', ['[NMAP Scan] Ports found by NMAP: ', len(newEntriesTmp)]) - process_discovered_ports(db, device, newEntriesTmp) - #end for loop - - - -def process_discovered_ports(db, device, discoveredPorts): - """ - process ports discovered by nmap - compare to previosu ports - update DB - raise notifications - """ - sql = db.sql # TO-DO - # previous Nmap Entries - oldEntries = [] - changedPortsTmp = [] - - mylog('verbose', ['[NMAP Scan] Process ports found by NMAP: ', len(discoveredPorts)]) - - if len(discoveredPorts) > 0: - - # get all current NMAP ports from the DB - rows = db.read(sql_nmap_scan_all) - - for row in rows: - # only collect entries matching the current MAC address - if row["MAC"] == device["dev_MAC"]: - oldEntries.append(nmap_entry(row["MAC"], row["Time"], row["Port"], row["State"], row["Service"], device["dev_Name"], row["Extra"], row["Index"])) - - newEntries = [] - - # Collect all entries that don't match the ones in the DB - for discoveredPort in discoveredPorts: - - found = False - - # Check the new entry is already available in oldEntries and remove from processing if yes - for oldEntry in oldEntries: - if discoveredPort.hash == oldEntry.hash: - found = True - - if not found: - newEntries.append(discoveredPort) - - - mylog('verbose', ['[NMAP Scan] Nmap newly discovered or changed ports: ', len(newEntries)]) - - # collect new ports, find the corresponding old entry and return for notification purposes - # also update the DB with the new values after deleting the old ones - if len(newEntries) > 0: - - # params to build the SQL query - params = [] - indexesToDelete = "" - - # Find old entry matching the new entry hash - for newEntry in newEntries: - - foundEntry = None - - for oldEntry in oldEntries: - if oldEntry.hash == newEntry.hash: - indexesToDelete = indexesToDelete + str(oldEntry.index) + ',' - foundEntry = oldEntry - - 
columnNames = ["Name", "MAC", "Port", "State", "Service", "Extra", "NewOrOld" ] - - # Old entry found - if foundEntry is not None: - # Build params for sql query - params.append((newEntry.mac, newEntry.time, newEntry.port, newEntry.state, newEntry.service, oldEntry.extra)) - # Build JSON for API and notifications - changedPortsTmp.append({ - "Name" : foundEntry.name, - "MAC" : newEntry.mac, - "Port" : newEntry.port, - "State" : newEntry.state, - "Service" : newEntry.service, - "Extra" : foundEntry.extra, - "NewOrOld" : "New values" - }) - changedPortsTmp.append({ - "Name" : foundEntry.name, - "MAC" : foundEntry.mac, - "Port" : foundEntry.port, - "State" : foundEntry.state, - "Service" : foundEntry.service, - "Extra" : foundEntry.extra, - "NewOrOld" : "Old values" - }) - # New entry - no matching Old entry found - else: - # Build params for sql query - params.append((newEntry.mac, newEntry.time, newEntry.port, newEntry.state, newEntry.service, '')) - # Build JSON for API and notifications - changedPortsTmp.append({ - "Name" : "New device", - "MAC" : newEntry.mac, - "Port" : newEntry.port, - "State" : newEntry.state, - "Service" : newEntry.service, - "Extra" : "", - "NewOrOld" : "New device" - }) - - conf.changedPorts_json_struc = json_struc({ "data" : changedPortsTmp}, columnNames) - - # Delete old entries if available - if len(indexesToDelete) > 0: - sql.execute ("DELETE FROM Nmap_Scan where \"Index\" in (" + indexesToDelete[:-1] +")") - db.commitDB() - - # Insert new values into the DB - sql.executemany ("""INSERT INTO Nmap_Scan ("MAC", "Time", "Port", "State", "Service", "Extra") VALUES (?, ?, ?, ?, ?, ?)""", params) - db.commitDB() - -
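
The new config.json wires the scan through the generic plugin runner: the four "params" entries (ips and macs from SQL against the DEVICES table, timeout from NMAP_RUN_TIMEOUT, args from NMAP_ARGS with "base64": true) are resolved by the new plugin_param class in pialert/plugin.py, flattened into comma-separated strings and, judging by the CMD template, substituted into the command line. The script therefore receives tokens roughly like

    python3 /home/pi/pialert/front/plugins/nmap_scan/script.py ips=192.168.1.2,192.168.1.9 macs=ac:de:48:00:11:22,ac:de:48:00:11:23 timeout=300 args=b'LXAgLTEwMDAw'

where the IPs and MACs are made up and b'LXAgLTEwMDAw' is '-p -10000' base64-encoded. The sketch below shows one way such name=value tokens can be consumed; parse_kv, decode_b64_setting and the variable names are illustrative only, not part of the shipped script.py.

import base64
import re
import sys

def parse_kv(tokens):
    # Turn ["ips=10.0.0.2,10.0.0.9", "args=b'LXAgLTEwMDAw'", ...] into a dict of raw strings.
    params = {}
    for token in tokens:
        name, _, value = token.partition('=')
        params[name] = value
    return params

def decode_b64_setting(value):
    # Settings flagged with "base64": true arrive as str(base64.b64encode(...)),
    # i.e. the literal text b'LXAgLTEwMDAw'; peel the bytes-literal wrapper, then decode.
    match = re.match(r"^b'(.*)'$", value)
    payload = match.group(1) if match else value
    return base64.b64decode(payload).decode('ascii')

if __name__ == '__main__':
    p        = parse_kv(sys.argv[1:])
    ips      = p.get('ips', '').split(',')     # flatten_array() joins the SQL rows with commas
    macs     = p.get('macs', '').split(',')    # same order as ips, both read from DEVICES
    timeout  = p.get('timeout', '300')         # NMAP_RUN_TIMEOUT: max seconds per device
    nmapArgs = decode_b64_setting(p.get('args', "b''"))   # e.g. "-p -10000"
    print(ips, macs, timeout, nmapArgs)

Encoding NMAP_ARGS as base64 presumably avoids its embedded spaces and leading dashes being re-split or mistaken for options when the command string is tokenized.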
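
performNmapScan() in the new script.py shells out to nmap once per IP, building the command as ['nmap'] + args.split() + [ip], enforcing the per-device timeout through subprocess, and scraping the PORT/STATE/SERVICE table out of the plain-text report. Below is a self-contained sketch of that parsing idea; scan_ports() is an illustrative name, not the plugin's own function.

import re
import subprocess

def scan_ports(ip, nmap_args, timeout_sec):
    cmd = ['nmap'] + nmap_args.split() + [ip]
    try:
        output = subprocess.check_output(cmd, universal_newlines=True,
                                         stderr=subprocess.STDOUT, timeout=int(timeout_sec))
    except (subprocess.CalledProcessError, subprocess.TimeoutExpired):
        return []   # the plugin logs and skips the device in these cases

    rows, in_table = [], False
    for line in output.splitlines():
        if 'PORT' in line and 'STATE' in line and 'SERVICE' in line:
            in_table = True            # header row of the port table
            continue
        if in_table:
            parts = line.split()
            if parts and re.match(r'^\d+/(tcp|udp)$', parts[0]) and len(parts) >= 3:
                rows.append((parts[0], parts[1], parts[2]))   # e.g. ('80/tcp', 'open', 'http')
            else:
                in_table = False       # blank line, "MAC Address:" or "Nmap done" ends the table
    return rows

# Example: scan_ports('192.168.1.1', '-p -10000', 300)

In main(), each such row becomes one results.add_object() call: the device MAC goes into Object_PrimaryID, the port into Object_SecondaryID, and state/service into Watched_Value1/Watched_Value2, which matches the layout the database_column_definitions in config.json render in the UI.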
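
With the conversion, the Nmap block disappears from conf.py and initialise.py (NMAP_ACTIVE, NMAP_TIMEOUT, NMAP_RUN, NMAP_RUN_SCHD, NMAP_ARGS) and the equivalent knobs are generated from the plugin's config.json instead. Assuming plugin settings surface as <unique_prefix>_<function> keys, which the CMD template already implies by referencing NMAP_RUN_TIMEOUT and NMAP_ARGS, the user-facing settings look roughly like the fragment below (values are the plugin defaults; normally these are edited from the Settings UI rather than by hand):

NMAP_RUN         = 'disabled'      # or 'once', 'schedule', 'always_after_scan', 'on_new_device'
NMAP_RUN_SCHD    = '0 2 * * *'     # cron-like schedule, used when NMAP_RUN = 'schedule'
NMAP_RUN_TIMEOUT = 300             # max run time per device, in seconds
NMAP_ARGS        = '-p -10000'     # passed straight to nmap; scans ports 1 to 10000 here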