more scanning

This commit is contained in:
Data-Monkey
2023-05-30 18:47:28 +10:00
parent 5b05be24ad
commit 12bf4c7bcc
11 changed files with 210 additions and 200 deletions

View File

@@ -39,8 +39,7 @@ from scanners.pholusscan import performPholusScan
 from scanners.nmapscan import performNmapScan
 from scanners.internet import check_internet_IP

 # Global variables
-changedPorts_json_struc = None

 #===============================================================================
 #===============================================================================

View File

@@ -17,6 +17,12 @@ time_started = ''
 check_report = []
 log_timestamp = 0
 arpscan_devices = []

+# for MQTT
+mqtt_connected_to_broker = False
+mqtt_sensors = []

+# for notifications
+changedPorts_json_struc = None

 # ACTUAL CONFIGURATION ITEMS set to defaults
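Note: this commit consistently moves mutable cross-module state (the MQTT flags above, plus changedPorts_json_struc for notifications) onto the shared conf module, so every importer reads and writes the same object. A minimal sketch of the pattern, with an illustrative callback name:

import conf   # conf.py holds the single copy of the runtime state

def on_broker_connect():   # illustrative function name
    # rebinding conf.<name> is visible to every module that did "import conf";
    # "from conf import mqtt_connected_to_broker" would only rebind a local copy
    conf.mqtt_connected_to_broker = True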

View File

@@ -367,17 +367,21 @@ class DB():
 #-------------------------------------------------------------------------------
 def get_table_as_json(self, sqlQuery):

-    self.sql.execute(sqlQuery)
-    columnNames = list(map(lambda x: x[0], self.sql.description))
-    rows = self.sql.fetchall()
+    mylog('debug',[ '[Database] - get_table_as_json - Query: ', sqlQuery])
+    try:
+        self.sql.execute(sqlQuery)
+        columnNames = list(map(lambda x: x[0], self.sql.description))
+        rows = self.sql.fetchall()
+    except sqlite3.Error as e:
+        mylog('none',[ '[Database] - SQL ERROR: ', e])
+        return None

     result = {"data":[]}
     for row in rows:
         tmp = row_to_json(columnNames, row)
         result["data"].append(tmp)

+    mylog('debug',[ '[Database] - get_table_as_json - returning ', len(rows), " rows with columns: ", columnNames])
     return json_struc(result, columnNames)
#-------------------------------------------------------------------------------
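Note: get_table_as_json now degrades gracefully, returning None instead of raising when the query fails. A hedged usage sketch (db is an open DB() instance; the table and column names are illustrative; json_struc exposes the result under .json, as used elsewhere in this commit):

res = db.get_table_as_json('SELECT dev_MAC, dev_Name FROM Devices')
if res is None:
    pass   # the failure was already logged; skip this reporting cycle
else:
    for entry in res.json['data']:   # list of {column: value} dicts
        print(entry['dev_Name'])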
@@ -387,15 +391,15 @@ class DB():
"""check the query and arguments are aligned and are read only"""
mylog('debug',[ '[Database] - SELECT Query: ', query, " params: ", args])
try:
assert query.count('?') == len(args)
assert query.upper().strip().startswith('SELECT')
self.sql.execute(query, args)
rows = self.sql.fetchall()
return rows
assert query.count('?') == len(args)
assert query.upper().strip().startswith('SELECT')
self.sql.execute(query, args)
rows = self.sql.fetchall()
return rows
except AssertionError:
mylog('none',[ '[Database] - ERROR: inconsistent query and/or arguments.', query, " params: ", args])
mylog('none',[ '[Database] - ERROR: inconsistent query and/or arguments.', query, " params: ", args])
except sqlite3.Error as e:
mylog('none',[ '[Database] - SQL ERROR: ', e])
mylog('none',[ '[Database] - SQL ERROR: ', e])
return None
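Note: the guard above is a lightweight safety net, not a SQL parser: it checks that the number of '?' placeholders matches the argument count and that the statement starts with SELECT. A standalone sketch of the same logic (function name is illustrative):

def is_safe_select(query: str, args: tuple) -> bool:
    # heuristic only: a '?' inside a string literal would still be counted
    return query.count('?') == len(args) and query.upper().strip().startswith('SELECT')

assert is_safe_select('SELECT * FROM Devices WHERE dev_MAC = ?', ('aa:bb:cc:dd:ee:ff',))
assert not is_safe_select('DELETE FROM Devices', ())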

View File

@@ -8,27 +8,16 @@ from collections import namedtuple
 import conf
 from const import pluginsPath, logPath
 from logger import mylog
-from helper import timeNowTZ, updateState, get_file_content, write_file
+from helper import timeNow, updateState, get_file_content, write_file
 from api import update_api

 #-------------------------------------------------------------------------------
-# this is duplicated from helper to avoid circular reference !! TO-DO
-#-------------------------------------------------------------------------------
-def timeNow():
-    return datetime.datetime.now().replace(microsecond=0)
-#-------------------------------------------------------------------------------

 def run_plugin_scripts(db, runType, plugins = conf.plugins):

-    # global plugins, tz, mySchedules

     # Header
     updateState(db,"Run: Plugins")
-    mylog('debug', ['  [Plugins] Check if any plugins need to be executed on run type: ', runType])
+    mylog('debug', ['[Plugins] Check if any plugins need to be executed on run type: ', runType])

     for plugin in plugins:
@@ -49,12 +38,12 @@ def run_plugin_scripts(db, runType, plugins = conf.plugins):
             shouldRun = schd.runScheduleCheck()
             if shouldRun:
                 # note the last time the scheduled plugin run was executed
-                schd.last_run = timeNowTZ()
+                schd.last_run = timeNow()

         if shouldRun:
             print_plugin_info(plugin, ['display_name'])
-            mylog('debug', ['  [Plugins] CMD: ', get_plugin_setting(plugin, "CMD")["value"]])
+            mylog('debug', ['[Plugins] CMD: ', get_plugin_setting(plugin, "CMD")["value"]])
             execute_plugin(db, plugin)
@@ -81,11 +70,11 @@ def get_plugins_configs():
 #-------------------------------------------------------------------------------
 def print_plugin_info(plugin, elements = ['display_name']):

-    mylog('verbose', ['  [Plugins] ---------------------------------------------'])
+    mylog('verbose', ['[Plugins] ---------------------------------------------'])

     for el in elements:
         res = get_plugin_string(plugin, el)
-        mylog('verbose', ['  [Plugins] ', el ,': ', res])
+        mylog('verbose', ['[Plugins] ', el ,': ', res])

 #-------------------------------------------------------------------------------
@@ -99,7 +88,7 @@ def get_plugin_setting(plugin, function_key):
             result = set

     if result == None:
-        mylog('none', ['  [Plugins] Setting with "function":"', function_key, '" is missing in plugin: ', get_plugin_string(plugin, 'display_name')])
+        mylog('none', ['[Plugins] Setting with "function":"', function_key, '" is missing in plugin: ', get_plugin_string(plugin, 'display_name')])

     return result
@@ -162,7 +151,7 @@ def execute_plugin(db, plugin):
     else:
         set_RUN_TIMEOUT = set["value"]

-    mylog('debug', ['  [Plugins] Timeout: ', set_RUN_TIMEOUT])
+    mylog('debug', ['[Plugins] Timeout: ', set_RUN_TIMEOUT])

     # Prepare custom params
     params = []
@@ -183,7 +172,7 @@ def execute_plugin(db, plugin):
             resolved = flatten_array(db.get_sql_array(param["value"]))

             if resolved == None:
-                mylog('none', ['  [Plugins] The parameter "name":"', param["name"], '" was resolved as None'])
+                mylog('none', ['[Plugins] The parameter "name":"', param["name"], '" was resolved as None'])
             else:
                 params.append( [param["name"], resolved] )
@@ -199,8 +188,8 @@ def execute_plugin(db, plugin):
     command = resolve_wildcards_arr(set_CMD.split(), params)

     # Execute command
-    mylog('verbose', ['  [Plugins] Executing: ', set_CMD])
-    mylog('debug', ['  [Plugins] Resolved : ', command])
+    mylog('verbose', ['[Plugins] Executing: ', set_CMD])
+    mylog('debug', ['[Plugins] Resolved : ', command])

     try:
         # try running a subprocess with a forced timeout in case the subprocess hangs
@@ -208,9 +197,9 @@ def execute_plugin(db, plugin):
     except subprocess.CalledProcessError as e:
         # An error occurred, handle it
         mylog('none', [e.output])
-        mylog('none', ['  [Plugins] Error - enable LOG_LEVEL=debug and check logs'])
+        mylog('none', ['[Plugins] Error - enable LOG_LEVEL=debug and check logs'])
     except subprocess.TimeoutExpired as timeErr:
-        mylog('none', ['  [Plugins] TIMEOUT - the process forcefully terminated as timeout reached'])
+        mylog('none', ['[Plugins] TIMEOUT - the process forcefully terminated as timeout reached'])

     # check the last run output
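Note: the try block above is the standard "run with a forced timeout" pattern, so a hung plugin can never block the main loop. A self-contained sketch of it (the command is illustrative):

import subprocess

def run_with_timeout(command, timeout_sec):
    try:
        # capture stdout+stderr; raise on non-zero exit; give up after timeout_sec
        return subprocess.check_output(command, universal_newlines=True,
                                       stderr=subprocess.STDOUT, timeout=timeout_sec)
    except subprocess.CalledProcessError as e:
        print('plugin failed:', e.output)
    except subprocess.TimeoutExpired:
        print('plugin timed out after', timeout_sec, 's')
    return ''

output = run_with_timeout(['echo', 'hello'], timeout_sec=10)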
@@ -231,7 +220,7 @@ def execute_plugin(db, plugin):
         if len(columns) == 9:
             sqlParams.append((plugin["unique_prefix"], columns[0], columns[1], 'null', columns[2], columns[3], columns[4], columns[5], columns[6], 0, columns[7], 'null', columns[8]))
         else:
-            mylog('none', ['  [Plugins]: Skipped invalid line in the output: ', line])
+            mylog('none', ['[Plugins]: Skipped invalid line in the output: ', line])

     # pialert-db-query
     if plugin['data_source'] == 'pialert-db-query':
@@ -239,7 +228,7 @@ def execute_plugin(db, plugin):
         q = set_CMD.replace("{s-quote}", '\'')

         # Execute command
-        mylog('verbose', ['  [Plugins] Executing: ', q])
+        mylog('verbose', ['[Plugins] Executing: ', q])

         # set_CMD should contain a SQL query
         arr = db.get_sql_array (q)
@@ -249,15 +238,15 @@ def execute_plugin(db, plugin):
             if len(row) == 9 and (row[0] in ['','null']) == False :
                 sqlParams.append((plugin["unique_prefix"], row[0], handle_empty(row[1]), 'null', row[2], row[3], row[4], handle_empty(row[5]), handle_empty(row[6]), 0, row[7], 'null', row[8]))
             else:
-                mylog('none', ['  [Plugins]: Skipped invalid sql result'])
+                mylog('none', ['[Plugins]: Skipped invalid sql result'])

     # check if the subprocess / SQL query failed / there was no valid output
     if len(sqlParams) == 0:
-        mylog('none', ['  [Plugins] No output received from the plugin ', plugin["unique_prefix"], ' - enable LOG_LEVEL=debug and check logs'])
+        mylog('none', ['[Plugins] No output received from the plugin ', plugin["unique_prefix"], ' - enable LOG_LEVEL=debug and check logs'])
         return
     else:
-        mylog('verbose', ['[', timeNow(), '] [Plugins]: SUCCESS, received ', len(sqlParams), ' entries'])
+        mylog('verbose', ['[Plugins]: SUCCESS, received ', len(sqlParams), ' entries'])

     # process results if any
     if len(sqlParams) > 0:
@@ -350,11 +339,11 @@ def flatten_array(arr):
 # Replace {wildcards} with parameters
 def resolve_wildcards_arr(commandArr, params):

-    mylog('debug', ['  [Plugins]: Pre-Resolved CMD: '] + commandArr)
+    mylog('debug', ['[Plugins]: Pre-Resolved CMD: '] + commandArr)

     for param in params:
-        # mylog('debug', ['  [Plugins]: key     : {', param[0], '}'])
-        # mylog('debug', ['  [Plugins]: resolved: ', param[1]])
+        # mylog('debug', ['[Plugins]: key     : {', param[0], '}'])
+        # mylog('debug', ['[Plugins]: resolved: ', param[1]])

         i = 0
@@ -391,7 +380,7 @@ def process_plugin_events(db, plugin):
     pluginPref = plugin["unique_prefix"]

-    mylog('debug', ['  [Plugins] Processing : ', pluginPref])
+    mylog('debug', ['[Plugins] Processing : ', pluginPref])

     plugObjectsArr = db.get_sql_array ("SELECT * FROM Plugins_Objects where Plugin = '" + str(pluginPref)+"'")
     plugEventsArr = db.get_sql_array ("SELECT * FROM Plugins_Events where Plugin = '" + str(pluginPref)+"'")
@@ -404,8 +393,8 @@ def process_plugin_events(db, plugin):
     existingPluginObjectsCount = len(pluginObjects)

-    mylog('debug', ['  [Plugins] Existing objects : ', existingPluginObjectsCount])
-    mylog('debug', ['  [Plugins] New and existing events : ', len(plugEventsArr)])
+    mylog('debug', ['[Plugins] Existing objects : ', existingPluginObjectsCount])
+    mylog('debug', ['[Plugins] New and existing events : ', len(plugEventsArr)])

     # set status as new - will be changed later if conditions are fulfilled, e.g. entry found
     for eve in plugEventsArr:
@@ -420,7 +409,7 @@ def process_plugin_events(db, plugin):
         # compare hash of the IDs for uniqueness
         if any(x.idsHash == tmpObject.idsHash for x in pluginObjects):
-            mylog('debug', ['  [Plugins] Found existing object'])
+            mylog('debug', ['[Plugins] Found existing object'])
             pluginEvents[index].status = "exists"
         index += 1
@@ -488,7 +477,7 @@ def process_plugin_events(db, plugin):
     dbTable = plugin['mapped_to_table']

-    mylog('debug', ['  [Plugins] Mapping objects to database table: ', dbTable])
+    mylog('debug', ['[Plugins] Mapping objects to database table: ', dbTable])

     # collect all columns to be mapped
     mappedCols = []
@@ -542,7 +531,7 @@ def process_plugin_events(db, plugin):
     q = f'INSERT into {dbTable} ({columnsStr}) VALUES ({valuesStr})'

-    mylog('debug', ['  [Plugins] SQL query for mapping: ', q ])
+    mylog('debug', ['[Plugins] SQL query for mapping: ', q ])

     sql.executemany (q, sqlParams)
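Note: resolve_wildcards_arr (partially shown above) substitutes {name} placeholders in the split command with the resolved parameter values before execution. A hedged sketch of the idea, not the repo's exact implementation:

def resolve_wildcards(commandArr, params):
    # params is a list of [name, value] pairs, as built in execute_plugin above
    resolved = []
    for part in commandArr:
        for name, value in params:
            part = part.replace('{' + name + '}', str(value))
        resolved.append(part)
    return resolved

# e.g. resolve_wildcards(['nmap', '{ip}'], [['ip', '10.0.0.5']]) -> ['nmap', '10.0.0.5']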

View File

@@ -5,4 +5,4 @@ each publisher exposes:
 - check_config () returning True / False
 - send (message) returning True / False
-"""
+"""

View File

@@ -1,3 +1,5 @@
""" Pi.Alert module to send notification emails """
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
import smtplib
@@ -20,7 +22,7 @@ def send (msg: noti_struc):
     pText = msg.text
     pHTML = msg.html

+    mylog('debug', '[Send Email] REPORT_TO: ' + hide_email(str(conf.REPORT_TO)) + ' SMTP_USER: ' + hide_email(str(conf.SMTP_USER)))

     # Compose email
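Note: hide_email masks the address before it is written to the log. The repo's helper is not shown in this diff; a plausible sketch of such a masker (an assumption, the actual implementation may differ):

def hide_email(email: str) -> str:
    # 'john.doe@example.com' -> 'j*******@example.com'
    if '@' not in email:
        return email
    local, domain = email.split('@', 1)
    return local[0] + '*' * (len(local) - 1) + '@' + domain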

View File

@@ -14,10 +14,6 @@ from helper import bytes_to_string, sanitize_string
 # MQTT
 #-------------------------------------------------------------------------------
-mqtt_connected_to_broker = False
-mqtt_sensors = []

 #-------------------------------------------------------------------------------
 def check_config():
     if conf.MQTT_BROKER == '' or conf.MQTT_PORT == '' or conf.MQTT_USER == '' or conf.MQTT_PASSWORD == '':
@@ -76,11 +72,9 @@ def create_sensor(client, deviceId, deviceName, sensorType, sensorName, icon):
     new_sensor_config = sensor_config(deviceId, deviceName, sensorType, sensorName, icon)

     # check if config already in list and if not, add it, otherwise skip
-    global mqtt_sensors, uniqueSensorCount

     is_unique = True

-    for sensor in mqtt_sensors:
+    for sensor in conf.mqtt_sensors:
         if sensor.hash == new_sensor_config.hash:
             is_unique = False
             break
@@ -93,9 +87,7 @@ def create_sensor(client, deviceId, deviceName, sensorType, sensorName, icon):
 #-------------------------------------------------------------------------------
 def publish_sensor(client, sensorConf):

-    global mqtt_sensors

     message = '{ \
         "name":"'+ sensorConf.deviceName +' '+sensorConf.sensorName+'", \
@@ -118,26 +110,24 @@ def publish_sensor(client, sensorConf):
     # hack - delay adding to the queue in case the process is
     time.sleep(conf.MQTT_DELAY_SEC) # restarted and previous publish processes aborted
     # (it takes ~2s to update a sensor config on the broker)
-    mqtt_sensors.append(sensorConf)
+    conf.mqtt_sensors.append(sensorConf)

 #-------------------------------------------------------------------------------
 def mqtt_create_client():

     def on_disconnect(client, userdata, rc):
-        global mqtt_connected_to_broker
-        mqtt_connected_to_broker = False
+        conf.mqtt_connected_to_broker = False

         # not sure if the line below is correct / necessary
         # client = mqtt_create_client()

     def on_connect(client, userdata, flags, rc):
-        global mqtt_connected_to_broker

         if rc == 0:
             mylog('verbose', [" Connected to broker"])
-            mqtt_connected_to_broker = True # Signal connection
+            conf.mqtt_connected_to_broker = True # Signal connection
         else:
             mylog('none', [" Connection failed"])
-            mqtt_connected_to_broker = False
+            conf.mqtt_connected_to_broker = False

     client = mqtt_client.Client('PiAlert') # Set Connecting Client ID
@@ -150,12 +140,12 @@ def mqtt_create_client():
     return client

 #-------------------------------------------------------------------------------
-def mqtt_start():
+def mqtt_start(db):

-    global client, mqtt_connected_to_broker
+    #global client

-    if mqtt_connected_to_broker == False:
-        mqtt_connected_to_broker = True
+    if conf.mqtt_connected_to_broker == False:
+        conf.mqtt_connected_to_broker = True
         client = mqtt_create_client()

     # General stats
@@ -164,7 +154,7 @@ def mqtt_start():
     create_generic_device(client)

     # Get the data
-    row = get_device_stats()
+    row = get_device_stats(db)

     columns = ["online","down","all","archived","new","unknown"]
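Note: mqtt_start now receives the db handle (threaded through to get_device_stats) and tracks the connection flag on conf. For orientation, a condensed sketch of the client wiring, assuming the paho-mqtt 1.x API this module already imports:

import conf
from paho.mqtt import client as mqtt_client

def create_client():                                  # illustrative name
    def on_connect(client, userdata, flags, rc):
        conf.mqtt_connected_to_broker = (rc == 0)     # rc == 0: broker accepted us

    def on_disconnect(client, userdata, rc):
        conf.mqtt_connected_to_broker = False

    client = mqtt_client.Client('PiAlert')
    client.username_pw_set(conf.MQTT_USER, conf.MQTT_PASSWORD)
    client.on_connect = on_connect
    client.on_disconnect = on_disconnect
    client.connect(conf.MQTT_BROKER, conf.MQTT_PORT)
    client.loop_start()                               # network loop runs on a background thread
    return client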

View File

@@ -3,7 +3,8 @@ import conf
 import requests
 from base64 import b64encode

-from logger import mylog, noti_struc
+from logger import mylog
+from helper import noti_struc

 #-------------------------------------------------------------------------------
 def check_config():
@@ -15,7 +16,7 @@ def check_config():
 #-------------------------------------------------------------------------------
 def send (msg: noti_struc):
-    _Text = msg.html

     headers = {
         "Title": "Pi.Alert Notification",
         "Actions": "view, Open Dashboard, "+ conf.REPORT_DASHBOARD_URL,
@@ -32,5 +33,5 @@ def send (msg: noti_struc):
         headers["Authorization"] = "Basic {}".format(basichash)

     requests.post("{}/{}".format( conf.NTFY_HOST, conf.NTFY_TOPIC),
-                  data=_Text,
+                  data=msg.html,
                   headers=headers)
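Note: dropping the _Text intermediate and posting msg.html directly removes a name that existed only to alias the field. For reference, the Basic auth header above is built the usual way; a standalone sketch with illustrative credentials and topic:

from base64 import b64encode
import requests

user, password = 'alice', 'secret'              # illustrative credentials
basichash = b64encode(f'{user}:{password}'.encode()).decode()

requests.post('https://ntfy.sh/my-topic',       # illustrative host/topic
              data='<b>Ports changed!</b>',
              headers={'Authorization': 'Basic {}'.format(basichash)})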

View File

@@ -16,7 +16,7 @@ def check_config():
 #-------------------------------------------------------------------------------
-def send_webhook (msg: noti_struc):
+def send (msg: noti_struc):

     # use data type based on specified payload type
     if conf.WEBHOOK_PAYLOAD == 'json':
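Note: the rename aligns webhook with the publisher contract documented in publishers/__init__.py (check_config() and send(message)). That uniformity allows a dispatch loop along these lines (a sketch; the caller in this commit still wires each publisher explicitly):

from publishers import email, ntfy, webhook   # each exposes check_config / send

def publish_all(msg):
    for publisher in (email, ntfy, webhook):
        if publisher.check_config():          # skip publishers that are not configured
            publisher.send(msg)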

View File

@@ -28,6 +28,8 @@ from publishers.pushsafer import (check_config as pushsafer_check_config,
                                  send as send_pushsafer)
+from publishers.mqtt import (check_config as mqtt_check_config,
+                             mqtt_start )

 #===============================================================================
 # REPORTING
 #===============================================================================
@@ -119,11 +121,13 @@ def send_notifications (db, INCLUDED_SECTIONS = conf.INCLUDED_SECTIONS):
)""")
# Open text Template
mylog('verbose', ['[Notification] Open text Template'])
template_file = open(pialertPath + '/back/report_template.txt', 'r')
mail_text = template_file.read()
template_file.close()
# Open html Template
mylog('verbose', ['[Notification] Open html Template'])
template_file = open(pialertPath + '/back/report_template.html', 'r')
if conf.newVersionAvailable :
template_file = open(pialertPath + '/back/report_template_new_version.html', 'r')
@@ -139,6 +143,7 @@ def send_notifications (db, INCLUDED_SECTIONS = conf.INCLUDED_SECTIONS):
     mail_text = mail_text.replace ('<SERVER_NAME>', socket.gethostname() )
     mail_html = mail_html.replace ('<SERVER_NAME>', socket.gethostname() )

+    mylog('verbose', ['[Notification] included sections: ', INCLUDED_SECTIONS])

     if 'internet' in INCLUDED_SECTIONS:
         # Compose Internet Section
         sqlQuery = """SELECT eve_MAC as MAC, eve_IP as IP, eve_DateTime as Datetime, eve_EventType as "Event Type", eve_AdditionalInfo as "More info" FROM Events
@@ -152,6 +157,7 @@ def send_notifications (db, INCLUDED_SECTIONS = conf.INCLUDED_SECTIONS):
         mail_text = mail_text.replace ('<SECTION_INTERNET>', notiStruc.text + '\n')
         mail_html = mail_html.replace ('<INTERNET_TABLE>', notiStruc.html)
+        mylog('verbose', ['[Notification] Internet sections done.'])

     if 'new_devices' in INCLUDED_SECTIONS:
         # Compose New Devices Section
@@ -167,6 +173,7 @@ def send_notifications (db, INCLUDED_SECTIONS = conf.INCLUDED_SECTIONS):
         mail_text = mail_text.replace ('<SECTION_NEW_DEVICES>', notiStruc.text + '\n')
         mail_html = mail_html.replace ('<NEW_DEVICES_TABLE>', notiStruc.html)
+        mylog('verbose', ['[Notification] New Devices sections done.'])

     if 'down_devices' in INCLUDED_SECTIONS:
         # Compose Devices Down Section
# Compose Devices Down Section
@@ -182,6 +189,7 @@ def send_notifications (db, INCLUDED_SECTIONS = conf.INCLUDED_SECTIONS):
         mail_text = mail_text.replace ('<SECTION_DEVICES_DOWN>', notiStruc.text + '\n')
         mail_html = mail_html.replace ('<DOWN_DEVICES_TABLE>', notiStruc.html)
+        mylog('verbose', ['[Notification] Down Devices sections done.'])

     if 'events' in INCLUDED_SECTIONS:
         # Compose Events Section
@@ -198,21 +206,24 @@ def send_notifications (db, INCLUDED_SECTIONS = conf.INCLUDED_SECTIONS):
         mail_text = mail_text.replace ('<SECTION_EVENTS>', notiStruc.text + '\n')
         mail_html = mail_html.replace ('<EVENTS_TABLE>', notiStruc.html)
+        mylog('verbose', ['[Notification] Events sections done.'])

     if 'ports' in INCLUDED_SECTIONS:
         # collect "ports" for the webhook json
-        if changedPorts_json_struc is not None:
-            json_ports = changedPorts_json_struc.json["data"]
-        notiStruc = construct_notifications(db, "", "Ports", True, changedPorts_json_struc)
+        mylog('verbose', ['[Notification] Ports: conf.changedPorts_json_struc:', conf.changedPorts_json_struc])
+        if conf.changedPorts_json_struc is not None:
+            json_ports = conf.changedPorts_json_struc.json["data"]
+        notiStruc = construct_notifications(db, "", "Ports", True, conf.changedPorts_json_struc)
+        mylog('verbose', ['[Notification] Ports: notiStruc:', notiStruc ])

         mail_html = mail_html.replace ('<PORTS_TABLE>', notiStruc.html)

         portsTxt = ""
-        if changedPorts_json_struc is not None:
+        if conf.changedPorts_json_struc is not None:
             portsTxt = "Ports \n---------\n Ports changed! Check PiAlert for details!\n"

         mail_text = mail_text.replace ('<PORTS_TABLE>', portsTxt )
+        mylog('verbose', ['[Notification] Ports sections done.'])

     if 'plugins' in INCLUDED_SECTIONS and conf.ENABLE_PLUGINS:
         # Compose Plugins Section
@@ -228,7 +239,7 @@ def send_notifications (db, INCLUDED_SECTIONS = conf.INCLUDED_SECTIONS):
         # check if we need to report something
         plugins_report = len(json_plugins) > 0
+        mylog('verbose', ['[Notification] Plugins sections done.'])

     json_final = {
         "internet": json_internet,
@@ -293,7 +304,7 @@ def send_notifications (db, INCLUDED_SECTIONS = conf.INCLUDED_SECTIONS):
         if conf.REPORT_MQTT and check_config('mqtt'):
             updateState(db,"Send: MQTT")
             mylog('info', ['[Notification] Establishing MQTT thread'])
-            mqtt_start()
+            mqtt_start(db)
         else :
             mylog('verbose', ['[Notification] Skip MQTT'])
     else :
@@ -310,7 +321,7 @@ def send_notifications (db, INCLUDED_SECTIONS = conf.INCLUDED_SECTIONS):
     # clear plugin events
     sql.execute ("DELETE FROM Plugins_Events")
-    changedPorts_json_struc = None
+    conf.changedPorts_json_struc = None

     # DEBUG - print number of rows updated
     mylog('info', ['[Notification] Notifications changes: ', sql.rowcount])
@@ -423,7 +434,7 @@ def skip_repeated_notifications (db):
     # Skip repeated notifications
     # due to strftime overflow --> use "strftime / 60"
-    mylog('verbose','[Skip Repeated Notifications] Skip Repeated')
+    mylog('verbose','[Skip Repeated Notifications] Skip Repeated start')
     db.sql.execute ("""UPDATE Events SET eve_PendingAlertEmail = 0
                        WHERE eve_PendingAlertEmail = 1 AND eve_MAC IN
                        (
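Note on the template mechanism traced by the new per-section log lines in this file: each included section swaps a <PLACEHOLDER> token in the text/html templates for rendered content. A minimal sketch (the template string is illustrative):

import socket

mail_text = 'Report for <SERVER_NAME>\n<PORTS_TABLE>'
sections = {
    '<SERVER_NAME>': socket.gethostname(),
    '<PORTS_TABLE>': 'Ports \n---------\n Ports changed! Check PiAlert for details!\n',
}
for placeholder, content in sections.items():
    # skipped sections are later blanked out the same way
    mail_text = mail_text.replace(placeholder, content)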

View File

@@ -24,12 +24,10 @@ class nmap_entry:
 #-------------------------------------------------------------------------------
 def performNmapScan(db, devicesToScan):
     sql = db.sql # TO-DO
-    global changedPorts_json_struc
-    changedPortsTmp = []
     """
     run nmap scan on a list of devices
     discovers open ports and keeps track of existing and new open ports
     """

     if len(devicesToScan) > 0:
         timeoutSec = conf.NMAP_TIMEOUT
@@ -38,9 +36,8 @@ def performNmapScan(db, devicesToScan):
         updateState(db,"Scan: Nmap")

-        mylog('verbose', ['[', timeNow(), '] Scan: Nmap for max ', str(timeoutSec), 's ('+ str(round(int(timeoutSec) / 60, 1)) +'min) per device'])
-        mylog('verbose', [" Estimated max delay: ", (devTotal * int(timeoutSec)), 's ', '(', round((devTotal * int(timeoutSec))/60,1) , 'min)' ])
+        mylog('verbose', ['[NMAP Scan] Scan: Nmap for max ', str(timeoutSec), 's ('+ str(round(int(timeoutSec) / 60, 1)) +'min) per device'])
+        mylog('verbose', ["[NMAP Scan] Estimated max delay: ", (devTotal * int(timeoutSec)), 's ', '(', round((devTotal * int(timeoutSec))/60,1) , 'min)' ])

         devIndex = 0
         for device in devicesToScan:
@@ -56,15 +53,15 @@ def performNmapScan(db, devicesToScan):
                 output = subprocess.check_output (nmapArgs, universal_newlines=True, stderr=subprocess.STDOUT, timeout=(timeoutSec + 30))
             except subprocess.CalledProcessError as e:
                 # An error occurred, handle it
-                mylog('none', [e.output])
-                mylog('none', [" Error - Nmap Scan - check logs", progress])
+                mylog('none', ["[NMAP Scan] ", e.output])
+                mylog('none', ["[NMAP Scan] Error - Nmap Scan - check logs", progress])
             except subprocess.TimeoutExpired as timeErr:
-                mylog('verbose', [' Nmap TIMEOUT - the process forcefully terminated as timeout reached for ', device["dev_LastIP"], progress])
+                mylog('verbose', ['[NMAP Scan] Nmap TIMEOUT - the process forcefully terminated as timeout reached for ', device["dev_LastIP"], progress])

             if output == "": # check if the subprocess failed
-                mylog('info', ['[', timeNow(), '] Scan: Nmap FAIL for ', device["dev_LastIP"], progress ,' check logs for details'])
+                mylog('info', ['[NMAP Scan] Nmap FAIL for ', device["dev_LastIP"], progress ,' check logs for details'])
             else:
-                mylog('verbose', ['[', timeNow(), '] Scan: Nmap SUCCESS for ', device["dev_LastIP"], progress])
+                mylog('verbose', ['[NMAP Scan] Nmap SUCCESS for ', device["dev_LastIP"], progress])

             devIndex += 1
@@ -94,110 +91,121 @@ def performNmapScan(db, devicesToScan):
                 elif 'Nmap done' in line:
                     duration = line.split('scanned in ')[1]
                 index += 1

-            mylog('verbose', ['[', timeNow(), '] Scan: Ports found by NMAP: ', len(newEntriesTmp)])
-
-            # previous Nmap Entries
-            oldEntries = []
-
-            if len(newEntriesTmp) > 0:
-
-                # get all current NMAP ports from the DB
-                sql.execute(sql_nmap_scan_all)
-                rows = sql.fetchall()
-
-                for row in rows:
-                    # only collect entries matching the current MAC address
-                    if row["MAC"] == device["dev_MAC"]:
-                        oldEntries.append(nmap_entry(row["MAC"], row["Time"], row["Port"], row["State"], row["Service"], device["dev_Name"], row["Extra"], row["Index"]))
-
-                newEntries = []
-
-                # Collect all entries that don't match the ones in the DB
-                for newTmpEntry in newEntriesTmp:
-                    found = False
-                    # Check if the new entry is already available in oldEntries and remove it from processing if yes
-                    for oldEntry in oldEntries:
-                        if newTmpEntry.hash == oldEntry.hash:
-                            found = True
-                    if not found:
-                        newEntries.append(newTmpEntry)
-
-                mylog('verbose', ['[', timeNow(), '] Scan: Nmap newly discovered or changed ports: ', len(newEntries)])
-
-                # collect new ports, find the corresponding old entry and return it for notification purposes;
-                # also update the DB with the new values after deleting the old ones
-                if len(newEntries) > 0:
-                    # params to build the SQL query
-                    params = []
-                    indexesToDelete = ""
-
-                    # Find the old entry matching the new entry hash
-                    for newEntry in newEntries:
-                        foundEntry = None
-                        for oldEntry in oldEntries:
-                            if oldEntry.hash == newEntry.hash:
-                                indexesToDelete = indexesToDelete + str(oldEntry.index) + ','
-                                foundEntry = oldEntry
-
-                        columnNames = ["Name", "MAC", "Port", "State", "Service", "Extra", "NewOrOld" ]
-
-                        # Old entry found
-                        if foundEntry is not None:
-                            # Build params for sql query
-                            params.append((newEntry.mac, newEntry.time, newEntry.port, newEntry.state, newEntry.service, oldEntry.extra))
-                            # Build JSON for API and notifications
-                            changedPortsTmp.append({
-                                "Name"     : foundEntry.name,
-                                "MAC"      : newEntry.mac,
-                                "Port"     : newEntry.port,
-                                "State"    : newEntry.state,
-                                "Service"  : newEntry.service,
-                                "Extra"    : foundEntry.extra,
-                                "NewOrOld" : "New values"
-                            })
-                            changedPortsTmp.append({
-                                "Name"     : foundEntry.name,
-                                "MAC"      : foundEntry.mac,
-                                "Port"     : foundEntry.port,
-                                "State"    : foundEntry.state,
-                                "Service"  : foundEntry.service,
-                                "Extra"    : foundEntry.extra,
-                                "NewOrOld" : "Old values"
-                            })
-                        # New entry - no matching Old entry found
-                        else:
-                            # Build params for sql query
-                            params.append((newEntry.mac, newEntry.time, newEntry.port, newEntry.state, newEntry.service, ''))
-                            # Build JSON for API and notifications
-                            changedPortsTmp.append({
-                                "Name"     : "New device",
-                                "MAC"      : newEntry.mac,
-                                "Port"     : newEntry.port,
-                                "State"    : newEntry.state,
-                                "Service"  : newEntry.service,
-                                "Extra"    : "",
-                                "NewOrOld" : "New device"
-                            })
-
-                    changedPorts_json_struc = json_struc({ "data" : changedPortsTmp}, columnNames)
-
-                    # Delete old entries if available
-                    if len(indexesToDelete) > 0:
-                        sql.execute ("DELETE FROM Nmap_Scan where \"Index\" in (" + indexesToDelete[:-1] +")")
-                        db.commitDB()
-
-                    # Insert new values into the DB
-                    sql.executemany ("""INSERT INTO Nmap_Scan ("MAC", "Time", "Port", "State", "Service", "Extra") VALUES (?, ?, ?, ?, ?, ?)""", params)
-                    db.commitDB()
+            mylog('verbose', ['[NMAP Scan] Ports found by NMAP: ', len(newEntriesTmp)])
+            process_discovered_ports(db, device, newEntriesTmp)
+        #end for loop
+
+
+def process_discovered_ports(db, device, discoveredPorts):
+    """
+    process ports discovered by nmap,
+    compare to previous ports,
+    update DB,
+    raise notifications
+    """
+    sql = db.sql # TO-DO
+
+    # previous Nmap Entries
+    oldEntries = []
+    changedPortsTmp = []
+
+    mylog('verbose', ['[NMAP Scan] Process ports found by NMAP: ', len(discoveredPorts)])
+
+    if len(discoveredPorts) > 0:
+        # get all current NMAP ports from the DB
+        rows = db.read(sql_nmap_scan_all)
+
+        for row in rows:
+            # only collect entries matching the current MAC address
+            if row["MAC"] == device["dev_MAC"]:
+                oldEntries.append(nmap_entry(row["MAC"], row["Time"], row["Port"], row["State"], row["Service"], device["dev_Name"], row["Extra"], row["Index"]))
+
+        newEntries = []
+
+        # Collect all entries that don't match the ones in the DB
+        for discoveredPort in discoveredPorts:
+            found = False
+            # Check if the new entry is already available in oldEntries and remove it from processing if yes
+            for oldEntry in oldEntries:
+                if discoveredPort.hash == oldEntry.hash:
+                    found = True
+            if not found:
+                newEntries.append(discoveredPort)
+
+        mylog('verbose', ['[NMAP Scan] Nmap newly discovered or changed ports: ', len(newEntries)])
+
+        # collect new ports, find the corresponding old entry and return it for notification purposes;
+        # also update the DB with the new values after deleting the old ones
+        if len(newEntries) > 0:
+            # params to build the SQL query
+            params = []
+            indexesToDelete = ""
+
+            # Find the old entry matching the new entry hash
+            for newEntry in newEntries:
+                foundEntry = None
+                for oldEntry in oldEntries:
+                    if oldEntry.hash == newEntry.hash:
+                        indexesToDelete = indexesToDelete + str(oldEntry.index) + ','
+                        foundEntry = oldEntry
+
+                columnNames = ["Name", "MAC", "Port", "State", "Service", "Extra", "NewOrOld" ]
+
+                # Old entry found
+                if foundEntry is not None:
+                    # Build params for sql query
+                    params.append((newEntry.mac, newEntry.time, newEntry.port, newEntry.state, newEntry.service, oldEntry.extra))
+                    # Build JSON for API and notifications
+                    changedPortsTmp.append({
+                        "Name"     : foundEntry.name,
+                        "MAC"      : newEntry.mac,
+                        "Port"     : newEntry.port,
+                        "State"    : newEntry.state,
+                        "Service"  : newEntry.service,
+                        "Extra"    : foundEntry.extra,
+                        "NewOrOld" : "New values"
+                    })
+                    changedPortsTmp.append({
+                        "Name"     : foundEntry.name,
+                        "MAC"      : foundEntry.mac,
+                        "Port"     : foundEntry.port,
+                        "State"    : foundEntry.state,
+                        "Service"  : foundEntry.service,
+                        "Extra"    : foundEntry.extra,
+                        "NewOrOld" : "Old values"
+                    })
+                # New entry - no matching Old entry found
+                else:
+                    # Build params for sql query
+                    params.append((newEntry.mac, newEntry.time, newEntry.port, newEntry.state, newEntry.service, ''))
+                    # Build JSON for API and notifications
+                    changedPortsTmp.append({
+                        "Name"     : "New device",
+                        "MAC"      : newEntry.mac,
+                        "Port"     : newEntry.port,
+                        "State"    : newEntry.state,
+                        "Service"  : newEntry.service,
+                        "Extra"    : "",
+                        "NewOrOld" : "New device"
+                    })
+
+            conf.changedPorts_json_struc = json_struc({ "data" : changedPortsTmp}, columnNames)
+
+            # Delete old entries if available
+            if len(indexesToDelete) > 0:
+                sql.execute ("DELETE FROM Nmap_Scan where \"Index\" in (" + indexesToDelete[:-1] +")")
+                db.commitDB()
+
+            # Insert new values into the DB
+            sql.executemany ("""INSERT INTO Nmap_Scan ("MAC", "Time", "Port", "State", "Service", "Extra") VALUES (?, ?, ?, ?, ?, ?)""", params)
+            db.commitDB()
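Note: the old/new comparison in process_discovered_ports keys on a per-entry hash of identifying fields (see the nmap_entry constructor call above). A reduced sketch of that diffing idea; the hashed fields are an assumption for illustration:

class PortEntry:                                  # illustrative stand-in for nmap_entry
    def __init__(self, mac, port, state, service):
        self.mac, self.port, self.state, self.service = mac, port, state, service
        # identity hash: the same MAC/port pair with a changed state or service
        # hashes differently, so it surfaces as a changed entry
        self.hash = hash((mac, port, state, service))

old = [PortEntry('aa:bb:cc:dd:ee:ff', 22, 'open', 'ssh')]
new = [PortEntry('aa:bb:cc:dd:ee:ff', 22, 'open', 'ssh'),
       PortEntry('aa:bb:cc:dd:ee:ff', 80, 'open', 'http')]

changed = [e for e in new if all(e.hash != o.hash for o in old)]
print([(e.port, e.service) for e in changed])     # -> [(80, 'http')]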