mirror of
https://github.com/jokob-sk/NetAlertX.git
synced 2025-12-07 09:36:05 -08:00
more scanning
@@ -39,8 +39,7 @@ from scanners.pholusscan import performPholusScan
 from scanners.nmapscan import performNmapScan
 from scanners.internet import check_internet_IP

-# Global variables
-changedPorts_json_struc = None

 #===============================================================================
 #===============================================================================
@@ -17,6 +17,12 @@ time_started = ''
 check_report = []
 log_timestamp = 0
 arpscan_devices = []
+# for MQTT
+mqtt_connected_to_broker = False
+mqtt_sensors = []
+# for notifications
+changedPorts_json_struc = None


 # ACTUAL CONFIGRATION ITEMS set to defaults

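The conf.py hunk above turns the MQTT connection flag, the MQTT sensor list and the changed-ports structure into shared, conf-level state; later hunks in this commit read and write them as conf.mqtt_connected_to_broker, conf.mqtt_sensors and conf.changedPorts_json_struc. A minimal standalone sketch of that pattern (types.SimpleNamespace stands in for the real conf module):

import types

# stand-in for conf.py: module-level names defined once at import time
conf = types.SimpleNamespace(
    mqtt_connected_to_broker=False,
    mqtt_sensors=[],
    changedPorts_json_struc=None,
)

def on_connect_ok():
    # a publisher flips the shared flag; every importer of conf sees the change
    conf.mqtt_connected_to_broker = True

def register_sensor(sensor):
    # appending to the shared list replaces the old module-local mqtt_sensors
    conf.mqtt_sensors.append(sensor)

on_connect_ok()
register_sensor("sensor-1")
print(conf.mqtt_connected_to_broker, conf.mqtt_sensors)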
@@ -367,17 +367,21 @@ class DB():
 #-------------------------------------------------------------------------------
 def get_table_as_json(self, sqlQuery):

-self.sql.execute(sqlQuery)
-columnNames = list(map(lambda x: x[0], self.sql.description))
+mylog('debug',[ '[Database] - get_table_as_json - Query: ', sqlQuery])
+try:
+self.sql.execute(sqlQuery)
+columnNames = list(map(lambda x: x[0], self.sql.description))
 rows = self.sql.fetchall()
+except sqlite3.Error as e:
+mylog('none',[ '[Database] - SQL ERROR: ', e])
+return None

 result = {"data":[]}

 for row in rows:
 tmp = row_to_json(columnNames, row)
 result["data"].append(tmp)

+mylog('debug',[ '[Database] - get_table_as_json - returning ', len(rows), " rows with columns: ", columnNames])
 return json_struc(result, columnNames)

 #-------------------------------------------------------------------------------
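The hunk above wraps the query execution in get_table_as_json in a try/except so a bad query is logged and the method returns None instead of raising. A standalone approximation of that guarded pattern, not the project's DB class:

import sqlite3

def get_table_as_json(cur, query):
    # mirror of the guarded pattern above: try the query, bail out with None on SQL errors
    try:
        cur.execute(query)
        column_names = [d[0] for d in cur.description]
        rows = cur.fetchall()
    except sqlite3.Error as e:
        print('[Database] - SQL ERROR:', e)
        return None
    return {"data": [dict(zip(column_names, row)) for row in rows]}

con = sqlite3.connect(":memory:")
con.execute("CREATE TABLE Devices (dev_MAC TEXT, dev_Name TEXT)")
con.execute("INSERT INTO Devices VALUES ('aa:bb:cc:dd:ee:ff', 'router')")
print(get_table_as_json(con.cursor(), "SELECT * FROM Devices"))
print(get_table_as_json(con.cursor(), "SELECT * FROM MissingTable"))  # returns None instead of raising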
@@ -387,15 +391,15 @@ class DB():
 """check the query and arguments are aligned and are read only"""
 mylog('debug',[ '[Database] - SELECT Query: ', query, " params: ", args])
 try:
 assert query.count('?') == len(args)
 assert query.upper().strip().startswith('SELECT')
 self.sql.execute(query, args)
 rows = self.sql.fetchall()
 return rows
 except AssertionError:
 mylog('none',[ '[Database] - ERROR: inconsistent query and/or arguments.', query, " params: ", args])
 except sqlite3.Error as e:
 mylog('none',[ '[Database] - SQL ERROR: ', e])
 return None

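This read-only query helper appears unchanged apart from its position in the file, but it shows the guard the commit relies on elsewhere (for example db.read in the Nmap hunk): the placeholder count must match the argument tuple and only SELECT statements are executed. A self-contained sketch of the same checks:

import sqlite3

def read(cur, query, args=()):
    # same guard as above: aligned placeholders and a SELECT-only code path
    try:
        assert query.count('?') == len(args)
        assert query.upper().strip().startswith('SELECT')
        cur.execute(query, args)
        return cur.fetchall()
    except AssertionError:
        print('[Database] - ERROR: inconsistent query and/or arguments.', query, args)
    except sqlite3.Error as e:
        print('[Database] - SQL ERROR:', e)
    return None

con = sqlite3.connect(":memory:")
con.execute("CREATE TABLE Events (eve_MAC TEXT)")
print(read(con.cursor(), "SELECT * FROM Events WHERE eve_MAC = ?", ("aa:bb",)))  # []
print(read(con.cursor(), "DELETE FROM Events"))                                  # rejected -> None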
@@ -8,27 +8,16 @@ from collections import namedtuple
 import conf
 from const import pluginsPath, logPath
 from logger import mylog
-from helper import timeNowTZ, updateState, get_file_content, write_file
+from helper import timeNow, updateState, get_file_content, write_file
 from api import update_api

-#-------------------------------------------------------------------------------
-# this is duplicated from helper to avoid circular reference !! TO-DO
-#-------------------------------------------------------------------------------
-def timeNow():
-return datetime.datetime.now().replace(microsecond=0)

 #-------------------------------------------------------------------------------
 def run_plugin_scripts(db, runType, plugins = conf.plugins):

-# global plugins, tz, mySchedules

 # Header
 updateState(db,"Run: Plugins")

-mylog('debug', [' [Plugins] Check if any plugins need to be executed on run type: ', runType])
+mylog('debug', ['[Plugins] Check if any plugins need to be executed on run type: ', runType])

 for plugin in plugins:

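The plugin.py hunk above drops the locally duplicated timeNow() (kept earlier to dodge a circular import) and imports the helper version instead. A small sketch of the single-definition arrangement, assuming the helper keeps the same second-resolution behaviour:

# helper.py - single definition of the time helper (sketch; names as in the diff)
import datetime

def timeNow():
    # second-resolution "now", shared by plugin.py and others instead of a local copy
    return datetime.datetime.now().replace(microsecond=0)

# plugin.py would then do:
#   from helper import timeNow
#   schd.last_run = timeNow()
print(timeNow())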
@@ -49,12 +38,12 @@ def run_plugin_scripts(db, runType, plugins = conf.plugins):
 shouldRun = schd.runScheduleCheck()
 if shouldRun:
 # note the last time the scheduled plugin run was executed
-schd.last_run = timeNowTZ()
+schd.last_run = timeNow()

 if shouldRun:

 print_plugin_info(plugin, ['display_name'])
-mylog('debug', [' [Plugins] CMD: ', get_plugin_setting(plugin, "CMD")["value"]])
+mylog('debug', ['[Plugins] CMD: ', get_plugin_setting(plugin, "CMD")["value"]])
 execute_plugin(db, plugin)

@@ -81,11 +70,11 @@ def get_plugins_configs():
 #-------------------------------------------------------------------------------
 def print_plugin_info(plugin, elements = ['display_name']):

-mylog('verbose', [' [Plugins] ---------------------------------------------'])
+mylog('verbose', ['[Plugins] ---------------------------------------------'])

 for el in elements:
 res = get_plugin_string(plugin, el)
-mylog('verbose', [' [Plugins] ', el ,': ', res])
+mylog('verbose', ['[Plugins] ', el ,': ', res])

 #-------------------------------------------------------------------------------
@@ -99,7 +88,7 @@ def get_plugin_setting(plugin, function_key):
 result = set

 if result == None:
-mylog('none', [' [Plugins] Setting with "function":"', function_key, '" is missing in plugin: ', get_plugin_string(plugin, 'display_name')])
+mylog('none', ['[Plugins] Setting with "function":"', function_key, '" is missing in plugin: ', get_plugin_string(plugin, 'display_name')])

 return result

@@ -162,7 +151,7 @@ def execute_plugin(db, plugin):
 else:
 set_RUN_TIMEOUT = set["value"]

-mylog('debug', [' [Plugins] Timeout: ', set_RUN_TIMEOUT])
+mylog('debug', ['[Plugins] Timeout: ', set_RUN_TIMEOUT])

 # Prepare custom params
 params = []
@@ -183,7 +172,7 @@ def execute_plugin(db, plugin):
 resolved = flatten_array(db.get_sql_array(param["value"]))

 if resolved == None:
-mylog('none', [' [Plugins] The parameter "name":"', param["name"], '" was resolved as None'])
+mylog('none', ['[Plugins] The parameter "name":"', param["name"], '" was resolved as None'])

 else:
 params.append( [param["name"], resolved] )
@@ -199,8 +188,8 @@ def execute_plugin(db, plugin):
 command = resolve_wildcards_arr(set_CMD.split(), params)

 # Execute command
-mylog('verbose', [' [Plugins] Executing: ', set_CMD])
-mylog('debug', [' [Plugins] Resolved : ', command])
+mylog('verbose', ['[Plugins] Executing: ', set_CMD])
+mylog('debug', ['[Plugins] Resolved : ', command])

 try:
 # try runnning a subprocess with a forced timeout in case the subprocess hangs
@@ -208,9 +197,9 @@ def execute_plugin(db, plugin):
 except subprocess.CalledProcessError as e:
 # An error occured, handle it
 mylog('none', [e.output])
-mylog('none', [' [Plugins] Error - enable LOG_LEVEL=debug and check logs'])
+mylog('none', ['[Plugins] Error - enable LOG_LEVEL=debug and check logs'])
 except subprocess.TimeoutExpired as timeErr:
-mylog('none', [' [Plugins] TIMEOUT - the process forcefully terminated as timeout reached'])
+mylog('none', ['[Plugins] TIMEOUT - the process forcefully terminated as timeout reached'])

 # check the last run output
@@ -231,7 +220,7 @@ def execute_plugin(db, plugin):
 if len(columns) == 9:
 sqlParams.append((plugin["unique_prefix"], columns[0], columns[1], 'null', columns[2], columns[3], columns[4], columns[5], columns[6], 0, columns[7], 'null', columns[8]))
 else:
-mylog('none', [' [Plugins]: Skipped invalid line in the output: ', line])
+mylog('none', ['[Plugins]: Skipped invalid line in the output: ', line])

 # pialert-db-query
 if plugin['data_source'] == 'pialert-db-query':
@@ -239,7 +228,7 @@ def execute_plugin(db, plugin):
 q = set_CMD.replace("{s-quote}", '\'')

 # Execute command
-mylog('verbose', [' [Plugins] Executing: ', q])
+mylog('verbose', ['[Plugins] Executing: ', q])

 # set_CMD should contain a SQL query
 arr = db.get_sql_array (q)
@@ -249,15 +238,15 @@ def execute_plugin(db, plugin):
 if len(row) == 9 and (row[0] in ['','null']) == False :
 sqlParams.append((plugin["unique_prefix"], row[0], handle_empty(row[1]), 'null', row[2], row[3], row[4], handle_empty(row[5]), handle_empty(row[6]), 0, row[7], 'null', row[8]))
 else:
-mylog('none', [' [Plugins]: Skipped invalid sql result'])
+mylog('none', ['[Plugins]: Skipped invalid sql result'])

 # check if the subprocess / SQL query failed / there was no valid output
 if len(sqlParams) == 0:
-mylog('none', [' [Plugins] No output received from the plugin ', plugin["unique_prefix"], ' - enable LOG_LEVEL=debug and check logs'])
+mylog('none', ['[Plugins] No output received from the plugin ', plugin["unique_prefix"], ' - enable LOG_LEVEL=debug and check logs'])
 return
 else:
-mylog('verbose', ['[', timeNow(), '] [Plugins]: SUCCESS, received ', len(sqlParams), ' entries'])
+mylog('verbose', ['[Plugins]: SUCCESS, received ', len(sqlParams), ' entries'])

 # process results if any
 if len(sqlParams) > 0:
@@ -350,11 +339,11 @@ def flatten_array(arr):
 # Replace {wildcars} with parameters
 def resolve_wildcards_arr(commandArr, params):

-mylog('debug', [' [Plugins]: Pre-Resolved CMD: '] + commandArr)
+mylog('debug', ['[Plugins]: Pre-Resolved CMD: '] + commandArr)

 for param in params:
-# mylog('debug', [' [Plugins]: key : {', param[0], '}'])
-# mylog('debug', [' [Plugins]: resolved: ', param[1]])
+# mylog('debug', ['[Plugins]: key : {', param[0], '}'])
+# mylog('debug', ['[Plugins]: resolved: ', param[1]])

 i = 0

@@ -391,7 +380,7 @@ def process_plugin_events(db, plugin):

 pluginPref = plugin["unique_prefix"]

-mylog('debug', [' [Plugins] Processing : ', pluginPref])
+mylog('debug', ['[Plugins] Processing : ', pluginPref])

 plugObjectsArr = db.get_sql_array ("SELECT * FROM Plugins_Objects where Plugin = '" + str(pluginPref)+"'")
 plugEventsArr = db.get_sql_array ("SELECT * FROM Plugins_Events where Plugin = '" + str(pluginPref)+"'")
@@ -404,8 +393,8 @@ def process_plugin_events(db, plugin):

 existingPluginObjectsCount = len(pluginObjects)

-mylog('debug', [' [Plugins] Existing objects : ', existingPluginObjectsCount])
-mylog('debug', [' [Plugins] New and existing events : ', len(plugEventsArr)])
+mylog('debug', ['[Plugins] Existing objects : ', existingPluginObjectsCount])
+mylog('debug', ['[Plugins] New and existing events : ', len(plugEventsArr)])

 # set status as new - will be changed later if conditions are fulfilled, e.g. entry found
 for eve in plugEventsArr:
@@ -420,7 +409,7 @@ def process_plugin_events(db, plugin):

 # compare hash of the IDs for uniqueness
 if any(x.idsHash == tmpObject.idsHash for x in pluginObjects):
-mylog('debug', [' [Plugins] Found existing object'])
+mylog('debug', ['[Plugins] Found existing object'])
 pluginEvents[index].status = "exists"
 index += 1

@@ -488,7 +477,7 @@ def process_plugin_events(db, plugin):

 dbTable = plugin['mapped_to_table']

-mylog('debug', [' [Plugins] Mapping objects to database table: ', dbTable])
+mylog('debug', ['[Plugins] Mapping objects to database table: ', dbTable])

 # collect all columns to be mapped
 mappedCols = []
@@ -542,7 +531,7 @@ def process_plugin_events(db, plugin):

 q = f'INSERT into {dbTable} ({columnsStr}) VALUES ({valuesStr})'

-mylog('debug', [' [Plugins] SQL query for mapping: ', q ])
+mylog('debug', ['[Plugins] SQL query for mapping: ', q ])

 sql.executemany (q, sqlParams)

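Most of the plugin.py hunks above only normalise the '[Plugins]' log prefix, but they sit around execute_plugin's subprocess call, which runs a plugin command with a forced timeout so a hung script cannot stall the scan loop. A standalone sketch of that guarded call (the echo command is only a placeholder):

import subprocess

def run_plugin_command(command, timeout_sec):
    # sketch of the pattern in execute_plugin: capture output, and treat a hung
    # process as a timeout rather than blocking the whole run
    try:
        return subprocess.check_output(command, universal_newlines=True,
                                       stderr=subprocess.STDOUT, timeout=timeout_sec)
    except subprocess.CalledProcessError as e:
        print('[Plugins] Error - enable LOG_LEVEL=debug and check logs', e.output)
    except subprocess.TimeoutExpired:
        print('[Plugins] TIMEOUT - the process forcefully terminated as timeout reached')
    return ""

print(run_plugin_command(["echo", "PLUGIN OK"], timeout_sec=10))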
@@ -5,4 +5,4 @@ each publisher exposes:

 - check_config () returning True / False
 - send (message) returning True / Fasle
 """
@@ -1,3 +1,5 @@
+""" Pi.Alert module to send notification emails """

 from email.mime.multipart import MIMEMultipart
 from email.mime.text import MIMEText
 import smtplib
@@ -20,7 +22,7 @@ def send (msg: noti_struc):

 pText = msg.text
 pHTML = msg.html

 mylog('debug', '[Send Email] REPORT_TO: ' + hide_email(str(conf.REPORT_TO)) + ' SMTP_USER: ' + hide_email(str(conf.SMTP_USER)))

 # Compose email
@@ -14,10 +14,6 @@ from helper import bytes_to_string, sanitize_string
 # MQTT
 #-------------------------------------------------------------------------------

-mqtt_connected_to_broker = False
-mqtt_sensors = []

 #-------------------------------------------------------------------------------
 def check_config():
 if conf.MQTT_BROKER == '' or conf.MQTT_PORT == '' or conf.MQTT_USER == '' or conf.MQTT_PASSWORD == '':
@@ -76,11 +72,9 @@ def create_sensor(client, deviceId, deviceName, sensorType, sensorName, icon):
 new_sensor_config = sensor_config(deviceId, deviceName, sensorType, sensorName, icon)

 # check if config already in list and if not, add it, otherwise skip
-global mqtt_sensors, uniqueSensorCount

 is_unique = True

-for sensor in mqtt_sensors:
+for sensor in conf.mqtt_sensors:
 if sensor.hash == new_sensor_config.hash:
 is_unique = False
 break
@@ -93,9 +87,7 @@ def create_sensor(client, deviceId, deviceName, sensorType, sensorName, icon):

 #-------------------------------------------------------------------------------
 def publish_sensor(client, sensorConf):

-global mqtt_sensors

 message = '{ \
 "name":"'+ sensorConf.deviceName +' '+sensorConf.sensorName+'", \
@@ -118,26 +110,24 @@ def publish_sensor(client, sensorConf):
 # hack - delay adding to the queue in case the process is
 time.sleep(conf.MQTT_DELAY_SEC) # restarted and previous publish processes aborted
 # (it takes ~2s to update a sensor config on the broker)
-mqtt_sensors.append(sensorConf)
+conf.mqtt_sensors.append(sensorConf)

 #-------------------------------------------------------------------------------
 def mqtt_create_client():
 def on_disconnect(client, userdata, rc):
-global mqtt_connected_to_broker
-mqtt_connected_to_broker = False
+conf.mqtt_connected_to_broker = False

 # not sure is below line is correct / necessary
 # client = mqtt_create_client()

 def on_connect(client, userdata, flags, rc):
-global mqtt_connected_to_broker

 if rc == 0:
 mylog('verbose', [" Connected to broker"])
-mqtt_connected_to_broker = True # Signal connection
+conf.mqtt_connected_to_broker = True # Signal connection
 else:
 mylog('none', [" Connection failed"])
-mqtt_connected_to_broker = False
+conf.mqtt_connected_to_broker = False

 client = mqtt_client.Client('PiAlert') # Set Connecting Client ID
@@ -150,12 +140,12 @@ def mqtt_create_client():
 return client

 #-------------------------------------------------------------------------------
-def mqtt_start():
+def mqtt_start(db):

-global client, mqtt_connected_to_broker
+#global client

-if mqtt_connected_to_broker == False:
-mqtt_connected_to_broker = True
+if conf.mqtt_connected_to_broker == False:
+conf.mqtt_connected_to_broker = True
 client = mqtt_create_client()

 # General stats
@@ -164,7 +154,7 @@ def mqtt_start():
 create_generic_device(client)

 # Get the data
-row = get_device_stats()
+row = get_device_stats(db)

 columns = ["online","down","all","archived","new","unknown"]

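The MQTT hunks above replace the module-level globals with conf-backed state and pass the db handle into mqtt_start so it can fetch device stats itself. A rough, dependency-free sketch of that flow; SimpleNamespace and the stub functions below stand in for conf, the paho-mqtt client and get_device_stats:

import types

conf = types.SimpleNamespace(mqtt_connected_to_broker=False)

def mqtt_create_client():
    # stand-in for the paho-mqtt client setup done in mqtt_create_client()
    return object()

def get_device_stats(db):
    # stand-in: the real helper reads online/down/new/... counts from the DB
    return {"online": 3, "down": 1}

def mqtt_start(db):
    # connect only once per process, then publish stats gathered via the passed-in db handle
    if conf.mqtt_connected_to_broker == False:
        conf.mqtt_connected_to_broker = True
        client = mqtt_create_client()
    row = get_device_stats(db)
    print("would publish", row)

mqtt_start(db=None)  # db would be the application's DB object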
@@ -3,7 +3,8 @@ import conf
 import requests
 from base64 import b64encode

-from logger import mylog, noti_struc
+from logger import mylog
+from helper import noti_struc

 #-------------------------------------------------------------------------------
 def check_config():
@@ -15,7 +16,7 @@ def check_config():

 #-------------------------------------------------------------------------------
 def send (msg: noti_struc):
-_Text = msg.html
 headers = {
 "Title": "Pi.Alert Notification",
 "Actions": "view, Open Dashboard, "+ conf.REPORT_DASHBOARD_URL,
@@ -32,5 +33,5 @@ def send (msg: noti_struc):
 headers["Authorization"] = "Basic {}".format(basichash)

 requests.post("{}/{}".format( conf.NTFY_HOST, conf.NTFY_TOPIC),
-data=_Text,
+data=msg.html,
 headers=headers)
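The ntfy hunks above import noti_struc from helper instead of logger and post msg.html directly rather than via a temporary variable. A condensed sketch of the same POST shape, with purely illustrative host and topic values:

import requests
from base64 import b64encode

def send_ntfy(host, topic, html_body, user="", password=""):
    # same shape as the publisher's send(): one POST to <host>/<topic> with the
    # notification body as data and ntfy headers, plus optional Basic auth
    headers = {"Title": "Pi.Alert Notification"}
    if user and password:
        basichash = b64encode(f"{user}:{password}".encode()).decode()
        headers["Authorization"] = "Basic {}".format(basichash)
    return requests.post("{}/{}".format(host, topic), data=html_body, headers=headers)

# hypothetical endpoint values, for illustration only:
# send_ntfy("https://ntfy.sh", "my-topic", "<b>device down</b>")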
@@ -16,7 +16,7 @@ def check_config():

 #-------------------------------------------------------------------------------

-def send_webhook (msg: noti_struc):
+def send (msg: noti_struc):

 # use data type based on specified payload type
 if conf.WEBHOOK_PAYLOAD == 'json':
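Renaming send_webhook to send brings the webhook publisher in line with the contract stated in the publishers docstring earlier in this diff: every publisher exposes check_config() and send(message). A small sketch of why that uniformity helps (DummyPublisher stands in for a real publisher module):

class DummyPublisher:
    # stands in for a module like publishers.webhook or publishers.ntfy
    @staticmethod
    def check_config():
        return True
    @staticmethod
    def send(msg):
        print("sending:", msg)
        return True

def dispatch(publishers, msg):
    # the reporting code can drive any publisher generically
    for pub in publishers:
        if pub.check_config():   # only publishers with valid settings run
            pub.send(msg)

dispatch([DummyPublisher], "Ports changed! Check PiAlert for details!")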
@@ -28,6 +28,8 @@ from publishers.pushsafer import (check_config as pushsafer_check_config,
 send as send_pushsafer)
 from publishers.mqtt import (check_config as mqtt_check_config,
 mqtt_start )

 #===============================================================================
 # REPORTING
 #===============================================================================
@@ -119,11 +121,13 @@ def send_notifications (db, INCLUDED_SECTIONS = conf.INCLUDED_SECTIONS):
 )""")

 # Open text Template
+mylog('verbose', ['[Notification] Open text Template'])
 template_file = open(pialertPath + '/back/report_template.txt', 'r')
 mail_text = template_file.read()
 template_file.close()

 # Open html Template
+mylog('verbose', ['[Notification] Open html Template'])
 template_file = open(pialertPath + '/back/report_template.html', 'r')
 if conf.newVersionAvailable :
 template_file = open(pialertPath + '/back/report_template_new_version.html', 'r')
@@ -139,6 +143,7 @@ def send_notifications (db, INCLUDED_SECTIONS = conf.INCLUDED_SECTIONS):
 mail_text = mail_text.replace ('<SERVER_NAME>', socket.gethostname() )
 mail_html = mail_html.replace ('<SERVER_NAME>', socket.gethostname() )

+mylog('verbose', ['[Notification] included sections: ',INCLUDED_SECTIONS])
 if 'internet' in INCLUDED_SECTIONS:
 # Compose Internet Section
 sqlQuery = """SELECT eve_MAC as MAC, eve_IP as IP, eve_DateTime as Datetime, eve_EventType as "Event Type", eve_AdditionalInfo as "More info" FROM Events
@@ -152,6 +157,7 @@ def send_notifications (db, INCLUDED_SECTIONS = conf.INCLUDED_SECTIONS):

 mail_text = mail_text.replace ('<SECTION_INTERNET>', notiStruc.text + '\n')
 mail_html = mail_html.replace ('<INTERNET_TABLE>', notiStruc.html)
+mylog('verbose', ['[Notification] Internet sections done.'])

 if 'new_devices' in INCLUDED_SECTIONS:
 # Compose New Devices Section
@@ -167,6 +173,7 @@ def send_notifications (db, INCLUDED_SECTIONS = conf.INCLUDED_SECTIONS):

 mail_text = mail_text.replace ('<SECTION_NEW_DEVICES>', notiStruc.text + '\n')
 mail_html = mail_html.replace ('<NEW_DEVICES_TABLE>', notiStruc.html)
+mylog('verbose', ['[Notification] New Devices sections done.'])

 if 'down_devices' in INCLUDED_SECTIONS:
 # Compose Devices Down Section
@@ -182,6 +189,7 @@ def send_notifications (db, INCLUDED_SECTIONS = conf.INCLUDED_SECTIONS):

 mail_text = mail_text.replace ('<SECTION_DEVICES_DOWN>', notiStruc.text + '\n')
 mail_html = mail_html.replace ('<DOWN_DEVICES_TABLE>', notiStruc.html)
+mylog('verbose', ['[Notification] Down Devices sections done.'])

 if 'events' in INCLUDED_SECTIONS:
 # Compose Events Section
@@ -198,21 +206,24 @@ def send_notifications (db, INCLUDED_SECTIONS = conf.INCLUDED_SECTIONS):

 mail_text = mail_text.replace ('<SECTION_EVENTS>', notiStruc.text + '\n')
 mail_html = mail_html.replace ('<EVENTS_TABLE>', notiStruc.html)
+mylog('verbose', ['[Notification] Events sections done.'])

 if 'ports' in INCLUDED_SECTIONS:
 # collect "ports" for the webhook json
-if changedPorts_json_struc is not None:
-json_ports = changedPorts_json_struc.json["data"]
+mylog('verbose', ['[Notification] Ports: conf.changedPorts_json_struc:', conf.changedPorts_json_struc])
+if conf.changedPorts_json_struc is not None:
+json_ports = conf.changedPorts_json_struc.json["data"]

-notiStruc = construct_notifications(db, "", "Ports", True, changedPorts_json_struc)
+notiStruc = construct_notifications(db, "", "Ports", True, conf.changedPorts_json_struc)
+mylog('verbose', ['[Notification] Ports: notiStruc:', notiStruc ])
 mail_html = mail_html.replace ('<PORTS_TABLE>', notiStruc.html)

 portsTxt = ""
-if changedPorts_json_struc is not None:
+if conf.changedPorts_json_struc is not None:
 portsTxt = "Ports \n---------\n Ports changed! Check PiAlert for details!\n"

 mail_text = mail_text.replace ('<PORTS_TABLE>', portsTxt )
+mylog('verbose', ['[Notification] Ports sections done.'])

 if 'plugins' in INCLUDED_SECTIONS and conf.ENABLE_PLUGINS:
 # Compose Plugins Section
@@ -228,7 +239,7 @@ def send_notifications (db, INCLUDED_SECTIONS = conf.INCLUDED_SECTIONS):

 # check if we need to report something
 plugins_report = len(json_plugins) > 0
+mylog('verbose', ['[Notification] Plugins sections done.'])

 json_final = {
 "internet": json_internet,
@@ -293,7 +304,7 @@ def send_notifications (db, INCLUDED_SECTIONS = conf.INCLUDED_SECTIONS):
 if conf.REPORT_MQTT and check_config('mqtt'):
 updateState(db,"Send: MQTT")
 mylog('info', ['[Notification] Establishing MQTT thread'])
-mqtt_start()
+mqtt_start(db)
 else :
 mylog('verbose', ['[Notification] Skip MQTT'])
 else :
@@ -310,7 +321,7 @@ def send_notifications (db, INCLUDED_SECTIONS = conf.INCLUDED_SECTIONS):
 # clear plugin events
 sql.execute ("DELETE FROM Plugins_Events")

-changedPorts_json_struc = None
+conf.changedPorts_json_struc = None

 # DEBUG - print number of rows updated
 mylog('info', ['[Notification] Notifications changes: ', sql.rowcount])
@@ -423,7 +434,7 @@ def skip_repeated_notifications (db):

 # Skip repeated notifications
 # due strfime : Overflow --> use "strftime / 60"
-mylog('verbose','[Skip Repeated Notifications] Skip Repeated')
+mylog('verbose','[Skip Repeated Notifications] Skip Repeated start')
 db.sql.execute ("""UPDATE Events SET eve_PendingAlertEmail = 0
 WHERE eve_PendingAlertEmail = 1 AND eve_MAC IN
 (
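Across the notify.py hunks above, the changed-ports structure is now read from and reset on conf, so the Nmap scanner and the notification builder exchange it without importing each other. A minimal sketch of that handoff, using a plain dict where the project uses its json_struc object:

import types

conf = types.SimpleNamespace(changedPorts_json_struc=None)

def record_changed_ports(changed_rows):
    # scanner side: publish the latest port changes for the next notification run
    conf.changedPorts_json_struc = {"data": changed_rows}

def build_ports_section():
    # notification side: consume the shared structure, then clear it
    if conf.changedPorts_json_struc is not None:
        section = "Ports changed! Check PiAlert for details!"
    else:
        section = ""
    conf.changedPorts_json_struc = None
    return section

record_changed_ports([{"MAC": "aa:bb:cc:dd:ee:ff", "Port": 22, "State": "open"}])
print(build_ports_section())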
@@ -24,12 +24,10 @@ class nmap_entry:

 #-------------------------------------------------------------------------------
 def performNmapScan(db, devicesToScan):
-sql = db.sql # TO-DO
-global changedPorts_json_struc
-changedPortsTmp = []
+"""
+run nmap scan on a list of devices
+discovers open ports and keeps track existing and new open ports
+"""

 if len(devicesToScan) > 0:

 timeoutSec = conf.NMAP_TIMEOUT
@@ -38,9 +36,8 @@ def performNmapScan(db, devicesToScan):

 updateState(db,"Scan: Nmap")

-mylog('verbose', ['[', timeNow(), '] Scan: Nmap for max ', str(timeoutSec), 's ('+ str(round(int(timeoutSec) / 60, 1)) +'min) per device'])
-mylog('verbose', [" Estimated max delay: ", (devTotal * int(timeoutSec)), 's ', '(', round((devTotal * int(timeoutSec))/60,1) , 'min)' ])
+mylog('verbose', ['[NMAP Scan] Scan: Nmap for max ', str(timeoutSec), 's ('+ str(round(int(timeoutSec) / 60, 1)) +'min) per device'])
+mylog('verbose', ["[NMAP Scan] Estimated max delay: ", (devTotal * int(timeoutSec)), 's ', '(', round((devTotal * int(timeoutSec))/60,1) , 'min)' ])

 devIndex = 0
 for device in devicesToScan:
@@ -56,15 +53,15 @@ def performNmapScan(db, devicesToScan):
 output = subprocess.check_output (nmapArgs, universal_newlines=True, stderr=subprocess.STDOUT, timeout=(timeoutSec + 30))
 except subprocess.CalledProcessError as e:
 # An error occured, handle it
-mylog('none', [e.output])
-mylog('none', [" Error - Nmap Scan - check logs", progress])
+mylog('none', ["[NMAP Scan] " ,e.output])
+mylog('none', ["[NMAP Scan] Error - Nmap Scan - check logs", progress])
 except subprocess.TimeoutExpired as timeErr:
-mylog('verbose', [' Nmap TIMEOUT - the process forcefully terminated as timeout reached for ', device["dev_LastIP"], progress])
+mylog('verbose', ['[NMAP Scan] Nmap TIMEOUT - the process forcefully terminated as timeout reached for ', device["dev_LastIP"], progress])

 if output == "": # check if the subprocess failed
-mylog('info', ['[', timeNow(), '] Scan: Nmap FAIL for ', device["dev_LastIP"], progress ,' check logs for details'])
+mylog('info', ['[NMAP Scan] Nmap FAIL for ', device["dev_LastIP"], progress ,' check logs for details'])
 else:
-mylog('verbose', ['[', timeNow(), '] Scan: Nmap SUCCESS for ', device["dev_LastIP"], progress])
+mylog('verbose', ['[NMAP Scan] Nmap SUCCESS for ', device["dev_LastIP"], progress])

 devIndex += 1

@@ -94,110 +91,121 @@ def performNmapScan(db, devicesToScan):
 elif 'Nmap done' in line:
 duration = line.split('scanned in ')[1]
 index += 1
-
-# previous Nmap Entries
-oldEntries = []
-
-mylog('verbose', ['[', timeNow(), '] Scan: Ports found by NMAP: ', len(newEntriesTmp)])
-
-if len(newEntriesTmp) > 0:
-
-# get all current NMAP ports from the DB
-sql.execute(sql_nmap_scan_all)
-rows = sql.fetchall()
-
-for row in rows:
-# only collect entries matching the current MAC address
-if row["MAC"] == device["dev_MAC"]:
-oldEntries.append(nmap_entry(row["MAC"], row["Time"], row["Port"], row["State"], row["Service"], device["dev_Name"], row["Extra"], row["Index"]))
-
-newEntries = []
-
-# Collect all entries that don't match the ones in the DB
-for newTmpEntry in newEntriesTmp:
-
-found = False
-
-# Check the new entry is already available in oldEntries and remove from processing if yes
-for oldEntry in oldEntries:
-if newTmpEntry.hash == oldEntry.hash:
-found = True
-
-if not found:
-newEntries.append(newTmpEntry)
-
-mylog('verbose', ['[', timeNow(), '] Scan: Nmap newly discovered or changed ports: ', len(newEntries)])
-
-# collect new ports, find the corresponding old entry and return for notification purposes
-# also update the DB with the new values after deleting the old ones
-if len(newEntries) > 0:
-
-# params to build the SQL query
-params = []
-indexesToDelete = ""
-
-# Find old entry matching the new entry hash
-for newEntry in newEntries:
-
-foundEntry = None
-
-for oldEntry in oldEntries:
-if oldEntry.hash == newEntry.hash:
-indexesToDelete = indexesToDelete + str(oldEntry.index) + ','
-foundEntry = oldEntry
-
-columnNames = ["Name", "MAC", "Port", "State", "Service", "Extra", "NewOrOld" ]
-
-# Old entry found
-if foundEntry is not None:
-# Build params for sql query
-params.append((newEntry.mac, newEntry.time, newEntry.port, newEntry.state, newEntry.service, oldEntry.extra))
-# Build JSON for API and notifications
-changedPortsTmp.append({
-"Name" : foundEntry.name,
-"MAC" : newEntry.mac,
-"Port" : newEntry.port,
-"State" : newEntry.state,
-"Service" : newEntry.service,
-"Extra" : foundEntry.extra,
-"NewOrOld" : "New values"
-})
-changedPortsTmp.append({
-"Name" : foundEntry.name,
-"MAC" : foundEntry.mac,
-"Port" : foundEntry.port,
-"State" : foundEntry.state,
-"Service" : foundEntry.service,
-"Extra" : foundEntry.extra,
-"NewOrOld" : "Old values"
-})
-# New entry - no matching Old entry found
-else:
-# Build params for sql query
-params.append((newEntry.mac, newEntry.time, newEntry.port, newEntry.state, newEntry.service, ''))
-# Build JSON for API and notifications
-changedPortsTmp.append({
-"Name" : "New device",
-"MAC" : newEntry.mac,
-"Port" : newEntry.port,
-"State" : newEntry.state,
-"Service" : newEntry.service,
-"Extra" : "",
-"NewOrOld" : "New device"
-})
-
-changedPorts_json_struc = json_struc({ "data" : changedPortsTmp}, columnNames)
-
-# Delete old entries if available
-if len(indexesToDelete) > 0:
-sql.execute ("DELETE FROM Nmap_Scan where \"Index\" in (" + indexesToDelete[:-1] +")")
-db.commitDB()
-
-# Insert new values into the DB
-sql.executemany ("""INSERT INTO Nmap_Scan ("MAC", "Time", "Port", "State", "Service", "Extra") VALUES (?, ?, ?, ?, ?, ?)""", params)
-db.commitDB()
+mylog('verbose', ['[NMAP Scan] Ports found by NMAP: ', len(newEntriesTmp)])
+process_discovered_ports(db, device, newEntriesTmp)
+#end for loop
+
+def process_discovered_ports(db, device, discoveredPorts):
+"""
+process ports discovered by nmap
+compare to previosu ports
+update DB
+raise notifications
+"""
+sql = db.sql # TO-DO
+# previous Nmap Entries
+oldEntries = []
+changedPortsTmp = []
+
+mylog('verbose', ['[NMAP Scan] Process ports found by NMAP: ', len(discoveredPorts)])
+
+if len(discoveredPorts) > 0:
+
+# get all current NMAP ports from the DB
+rows = db.read(sql_nmap_scan_all)
+
+for row in rows:
+# only collect entries matching the current MAC address
+if row["MAC"] == device["dev_MAC"]:
+oldEntries.append(nmap_entry(row["MAC"], row["Time"], row["Port"], row["State"], row["Service"], device["dev_Name"], row["Extra"], row["Index"]))
+
+newEntries = []
+
+# Collect all entries that don't match the ones in the DB
+for discoveredPort in discoveredPorts:
+
+found = False
+
+# Check the new entry is already available in oldEntries and remove from processing if yes
+for oldEntry in oldEntries:
+if discoveredPort.hash == oldEntry.hash:
+found = True
+
+if not found:
+newEntries.append(discoveredPort)
+
+mylog('verbose', ['[NMAP Scan] Nmap newly discovered or changed ports: ', len(newEntries)])
+
+# collect new ports, find the corresponding old entry and return for notification purposes
+# also update the DB with the new values after deleting the old ones
+if len(newEntries) > 0:
+
+# params to build the SQL query
+params = []
+indexesToDelete = ""
+
+# Find old entry matching the new entry hash
+for newEntry in newEntries:
+
+foundEntry = None
+
+for oldEntry in oldEntries:
+if oldEntry.hash == newEntry.hash:
+indexesToDelete = indexesToDelete + str(oldEntry.index) + ','
+foundEntry = oldEntry
+
+columnNames = ["Name", "MAC", "Port", "State", "Service", "Extra", "NewOrOld" ]
+
+# Old entry found
+if foundEntry is not None:
+# Build params for sql query
+params.append((newEntry.mac, newEntry.time, newEntry.port, newEntry.state, newEntry.service, oldEntry.extra))
+# Build JSON for API and notifications
+changedPortsTmp.append({
+"Name" : foundEntry.name,
+"MAC" : newEntry.mac,
+"Port" : newEntry.port,
+"State" : newEntry.state,
+"Service" : newEntry.service,
+"Extra" : foundEntry.extra,
+"NewOrOld" : "New values"
+})
+changedPortsTmp.append({
+"Name" : foundEntry.name,
+"MAC" : foundEntry.mac,
+"Port" : foundEntry.port,
+"State" : foundEntry.state,
+"Service" : foundEntry.service,
+"Extra" : foundEntry.extra,
+"NewOrOld" : "Old values"
+})
+# New entry - no matching Old entry found
+else:
+# Build params for sql query
+params.append((newEntry.mac, newEntry.time, newEntry.port, newEntry.state, newEntry.service, ''))
+# Build JSON for API and notifications
+changedPortsTmp.append({
+"Name" : "New device",
+"MAC" : newEntry.mac,
+"Port" : newEntry.port,
+"State" : newEntry.state,
+"Service" : newEntry.service,
+"Extra" : "",
+"NewOrOld" : "New device"
+})
+
+conf.changedPorts_json_struc = json_struc({ "data" : changedPortsTmp}, columnNames)
+
+# Delete old entries if available
+if len(indexesToDelete) > 0:
+sql.execute ("DELETE FROM Nmap_Scan where \"Index\" in (" + indexesToDelete[:-1] +")")
+db.commitDB()
+
+# Insert new values into the DB
+sql.executemany ("""INSERT INTO Nmap_Scan ("MAC", "Time", "Port", "State", "Service", "Extra") VALUES (?, ?, ?, ?, ?, ?)""", params)
+db.commitDB()
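The final hunk pulls the port bookkeeping out of performNmapScan into a new process_discovered_ports(db, device, discoveredPorts) that compares scan results with the stored entries, records old and new values for notifications, and rewrites the Nmap_Scan rows. A much-simplified, standalone sketch of that diffing step (plain dicts instead of nmap_entry objects and the database):

def process_discovered_ports(stored, discovered):
    # index stored rows by (MAC, Port); the real code matches entries by hash
    stored_by_key = {(e["MAC"], e["Port"]): e for e in stored}
    changed, to_delete, to_insert = [], [], []
    for port in discovered:
        old = stored_by_key.get((port["MAC"], port["Port"]))
        if old == port:
            continue                      # unchanged: nothing to do
        if old is not None:
            to_delete.append(old)         # superseded row is removed before re-insert
            changed.append({**port, "NewOrOld": "New values"})
            changed.append({**old, "NewOrOld": "Old values"})
        else:
            changed.append({**port, "NewOrOld": "New device"})
        to_insert.append(port)
    return changed, to_delete, to_insert

stored = [{"MAC": "aa:bb", "Port": 22, "State": "open"}]
discovered = [{"MAC": "aa:bb", "Port": 22, "State": "closed"},
              {"MAC": "aa:bb", "Port": 80, "State": "open"}]
print(process_discovered_ports(stored, discovered))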