BE: linting fixes

Signed-off-by: jokob-sk <jokob.sk@gmail.com>
This commit is contained in:
jokob-sk
2025-11-22 13:14:06 +11:00
parent f0abd500d9
commit 5c14b34a8b
104 changed files with 2163 additions and 2199 deletions

View File

@@ -1,6 +1,6 @@
import json import json
import os import os
import sys
def merge_translations(main_file, other_files): def merge_translations(main_file, other_files):
# Load main file # Load main file
@@ -30,10 +30,14 @@ def merge_translations(main_file, other_files):
json.dump(data, f, indent=4, ensure_ascii=False) json.dump(data, f, indent=4, ensure_ascii=False)
f.truncate() f.truncate()
if __name__ == "__main__": if __name__ == "__main__":
current_path = os.path.dirname(os.path.abspath(__file__)) current_path = os.path.dirname(os.path.abspath(__file__))
# language codes can be found here: http://www.lingoes.net/en/translator/langcode.htm # language codes can be found here: http://www.lingoes.net/en/translator/langcode.htm
# "en_us.json" has to be first! # "en_us.json" has to be first!
json_files = [ "en_us.json", "ar_ar.json", "ca_ca.json", "cs_cz.json", "de_de.json", "es_es.json", "fa_fa.json", "fr_fr.json", "it_it.json", "ja_jp.json", "nb_no.json", "pl_pl.json", "pt_br.json", "pt_pt.json", "ru_ru.json", "sv_sv.json", "tr_tr.json", "uk_ua.json", "zh_cn.json"] json_files = ["en_us.json", "ar_ar.json", "ca_ca.json", "cs_cz.json", "de_de.json",
"es_es.json", "fa_fa.json", "fr_fr.json", "it_it.json", "ja_jp.json",
"nb_no.json", "pl_pl.json", "pt_br.json", "pt_pt.json", "ru_ru.json",
"sv_sv.json", "tr_tr.json", "uk_ua.json", "zh_cn.json"]
file_paths = [os.path.join(current_path, file) for file in json_files] file_paths = [os.path.join(current_path, file) for file in json_files]
merge_translations(file_paths[0], file_paths[1:]) merge_translations(file_paths[0], file_paths[1:])

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python # !/usr/bin/env python
import os import os
import sys import sys
@@ -8,12 +8,12 @@ from pytz import timezone
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app') INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"]) sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from const import logPath from const import logPath # noqa: E402, E261 [flake8 lint suppression]
from plugin_helper import Plugin_Objects from plugin_helper import Plugin_Objects # noqa: E402, E261 [flake8 lint suppression]
from logger import mylog, Logger from logger import mylog, Logger # noqa: E402, E261 [flake8 lint suppression]
from helper import get_setting_value from helper import get_setting_value # noqa: E402, E261 [flake8 lint suppression]
import conf import conf # noqa: E402, E261 [flake8 lint suppression]
# Make sure the TIMEZONE for logging is correct # Make sure the TIMEZONE for logging is correct
conf.tz = timezone(get_setting_value('TIMEZONE')) conf.tz = timezone(get_setting_value('TIMEZONE'))
@@ -32,9 +32,8 @@ RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
plugin_objects = Plugin_Objects(RESULT_FILE) plugin_objects = Plugin_Objects(RESULT_FILE)
def main(): def main():
mylog('verbose', [f'[{pluginName}] In script']) mylog('verbose', [f'[{pluginName}] In script'])
# Retrieve configuration settings # Retrieve configuration settings
some_setting = get_setting_value('SYNC_plugins') some_setting = get_setting_value('SYNC_plugins')
@@ -47,14 +46,14 @@ def main():
# Process the data into native application tables # Process the data into native application tables
if len(device_data) > 0: if len(device_data) > 0:
# insert devices into the last_result.log # insert devices into the last_result.log
# make sure the below mapping is mapped in config.json, for example: # make sure the below mapping is mapped in config.json, for example:
# "database_column_definitions": [ # "database_column_definitions": [
# { # {
# "column": "Object_PrimaryID", <--------- the value I save into primaryId # "column": "Object_PrimaryID", <--------- the value I save into primaryId
# "mapped_to_column": "cur_MAC", <--------- gets inserted into the CurrentScan DB # "mapped_to_column": "cur_MAC", <--------- gets inserted into the CurrentScan DB
# table column cur_MAC # table column cur_MAC
# #
for device in device_data: for device in device_data:
plugin_objects.add_object( plugin_objects.add_object(
primaryId = device['mac_address'], primaryId = device['mac_address'],
@@ -65,11 +64,11 @@ def main():
watched4 = device['last_seen'], watched4 = device['last_seen'],
extra = '', extra = '',
foreignKey = device['mac_address'] foreignKey = device['mac_address']
# helpVal1 = "Something1", # Optional Helper values to be passed for mapping into the app # helpVal1 = "Something1", # Optional Helper values to be passed for mapping into the app
# helpVal2 = "Something1", # If you need to use even only 1, add the remaining ones too # helpVal2 = "Something1", # If you need to use even only 1, add the remaining ones too
# helpVal3 = "Something1", # and set them to 'null'. Check the docs for details: # helpVal3 = "Something1", # and set them to 'null'. Check the docs for details:
# helpVal4 = "Something1", # https://github.com/jokob-sk/NetAlertX/blob/main/docs/PLUGINS_DEV.md # helpVal4 = "Something1", # https://github.com/jokob-sk/NetAlertX/blob/main/docs/PLUGINS_DEV.md
) )
mylog('verbose', [f'[{pluginName}] New entries: "{len(device_data)}"']) mylog('verbose', [f'[{pluginName}] New entries: "{len(device_data)}"'])
@@ -78,14 +77,15 @@ def main():
return 0 return 0
# retrieve data # retrieve data
def get_device_data(some_setting): def get_device_data(some_setting):
device_data = [] device_data = []
# do some processing, call external APIs, and return a device_data list # do some processing, call external APIs, and return a device_data list
# ... # ...
# #
# Sample data for testing purposes, you can adjust the processing in main() as needed # Sample data for testing purposes, you can adjust the processing in main() as needed
# ... before adding it to the plugin_objects.add_object(...) # ... before adding it to the plugin_objects.add_object(...)
device_data = [ device_data = [
@@ -113,8 +113,9 @@ def get_device_data(some_setting):
} }
] ]
# Return the data to be detected by the main application # Return the data to be detected by the main application
return device_data return device_data
if __name__ == '__main__': if __name__ == '__main__':
main() main()

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python # !/usr/bin/env python
# Just a testing library plugin for development purposes # Just a testing library plugin for development purposes
import os import os
import sys import sys
@@ -11,10 +11,10 @@ INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"]) sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
# NetAlertX modules # NetAlertX modules
from const import logPath from const import logPath # noqa: E402 [flake8 lint suppression]
from plugin_helper import Plugin_Objects from plugin_helper import Plugin_Objects # noqa: E402 [flake8 lint suppression]
from logger import mylog from logger import mylog # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
pluginName = 'TESTONLY' pluginName = 'TESTONLY'
@@ -28,14 +28,11 @@ plugin_objects = Plugin_Objects(RESULT_FILE)
md5_hash = hashlib.md5() md5_hash = hashlib.md5()
# globals # globals
def main(): def main():
# START # START
mylog('verbose', [f'[{pluginName}] In script']) mylog('verbose', [f'[{pluginName}] In script'])
# SPACE FOR TESTING 🔽 # SPACE FOR TESTING 🔽
str = "ABC-MBP._another.localdomain." str = "ABC-MBP._another.localdomain."
@@ -43,28 +40,23 @@ def main():
# result = cleanDeviceName(str, True) # result = cleanDeviceName(str, True)
regexes = get_setting_value('NEWDEV_NAME_CLEANUP_REGEX') regexes = get_setting_value('NEWDEV_NAME_CLEANUP_REGEX')
print(regexes) print(regexes)
subnets = get_setting_value('SCAN_SUBNETS') subnets = get_setting_value('SCAN_SUBNETS')
print(subnets) print(subnets)
for rgx in regexes: for rgx in regexes:
mylog('trace', ["[cleanDeviceName] applying regex : " + rgx]) mylog('trace', ["[cleanDeviceName] applying regex : " + rgx])
mylog('trace', ["[cleanDeviceName] name before regex : " + str]) mylog('trace', ["[cleanDeviceName] name before regex : " + str])
str = re.sub(rgx, "", str) str = re.sub(rgx, "", str)
mylog('trace', ["[cleanDeviceName] name after regex : " + str]) mylog('trace', ["[cleanDeviceName] name after regex : " + str])
mylog('debug', ["[cleanDeviceName] output: " + str]) mylog('debug', ["[cleanDeviceName] output: " + str])
# SPACE FOR TESTING 🔼 # SPACE FOR TESTING 🔼
# END # END
mylog('verbose', [f'[{pluginName}] result "{str}"']) mylog('verbose', [f'[{pluginName}] result "{str}"'])
# -------------INIT--------------------- # -------------INIT---------------------

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python # !/usr/bin/env python
import json import json
import subprocess import subprocess
@@ -9,15 +9,15 @@ import sys
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app") INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"]) sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
import conf import conf # noqa: E402 [flake8 lint suppression]
from const import confFileName, logPath from const import confFileName, logPath # noqa: E402 [flake8 lint suppression]
from utils.datetime_utils import timeNowDB from utils.datetime_utils import timeNowDB # noqa: E402 [flake8 lint suppression]
from plugin_helper import Plugin_Objects from plugin_helper import Plugin_Objects # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
from models.notification_instance import NotificationInstance from models.notification_instance import NotificationInstance # noqa: E402 [flake8 lint suppression]
from database import DB from database import DB # noqa: E402 [flake8 lint suppression]
from pytz import timezone from pytz import timezone # noqa: E402 [flake8 lint suppression]
# Make sure the TIMEZONE for logging is correct # Make sure the TIMEZONE for logging is correct
conf.tz = timezone(get_setting_value("TIMEZONE")) conf.tz = timezone(get_setting_value("TIMEZONE"))
@@ -35,7 +35,7 @@ def main():
mylog("verbose", [f"[{pluginName}](publisher) In script"]) mylog("verbose", [f"[{pluginName}](publisher) In script"])
# Check if basic config settings supplied # Check if basic config settings supplied
if check_config() == False: if check_config() is False:
mylog( mylog(
"none", "none",
[ [
@@ -65,9 +65,9 @@ def main():
# Log result # Log result
plugin_objects.add_object( plugin_objects.add_object(
primaryId = pluginName, primaryId = pluginName,
secondaryId = timeNowDB(), secondaryId = timeNowDB(),
watched1 = notification["GUID"], watched1 = notification["GUID"],
watched2 = result, watched2 = result,
watched3 = 'null', watched3 = 'null',
watched4 = 'null', watched4 = 'null',
extra = 'null', extra = 'null',
@@ -80,8 +80,7 @@ def main():
# ------------------------------------------------------------------------------- # -------------------------------------------------------------------------------
def check_config(): def check_config():
if get_setting_value("APPRISE_HOST") == "" or ( if get_setting_value("APPRISE_HOST") == "" or (
get_setting_value("APPRISE_URL") == "" get_setting_value("APPRISE_URL") == "" and get_setting_value("APPRISE_TAG") == ""
and get_setting_value("APPRISE_TAG") == ""
): ):
return False return False
else: else:

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python # !/usr/bin/env python
import os import os
import sys import sys
import re import re
@@ -16,15 +16,15 @@ INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"]) sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
# NetAlertX modules # NetAlertX modules
import conf import conf # noqa: E402 [flake8 lint suppression]
from const import confFileName, logPath from const import confFileName, logPath # noqa: E402 [flake8 lint suppression]
from plugin_helper import Plugin_Objects from plugin_helper import Plugin_Objects # noqa: E402 [flake8 lint suppression]
from utils.datetime_utils import timeNowDB from utils.datetime_utils import timeNowDB # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value, hide_email from helper import get_setting_value, hide_email # noqa: E402 [flake8 lint suppression]
from models.notification_instance import NotificationInstance from models.notification_instance import NotificationInstance # noqa: E402 [flake8 lint suppression]
from database import DB from database import DB # noqa: E402 [flake8 lint suppression]
from pytz import timezone from pytz import timezone # noqa: E402 [flake8 lint suppression]
# Make sure the TIMEZONE for logging is correct # Make sure the TIMEZONE for logging is correct
conf.tz = timezone(get_setting_value('TIMEZONE')) conf.tz = timezone(get_setting_value('TIMEZONE'))
@@ -38,13 +38,12 @@ LOG_PATH = logPath + '/plugins'
RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log') RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
def main(): def main():
mylog('verbose', [f'[{pluginName}](publisher) In script']) mylog('verbose', [f'[{pluginName}](publisher) In script'])
# Check if basic config settings supplied # Check if basic config settings supplied
if check_config() == False: if check_config() is False:
mylog('none', [f'[{pluginName}] ⚠ ERROR: Publisher notification gateway not set up correctly. Check your {confFileName} {pluginName}_* variables.']) mylog('none', [f'[{pluginName}] ⚠ ERROR: Publisher notification gateway not set up correctly. Check your {confFileName} {pluginName}_* variables.'])
return return
@@ -61,7 +60,7 @@ def main():
# Retrieve new notifications # Retrieve new notifications
new_notifications = notifications.getNew() new_notifications = notifications.getNew()
# mylog('verbose', [f'[{pluginName}] new_notifications: ', new_notifications]) # mylog('verbose', [f'[{pluginName}] new_notifications: ', new_notifications])
mylog('verbose', [f'[{pluginName}] SMTP_SERVER: ', get_setting_value("SMTP_SERVER")]) mylog('verbose', [f'[{pluginName}] SMTP_SERVER: ', get_setting_value("SMTP_SERVER")])
mylog('verbose', [f'[{pluginName}] SMTP_PORT: ', get_setting_value("SMTP_PORT")]) mylog('verbose', [f'[{pluginName}] SMTP_PORT: ', get_setting_value("SMTP_PORT")])
mylog('verbose', [f'[{pluginName}] SMTP_SKIP_LOGIN: ', get_setting_value("SMTP_SKIP_LOGIN")]) mylog('verbose', [f'[{pluginName}] SMTP_SKIP_LOGIN: ', get_setting_value("SMTP_SKIP_LOGIN")])
@@ -72,19 +71,18 @@ def main():
# mylog('verbose', [f'[{pluginName}] SMTP_REPORT_TO: ', get_setting_value("SMTP_REPORT_TO")]) # mylog('verbose', [f'[{pluginName}] SMTP_REPORT_TO: ', get_setting_value("SMTP_REPORT_TO")])
# mylog('verbose', [f'[{pluginName}] SMTP_REPORT_FROM: ', get_setting_value("SMTP_REPORT_FROM")]) # mylog('verbose', [f'[{pluginName}] SMTP_REPORT_FROM: ', get_setting_value("SMTP_REPORT_FROM")])
# Process the new notifications (see the Notifications DB table for structure or check the /php/server/query_json.php?file=table_notifications.json endpoint) # Process the new notifications (see the Notifications DB table for structure or check the /php/server/query_json.php?file=table_notifications.json endpoint)
for notification in new_notifications: for notification in new_notifications:
# Send notification # Send notification
result = send(notification["HTML"], notification["Text"]) result = send(notification["HTML"], notification["Text"])
# Log result # Log result
plugin_objects.add_object( plugin_objects.add_object(
primaryId = pluginName, primaryId = pluginName,
secondaryId = timeNowDB(), secondaryId = timeNowDB(),
watched1 = notification["GUID"], watched1 = notification["GUID"],
watched2 = result, watched2 = result,
watched3 = 'null', watched3 = 'null',
watched4 = 'null', watched4 = 'null',
extra = 'null', extra = 'null',
@@ -93,25 +91,33 @@ def main():
plugin_objects.write_result_file() plugin_objects.write_result_file()
#-------------------------------------------------------------------------------
def check_config (): # -------------------------------------------------------------------------------
def check_config():
server = get_setting_value('SMTP_SERVER') server = get_setting_value('SMTP_SERVER')
report_to = get_setting_value("SMTP_REPORT_TO") report_to = get_setting_value("SMTP_REPORT_TO")
report_from = get_setting_value("SMTP_REPORT_FROM") report_from = get_setting_value("SMTP_REPORT_FROM")
if server == '' or report_from == '' or report_to == '': if server == '' or report_from == '' or report_to == '':
mylog('none', [f'[Email Check Config] ⚠ ERROR: Email service not set up correctly. Check your {confFileName} SMTP_*, SMTP_REPORT_FROM and SMTP_REPORT_TO variables.']) mylog('none', [f'[Email Check Config] ⚠ ERROR: Email service not set up correctly. Check your {confFileName} SMTP_*, SMTP_REPORT_FROM and SMTP_REPORT_TO variables.'])
return False return False
else: else:
return True return True
#-------------------------------------------------------------------------------
# -------------------------------------------------------------------------------
def send(pHTML, pText): def send(pHTML, pText):
mylog('debug', [f'[{pluginName}] SMTP_REPORT_TO: {hide_email(str(get_setting_value("SMTP_REPORT_TO")))} SMTP_USER: {hide_email(str(get_setting_value("SMTP_USER")))}']) mylog('debug', [f'[{pluginName}] SMTP_REPORT_TO: {hide_email(str(get_setting_value("SMTP_REPORT_TO")))} SMTP_USER: {hide_email(str(get_setting_value("SMTP_USER")))}'])
subject, from_email, to_email, message_html, message_text = sanitize_email_content(str(get_setting_value("SMTP_SUBJECT")), get_setting_value("SMTP_REPORT_FROM"), get_setting_value("SMTP_REPORT_TO"), pHTML, pText) subject, from_email, to_email, message_html, message_text = sanitize_email_content(
str(get_setting_value("SMTP_SUBJECT")),
get_setting_value("SMTP_REPORT_FROM"),
get_setting_value("SMTP_REPORT_TO"),
pHTML,
pText
)
emails = [] emails = []
@@ -132,10 +138,10 @@ def send(pHTML, pText):
msg['Subject'] = subject msg['Subject'] = subject
msg['From'] = from_email msg['From'] = from_email
msg['To'] = mail_addr msg['To'] = mail_addr
msg['Date'] = formatdate(localtime=True) msg['Date'] = formatdate(localtime=True)
msg.attach (MIMEText (message_text, 'plain')) msg.attach(MIMEText(message_text, 'plain'))
msg.attach (MIMEText (message_html, 'html')) msg.attach(MIMEText(message_html, 'html'))
# Set a timeout for the SMTP connection (in seconds) # Set a timeout for the SMTP connection (in seconds)
smtp_timeout = 30 smtp_timeout = 30
@@ -144,30 +150,31 @@ def send(pHTML, pText):
if get_setting_value("LOG_LEVEL") == 'debug': if get_setting_value("LOG_LEVEL") == 'debug':
send_email(msg,smtp_timeout) send_email(msg, smtp_timeout)
else: else:
try: try:
send_email(msg,smtp_timeout) send_email(msg, smtp_timeout)
except smtplib.SMTPAuthenticationError as e: except smtplib.SMTPAuthenticationError as e:
mylog('none', [' ERROR: Couldn\'t connect to the SMTP server (SMTPAuthenticationError)']) mylog('none', [' ERROR: Couldn\'t connect to the SMTP server (SMTPAuthenticationError)'])
mylog('none', [' ERROR: Double-check your SMTP_USER and SMTP_PASS settings.)']) mylog('none', [' ERROR: Double-check your SMTP_USER and SMTP_PASS settings.)'])
mylog('none', [' ERROR: ', str(e)]) mylog('none', [' ERROR: ', str(e)])
except smtplib.SMTPServerDisconnected as e: except smtplib.SMTPServerDisconnected as e:
mylog('none', [' ERROR: Couldn\'t connect to the SMTP server (SMTPServerDisconnected)']) mylog('none', [' ERROR: Couldn\'t connect to the SMTP server (SMTPServerDisconnected)'])
mylog('none', [' ERROR: ', str(e)]) mylog('none', [' ERROR: ', str(e)])
except socket.gaierror as e: except socket.gaierror as e:
mylog('none', [' ERROR: Could not resolve hostname (socket.gaierror)']) mylog('none', [' ERROR: Could not resolve hostname (socket.gaierror)'])
mylog('none', [' ERROR: ', str(e)]) mylog('none', [' ERROR: ', str(e)])
except ssl.SSLError as e: except ssl.SSLError as e:
mylog('none', [' ERROR: Could not establish SSL connection (ssl.SSLError)']) mylog('none', [' ERROR: Could not establish SSL connection (ssl.SSLError)'])
mylog('none', [' ERROR: Are you sure you need SMTP_FORCE_SSL enabled? Check your SMTP provider docs.']) mylog('none', [' ERROR: Are you sure you need SMTP_FORCE_SSL enabled? Check your SMTP provider docs.'])
mylog('none', [' ERROR: ', str(e)]) mylog('none', [' ERROR: ', str(e)])
# ---------------------------------------------------------------------------------- # ----------------------------------------------------------------------------------
def send_email(msg,smtp_timeout): def send_email(msg, smtp_timeout):
# Send mail # Send mail
if get_setting_value('SMTP_FORCE_SSL'): if get_setting_value('SMTP_FORCE_SSL'):
mylog('debug', ['SMTP_FORCE_SSL == True so using .SMTP_SSL()']) mylog('debug', ['SMTP_FORCE_SSL == True so using .SMTP_SSL()'])
@@ -182,10 +189,10 @@ def send_email(msg,smtp_timeout):
mylog('debug', ['SMTP_FORCE_SSL == False so using .SMTP()']) mylog('debug', ['SMTP_FORCE_SSL == False so using .SMTP()'])
if get_setting_value("SMTP_PORT") == 0: if get_setting_value("SMTP_PORT") == 0:
mylog('debug', ['SMTP_PORT == 0 so sending .SMTP(SMTP_SERVER)']) mylog('debug', ['SMTP_PORT == 0 so sending .SMTP(SMTP_SERVER)'])
smtp_connection = smtplib.SMTP (get_setting_value('SMTP_SERVER')) smtp_connection = smtplib.SMTP(get_setting_value('SMTP_SERVER'))
else: else:
mylog('debug', ['SMTP_PORT == 0 so sending .SMTP(SMTP_SERVER, SMTP_PORT)']) mylog('debug', ['SMTP_PORT == 0 so sending .SMTP(SMTP_SERVER, SMTP_PORT)'])
smtp_connection = smtplib.SMTP (get_setting_value('SMTP_SERVER'), get_setting_value('SMTP_PORT')) smtp_connection = smtplib.SMTP(get_setting_value('SMTP_SERVER'), get_setting_value('SMTP_PORT'))
mylog('debug', ['Setting SMTP debug level']) mylog('debug', ['Setting SMTP debug level'])
@@ -193,7 +200,7 @@ def send_email(msg,smtp_timeout):
if get_setting_value('LOG_LEVEL') == 'debug': if get_setting_value('LOG_LEVEL') == 'debug':
smtp_connection.set_debuglevel(1) smtp_connection.set_debuglevel(1)
mylog('debug', [ 'Sending .ehlo()']) mylog('debug', ['Sending .ehlo()'])
smtp_connection.ehlo() smtp_connection.ehlo()
if not get_setting_value('SMTP_SKIP_TLS'): if not get_setting_value('SMTP_SKIP_TLS'):
@@ -203,12 +210,13 @@ def send_email(msg,smtp_timeout):
smtp_connection.ehlo() smtp_connection.ehlo()
if not get_setting_value('SMTP_SKIP_LOGIN'): if not get_setting_value('SMTP_SKIP_LOGIN'):
mylog('debug', ['SMTP_SKIP_LOGIN == False so sending .login()']) mylog('debug', ['SMTP_SKIP_LOGIN == False so sending .login()'])
smtp_connection.login (get_setting_value('SMTP_USER'), get_setting_value('SMTP_PASS')) smtp_connection.login(get_setting_value('SMTP_USER'), get_setting_value('SMTP_PASS'))
mylog('debug', ['Sending .sendmail()']) mylog('debug', ['Sending .sendmail()'])
smtp_connection.sendmail (get_setting_value("SMTP_REPORT_FROM"), get_setting_value("SMTP_REPORT_TO"), msg.as_string()) smtp_connection.sendmail(get_setting_value("SMTP_REPORT_FROM"), get_setting_value("SMTP_REPORT_TO"), msg.as_string())
smtp_connection.quit() smtp_connection.quit()
# ---------------------------------------------------------------------------------- # ----------------------------------------------------------------------------------
def sanitize_email_content(subject, from_email, to_email, message_html, message_text): def sanitize_email_content(subject, from_email, to_email, message_html, message_text):
# Validate and sanitize subject # Validate and sanitize subject
@@ -229,6 +237,7 @@ def sanitize_email_content(subject, from_email, to_email, message_html, message_
return subject, from_email, to_email, message_html, message_text return subject, from_email, to_email, message_html, message_text
# ---------------------------------------------------------------------------------- # ----------------------------------------------------------------------------------
if __name__ == '__main__': if __name__ == '__main__':
sys.exit(main()) sys.exit(main())

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python # !/usr/bin/env python
import json import json
import os import os
@@ -18,15 +18,14 @@ INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"]) sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
# NetAlertX modules # NetAlertX modules
import conf import conf # noqa: E402 [flake8 lint suppression]
from const import confFileName, logPath from const import confFileName, logPath # noqa: E402 [flake8 lint suppression]
from utils.plugin_utils import getPluginObject from utils.plugin_utils import getPluginObject # noqa: E402 [flake8 lint suppression]
from plugin_helper import Plugin_Objects from plugin_helper import Plugin_Objects # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value, bytes_to_string, \ from helper import get_setting_value, bytes_to_string, \
sanitize_string, normalize_string sanitize_string, normalize_string # noqa: E402 [flake8 lint suppression]
from utils.datetime_utils import timeNowDB from database import DB, get_device_stats # noqa: E402 [flake8 lint suppression]
from database import DB, get_device_stats
# Make sure the TIMEZONE for logging is correct # Make sure the TIMEZONE for logging is correct
@@ -287,11 +286,11 @@ def publish_mqtt(mqtt_client, topic, message):
# mylog('verbose', [f"[{pluginName}] mqtt_client.is_connected(): {mqtt_client.is_connected()} "]) # mylog('verbose', [f"[{pluginName}] mqtt_client.is_connected(): {mqtt_client.is_connected()} "])
result = mqtt_client.publish( result = mqtt_client.publish(
topic=topic, topic=topic,
payload=message, payload=message,
qos=qos, qos=qos,
retain=True, retain=True,
) )
status = result[0] status = result[0]
@@ -303,6 +302,7 @@ def publish_mqtt(mqtt_client, topic, message):
time.sleep(0.1) time.sleep(0.1)
return True return True
# ------------------------------------------------------------------------------ # ------------------------------------------------------------------------------
# Create a generic device for overall stats # Create a generic device for overall stats
def create_generic_device(mqtt_client, deviceId, deviceName): def create_generic_device(mqtt_client, deviceId, deviceName):
@@ -434,7 +434,6 @@ def mqtt_start(db):
if not mqtt_connected_to_broker: if not mqtt_connected_to_broker:
mqtt_client = mqtt_create_client() mqtt_client = mqtt_create_client()
deviceName = get_setting_value('MQTT_DEVICE_NAME') deviceName = get_setting_value('MQTT_DEVICE_NAME')
deviceId = get_setting_value('MQTT_DEVICE_ID') deviceId = get_setting_value('MQTT_DEVICE_ID')
@@ -449,16 +448,18 @@ def mqtt_start(db):
row = get_device_stats(db) row = get_device_stats(db)
# Publish (wrap into {} and remove last ',' from above) # Publish (wrap into {} and remove last ',' from above)
publish_mqtt(mqtt_client, f"{topic_root}/sensor/{deviceId}/state", publish_mqtt(
{ mqtt_client,
"online": row[0], f"{topic_root}/sensor/{deviceId}/state",
"down": row[1], {
"all": row[2], "online": row[0],
"archived": row[3], "down": row[1],
"new": row[4], "all": row[2],
"unknown": row[5] "archived": row[3],
} "new": row[4],
) "unknown": row[5]
}
)
# Generate device-specific MQTT messages if enabled # Generate device-specific MQTT messages if enabled
if get_setting_value('MQTT_SEND_DEVICES'): if get_setting_value('MQTT_SEND_DEVICES'):
@@ -466,11 +467,11 @@ def mqtt_start(db):
# Specific devices processing # Specific devices processing
# Get all devices # Get all devices
devices = db.read(get_setting_value('MQTT_DEVICES_SQL').replace('{s-quote}',"'")) devices = db.read(get_setting_value('MQTT_DEVICES_SQL').replace('{s-quote}', "'"))
sec_delay = len(devices) * int(get_setting_value('MQTT_DELAY_SEC'))*5 sec_delay = len(devices) * int(get_setting_value('MQTT_DELAY_SEC')) * 5
mylog('verbose', [f"[{pluginName}] Estimated delay: ", (sec_delay), 's ', '(', round(sec_delay/60, 1), 'min)']) mylog('verbose', [f"[{pluginName}] Estimated delay: ", (sec_delay), 's ', '(', round(sec_delay / 60, 1), 'min)'])
for device in devices: for device in devices:
@@ -495,27 +496,29 @@ def mqtt_start(db):
# handle device_tracker # handle device_tracker
# IMPORTANT: shared payload - device_tracker attributes and individual sensors # IMPORTANT: shared payload - device_tracker attributes and individual sensors
devJson = { devJson = {
"last_ip": device["devLastIP"], "last_ip": device["devLastIP"],
"is_new": str(device["devIsNew"]), "is_new": str(device["devIsNew"]),
"alert_down": str(device["devAlertDown"]), "alert_down": str(device["devAlertDown"]),
"vendor": sanitize_string(device["devVendor"]), "vendor": sanitize_string(device["devVendor"]),
"mac_address": str(device["devMac"]), "mac_address": str(device["devMac"]),
"model": devDisplayName, "model": devDisplayName,
"last_connection": prepTimeStamp(str(device["devLastConnection"])), "last_connection": prepTimeStamp(str(device["devLastConnection"])),
"first_connection": prepTimeStamp(str(device["devFirstConnection"])), "first_connection": prepTimeStamp(str(device["devFirstConnection"])),
"sync_node": device["devSyncHubNode"], "sync_node": device["devSyncHubNode"],
"group": device["devGroup"], "group": device["devGroup"],
"location": device["devLocation"], "location": device["devLocation"],
"network_parent_mac": device["devParentMAC"], "network_parent_mac": device["devParentMAC"],
"network_parent_name": next((dev["devName"] for dev in devices if dev["devMAC"] == device["devParentMAC"]), "") "network_parent_name": next((dev["devName"] for dev in devices if dev["devMAC"] == device["devParentMAC"]), "")
} }
# bulk update device sensors in home assistant # bulk update device sensors in home assistant
publish_mqtt(mqtt_client, sensorConfig.state_topic, devJson) # REQUIRED, DON'T DELETE publish_mqtt(mqtt_client, sensorConfig.state_topic, devJson) # REQUIRED, DON'T DELETE
# create and update is_present sensor # create and update is_present sensor
sensorConfig = create_sensor(mqtt_client, deviceId, devDisplayName, 'binary_sensor', 'is_present', 'wifi', device["devMac"]) sensorConfig = create_sensor(mqtt_client, deviceId, devDisplayName, 'binary_sensor', 'is_present', 'wifi', device["devMac"])
publish_mqtt(mqtt_client, sensorConfig.state_topic, publish_mqtt(
mqtt_client,
sensorConfig.state_topic,
{ {
"is_present": to_binary_sensor(str(device["devPresentLastScan"])) "is_present": to_binary_sensor(str(device["devPresentLastScan"]))
} }
@@ -547,7 +550,7 @@ def to_binary_sensor(input):
elif isinstance(input, bool) and input: elif isinstance(input, bool) and input:
return "ON" return "ON"
elif isinstance(input, str) and input == "1": elif isinstance(input, str) and input == "1":
return "ON" return "ON"
elif isinstance(input, bytes) and bytes_to_string(input) == "1": elif isinstance(input, bytes) and bytes_to_string(input) == "1":
return "ON" return "ON"
return "OFF" return "OFF"

View File

@@ -1,5 +1,5 @@
#!/usr/bin/env python # !/usr/bin/env python
import json import json
import os import os
@@ -11,15 +11,15 @@ from base64 import b64encode
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app') INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"]) sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
import conf import conf # noqa: E402 [flake8 lint suppression]
from const import confFileName, logPath from const import confFileName, logPath # noqa: E402 [flake8 lint suppression]
from plugin_helper import Plugin_Objects, handleEmpty from plugin_helper import Plugin_Objects, handleEmpty # noqa: E402 [flake8 lint suppression]
from utils.datetime_utils import timeNowDB from utils.datetime_utils import timeNowDB # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
from models.notification_instance import NotificationInstance from models.notification_instance import NotificationInstance # noqa: E402 [flake8 lint suppression]
from database import DB from database import DB # noqa: E402 [flake8 lint suppression]
from pytz import timezone from pytz import timezone # noqa: E402 [flake8 lint suppression]
# Make sure the TIMEZONE for logging is correct # Make sure the TIMEZONE for logging is correct
conf.tz = timezone(get_setting_value('TIMEZONE')) conf.tz = timezone(get_setting_value('TIMEZONE'))
@@ -33,13 +33,12 @@ LOG_PATH = logPath + '/plugins'
RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log') RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
def main(): def main():
mylog('verbose', [f'[{pluginName}](publisher) In script']) mylog('verbose', [f'[{pluginName}](publisher) In script'])
# Check if basic config settings supplied # Check if basic config settings supplied
if check_config() == False: if check_config() is False:
mylog('none', [f'[{pluginName}] ⚠ ERROR: Publisher notification gateway not set up correctly. Check your {confFileName} {pluginName}_* variables.']) mylog('none', [f'[{pluginName}] ⚠ ERROR: Publisher notification gateway not set up correctly. Check your {confFileName} {pluginName}_* variables.'])
return return
@@ -65,9 +64,9 @@ def main():
# Log result # Log result
plugin_objects.add_object( plugin_objects.add_object(
primaryId = pluginName, primaryId = pluginName,
secondaryId = timeNowDB(), secondaryId = timeNowDB(),
watched1 = notification["GUID"], watched1 = notification["GUID"],
watched2 = handleEmpty(response_text), watched2 = handleEmpty(response_text),
watched3 = response_status_code, watched3 = response_status_code,
watched4 = 'null', watched4 = 'null',
extra = 'null', extra = 'null',
@@ -77,15 +76,15 @@ def main():
plugin_objects.write_result_file() plugin_objects.write_result_file()
# -------------------------------------------------------------------------------
#-------------------------------------------------------------------------------
def check_config(): def check_config():
if get_setting_value('NTFY_HOST') == '' or get_setting_value('NTFY_TOPIC') == '': if get_setting_value('NTFY_HOST') == '' or get_setting_value('NTFY_TOPIC') == '':
return False return False
else: else:
return True return True
#-------------------------------------------------------------------------------
# -------------------------------------------------------------------------------
def send(html, text): def send(html, text):
response_text = '' response_text = ''
@@ -100,7 +99,7 @@ def send(html, text):
# prepare request headers # prepare request headers
headers = { headers = {
"Title": "NetAlertX Notification", "Title": "NetAlertX Notification",
"Actions": "view, Open Dashboard, "+ get_setting_value('REPORT_DASHBOARD_URL'), "Actions": "view, Open Dashboard, " + get_setting_value('REPORT_DASHBOARD_URL'),
"Priority": get_setting_value('NTFY_PRIORITY'), "Priority": get_setting_value('NTFY_PRIORITY'),
"Tags": "warning" "Tags": "warning"
} }
@@ -109,37 +108,38 @@ def send(html, text):
if token != '': if token != '':
headers["Authorization"] = "Bearer {}".format(token) headers["Authorization"] = "Bearer {}".format(token)
elif user != "" and pwd != "": elif user != "" and pwd != "":
# Generate hash for basic auth # Generate hash for basic auth
basichash = b64encode(bytes(user + ':' + pwd, "utf-8")).decode("ascii") basichash = b64encode(bytes(user + ':' + pwd, "utf-8")).decode("ascii")
# add authorization header with hash # add authorization header with hash
headers["Authorization"] = "Basic {}".format(basichash) headers["Authorization"] = "Basic {}".format(basichash)
# call NTFY service # call NTFY service
try: try:
response = requests.post("{}/{}".format( get_setting_value('NTFY_HOST'), response = requests.post("{}/{}".format(
get_setting_value('NTFY_TOPIC')), get_setting_value('NTFY_HOST'),
data = text, get_setting_value('NTFY_TOPIC')),
headers = headers, data = text,
verify = verify_ssl) headers = headers,
verify = verify_ssl
)
response_status_code = response.status_code response_status_code = response.status_code
# Check if the request was successful (status code 200) # Check if the request was successful (status code 200)
if response_status_code == 200: if response_status_code == 200:
response_text = response.text # This captures the response body/message response_text = response.text # This captures the response body/message
else: else:
response_text = json.dumps(response.text) response_text = json.dumps(response.text)
except requests.exceptions.RequestException as e: except requests.exceptions.RequestException as e:
mylog('none', [f'[{pluginName}] ⚠ ERROR: ', e]) mylog('none', [f'[{pluginName}] ⚠ ERROR: ', e])
response_text = e response_text = e
return response_text, response_status_code return response_text, response_status_code
return response_text, response_status_code return response_text, response_status_code
if __name__ == '__main__': if __name__ == '__main__':
sys.exit(main()) sys.exit(main())

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python3 # !/usr/bin/env python3
import conf import conf
from const import confFileName, logPath from const import confFileName, logPath
from pytz import timezone from pytz import timezone
@@ -12,12 +12,12 @@ import requests
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app") INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"]) sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from plugin_helper import Plugin_Objects, handleEmpty # noqa: E402 from plugin_helper import Plugin_Objects, handleEmpty # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger # noqa: E402 from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value, hide_string # noqa: E402 from helper import get_setting_value, hide_string # noqa: E402 [flake8 lint suppression]
from utils.datetime_utils import timeNowDB from utils.datetime_utils import timeNowDB # noqa: E402 [flake8 lint suppression]
from models.notification_instance import NotificationInstance # noqa: E402 from models.notification_instance import NotificationInstance # noqa: E402 [flake8 lint suppression]
from database import DB # noqa: E402 from database import DB # noqa: E402 [flake8 lint suppression]
# Make sure the TIMEZONE for logging is correct # Make sure the TIMEZONE for logging is correct
conf.tz = timezone(get_setting_value("TIMEZONE")) conf.tz = timezone(get_setting_value("TIMEZONE"))

View File

@@ -1,5 +1,5 @@
#!/usr/bin/env python # !/usr/bin/env python
import json import json
import os import os
@@ -10,15 +10,15 @@ import requests
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app') INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"]) sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
import conf import conf # noqa: E402 [flake8 lint suppression]
from const import confFileName, logPath from const import confFileName, logPath # noqa: E402 [flake8 lint suppression]
from plugin_helper import Plugin_Objects, handleEmpty from plugin_helper import Plugin_Objects, handleEmpty # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value, hide_string from helper import get_setting_value, hide_string # noqa: E402 [flake8 lint suppression]
from utils.datetime_utils import timeNowDB from utils.datetime_utils import timeNowDB # noqa: E402 [flake8 lint suppression]
from models.notification_instance import NotificationInstance from models.notification_instance import NotificationInstance # noqa: E402 [flake8 lint suppression]
from database import DB from database import DB # noqa: E402 [flake8 lint suppression]
from pytz import timezone from pytz import timezone # noqa: E402 [flake8 lint suppression]
# Make sure the TIMEZONE for logging is correct # Make sure the TIMEZONE for logging is correct
conf.tz = timezone(get_setting_value('TIMEZONE')) conf.tz = timezone(get_setting_value('TIMEZONE'))
@@ -32,13 +32,12 @@ LOG_PATH = logPath + '/plugins'
RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log') RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
def main(): def main():
mylog('verbose', [f'[{pluginName}](publisher) In script']) mylog('verbose', [f'[{pluginName}](publisher) In script'])
# Check if basic config settings supplied # Check if basic config settings supplied
if check_config() == False: if check_config() is False:
mylog('none', [f'[{pluginName}] ⚠ ERROR: Publisher notification gateway not set up correctly. Check your {confFileName} {pluginName}_* variables.']) mylog('none', [f'[{pluginName}] ⚠ ERROR: Publisher notification gateway not set up correctly. Check your {confFileName} {pluginName}_* variables.'])
return return
@@ -59,14 +58,14 @@ def main():
for notification in new_notifications: for notification in new_notifications:
# Send notification # Send notification
response_text, response_status_code = send(notification["Text"]) response_text, response_status_code = send(notification["Text"])
# Log result # Log result
plugin_objects.add_object( plugin_objects.add_object(
primaryId = pluginName, primaryId = pluginName,
secondaryId = timeNowDB(), secondaryId = timeNowDB(),
watched1 = notification["GUID"], watched1 = notification["GUID"],
watched2 = handleEmpty(response_text), watched2 = handleEmpty(response_text),
watched3 = response_status_code, watched3 = response_status_code,
watched4 = 'null', watched4 = 'null',
extra = 'null', extra = 'null',
@@ -76,8 +75,7 @@ def main():
plugin_objects.write_result_file() plugin_objects.write_result_file()
# -------------------------------------------------------------------------------
#-------------------------------------------------------------------------------
def send(text): def send(text):
response_text = '' response_text = ''
@@ -85,8 +83,7 @@ def send(text):
token = get_setting_value('PUSHSAFER_TOKEN') token = get_setting_value('PUSHSAFER_TOKEN')
mylog('verbose', [f'[{pluginName}] PUSHSAFER_TOKEN: "{hide_string(token)}"']) mylog('verbose', [f'[{pluginName}] PUSHSAFER_TOKEN: "{hide_string(token)}"'])
try: try:
url = 'https://www.pushsafer.com/api' url = 'https://www.pushsafer.com/api'
@@ -101,40 +98,34 @@ def send(text):
"u" : get_setting_value('REPORT_DASHBOARD_URL'), "u" : get_setting_value('REPORT_DASHBOARD_URL'),
"ut" : 'Open NetAlertX', "ut" : 'Open NetAlertX',
"k" : token, "k" : token,
} }
response = requests.post(url, data=post_fields) response = requests.post(url, data=post_fields)
response_status_code = response.status_code response_status_code = response.status_code
# Check if the request was successful (status code 200) # Check if the request was successful (status code 200)
if response_status_code == 200: if response_status_code == 200:
response_text = response.text # This captures the response body/message response_text = response.text # This captures the response body/message
else: else:
response_text = json.dumps(response.text) response_text = json.dumps(response.text)
except requests.exceptions.RequestException as e: except requests.exceptions.RequestException as e:
mylog('none', [f'[{pluginName}] ⚠ ERROR: ', e]) mylog('none', [f'[{pluginName}] ⚠ ERROR: ', e])
response_text = e response_text = e
return response_text, response_status_code return response_text, response_status_code
return response_text, response_status_code return response_text, response_status_code
# -------------------------------------------------------------------------------
#-------------------------------------------------------------------------------
def check_config(): def check_config():
if get_setting_value('PUSHSAFER_TOKEN') == 'ApiKey': if get_setting_value('PUSHSAFER_TOKEN') == 'ApiKey':
return False return False
else: else:
return True return True
# ------------------------------------------------------- # -------------------------------------------------------
if __name__ == '__main__': if __name__ == '__main__':
sys.exit(main()) sys.exit(main())

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python # !/usr/bin/env python
import subprocess import subprocess
import os import os
@@ -8,15 +8,15 @@ import sys
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app') INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"]) sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
import conf import conf # noqa: E402 [flake8 lint suppression]
from const import confFileName, logPath from const import confFileName, logPath # noqa: E402 [flake8 lint suppression]
from plugin_helper import Plugin_Objects from plugin_helper import Plugin_Objects # noqa: E402 [flake8 lint suppression]
from utils.datetime_utils import timeNowDB from utils.datetime_utils import timeNowDB # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
from models.notification_instance import NotificationInstance from models.notification_instance import NotificationInstance # noqa: E402 [flake8 lint suppression]
from database import DB from database import DB # noqa: E402 [flake8 lint suppression]
from pytz import timezone from pytz import timezone # noqa: E402 [flake8 lint suppression]
# Make sure the TIMEZONE for logging is correct # Make sure the TIMEZONE for logging is correct
conf.tz = timezone(get_setting_value('TIMEZONE')) conf.tz = timezone(get_setting_value('TIMEZONE'))
@@ -30,13 +30,11 @@ LOG_PATH = logPath + '/plugins'
RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log') RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
def main(): def main():
mylog('verbose', [f'[{pluginName}](publisher) In script']) mylog('verbose', [f'[{pluginName}](publisher) In script'])
# Check if basic config settings supplied # Check if basic config settings supplied
if check_config() == False: if check_config() is False:
mylog('none', [ mylog('none', [
f'[{pluginName}] ⚠ ERROR: Publisher notification gateway not set up correctly. Check your {confFileName} {pluginName}_* variables.']) f'[{pluginName}] ⚠ ERROR: Publisher notification gateway not set up correctly. Check your {confFileName} {pluginName}_* variables.'])
return return

View File

@@ -1,5 +1,5 @@
#!/usr/bin/env python # !/usr/bin/env python
import json import json
import subprocess import subprocess
@@ -13,15 +13,15 @@ INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"]) sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
import conf import conf # noqa: E402 [flake8 lint suppression]
from const import logPath, confFileName from const import logPath, confFileName # noqa: E402 [flake8 lint suppression]
from plugin_helper import Plugin_Objects, handleEmpty from plugin_helper import Plugin_Objects, handleEmpty # noqa: E402 [flake8 lint suppression]
from utils.datetime_utils import timeNowDB from utils.datetime_utils import timeNowDB # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value, write_file from helper import get_setting_value, write_file # noqa: E402 [flake8 lint suppression]
from models.notification_instance import NotificationInstance from models.notification_instance import NotificationInstance # noqa: E402 [flake8 lint suppression]
from database import DB from database import DB # noqa: E402 [flake8 lint suppression]
from pytz import timezone from pytz import timezone # noqa: E402 [flake8 lint suppression]
# Make sure the TIMEZONE for logging is correct # Make sure the TIMEZONE for logging is correct
conf.tz = timezone(get_setting_value('TIMEZONE')) conf.tz = timezone(get_setting_value('TIMEZONE'))
@@ -35,13 +35,12 @@ LOG_PATH = logPath + '/plugins'
RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log') RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
def main(): def main():
mylog('verbose', [f'[{pluginName}](publisher) In script']) mylog('verbose', [f'[{pluginName}](publisher) In script'])
# Check if basic config settings supplied # Check if basic config settings supplied
if check_config() == False: if check_config() is False:
mylog('none', [f'[{pluginName}] ⚠ ERROR: Publisher notification gateway not set up correctly. Check your {confFileName} {pluginName}_* variables.']) mylog('none', [f'[{pluginName}] ⚠ ERROR: Publisher notification gateway not set up correctly. Check your {confFileName} {pluginName}_* variables.'])
return return
@@ -62,15 +61,19 @@ def main():
for notification in new_notifications: for notification in new_notifications:
# Send notification # Send notification
response_stdout, response_stderr = send(notification["Text"], notification["HTML"], notification["JSON"]) response_stdout, response_stderr = send(
notification["Text"],
notification["HTML"],
notification["JSON"]
)
# Log result # Log result
plugin_objects.add_object( plugin_objects.add_object(
primaryId = pluginName, primaryId = pluginName,
secondaryId = timeNowDB(), secondaryId = timeNowDB(),
watched1 = notification["GUID"], watched1 = notification["GUID"],
watched2 = handleEmpty(response_stdout), watched2 = handleEmpty(response_stdout),
watched3 = handleEmpty(response_stderr), watched3 = handleEmpty(response_stderr),
watched4 = 'null', watched4 = 'null',
extra = 'null', extra = 'null',
foreignKey = notification["GUID"] foreignKey = notification["GUID"]
@@ -79,16 +82,16 @@ def main():
plugin_objects.write_result_file() plugin_objects.write_result_file()
#------------------------------------------------------------------------------- # -------------------------------------------------------------------------------
def check_config(): def check_config():
if get_setting_value('WEBHOOK_URL') == '': if get_setting_value('WEBHOOK_URL') == '':
return False return False
else: else:
return True return True
#-------------------------------------------------------------------------------
def send (text_data, html_data, json_data):
# -------------------------------------------------------------------------------
def send(text_data, html_data, json_data):
response_stderr = '' response_stderr = ''
response_stdout = '' response_stdout = ''
@@ -102,9 +105,9 @@ def send (text_data, html_data, json_data):
# use data type based on specified payload type # use data type based on specified payload type
if payloadType == 'json': if payloadType == 'json':
# In this code, the truncate_json function is used to recursively traverse the JSON object # In this code, the truncate_json function is used to recursively traverse the JSON object
# and remove nodes that exceed the size limit. It checks the size of each node's JSON representation # and remove nodes that exceed the size limit. It checks the size of each node's JSON representation
# using json.dumps and includes only the nodes that are within the limit. # using json.dumps and includes only the nodes that are within the limit.
json_str = json.dumps(json_data) json_str = json.dumps(json_data)
if len(json_str) <= limit: if len(json_str) <= limit:
@@ -127,45 +130,48 @@ def send (text_data, html_data, json_data):
return obj return obj
payloadData = truncate_json(json_data) payloadData = truncate_json(json_data)
if payloadType == 'html': if payloadType == 'html':
if len(html_data) > limit: if len(html_data) > limit:
payloadData = html_data[:limit] + " <h1>(text was truncated)</h1>" payloadData = html_data[:limit] + " <h1>(text was truncated)</h1>"
else: else:
payloadData = html_data payloadData = html_data
if payloadType == 'text': if payloadType == 'text':
if len(text_data) > limit: if len(text_data) > limit:
payloadData = text_data[:limit] + " (text was truncated)" payloadData = text_data[:limit] + " (text was truncated)"
else: else:
payloadData = text_data payloadData = text_data
# Define slack-compatible payload # Define slack-compatible payload
_json_payload = { "text": payloadData } if payloadType == 'text' else { if payloadType == 'text':
"username": "NetAlertX", _json_payload = {"text": payloadData}
"text": "There are new notifications", else:
"attachments": [{ _json_payload = {
"title": "NetAlertX Notifications", "username": "NetAlertX",
"title_link": get_setting_value('REPORT_DASHBOARD_URL'), "text": "There are new notifications",
"text": payloadData "attachments": [{
}] "title": "NetAlertX Notifications",
} "title_link": get_setting_value('REPORT_DASHBOARD_URL'),
"text": payloadData
}]
}
# DEBUG - Write the json payload into a log file for debugging # DEBUG - Write the json payload into a log file for debugging
write_file (logPath + '/webhook_payload.json', json.dumps(_json_payload)) write_file(logPath + '/webhook_payload.json', json.dumps(_json_payload))
# Using the Slack-Compatible Webhook endpoint for Discord so that the same payload can be used for both # Using the Slack-Compatible Webhook endpoint for Discord so that the same payload can be used for both
# Consider: curl has the ability to load in data to POST from a file + piping # Consider: curl has the ability to load in data to POST from a file + piping
if(endpointUrl.startswith('https://discord.com/api/webhooks/') and not endpointUrl.endswith("/slack")): if (endpointUrl.startswith('https://discord.com/api/webhooks/') and not endpointUrl.endswith("/slack")):
_WEBHOOK_URL = f"{endpointUrl}/slack" _WEBHOOK_URL = f"{endpointUrl}/slack"
curlParams = ["curl","-i","-H", "Content-Type:application/json" ,"-d", json.dumps(_json_payload), _WEBHOOK_URL] curlParams = ["curl", "-i", "-H", "Content-Type:application/json", "-d", json.dumps(_json_payload), _WEBHOOK_URL]
else: else:
_WEBHOOK_URL = endpointUrl _WEBHOOK_URL = endpointUrl
curlParams = ["curl","-i","-X", requestMethod , "-H", "Content-Type:application/json", "-d", json.dumps(_json_payload), _WEBHOOK_URL] curlParams = ["curl", "-i", "-X", requestMethod , "-H", "Content-Type:application/json", "-d", json.dumps(_json_payload), _WEBHOOK_URL]
# Add HMAC signature if configured # Add HMAC signature if configured
if(secret != ''): if (secret != ''):
h = hmac.new(secret.encode("UTF-8"), json.dumps(_json_payload, separators=(',', ':')).encode(), hashlib.sha256).hexdigest() h = hmac.new(secret.encode("UTF-8"), json.dumps(_json_payload, separators=(',', ':')).encode(), hashlib.sha256).hexdigest()
curlParams.insert(4,"-H") curlParams.insert(4, "-H")
curlParams.insert(5,f"X-Webhook-Signature: sha256={h}") curlParams.insert(5, f"X-Webhook-Signature: sha256={h}")
try: try:
# Execute CURL call # Execute CURL call
@@ -173,13 +179,11 @@ def send (text_data, html_data, json_data):
result = subprocess.run(curlParams, capture_output=True, text=True) result = subprocess.run(curlParams, capture_output=True, text=True)
response_stderr = result.stderr response_stderr = result.stderr
response_stdout = result.stdout response_stdout = result.stdout
# Write stdout and stderr into .log files for debugging if needed # Write stdout and stderr into .log files for debugging if needed
mylog('debug', [f'[{pluginName}] stdout: ', response_stdout]) mylog('debug', [f'[{pluginName}] stdout: ', response_stdout])
mylog('debug', [f'[{pluginName}] stderr: ', response_stderr]) mylog('debug', [f'[{pluginName}] stderr: ', response_stderr])
except subprocess.CalledProcessError as e: except subprocess.CalledProcessError as e:
# An error occurred, handle it # An error occurred, handle it
@@ -187,10 +191,9 @@ def send (text_data, html_data, json_data):
response_stderr = e.output response_stderr = e.output
return response_stdout, response_stderr
return response_stdout, response_stderr
# ------------------------------------------------------- # -------------------------------------------------------
if __name__ == '__main__': if __name__ == '__main__':
sys.exit(main()) sys.exit(main())

View File

@@ -1,7 +1,6 @@
#!/usr/bin/env python # !/usr/bin/env python
import os import os
import time import time
import pathlib
import argparse import argparse
import sys import sys
import re import re
@@ -9,16 +8,16 @@ import base64
import subprocess import subprocess
# Register NetAlertX directories # Register NetAlertX directories
INSTALL_PATH="/app" INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"]) sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from database import DB from database import DB # noqa: E402 [flake8 lint suppression]
from plugin_helper import Plugin_Objects, handleEmpty from plugin_helper import Plugin_Objects, handleEmpty # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger, append_line_to_file from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
from const import logPath, applicationPath from const import logPath # noqa: E402 [flake8 lint suppression]
import conf import conf # noqa: E402 [flake8 lint suppression]
from pytz import timezone from pytz import timezone # noqa: E402 [flake8 lint suppression]
# Make sure the TIMEZONE for logging is correct # Make sure the TIMEZONE for logging is correct
conf.tz = timezone(get_setting_value("TIMEZONE")) conf.tz = timezone(get_setting_value("TIMEZONE"))

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python # !/usr/bin/env python
import os import os
import sys import sys
@@ -6,17 +6,16 @@ INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"]) sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
pluginName = "ASUSWRT" pluginName = "ASUSWRT"
import asyncio import asyncio # noqa: E402 [flake8 lint suppression]
import aiohttp # noqa: E402 [flake8 lint suppression]
import aiohttp import conf # noqa: E402 [flake8 lint suppression]
import conf from asusrouter import AsusData, AsusRouter # noqa: E402 [flake8 lint suppression]
from asusrouter import AsusData, AsusRouter from asusrouter.modules.connection import ConnectionState # noqa: E402 [flake8 lint suppression]
from asusrouter.modules.connection import ConnectionState from const import logPath # noqa: E402 [flake8 lint suppression]
from const import logPath from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value from logger import Logger, mylog # noqa: E402 [flake8 lint suppression]
from logger import Logger, mylog from plugin_helper import (Plugin_Objects, handleEmpty) # noqa: E402 [flake8 lint suppression]
from plugin_helper import (Plugin_Objects, handleEmpty) from pytz import timezone # noqa: E402 [flake8 lint suppression]
from pytz import timezone
conf.tz = timezone(get_setting_value("TIMEZONE")) conf.tz = timezone(get_setting_value("TIMEZONE"))

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python3 # !/usr/bin/env python3
import os import os
import sys import sys
import socket import socket
@@ -8,14 +8,14 @@ from zeroconf import Zeroconf
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app') INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"]) sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from plugin_helper import Plugin_Objects from plugin_helper import Plugin_Objects # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from const import logPath from const import logPath # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
from database import DB from database import DB # noqa: E402 [flake8 lint suppression]
from models.device_instance import DeviceInstance from models.device_instance import DeviceInstance # noqa: E402 [flake8 lint suppression]
import conf import conf # noqa: E402 [flake8 lint suppression]
from pytz import timezone from pytz import timezone # noqa: E402 [flake8 lint suppression]
# Configure timezone and logging # Configure timezone and logging
conf.tz = timezone(get_setting_value("TIMEZONE")) conf.tz = timezone(get_setting_value("TIMEZONE"))
@@ -67,7 +67,7 @@ def resolve_mdns_name(ip: str, timeout: int = 5) -> str:
hostname = socket.getnameinfo((ip, 0), socket.NI_NAMEREQD)[0] hostname = socket.getnameinfo((ip, 0), socket.NI_NAMEREQD)[0]
zeroconf.close() zeroconf.close()
if hostname and hostname != ip: if hostname and hostname != ip:
mylog("debug", [f"[{pluginName}] Found mDNS name: {hostname}"]) mylog("debug", [f"[{pluginName}] Found mDNS name (rev_name): {hostname} ({rev_name})"])
return hostname return hostname
except Exception as e: except Exception as e:
mylog("debug", [f"[{pluginName}] Zeroconf lookup failed for {ip}: {e}"]) mylog("debug", [f"[{pluginName}] Zeroconf lookup failed for {ip}: {e}"])
@@ -89,7 +89,7 @@ def main():
timeout = get_setting_value("AVAHISCAN_RUN_TIMEOUT") timeout = get_setting_value("AVAHISCAN_RUN_TIMEOUT")
use_mock = "--mockdata" in sys.argv use_mock = "--mockdata" in sys.argv
if use_mock: if use_mock:
mylog("verbose", [f"[{pluginName}] Running in MOCK mode"]) mylog("verbose", [f"[{pluginName}] Running in MOCK mode"])
devices = [ devices = [
@@ -137,4 +137,4 @@ def main():
# Entrypoint # Entrypoint
# ============================================================================= # =============================================================================
if __name__ == "__main__": if __name__ == "__main__":
main() main()

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python # !/usr/bin/env python
import os import os
import argparse import argparse
@@ -11,11 +11,11 @@ from datetime import datetime
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app') INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"]) sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from logger import mylog, Logger from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
from const import logPath, fullDbPath from const import logPath, fullDbPath # noqa: E402 [flake8 lint suppression]
import conf import conf # noqa: E402 [flake8 lint suppression]
from pytz import timezone from pytz import timezone # noqa: E402 [flake8 lint suppression]
# Make sure the TIMEZONE for logging is correct # Make sure the TIMEZONE for logging is correct
conf.tz = timezone(get_setting_value('TIMEZONE')) conf.tz = timezone(get_setting_value('TIMEZONE'))
@@ -29,6 +29,7 @@ LOG_PATH = logPath + '/plugins'
LOG_FILE = os.path.join(LOG_PATH, f'script.{pluginName}.log') LOG_FILE = os.path.join(LOG_PATH, f'script.{pluginName}.log')
RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log') RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
def main(): def main():
# the script expects a parameter in the format of devices=device1,device2,... # the script expects a parameter in the format of devices=device1,device2,...
@@ -44,7 +45,7 @@ def main():
else: else:
overwrite = False overwrite = False
mylog('verbose', ['[CSVBCKP] In script']) mylog('verbose', ['[CSVBCKP] In script'])
# Connect to the App database # Connect to the App database
conn = sqlite3.connect(fullDbPath) conn = sqlite3.connect(fullDbPath)
@@ -64,7 +65,7 @@ def main():
fullPath = os.path.join(values.location.split('=')[1], filename) fullPath = os.path.join(values.location.split('=')[1], filename)
mylog('verbose', ['[CSVBCKP] Writing file ', fullPath]) mylog('verbose', ['[CSVBCKP] Writing file ', fullPath])
# Create a CSV file in the specified location # Create a CSV file in the specified location
with open(fullPath, 'w', newline='') as csvfile: with open(fullPath, 'w', newline='') as csvfile:
@@ -72,7 +73,7 @@ def main():
csv_writer = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_MINIMAL) csv_writer = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_MINIMAL)
# Wrap the header values in double quotes and write the header row # Wrap the header values in double quotes and write the header row
csv_writer.writerow([ '"' + col + '"' for col in columns]) csv_writer.writerow(['"' + col + '"' for col in columns])
# Fetch and write data rows # Fetch and write data rows
for row in cursor.fetchall(): for row in cursor.fetchall():
@@ -96,8 +97,8 @@ def main():
return 0 return 0
#=============================================================================== # ===============================================================================
# BEGIN # BEGIN
#=============================================================================== # ===============================================================================
if __name__ == '__main__': if __name__ == '__main__':
main() main()

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python # !/usr/bin/env python
import os import os
import sys import sys
@@ -8,11 +8,11 @@ import sqlite3
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app") INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"]) sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from logger import mylog, Logger from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
from const import logPath, fullDbPath from const import logPath, fullDbPath # noqa: E402 [flake8 lint suppression]
import conf import conf # noqa: E402 [flake8 lint suppression]
from pytz import timezone from pytz import timezone # noqa: E402 [flake8 lint suppression]
# Make sure the TIMEZONE for logging is correct # Make sure the TIMEZONE for logging is correct
conf.tz = timezone(get_setting_value("TIMEZONE")) conf.tz = timezone(get_setting_value("TIMEZONE"))
@@ -81,7 +81,7 @@ def cleanup_database(
) )
cursor.execute( cursor.execute(
"""DELETE from Online_History where "Index" not in ( """DELETE from Online_History where "Index" not in (
SELECT "Index" from Online_History SELECT "Index" from Online_History
order by Scan_Date desc limit 150)""" order by Scan_Date desc limit 150)"""
) )
@@ -94,7 +94,7 @@ def cleanup_database(
], ],
) )
cursor.execute( cursor.execute(
f"""DELETE FROM Events f"""DELETE FROM Events
WHERE eve_DateTime <= date('now', '-{str(DAYS_TO_KEEP_EVENTS)} day')""" WHERE eve_DateTime <= date('now', '-{str(DAYS_TO_KEEP_EVENTS)} day')"""
) )
# ----------------------------------------------------- # -----------------------------------------------------
@@ -107,11 +107,11 @@ def cleanup_database(
) )
# Build the SQL query to delete entries that exceed the limit per unique "Plugin" column entry # Build the SQL query to delete entries that exceed the limit per unique "Plugin" column entry
delete_query = f"""DELETE FROM Plugins_History delete_query = f"""DELETE FROM Plugins_History
WHERE "Index" NOT IN ( WHERE "Index" NOT IN (
SELECT "Index" SELECT "Index"
FROM ( FROM (
SELECT "Index", SELECT "Index",
ROW_NUMBER() OVER(PARTITION BY "Plugin" ORDER BY DateTimeChanged DESC) AS row_num ROW_NUMBER() OVER(PARTITION BY "Plugin" ORDER BY DateTimeChanged DESC) AS row_num
FROM Plugins_History FROM Plugins_History
) AS ranked_objects ) AS ranked_objects
@@ -133,11 +133,11 @@ def cleanup_database(
) )
# Build the SQL query to delete entries # Build the SQL query to delete entries
delete_query = f"""DELETE FROM Notifications delete_query = f"""DELETE FROM Notifications
WHERE "Index" NOT IN ( WHERE "Index" NOT IN (
SELECT "Index" SELECT "Index"
FROM ( FROM (
SELECT "Index", SELECT "Index",
ROW_NUMBER() OVER(PARTITION BY "Notifications" ORDER BY DateTimeCreated DESC) AS row_num ROW_NUMBER() OVER(PARTITION BY "Notifications" ORDER BY DateTimeCreated DESC) AS row_num
FROM Notifications FROM Notifications
) AS ranked_objects ) AS ranked_objects
@@ -153,11 +153,11 @@ def cleanup_database(
mylog("verbose", [f"[{pluginName}] Trim AppEvents to less than {histCount}"]) mylog("verbose", [f"[{pluginName}] Trim AppEvents to less than {histCount}"])
# Build the SQL query to delete entries # Build the SQL query to delete entries
delete_query = f"""DELETE FROM AppEvents delete_query = f"""DELETE FROM AppEvents
WHERE "Index" NOT IN ( WHERE "Index" NOT IN (
SELECT "Index" SELECT "Index"
FROM ( FROM (
SELECT "Index", SELECT "Index",
ROW_NUMBER() OVER(PARTITION BY "AppEvents" ORDER BY DateTimeCreated DESC) AS row_num ROW_NUMBER() OVER(PARTITION BY "AppEvents" ORDER BY DateTimeCreated DESC) AS row_num
FROM AppEvents FROM AppEvents
) AS ranked_objects ) AS ranked_objects

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python # !/usr/bin/env python
import os import os
import argparse import argparse
@@ -9,11 +9,11 @@ import subprocess
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app') INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"]) sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from logger import mylog, Logger from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value, check_IP_format from helper import get_setting_value, check_IP_format # noqa: E402 [flake8 lint suppression]
from const import logPath from const import logPath # noqa: E402 [flake8 lint suppression]
import conf import conf # noqa: E402 [flake8 lint suppression]
from pytz import timezone from pytz import timezone # noqa: E402 [flake8 lint suppression]
# Make sure the TIMEZONE for logging is correct # Make sure the TIMEZONE for logging is correct
conf.tz = timezone(get_setting_value('TIMEZONE')) conf.tz = timezone(get_setting_value('TIMEZONE'))
@@ -28,91 +28,88 @@ LOG_FILE = os.path.join(LOG_PATH, f'script.{pluginName}.log')
RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log') RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
def main(): def main():
mylog('verbose', [f'[{pluginName}] In script']) mylog('verbose', [f'[{pluginName}] In script'])
parser = argparse.ArgumentParser(description='Check internet connectivity and IP') parser = argparse.ArgumentParser(description='Check internet connectivity and IP')
parser.add_argument('prev_ip', action="store", help="Previous IP address to compare against the current IP") parser.add_argument('prev_ip', action="store", help="Previous IP address to compare against the current IP")
parser.add_argument('DDNS_UPDATE_URL', action="store", help="URL for updating Dynamic DNS (DDNS)") parser.add_argument('DDNS_UPDATE_URL', action="store", help="URL for updating Dynamic DNS (DDNS)")
parser.add_argument('DDNS_USER', action="store", help="Username for Dynamic DNS (DDNS) authentication") parser.add_argument('DDNS_USER', action="store", help="Username for Dynamic DNS (DDNS) authentication")
parser.add_argument('DDNS_PASSWORD', action="store", help="Password for Dynamic DNS (DDNS) authentication") parser.add_argument('DDNS_PASSWORD', action="store", help="Password for Dynamic DNS (DDNS) authentication")
parser.add_argument('DDNS_DOMAIN', action="store", help="Dynamic DNS (DDNS) domain name") parser.add_argument('DDNS_DOMAIN', action="store", help="Dynamic DNS (DDNS) domain name")
values = parser.parse_args() values = parser.parse_args()
PREV_IP = values.prev_ip.split('=')[1] PREV_IP = values.prev_ip.split('=')[1]
DDNS_UPDATE_URL = values.DDNS_UPDATE_URL.split('=')[1] DDNS_UPDATE_URL = values.DDNS_UPDATE_URL.split('=')[1]
DDNS_USER = values.DDNS_USER.split('=')[1] DDNS_USER = values.DDNS_USER.split('=')[1]
DDNS_PASSWORD = values.DDNS_PASSWORD.split('=')[1] DDNS_PASSWORD = values.DDNS_PASSWORD.split('=')[1]
DDNS_DOMAIN = values.DDNS_DOMAIN.split('=')[1] DDNS_DOMAIN = values.DDNS_DOMAIN.split('=')[1]
# perform the new IP lookup and DDNS tasks if enabled # perform the new IP lookup and DDNS tasks if enabled
ddns_update( DDNS_UPDATE_URL, DDNS_USER, DDNS_PASSWORD, DDNS_DOMAIN, PREV_IP) ddns_update(DDNS_UPDATE_URL, DDNS_USER, DDNS_PASSWORD, DDNS_DOMAIN, PREV_IP)
mylog('verbose', [f'[{pluginName}] Finished '])
mylog('verbose', [f'[{pluginName}] Finished '])
return 0 return 0
#=============================================================================== # ===============================================================================
# INTERNET IP CHANGE # INTERNET IP CHANGE
#=============================================================================== # ===============================================================================
def ddns_update ( DDNS_UPDATE_URL, DDNS_USER, DDNS_PASSWORD, DDNS_DOMAIN, PREV_IP ): def ddns_update(DDNS_UPDATE_URL, DDNS_USER, DDNS_PASSWORD, DDNS_DOMAIN, PREV_IP):
# Update DDNS record if enabled and IP is different # Update DDNS record if enabled and IP is different
# Get Dynamic DNS IP # Get Dynamic DNS IP
mylog('verbose', [f'[{pluginName}] Retrieving Dynamic DNS IP']) mylog('verbose', [f'[{pluginName}] Retrieving Dynamic DNS IP'])
dns_IP = get_dynamic_DNS_IP(DDNS_DOMAIN) dns_IP = get_dynamic_DNS_IP(DDNS_DOMAIN)
# Check Dynamic DNS IP # Check Dynamic DNS IP
if dns_IP == "" or dns_IP == "0.0.0.0" : if dns_IP == "" or dns_IP == "0.0.0.0" :
mylog('none', [f'[{pluginName}] Error retrieving Dynamic DNS IP']) mylog('none', [f'[{pluginName}] Error retrieving Dynamic DNS IP'])
mylog('none', [f'[{pluginName}] ', dns_IP]) mylog('none', [f'[{pluginName}] ', dns_IP])
# Check DNS Change # Check DNS Change
if dns_IP != PREV_IP : if dns_IP != PREV_IP :
mylog('none', [f'[{pluginName}] Updating Dynamic DNS IP']) mylog('none', [f'[{pluginName}] Updating Dynamic DNS IP'])
message = set_dynamic_DNS_IP (DDNS_UPDATE_URL, DDNS_USER, DDNS_PASSWORD, DDNS_DOMAIN) message = set_dynamic_DNS_IP(DDNS_UPDATE_URL, DDNS_USER, DDNS_PASSWORD, DDNS_DOMAIN)
mylog('none', [f'[{pluginName}] ', message]) mylog('none', [f'[{pluginName}] ', message])
# plugin_objects = Plugin_Objects(RESULT_FILE) # plugin_objects = Plugin_Objects(RESULT_FILE)
# plugin_objects.add_object( # plugin_objects.add_object(
# primaryId = 'Internet', # MAC (Device Name) # primaryId = 'Internet', # MAC (Device Name)
# secondaryId = new_internet_IP, # IP Address # secondaryId = new_internet_IP, # IP Address
# watched1 = f'Previous IP: {PREV_IP}', # watched1 = f'Previous IP: {PREV_IP}',
# watched2 = '', # watched2 = '',
# watched3 = '', # watched3 = '',
# watched4 = '', # watched4 = '',
# extra = f'Previous IP: {PREV_IP}', # extra = f'Previous IP: {PREV_IP}',
# foreignKey = 'Internet') # foreignKey = 'Internet')
# plugin_objects.write_result_file() # plugin_objects.write_result_file()
#-------------------------------------------------------------------------------
def get_dynamic_DNS_IP (DDNS_DOMAIN): # -------------------------------------------------------------------------------
def get_dynamic_DNS_IP(DDNS_DOMAIN):
# Using supplied DNS server # Using supplied DNS server
dig_args = ['dig', '+short', DDNS_DOMAIN] dig_args = ['dig', '+short', DDNS_DOMAIN]
try: try:
# try runnning a subprocess # try runnning a subprocess
dig_output = subprocess.check_output (dig_args, universal_newlines=True) dig_output = subprocess.check_output(dig_args, universal_newlines=True)
mylog('none', [f'[{pluginName}] DIG output :', dig_output]) mylog('none', [f'[{pluginName}] DIG output :', dig_output])
except subprocess.CalledProcessError as e: except subprocess.CalledProcessError as e:
# An error occured, handle it # An error occured, handle it
mylog('none', [f'[{pluginName}] ⚠ ERROR - ', e.output]) mylog('none', [f'[{pluginName}] ⚠ ERROR - ', e.output])
dig_output = '' # probably no internet dig_output = '' # probably no internet
# Check result is an IP # Check result is an IP
IP = check_IP_format (dig_output) IP = check_IP_format(dig_output)
# Handle invalid response # Handle invalid response
if IP == '': if IP == '':
@@ -120,28 +117,27 @@ def get_dynamic_DNS_IP (DDNS_DOMAIN):
return IP return IP
#-------------------------------------------------------------------------------
def set_dynamic_DNS_IP (DDNS_UPDATE_URL, DDNS_USER, DDNS_PASSWORD, DDNS_DOMAIN): # -------------------------------------------------------------------------------
def set_dynamic_DNS_IP(DDNS_UPDATE_URL, DDNS_USER, DDNS_PASSWORD, DDNS_DOMAIN):
try: try:
# try runnning a subprocess # try runnning a subprocess
# Update Dynamic IP # Update Dynamic IP
curl_output = subprocess.check_output (['curl', curl_output = subprocess.check_output([
'-s', 'curl',
DDNS_UPDATE_URL + '-s',
'username=' + DDNS_USER + DDNS_UPDATE_URL + 'username=' + DDNS_USER + '&password=' + DDNS_PASSWORD + '&hostname=' + DDNS_DOMAIN],
'&password=' + DDNS_PASSWORD + universal_newlines=True)
'&hostname=' + DDNS_DOMAIN],
universal_newlines=True)
except subprocess.CalledProcessError as e: except subprocess.CalledProcessError as e:
# An error occured, handle it # An error occured, handle it
mylog('none', [f'[{pluginName}] ⚠ ERROR - ',e.output]) mylog('none', [f'[{pluginName}] ⚠ ERROR - ', e.output])
curl_output = "" curl_output = ""
return curl_output return curl_output
#=============================================================================== # ===============================================================================
# BEGIN # BEGIN
#=============================================================================== # ===============================================================================
if __name__ == '__main__': if __name__ == '__main__':
main() main()

View File

@@ -1,22 +1,22 @@
#!/usr/bin/env python # !/usr/bin/env python
from __future__ import unicode_literals from __future__ import unicode_literals
import argparse import argparse
import os import os
import sys import sys
import chardet import chardet
# Register NetAlertX directories # Register NetAlertX directories
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app') INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"]) sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from plugin_helper import Plugin_Objects, handleEmpty, is_mac from plugin_helper import Plugin_Objects, handleEmpty, is_mac # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from dhcp_leases import DhcpLeases from dhcp_leases import DhcpLeases # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
import conf import conf # noqa: E402 [flake8 lint suppression]
from const import logPath from const import logPath # noqa: E402 [flake8 lint suppression]
from pytz import timezone from pytz import timezone # noqa: E402 [flake8 lint suppression]
# Make sure the TIMEZONE for logging is correct # Make sure the TIMEZONE for logging is correct
conf.tz = timezone(get_setting_value('TIMEZONE')) conf.tz = timezone(get_setting_value('TIMEZONE'))
@@ -24,34 +24,38 @@ conf.tz = timezone(get_setting_value('TIMEZONE'))
# Make sure log level is initialized correctly # Make sure log level is initialized correctly
Logger(get_setting_value('LOG_LEVEL')) Logger(get_setting_value('LOG_LEVEL'))
pluginName= 'DHCPLSS' pluginName = 'DHCPLSS'
LOG_PATH = logPath + '/plugins' LOG_PATH = logPath + '/plugins'
LOG_FILE = os.path.join(LOG_PATH, f'script.{pluginName}.log') LOG_FILE = os.path.join(LOG_PATH, f'script.{pluginName}.log')
RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log') RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
# ------------------------------------------------------------- # -------------------------------------------------------------
def main(): def main():
mylog('verbose', [f'[{pluginName}] In script']) mylog('verbose', [f'[{pluginName}] In script'])
last_run_logfile = open(RESULT_FILE, 'a') last_run_logfile = open(RESULT_FILE, 'a')
last_run_logfile.write("") last_run_logfile.write("")
parser = argparse.ArgumentParser(description='Import devices from dhcp.leases files') parser = argparse.ArgumentParser(description='Import devices from dhcp.leases files')
parser.add_argument('paths', action="store", help="absolute dhcp.leases file paths to check separated by ','") parser.add_argument(
'paths',
action="store",
help="absolute dhcp.leases file paths to check separated by ','"
)
values = parser.parse_args() values = parser.parse_args()
plugin_objects = Plugin_Objects(RESULT_FILE) plugin_objects = Plugin_Objects(RESULT_FILE)
if values.paths: if values.paths:
for path in values.paths.split('=')[1].split(','): for path in values.paths.split('=')[1].split(','):
plugin_objects = get_entries(path, plugin_objects) plugin_objects = get_entries(path, plugin_objects)
mylog('verbose', [f'[{pluginName}] {len(plugin_objects)} Entries found in "{path}"']) mylog('verbose', [f'[{pluginName}] {len(plugin_objects)} Entries found in "{path}"'])
plugin_objects.write_result_file() plugin_objects.write_result_file()
# ------------------------------------------------------------- # -------------------------------------------------------------
def get_entries(path, plugin_objects): def get_entries(path, plugin_objects):
@@ -66,7 +70,7 @@ def get_entries(path, plugin_objects):
# Use the detected encoding # Use the detected encoding
encoding = result['encoding'] encoding = result['encoding']
# Order: MAC, IP, IsActive, NAME, Hardware # Order: MAC, IP, IsActive, NAME, Hardware
# Handle pihole-specific dhcp.leases files # Handle pihole-specific dhcp.leases files
if 'pihole' in path: if 'pihole' in path:
with open(path, 'r', encoding=encoding, errors='replace') as f: with open(path, 'r', encoding=encoding, errors='replace') as f:
@@ -111,9 +115,9 @@ def get_entries(path, plugin_objects):
if is_mac(lease.ethernet): if is_mac(lease.ethernet):
plugin_objects.add_object( plugin_objects.add_object(
primaryId = handleEmpty(lease.ethernet), primaryId = handleEmpty(lease.ethernet),
secondaryId = handleEmpty(lease.ip), secondaryId = handleEmpty(lease.ip),
watched1 = handleEmpty(lease.active), watched1 = handleEmpty(lease.active),
watched2 = handleEmpty(lease.hostname), watched2 = handleEmpty(lease.hostname),
watched3 = handleEmpty(lease.hardware), watched3 = handleEmpty(lease.hardware),
watched4 = handleEmpty(lease.binding_state), watched4 = handleEmpty(lease.binding_state),
@@ -122,5 +126,6 @@ def get_entries(path, plugin_objects):
) )
return plugin_objects return plugin_objects
if __name__ == '__main__':
main() if __name__ == '__main__':
main()

View File

@@ -1,9 +1,8 @@
#!/usr/bin/env python # !/usr/bin/env python
# Based on the work of https://github.com/leiweibau/Pi.Alert # Based on the work of https://github.com/leiweibau/Pi.Alert
import subprocess import subprocess
import os import os
from datetime import datetime
import sys import sys
@@ -11,12 +10,12 @@ import sys
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app') INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"]) sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from plugin_helper import Plugin_Objects, Plugin_Object from plugin_helper import Plugin_Objects, Plugin_Object # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
import conf import conf # noqa: E402 [flake8 lint suppression]
from pytz import timezone from pytz import timezone # noqa: E402 [flake8 lint suppression]
from const import logPath from const import logPath # noqa: E402 [flake8 lint suppression]
# Make sure the TIMEZONE for logging is correct # Make sure the TIMEZONE for logging is correct
@@ -31,13 +30,14 @@ LOG_PATH = logPath + '/plugins'
LOG_FILE = os.path.join(LOG_PATH, f'script.{pluginName}.log') LOG_FILE = os.path.join(LOG_PATH, f'script.{pluginName}.log')
RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log') RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
def main(): def main():
mylog('verbose', ['[DHCPSRVS] In script']) mylog('verbose', ['[DHCPSRVS] In script'])
last_run_logfile = open(RESULT_FILE, 'a') last_run_logfile = open(RESULT_FILE, 'a')
last_run_logfile.write("") last_run_logfile.write("")
plugin_objects = Plugin_Objects(RESULT_FILE) plugin_objects = Plugin_Objects(RESULT_FILE)
timeoutSec = get_setting_value('DHCPSRVS_RUN_TIMEOUT') timeoutSec = get_setting_value('DHCPSRVS_RUN_TIMEOUT')
@@ -46,10 +46,10 @@ def main():
try: try:
# Number of DHCP discovery probes to send # Number of DHCP discovery probes to send
dhcp_probes = 1 dhcp_probes = 1
# Initialize a list to store output lines from the scan # Initialize a list to store output lines from the scan
newLines = [] newLines = []
for _ in range(dhcp_probes): for _ in range(dhcp_probes):
output = subprocess.check_output(nmapArgs, universal_newlines=True, stderr=subprocess.STDOUT, timeout=timeoutSec) output = subprocess.check_output(nmapArgs, universal_newlines=True, stderr=subprocess.STDOUT, timeout=timeoutSec)
newLines += output.split("\n") newLines += output.split("\n")
@@ -57,9 +57,9 @@ def main():
newEntries = [] newEntries = []
for line in newLines: for line in newLines:
mylog('verbose', [f'[DHCPSRVS] Processing line: {line} ']) mylog('verbose', [f'[DHCPSRVS] Processing line: {line} '])
if 'Response ' in line and ' of ' in line: if 'Response ' in line and ' of ' in line:
newEntries.append(Plugin_Object()) newEntries.append(Plugin_Object())
elif 'Server Identifier' in line: elif 'Server Identifier' in line:
@@ -85,7 +85,7 @@ def main():
newEntries[-1].extra += ',' + newVal newEntries[-1].extra += ',' + newVal
for e in newEntries: for e in newEntries:
plugin_objects.add_object( plugin_objects.add_object(
primaryId=e.primaryId, primaryId=e.primaryId,
secondaryId=e.secondaryId, secondaryId=e.secondaryId,
@@ -101,5 +101,6 @@ def main():
except Exception as e: except Exception as e:
mylog('verbose', ['[DHCPSRVS] Error in main:', str(e)]) mylog('verbose', ['[DHCPSRVS] Error in main:', str(e)])
if __name__ == '__main__': if __name__ == '__main__':
main() main()

View File

@@ -1,5 +1,4 @@
#!/usr/bin/env python # !/usr/bin/env python
import os import os
import sys import sys
import subprocess import subprocess
@@ -8,14 +7,14 @@ import subprocess
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app') INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"]) sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from plugin_helper import Plugin_Objects from plugin_helper import Plugin_Objects # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from const import logPath from const import logPath # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
from database import DB from database import DB # noqa: E402 [flake8 lint suppression]
from models.device_instance import DeviceInstance from models.device_instance import DeviceInstance # noqa: E402 [flake8 lint suppression]
import conf import conf # noqa: E402 [flake8 lint suppression]
from pytz import timezone from pytz import timezone # noqa: E402 [flake8 lint suppression]
# Make sure the TIMEZONE for logging is correct # Make sure the TIMEZONE for logging is correct
conf.tz = timezone(get_setting_value('TIMEZONE')) conf.tz = timezone(get_setting_value('TIMEZONE'))
@@ -35,7 +34,7 @@ plugin_objects = Plugin_Objects(RESULT_FILE)
def main(): def main():
mylog('verbose', [f'[{pluginName}] In script']) mylog('verbose', [f'[{pluginName}] In script'])
timeout = get_setting_value('DIGSCAN_RUN_TIMEOUT') timeout = get_setting_value('DIGSCAN_RUN_TIMEOUT')
@@ -50,13 +49,13 @@ def main():
device_handler = DeviceInstance(db) device_handler = DeviceInstance(db)
# Retrieve devices # Retrieve devices
if get_setting_value("REFRESH_FQDN"): if get_setting_value("REFRESH_FQDN"):
devices = device_handler.getAll() devices = device_handler.getAll()
else: else:
devices = device_handler.getUnknown() devices = device_handler.getUnknown()
mylog('verbose', [f'[{pluginName}] Devices count: {len(devices)}']) mylog('verbose', [f'[{pluginName}] Devices count: {len(devices)}'])
# TEST - below is a WINDOWS host IP # TEST - below is a WINDOWS host IP
# execute_name_lookup('192.168.1.121', timeout) # execute_name_lookup('192.168.1.121', timeout)
@@ -65,27 +64,27 @@ def main():
if domain_name != '': if domain_name != '':
plugin_objects.add_object( plugin_objects.add_object(
# "MAC", "IP", "Server", "Name" primaryId = device['devMac'],
primaryId = device['devMac'], secondaryId = device['devLastIP'],
secondaryId = device['devLastIP'], watched1 = dns_server,
watched1 = dns_server, watched2 = domain_name,
watched2 = domain_name, watched3 = '',
watched3 = '', watched4 = '',
watched4 = '', extra = '',
extra = '', foreignKey = device['devMac']
foreignKey = device['devMac']) )
plugin_objects.write_result_file() plugin_objects.write_result_file()
mylog('verbose', [f'[{pluginName}] Script finished'])
mylog('verbose', [f'[{pluginName}] Script finished'])
return 0 return 0
#===============================================================================
# ===============================================================================
# Execute scan # Execute scan
#=============================================================================== # ===============================================================================
def execute_name_lookup (ip, timeout): def execute_name_lookup(ip, timeout):
""" """
Execute the DIG command on IP. Execute the DIG command on IP.
""" """
@@ -97,32 +96,38 @@ def execute_name_lookup (ip, timeout):
try: try:
mylog('verbose', [f'[{pluginName}] DEBUG CMD :', args]) mylog('verbose', [f'[{pluginName}] DEBUG CMD :', args])
# try runnning a subprocess with a forced (timeout) in case the subprocess hangs # try runnning a subprocess with a forced (timeout) in case the subprocess hangs
output = subprocess.check_output (args, universal_newlines=True, stderr=subprocess.STDOUT, timeout=(timeout), text=True).strip() output = subprocess.check_output(
args,
universal_newlines=True,
stderr=subprocess.STDOUT,
timeout=(timeout),
text=True
).strip()
mylog('verbose', [f'[{pluginName}] DEBUG OUTPUT : {output}']) mylog('verbose', [f'[{pluginName}] DEBUG OUTPUT : {output}'])
domain_name = output domain_name = output
dns_server = '' dns_server = ''
mylog('verbose', [f'[{pluginName}] Domain Name: {domain_name}']) mylog('verbose', [f'[{pluginName}] Domain Name: {domain_name}'])
return domain_name, dns_server return domain_name, dns_server
except subprocess.CalledProcessError as e: except subprocess.CalledProcessError as e:
mylog('verbose', [f'[{pluginName}] ⚠ ERROR - {e.output}']) mylog('verbose', [f'[{pluginName}] ⚠ ERROR - {e.output}'])
except subprocess.TimeoutExpired:
mylog('verbose', [f'[{pluginName}] TIMEOUT - the process forcefully terminated as timeout reached'])
if output == "": # check if the subprocess failed except subprocess.TimeoutExpired:
mylog('verbose', [f'[{pluginName}] Scan: FAIL - check logs']) mylog('verbose', [f'[{pluginName}] TIMEOUT - the process forcefully terminated as timeout reached'])
else:
if output == "": # check if the subprocess failed
mylog('verbose', [f'[{pluginName}] Scan: FAIL - check logs'])
else:
mylog('verbose', [f'[{pluginName}] Scan: SUCCESS']) mylog('verbose', [f'[{pluginName}] Scan: SUCCESS'])
return '', '' return '', ''
if __name__ == '__main__': if __name__ == '__main__':
main() main()

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python # !/usr/bin/env python
import os import os
import sys import sys
@@ -17,11 +17,11 @@ from aiofreepybox.exceptions import NotOpenError, AuthorizationError
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app') INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"]) sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from plugin_helper import Plugin_Objects from plugin_helper import Plugin_Objects # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from const import logPath from const import logPath # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
import conf import conf # noqa: E402 [flake8 lint suppression]
# Make sure the TIMEZONE for logging is correct # Make sure the TIMEZONE for logging is correct
conf.tz = timezone(get_setting_value("TIMEZONE")) conf.tz = timezone(get_setting_value("TIMEZONE"))
@@ -79,6 +79,7 @@ def map_device_type(type: str):
mylog("minimal", [f"[{pluginName}] Unknown device type: {type}"]) mylog("minimal", [f"[{pluginName}] Unknown device type: {type}"])
return device_type_map["other"] return device_type_map["other"]
async def get_device_data(api_version: int, api_address: str, api_port: int): async def get_device_data(api_version: int, api_address: str, api_port: int):
# ensure existence of db path # ensure existence of db path
config_base = Path(os.getenv("NETALERTX_CONFIG", "/data/config")) config_base = Path(os.getenv("NETALERTX_CONFIG", "/data/config"))

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python # !/usr/bin/env python
# test script by running: # test script by running:
# tbc # tbc
@@ -11,14 +11,14 @@ import re
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app') INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"]) sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from plugin_helper import Plugin_Objects from plugin_helper import Plugin_Objects # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
from const import logPath from const import logPath # noqa: E402 [flake8 lint suppression]
from database import DB from database import DB # noqa: E402 [flake8 lint suppression]
from models.device_instance import DeviceInstance from models.device_instance import DeviceInstance # noqa: E402 [flake8 lint suppression]
import conf import conf # noqa: E402 [flake8 lint suppression]
from pytz import timezone from pytz import timezone # noqa: E402 [flake8 lint suppression]
# Make sure the TIMEZONE for logging is correct # Make sure the TIMEZONE for logging is correct
conf.tz = timezone(get_setting_value('TIMEZONE')) conf.tz = timezone(get_setting_value('TIMEZONE'))
@@ -33,16 +33,14 @@ LOG_FILE = os.path.join(LOG_PATH, f'script.{pluginName}.log')
RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log') RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
def main(): def main():
mylog('verbose', [f'[{pluginName}] In script']) mylog('verbose', [f'[{pluginName}] In script'])
timeout = get_setting_value('ICMP_RUN_TIMEOUT') timeout = get_setting_value('ICMP_RUN_TIMEOUT')
args = get_setting_value('ICMP_ARGS') args = get_setting_value('ICMP_ARGS')
in_regex = get_setting_value('ICMP_IN_REGEX') in_regex = get_setting_value('ICMP_IN_REGEX')
# Create a database connection # Create a database connection
db = DB() # instance of class DB db = DB() # instance of class DB
db.open() db.open()
@@ -61,46 +59,45 @@ def main():
# Filter devices based on the regex match # Filter devices based on the regex match
filtered_devices = [ filtered_devices = [
device for device in all_devices device for device in all_devices
if regex_pattern.match(device['devLastIP']) if regex_pattern.match(device['devLastIP'])
] ]
mylog('verbose', [f'[{pluginName}] Devices to PING: {len(filtered_devices)}'])
mylog('verbose', [f'[{pluginName}] Devices to PING: {len(filtered_devices)}'])
for device in filtered_devices: for device in filtered_devices:
is_online, output = execute_scan(device['devLastIP'], timeout, args) is_online, output = execute_scan(device['devLastIP'], timeout, args)
mylog('verbose', [f"[{pluginName}] ip: {device['devLastIP']} is_online: {is_online}"]) mylog('verbose', [f"[{pluginName}] ip: {device['devLastIP']} is_online: {is_online}"])
if is_online: if is_online:
plugin_objects.add_object( plugin_objects.add_object(
# "MAC", "IP", "Name", "Output" # "MAC", "IP", "Name", "Output"
primaryId = device['devMac'], primaryId = device['devMac'],
secondaryId = device['devLastIP'], secondaryId = device['devLastIP'],
watched1 = device['devName'], watched1 = device['devName'],
watched2 = output.replace('\n',''), watched2 = output.replace('\n', ''),
watched3 = '', watched3 = '',
watched4 = '', watched4 = '',
extra = '', extra = '',
foreignKey = device['devMac']) foreignKey = device['devMac']
)
plugin_objects.write_result_file() plugin_objects.write_result_file()
mylog('verbose', [f'[{pluginName}] Script finished'])
mylog('verbose', [f'[{pluginName}] Script finished'])
return 0 return 0
#===============================================================================
# ===============================================================================
# Execute scan # Execute scan
#=============================================================================== # ===============================================================================
def execute_scan (ip, timeout, args): def execute_scan(ip, timeout, args):
""" """
Execute the ICMP command on IP. Execute the ICMP command on IP.
""" """
icmp_args = ['ping'] + args.split() + [ip] icmp_args = ['ping'] + args.split() + [ip]
# Execute command # Execute command
@@ -108,12 +105,18 @@ def execute_scan (ip, timeout, args):
try: try:
# try runnning a subprocess with a forced (timeout) in case the subprocess hangs # try runnning a subprocess with a forced (timeout) in case the subprocess hangs
output = subprocess.check_output (icmp_args, universal_newlines=True, stderr=subprocess.STDOUT, timeout=(timeout), text=True) output = subprocess.check_output(
icmp_args,
universal_newlines=True,
stderr=subprocess.STDOUT,
timeout=(timeout),
text=True
)
mylog('verbose', [f'[{pluginName}] DEBUG OUTPUT : {output}']) mylog('verbose', [f'[{pluginName}] DEBUG OUTPUT : {output}'])
# Parse output using case-insensitive regular expressions # Parse output using case-insensitive regular expressions
#Synology-NAS:/# ping -i 0.5 -c 3 -W 8 -w 9 192.168.1.82 # Synology-NAS:/# ping -i 0.5 -c 3 -W 8 -w 9 192.168.1.82
# PING 192.168.1.82 (192.168.1.82): 56 data bytes # PING 192.168.1.82 (192.168.1.82): 56 data bytes
# 64 bytes from 192.168.1.82: seq=0 ttl=64 time=0.080 ms # 64 bytes from 192.168.1.82: seq=0 ttl=64 time=0.080 ms
# 64 bytes from 192.168.1.82: seq=1 ttl=64 time=0.081 ms # 64 bytes from 192.168.1.82: seq=1 ttl=64 time=0.081 ms
@@ -130,7 +133,7 @@ def execute_scan (ip, timeout, args):
# --- 192.168.1.92 ping statistics --- # --- 192.168.1.92 ping statistics ---
# 3 packets transmitted, 0 packets received, 100% packet loss # 3 packets transmitted, 0 packets received, 100% packet loss
# TODO: parse output and return True if online, False if Offline (100% packet loss, bad address) # TODO: parse output and return True if online, False if Offline (100% packet loss, bad address)
is_online = True is_online = True
# Check for 0% packet loss in the output # Check for 0% packet loss in the output
@@ -145,22 +148,20 @@ def execute_scan (ip, timeout, args):
except subprocess.CalledProcessError as e: except subprocess.CalledProcessError as e:
# An error occurred, handle it # An error occurred, handle it
mylog('verbose', [f'[{pluginName}] ⚠ ERROR - check logs']) mylog('verbose', [f'[{pluginName}] ⚠ ERROR - check logs'])
mylog('verbose', [f'[{pluginName}]', e.output]) mylog('verbose', [f'[{pluginName}]', e.output])
return False, output return False, output
except subprocess.TimeoutExpired: except subprocess.TimeoutExpired:
mylog('verbose', [f'[{pluginName}] TIMEOUT - the process forcefully terminated as timeout reached']) mylog('verbose', [f'[{pluginName}] TIMEOUT - the process forcefully terminated as timeout reached'])
return False, output return False, output
return False, output return False, output
#===============================================================================
# ===============================================================================
# BEGIN # BEGIN
#=============================================================================== # ===============================================================================
if __name__ == '__main__': if __name__ == '__main__':
main() main()

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python # !/usr/bin/env python
import os import os
import time import time
@@ -11,13 +11,13 @@ import re
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app') INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"]) sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from plugin_helper import Plugin_Objects from plugin_helper import Plugin_Objects # noqa: E402 [flake8 lint suppression]
from utils.datetime_utils import timeNowDB from utils.datetime_utils import timeNowDB # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger, append_line_to_file from logger import mylog, Logger, append_line_to_file # noqa: E402 [flake8 lint suppression]
from helper import check_IP_format, get_setting_value from helper import check_IP_format, get_setting_value # noqa: E402 [flake8 lint suppression]
from const import logPath from const import logPath # noqa: E402 [flake8 lint suppression]
import conf import conf # noqa: E402 [flake8 lint suppression]
from pytz import timezone from pytz import timezone # noqa: E402 [flake8 lint suppression]
# Make sure the TIMEZONE for logging is correct # Make sure the TIMEZONE for logging is correct
conf.tz = timezone(get_setting_value('TIMEZONE')) conf.tz = timezone(get_setting_value('TIMEZONE'))
@@ -31,39 +31,39 @@ LOG_PATH = logPath + '/plugins'
LOG_FILE = os.path.join(LOG_PATH, f'script.{pluginName}.log') LOG_FILE = os.path.join(LOG_PATH, f'script.{pluginName}.log')
RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log') RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
no_internet_ip = '0.0.0.0' no_internet_ip = '0.0.0.0'
def main(): def main():
mylog('verbose', [f'[{pluginName}] In script']) mylog('verbose', [f'[{pluginName}] In script'])
parser = argparse.ArgumentParser(description='Check internet connectivity and IP') parser = argparse.ArgumentParser(description='Check internet connectivity and IP')
parser.add_argument('prev_ip', action="store", help="Previous IP address to compare against the current IP") parser.add_argument('prev_ip', action="store", help="Previous IP address to compare against the current IP")
parser.add_argument('DIG_GET_IP_ARG', action="store", help="Arguments for the 'dig' command to retrieve the IP address") # unused parser.add_argument('DIG_GET_IP_ARG', action="store", help="Arguments for the 'dig' command to retrieve the IP address") # unused
values = parser.parse_args() values = parser.parse_args()
PREV_IP = values.prev_ip.split('=')[1] PREV_IP = values.prev_ip.split('=')[1]
DIG_GET_IP_ARG = get_setting_value("INTRNT_DIG_GET_IP_ARG") DIG_GET_IP_ARG = get_setting_value("INTRNT_DIG_GET_IP_ARG")
new_internet_IP = no_internet_ip new_internet_IP = no_internet_ip
mylog('verbose', [f'[{pluginName}] INTRNT_DIG_GET_IP_ARG: ', DIG_GET_IP_ARG]) mylog('verbose', [f'[{pluginName}] INTRNT_DIG_GET_IP_ARG: ', DIG_GET_IP_ARG])
# METHOD 1: dig # METHOD 1: dig
# perform the new IP lookup N times specified by the INTRNT_TRIES setting # perform the new IP lookup N times specified by the INTRNT_TRIES setting
INTRNT_RETRIES = get_setting_value("INTRNT_RETRIES") INTRNT_RETRIES = get_setting_value("INTRNT_RETRIES")
retries_needed = 0 retries_needed = 0
for i in range(INTRNT_RETRIES + 1): for i in range(INTRNT_RETRIES + 1):
new_internet_IP, cmd_output = check_internet_IP( PREV_IP, DIG_GET_IP_ARG) new_internet_IP, cmd_output = check_internet_IP(PREV_IP, DIG_GET_IP_ARG)
if new_internet_IP == no_internet_ip: if new_internet_IP == no_internet_ip:
time.sleep(1*i) # Exponential backoff strategy time.sleep(1 * i) # Exponential backoff strategy
else: else:
retries_needed = i retries_needed = i
break break
@@ -71,68 +71,69 @@ def main():
# METHOD 2: curl # METHOD 2: curl
if new_internet_IP == no_internet_ip: if new_internet_IP == no_internet_ip:
new_internet_IP, cmd_output = fallback_check_ip() new_internet_IP, cmd_output = fallback_check_ip()
mylog('verbose', [f'[{pluginName}] Curl Fallback (new_internet_IP|cmd_output): {new_internet_IP} | {cmd_output}']) mylog('verbose', [f'[{pluginName}] Curl Fallback (new_internet_IP|cmd_output): {new_internet_IP} | {cmd_output}'])
# logging # logging
append_line_to_file (logPath + '/IP_changes.log', '['+str(timeNowDB()) +']\t'+ new_internet_IP +'\n') append_line_to_file(logPath + '/IP_changes.log', '[' + str(timeNowDB()) + ']\t' + new_internet_IP + '\n')
plugin_objects = Plugin_Objects(RESULT_FILE)
plugin_objects = Plugin_Objects(RESULT_FILE)
plugin_objects.add_object( plugin_objects.add_object(
primaryId = 'Internet', # MAC (Device Name) primaryId = 'Internet', # MAC (Device Name)
secondaryId = new_internet_IP, # IP Address secondaryId = new_internet_IP, # IP Address
watched1 = f'Previous IP: {PREV_IP}', watched1 = f'Previous IP: {PREV_IP}',
watched2 = cmd_output.replace('\n',''), watched2 = cmd_output.replace('\n', ''),
watched3 = retries_needed, watched3 = retries_needed,
watched4 = 'Gateway', watched4 = 'Gateway',
extra = f'Previous IP: {PREV_IP}', extra = f'Previous IP: {PREV_IP}',
foreignKey = 'Internet') foreignKey = 'Internet'
)
plugin_objects.write_result_file() plugin_objects.write_result_file()
mylog('verbose', [f'[{pluginName}] Finished '])
mylog('verbose', [f'[{pluginName}] Finished '])
return 0 return 0
#=============================================================================== # ===============================================================================
# INTERNET IP CHANGE # INTERNET IP CHANGE
#=============================================================================== # ===============================================================================
def check_internet_IP ( PREV_IP, DIG_GET_IP_ARG ): def check_internet_IP(PREV_IP, DIG_GET_IP_ARG):
# Get Internet IP # Get Internet IP
mylog('verbose', [f'[{pluginName}] - Retrieving Internet IP']) mylog('verbose', [f'[{pluginName}] - Retrieving Internet IP'])
internet_IP, cmd_output = get_internet_IP(DIG_GET_IP_ARG) internet_IP, cmd_output = get_internet_IP(DIG_GET_IP_ARG)
mylog('verbose', [f'[{pluginName}] Current internet_IP : {internet_IP}']) mylog('verbose', [f'[{pluginName}] Current internet_IP : {internet_IP}'])
# Check previously stored IP # Check previously stored IP
previous_IP = no_internet_ip previous_IP = no_internet_ip
if PREV_IP is not None and len(PREV_IP) > 0 : if PREV_IP is not None and len(PREV_IP) > 0 :
previous_IP = PREV_IP previous_IP = PREV_IP
mylog('verbose', [f'[{pluginName}] previous_IP : {previous_IP}']) mylog('verbose', [f'[{pluginName}] previous_IP : {previous_IP}'])
return internet_IP, cmd_output return internet_IP, cmd_output
#-------------------------------------------------------------------------------
def get_internet_IP (DIG_GET_IP_ARG): # -------------------------------------------------------------------------------
def get_internet_IP(DIG_GET_IP_ARG):
cmd_output = '' cmd_output = ''
# Using 'dig' # Using 'dig'
dig_args = ['dig', '+short'] + DIG_GET_IP_ARG.strip().split() dig_args = ['dig', '+short'] + DIG_GET_IP_ARG.strip().split()
try: try:
cmd_output = subprocess.check_output (dig_args, universal_newlines=True) cmd_output = subprocess.check_output(dig_args, universal_newlines=True)
mylog('verbose', [f'[{pluginName}] DIG result : {cmd_output}']) mylog('verbose', [f'[{pluginName}] DIG result : {cmd_output}'])
except subprocess.CalledProcessError as e: except subprocess.CalledProcessError as e:
mylog('verbose', [e.output]) mylog('verbose', [e.output])
cmd_output = '' # no internet cmd_output = '' # no internet
# Check result is an IP # Check result is an IP
IP = check_IP_format (cmd_output) IP = check_IP_format(cmd_output)
# Handle invalid response # Handle invalid response
if IP == '': if IP == '':
@@ -140,7 +141,8 @@ def get_internet_IP (DIG_GET_IP_ARG):
return IP, cmd_output return IP, cmd_output
#-------------------------------------------------------------------------------
# -------------------------------------------------------------------------------
def fallback_check_ip(): def fallback_check_ip():
"""Fallback mechanism using `curl ifconfig.me/ip`.""" """Fallback mechanism using `curl ifconfig.me/ip`."""
try: try:
@@ -155,8 +157,9 @@ def fallback_check_ip():
mylog('none', [f'[{pluginName}] Fallback curl exception: {e}']) mylog('none', [f'[{pluginName}] Fallback curl exception: {e}'])
return no_internet_ip, f'Fallback via curl exception: "{e}"' return no_internet_ip, f'Fallback via curl exception: "{e}"'
#===============================================================================
# ===============================================================================
# BEGIN # BEGIN
#=============================================================================== # ===============================================================================
if __name__ == '__main__': if __name__ == '__main__':
main() main()

View File

@@ -1,6 +1,5 @@
#!/usr/bin/env python # !/usr/bin/env python
import argparse
import os import os
import sys import sys
import speedtest import speedtest
@@ -9,13 +8,13 @@ import speedtest
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app') INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"]) sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from plugin_helper import Plugin_Objects from plugin_helper import Plugin_Objects # noqa: E402 [flake8 lint suppression]
from utils.datetime_utils import timeNowDB from utils.datetime_utils import timeNowDB # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
import conf import conf # noqa: E402 [flake8 lint suppression]
from pytz import timezone from pytz import timezone # noqa: E402 [flake8 lint suppression]
from const import logPath from const import logPath # noqa: E402 [flake8 lint suppression]
# Make sure the TIMEZONE for logging is correct # Make sure the TIMEZONE for logging is correct
conf.tz = timezone(get_setting_value('TIMEZONE')) conf.tz = timezone(get_setting_value('TIMEZONE'))
@@ -28,18 +27,16 @@ pluginName = 'INTRSPD'
LOG_PATH = logPath + '/plugins' LOG_PATH = logPath + '/plugins'
RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log') RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
def main():
mylog('verbose', ['[INTRSPD] In script'])
parser = argparse.ArgumentParser(description='Speedtest Plugin for NetAlertX') def main():
values = parser.parse_args()
mylog('verbose', ['[INTRSPD] In script'])
plugin_objects = Plugin_Objects(RESULT_FILE) plugin_objects = Plugin_Objects(RESULT_FILE)
speedtest_result = run_speedtest() speedtest_result = run_speedtest()
plugin_objects.add_object( plugin_objects.add_object(
primaryId = 'Speedtest', primaryId = 'Speedtest',
secondaryId = timeNowDB(), secondaryId = timeNowDB(),
watched1 = speedtest_result['download_speed'], watched1 = speedtest_result['download_speed'],
watched2 = speedtest_result['upload_speed'], watched2 = speedtest_result['upload_speed'],
watched3 = 'null', watched3 = 'null',
@@ -49,25 +46,27 @@ def main():
) )
plugin_objects.write_result_file() plugin_objects.write_result_file()
def run_speedtest(): def run_speedtest():
try: try:
st = speedtest.Speedtest(secure=True) st = speedtest.Speedtest(secure=True)
st.get_best_server() st.get_best_server()
download_speed = round(st.download() / 10**6, 2) # Convert to Mbps download_speed = round(st.download() / 10**6, 2) # Convert to Mbps
upload_speed = round(st.upload() / 10**6, 2) # Convert to Mbps upload_speed = round(st.upload() / 10**6, 2) # Convert to Mbps
mylog('verbose', [f"[INTRSPD] Result (down|up): {str(download_speed)} Mbps|{upload_speed} Mbps"]) mylog('verbose', [f"[INTRSPD] Result (down|up): {str(download_speed)} Mbps|{upload_speed} Mbps"])
return { return {
'download_speed': download_speed, 'download_speed': download_speed,
'upload_speed': upload_speed, 'upload_speed': upload_speed,
} }
except Exception as e: except Exception as e:
mylog('verbose', [f"[INTRSPD] Error running speedtest: {str(e)}"]) mylog('verbose', [f"[INTRSPD] Error running speedtest: {str(e)}"])
return { return {
'download_speed': -1, 'download_speed': -1,
'upload_speed': -1, 'upload_speed': -1,
} }
if __name__ == '__main__': if __name__ == '__main__':
sys.exit(main()) sys.exit(main())

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python # !/usr/bin/env python
import os import os
import sys import sys
@@ -11,11 +11,11 @@ from functools import reduce
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app') INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"]) sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from plugin_helper import Plugin_Objects from plugin_helper import Plugin_Objects # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from const import logPath from const import logPath # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
import conf import conf # noqa: E402 [flake8 lint suppression]
# Make sure the TIMEZONE for logging is correct # Make sure the TIMEZONE for logging is correct
conf.tz = timezone(get_setting_value('TIMEZONE')) conf.tz = timezone(get_setting_value('TIMEZONE'))
@@ -34,9 +34,8 @@ RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
plugin_objects = Plugin_Objects(RESULT_FILE) plugin_objects = Plugin_Objects(RESULT_FILE)
def main(): def main():
mylog('verbose', [f'[{pluginName}] In script']) mylog('verbose', [f'[{pluginName}] In script'])
# Retrieve configuration settings # Retrieve configuration settings
SCAN_SUBNETS = get_setting_value('SCAN_SUBNETS') SCAN_SUBNETS = get_setting_value('SCAN_SUBNETS')
@@ -48,33 +47,33 @@ def main():
entry.split('--interface=')[-1].strip() for entry in SCAN_SUBNETS if '--interface=' in entry entry.split('--interface=')[-1].strip() for entry in SCAN_SUBNETS if '--interface=' in entry
) )
mylog('verbose', [f'[{pluginName}] Interfaces value: "{interfaces}"']) mylog('verbose', [f'[{pluginName}] Interfaces value: "{interfaces}"'])
# retrieve data # retrieve data
raw_neighbors = get_neighbors(interfaces) raw_neighbors = get_neighbors(interfaces)
neighbors = parse_neighbors(raw_neighbors) neighbors = parse_neighbors(raw_neighbors)
# Process the data into native application tables # Process the data into native application tables
if len(neighbors) > 0: if len(neighbors) > 0:
for device in neighbors: for device in neighbors:
plugin_objects.add_object( plugin_objects.add_object(
primaryId = device['mac'], primaryId = device['mac'],
secondaryId = device['ip'], secondaryId = device['ip'],
watched4 = device['last_seen'], watched4 = device['last_seen'],
# The following are always unknown # The following are always unknown
watched1 = device['hostname'], # don't use these --> handleEmpty(device['hostname']), watched1 = device['hostname'], # don't use these --> handleEmpty(device['hostname']),
watched2 = device['vendor'], # handleEmpty(device['vendor']), watched2 = device['vendor'], # don't use these --> handleEmpty(device['vendor']),
watched3 = device['device_type'], # handleEmpty(device['device_type']), watched3 = device['device_type'], # don't use these --> handleEmpty(device['device_type']),
extra = '', extra = '',
foreignKey = "" #device['mac'] foreignKey = "" # device['mac']
# helpVal1 = "Something1", # Optional Helper values to be passed for mapping into the app # helpVal1 = "Something1", # Optional Helper values to be passed for mapping into the app
# helpVal2 = "Something1", # If you need to use even only 1, add the remaining ones too # helpVal2 = "Something1", # If you need to use even only 1, add the remaining ones too
# helpVal3 = "Something1", # and set them to 'null'. Check the the docs for details: # helpVal3 = "Something1", # and set them to 'null'. Check the the docs for details:
# helpVal4 = "Something1", # https://github.com/jokob-sk/NetAlertX/blob/main/docs/PLUGINS_DEV.md # helpVal4 = "Something1", # https://github.com/jokob-sk/NetAlertX/blob/main/docs/PLUGINS_DEV.md
) )
mylog('verbose', [f'[{pluginName}] New entries: "{len(neighbors)}"']) mylog('verbose', [f'[{pluginName}] New entries: "{len(neighbors)}"'])
@@ -83,13 +82,14 @@ def main():
return 0 return 0
def parse_neighbors(raw_neighbors: list[str]): def parse_neighbors(raw_neighbors: list[str]):
neighbors = [] neighbors = []
for line in raw_neighbors: for line in raw_neighbors:
if "lladdr" in line and "REACHABLE" in line: if "lladdr" in line and "REACHABLE" in line:
# Known data # Known data
fields = line.split() fields = line.split()
if not is_multicast(fields[0]): if not is_multicast(fields[0]):
# mylog('verbose', [f'[{pluginName}] adding ip {fields[0]}"']) # mylog('verbose', [f'[{pluginName}] adding ip {fields[0]}"'])
neighbor = {} neighbor = {}
@@ -101,9 +101,9 @@ def parse_neighbors(raw_neighbors: list[str]):
neighbor['hostname'] = '(unknown)' neighbor['hostname'] = '(unknown)'
neighbor['vendor'] = '(unknown)' neighbor['vendor'] = '(unknown)'
neighbor['device_type'] = '(unknown)' neighbor['device_type'] = '(unknown)'
neighbors.append(neighbor) neighbors.append(neighbor)
return neighbors return neighbors
@@ -111,6 +111,7 @@ def is_multicast(ip):
prefixes = ['ff', '224', '231', '232', '233', '234', '238', '239'] prefixes = ['ff', '224', '231', '232', '233', '234', '238', '239']
return reduce(lambda acc, prefix: acc or ip.startswith(prefix), prefixes, False) return reduce(lambda acc, prefix: acc or ip.startswith(prefix), prefixes, False)
# retrieve data # retrieve data
def get_neighbors(interfaces): def get_neighbors(interfaces):
@@ -119,7 +120,7 @@ def get_neighbors(interfaces):
for interface in interfaces.split(","): for interface in interfaces.split(","):
try: try:
# Ping all IPv6 devices in multicast to trigger NDP # Ping all IPv6 devices in multicast to trigger NDP
mylog('verbose', [f'[{pluginName}] Pinging on interface: "{interface}"']) mylog('verbose', [f'[{pluginName}] Pinging on interface: "{interface}"'])
command = f"ping ff02::1%{interface} -c 2".split() command = f"ping ff02::1%{interface} -c 2".split()
@@ -136,11 +137,11 @@ def get_neighbors(interfaces):
mylog('verbose', [f'[{pluginName}] Scanning interface succeded: "{interface}"']) mylog('verbose', [f'[{pluginName}] Scanning interface succeded: "{interface}"'])
except subprocess.CalledProcessError as e: except subprocess.CalledProcessError as e:
# An error occurred, handle it # An error occurred, handle it
mylog('verbose', [f'[{pluginName}] Scanning interface failed: "{interface}"'])
error_type = type(e).__name__ # Capture the error type error_type = type(e).__name__ # Capture the error type
mylog('verbose', [f'[{pluginName}] Scanning interface failed: "{interface}" ({error_type})'])
return results return results
if __name__ == '__main__': if __name__ == '__main__':
main() main()

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python # !/usr/bin/env python
import os import os
import sys import sys
@@ -7,18 +7,18 @@ INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"]) sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
pluginName = 'LUCIRPC' pluginName = 'LUCIRPC'
from plugin_helper import Plugin_Objects from plugin_helper import Plugin_Objects # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
from const import logPath from const import logPath # noqa: E402 [flake8 lint suppression]
import conf import conf # noqa: E402 [flake8 lint suppression]
from pytz import timezone from pytz import timezone # noqa: E402 [flake8 lint suppression]
try: try:
from openwrt_luci_rpc import OpenWrtRpc from openwrt_luci_rpc import OpenWrtRpc
except: except ImportError as e:
mylog('error', [f'[{pluginName}] Failed import openwrt_luci_rpc']) mylog('error', [f'[{pluginName}] Failed import openwrt_luci_rpc: {e}'])
exit() exit(1)
conf.tz = timezone(get_setting_value('TIMEZONE')) conf.tz = timezone(get_setting_value('TIMEZONE'))
@@ -30,13 +30,14 @@ RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
plugin_objects = Plugin_Objects(RESULT_FILE) plugin_objects = Plugin_Objects(RESULT_FILE)
def main():
mylog('verbose', [f'[{pluginName}] start script.']) def main():
mylog('verbose', [f'[{pluginName}] start script.'])
device_data = get_device_data() device_data = get_device_data()
for entry in device_data: for entry in device_data:
mylog('verbose', [f'[{pluginName}] found: ', str(entry.mac).lower()]) mylog('verbose', [f'[{pluginName}] found: ', str(entry.mac).lower()])
name = str(entry.hostname) name = str(entry.hostname)
@@ -45,36 +46,38 @@ def main():
plugin_objects.add_object( plugin_objects.add_object(
primaryId = str(entry.mac).lower(), primaryId = str(entry.mac).lower(),
secondaryId = entry.ip, secondaryId = entry.ip,
watched1 = entry.host, watched1 = entry.host,
watched2 = name, watched2 = name,
watched3 = "", watched3 = "",
watched4 = "", watched4 = "",
extra = pluginName, extra = pluginName,
foreignKey = str(entry.mac).lower()) foreignKey = str(entry.mac).lower())
plugin_objects.write_result_file() plugin_objects.write_result_file()
mylog('verbose', [f'[{pluginName}] Script finished']) mylog('verbose', [f'[{pluginName}] Script finished'])
return 0 return 0
def get_device_data(): def get_device_data():
router = OpenWrtRpc( router = OpenWrtRpc(
get_setting_value("LUCIRPC_host"), get_setting_value("LUCIRPC_host"),
get_setting_value("LUCIRPC_user"), get_setting_value("LUCIRPC_user"),
get_setting_value("LUCIRPC_password"), get_setting_value("LUCIRPC_password"),
get_setting_value("LUCIRPC_ssl"), get_setting_value("LUCIRPC_ssl"),
get_setting_value("LUCIRPC_verify_ssl") get_setting_value("LUCIRPC_verify_ssl")
) )
if router.is_logged_in(): if router.is_logged_in():
mylog('verbose', [f'[{pluginName}] login successfully.']) mylog('verbose', [f'[{pluginName}] login successfully.'])
else: else:
mylog('error', [f'[{pluginName}] login fail.']) mylog('error', [f'[{pluginName}] login fail.'])
device_data = router.get_all_connected_devices(only_reachable=get_setting_value("LUCIRPC_only_reachable")) device_data = router.get_all_connected_devices(only_reachable=get_setting_value("LUCIRPC_only_reachable"))
return device_data return device_data
if __name__ == '__main__': if __name__ == '__main__':
main() main()

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python # !/usr/bin/env python
import os import os
import sys import sys
@@ -8,12 +8,12 @@ from collections import deque
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app') INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"]) sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from logger import mylog, Logger from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
from const import logPath from const import logPath # noqa: E402 [flake8 lint suppression]
from messaging.in_app import remove_old from messaging.in_app import remove_old # noqa: E402 [flake8 lint suppression]
import conf import conf # noqa: E402 [flake8 lint suppression]
from pytz import timezone from pytz import timezone # noqa: E402 [flake8 lint suppression]
# Make sure the TIMEZONE for logging is correct # Make sure the TIMEZONE for logging is correct
conf.tz = timezone(get_setting_value('TIMEZONE')) conf.tz = timezone(get_setting_value('TIMEZONE'))
@@ -28,10 +28,9 @@ LOG_FILE = os.path.join(LOG_PATH, f'script.{pluginName}.log')
RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log') RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
def main(): def main():
mylog('verbose', [f'[{pluginName}] In script']) mylog('verbose', [f'[{pluginName}] In script'])
MAINT_LOG_LENGTH = int(get_setting_value('MAINT_LOG_LENGTH')) MAINT_LOG_LENGTH = int(get_setting_value('MAINT_LOG_LENGTH'))
MAINT_NOTI_LENGTH = int(get_setting_value('MAINT_NOTI_LENGTH')) MAINT_NOTI_LENGTH = int(get_setting_value('MAINT_NOTI_LENGTH'))
@@ -39,7 +38,7 @@ def main():
# Check if set # Check if set
if MAINT_LOG_LENGTH != 0: if MAINT_LOG_LENGTH != 0:
mylog('verbose', [f'[{pluginName}] Cleaning file']) mylog('verbose', [f'[{pluginName}] Cleaning file'])
logFile = logPath + "/app.log" logFile = logPath + "/app.log"
@@ -54,19 +53,19 @@ def main():
with open(logFile, 'w') as file: with open(logFile, 'w') as file:
# Write the last N lines back to the file # Write the last N lines back to the file
file.writelines(lines_to_keep) file.writelines(lines_to_keep)
mylog('verbose', [f'[{pluginName}] Cleanup finished']) mylog('verbose', [f'[{pluginName}] Cleanup finished'])
# Check if set # Check if set
if MAINT_NOTI_LENGTH != 0: if MAINT_NOTI_LENGTH != 0:
mylog('verbose', [f'[{pluginName}] Cleaning in-app notification history']) mylog('verbose', [f'[{pluginName}] Cleaning in-app notification history'])
remove_old(MAINT_NOTI_LENGTH) remove_old(MAINT_NOTI_LENGTH)
return 0 return 0
#=============================================================================== # ===============================================================================
# BEGIN # BEGIN
#=============================================================================== # ===============================================================================
if __name__ == '__main__': if __name__ == '__main__':
main() main()

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python # !/usr/bin/env python
import os import os
import sys import sys
@@ -7,14 +7,14 @@ import sys
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app') INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"]) sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from plugin_helper import Plugin_Objects from plugin_helper import Plugin_Objects # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
from const import logPath from const import logPath # noqa: E402 [flake8 lint suppression]
import conf import conf # noqa: E402 [flake8 lint suppression]
from pytz import timezone from pytz import timezone # noqa: E402 [flake8 lint suppression]
from librouteros import connect from librouteros import connect # noqa: E402 [flake8 lint suppression]
from librouteros.exceptions import TrapError from librouteros.exceptions import TrapError # noqa: E402 [flake8 lint suppression]
# Make sure the TIMEZONE for logging is correct # Make sure the TIMEZONE for logging is correct
conf.tz = timezone(get_setting_value('TIMEZONE')) conf.tz = timezone(get_setting_value('TIMEZONE'))
@@ -29,7 +29,6 @@ LOG_FILE = os.path.join(LOG_PATH, f'script.{pluginName}.log')
RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log') RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
def main(): def main():
mylog('verbose', [f'[{pluginName}] In script']) mylog('verbose', [f'[{pluginName}] In script'])
@@ -49,7 +48,7 @@ def main():
plugin_objects = get_entries(plugin_objects) plugin_objects = get_entries(plugin_objects)
plugin_objects.write_result_file() plugin_objects.write_result_file()
mylog('verbose', [f'[{pluginName}] Scan finished, found {len(plugin_objects)} devices']) mylog('verbose', [f'[{pluginName}] Scan finished, found {len(plugin_objects)} devices'])
@@ -58,10 +57,10 @@ def get_entries(plugin_objects: Plugin_Objects) -> Plugin_Objects:
try: try:
# connect router # connect router
api = connect(username=MT_USER, password=MT_PASS, host=MT_HOST, port=MT_PORT) api = connect(username=MT_USER, password=MT_PASS, host=MT_HOST, port=MT_PORT)
# get dhcp leases # get dhcp leases
leases = api('/ip/dhcp-server/lease/print') leases = api('/ip/dhcp-server/lease/print')
for lease in leases: for lease in leases:
lease_id = lease.get('.id') lease_id = lease.get('.id')
address = lease.get('address') address = lease.get('address')
@@ -71,8 +70,11 @@ def get_entries(plugin_objects: Plugin_Objects) -> Plugin_Objects:
last_seen = lease.get('last-seen') last_seen = lease.get('last-seen')
status = lease.get('status') status = lease.get('status')
device_name = comment or host_name or "(unknown)" device_name = comment or host_name or "(unknown)"
mylog('verbose', [f"ID: {lease_id}, Address: {address}, MAC Address: {mac_address}, Host Name: {host_name}, Comment: {comment}, Last Seen: {last_seen}, Status: {status}"]) mylog(
'verbose',
[f"ID: {lease_id}, Address: {address}, MAC Address: {mac_address}, Host Name: {host_name}, Comment: {comment}, Last Seen: {last_seen}, Status: {status}"]
)
if (status == "bound"): if (status == "bound"):
plugin_objects.add_object( plugin_objects.add_object(
@@ -83,7 +85,7 @@ def get_entries(plugin_objects: Plugin_Objects) -> Plugin_Objects:
watched3 = host_name, watched3 = host_name,
watched4 = last_seen, watched4 = last_seen,
extra = '', extra = '',
helpVal1 = comment, helpVal1 = comment,
foreignKey = mac_address) foreignKey = mac_address)
except TrapError as e: except TrapError as e:
@@ -91,13 +93,13 @@ def get_entries(plugin_objects: Plugin_Objects) -> Plugin_Objects:
except Exception as e: except Exception as e:
mylog('error', [f"Failed to connect to MikroTik API: {e}"]) mylog('error', [f"Failed to connect to MikroTik API: {e}"])
mylog('verbose', [f'[{pluginName}] Script finished']) mylog('verbose', [f'[{pluginName}] Script finished'])
return plugin_objects return plugin_objects
#=============================================================================== # ===============================================================================
# BEGIN # BEGIN
#=============================================================================== # ===============================================================================
if __name__ == '__main__': if __name__ == '__main__':
main() main()

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python # !/usr/bin/env python
import os import os
import sys import sys
@@ -8,14 +8,14 @@ import subprocess
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app') INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"]) sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from plugin_helper import Plugin_Objects from plugin_helper import Plugin_Objects # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from const import logPath from const import logPath # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
from database import DB from database import DB # noqa: E402 [flake8 lint suppression]
from models.device_instance import DeviceInstance from models.device_instance import DeviceInstance # noqa: E402 [flake8 lint suppression]
import conf import conf # noqa: E402 [flake8 lint suppression]
from pytz import timezone from pytz import timezone # noqa: E402 [flake8 lint suppression]
# Make sure the TIMEZONE for logging is correct # Make sure the TIMEZONE for logging is correct
conf.tz = timezone(get_setting_value('TIMEZONE')) conf.tz = timezone(get_setting_value('TIMEZONE'))
@@ -34,9 +34,8 @@ RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
plugin_objects = Plugin_Objects(RESULT_FILE) plugin_objects = Plugin_Objects(RESULT_FILE)
def main(): def main():
mylog('verbose', [f'[{pluginName}] In script']) mylog('verbose', [f'[{pluginName}] In script'])
# timeout = get_setting_value('NBLOOKUP_RUN_TIMEOUT') # timeout = get_setting_value('NBLOOKUP_RUN_TIMEOUT')
timeout = 20 timeout = 20
@@ -52,13 +51,13 @@ def main():
device_handler = DeviceInstance(db) device_handler = DeviceInstance(db)
# Retrieve devices # Retrieve devices
if get_setting_value("REFRESH_FQDN"): if get_setting_value("REFRESH_FQDN"):
devices = device_handler.getAll() devices = device_handler.getAll()
else: else:
devices = device_handler.getUnknown() devices = device_handler.getUnknown()
mylog('verbose', [f'[{pluginName}] Devices count: {len(devices)}']) mylog('verbose', [f'[{pluginName}] Devices count: {len(devices)}'])
# TEST - below is a WINDOWS host IP # TEST - below is a WINDOWS host IP
# execute_name_lookup('192.168.1.121', timeout) # execute_name_lookup('192.168.1.121', timeout)
@@ -67,31 +66,32 @@ def main():
if domain_name != '': if domain_name != '':
plugin_objects.add_object( plugin_objects.add_object(
# "MAC", "IP", "Server", "Name" # "MAC", "IP", "Server", "Name"
primaryId = device['devMac'], primaryId = device['devMac'],
secondaryId = device['devLastIP'], secondaryId = device['devLastIP'],
watched1 = dns_server, watched1 = dns_server,
watched2 = domain_name, watched2 = domain_name,
watched3 = '', watched3 = '',
watched4 = '', watched4 = '',
extra = '', extra = '',
foreignKey = device['devMac']) foreignKey = device['devMac']
)
plugin_objects.write_result_file() plugin_objects.write_result_file()
mylog('verbose', [f'[{pluginName}] Script finished'])
mylog('verbose', [f'[{pluginName}] Script finished'])
return 0 return 0
#===============================================================================
# ===============================================================================
# Execute scan # Execute scan
#=============================================================================== # ===============================================================================
def execute_name_lookup (ip, timeout): def execute_name_lookup(ip, timeout):
""" """
Execute the NBTSCAN command on IP. Execute the NBTSCAN command on IP.
""" """
args = ['nbtscan', ip] args = ['nbtscan', ip]
# Execute command # Execute command
@@ -99,20 +99,25 @@ def execute_name_lookup (ip, timeout):
try: try:
mylog('verbose', [f'[{pluginName}] DEBUG CMD :', args]) mylog('verbose', [f'[{pluginName}] DEBUG CMD :', args])
# try runnning a subprocess with a forced (timeout) in case the subprocess hangs # try runnning a subprocess with a forced (timeout) in case the subprocess hangs
output = subprocess.check_output (args, universal_newlines=True, stderr=subprocess.STDOUT, timeout=(timeout), text=True) output = subprocess.check_output(
args,
universal_newlines=True,
stderr=subprocess.STDOUT,
timeout=(timeout),
text=True
)
mylog('verbose', [f'[{pluginName}] DEBUG OUTPUT : {output}']) mylog('verbose', [f'[{pluginName}] DEBUG OUTPUT : {output}'])
domain_name = '' domain_name = ''
dns_server = '' dns_server = ''
# Split the output into lines # Split the output into lines
lines = output.splitlines() lines = output.splitlines()
# Look for the first line containing a valid NetBIOS name entry # Look for the first line containing a valid NetBIOS name entry
index = 0
for line in lines: for line in lines:
if 'Doing NBT name scan' not in line and ip in line: if 'Doing NBT name scan' not in line and ip in line:
# Split the line and extract the primary NetBIOS name # Split the line and extract the primary NetBIOS name
@@ -121,7 +126,6 @@ def execute_name_lookup (ip, timeout):
domain_name = parts[1] domain_name = parts[1]
else: else:
mylog('verbose', [f'[{pluginName}] ⚠ ERROR - Unexpected output format: {line}']) mylog('verbose', [f'[{pluginName}] ⚠ ERROR - Unexpected output format: {line}'])
mylog('verbose', [f'[{pluginName}] Domain Name: {domain_name}']) mylog('verbose', [f'[{pluginName}] Domain Name: {domain_name}'])
@@ -132,18 +136,21 @@ def execute_name_lookup (ip, timeout):
# if "NXDOMAIN" in e.output: # if "NXDOMAIN" in e.output:
# mylog('verbose', [f'[{pluginName}]', f"No PTR record found for IP: {ip}"]) # mylog('verbose', [f'[{pluginName}]', f"No PTR record found for IP: {ip}"])
# else: # else:
mylog('verbose', [f'[{pluginName}] ⚠ ERROR - {e.output}']) mylog('verbose', [f'[{pluginName}] ⚠ ERROR - {e.output}'])
except subprocess.TimeoutExpired:
mylog('verbose', [f'[{pluginName}] TIMEOUT - the process forcefully terminated as timeout reached'])
if output == "": # check if the subprocess failed except subprocess.TimeoutExpired:
mylog('verbose', [f'[{pluginName}] Scan: FAIL - check logs']) mylog('verbose', [f'[{pluginName}] TIMEOUT - the process forcefully terminated as timeout reached'])
else:
if output == "": # check if the subprocess failed
mylog('verbose', [f'[{pluginName}] Scan: FAIL - check logs'])
else:
mylog('verbose', [f'[{pluginName}] Scan: SUCCESS']) mylog('verbose', [f'[{pluginName}] Scan: SUCCESS'])
return '', '' return '', ''
# ===============================================================================
# BEGIN
# ===============================================================================
if __name__ == '__main__': if __name__ == '__main__':
main() main()

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python # !/usr/bin/env python
# test script by running: # test script by running:
# tbc # tbc
@@ -7,19 +7,18 @@ import subprocess
import sys import sys
import hashlib import hashlib
import re import re
import nmap import nmap
# Register NetAlertX directories # Register NetAlertX directories
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app') INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"]) sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from plugin_helper import Plugin_Objects from plugin_helper import Plugin_Objects # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
from const import logPath from const import logPath # noqa: E402 [flake8 lint suppression]
from database import DB import conf # noqa: E402 [flake8 lint suppression]
import conf from pytz import timezone # noqa: E402 [flake8 lint suppression]
from pytz import timezone
# Make sure the TIMEZONE for logging is correct # Make sure the TIMEZONE for logging is correct
conf.tz = timezone(get_setting_value('TIMEZONE')) conf.tz = timezone(get_setting_value('TIMEZONE'))
@@ -37,46 +36,46 @@ RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
def main(): def main():
mylog('verbose', [f'[{pluginName}] In script']) mylog('verbose', [f'[{pluginName}] In script'])
timeout = get_setting_value('NMAPDEV_RUN_TIMEOUT') timeout = get_setting_value('NMAPDEV_RUN_TIMEOUT')
fakeMac = get_setting_value('NMAPDEV_FAKE_MAC') fakeMac = get_setting_value('NMAPDEV_FAKE_MAC')
subnets = get_setting_value('SCAN_SUBNETS') subnets = get_setting_value('SCAN_SUBNETS')
args = get_setting_value('NMAPDEV_ARGS') args = get_setting_value('NMAPDEV_ARGS')
mylog('verbose', [f'[{pluginName}] subnets: ', subnets]) mylog('verbose', [f'[{pluginName}] subnets: ', subnets])
# Initialize the Plugin obj output file # Initialize the Plugin obj output file
plugin_objects = Plugin_Objects(RESULT_FILE) plugin_objects = Plugin_Objects(RESULT_FILE)
unique_devices = execute_scan(subnets, timeout, fakeMac, args) unique_devices = execute_scan(subnets, timeout, fakeMac, args)
mylog('verbose', [f'[{pluginName}] Devices found: {len(unique_devices)}']) mylog('verbose', [f'[{pluginName}] Devices found: {len(unique_devices)}'])
for device in unique_devices: for device in unique_devices:
plugin_objects.add_object( plugin_objects.add_object(
# "MAC", "IP", "Name", "Vendor", "Interface" # "MAC", "IP", "Name", "Vendor", "Interface"
primaryId = device['mac'].lower(), primaryId = device['mac'].lower(),
secondaryId = device['ip'], secondaryId = device['ip'],
watched1 = device['name'], watched1 = device['name'],
watched2 = device['vendor'], watched2 = device['vendor'],
watched3 = device['interface'], watched3 = device['interface'],
watched4 = '', watched4 = '',
extra = '', extra = '',
foreignKey = device['mac']) foreignKey = device['mac']
)
plugin_objects.write_result_file() plugin_objects.write_result_file()
mylog('verbose', [f'[{pluginName}] Script finished'])
mylog('verbose', [f'[{pluginName}] Script finished'])
return 0 return 0
#===============================================================================
# ===============================================================================
# Execute scan # Execute scan
#=============================================================================== # ===============================================================================
def execute_scan(subnets_list, timeout, fakeMac, args): def execute_scan(subnets_list, timeout, fakeMac, args):
devices_list = [] devices_list = []
@@ -103,22 +102,21 @@ def execute_scan(subnets_list, timeout, fakeMac, args):
return devices_list return devices_list
def execute_scan_on_interface(interface, timeout, args):
def execute_scan_on_interface (interface, timeout, args): # Remove unsupported VLAN flags
# Remove unsupported VLAN flags
interface = re.sub(r'--vlan=\S+', '', interface).strip() interface = re.sub(r'--vlan=\S+', '', interface).strip()
# Prepare command arguments # Prepare command arguments
scan_args = args.split() + interface.replace('--interface=','-e ').split() scan_args = args.split() + interface.replace('--interface=', '-e ').split()
mylog('verbose', [f'[{pluginName}] scan_args: ', scan_args])
mylog('verbose', [f'[{pluginName}] scan_args: ', scan_args])
try: try:
result = subprocess.check_output(scan_args, universal_newlines=True) result = subprocess.check_output(scan_args, universal_newlines=True)
except subprocess.CalledProcessError as e: except subprocess.CalledProcessError as e:
error_type = type(e).__name__ error_type = type(e).__name__
result = "" result = ""
mylog('verbose', [f'[{pluginName}] ERROR: ', error_type]) mylog('verbose', [f'[{pluginName}] ERROR: ', error_type])
return result return result
@@ -130,28 +128,25 @@ def parse_nmap_xml(xml_output, interface, fakeMac):
nm = nmap.PortScanner() nm = nmap.PortScanner()
nm.analyse_nmap_xml_scan(xml_output) nm.analyse_nmap_xml_scan(xml_output)
mylog('verbose', [f'[{pluginName}] Number of hosts: ', len(nm.all_hosts())]) mylog('verbose', [f'[{pluginName}] Number of hosts: ', len(nm.all_hosts())])
for host in nm.all_hosts(): for host in nm.all_hosts():
hostname = nm[host].hostname() or '(unknown)' hostname = nm[host].hostname() or '(unknown)'
ip = nm[host]['addresses']['ipv4'] if 'ipv4' in nm[host]['addresses'] else '' ip = nm[host]['addresses']['ipv4'] if 'ipv4' in nm[host]['addresses'] else ''
mac = nm[host]['addresses']['mac'] if 'mac' in nm[host]['addresses'] else '' mac = nm[host]['addresses']['mac'] if 'mac' in nm[host]['addresses'] else ''
mylog('verbose', [f'[{pluginName}] nm[host]: ', nm[host]])
mylog('verbose', [f'[{pluginName}] nm[host]: ', nm[host]])
vendor = '' vendor = ''
if nm[host]['vendor']: if nm[host]['vendor']:
mylog('verbose', [f'[{pluginName}] entry: ', nm[host]['vendor']]) mylog('verbose', [f'[{pluginName}] entry: ', nm[host]['vendor']])
for key, value in nm[host]['vendor'].items(): for key, value in nm[host]['vendor'].items():
vendor = value vendor = value
break break
# Log debug information # Log debug information
mylog('verbose', [f"[{pluginName}] Hostname: {hostname}, IP: {ip}, MAC: {mac}, Vendor: {vendor}"]) mylog('verbose', [f"[{pluginName}] Hostname: {hostname}, IP: {ip}, MAC: {mac}, Vendor: {vendor}"])
@@ -172,24 +167,24 @@ def parse_nmap_xml(xml_output, interface, fakeMac):
# MAC or IP missing # MAC or IP missing
mylog('verbose', [f"[{pluginName}] Skipping: {hostname}, IP or MAC missing, or NMAPDEV_GENERATE_MAC setting not enabled"]) mylog('verbose', [f"[{pluginName}] Skipping: {hostname}, IP or MAC missing, or NMAPDEV_GENERATE_MAC setting not enabled"])
except Exception as e: except Exception as e:
mylog('verbose', [f"[{pluginName}] Error parsing nmap XML: ", str(e)]) mylog('verbose', [f"[{pluginName}] Error parsing nmap XML: ", str(e)])
return devices_list return devices_list
def string_to_mac_hash(input_string): def string_to_mac_hash(input_string):
# Calculate a hash using SHA-256 # Calculate a hash using SHA-256
sha256_hash = hashlib.sha256(input_string.encode()).hexdigest() sha256_hash = hashlib.sha256(input_string.encode()).hexdigest()
# Take the first 12 characters of the hash and format as a MAC address # Take the first 12 characters of the hash and format as a MAC address
mac_hash = ':'.join(sha256_hash[i:i+2] for i in range(0, 12, 2)) mac_hash = ':'.join(sha256_hash[i:i + 2] for i in range(0, 12, 2))
return mac_hash return mac_hash
#===============================================================================
# ===============================================================================
# BEGIN # BEGIN
#=============================================================================== # ===============================================================================
if __name__ == '__main__': if __name__ == '__main__':
main() main()

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python # !/usr/bin/env python
import os import os
import argparse import argparse
@@ -9,13 +9,13 @@ import subprocess
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app') INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"]) sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from plugin_helper import Plugin_Objects from plugin_helper import Plugin_Objects # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger, append_line_to_file from logger import mylog, Logger, append_line_to_file # noqa: E402 [flake8 lint suppression]
from utils.datetime_utils import timeNowDB from utils.datetime_utils import timeNowDB # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
from const import logPath from const import logPath # noqa: E402 [flake8 lint suppression]
import conf import conf # noqa: E402 [flake8 lint suppression]
from pytz import timezone from pytz import timezone # noqa: E402 [flake8 lint suppression]
# Make sure the TIMEZONE for logging is correct # Make sure the TIMEZONE for logging is correct
conf.tz = timezone(get_setting_value('TIMEZONE')) conf.tz = timezone(get_setting_value('TIMEZONE'))
@@ -32,7 +32,8 @@ RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
# Initialize the Plugin obj output file # Initialize the Plugin obj output file
plugin_objects = Plugin_Objects(RESULT_FILE) plugin_objects = Plugin_Objects(RESULT_FILE)
#-------------------------------------------------------------------------------
# -------------------------------------------------------------------------------
def main(): def main():
parser = argparse.ArgumentParser( parser = argparse.ArgumentParser(
description='Scan ports of devices specified by IP addresses' description='Scan ports of devices specified by IP addresses'
@@ -85,7 +86,7 @@ def main():
mylog('verbose', [f'[{pluginName}] Total number of ports found by NMAP: ', len(entries)]) mylog('verbose', [f'[{pluginName}] Total number of ports found by NMAP: ', len(entries)])
for entry in entries: for entry in entries:
plugin_objects.add_object( plugin_objects.add_object(
primaryId = entry.mac, # MAC (Device Name) primaryId = entry.mac, # MAC (Device Name)
@@ -94,14 +95,14 @@ def main():
watched2 = entry.service, watched2 = entry.service,
watched3 = entry.ip + ":" + entry.port, watched3 = entry.ip + ":" + entry.port,
watched4 = "", watched4 = "",
extra = entry.extra, extra = entry.extra,
foreignKey = entry.mac foreignKey = entry.mac
) )
plugin_objects.write_result_file() plugin_objects.write_result_file()
#-------------------------------------------------------------------------------
# -------------------------------------------------------------------------------
class nmap_entry: class nmap_entry:
def __init__(self, ip, mac, time, port, state, service, name = '', extra = '', index = 0): def __init__(self, ip, mac, time, port, state, service, name = '', extra = '', index = 0):
self.ip = ip self.ip = ip
@@ -109,13 +110,13 @@ class nmap_entry:
self.time = time self.time = time
self.port = port self.port = port
self.state = state self.state = state
self.service = service self.service = service
self.extra = extra self.extra = extra
self.index = index self.index = index
self.hash = str(mac) + str(port)+ str(state)+ str(service) self.hash = str(mac) + str(port) + str(state) + str(service)
#------------------------------------------------------------------------------- # -------------------------------------------------------------------------------
def parse_kv_args(raw_args): def parse_kv_args(raw_args):
""" """
Converts ['ips=a,b,c', 'macs=x,y,z', 'timeout=5'] to a dict. Converts ['ips=a,b,c', 'macs=x,y,z', 'timeout=5'] to a dict.
@@ -125,26 +126,28 @@ def parse_kv_args(raw_args):
for item in raw_args: for item in raw_args:
if '=' not in item: if '=' not in item:
mylog('none', [f"[{pluginName}] Scan: Invalid parameter (missing '='): {item}"]) mylog('none', [f"[{pluginName}] Scan: Invalid parameter (missing '='): {item}"])
key, value = item.split('=', 1) key, value = item.split('=', 1)
if key in parsed: if key in parsed:
mylog('none', [f"[{pluginName}] Scan: Duplicate parameter supplied: {key}"]) mylog('none', [f"[{pluginName}] Scan: Duplicate parameter supplied: {key}"])
parsed[key] = value parsed[key] = value
return parsed return parsed
#-------------------------------------------------------------------------------
# -------------------------------------------------------------------------------
def safe_split_list(value, keyname): def safe_split_list(value, keyname):
"""Split comma list safely and ensure no empty items.""" """Split comma list safely and ensure no empty items."""
items = [x.strip() for x in value.split(',') if x.strip()] items = [x.strip() for x in value.split(',') if x.strip()]
if not items: if not items:
mylog('none', [f"[{pluginName}] Scan: {keyname} list is empty or invalid"]) mylog('none', [f"[{pluginName}] Scan: {keyname} list is empty or invalid"])
return items return items
#-------------------------------------------------------------------------------
# -------------------------------------------------------------------------------
def performNmapScan(deviceIPs, deviceMACs, timeoutSec, args): def performNmapScan(deviceIPs, deviceMACs, timeoutSec, args):
""" """
run nmap scan on a list of devices run nmap scan on a list of devices
@@ -154,15 +157,12 @@ def performNmapScan(deviceIPs, deviceMACs, timeoutSec, args):
# collect ports / new Nmap Entries # collect ports / new Nmap Entries
newEntriesTmp = [] newEntriesTmp = []
if len(deviceIPs) > 0:
if len(deviceIPs) > 0:
devTotal = len(deviceIPs) devTotal = len(deviceIPs)
mylog('verbose', [f'[{pluginName}] Scan: Nmap for max ', str(timeoutSec), 's ('+ str(round(int(timeoutSec) / 60, 1)) +'min) per device'])
mylog('verbose', ["[NMAP Scan] Estimated max delay: ", (devTotal * int(timeoutSec)), 's ', '(', round((devTotal * int(timeoutSec))/60,1) , 'min)' ])
mylog('verbose', [f'[{pluginName}] Scan: Nmap for max ', str(timeoutSec), 's (' + str(round(int(timeoutSec) / 60, 1)) + 'min) per device'])
mylog('verbose', ["[NMAP Scan] Estimated max delay: ", (devTotal * int(timeoutSec)), 's ', '(', round((devTotal * int(timeoutSec)) / 60, 1) , 'min)'])
devIndex = 0 devIndex = 0
for ip in deviceIPs: for ip in deviceIPs:
@@ -171,67 +171,63 @@ def performNmapScan(deviceIPs, deviceMACs, timeoutSec, args):
# prepare arguments from user supplied ones # prepare arguments from user supplied ones
nmapArgs = ['nmap'] + args.split() + [ip] nmapArgs = ['nmap'] + args.split() + [ip]
progress = ' (' + str(devIndex+1) + '/' + str(devTotal) + ')' progress = ' (' + str(devIndex + 1) + '/' + str(devTotal) + ')'
try: try:
# try runnning a subprocess with a forced (timeout) in case the subprocess hangs # try runnning a subprocess with a forced (timeout) in case the subprocess hangs
output = subprocess.check_output (nmapArgs, universal_newlines=True, stderr=subprocess.STDOUT, timeout=(float(timeoutSec))) output = subprocess.check_output(
nmapArgs,
universal_newlines=True,
stderr=subprocess.STDOUT,
timeout=(float(timeoutSec))
)
except subprocess.CalledProcessError as e: except subprocess.CalledProcessError as e:
# An error occured, handle it # An error occured, handle it
mylog('none', ["[NMAP Scan] " ,e.output]) mylog('none', ["[NMAP Scan] ", e.output])
mylog('none', ["[NMAP Scan] ⚠ ERROR - Nmap Scan - check logs", progress]) mylog('none', ["[NMAP Scan] ⚠ ERROR - Nmap Scan - check logs", progress])
except subprocess.TimeoutExpired: except subprocess.TimeoutExpired:
mylog('verbose', [f'[{pluginName}] Nmap TIMEOUT - the process forcefully terminated as timeout reached for ', ip, progress]) mylog('verbose', [f'[{pluginName}] Nmap TIMEOUT - the process forcefully terminated as timeout reached for ', ip, progress])
if output == "": # check if the subprocess failed if output == "": # check if the subprocess failed
mylog('minimal', [f'[{pluginName}] Nmap FAIL for ', ip, progress ,' check logs for details']) mylog('minimal', [f'[{pluginName}] Nmap FAIL for ', ip, progress, ' check logs for details'])
else: else:
mylog('verbose', [f'[{pluginName}] Nmap SUCCESS for ', ip, progress]) mylog('verbose', [f'[{pluginName}] Nmap SUCCESS for ', ip, progress])
# check the last run output
# check the last run output
newLines = output.split('\n') newLines = output.split('\n')
# regular logging # regular logging
for line in newLines: for line in newLines:
append_line_to_file (logPath + '/app_nmap.log', line +'\n') append_line_to_file(logPath + '/app_nmap.log', line + '\n')
index = 0 index = 0
startCollecting = False startCollecting = False
duration = "" duration = ""
newPortsPerDevice = 0 newPortsPerDevice = 0
for line in newLines: for line in newLines:
if 'Starting Nmap' in line: if 'Starting Nmap' in line:
if len(newLines) > index+1 and 'Note: Host seems down' in newLines[index+1]: if len(newLines) > index + 1 and 'Note: Host seems down' in newLines[index + 1]:
break # this entry is empty break # this entry is empty
elif 'PORT' in line and 'STATE' in line and 'SERVICE' in line: elif 'PORT' in line and 'STATE' in line and 'SERVICE' in line:
startCollecting = True startCollecting = True
elif 'PORT' in line and 'STATE' in line and 'SERVICE' in line: elif 'PORT' in line and 'STATE' in line and 'SERVICE' in line:
startCollecting = False # end reached startCollecting = False # end reached
elif startCollecting and len(line.split()) == 3: elif startCollecting and len(line.split()) == 3:
newEntriesTmp.append(nmap_entry(ip, deviceMACs[devIndex], timeNowDB(), line.split()[0], line.split()[1], line.split()[2])) newEntriesTmp.append(nmap_entry(ip, deviceMACs[devIndex], timeNowDB(), line.split()[0], line.split()[1], line.split()[2]))
newPortsPerDevice += 1 newPortsPerDevice += 1
elif 'Nmap done' in line: elif 'Nmap done' in line:
duration = line.split('scanned in ')[1] duration = line.split('scanned in ')[1]
mylog('verbose', [f'[{pluginName}] {newPortsPerDevice} ports found on {deviceMACs[devIndex]}']) mylog('verbose', [f'[{pluginName}] {newPortsPerDevice} ports found on {deviceMACs[devIndex]} after {duration}'])
index += 1 index += 1
devIndex += 1 devIndex += 1
#end for loop
return newEntriesTmp return newEntriesTmp
#===============================================================================
# ===============================================================================
# BEGIN # BEGIN
#=============================================================================== # ===============================================================================
if __name__ == '__main__': if __name__ == '__main__':
main() main()

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python # !/usr/bin/env python
# test script by running: # test script by running:
# tbc # tbc
@@ -11,14 +11,14 @@ import re
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app') INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"]) sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from plugin_helper import Plugin_Objects from plugin_helper import Plugin_Objects # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
from const import logPath from const import logPath # noqa: E402 [flake8 lint suppression]
from database import DB from database import DB # noqa: E402 [flake8 lint suppression]
from models.device_instance import DeviceInstance from models.device_instance import DeviceInstance # noqa: E402 [flake8 lint suppression]
import conf import conf # noqa: E402 [flake8 lint suppression]
from pytz import timezone from pytz import timezone # noqa: E402 [flake8 lint suppression]
# Make sure the TIMEZONE for logging is correct # Make sure the TIMEZONE for logging is correct
conf.tz = timezone(get_setting_value('TIMEZONE')) conf.tz = timezone(get_setting_value('TIMEZONE'))
@@ -33,11 +33,9 @@ LOG_FILE = os.path.join(LOG_PATH, f'script.{pluginName}.log')
RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log') RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
def main(): def main():
mylog('verbose', [f'[{pluginName}] In script']) mylog('verbose', [f'[{pluginName}] In script'])
timeout = get_setting_value('NSLOOKUP_RUN_TIMEOUT') timeout = get_setting_value('NSLOOKUP_RUN_TIMEOUT')
@@ -52,13 +50,13 @@ def main():
device_handler = DeviceInstance(db) device_handler = DeviceInstance(db)
# Retrieve devices # Retrieve devices
if get_setting_value("REFRESH_FQDN"): if get_setting_value("REFRESH_FQDN"):
devices = device_handler.getAll() devices = device_handler.getAll()
else: else:
devices = device_handler.getUnknown() devices = device_handler.getUnknown()
mylog('verbose', [f'[{pluginName}] Devices count: {len(devices)}']) mylog('verbose', [f'[{pluginName}] Devices count: {len(devices)}'])
# TEST - below is a WINDOWS host IP # TEST - below is a WINDOWS host IP
# execute_name_lookup('192.168.1.121', timeout) # execute_name_lookup('192.168.1.121', timeout)
@@ -67,31 +65,32 @@ def main():
if domain_name != '': if domain_name != '':
plugin_objects.add_object( plugin_objects.add_object(
# "MAC", "IP", "Server", "Name" # "MAC", "IP", "Server", "Name"
primaryId = device['devMac'], primaryId = device['devMac'],
secondaryId = device['devLastIP'], secondaryId = device['devLastIP'],
watched1 = dns_server, watched1 = dns_server,
watched2 = domain_name, watched2 = domain_name,
watched3 = '', watched3 = '',
watched4 = '', watched4 = '',
extra = '', extra = '',
foreignKey = device['devMac']) foreignKey = device['devMac']
)
plugin_objects.write_result_file() plugin_objects.write_result_file()
mylog('verbose', [f'[{pluginName}] Script finished'])
mylog('verbose', [f'[{pluginName}] Script finished'])
return 0 return 0
#===============================================================================
# ===============================================================================
# Execute scan # Execute scan
#=============================================================================== # ===============================================================================
def execute_nslookup (ip, timeout): def execute_nslookup(ip, timeout):
""" """
Execute the NSLOOKUP command on IP. Execute the NSLOOKUP command on IP.
""" """
nslookup_args = ['nslookup', ip] nslookup_args = ['nslookup', ip]
# Execute command # Execute command
@@ -99,7 +98,13 @@ def execute_nslookup (ip, timeout):
try: try:
# try runnning a subprocess with a forced (timeout) in case the subprocess hangs # try runnning a subprocess with a forced (timeout) in case the subprocess hangs
output = subprocess.check_output (nslookup_args, universal_newlines=True, stderr=subprocess.STDOUT, timeout=(timeout), text=True) output = subprocess.check_output(
nslookup_args,
universal_newlines=True,
stderr=subprocess.STDOUT,
timeout=(timeout),
text=True
)
domain_name = '' domain_name = ''
dns_server = '' dns_server = ''
@@ -110,8 +115,7 @@ def execute_nslookup (ip, timeout):
domain_pattern = re.compile(r'name\s*=\s*([^\s]+)', re.IGNORECASE) domain_pattern = re.compile(r'name\s*=\s*([^\s]+)', re.IGNORECASE)
server_pattern = re.compile(r'Server:\s+(.+)', re.IGNORECASE) server_pattern = re.compile(r'Server:\s+(.+)', re.IGNORECASE)
domain_match = domain_pattern.search(output)
domain_match = domain_pattern.search(output)
server_match = server_pattern.search(output) server_match = server_pattern.search(output)
if domain_match: if domain_match:
@@ -131,24 +135,20 @@ def execute_nslookup (ip, timeout):
else: else:
mylog('verbose', [f'[{pluginName}]', e.output]) mylog('verbose', [f'[{pluginName}]', e.output])
# Handle other errors here # Handle other errors here
# mylog('verbose', [f'[{pluginName}] ⚠ ERROR - check logs']) # mylog('verbose', [f'[{pluginName}] ⚠ ERROR - check logs'])
except subprocess.TimeoutExpired: except subprocess.TimeoutExpired:
mylog('verbose', [f'[{pluginName}] TIMEOUT - the process forcefully terminated as timeout reached']) mylog('verbose', [f'[{pluginName}] TIMEOUT - the process forcefully terminated as timeout reached'])
if output != "": # check if the subprocess failed
if output == "": # check if the subprocess failed
tmp = 1 # can't have empty
# mylog('verbose', [f'[{pluginName}] Scan: FAIL - check logs'])
else:
mylog('verbose', [f'[{pluginName}] Scan: SUCCESS']) mylog('verbose', [f'[{pluginName}] Scan: SUCCESS'])
return '', '' return '', ''
#===============================================================================
# ===============================================================================
# BEGIN # BEGIN
#=============================================================================== # ===============================================================================
if __name__ == '__main__': if __name__ == '__main__':
main() main()

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python # !/usr/bin/env python
__author__ = "ffsb" __author__ = "ffsb"
__version__ = "0.1" # initial __version__ = "0.1" # initial
__version__ = "0.2" # added logic to retry omada api call once as it seems to sometimes fail for some reasons, and error handling logic... __version__ = "0.2" # added logic to retry omada api call once as it seems to sometimes fail for some reasons, and error handling logic...
@@ -15,10 +15,9 @@ __version__ = "1.3" # fix detection of the default gateway IP address that woul
# try to identify and populate their connections by switch/accesspoints and ports/SSID # try to identify and populate their connections by switch/accesspoints and ports/SSID
# try to differentiate root bridges from accessory # try to differentiate root bridges from accessory
#
# sample code to update unbound on opnsense - for reference... # sample code to update unbound on opnsense - for reference...
# curl -X POST -d '{"host":{"enabled":"1","hostname":"test","domain":"testdomain.com","rr":"A","mxprio":"","mx":"","server":"10.0.1.1","description":""}}' -H "Content-Type: application/json" -k -u $OPNS_KEY:$OPNS_SECRET https://$IPFW/api/unbound/settings/AddHostOverride # curl -X POST -d '{"host":{"enabled":"1","hostname":"test","domain":"testdomain.com","rr":"A","mxprio":"","mx":"","server":"10.0.1.1","description":""}}'\
# -H "Content-Type: application/json" -k -u $OPNS_KEY:$OPNS_SECRET https://$IPFW/api/unbound/settings/AddHostOverride
# #
import os import os
import sys import sys
@@ -35,12 +34,12 @@ import multiprocessing
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app') INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"]) sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from plugin_helper import Plugin_Objects from plugin_helper import Plugin_Objects # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from const import logPath from const import logPath # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
from pytz import timezone from pytz import timezone # noqa: E402 [flake8 lint suppression]
import conf import conf # noqa: E402 [flake8 lint suppression]
# Make sure the TIMEZONE for logging is correct # Make sure the TIMEZONE for logging is correct
conf.tz = timezone(get_setting_value('TIMEZONE')) conf.tz = timezone(get_setting_value('TIMEZONE'))
@@ -87,8 +86,6 @@ cMAC, cIP, cNAME, cSWITCH_AP, cPORT_SSID = range(5)
OMDLOGLEVEL = "debug" OMDLOGLEVEL = "debug"
#
# translate MAC address from standard ieee model to ietf draft # translate MAC address from standard ieee model to ietf draft
# AA-BB-CC-DD-EE-FF to aa:bb:cc:dd:ee:ff # AA-BB-CC-DD-EE-FF to aa:bb:cc:dd:ee:ff
# tplink adheres to ieee, Nax adheres to ietf # tplink adheres to ieee, Nax adheres to ietf
@@ -142,7 +139,7 @@ def callomada(myargs):
try: try:
mf = io.StringIO() mf = io.StringIO()
with redirect_stdout(mf): with redirect_stdout(mf):
bar = omada(myargs) omada(myargs)
omada_output = mf.getvalue() omada_output = mf.getvalue()
except Exception: except Exception:
mylog( mylog(
@@ -190,12 +187,12 @@ def add_uplink(
if switch_mac not in device_data_bymac: if switch_mac not in device_data_bymac:
mylog("none", [f"[{pluginName}] switch_mac '{switch_mac}' not found in device_data_bymac"]) mylog("none", [f"[{pluginName}] switch_mac '{switch_mac}' not found in device_data_bymac"])
return return
# Ensure SWITCH_AP key exists in the dictionary # Ensure SWITCH_AP key exists in the dictionary
if SWITCH_AP not in device_data_bymac[switch_mac]: if SWITCH_AP not in device_data_bymac[switch_mac]:
mylog("none", [f"[{pluginName}] Missing key '{SWITCH_AP}' in device_data_bymac[{switch_mac}]"]) mylog("none", [f"[{pluginName}] Missing key '{SWITCH_AP}' in device_data_bymac[{switch_mac}]"])
return return
# Check if uplink should be added # Check if uplink should be added
if device_data_bymac[switch_mac][SWITCH_AP] in [None, "null"]: if device_data_bymac[switch_mac][SWITCH_AP] in [None, "null"]:
device_data_bymac[switch_mac][SWITCH_AP] = uplink_mac device_data_bymac[switch_mac][SWITCH_AP] = uplink_mac
@@ -204,11 +201,10 @@ def add_uplink(
if uplink_mac not in device_data_bymac: if uplink_mac not in device_data_bymac:
mylog("none", [f"[{pluginName}] uplink_mac '{uplink_mac}' not found in device_data_bymac"]) mylog("none", [f"[{pluginName}] uplink_mac '{uplink_mac}' not found in device_data_bymac"])
return return
# Determine port to uplink # Determine port to uplink
if ( if (
device_data_bymac[switch_mac].get(TYPE) == "Switch" device_data_bymac[switch_mac].get(TYPE) == "Switch" and device_data_bymac[uplink_mac].get(TYPE) == "Switch"
and device_data_bymac[uplink_mac].get(TYPE) == "Switch"
): ):
port_to_uplink = port_byswitchmac_byclientmac.get(switch_mac, {}).get(uplink_mac) port_to_uplink = port_byswitchmac_byclientmac.get(switch_mac, {}).get(uplink_mac)
if port_to_uplink is None: if port_to_uplink is None:
@@ -216,16 +212,14 @@ def add_uplink(
return return
else: else:
port_to_uplink = device_data_bymac[uplink_mac].get(PORT_SSID) port_to_uplink = device_data_bymac[uplink_mac].get(PORT_SSID)
# Assign port to switch_mac # Assign port to switch_mac
device_data_bymac[switch_mac][PORT_SSID] = port_to_uplink device_data_bymac[switch_mac][PORT_SSID] = port_to_uplink
# Recursively add uplinks for linked devices # Recursively add uplinks for linked devices
for link in sadevices_linksbymac.get(switch_mac, []): for link in sadevices_linksbymac.get(switch_mac, []):
if ( if (
link in device_data_bymac link in device_data_bymac and device_data_bymac[link].get(SWITCH_AP) in [None, "null"] and device_data_bymac[switch_mac].get(TYPE) == "Switch"
and device_data_bymac[link].get(SWITCH_AP) in [None, "null"]
and device_data_bymac[switch_mac].get(TYPE) == "Switch"
): ):
add_uplink( add_uplink(
switch_mac, switch_mac,
@@ -236,7 +230,6 @@ def add_uplink(
) )
# ---------------------------------------------- # ----------------------------------------------
# Main initialization # Main initialization
def main(): def main():
@@ -324,16 +317,16 @@ def main():
) )
mymac = ieee2ietf_mac_formater(device[MAC]) mymac = ieee2ietf_mac_formater(device[MAC])
plugin_objects.add_object( plugin_objects.add_object(
primaryId=mymac, # MAC primaryId=mymac, # MAC
secondaryId=device[IP], # IP secondaryId=device[IP], # IP
watched1=device[NAME], # NAME/HOSTNAME watched1=device[NAME], # NAME/HOSTNAME
watched2=ParentNetworkNode, # PARENT NETWORK NODE MAC watched2=ParentNetworkNode, # PARENT NETWORK NODE MAC
watched3=myport, # PORT watched3=myport, # PORT
watched4=myssid, # SSID watched4=myssid, # SSID
extra=device[TYPE], extra=device[TYPE],
# omada_site, # SITENAME (cur_NetworkSite) or VENDOR (cur_Vendor) (PICK one and adjust config.json -> "column": "Extra") # omada_site, # SITENAME (cur_NetworkSite) or VENDOR (cur_Vendor) (PICK one and adjust config.json -> "column": "Extra")
foreignKey=device[MAC].lower().replace("-", ":"), foreignKey=device[MAC].lower().replace("-", ":"),
) # usually MAC ) # usually MAC
mylog( mylog(
"verbose", "verbose",
@@ -369,7 +362,6 @@ def get_omada_devices_details(msadevice_data):
mswitch_dump = callomada(["-t", "myomada", "switch", "-d", mthisswitch]) mswitch_dump = callomada(["-t", "myomada", "switch", "-d", mthisswitch])
else: else:
mswitch_detail = "" mswitch_detail = ""
nswitch_dump = ""
return mswitch_detail, mswitch_dump return mswitch_detail, mswitch_dump
@@ -414,7 +406,6 @@ def get_device_data(omada_clients_output, switches_and_aps, device_handler):
# 17:27:10 [<unique_prefix>] token: "['1A-2B-3C-4D-5E-6F', '192.168.0.217', '1A-2B-3C-4D-5E-6F', '17', '40-AE-30-A5-A7-50, 'Switch']" # 17:27:10 [<unique_prefix>] token: "['1A-2B-3C-4D-5E-6F', '192.168.0.217', '1A-2B-3C-4D-5E-6F', '17', '40-AE-30-A5-A7-50, 'Switch']"
# constants # constants
sadevices_macbyname = {} sadevices_macbyname = {}
sadevices_macbymac = {}
sadevices_linksbymac = {} sadevices_linksbymac = {}
port_byswitchmac_byclientmac = {} port_byswitchmac_byclientmac = {}
device_data_bymac = {} device_data_bymac = {}
@@ -427,7 +418,7 @@ def get_device_data(omada_clients_output, switches_and_aps, device_handler):
def run_command(command, index): def run_command(command, index):
result = subprocess.run(command, capture_output=True, text=True, shell=True) result = subprocess.run(command, capture_output=True, text=True, shell=True)
return str(index), result.stdout.strip() return str(index), result.stdout.strip()
myindex, command_output= run_command(command, 2) myindex, command_output= run_command(command, 2)
mylog('verbose', [f'[{pluginName}] command={command} index={myindex} results={command_output}']) mylog('verbose', [f'[{pluginName}] command={command} index={myindex} results={command_output}'])
""" """
@@ -556,11 +547,11 @@ def get_device_data(omada_clients_output, switches_and_aps, device_handler):
# #
naxname = real_naxname naxname = real_naxname
if real_naxname != None: if real_naxname is not None:
if "(" in real_naxname: if "(" in real_naxname:
# removing parenthesis and domains from the name # removing parenthesis and domains from the name
naxname = real_naxname.split("(")[0] naxname = real_naxname.split("(")[0]
if naxname != None and "." in naxname: if naxname is not None and "." in naxname:
naxname = naxname.split(".")[0] naxname = naxname.split(".")[0]
if naxname in (None, "null", ""): if naxname in (None, "null", ""):
naxname = ( naxname = (

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python # !/usr/bin/env python
""" """
This plugin imports devices and clients from Omada Controller using their OpenAPI. This plugin imports devices and clients from Omada Controller using their OpenAPI.
@@ -25,7 +25,6 @@ import sys
import urllib3 import urllib3
import requests import requests
import time import time
import datetime
import pytz import pytz
from datetime import datetime from datetime import datetime
@@ -35,11 +34,11 @@ from typing import Literal, Any, Dict
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app') INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"]) sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from plugin_helper import Plugin_Objects, is_typical_router_ip, is_mac from plugin_helper import Plugin_Objects, is_typical_router_ip, is_mac # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from const import logPath from const import logPath # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
import conf import conf # noqa: E402 [flake8 lint suppression]
# Make sure the TIMEZONE for logging is correct # Make sure the TIMEZONE for logging is correct
conf.tz = pytz.timezone(get_setting_value('TIMEZONE')) conf.tz = pytz.timezone(get_setting_value('TIMEZONE'))
@@ -176,7 +175,10 @@ class OmadaHelper:
# If it's not a gateway try to assign parent node MAC # If it's not a gateway try to assign parent node MAC
if data.get("type", "") != "gateway": if data.get("type", "") != "gateway":
parent_mac = OmadaHelper.normalize_mac(data.get("uplinkDeviceMac")) parent_mac = OmadaHelper.normalize_mac(data.get("uplinkDeviceMac"))
entry["parent_node_mac_address"] = parent_mac.get("response_result") if isinstance(parent_mac, dict) and parent_mac.get("response_type") == "success" else ""
resp_type = parent_mac.get("response_type")
entry["parent_node_mac_address"] = parent_mac.get("response_result") if isinstance(parent_mac, dict) and resp_type == "success" else ""
# Applicable only for CLIENT # Applicable only for CLIENT
if input_type == "client": if input_type == "client":
@@ -185,15 +187,26 @@ class OmadaHelper:
# Try to assign parent node MAC and PORT/SSID to the CLIENT # Try to assign parent node MAC and PORT/SSID to the CLIENT
if data.get("connectDevType", "") == "gateway": if data.get("connectDevType", "") == "gateway":
parent_mac = OmadaHelper.normalize_mac(data.get("gatewayMac")) parent_mac = OmadaHelper.normalize_mac(data.get("gatewayMac"))
entry["parent_node_mac_address"] = parent_mac.get("response_result") if isinstance(parent_mac, dict) and parent_mac.get("response_type") == "success" else ""
resp_type = parent_mac.get("response_type")
entry["parent_node_mac_address"] = parent_mac.get("response_result") if isinstance(parent_mac, dict) and resp_type == "success" else ""
entry["parent_node_port"] = data.get("port", "") entry["parent_node_port"] = data.get("port", "")
elif data.get("connectDevType", "") == "switch": elif data.get("connectDevType", "") == "switch":
parent_mac = OmadaHelper.normalize_mac(data.get("switchMac")) parent_mac = OmadaHelper.normalize_mac(data.get("switchMac"))
entry["parent_node_mac_address"] = parent_mac.get("response_result") if isinstance(parent_mac, dict) and parent_mac.get("response_type") == "success" else ""
resp_type = parent_mac.get("response_type")
entry["parent_node_mac_address"] = parent_mac.get("response_result") if isinstance(parent_mac, dict) and resp_type == "success" else ""
entry["parent_node_port"] = data.get("port", "") entry["parent_node_port"] = data.get("port", "")
elif data.get("connectDevType", "") == "ap": elif data.get("connectDevType", "") == "ap":
parent_mac = OmadaHelper.normalize_mac(data.get("apMac")) parent_mac = OmadaHelper.normalize_mac(data.get("apMac"))
entry["parent_node_mac_address"] = parent_mac.get("response_result") if isinstance(parent_mac, dict) and parent_mac.get("response_type") == "success" else ""
resp_type = parent_mac.get("response_type")
entry["parent_node_mac_address"] = parent_mac.get("response_result") if isinstance(parent_mac, dict) and resp_type == "success" else ""
entry["parent_node_ssid"] = data.get("ssid", "") entry["parent_node_ssid"] = data.get("ssid", "")
# Add the entry to the result # Add the entry to the result
@@ -253,7 +266,7 @@ class OmadaAPI:
"""Return request headers.""" """Return request headers."""
headers = {"Content-type": "application/json"} headers = {"Content-type": "application/json"}
# Add access token to header if requested and available # Add access token to header if requested and available
if include_auth == True: if include_auth is True:
if not self.access_token: if not self.access_token:
OmadaHelper.debug("No access token available for headers") OmadaHelper.debug("No access token available for headers")
else: else:
@@ -368,7 +381,7 @@ class OmadaAPI:
# Failed site population # Failed site population
OmadaHelper.debug(f"Site population response: {response}") OmadaHelper.debug(f"Site population response: {response}")
return OmadaHelper.response("error", f"Site population failed - error: {response.get('response_message', 'Not provided')}") return OmadaHelper.response("error", f"Site population failed - error: {response.get('response_message', 'Not provided')}")
def requested_sites(self) -> list: def requested_sites(self) -> list:
"""Returns sites requested by user.""" """Returns sites requested by user."""

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python # !/usr/bin/env python
""" """
NetAlertX plugin: PIHOLEAPI NetAlertX plugin: PIHOLEAPI
Imports devices from Pi-hole v6 API (Network endpoints) into NetAlertX plugin results. Imports devices from Pi-hole v6 API (Network endpoints) into NetAlertX plugin results.
@@ -17,12 +17,12 @@ sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
pluginName = 'PIHOLEAPI' pluginName = 'PIHOLEAPI'
from plugin_helper import Plugin_Objects, is_mac from plugin_helper import Plugin_Objects, is_mac # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
from const import logPath from const import logPath # noqa: E402 [flake8 lint suppression]
import conf import conf # noqa: E402 [flake8 lint suppression]
from pytz import timezone from pytz import timezone # noqa: E402 [flake8 lint suppression]
# Setup timezone & logger using standard NAX helpers # Setup timezone & logger using standard NAX helpers
conf.tz = timezone(get_setting_value('TIMEZONE')) conf.tz = timezone(get_setting_value('TIMEZONE'))
@@ -179,7 +179,7 @@ def get_pihole_network_devices():
resp = requests.get(PIHOLEAPI_URL + 'api/network/devices', headers=headers, params=params, verify=PIHOLEAPI_VERIFY_SSL, timeout=PIHOLEAPI_RUN_TIMEOUT) resp = requests.get(PIHOLEAPI_URL + 'api/network/devices', headers=headers, params=params, verify=PIHOLEAPI_VERIFY_SSL, timeout=PIHOLEAPI_RUN_TIMEOUT)
resp.raise_for_status() resp.raise_for_status()
data = resp.json() data = resp.json()
mylog('debug', [f'[{pluginName}] Pi-hole API returned data: {json.dumps(data)}']) mylog('debug', [f'[{pluginName}] Pi-hole API returned data: {json.dumps(data)}'])
except Exception as e: except Exception as e:
@@ -267,7 +267,7 @@ def main():
for entry in device_entries: for entry in device_entries:
if is_mac(entry['mac']): if is_mac(entry['mac']):
# Map to Plugin_Objects fields # Map to Plugin_Objects fields
mylog('verbose', [f'[{pluginName}] found: {entry['name']}|{entry['mac']}|{entry['ip']}']) mylog('verbose', [f'[{pluginName}] found: {entry['name']}|{entry['mac']}|{entry['ip']}'])
plugin_objects.add_object( plugin_objects.add_object(

View File

@@ -5,18 +5,18 @@ import os
import re import re
import base64 import base64
import json import json
from datetime import datetime
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app') INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.append(f"{INSTALL_PATH}/front/plugins") sys.path.append(f"{INSTALL_PATH}/front/plugins")
sys.path.append(f'{INSTALL_PATH}/server') sys.path.append(f'{INSTALL_PATH}/server')
from logger import mylog, Logger from logger import mylog # noqa: E402 [flake8 lint suppression]
from utils.datetime_utils import timeNowDB from utils.datetime_utils import timeNowDB # noqa: E402 [flake8 lint suppression]
from const import default_tz, fullConfPath from const import default_tz, fullConfPath # noqa: E402 [flake8 lint suppression]
#-------------------------------------------------------------------------------
# -------------------------------------------------------------------------------
def read_config_file(): def read_config_file():
""" """
retuns dict on the config file key:value pairs retuns dict on the config file key:value pairs
@@ -25,15 +25,15 @@ def read_config_file():
filename = fullConfPath filename = fullConfPath
print('[plugin_helper] reading config file') print('[plugin_helper] reading config file')
# load the variables from .conf # load the variables from .conf
with open(filename, "r") as file: with open(filename, "r") as file:
code = compile(file.read(), filename, "exec") code = compile(file.read(), filename, "exec")
confDict = {} # config dictionary confDict = {} # config dictionary
exec(code, {"__builtins__": {}}, confDict) exec(code, {"__builtins__": {}}, confDict)
return confDict return confDict
configFile = read_config_file() configFile = read_config_file()
@@ -42,6 +42,7 @@ if timeZoneSetting not in all_timezones:
timeZoneSetting = default_tz timeZoneSetting = default_tz
timeZone = pytz.timezone(timeZoneSetting) timeZone = pytz.timezone(timeZoneSetting)
# ------------------------------------------------------------------- # -------------------------------------------------------------------
# Sanitizes plugin output # Sanitizes plugin output
def handleEmpty(input): def handleEmpty(input):
@@ -54,70 +55,72 @@ def handleEmpty(input):
input = re.sub(r'[^\x00-\x7F]+', ' ', input) input = re.sub(r'[^\x00-\x7F]+', ' ', input)
input = input.replace('\n', '') # Removing new lines input = input.replace('\n', '') # Removing new lines
return input return input
# ------------------------------------------------------------------- # -------------------------------------------------------------------
# Sanitizes string # Sanitizes string
def rmBadChars(input): def rmBadChars(input):
input = handleEmpty(input) input = handleEmpty(input)
input = input.replace("'", '_') # Removing ' (single quotes) input = input.replace("'", '_') # Removing ' (single quotes)
return input return input
# ------------------------------------------------------------------- # -------------------------------------------------------------------
# check if this is a router IP # check if this is a router IP
def is_typical_router_ip(ip_address): def is_typical_router_ip(ip_address):
# List of common default gateway IP addresses # List of common default gateway IP addresses
common_router_ips = [ common_router_ips = [
"192.168.0.1", "192.168.1.1", "192.168.1.254", "192.168.0.254", "192.168.0.1", "192.168.1.1", "192.168.1.254", "192.168.0.254",
"10.0.0.1", "10.1.1.1", "192.168.2.1", "192.168.10.1", "192.168.11.1", "10.0.0.1", "10.1.1.1", "192.168.2.1", "192.168.10.1", "192.168.11.1",
"192.168.100.1", "192.168.101.1", "192.168.123.254", "192.168.223.1", "192.168.100.1", "192.168.101.1", "192.168.123.254", "192.168.223.1",
"192.168.31.1", "192.168.8.1", "192.168.254.254", "192.168.50.1", "192.168.31.1", "192.168.8.1", "192.168.254.254", "192.168.50.1",
"192.168.3.1", "192.168.4.1", "192.168.5.1", "192.168.9.1", "192.168.3.1", "192.168.4.1", "192.168.5.1", "192.168.9.1",
"192.168.15.1", "192.168.16.1", "192.168.20.1", "192.168.30.1", "192.168.15.1", "192.168.16.1", "192.168.20.1", "192.168.30.1",
"192.168.42.1", "192.168.62.1", "192.168.178.1", "192.168.1.1", "192.168.42.1", "192.168.62.1", "192.168.178.1", "192.168.1.1",
"192.168.1.254", "192.168.0.1", "192.168.0.10", "192.168.0.100", "192.168.1.254", "192.168.0.1", "192.168.0.10", "192.168.0.100",
"192.168.0.254" "192.168.0.254"
] ]
return ip_address in common_router_ips return ip_address in common_router_ips
# ------------------------------------------------------------------- # -------------------------------------------------------------------
# Check if a valid MAC address # Check if a valid MAC address
def is_mac(input): def is_mac(input):
input_str = str(input).lower() # Convert to string and lowercase so non-string values won't raise errors input_str = str(input).lower() # Convert to string and lowercase so non-string values won't raise errors
isMac = bool(re.match("[0-9a-f]{2}([-:]?)[0-9a-f]{2}(\\1[0-9a-f]{2}){4}$", input_str)) isMac = bool(re.match("[0-9a-f]{2}([-:]?)[0-9a-f]{2}(\\1[0-9a-f]{2}){4}$", input_str))
if not isMac: # If it's not a MAC address, log the input if not isMac: # If it's not a MAC address, log the input
mylog('verbose', [f'[is_mac] not a MAC: {input_str}']) mylog('verbose', [f'[is_mac] not a MAC: {input_str}'])
return isMac return isMac
# ------------------------------------------------------------------- # -------------------------------------------------------------------
def decodeBase64(inputParamBase64): def decodeBase64(inputParamBase64):
# Printing the input list to check its content. # Printing the input list to check its content.
mylog('debug', ['[Plugins] Helper base64 input: ', input]) mylog('debug', ['[Plugins] Helper base64 input: ', input])
print('[Plugins] Helper base64 input: ') print('[Plugins] Helper base64 input: ')
print(input) print(input)
# Extract the base64-encoded subnet information from the first element
# The format of the element is assumed to be like 'param=b<base64-encoded-data>'.
# Extract the base64-encoded subnet information from the first element
# The format of the element is assumed to be like 'param=b<base64-encoded-data>'.
# Printing the extracted base64-encoded information. # Printing the extracted base64-encoded information.
mylog('debug', ['[Plugins] Helper base64 inputParamBase64: ', inputParamBase64]) mylog('debug', ['[Plugins] Helper base64 inputParamBase64: ', inputParamBase64])
# Decode the base64-encoded subnet information to get the actual subnet information in ASCII format. # Decode the base64-encoded subnet information to get the actual subnet information in ASCII format.
result = base64.b64decode(inputParamBase64).decode('ascii') result = base64.b64decode(inputParamBase64).decode('ascii')
# Print the decoded subnet information. # Print the decoded subnet information.
mylog('debug', ['[Plugins] Helper base64 result: ', result]) mylog('debug', ['[Plugins] Helper base64 result: ', result])
return result return result
# ------------------------------------------------------------------- # -------------------------------------------------------------------
def decode_settings_base64(encoded_str, convert_types=True): def decode_settings_base64(encoded_str, convert_types=True):
""" """
@@ -167,7 +170,7 @@ def decode_settings_base64(encoded_str, convert_types=True):
def normalize_mac(mac): def normalize_mac(mac):
# Split the MAC address by colon (:) or hyphen (-) and convert each part to uppercase # Split the MAC address by colon (:) or hyphen (-) and convert each part to uppercase
parts = mac.upper().split(':') parts = mac.upper().split(':')
# If the MAC address is split by hyphen instead of colon # If the MAC address is split by hyphen instead of colon
if len(parts) == 1: if len(parts) == 1:
parts = mac.upper().split('-') parts = mac.upper().split('-')
@@ -177,14 +180,15 @@ def normalize_mac(mac):
# Join the parts with colon (:) # Join the parts with colon (:)
normalized_mac = ':'.join(normalized_parts) normalized_mac = ':'.join(normalized_parts)
return normalized_mac return normalized_mac
# ------------------------------------------------------------------- # -------------------------------------------------------------------
class Plugin_Object: class Plugin_Object:
""" """
Plugin_Object class to manage one object introduced by the plugin. Plugin_Object class to manage one object introduced by the plugin.
An object typically is a device but could also be a website or something An object typically is a device but could also be a website or something
else that is monitored by the plugin. else that is monitored by the plugin.
""" """
@@ -222,8 +226,8 @@ class Plugin_Object:
self.helpVal4 = helpVal4 or "" self.helpVal4 = helpVal4 or ""
def write(self): def write(self):
""" """
Write the object details as a string in the Write the object details as a string in the
format required to write the result file. format required to write the result file.
""" """
line = "{}|{}|{}|{}|{}|{}|{}|{}|{}|{}|{}|{}|{}\n".format( line = "{}|{}|{}|{}|{}|{}|{}|{}|{}|{}|{}|{}|{}\n".format(
@@ -243,6 +247,7 @@ class Plugin_Object:
) )
return line return line
class Plugin_Objects: class Plugin_Objects:
""" """
Plugin_Objects is the class that manages and holds all the objects created by the plugin. Plugin_Objects is the class that manages and holds all the objects created by the plugin.
@@ -303,7 +308,3 @@ class Plugin_Objects:
def __len__(self): def __len__(self):
return len(self.objects) return len(self.objects)

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python # !/usr/bin/env python
from __future__ import unicode_literals from __future__ import unicode_literals
import subprocess import subprocess
@@ -10,12 +10,12 @@ import sys
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app') INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"]) sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from plugin_helper import Plugin_Objects, handleEmpty, normalize_mac from plugin_helper import Plugin_Objects, handleEmpty, normalize_mac # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
from const import logPath from const import logPath # noqa: E402 [flake8 lint suppression]
import conf import conf # noqa: E402 [flake8 lint suppression]
from pytz import timezone from pytz import timezone # noqa: E402 [flake8 lint suppression]
# Make sure the TIMEZONE for logging is correct # Make sure the TIMEZONE for logging is correct
conf.tz = timezone(get_setting_value('TIMEZONE')) conf.tz = timezone(get_setting_value('TIMEZONE'))
@@ -28,53 +28,60 @@ pluginName = "SNMPDSC"
LOG_PATH = logPath + '/plugins' LOG_PATH = logPath + '/plugins'
RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log') RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
# Workflow
def main(): def main():
mylog('verbose', ['[SNMPDSC] In script ']) mylog('verbose', ['[SNMPDSC] In script '])
# init global variables # init global variables
global snmpWalkCmds global snmpWalkCmds
parser = argparse.ArgumentParser(description='This plugin is used to discover devices via the arp table(s) of a RFC1213 compliant router or switch.')
parser = argparse.ArgumentParser(description='This plugin is used to discover devices via the arp table(s) of a RFC1213 compliant router or switch.') parser.add_argument(
parser.add_argument('routers', action="store", help="IP(s) of routers, separated by comma (,) if passing multiple") 'routers',
action="store",
help="IP(s) of routers, separated by comma (,) if passing multiple"
)
values = parser.parse_args() values = parser.parse_args()
timeoutSetting = get_setting_value("SNMPDSC_RUN_TIMEOUT") timeoutSetting = get_setting_value("SNMPDSC_RUN_TIMEOUT")
plugin_objects = Plugin_Objects(RESULT_FILE) plugin_objects = Plugin_Objects(RESULT_FILE)
if values.routers: if values.routers:
snmpWalkCmds = values.routers.split('=')[1].replace('\'','') snmpWalkCmds = values.routers.split('=')[1].replace('\'', '')
if ',' in snmpWalkCmds: if ',' in snmpWalkCmds:
commands = snmpWalkCmds.split(',') commands = snmpWalkCmds.split(',')
else: else:
commands = [snmpWalkCmds] commands = [snmpWalkCmds]
for cmd in commands: for cmd in commands:
mylog('verbose', ['[SNMPDSC] Router snmpwalk command: ', cmd]) mylog('verbose', ['[SNMPDSC] Router snmpwalk command: ', cmd])
# split the string, remove white spaces around each item, and exclude any empty strings # split the string, remove white spaces around each item, and exclude any empty strings
snmpwalkArgs = [arg.strip() for arg in cmd.split(' ') if arg.strip()] snmpwalkArgs = [arg.strip() for arg in cmd.split(' ') if arg.strip()]
# Execute N probes and insert in list # Execute N probes and insert in list
probes = 1 # N probes probes = 1 # N probes
for _ in range(probes):
output = subprocess.check_output (snmpwalkArgs, universal_newlines=True, stderr=subprocess.STDOUT, timeout=(timeoutSetting))
mylog('verbose', ['[SNMPDSC] output: ', output]) for _ in range(probes):
output = subprocess.check_output(
snmpwalkArgs,
universal_newlines=True,
stderr=subprocess.STDOUT,
timeout=(timeoutSetting)
)
mylog('verbose', ['[SNMPDSC] output: ', output])
lines = output.split('\n') lines = output.split('\n')
for line in lines: for line in lines:
tmpSplt = line.split('"') tmpSplt = line.split('"')
if len(tmpSplt) == 3: if len(tmpSplt) == 3:
ipStr = tmpSplt[0].split('.')[-4:] # Get the last 4 elements to extract the IP ipStr = tmpSplt[0].split('.')[-4:] # Get the last 4 elements to extract the IP
macStr = tmpSplt[1].strip().split(' ') # Remove leading/trailing spaces from MAC macStr = tmpSplt[1].strip().split(' ') # Remove leading/trailing spaces from MAC
@@ -82,19 +89,18 @@ def main():
macAddress = ':'.join(macStr) macAddress = ':'.join(macStr)
ipAddress = '.'.join(ipStr) ipAddress = '.'.join(ipStr)
mylog('verbose', [f'[SNMPDSC] IP: {ipAddress} MAC: {macAddress}']) mylog('verbose', [f'[SNMPDSC] IP: {ipAddress} MAC: {macAddress}'])
plugin_objects.add_object( plugin_objects.add_object(
primaryId = handleEmpty(macAddress), primaryId = handleEmpty(macAddress),
secondaryId = handleEmpty(ipAddress.strip()), # Remove leading/trailing spaces from IP secondaryId = handleEmpty(ipAddress.strip()), # Remove leading/trailing spaces from IP
watched1 = '(unknown)', watched1 = '(unknown)',
watched2 = handleEmpty(snmpwalkArgs[6]), # router IP watched2 = handleEmpty(snmpwalkArgs[6]), # router IP
extra = handleEmpty(line), extra = handleEmpty(line),
foreignKey = handleEmpty(macAddress) # Use the primary ID as the foreign key foreignKey = handleEmpty(macAddress) # Use the primary ID as the foreign key
) )
else: else:
mylog('verbose', ['[SNMPDSC] ipStr does not seem to contain a valid IP:', ipStr]) mylog('verbose', ['[SNMPDSC] ipStr does not seem to contain a valid IP:', ipStr])
elif line.startswith('ipNetToMediaPhysAddress'): elif line.startswith('ipNetToMediaPhysAddress'):
# Format: snmpwalk -OXsq output # Format: snmpwalk -OXsq output
@@ -115,12 +121,11 @@ def main():
foreignKey = handleEmpty(macAddress) foreignKey = handleEmpty(macAddress)
) )
mylog('verbose', ['[SNMPDSC] Entries found: ', len(plugin_objects)]) mylog('verbose', ['[SNMPDSC] Entries found: ', len(plugin_objects)])
plugin_objects.write_result_file() plugin_objects.write_result_file()
# BEGIN # BEGIN
if __name__ == '__main__': if __name__ == '__main__':
main() main()

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python # !/usr/bin/env python
import os import os
import sys import sys
@@ -12,16 +12,16 @@ import base64
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app') INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"]) sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from plugin_helper import Plugin_Objects from plugin_helper import Plugin_Objects # noqa: E402 [flake8 lint suppression]
from utils.plugin_utils import get_plugins_configs, decode_and_rename_files from utils.plugin_utils import get_plugins_configs, decode_and_rename_files # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from const import fullDbPath, logPath from const import fullDbPath, logPath # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
from utils.datetime_utils import timeNowDB from utils.datetime_utils import timeNowDB # noqa: E402 [flake8 lint suppression]
from utils.crypto_utils import encrypt_data from utils.crypto_utils import encrypt_data # noqa: E402 [flake8 lint suppression]
from messaging.in_app import write_notification from messaging.in_app import write_notification # noqa: E402 [flake8 lint suppression]
import conf import conf # noqa: E402 [flake8 lint suppression]
from pytz import timezone from pytz import timezone # noqa: E402 [flake8 lint suppression]
# Make sure the TIMEZONE for logging is correct # Make sure the TIMEZONE for logging is correct
conf.tz = timezone(get_setting_value('TIMEZONE')) conf.tz = timezone(get_setting_value('TIMEZONE'))
@@ -41,21 +41,21 @@ plugin_objects = Plugin_Objects(RESULT_FILE)
def main(): def main():
mylog('verbose', [f'[{pluginName}] In script']) mylog('verbose', [f'[{pluginName}] In script'])
# Retrieve configuration settings # Retrieve configuration settings
plugins_to_sync = get_setting_value('SYNC_plugins') plugins_to_sync = get_setting_value('SYNC_plugins')
api_token = get_setting_value('API_TOKEN') api_token = get_setting_value('API_TOKEN')
encryption_key = get_setting_value('SYNC_encryption_key') encryption_key = get_setting_value('SYNC_encryption_key')
hub_url = get_setting_value('SYNC_hub_url') hub_url = get_setting_value('SYNC_hub_url')
node_name = get_setting_value('SYNC_node_name') node_name = get_setting_value('SYNC_node_name')
send_devices = get_setting_value('SYNC_devices') send_devices = get_setting_value('SYNC_devices')
pull_nodes = get_setting_value('SYNC_nodes') pull_nodes = get_setting_value('SYNC_nodes')
# variables to determine operation mode # variables to determine operation mode
is_hub = False is_hub = False
is_node = False is_node = False
# Check if api_token set # Check if api_token set
if not api_token: if not api_token:
mylog('verbose', [f'[{pluginName}] ⚠ ERROR api_token not defined - quitting.']) mylog('verbose', [f'[{pluginName}] ⚠ ERROR api_token not defined - quitting.'])
@@ -63,23 +63,23 @@ def main():
# check if this is a hub or a node # check if this is a hub or a node
if len(hub_url) > 0 and (send_devices or plugins_to_sync): if len(hub_url) > 0 and (send_devices or plugins_to_sync):
is_node = True is_node = True
mylog('verbose', [f'[{pluginName}] Mode 1: PUSH (NODE) - This is a NODE as SYNC_hub_url, SYNC_devices or SYNC_plugins are set']) mylog('verbose', [f'[{pluginName}] Mode 1: PUSH (NODE) - This is a NODE as SYNC_hub_url, SYNC_devices or SYNC_plugins are set'])
if len(pull_nodes) > 0: if len(pull_nodes) > 0:
is_hub = True is_hub = True
mylog('verbose', [f'[{pluginName}] Mode 2: PULL (HUB) - This is a HUB as SYNC_nodes is set']) mylog('verbose', [f'[{pluginName}] Mode 2: PULL (HUB) - This is a HUB as SYNC_nodes is set'])
# Mode 1: PUSH/SEND (NODE) # Mode 1: PUSH/SEND (NODE)
if is_node: if is_node:
# PUSHING/SENDING Plugins # PUSHING/SENDING Plugins
# Get all plugin configurations # Get all plugin configurations
all_plugins = get_plugins_configs(False) all_plugins = get_plugins_configs(False)
mylog('verbose', [f'[{pluginName}] plugins_to_sync {plugins_to_sync}']) mylog('verbose', [f'[{pluginName}] plugins_to_sync {plugins_to_sync}'])
for plugin in all_plugins: for plugin in all_plugins:
pref = plugin["unique_prefix"] pref = plugin["unique_prefix"]
index = 0 index = 0
if pref in plugins_to_sync: if pref in plugins_to_sync:
@@ -100,9 +100,8 @@ def main():
send_data(api_token, file_content, encryption_key, file_path, node_name, pref, hub_url) send_data(api_token, file_content, encryption_key, file_path, node_name, pref, hub_url)
else: else:
mylog('verbose', [f'[{pluginName}] {file_path} not found']) mylog('verbose', [f'[{pluginName}] {file_path} not found'])
# PUSHING/SENDING devices # PUSHING/SENDING devices
if send_devices: if send_devices:
@@ -117,27 +116,27 @@ def main():
mylog('verbose', [f'[{pluginName}] Sending file_content: "{file_content}"']) mylog('verbose', [f'[{pluginName}] Sending file_content: "{file_content}"'])
send_data(api_token, file_content, encryption_key, file_path, node_name, pref, hub_url) send_data(api_token, file_content, encryption_key, file_path, node_name, pref, hub_url)
else: else:
mylog('verbose', [f'[{pluginName}] SYNC_hub_url not defined, skipping posting "Devices" data']) mylog('verbose', [f'[{pluginName}] SYNC_hub_url not defined, skipping posting "Devices" data'])
else: else:
mylog('verbose', [f'[{pluginName}] SYNC_hub_url not defined, skipping posting "Plugins" and "Devices" data']) mylog('verbose', [f'[{pluginName}] SYNC_hub_url not defined, skipping posting "Plugins" and "Devices" data'])
# Mode 2: PULL/GET (HUB) # Mode 2: PULL/GET (HUB)
# PULLING DEVICES # PULLING DEVICES
file_prefix = 'last_result' file_prefix = 'last_result'
# pull data from nodes if specified # pull data from nodes if specified
if is_hub: if is_hub:
for node_url in pull_nodes: for node_url in pull_nodes:
response_json = get_data(api_token, node_url) response_json = get_data(api_token, node_url)
# Extract node_name and base64 data # Extract node_name and base64 data
node_name = response_json.get('node_name', 'unknown_node') node_name = response_json.get('node_name', 'unknown_node')
data_base64 = response_json.get('data_base64', '') data_base64 = response_json.get('data_base64', '')
# Decode base64 data # Decode base64 data
decoded_data = base64.b64decode(data_base64) decoded_data = base64.b64decode(data_base64)
# Create log file name using node name # Create log file name using node name
log_file_name = f'{file_prefix}.{node_name}.log' log_file_name = f'{file_prefix}.{node_name}.log'
@@ -148,18 +147,17 @@ def main():
message = f'[{pluginName}] Device data from node "{node_name}" written to {log_file_name}' message = f'[{pluginName}] Device data from node "{node_name}" written to {log_file_name}'
mylog('verbose', [message]) mylog('verbose', [message])
if lggr.isAbove('verbose'): if lggr.isAbove('verbose'):
write_notification(message, 'info', timeNowDB()) write_notification(message, 'info', timeNowDB())
# Process any received data for the Device DB table (ONLY JSON) # Process any received data for the Device DB table (ONLY JSON)
# Create the file path # Create the file path
# Get all "last_result" files from the sync folder, decode, rename them, and get the list of files # Get all "last_result" files from the sync folder, decode, rename them, and get the list of files
files_to_process = decode_and_rename_files(LOG_PATH, file_prefix) files_to_process = decode_and_rename_files(LOG_PATH, file_prefix)
if len(files_to_process) > 0: if len(files_to_process) > 0:
mylog('verbose', [f'[{pluginName}] Mode 3: RECEIVE (HUB) - This is a HUB as received data found']) mylog('verbose', [f'[{pluginName}] Mode 3: RECEIVE (HUB) - This is a HUB as received data found'])
# Connect to the App database # Connect to the App database
conn = sqlite3.connect(fullDbPath) conn = sqlite3.connect(fullDbPath)
@@ -176,24 +174,24 @@ def main():
# only process received .log files, skipping the one logging the progress of this plugin # only process received .log files, skipping the one logging the progress of this plugin
if file_name != 'last_result.log': if file_name != 'last_result.log':
mylog('verbose', [f'[{pluginName}] Processing: "{file_name}"']) mylog('verbose', [f'[{pluginName}] Processing: "{file_name}"'])
# make sure the file has the correct name (e.g last_result.encoded.Node_1.1.log) to skip any otehr plugin files # make sure the file has the correct name (e.g last_result.encoded.Node_1.1.log) to skip any otehr plugin files
if len(file_name.split('.')) > 2: if len(file_name.split('.')) > 2:
# Extract node name from either last_result.decoded.Node_1.1.log or last_result.Node_1.log # Extract node name from either last_result.decoded.Node_1.1.log or last_result.Node_1.log
parts = file_name.split('.') parts = file_name.split('.')
# If decoded/encoded file, node name is at index 2; otherwise at index 1 # If decoded/encoded file, node name is at index 2; otherwise at index 1
syncHubNodeName = parts[2] if 'decoded' in file_name or 'encoded' in file_name else parts[1] syncHubNodeName = parts[2] if 'decoded' in file_name or 'encoded' in file_name else parts[1]
file_path = f"{LOG_PATH}/{file_name}" file_path = f"{LOG_PATH}/{file_name}"
with open(file_path, 'r') as f: with open(file_path, 'r') as f:
data = json.load(f) data = json.load(f)
for device in data['data']: for device in data['data']:
if device['devMac'] not in unique_mac_addresses: if device['devMac'] not in unique_mac_addresses:
device['devSyncHubNode'] = syncHubNodeName device['devSyncHubNode'] = syncHubNodeName
unique_mac_addresses.add(device['devMac']) unique_mac_addresses.add(device['devMac'])
device_data.append(device) device_data.append(device)
# Rename the file to "processed_" + current name # Rename the file to "processed_" + current name
new_file_name = f"processed_{file_name}" new_file_name = f"processed_{file_name}"
new_file_path = os.path.join(LOG_PATH, new_file_name) new_file_path = os.path.join(LOG_PATH, new_file_name)
@@ -209,7 +207,6 @@ def main():
placeholders = ', '.join('?' for _ in unique_mac_addresses) placeholders = ', '.join('?' for _ in unique_mac_addresses)
cursor.execute(f'SELECT devMac FROM Devices WHERE devMac IN ({placeholders})', tuple(unique_mac_addresses)) cursor.execute(f'SELECT devMac FROM Devices WHERE devMac IN ({placeholders})', tuple(unique_mac_addresses))
existing_mac_addresses = set(row[0] for row in cursor.fetchall()) existing_mac_addresses = set(row[0] for row in cursor.fetchall())
# insert devices into the last_result.log and thus CurrentScan table to manage state # insert devices into the last_result.log and thus CurrentScan table to manage state
for device in device_data: for device in device_data:
@@ -228,7 +225,7 @@ def main():
# Filter out existing devices # Filter out existing devices
new_devices = [device for device in device_data if device['devMac'] not in existing_mac_addresses] new_devices = [device for device in device_data if device['devMac'] not in existing_mac_addresses]
# Remove 'rowid' key if it exists # Remove 'rowid' key if it exists
for device in new_devices: for device in new_devices:
device.pop('rowid', None) device.pop('rowid', None)
device.pop('devStatus', None) device.pop('devStatus', None)
@@ -257,7 +254,6 @@ def main():
mylog('verbose', [message]) mylog('verbose', [message])
write_notification(message, 'info', timeNowDB()) write_notification(message, 'info', timeNowDB())
# Commit and close the connection # Commit and close the connection
conn.commit() conn.commit()
@@ -268,6 +264,7 @@ def main():
return 0 return 0
# ------------------------------------------------------------------ # ------------------------------------------------------------------
# Data retrieval methods # Data retrieval methods
api_endpoints = [ api_endpoints = [
@@ -275,6 +272,7 @@ api_endpoints = [
"/plugins/sync/hub.php" # Legacy PHP endpoint "/plugins/sync/hub.php" # Legacy PHP endpoint
] ]
# send data to the HUB # send data to the HUB
def send_data(api_token, file_content, encryption_key, file_path, node_name, pref, hub_url): def send_data(api_token, file_content, encryption_key, file_path, node_name, pref, hub_url):
"""Send encrypted data to HUB, preferring /sync endpoint and falling back to PHP version.""" """Send encrypted data to HUB, preferring /sync endpoint and falling back to PHP version."""
@@ -345,6 +343,5 @@ def get_data(api_token, node_url):
return "" return ""
if __name__ == '__main__': if __name__ == '__main__':
main() main()

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python # !/usr/bin/env python
import os import os
import sys import sys
@@ -10,12 +10,11 @@ from unifi_sm_api.api import SiteManagerAPI
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app') INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"]) sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from plugin_helper import Plugin_Objects, decode_settings_base64 from plugin_helper import Plugin_Objects, decode_settings_base64 # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from const import logPath from const import logPath # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
import conf # noqa: E402 [flake8 lint suppression]
import conf
# Make sure the TIMEZONE for logging is correct # Make sure the TIMEZONE for logging is correct
conf.tz = timezone(get_setting_value('TIMEZONE')) conf.tz = timezone(get_setting_value('TIMEZONE'))
@@ -35,13 +34,13 @@ plugin_objects = Plugin_Objects(RESULT_FILE)
def main(): def main():
mylog('verbose', [f'[{pluginName}] In script']) mylog('verbose', [f'[{pluginName}] In script'])
# Retrieve configuration settings # Retrieve configuration settings
unifi_sites_configs = get_setting_value('UNIFIAPI_sites') unifi_sites_configs = get_setting_value('UNIFIAPI_sites')
mylog('verbose', [f'[{pluginName}] number of unifi_sites_configs: {len(unifi_sites_configs)}']) mylog('verbose', [f'[{pluginName}] number of unifi_sites_configs: {len(unifi_sites_configs)}'])
for site_config in unifi_sites_configs: for site_config in unifi_sites_configs:
siteDict = decode_settings_base64(site_config) siteDict = decode_settings_base64(site_config)
@@ -50,11 +49,11 @@ def main():
mylog('none', [f'[{pluginName}] Connecting to: {siteDict["UNIFIAPI_site_name"]}']) mylog('none', [f'[{pluginName}] Connecting to: {siteDict["UNIFIAPI_site_name"]}'])
api = SiteManagerAPI( api = SiteManagerAPI(
api_key=siteDict["UNIFIAPI_api_key"], api_key=siteDict["UNIFIAPI_api_key"],
version=siteDict["UNIFIAPI_api_version"], version=siteDict["UNIFIAPI_api_version"],
base_url=siteDict["UNIFIAPI_base_url"], base_url=siteDict["UNIFIAPI_base_url"],
verify_ssl=siteDict["UNIFIAPI_verify_ssl"] verify_ssl=siteDict["UNIFIAPI_verify_ssl"]
) )
sites_resp = api.get_sites() sites_resp = api.get_sites()
sites = sites_resp.get("data", []) sites = sites_resp.get("data", [])
@@ -67,18 +66,18 @@ def main():
# Process the data into native application tables # Process the data into native application tables
if len(device_data) > 0: if len(device_data) > 0:
# insert devices into the lats_result.log # insert devices into the lats_result.log
for device in device_data: for device in device_data:
plugin_objects.add_object( plugin_objects.add_object(
primaryId = device['dev_mac'], # mac primaryId = device['dev_mac'], # mac
secondaryId = device['dev_ip'], # IP secondaryId = device['dev_ip'], # IP
watched1 = device['dev_name'], # name watched1 = device['dev_name'], # name
watched2 = device['dev_type'], # device_type (AP/Switch etc) watched2 = device['dev_type'], # device_type (AP/Switch etc)
watched3 = device['dev_connected'], # connectedAt or empty watched3 = device['dev_connected'], # connectedAt or empty
watched4 = device['dev_parent_mac'],# parent_mac or "Internet" watched4 = device['dev_parent_mac'], # parent_mac or "Internet"
extra = '', extra = '',
foreignKey = device['dev_mac'] foreignKey = device['dev_mac']
) )
mylog('verbose', [f'[{pluginName}] New entries: "{len(device_data)}"']) mylog('verbose', [f'[{pluginName}] New entries: "{len(device_data)}"'])
@@ -87,6 +86,7 @@ def main():
return 0 return 0
# retrieve data # retrieve data
def get_device_data(site, api): def get_device_data(site, api):
device_data = [] device_data = []
@@ -146,8 +146,8 @@ def get_device_data(site, api):
dev_parent_mac = resolve_parent_mac(uplinkDeviceId) dev_parent_mac = resolve_parent_mac(uplinkDeviceId)
device_data.append({ device_data.append({
"dev_mac": dev_mac, "dev_mac": dev_mac,
"dev_ip": dev_ip, "dev_ip": dev_ip,
"dev_name": dev_name, "dev_name": dev_name,
"dev_type": dev_type, "dev_type": dev_type,
"dev_connected": dev_connected, "dev_connected": dev_connected,

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python # !/usr/bin/env python
# Inspired by https://github.com/stevehoek/Pi.Alert # Inspired by https://github.com/stevehoek/Pi.Alert
from __future__ import unicode_literals from __future__ import unicode_literals
@@ -14,12 +14,12 @@ from pyunifi.controller import Controller
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app') INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"]) sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from plugin_helper import Plugin_Objects, rmBadChars, is_typical_router_ip, is_mac from plugin_helper import Plugin_Objects, rmBadChars, is_typical_router_ip, is_mac # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value, normalize_string from helper import get_setting_value, normalize_string # noqa: E402 [flake8 lint suppression]
import conf import conf # noqa: E402 [flake8 lint suppression]
from pytz import timezone from pytz import timezone # noqa: E402 [flake8 lint suppression]
from const import logPath from const import logPath # noqa: E402 [flake8 lint suppression]
# Make sure the TIMEZONE for logging is correct # Make sure the TIMEZONE for logging is correct
conf.tz = timezone(get_setting_value('TIMEZONE')) conf.tz = timezone(get_setting_value('TIMEZONE'))
@@ -37,21 +37,16 @@ LOCK_FILE = os.path.join(LOG_PATH, f'full_run.{pluginName}.lock')
urllib3.disable_warnings(InsecureRequestWarning) urllib3.disable_warnings(InsecureRequestWarning)
# Workflow
def main(): def main():
mylog('verbose', [f'[{pluginName}] In script']) mylog('verbose', [f'[{pluginName}] In script'])
# init global variables # init global variables
global UNIFI_USERNAME, UNIFI_PASSWORD, UNIFI_HOST, UNIFI_SITES, PORT, VERIFYSSL, VERSION, FULL_IMPORT global UNIFI_USERNAME, UNIFI_PASSWORD, UNIFI_HOST, UNIFI_SITES, PORT, VERIFYSSL, VERSION, FULL_IMPORT
# parse output # parse output
plugin_objects = Plugin_Objects(RESULT_FILE) plugin_objects = Plugin_Objects(RESULT_FILE)
UNIFI_USERNAME = get_setting_value("UNFIMP_username") UNIFI_USERNAME = get_setting_value("UNFIMP_username")
UNIFI_PASSWORD = get_setting_value("UNFIMP_password") UNIFI_PASSWORD = get_setting_value("UNFIMP_password")
UNIFI_HOST = get_setting_value("UNFIMP_host") UNIFI_HOST = get_setting_value("UNFIMP_host")
@@ -64,12 +59,11 @@ def main():
plugin_objects = get_entries(plugin_objects) plugin_objects = get_entries(plugin_objects)
plugin_objects.write_result_file() plugin_objects.write_result_file()
mylog('verbose', [f'[{pluginName}] Scan finished, found {len(plugin_objects)} devices']) mylog('verbose', [f'[{pluginName}] Scan finished, found {len(plugin_objects)} devices'])
# .............................................
# .............................................
def get_entries(plugin_objects: Plugin_Objects) -> Plugin_Objects: def get_entries(plugin_objects: Plugin_Objects) -> Plugin_Objects:
global VERIFYSSL global VERIFYSSL
@@ -79,27 +73,26 @@ def get_entries(plugin_objects: Plugin_Objects) -> Plugin_Objects:
mylog('verbose', [f'[{pluginName}] sites: {UNIFI_SITES}']) mylog('verbose', [f'[{pluginName}] sites: {UNIFI_SITES}'])
if (VERIFYSSL.upper() == "TRUE"): if (VERIFYSSL.upper() == "TRUE"):
VERIFYSSL = True VERIFYSSL = True
else: else:
VERIFYSSL = False VERIFYSSL = False
# mylog('verbose', [f'[{pluginName}] sites: {sites}']) # mylog('verbose', [f'[{pluginName}] sites: {sites}'])
for site in UNIFI_SITES: for site in UNIFI_SITES:
mylog('verbose', [f'[{pluginName}] site: {site}']) mylog('verbose', [f'[{pluginName}] site: {site}'])
c = Controller( c = Controller(
UNIFI_HOST, UNIFI_HOST,
UNIFI_USERNAME, UNIFI_USERNAME,
UNIFI_PASSWORD, UNIFI_PASSWORD,
port=PORT, port=PORT,
version=VERSION, version=VERSION,
ssl_verify=VERIFYSSL, ssl_verify=VERIFYSSL,
site_id=site) site_id=site)
online_macs = set() online_macs = set()
processed_macs = [] processed_macs = []
@@ -114,7 +107,7 @@ def get_entries(plugin_objects: Plugin_Objects) -> Plugin_Objects:
plugin_objects=plugin_objects, plugin_objects=plugin_objects,
device_label='client', device_label='client',
device_vendor="", device_vendor="",
force_import=True # These are online clients, force import force_import=True # These are online clients, force import
) )
mylog('verbose', [f'[{pluginName}] Found {len(plugin_objects)} Online Devices']) mylog('verbose', [f'[{pluginName}] Found {len(plugin_objects)} Online Devices'])
@@ -154,11 +147,9 @@ def get_entries(plugin_objects: Plugin_Objects) -> Plugin_Objects:
mylog('verbose', [f'[{pluginName}] Found {len(plugin_objects)} Users']) mylog('verbose', [f'[{pluginName}] Found {len(plugin_objects)} Users'])
mylog('verbose', [f'[{pluginName}] check if Lock file needs to be modified']) mylog('verbose', [f'[{pluginName}] check if Lock file needs to be modified'])
set_lock_file_value(FULL_IMPORT, lock_file_value) set_lock_file_value(FULL_IMPORT, lock_file_value)
mylog('verbose', [f'[{pluginName}] Found {len(plugin_objects)} Clients overall']) mylog('verbose', [f'[{pluginName}] Found {len(plugin_objects)} Clients overall'])
return plugin_objects return plugin_objects
@@ -173,19 +164,19 @@ def collect_details(device_type, devices, online_macs, processed_macs, plugin_ob
name = get_name(get_unifi_val(device, 'name'), get_unifi_val(device, 'hostname')) name = get_name(get_unifi_val(device, 'name'), get_unifi_val(device, 'hostname'))
ipTmp = get_ip(get_unifi_val(device, 'lan_ip'), get_unifi_val(device, 'last_ip'), get_unifi_val(device, 'fixed_ip'), get_unifi_val(device, 'ip')) ipTmp = get_ip(get_unifi_val(device, 'lan_ip'), get_unifi_val(device, 'last_ip'), get_unifi_val(device, 'fixed_ip'), get_unifi_val(device, 'ip'))
macTmp = device['mac'] macTmp = device['mac']
# continue only if valid MAC address # continue only if valid MAC address
if is_mac(macTmp): if is_mac(macTmp):
status = 1 if macTmp in online_macs else device.get('state', 0) status = 1 if macTmp in online_macs else device.get('state', 0)
deviceType = device_type.get(device.get('type'), '') deviceType = device_type.get(device.get('type'), '')
parentMac = get_parent_mac(get_unifi_val(device, 'uplink_mac'), get_unifi_val(device, 'ap_mac'), get_unifi_val(device, 'sw_mac')) parentMac = get_parent_mac(get_unifi_val(device, 'uplink_mac'), get_unifi_val(device, 'ap_mac'), get_unifi_val(device, 'sw_mac'))
# override parent MAC if this is a router # override parent MAC if this is a router
if parentMac == 'null' and is_typical_router_ip(ipTmp): if parentMac == 'null' and is_typical_router_ip(ipTmp):
parentMac = 'Internet' parentMac = 'Internet'
# Add object only if not processed # Add object only if not processed
if macTmp not in processed_macs and ( status == 1 or force_import is True ): if macTmp not in processed_macs and (status == 1 or force_import is True):
plugin_objects.add_object( plugin_objects.add_object(
primaryId=macTmp, primaryId=macTmp,
secondaryId=ipTmp, secondaryId=ipTmp,
@@ -203,7 +194,8 @@ def collect_details(device_type, devices, online_macs, processed_macs, plugin_ob
processed_macs.append(macTmp) processed_macs.append(macTmp)
else: else:
mylog('verbose', [f'[{pluginName}] Skipping, not a valid MAC address: {macTmp}']) mylog('verbose', [f'[{pluginName}] Skipping, not a valid MAC address: {macTmp}'])
# ----------------------------------------------------------------------------- # -----------------------------------------------------------------------------
def get_unifi_val(obj, key, default='null'): def get_unifi_val(obj, key, default='null'):
if isinstance(obj, dict): if isinstance(obj, dict):
@@ -212,9 +204,9 @@ def get_unifi_val(obj, key, default='null'):
for k, v in obj.items(): for k, v in obj.items():
if isinstance(v, dict): if isinstance(v, dict):
result = get_unifi_val(v, key, default) result = get_unifi_val(v, key, default)
if result not in ['','None', None, 'null']: if result not in ['', 'None', None, 'null']:
return result return result
mylog('trace', [f'[{pluginName}] Value not found for key "{key}" in obj "{json.dumps(obj)}"']) mylog('trace', [f'[{pluginName}] Value not found for key "{key}" in obj "{json.dumps(obj)}"'])
return default return default
@@ -226,6 +218,7 @@ def get_name(*names: str) -> str:
return rmBadChars(name) return rmBadChars(name)
return 'null' return 'null'
# ----------------------------------------------------------------------------- # -----------------------------------------------------------------------------
def get_parent_mac(*macs: str) -> str: def get_parent_mac(*macs: str) -> str:
for mac in macs: for mac in macs:
@@ -233,6 +226,7 @@ def get_parent_mac(*macs: str) -> str:
return mac return mac
return 'null' return 'null'
# ----------------------------------------------------------------------------- # -----------------------------------------------------------------------------
def get_port(*ports: str) -> str: def get_port(*ports: str) -> str:
for port in ports: for port in ports:
@@ -240,12 +234,6 @@ def get_port(*ports: str) -> str:
return port return port
return 'null' return 'null'
# -----------------------------------------------------------------------------
def get_port(*macs: str) -> str:
for mac in macs:
if mac and mac != 'null':
return mac
return 'null'
# ----------------------------------------------------------------------------- # -----------------------------------------------------------------------------
def get_ip(*ips: str) -> str: def get_ip(*ips: str) -> str:
@@ -271,7 +259,7 @@ def set_lock_file_value(config_value: str, lock_file_value: bool) -> None:
mylog('verbose', [f'[{pluginName}] Setting lock value for "full import" to {out}']) mylog('verbose', [f'[{pluginName}] Setting lock value for "full import" to {out}'])
with open(LOCK_FILE, 'w') as lock_file: with open(LOCK_FILE, 'w') as lock_file:
lock_file.write(str(out)) lock_file.write(str(out))
# ----------------------------------------------------------------------------- # -----------------------------------------------------------------------------
@@ -286,15 +274,16 @@ def read_lock_file() -> bool:
# ----------------------------------------------------------------------------- # -----------------------------------------------------------------------------
def check_full_run_state(config_value: str, lock_file_value: bool) -> bool: def check_full_run_state(config_value: str, lock_file_value: bool) -> bool:
if config_value == 'always' or (config_value == 'once' and lock_file_value == False): if config_value == 'always' or (config_value == 'once' and lock_file_value is False):
mylog('verbose', [f'[{pluginName}] Full import needs to be done: config_value: {config_value} and lock_file_value: {lock_file_value}']) mylog('verbose', [f'[{pluginName}] Full import needs to be done: config_value: {config_value} and lock_file_value: {lock_file_value}'])
return True return True
else: else:
mylog('verbose', [f'[{pluginName}] Full import NOT needed: config_value: {config_value} and lock_file_value: {lock_file_value}']) mylog('verbose', [f'[{pluginName}] Full import NOT needed: config_value: {config_value} and lock_file_value: {lock_file_value}'])
return False return False
#===============================================================================
# ===============================================================================
# BEGIN # BEGIN
#=============================================================================== # ===============================================================================
if __name__ == '__main__': if __name__ == '__main__':
main() main()

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python # !/usr/bin/env python
import os import os
import sys import sys
@@ -9,13 +9,13 @@ import sqlite3
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app') INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"]) sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from plugin_helper import Plugin_Objects, handleEmpty from plugin_helper import Plugin_Objects, handleEmpty # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
from const import logPath, applicationPath, fullDbPath from const import logPath, applicationPath, fullDbPath # noqa: E402 [flake8 lint suppression]
from scan.device_handling import query_MAC_vendor from scan.device_handling import query_MAC_vendor # noqa: E402 [flake8 lint suppression]
import conf import conf # noqa: E402 [flake8 lint suppression]
from pytz import timezone from pytz import timezone # noqa: E402 [flake8 lint suppression]
# Make sure the TIMEZONE for logging is correct # Make sure the TIMEZONE for logging is correct
conf.tz = timezone(get_setting_value('TIMEZONE')) conf.tz = timezone(get_setting_value('TIMEZONE'))
@@ -25,17 +25,17 @@ Logger(get_setting_value('LOG_LEVEL'))
pluginName = 'VNDRPDT' pluginName = 'VNDRPDT'
LOG_PATH = logPath + '/plugins' LOG_PATH = logPath + '/plugins'
LOG_FILE = os.path.join(LOG_PATH, f'script.{pluginName}.log') LOG_FILE = os.path.join(LOG_PATH, f'script.{pluginName}.log')
RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log') RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
def main():
mylog('verbose', ['[VNDRPDT] In script']) def main():
# Get newest DB mylog('verbose', ['[VNDRPDT] In script'])
update_vendor_database()
# Get newest DB
update_vendor_database()
# Resolve missing vendors # Resolve missing vendors
plugin_objects = Plugin_Objects(RESULT_FILE) plugin_objects = Plugin_Objects(RESULT_FILE)
@@ -43,67 +43,67 @@ def main():
plugin_objects = update_vendors(fullDbPath, plugin_objects) plugin_objects = update_vendors(fullDbPath, plugin_objects)
plugin_objects.write_result_file() plugin_objects.write_result_file()
mylog('verbose', ['[VNDRPDT] Update complete']) mylog('verbose', ['[VNDRPDT] Update complete'])
return 0 return 0
#===============================================================================
# ===============================================================================
# Update device vendors database # Update device vendors database
#=============================================================================== # ===============================================================================
def update_vendor_database(): def update_vendor_database():
# Update vendors DB (iab oui) # Update vendors DB (iab oui)
mylog('verbose', [' Updating vendors DB (iab & oui)']) mylog('verbose', [' Updating vendors DB (iab & oui)'])
update_args = ['sh', applicationPath + '/services/update_vendors.sh'] update_args = ['sh', applicationPath + '/services/update_vendors.sh']
# Execute command # Execute command
try: try:
# try runnning a subprocess safely # try runnning a subprocess safely
update_output = subprocess.check_output (update_args) subprocess.check_output(update_args)
except subprocess.CalledProcessError as e: except subprocess.CalledProcessError as e:
# An error occured, handle it # An error occured, handle it
mylog('verbose', [' FAILED: Updating vendors DB, set LOG_LEVEL=debug for more info']) mylog('verbose', [' FAILED: Updating vendors DB, set LOG_LEVEL=debug for more info'])
mylog('verbose', [e.output]) mylog('verbose', [e.output])
# ------------------------------------------------------------------------------ # ------------------------------------------------------------------------------
# resolve missing vendors # resolve missing vendors
def update_vendors (dbPath, plugin_objects): def update_vendors(dbPath, plugin_objects):
# Connect to the App SQLite database # Connect to the App SQLite database
conn = sqlite3.connect(dbPath) conn = sqlite3.connect(dbPath)
sql = conn.cursor() sql = conn.cursor()
# Initialize variables # Initialize variables
recordsToUpdate = []
ignored = 0 ignored = 0
notFound = 0 notFound = 0
mylog('verbose', [' Searching devices vendor'])
mylog('verbose', [' Searching devices vendor'])
# Get devices without a vendor # Get devices without a vendor
sql.execute ("""SELECT sql.execute("""SELECT
devMac, devMac,
devLastIP, devLastIP,
devName, devName,
devVendor devVendor
FROM Devices FROM Devices
WHERE devVendor = '(unknown)' WHERE devVendor = '(unknown)'
OR devVendor = '(Unknown)' OR devVendor = '(Unknown)'
OR devVendor = '' OR devVendor = ''
OR devVendor IS NULL OR devVendor IS NULL
""") """)
devices = sql.fetchall() devices = sql.fetchall()
conn.commit() conn.commit()
# Close the database connection # Close the database connection
conn.close() conn.close()
# All devices loop # All devices loop
for device in devices: for device in devices:
# Search vendor in HW Vendors DB # Search vendor in HW Vendors DB
vendor = query_MAC_vendor (device[0]) vendor = query_MAC_vendor(device[0])
if vendor == -1 : if vendor == -1 :
notFound += 1 notFound += 1
elif vendor == -2 : elif vendor == -2 :
@@ -112,27 +112,25 @@ def update_vendors (dbPath, plugin_objects):
plugin_objects.add_object( plugin_objects.add_object(
primaryId = handleEmpty(device[0]), # MAC (Device Name) primaryId = handleEmpty(device[0]), # MAC (Device Name)
secondaryId = handleEmpty(device[1]), # IP Address (always 0.0.0.0) secondaryId = handleEmpty(device[1]), # IP Address (always 0.0.0.0)
watched1 = handleEmpty(vendor), watched1 = handleEmpty(vendor),
watched2 = handleEmpty(device[2]), # Device name watched2 = handleEmpty(device[2]), # Device name
watched3 = "", watched3 = "",
watched4 = "", watched4 = "",
extra = "", extra = "",
foreignKey = handleEmpty(device[0]) foreignKey = handleEmpty(device[0])
) )
# Print log # Print log
mylog('verbose', [" Devices Ignored : ", ignored]) mylog('verbose', [" Devices Ignored : ", ignored])
mylog('verbose', [" Devices with missing vendor : ", len(devices)]) mylog('verbose', [" Devices with missing vendor : ", len(devices)])
mylog('verbose', [" Vendors Not Found : ", notFound]) mylog('verbose', [" Vendors Not Found : ", notFound])
mylog('verbose', [" Vendors updated : ", len(plugin_objects) ]) mylog('verbose', [" Vendors updated : ", len(plugin_objects)])
return plugin_objects return plugin_objects
#=============================================================================== # ===============================================================================
# BEGIN # BEGIN
#=============================================================================== # ===============================================================================
if __name__ == '__main__': if __name__ == '__main__':
main() main()

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python # !/usr/bin/env python
import os import os
import sys import sys
@@ -9,13 +9,13 @@ from wakeonlan import send_magic_packet
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app') INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"]) sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from plugin_helper import Plugin_Objects from plugin_helper import Plugin_Objects # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from const import logPath from const import logPath # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
from database import DB from database import DB # noqa: E402 [flake8 lint suppression]
from models.device_instance import DeviceInstance from models.device_instance import DeviceInstance # noqa: E402 [flake8 lint suppression]
import conf import conf # noqa: E402 [flake8 lint suppression]
# Make sure the TIMEZONE for logging is correct # Make sure the TIMEZONE for logging is correct
conf.tz = timezone(get_setting_value('TIMEZONE')) conf.tz = timezone(get_setting_value('TIMEZONE'))
@@ -34,9 +34,8 @@ RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
plugin_objects = Plugin_Objects(RESULT_FILE) plugin_objects = Plugin_Objects(RESULT_FILE)
def main(): def main():
mylog('none', [f'[{pluginName}] In script']) mylog('none', [f'[{pluginName}] In script'])
# Retrieve configuration settings # Retrieve configuration settings
broadcast_ips = get_setting_value('WOL_broadcast_ips') broadcast_ips = get_setting_value('WOL_broadcast_ips')
@@ -58,7 +57,7 @@ def main():
devices_to_wake = device_handler.getOffline() devices_to_wake = device_handler.getOffline()
elif 'down' in devices_to_wake: elif 'down' in devices_to_wake:
devices_to_wake = device_handler.getDown() devices_to_wake = device_handler.getDown()
else: else:
@@ -89,15 +88,16 @@ def main():
# log result # log result
plugin_objects.write_result_file() plugin_objects.write_result_file()
else: else:
mylog('none', [f'[{pluginName}] No devices to wake']) mylog('none', [f'[{pluginName}] No devices to wake'])
mylog('none', [f'[{pluginName}] Script finished']) mylog('none', [f'[{pluginName}] Script finished'])
return 0 return 0
# wake # wake
def execute(port, ip, mac, name): def execute(port, ip, mac, name):
result = 'null' result = 'null'
try: try:
# Send the magic packet to wake up the device # Send the magic packet to wake up the device
@@ -105,7 +105,7 @@ def execute(port, ip, mac, name):
mylog('verbose', [f'[{pluginName}] Magic packet sent to {mac} ({name})']) mylog('verbose', [f'[{pluginName}] Magic packet sent to {mac} ({name})'])
result = 'success' result = 'success'
except Exception as e: except Exception as e:
result = str(e) result = str(e)
mylog('verbose', [f'[{pluginName}] Failed to send magic packet to {mac} ({name}): {e}']) mylog('verbose', [f'[{pluginName}] Failed to send magic packet to {mac} ({name}): {e}'])
@@ -113,5 +113,6 @@ def execute(port, ip, mac, name):
# Return the data result # Return the data result
return result return result
if __name__ == '__main__': if __name__ == '__main__':
main() main()

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python # !/usr/bin/env python
# Based on the work of https://github.com/leiweibau/Pi.Alert # Based on the work of https://github.com/leiweibau/Pi.Alert
import requests import requests
@@ -12,12 +12,12 @@ from urllib3.exceptions import InsecureRequestWarning
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app') INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"]) sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from plugin_helper import Plugin_Objects from plugin_helper import Plugin_Objects # noqa: E402 [flake8 lint suppression]
from const import logPath from const import logPath # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
import conf import conf # noqa: E402 [flake8 lint suppression]
from pytz import timezone from pytz import timezone # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
# Make sure the TIMEZONE for logging is correct # Make sure the TIMEZONE for logging is correct
conf.tz = timezone(get_setting_value('TIMEZONE')) conf.tz = timezone(get_setting_value('TIMEZONE'))
@@ -30,15 +30,14 @@ pluginName = 'WEBMON'
LOG_PATH = logPath + '/plugins' LOG_PATH = logPath + '/plugins'
RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log') RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
mylog('verbose', [f'[{pluginName}] In script']) mylog('verbose', [f'[{pluginName}] In script'])
def main(): def main():
values = get_setting_value('WEBMON_urls_to_check') values = get_setting_value('WEBMON_urls_to_check')
mylog('verbose', [f'[{pluginName}] Checking URLs: {values}']) mylog('verbose', [f'[{pluginName}] Checking URLs: {values}'])
if len(values) > 0: if len(values) > 0:
plugin_objects = Plugin_Objects(RESULT_FILE) plugin_objects = Plugin_Objects(RESULT_FILE)
@@ -48,12 +47,13 @@ def main():
else: else:
return return
def check_services_health(site): def check_services_health(site):
mylog('verbose', [f'[{pluginName}] Checking {site}']) mylog('verbose', [f'[{pluginName}] Checking {site}'])
urllib3.disable_warnings(InsecureRequestWarning) urllib3.disable_warnings(InsecureRequestWarning)
try: try:
resp = requests.get(site, verify=False, timeout=get_setting_value('WEBMON_RUN_TIMEOUT'), headers={"User-Agent": "NetAlertX"}) resp = requests.get(site, verify=False, timeout=get_setting_value('WEBMON_RUN_TIMEOUT'), headers={"User-Agent": "NetAlertX"})
latency = resp.elapsed.total_seconds() latency = resp.elapsed.total_seconds()
@@ -79,12 +79,13 @@ def check_services_health(site):
return status, latency return status, latency
def service_monitoring(urls, plugin_objects): def service_monitoring(urls, plugin_objects):
for site in urls: for site in urls:
status, latency = check_services_health(site) status, latency = check_services_health(site)
plugin_objects.add_object( plugin_objects.add_object(
primaryId=site, primaryId=site,
secondaryId='null', secondaryId='null',
watched1=status, watched1=status,
watched2=latency, watched2=latency,
watched3='null', watched3='null',
@@ -94,7 +95,6 @@ def service_monitoring(urls, plugin_objects):
) )
return plugin_objects return plugin_objects
if __name__ == '__main__': if __name__ == '__main__':
sys.exit(main()) sys.exit(main())

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python3 # !/usr/bin/env python3
import os import os
import sys import sys

View File

@@ -1,5 +1,5 @@
#!/usr/bin/env python3 # !/usr/bin/env python3
# -*- coding: utf-8 -*- # -*- coding: utf-8 -*-
""" """
NetAlertX-New-Devices-Checkmk-Script NetAlertX-New-Devices-Checkmk-Script
@@ -19,11 +19,12 @@ import subprocess
import json import json
import os import os
def check_new_devices(): def check_new_devices():
# Get API path from environment variable, fallback to /tmp/api # Get API path from environment variable, fallback to /tmp/api
api_path = os.environ.get('NETALERTX_API', '/tmp/api') api_path = os.environ.get('NETALERTX_API', '/tmp/api')
table_devices_path = f'{api_path}/table_devices.json' table_devices_path = f'{api_path}/table_devices.json'
try: try:
# Rufe die JSON-Datei aus dem Docker-Container ab # Rufe die JSON-Datei aus dem Docker-Container ab
result = subprocess.run( result = subprocess.run(
@@ -73,6 +74,6 @@ def check_new_devices():
) )
print(f"1 NetAlertX_New_Devices - WARNING - Found {len(new_devices)} new device(s): {device_list_str}") print(f"1 NetAlertX_New_Devices - WARNING - Found {len(new_devices)} new device(s): {device_list_str}")
if __name__ == "__main__": if __name__ == "__main__":
check_new_devices() check_new_devices()

View File

@@ -1,8 +1,8 @@
#!/usr/bin/env python3 # !/usr/bin/env python3
import subprocess import subprocess
import sys
import os import os
def run_sqlite_command(command): def run_sqlite_command(command):
# Use environment variable with fallback # Use environment variable with fallback
db_path = os.path.join( db_path = os.path.join(
@@ -19,18 +19,19 @@ def run_sqlite_command(command):
print(f"Error executing command: {e}") print(f"Error executing command: {e}")
return None return None
def check_and_clean_device(): def check_and_clean_device():
while True: while True:
print("\nDevice Cleanup Tool") print("\nDevice Cleanup Tool")
print("1. Check/Clean by MAC address") print("1. Check/Clean by MAC address")
print("2. Check/Clean by IP address") print("2. Check/Clean by IP address")
print("3. Exit") print("3. Exit")
choice = input("\nSelect option (1-3): ") choice = input("\nSelect option (1-3): ")
if choice == "1": if choice == "1":
mac = input("Enter MAC address (format: xx:xx:xx:xx:xx:xx): ").lower() mac = input("Enter MAC address (format: xx:xx:xx:xx:xx:xx): ").lower()
# Check all tables for MAC # Check all tables for MAC
tables_checks = [ tables_checks = [
f"SELECT 'Events' as source, * FROM Events WHERE eve_MAC='{mac}'", f"SELECT 'Events' as source, * FROM Events WHERE eve_MAC='{mac}'",
@@ -40,14 +41,14 @@ def check_and_clean_device():
f"SELECT 'AppEvents' as source, * FROM AppEvents WHERE ObjectPrimaryID LIKE '%{mac}%' OR ObjectSecondaryID LIKE '%{mac}%'", f"SELECT 'AppEvents' as source, * FROM AppEvents WHERE ObjectPrimaryID LIKE '%{mac}%' OR ObjectSecondaryID LIKE '%{mac}%'",
f"SELECT 'Plugins_Objects' as source, * FROM Plugins_Objects WHERE Object_PrimaryID LIKE '%{mac}%'" f"SELECT 'Plugins_Objects' as source, * FROM Plugins_Objects WHERE Object_PrimaryID LIKE '%{mac}%'"
] ]
found = False found = False
for check in tables_checks: for check in tables_checks:
result = run_sqlite_command(check) result = run_sqlite_command(check)
if result and result.strip(): if result and result.strip():
found = True found = True
print(f"\nFound entries:\n{result}") print(f"\nFound entries:\n{result}")
if found: if found:
confirm = input("\nWould you like to clean these entries? (y/n): ") confirm = input("\nWould you like to clean these entries? (y/n): ")
if confirm.lower() == 'y': if confirm.lower() == 'y':
@@ -60,16 +61,16 @@ def check_and_clean_device():
f"DELETE FROM AppEvents WHERE ObjectPrimaryID LIKE '%{mac}%' OR ObjectSecondaryID LIKE '%{mac}%'", f"DELETE FROM AppEvents WHERE ObjectPrimaryID LIKE '%{mac}%' OR ObjectSecondaryID LIKE '%{mac}%'",
f"DELETE FROM Plugins_Objects WHERE Object_PrimaryID LIKE '%{mac}%'" f"DELETE FROM Plugins_Objects WHERE Object_PrimaryID LIKE '%{mac}%'"
] ]
for delete in deletes: for delete in deletes:
run_sqlite_command(delete) run_sqlite_command(delete)
print("Cleanup completed!") print("Cleanup completed!")
else: else:
print("\nNo entries found for this MAC address") print("\nNo entries found for this MAC address")
elif choice == "2": elif choice == "2":
ip = input("Enter IP address (format: xxx.xxx.xxx.xxx): ") ip = input("Enter IP address (format: xxx.xxx.xxx.xxx): ")
# Check all tables for IP # Check all tables for IP
tables_checks = [ tables_checks = [
f"SELECT 'Events' as source, * FROM Events WHERE eve_IP='{ip}'", f"SELECT 'Events' as source, * FROM Events WHERE eve_IP='{ip}'",
@@ -79,14 +80,14 @@ def check_and_clean_device():
f"SELECT 'AppEvents' as source, * FROM AppEvents WHERE ObjectSecondaryID LIKE '%{ip}%'", f"SELECT 'AppEvents' as source, * FROM AppEvents WHERE ObjectSecondaryID LIKE '%{ip}%'",
f"SELECT 'Plugins_Objects' as source, * FROM Plugins_Objects WHERE Object_SecondaryID LIKE '%{ip}%'" f"SELECT 'Plugins_Objects' as source, * FROM Plugins_Objects WHERE Object_SecondaryID LIKE '%{ip}%'"
] ]
found = False found = False
for check in tables_checks: for check in tables_checks:
result = run_sqlite_command(check) result = run_sqlite_command(check)
if result and result.strip(): if result and result.strip():
found = True found = True
print(f"\nFound entries:\n{result}") print(f"\nFound entries:\n{result}")
if found: if found:
confirm = input("\nWould you like to clean these entries? (y/n): ") confirm = input("\nWould you like to clean these entries? (y/n): ")
if confirm.lower() == 'y': if confirm.lower() == 'y':
@@ -99,19 +100,20 @@ def check_and_clean_device():
f"DELETE FROM AppEvents WHERE ObjectSecondaryID LIKE '%{ip}%'", f"DELETE FROM AppEvents WHERE ObjectSecondaryID LIKE '%{ip}%'",
f"DELETE FROM Plugins_Objects WHERE Object_SecondaryID LIKE '%{ip}%'" f"DELETE FROM Plugins_Objects WHERE Object_SecondaryID LIKE '%{ip}%'"
] ]
for delete in deletes: for delete in deletes:
run_sqlite_command(delete) run_sqlite_command(delete)
print("Cleanup completed!") print("Cleanup completed!")
else: else:
print("\nNo entries found for this IP address") print("\nNo entries found for this IP address")
elif choice == "3": elif choice == "3":
print("\nExiting...") print("\nExiting...")
break break
else: else:
print("\nInvalid option, please try again") print("\nInvalid option, please try again")
if __name__ == "__main__": if __name__ == "__main__":
check_and_clean_device() check_and_clean_device()

View File

@@ -1,6 +1,5 @@
#!/usr/bin/env python3 #!/usr/bin/env python3
import paramiko import paramiko
import re
from datetime import datetime from datetime import datetime
import argparse import argparse
import sys import sys
@@ -8,6 +7,9 @@ from pathlib import Path
import time import time
import logging import logging
logger = None
def setup_logging(debug=False): def setup_logging(debug=False):
"""Configure logging based on debug flag.""" """Configure logging based on debug flag."""
level = logging.DEBUG if debug else logging.INFO level = logging.DEBUG if debug else logging.INFO
@@ -18,6 +20,7 @@ def setup_logging(debug=False):
) )
return logging.getLogger(__name__) return logging.getLogger(__name__)
def parse_timestamp(date_str): def parse_timestamp(date_str):
"""Convert OPNsense timestamp to Unix epoch time.""" """Convert OPNsense timestamp to Unix epoch time."""
try: try:
@@ -27,7 +30,7 @@ def parse_timestamp(date_str):
dt = datetime.strptime(clean_date, '%Y/%m/%d %H:%M:%S') dt = datetime.strptime(clean_date, '%Y/%m/%d %H:%M:%S')
return int(dt.timestamp()) return int(dt.timestamp())
except Exception as e: except Exception as e:
logger.error(f"Failed to parse timestamp: {date_str}") logger.error(f"Failed to parse timestamp: {date_str} ({e})")
return None return None
@@ -39,8 +42,14 @@ def get_lease_file(hostname, username, password=None, key_filename=None, port=22
try: try:
logger.debug(f"Attempting to connect to {hostname}:{port} as {username}") logger.debug(f"Attempting to connect to {hostname}:{port} as {username}")
ssh.connect(hostname, port=port, username=username,
password=password, key_filename=key_filename) ssh.connect(
hostname,
port=port,
username=username,
password=password,
key_filename=key_filename
)
# Get an interactive shell session # Get an interactive shell session
logger.debug("Opening interactive SSH channel") logger.debug("Opening interactive SSH channel")
@@ -75,10 +84,23 @@ def get_lease_file(hostname, username, password=None, key_filename=None, port=22
# Clean up the output by removing the command echo and shell prompts # Clean up the output by removing the command echo and shell prompts
lines = output.split('\n') lines = output.split('\n')
# Remove first line (command echo) and any lines containing shell prompts # Remove first line (command echo) and any lines containing shell prompts
cleaned_lines = [line for line in lines # cleaned_lines = [line for line in lines
if not line.strip().startswith(command.strip()) # if not line.strip().startswith(command.strip()) and not line.strip().endswith('> ') and not line.strip().endswith('# ')]
and not line.strip().endswith('> ') cmd = command.strip()
and not line.strip().endswith('# ')]
cleaned_lines = []
for line in lines:
stripped = line.strip()
if stripped.startswith(cmd):
continue
if stripped.endswith('> '):
continue
if stripped.endswith('# '):
continue
cleaned_lines.append(line)
cleaned_output = '\n'.join(cleaned_lines) cleaned_output = '\n'.join(cleaned_lines)
logger.debug(f"Final cleaned output length: {len(cleaned_output)} characters") logger.debug(f"Final cleaned output length: {len(cleaned_output)} characters")
@@ -156,9 +178,7 @@ def parse_lease_file(lease_content):
# Filter only active leases # Filter only active leases
active_leases = [lease for lease in leases active_leases = [lease for lease in leases
if lease.get('state') == 'active' if lease.get('state') == 'active' and 'mac' in lease and 'ip' in lease]
and 'mac' in lease
and 'ip' in lease]
logger.debug(f"Found {len(active_leases)} active leases out of {len(leases)} total leases") logger.debug(f"Found {len(active_leases)} active leases out of {len(leases)} total leases")
logger.debug("Active leases:") logger.debug("Active leases:")
@@ -206,6 +226,7 @@ def convert_to_dnsmasq(leases):
return dnsmasq_lines return dnsmasq_lines
def main(): def main():
parser = argparse.ArgumentParser(description='Convert OPNsense DHCP leases to dnsmasq format') parser = argparse.ArgumentParser(description='Convert OPNsense DHCP leases to dnsmasq format')
parser.add_argument('--host', required=True, help='OPNsense hostname or IP') parser.add_argument('--host', required=True, help='OPNsense hostname or IP')
@@ -219,6 +240,7 @@ def main():
args = parser.parse_args() args = parser.parse_args()
# Setup logging # Setup logging
global logger
logger = setup_logging(args.debug) logger = setup_logging(args.debug)
try: try:
@@ -255,5 +277,6 @@ def main():
logger.error(f"Error: {str(e)}") logger.error(f"Error: {str(e)}")
sys.exit(1) sys.exit(1)
if __name__ == '__main__': if __name__ == '__main__':
main() main()

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python # !/usr/bin/env python
# #
# ------------------------------------------------------------------------------- # -------------------------------------------------------------------------------
# NetAlertX v2.70 / 2021-02-01 # NetAlertX v2.70 / 2021-02-01
@@ -22,9 +22,9 @@ from pathlib import Path
# Register NetAlertX modules # Register NetAlertX modules
import conf import conf
from const import * from const import fullConfPath, sql_new_devices
from logger import mylog from logger import mylog
from helper import filePermissions from helper import filePermissions
from utils.datetime_utils import timeNowTZ from utils.datetime_utils import timeNowTZ
from app_state import updateState from app_state import updateState
from api import update_api from api import update_api
@@ -48,12 +48,12 @@ main structure of NetAlertX
Initialise All Initialise All
Rename old settings Rename old settings
start Loop forever start Loop forever
initialise loop initialise loop
(re)import config (re)import config
(re)import plugin config (re)import plugin config
run plugins (once) run plugins (once)
run frontend events run frontend events
update API update API
run plugins (scheduled) run plugins (scheduled)
processing scan results processing scan results
run plugins (after Scan) run plugins (after Scan)
@@ -111,7 +111,7 @@ def main():
loop_start_time = conf.loop_start_time # TODO fix loop_start_time = conf.loop_start_time # TODO fix
# Handle plugins executed ONCE # Handle plugins executed ONCE
if conf.plugins_once_run == False: if conf.plugins_once_run is False:
pm.run_plugin_scripts("once") pm.run_plugin_scripts("once")
conf.plugins_once_run = True conf.plugins_once_run = True
@@ -146,7 +146,7 @@ def main():
processScan = updateState("Check scan").processScan processScan = updateState("Check scan").processScan
mylog("debug", [f"[MAIN] processScan: {processScan}"]) mylog("debug", [f"[MAIN] processScan: {processScan}"])
if processScan == True: if processScan is True:
mylog("debug", "[MAIN] start processing scan results") mylog("debug", "[MAIN] start processing scan results")
process_scan(db) process_scan(db)
updateState("Scan processed", None, None, None, None, False) updateState("Scan processed", None, None, None, None, False)

View File

@@ -1,3 +1,4 @@
# !/usr/bin/env python
import json import json
import time import time
import threading import threading
@@ -145,8 +146,7 @@ class api_endpoint_class:
self.needsUpdate = True self.needsUpdate = True
# Only update changeDetectedWhen if it hasn't been set recently # Only update changeDetectedWhen if it hasn't been set recently
if not self.changeDetectedWhen or current_time > ( if not self.changeDetectedWhen or current_time > (
self.changeDetectedWhen self.changeDetectedWhen + datetime.timedelta(seconds=self.debounce_interval)
+ datetime.timedelta(seconds=self.debounce_interval)
): ):
self.changeDetectedWhen = ( self.changeDetectedWhen = (
current_time # Set timestamp for change detection current_time # Set timestamp for change detection
@@ -164,8 +164,7 @@ class api_endpoint_class:
self.needsUpdate = True self.needsUpdate = True
# Only update changeDetectedWhen if it hasn't been set recently # Only update changeDetectedWhen if it hasn't been set recently
if not self.changeDetectedWhen or current_time > ( if not self.changeDetectedWhen or current_time > (
self.changeDetectedWhen self.changeDetectedWhen + datetime.timedelta(seconds=self.debounce_interval)
+ datetime.timedelta(seconds=self.debounce_interval)
): ):
self.changeDetectedWhen = ( self.changeDetectedWhen = (
current_time # Initialize timestamp for new endpoint current_time # Initialize timestamp for new endpoint
@@ -180,17 +179,15 @@ class api_endpoint_class:
current_time = timeNowTZ() current_time = timeNowTZ()
# Debugging info to understand the issue # Debugging info to understand the issue
# mylog('debug', [f'[API] api_endpoint_class: {self.fileName} is_ad_hoc_user_event {self.is_ad_hoc_user_event} last_update_time={self.last_update_time}, debounce time={self.last_update_time + datetime.timedelta(seconds=self.debounce_interval)}.']) # mylog('debug', [f'[API] api_endpoint_class: {self.fileName} is_ad_hoc_user_event
# {self.is_ad_hoc_user_event} last_update_time={self.last_update_time},
# debounce time={self.last_update_time + datetime.timedelta(seconds=self.debounce_interval)}.'])
# Only attempt to write if the debounce time has passed # Only attempt to write if the debounce time has passed
if forceUpdate == True or ( if forceUpdate is True or (
self.needsUpdate self.needsUpdate and (
and ( self.changeDetectedWhen is None or current_time > (
self.changeDetectedWhen is None self.changeDetectedWhen + datetime.timedelta(seconds=self.debounce_interval)
or current_time
> (
self.changeDetectedWhen
+ datetime.timedelta(seconds=self.debounce_interval)
) )
) )
): ):

View File

@@ -9,25 +9,68 @@ from flask_cors import CORS
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app") INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
sys.path.extend([f"{INSTALL_PATH}/server"]) sys.path.extend([f"{INSTALL_PATH}/server"])
from logger import mylog from logger import mylog # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
from db.db_helper import get_date_from_period from db.db_helper import get_date_from_period # noqa: E402 [flake8 lint suppression]
from app_state import updateState from app_state import updateState # noqa: E402 [flake8 lint suppression]
from .graphql_endpoint import devicesSchema # noqa: E402 [flake8 lint suppression]
from .graphql_endpoint import devicesSchema from .device_endpoint import ( # noqa: E402 [flake8 lint suppression]
from .device_endpoint import get_device_data, set_device_data, delete_device, delete_device_events, reset_device_props, copy_device, update_device_column get_device_data,
from .devices_endpoint import get_all_devices, delete_unknown_devices, delete_all_with_empty_macs, delete_devices, export_devices, import_csv, devices_totals, devices_by_status set_device_data,
from .events_endpoint import delete_events, delete_events_older_than, get_events, create_event, get_events_totals delete_device,
from .history_endpoint import delete_online_history delete_device_events,
from .prometheus_endpoint import get_metric_stats reset_device_props,
from .sessions_endpoint import get_sessions, delete_session, create_session, get_sessions_calendar, get_device_sessions, get_session_events copy_device,
from .nettools_endpoint import wakeonlan, traceroute, speedtest, nslookup, nmap_scan, internet_info update_device_column
from .dbquery_endpoint import read_query, write_query, update_query, delete_query )
from .sync_endpoint import handle_sync_post, handle_sync_get from .devices_endpoint import ( # noqa: E402 [flake8 lint suppression]
from .logs_endpoint import clean_log get_all_devices,
from models.user_events_queue_instance import UserEventsQueueInstance delete_unknown_devices,
from messaging.in_app import write_notification, mark_all_notifications_read, delete_notifications, get_unread_notifications, delete_notification, mark_notification_as_read delete_all_with_empty_macs,
delete_devices,
export_devices,
import_csv,
devices_totals,
devices_by_status
)
from .events_endpoint import ( # noqa: E402 [flake8 lint suppression]
delete_events,
delete_events_older_than,
get_events,
create_event,
get_events_totals
)
from .history_endpoint import delete_online_history # noqa: E402 [flake8 lint suppression]
from .prometheus_endpoint import get_metric_stats # noqa: E402 [flake8 lint suppression]
from .sessions_endpoint import ( # noqa: E402 [flake8 lint suppression]
get_sessions,
delete_session,
create_session,
get_sessions_calendar,
get_device_sessions,
get_session_events
)
from .nettools_endpoint import ( # noqa: E402 [flake8 lint suppression]
wakeonlan,
traceroute,
speedtest,
nslookup,
nmap_scan,
internet_info
)
from .dbquery_endpoint import read_query, write_query, update_query, delete_query # noqa: E402 [flake8 lint suppression]
from .sync_endpoint import handle_sync_post, handle_sync_get # noqa: E402 [flake8 lint suppression]
from .logs_endpoint import clean_log # noqa: E402 [flake8 lint suppression]
from models.user_events_queue_instance import UserEventsQueueInstance # noqa: E402 [flake8 lint suppression]
from messaging.in_app import ( # noqa: E402 [flake8 lint suppression]
write_notification,
mark_all_notifications_read,
delete_notifications,
get_unread_notifications,
delete_notification,
mark_notification_as_read
)
# Flask application # Flask application
app = Flask(__name__) app = Flask(__name__)
@@ -50,13 +93,14 @@ CORS(
allow_headers=["Authorization", "Content-Type"], allow_headers=["Authorization", "Content-Type"],
) )
# ------------------------------------------------------------------- # -------------------------------------------------------------------
# Custom handler for 404 - Route not found # Custom handler for 404 - Route not found
# ------------------------------------------------------------------- # -------------------------------------------------------------------
@app.errorhandler(404) @app.errorhandler(404)
def not_found(error): def not_found(error):
response = { response = {
"success": False, "success": False,
"error": "API route not found", "error": "API route not found",
"message": f"The requested URL {error.description if hasattr(error, 'description') else ''} was not found on the server.", "message": f"The requested URL {error.description if hasattr(error, 'description') else ''} was not found on the server.",
} }
@@ -200,7 +244,7 @@ def api_get_devices():
def api_delete_devices(): def api_delete_devices():
if not is_authorized(): if not is_authorized():
return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403 return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403
macs = request.json.get("macs") if request.is_json else None macs = request.json.get("macs") if request.is_json else None
return delete_devices(macs) return delete_devices(macs)
@@ -338,7 +382,7 @@ def dbquery_read():
if not raw_sql_b64: if not raw_sql_b64:
return jsonify({"success": False, "message": "ERROR: Missing parameters", "error": "rawSql is required"}), 400 return jsonify({"success": False, "message": "ERROR: Missing parameters", "error": "rawSql is required"}), 400
return read_query(raw_sql_b64) return read_query(raw_sql_b64)
@@ -350,7 +394,7 @@ def dbquery_write():
data = request.get_json() or {} data = request.get_json() or {}
raw_sql_b64 = data.get("rawSql") raw_sql_b64 = data.get("rawSql")
if not raw_sql_b64: if not raw_sql_b64:
return jsonify({"success": False, "message": "ERROR: Missing parameters", "error": "rawSql is required"}), 400 return jsonify({"success": False, "message": "ERROR: Missing parameters", "error": "rawSql is required"}), 400
return write_query(raw_sql_b64) return write_query(raw_sql_b64)
@@ -363,7 +407,13 @@ def dbquery_update():
data = request.get_json() or {} data = request.get_json() or {}
required = ["columnName", "id", "dbtable", "columns", "values"] required = ["columnName", "id", "dbtable", "columns", "values"]
if not all(data.get(k) for k in required): if not all(data.get(k) for k in required):
return jsonify({"success": False, "message": "ERROR: Missing parameters", "error": "Missing required 'columnName', 'id', 'dbtable', 'columns', or 'values' query parameter"}), 400 return jsonify(
{
"success": False,
"message": "ERROR: Missing parameters",
"error": "Missing required 'columnName', 'id', 'dbtable', 'columns', or 'values' query parameter"
}
), 400
return update_query( return update_query(
column_name=data["columnName"], column_name=data["columnName"],
@@ -418,12 +468,13 @@ def api_clean_log():
return clean_log(file) return clean_log(file)
@app.route("/logs/add-to-execution-queue", methods=["POST"]) @app.route("/logs/add-to-execution-queue", methods=["POST"])
def api_add_to_execution_queue(): def api_add_to_execution_queue():
if not is_authorized(): if not is_authorized():
return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403 return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403
queue = UserEventsQueueInstance() queue = UserEventsQueueInstance()
# Get JSON payload safely # Get JSON payload safely
@@ -499,7 +550,7 @@ def api_delete_old_events(days: int):
""" """
if not is_authorized(): if not is_authorized():
return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403 return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403
return delete_events_older_than(days) return delete_events_older_than(days)
@@ -619,7 +670,7 @@ def api_write_notification():
if not content: if not content:
return jsonify({"success": False, "message": "ERROR: Missing parameters", "error": "Missing content"}), 400 return jsonify({"success": False, "message": "ERROR: Missing parameters", "error": "Missing content"}), 400
write_notification(content, level) write_notification(content, level)
return jsonify({"success": True}) return jsonify({"success": True})
@@ -672,7 +723,8 @@ def api_mark_notification_read(guid):
return jsonify({"success": True}) return jsonify({"success": True})
else: else:
return jsonify({"success": False, "message": "ERROR", "error": result.get("error")}), 500 return jsonify({"success": False, "message": "ERROR", "error": result.get("error")}), 500
# -------------------------- # --------------------------
# SYNC endpoint # SYNC endpoint
# -------------------------- # --------------------------

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python # !/usr/bin/env python
import os import os
import base64 import base64
@@ -9,7 +9,7 @@ from flask import jsonify
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app") INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"]) sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from database import get_temp_db_connection from database import get_temp_db_connection # noqa: E402 [flake8 lint suppression]
def read_query(raw_sql_b64): def read_query(raw_sql_b64):

View File

@@ -1,18 +1,17 @@
#!/usr/bin/env python # !/usr/bin/env python
import os import os
import sys import sys
from datetime import datetime
from flask import jsonify, request from flask import jsonify, request
# Register NetAlertX directories # Register NetAlertX directories
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app") INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"]) sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from database import get_temp_db_connection from database import get_temp_db_connection # noqa: E402 [flake8 lint suppression]
from helper import is_random_mac, get_setting_value from helper import is_random_mac, get_setting_value # noqa: E402 [flake8 lint suppression]
from utils.datetime_utils import timeNowDB, format_date from utils.datetime_utils import timeNowDB, format_date # noqa: E402 [flake8 lint suppression]
from db.db_helper import row_to_json, get_date_from_period from db.db_helper import row_to_json, get_date_from_period # noqa: E402 [flake8 lint suppression]
# -------------------------- # --------------------------
# Device Endpoints Functions # Device Endpoints Functions
@@ -27,10 +26,10 @@ def get_device_data(mac):
cur = conn.cursor() cur = conn.cursor()
now = timeNowDB() now = timeNowDB()
# Special case for new device # Special case for new device
if mac.lower() == "new": if mac.lower() == "new":
device_data = { device_data = {
"devMac": "", "devMac": "",
"devName": "", "devName": "",
@@ -89,10 +88,10 @@ def get_device_data(mac):
ELSE 'Off-line' ELSE 'Off-line'
END AS devStatus, END AS devStatus,
(SELECT COUNT(*) FROM Sessions (SELECT COUNT(*) FROM Sessions
WHERE ses_MAC = d.devMac AND ( WHERE ses_MAC = d.devMac AND (
ses_DateTimeConnection >= {period_date_sql} OR ses_DateTimeConnection >= {period_date_sql} OR
ses_DateTimeDisconnection >= {period_date_sql} OR ses_DateTimeDisconnection >= {period_date_sql} OR
ses_StillConnected = 1 ses_StillConnected = 1
)) AS devSessions, )) AS devSessions,

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python # !/usr/bin/env python
import os import os
import base64 import base64
@@ -14,16 +14,13 @@ from logger import mylog
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app") INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"]) sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from database import get_temp_db_connection from database import get_temp_db_connection # noqa: E402 [flake8 lint suppression]
from db.db_helper import get_table_json, get_device_condition_by_status from db.db_helper import get_table_json, get_device_condition_by_status # noqa: E402 [flake8 lint suppression]
from utils.datetime_utils import format_date
# -------------------------- # --------------------------
# Device Endpoints Functions # Device Endpoints Functions
# -------------------------- # --------------------------
def get_all_devices(): def get_all_devices():
"""Retrieve all devices from the database.""" """Retrieve all devices from the database."""
conn = get_temp_db_connection() conn = get_temp_db_connection()
@@ -139,7 +136,6 @@ def export_devices(export_format):
def import_csv(file_storage=None): def import_csv(file_storage=None):
data = "" data = ""
skipped = [] skipped = []
error = None
# 1. Try JSON `content` (base64-encoded CSV) # 1. Try JSON `content` (base64-encoded CSV)
if request.is_json and request.json.get("content"): if request.is_json and request.json.get("content"):

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python # !/usr/bin/env python
import os import os
import sys import sys
@@ -9,10 +9,10 @@ from flask import jsonify
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app") INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"]) sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from database import get_temp_db_connection from database import get_temp_db_connection # noqa: E402 [flake8 lint suppression]
from helper import is_random_mac, mylog from helper import mylog # noqa: E402 [flake8 lint suppression]
from db.db_helper import row_to_json, get_date_from_period from db.db_helper import row_to_json, get_date_from_period # noqa: E402 [flake8 lint suppression]
from utils.datetime_utils import format_date, format_date_iso, format_event_date, ensure_datetime from utils.datetime_utils import ensure_datetime # noqa: E402 [flake8 lint suppression]
# -------------------------- # --------------------------
@@ -120,14 +120,14 @@ def get_events_totals(period: str = "7 days"):
cur = conn.cursor() cur = conn.cursor()
sql = f""" sql = f"""
SELECT SELECT
(SELECT COUNT(*) FROM Events WHERE eve_DateTime >= {period_date_sql}) AS all_events, (SELECT COUNT(*) FROM Events WHERE eve_DateTime >= {period_date_sql}) AS all_events,
(SELECT COUNT(*) FROM Sessions WHERE (SELECT COUNT(*) FROM Sessions WHERE
ses_DateTimeConnection >= {period_date_sql} ses_DateTimeConnection >= {period_date_sql}
OR ses_DateTimeDisconnection >= {period_date_sql} OR ses_DateTimeDisconnection >= {period_date_sql}
OR ses_StillConnected = 1 OR ses_StillConnected = 1
) AS sessions, ) AS sessions,
(SELECT COUNT(*) FROM Sessions WHERE (SELECT COUNT(*) FROM Sessions WHERE
(ses_DateTimeConnection IS NULL AND ses_DateTimeDisconnection >= {period_date_sql}) (ses_DateTimeConnection IS NULL AND ses_DateTimeDisconnection >= {period_date_sql})
OR (ses_DateTimeDisconnection IS NULL AND ses_StillConnected = 0 AND ses_DateTimeConnection >= {period_date_sql}) OR (ses_DateTimeDisconnection IS NULL AND ses_StillConnected = 0 AND ses_DateTimeConnection >= {period_date_sql})
) AS missing, ) AS missing,

View File

@@ -1,5 +1,7 @@
import graphene import graphene
from graphene import ObjectType, String, Int, Boolean, List, Field, InputObjectType, Argument from graphene import (
ObjectType, String, Int, Boolean, List, Field, InputObjectType, Argument
)
import json import json
import sys import sys
import os import os
@@ -8,9 +10,9 @@ import os
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app") INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
sys.path.extend([f"{INSTALL_PATH}/server"]) sys.path.extend([f"{INSTALL_PATH}/server"])
from logger import mylog from logger import mylog # noqa: E402 [flake8 lint suppression]
from const import apiPath from const import apiPath # noqa: E402 [flake8 lint suppression]
from helper import ( from helper import ( # noqa: E402 [flake8 lint suppression]
is_random_mac, is_random_mac,
get_number_of_children, get_number_of_children,
format_ip_long, format_ip_long,
@@ -111,12 +113,14 @@ class SettingResult(ObjectType):
settings = List(Setting) settings = List(Setting)
count = Int() count = Int()
# --- LANGSTRINGS --- # --- LANGSTRINGS ---
# In-memory cache for lang strings # In-memory cache for lang strings
_langstrings_cache = {} # caches lists per file (core JSON or plugin) _langstrings_cache = {} # caches lists per file (core JSON or plugin)
_langstrings_cache_mtime = {} # tracks last modified times _langstrings_cache_mtime = {} # tracks last modified times
# LangString ObjectType # LangString ObjectType
class LangString(ObjectType): class LangString(ObjectType):
langCode = String() langCode = String()
@@ -128,6 +132,7 @@ class LangStringResult(ObjectType):
langStrings = List(LangString) langStrings = List(LangString)
count = Int() count = Int()
# Define Query Type with Pagination Support # Define Query Type with Pagination Support
class Query(ObjectType): class Query(ObjectType):
# --- DEVICES --- # --- DEVICES ---
@@ -184,31 +189,39 @@ class Query(ObjectType):
if (device.get("devParentRelType") not in hidden_relationships) if (device.get("devParentRelType") not in hidden_relationships)
] ]
devices_data = [ filtered = []
device
for device in devices_data for device in devices_data:
if ( is_online = (
( device["devPresentLastScan"] == 1 and "online" in allowed_statuses
device["devPresentLastScan"] == 1
and "online" in allowed_statuses
)
or (device["devIsNew"] == 1 and "new" in allowed_statuses)
or (
device["devPresentLastScan"] == 0
and device["devAlertDown"]
and "down" in allowed_statuses
)
or (
device["devPresentLastScan"] == 0
and "offline" in allowed_statuses
)
and device["devIsArchived"] == 0
or (
device["devIsArchived"] == 1
and "archived" in allowed_statuses
)
) )
]
is_new = (
device["devIsNew"] == 1 and "new" in allowed_statuses
)
is_down = (
device["devPresentLastScan"] == 0 and device["devAlertDown"] and "down" in allowed_statuses
)
is_offline = (
device["devPresentLastScan"] == 0 and "offline" in allowed_statuses
)
is_archived = (
device["devIsArchived"] == 1 and "archived" in allowed_statuses
)
# Matches if not archived and status matches OR it is archived and allowed
matches = (
(is_online or is_new or is_down or is_offline) and device["devIsArchived"] == 0
) or is_archived
if matches:
filtered.append(device)
devices_data = filtered
elif status == "connected": elif status == "connected":
devices_data = [ devices_data = [
device device
@@ -257,8 +270,7 @@ class Query(ObjectType):
devices_data = [ devices_data = [
device device
for device in devices_data for device in devices_data
if str(device.get(filter.filterColumn, "")).lower() if str(device.get(filter.filterColumn, "")).lower() == str(filter.filterValue).lower()
== str(filter.filterValue).lower()
] ]
# Search data if a search term is provided # Search data if a search term is provided
@@ -340,7 +352,7 @@ class Query(ObjectType):
return SettingResult(settings=settings, count=len(settings)) return SettingResult(settings=settings, count=len(settings))
# --- LANGSTRINGS --- # --- LANGSTRINGS ---
langStrings = Field( langStrings = Field(
LangStringResult, LangStringResult,
langCode=Argument(String, required=False), langCode=Argument(String, required=False),
@@ -437,11 +449,11 @@ class Query(ObjectType):
if en_fallback: if en_fallback:
langStrings[i] = en_fallback[0] langStrings[i] = en_fallback[0]
mylog('trace', f'[graphql_schema] Collected {len(langStrings)} language strings ' mylog('trace', f'[graphql_schema] Collected {len(langStrings)} language strings (langCode={langCode}, key={langStringKey}, fallback_to_en={fallback_to_en})')
f'(langCode={langCode}, key={langStringKey}, fallback_to_en={fallback_to_en})')
return LangStringResult(langStrings=langStrings, count=len(langStrings)) return LangStringResult(langStrings=langStrings, count=len(langStrings))
# helps sorting inconsistent dataset mixed integers and strings # helps sorting inconsistent dataset mixed integers and strings
def mixed_type_sort_key(value): def mixed_type_sort_key(value):
if value is None or value == "": if value is None or value == "":

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python # !/usr/bin/env python
import os import os
import sys import sys
@@ -8,7 +8,7 @@ from flask import jsonify
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app") INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"]) sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from database import get_temp_db_connection from database import get_temp_db_connection # noqa: E402 [flake8 lint suppression]
# -------------------------------------------------- # --------------------------------------------------

View File

@@ -3,18 +3,18 @@ import sys
from flask import jsonify from flask import jsonify
# Register NetAlertX directories # Register NetAlertX directories
INSTALL_PATH="/app" INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"]) sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from const import logPath from const import logPath # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
from utils.datetime_utils import timeNowDB from messaging.in_app import write_notification # noqa: E402 [flake8 lint suppression]
from messaging.in_app import write_notification
# Make sure log level is initialized correctly # Make sure log level is initialized correctly
Logger(get_setting_value('LOG_LEVEL')) Logger(get_setting_value('LOG_LEVEL'))
def clean_log(log_file): def clean_log(log_file):
""" """
Purge the content of an allowed log file within the /app/log/ directory. Purge the content of an allowed log file within the /app/log/ directory.
@@ -55,4 +55,3 @@ def clean_log(log_file):
mylog('none', [msg]) mylog('none', [msg])
write_notification(msg, 'interrupt') write_notification(msg, 'interrupt')
return jsonify({"success": False, "message": msg}), 500 return jsonify({"success": False, "message": msg}), 500

View File

@@ -6,8 +6,8 @@ import os
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app") INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
sys.path.extend([f"{INSTALL_PATH}/server"]) sys.path.extend([f"{INSTALL_PATH}/server"])
from logger import mylog from logger import mylog # noqa: E402 [flake8 lint suppression]
from const import apiPath from const import apiPath # noqa: E402 [flake8 lint suppression]
def escape_label_value(val): def escape_label_value(val):

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python # !/usr/bin/env python
import os import os
import sqlite3 import sqlite3
@@ -9,10 +9,10 @@ from flask import jsonify
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app") INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"]) sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from database import get_temp_db_connection from database import get_temp_db_connection # noqa: E402 [flake8 lint suppression]
from helper import is_random_mac, get_setting_value, mylog, format_ip_long from helper import get_setting_value, format_ip_long # noqa: E402 [flake8 lint suppression]
from db.db_helper import row_to_json, get_date_from_period from db.db_helper import get_date_from_period # noqa: E402 [flake8 lint suppression]
from utils.datetime_utils import format_date_iso, format_event_date, format_date_diff, parse_datetime, format_date from utils.datetime_utils import format_date_iso, format_event_date, format_date_diff, format_date # noqa: E402 [flake8 lint suppression]
# -------------------------- # --------------------------
@@ -33,7 +33,7 @@ def create_session(
cur.execute( cur.execute(
""" """
INSERT INTO Sessions (ses_MAC, ses_IP, ses_DateTimeConnection, ses_DateTimeDisconnection, INSERT INTO Sessions (ses_MAC, ses_IP, ses_DateTimeConnection, ses_DateTimeDisconnection,
ses_EventTypeConnection, ses_EventTypeDisconnection) ses_EventTypeConnection, ses_EventTypeDisconnection)
VALUES (?, ?, ?, ?, ?, ?) VALUES (?, ?, ?, ?, ?, ?)
""", """,
@@ -105,7 +105,7 @@ def get_sessions_calendar(start_date, end_date):
-- If ses_EventTypeConnection is missing, backfill from last disconnection -- If ses_EventTypeConnection is missing, backfill from last disconnection
-- If ses_EventTypeDisconnection is missing, forward-fill from next connection -- If ses_EventTypeDisconnection is missing, forward-fill from next connection
SELECT SELECT
SES1.ses_MAC, SES1.ses_EventTypeConnection, SES1.ses_DateTimeConnection, SES1.ses_MAC, SES1.ses_EventTypeConnection, SES1.ses_DateTimeConnection,
SES1.ses_EventTypeDisconnection, SES1.ses_DateTimeDisconnection, SES1.ses_IP, SES1.ses_EventTypeDisconnection, SES1.ses_DateTimeDisconnection, SES1.ses_IP,
SES1.ses_AdditionalInfo, SES1.ses_StillConnected, SES1.ses_AdditionalInfo, SES1.ses_StillConnected,
@@ -113,9 +113,9 @@ def get_sessions_calendar(start_date, end_date):
CASE CASE
WHEN SES1.ses_EventTypeConnection = '<missing event>' THEN WHEN SES1.ses_EventTypeConnection = '<missing event>' THEN
IFNULL( IFNULL(
(SELECT MAX(SES2.ses_DateTimeDisconnection) (SELECT MAX(SES2.ses_DateTimeDisconnection)
FROM Sessions AS SES2 FROM Sessions AS SES2
WHERE SES2.ses_MAC = SES1.ses_MAC WHERE SES2.ses_MAC = SES1.ses_MAC
AND SES2.ses_DateTimeDisconnection < SES1.ses_DateTimeDisconnection AND SES2.ses_DateTimeDisconnection < SES1.ses_DateTimeDisconnection
AND SES2.ses_DateTimeDisconnection BETWEEN Date(?) AND Date(?) AND SES2.ses_DateTimeDisconnection BETWEEN Date(?) AND Date(?)
), ),
@@ -126,9 +126,9 @@ def get_sessions_calendar(start_date, end_date):
CASE CASE
WHEN SES1.ses_EventTypeDisconnection = '<missing event>' THEN WHEN SES1.ses_EventTypeDisconnection = '<missing event>' THEN
(SELECT MIN(SES2.ses_DateTimeConnection) (SELECT MIN(SES2.ses_DateTimeConnection)
FROM Sessions AS SES2 FROM Sessions AS SES2
WHERE SES2.ses_MAC = SES1.ses_MAC WHERE SES2.ses_MAC = SES1.ses_MAC
AND SES2.ses_DateTimeConnection > SES1.ses_DateTimeConnection AND SES2.ses_DateTimeConnection > SES1.ses_DateTimeConnection
AND SES2.ses_DateTimeConnection BETWEEN Date(?) AND Date(?) AND SES2.ses_DateTimeConnection BETWEEN Date(?) AND Date(?)
) )
@@ -162,8 +162,7 @@ def get_sessions_calendar(start_date, end_date):
# Determine color # Determine color
if ( if (
row["ses_EventTypeConnection"] == "<missing event>" row["ses_EventTypeConnection"] == "<missing event>" or row["ses_EventTypeDisconnection"] == "<missing event>"
or row["ses_EventTypeDisconnection"] == "<missing event>"
): ):
color = "#f39c12" color = "#f39c12"
elif row["ses_StillConnected"] == 1: elif row["ses_StillConnected"] == 1:
@@ -205,7 +204,7 @@ def get_device_sessions(mac, period):
cur = conn.cursor() cur = conn.cursor()
sql = f""" sql = f"""
SELECT SELECT
IFNULL(ses_DateTimeConnection, ses_DateTimeDisconnection) AS ses_DateTimeOrder, IFNULL(ses_DateTimeConnection, ses_DateTimeDisconnection) AS ses_DateTimeOrder,
ses_EventTypeConnection, ses_EventTypeConnection,
ses_DateTimeConnection, ses_DateTimeConnection,
@@ -293,7 +292,7 @@ def get_session_events(event_type, period_date):
# Base SQLs # Base SQLs
sql_events = f""" sql_events = f"""
SELECT SELECT
eve_DateTime AS eve_DateTimeOrder, eve_DateTime AS eve_DateTimeOrder,
devName, devName,
devOwner, devOwner,
@@ -314,7 +313,7 @@ def get_session_events(event_type, period_date):
""" """
sql_sessions = """ sql_sessions = """
SELECT SELECT
IFNULL(ses_DateTimeConnection, ses_DateTimeDisconnection) AS ses_DateTimeOrder, IFNULL(ses_DateTimeConnection, ses_DateTimeDisconnection) AS ses_DateTimeOrder,
devName, devName,
devOwner, devOwner,
@@ -337,8 +336,7 @@ def get_session_events(event_type, period_date):
sql = sql_events sql = sql_events
elif event_type == "sessions": elif event_type == "sessions":
sql = ( sql = (
sql_sessions sql_sessions + f"""
+ f"""
WHERE ( WHERE (
ses_DateTimeConnection >= {period_date} ses_DateTimeConnection >= {period_date}
OR ses_DateTimeDisconnection >= {period_date} OR ses_DateTimeDisconnection >= {period_date}
@@ -348,8 +346,7 @@ def get_session_events(event_type, period_date):
) )
elif event_type == "missing": elif event_type == "missing":
sql = ( sql = (
sql_sessions sql_sessions + f"""
+ f"""
WHERE ( WHERE (
(ses_DateTimeConnection IS NULL AND ses_DateTimeDisconnection >= {period_date}) (ses_DateTimeConnection IS NULL AND ses_DateTimeDisconnection >= {period_date})
OR (ses_DateTimeDisconnection IS NULL AND ses_StillConnected = 0 AND ses_DateTimeConnection >= {period_date}) OR (ses_DateTimeDisconnection IS NULL AND ses_StillConnected = 0 AND ses_DateTimeConnection >= {period_date})

View File

@@ -1,7 +1,7 @@
import os import os
import json import json
from const import * from const import applicationPath, apiPath
from logger import mylog from logger import mylog
from helper import checkNewVersion from helper import checkNewVersion
from utils.datetime_utils import timeNowDB, timeNow from utils.datetime_utils import timeNowDB, timeNow
@@ -32,14 +32,17 @@ class app_state_class:
isNewVersionChecked (int): Timestamp of last version check. isNewVersionChecked (int): Timestamp of last version check.
""" """
def __init__(self, currentState=None, def __init__(
settingsSaved=None, self,
settingsImported=None, currentState=None,
showSpinner=None, settingsSaved=None,
graphQLServerStarted=0, settingsImported=None,
processScan=False, showSpinner=None,
pluginsStates=None, graphQLServerStarted=0,
appVersion=None): processScan=False,
pluginsStates=None,
appVersion=None
):
""" """
Initialize the application state, optionally overwriting previous values. Initialize the application state, optionally overwriting previous values.
@@ -62,7 +65,7 @@ class app_state_class:
# Update self # Update self
self.lastUpdated = str(timeNowDB()) self.lastUpdated = str(timeNowDB())
if os.path.exists(stateFile): if os.path.exists(stateFile):
try: try:
with open(stateFile, "r") as json_file: with open(stateFile, "r") as json_file:
@@ -73,7 +76,7 @@ class app_state_class:
) )
# Check if the file exists and recover previous values # Check if the file exists and recover previous values
if previousState != "": if previousState != "":
self.settingsSaved = previousState.get("settingsSaved", 0) self.settingsSaved = previousState.get("settingsSaved", 0)
self.settingsImported = previousState.get("settingsImported", 0) self.settingsImported = previousState.get("settingsImported", 0)
self.processScan = previousState.get("processScan", False) self.processScan = previousState.get("processScan", False)
@@ -82,9 +85,9 @@ class app_state_class:
self.isNewVersionChecked = previousState.get("isNewVersionChecked", 0) self.isNewVersionChecked = previousState.get("isNewVersionChecked", 0)
self.graphQLServerStarted = previousState.get("graphQLServerStarted", 0) self.graphQLServerStarted = previousState.get("graphQLServerStarted", 0)
self.currentState = previousState.get("currentState", "Init") self.currentState = previousState.get("currentState", "Init")
self.pluginsStates = previousState.get("pluginsStates", {}) self.pluginsStates = previousState.get("pluginsStates", {})
self.appVersion = previousState.get("appVersion", "") self.appVersion = previousState.get("appVersion", "")
else: # init first time values else: # init first time values
self.settingsSaved = 0 self.settingsSaved = 0
self.settingsImported = 0 self.settingsImported = 0
self.showSpinner = False self.showSpinner = False
@@ -158,12 +161,12 @@ class app_state_class:
# ------------------------------------------------------------------------------- # -------------------------------------------------------------------------------
# method to update the state # method to update the state
def updateState(newState = None, def updateState(newState = None,
settingsSaved = None, settingsSaved = None,
settingsImported = None, settingsImported = None,
showSpinner = None, showSpinner = None,
graphQLServerStarted = None, graphQLServerStarted = None,
processScan = None, processScan = None,
pluginsStates=None, pluginsStates=None,
appVersion=None): appVersion=None):
""" """
@@ -182,14 +185,16 @@ def updateState(newState = None,
Returns: Returns:
app_state_class: Updated state object. app_state_class: Updated state object.
""" """
return app_state_class( newState, return app_state_class(
settingsSaved, newState,
settingsImported, settingsSaved,
showSpinner, settingsImported,
graphQLServerStarted, showSpinner,
processScan, graphQLServerStarted,
pluginsStates, processScan,
appVersion) pluginsStates,
appVersion
)
# ------------------------------------------------------------------------------- # -------------------------------------------------------------------------------

View File

@@ -52,7 +52,7 @@ default_tz = "Europe/Berlin"
# SQL queries # SQL queries
# =============================================================================== # ===============================================================================
sql_devices_all = """ sql_devices_all = """
SELECT SELECT
rowid, rowid,
IFNULL(devMac, '') AS devMac, IFNULL(devMac, '') AS devMac,
IFNULL(devName, '') AS devName, IFNULL(devName, '') AS devName,
@@ -88,7 +88,7 @@ sql_devices_all = """
IFNULL(devFQDN, '') AS devFQDN, IFNULL(devFQDN, '') AS devFQDN,
IFNULL(devParentRelType, '') AS devParentRelType, IFNULL(devParentRelType, '') AS devParentRelType,
IFNULL(devReqNicsOnline, '') AS devReqNicsOnline, IFNULL(devReqNicsOnline, '') AS devReqNicsOnline,
CASE CASE
WHEN devIsNew = 1 THEN 'New' WHEN devIsNew = 1 THEN 'New'
WHEN devPresentLastScan = 1 THEN 'On-line' WHEN devPresentLastScan = 1 THEN 'On-line'
WHEN devPresentLastScan = 0 AND devAlertDown != 0 THEN 'Down' WHEN devPresentLastScan = 0 AND devAlertDown != 0 THEN 'Down'
@@ -133,7 +133,7 @@ sql_devices_tiles = """
(SELECT COUNT(*) FROM Devices) AS "all_devices", (SELECT COUNT(*) FROM Devices) AS "all_devices",
-- My Devices count -- My Devices count
(SELECT COUNT(*) FROM MyDevicesFilter) AS my_devices (SELECT COUNT(*) FROM MyDevicesFilter) AS my_devices
FROM Statuses; FROM Statuses;
""" """
sql_devices_filters = """ sql_devices_filters = """
SELECT DISTINCT 'devSite' AS columnName, devSite AS columnValue SELECT DISTINCT 'devSite' AS columnName, devSite AS columnValue
@@ -164,9 +164,9 @@ sql_devices_filters = """
FROM Devices WHERE devSSID NOT IN ('', 'null') AND devSSID IS NOT NULL FROM Devices WHERE devSSID NOT IN ('', 'null') AND devSSID IS NOT NULL
ORDER BY columnName; ORDER BY columnName;
""" """
sql_devices_stats = """SELECT Online_Devices as online, Down_Devices as down, All_Devices as 'all', Archived_Devices as archived, sql_devices_stats = """SELECT Online_Devices as online, Down_Devices as down, All_Devices as 'all', Archived_Devices as archived,
(select count(*) from Devices a where devIsNew = 1 ) as new, (select count(*) from Devices a where devIsNew = 1 ) as new,
(select count(*) from Devices a where devName = '(unknown)' or devName = '(name not found)' ) as unknown (select count(*) from Devices a where devName = '(unknown)' or devName = '(name not found)' ) as unknown
from Online_History order by Scan_Date desc limit 1""" from Online_History order by Scan_Date desc limit 1"""
sql_events_pending_alert = "SELECT * FROM Events where eve_PendingAlertEmail is not 0" sql_events_pending_alert = "SELECT * FROM Events where eve_PendingAlertEmail is not 0"
sql_settings = "SELECT * FROM Settings" sql_settings = "SELECT * FROM Settings"
@@ -176,23 +176,23 @@ sql_notifications_all = "SELECT * FROM Notifications"
sql_online_history = "SELECT * FROM Online_History" sql_online_history = "SELECT * FROM Online_History"
sql_plugins_events = "SELECT * FROM Plugins_Events" sql_plugins_events = "SELECT * FROM Plugins_Events"
sql_plugins_history = "SELECT * FROM Plugins_History ORDER BY DateTimeChanged DESC" sql_plugins_history = "SELECT * FROM Plugins_History ORDER BY DateTimeChanged DESC"
sql_new_devices = """SELECT * FROM ( sql_new_devices = """SELECT * FROM (
SELECT eve_IP as devLastIP, eve_MAC as devMac SELECT eve_IP as devLastIP, eve_MAC as devMac
FROM Events_Devices FROM Events_Devices
WHERE eve_PendingAlertEmail = 1 WHERE eve_PendingAlertEmail = 1
AND eve_EventType = 'New Device' AND eve_EventType = 'New Device'
ORDER BY eve_DateTime ) t1 ORDER BY eve_DateTime ) t1
LEFT JOIN LEFT JOIN
( SELECT devName, devMac as devMac_t2 FROM Devices) t2 ( SELECT devName, devMac as devMac_t2 FROM Devices) t2
ON t1.devMac = t2.devMac_t2""" ON t1.devMac = t2.devMac_t2"""
sql_generateGuid = """ sql_generateGuid = """
lower( lower(
hex(randomblob(4)) || '-' || hex(randomblob(2)) || '-' || '4' || hex(randomblob(4)) || '-' || hex(randomblob(2)) || '-' || '4' ||
substr(hex( randomblob(2)), 2) || '-' || substr(hex( randomblob(2)), 2) || '-' ||
substr('AB89', 1 + (abs(random()) % 4) , 1) || substr('AB89', 1 + (abs(random()) % 4) , 1) ||
substr(hex(randomblob(2)), 2) || '-' || substr(hex(randomblob(2)), 2) || '-' ||
hex(randomblob(6)) hex(randomblob(6))
) )
""" """

View File

@@ -180,7 +180,7 @@ class DB:
# Init the AppEvent database table # Init the AppEvent database table
AppEvent_obj(self) AppEvent_obj(self)
# #------------------------------------------------------------------------------- # # -------------------------------------------------------------------------------
# def get_table_as_json(self, sqlQuery): # def get_table_as_json(self, sqlQuery):
# # mylog('debug',[ '[Database] - get_table_as_json - Query: ', sqlQuery]) # # mylog('debug',[ '[Database] - get_table_as_json - Query: ', sqlQuery])

View File

@@ -6,8 +6,8 @@ import os
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app") INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
sys.path.extend([f"{INSTALL_PATH}/server"]) sys.path.extend([f"{INSTALL_PATH}/server"])
from helper import if_byte_then_to_str from helper import if_byte_then_to_str # noqa: E402 [flake8 lint suppression]
from logger import mylog from logger import mylog # noqa: E402 [flake8 lint suppression]
# ------------------------------------------------------------------------------- # -------------------------------------------------------------------------------

View File

@@ -5,8 +5,8 @@ import os
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app") INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
sys.path.extend([f"{INSTALL_PATH}/server"]) sys.path.extend([f"{INSTALL_PATH}/server"])
from logger import mylog from logger import mylog # noqa: E402 [flake8 lint suppression]
from messaging.in_app import write_notification from messaging.in_app import write_notification # noqa: E402 [flake8 lint suppression]
def ensure_column(sql, table: str, column_name: str, column_type: str) -> bool: def ensure_column(sql, table: str, column_name: str, column_type: str) -> bool:
@@ -108,23 +108,23 @@ def ensure_views(sql) -> bool:
- sql: database cursor or connection wrapper (must support execute() and fetchall()). - sql: database cursor or connection wrapper (must support execute() and fetchall()).
""" """
sql.execute(""" DROP VIEW IF EXISTS Events_Devices;""") sql.execute(""" DROP VIEW IF EXISTS Events_Devices;""")
sql.execute(""" CREATE VIEW Events_Devices AS sql.execute(""" CREATE VIEW Events_Devices AS
SELECT * SELECT *
FROM Events FROM Events
LEFT JOIN Devices ON eve_MAC = devMac; LEFT JOIN Devices ON eve_MAC = devMac;
""") """)
sql.execute(""" DROP VIEW IF EXISTS LatestEventsPerMAC;""") sql.execute(""" DROP VIEW IF EXISTS LatestEventsPerMAC;""")
sql.execute("""CREATE VIEW LatestEventsPerMAC AS sql.execute("""CREATE VIEW LatestEventsPerMAC AS
WITH RankedEvents AS ( WITH RankedEvents AS (
SELECT SELECT
e.*, e.*,
ROW_NUMBER() OVER (PARTITION BY e.eve_MAC ORDER BY e.eve_DateTime DESC) AS row_num ROW_NUMBER() OVER (PARTITION BY e.eve_MAC ORDER BY e.eve_DateTime DESC) AS row_num
FROM Events AS e FROM Events AS e
) )
SELECT SELECT
e.*, e.*,
d.*, d.*,
c.* c.*
FROM RankedEvents AS e FROM RankedEvents AS e
LEFT JOIN Devices AS d ON e.eve_MAC = d.devMac LEFT JOIN Devices AS d ON e.eve_MAC = d.devMac
@@ -138,14 +138,14 @@ def ensure_views(sql) -> bool:
sql.execute(""" CREATE VIEW IF NOT EXISTS LatestEventsPerMAC AS sql.execute(""" CREATE VIEW IF NOT EXISTS LatestEventsPerMAC AS
WITH RankedEvents AS ( WITH RankedEvents AS (
SELECT SELECT
e.*, e.*,
ROW_NUMBER() OVER (PARTITION BY e.eve_MAC ORDER BY e.eve_DateTime DESC) AS row_num ROW_NUMBER() OVER (PARTITION BY e.eve_MAC ORDER BY e.eve_DateTime DESC) AS row_num
FROM Events AS e FROM Events AS e
) )
SELECT SELECT
e.*, e.*,
d.*, d.*,
c.* c.*
FROM RankedEvents AS e FROM RankedEvents AS e
LEFT JOIN Devices AS d ON e.eve_MAC = d.devMac LEFT JOIN Devices AS d ON e.eve_MAC = d.devMac
@@ -272,7 +272,7 @@ def ensure_CurrentScan(sql) -> bool:
""" """
# 🐛 CurrentScan DEBUG: comment out below when debugging to keep the CurrentScan table after restarts/scan finishes # 🐛 CurrentScan DEBUG: comment out below when debugging to keep the CurrentScan table after restarts/scan finishes
sql.execute("DROP TABLE IF EXISTS CurrentScan;") sql.execute("DROP TABLE IF EXISTS CurrentScan;")
sql.execute(""" CREATE TABLE IF NOT EXISTS CurrentScan ( sql.execute(""" CREATE TABLE IF NOT EXISTS CurrentScan (
cur_MAC STRING(50) NOT NULL COLLATE NOCASE, cur_MAC STRING(50) NOT NULL COLLATE NOCASE,
cur_IP STRING(50) NOT NULL COLLATE NOCASE, cur_IP STRING(50) NOT NULL COLLATE NOCASE,
cur_Vendor STRING(250), cur_Vendor STRING(250),
@@ -354,7 +354,7 @@ def ensure_plugins_tables(sql) -> bool:
# Plugin state # Plugin state
sql_Plugins_Objects = """ CREATE TABLE IF NOT EXISTS Plugins_Objects( sql_Plugins_Objects = """ CREATE TABLE IF NOT EXISTS Plugins_Objects(
"Index" INTEGER, "Index" INTEGER,
Plugin TEXT NOT NULL, Plugin TEXT NOT NULL,
Object_PrimaryID TEXT NOT NULL, Object_PrimaryID TEXT NOT NULL,
Object_SecondaryID TEXT NOT NULL, Object_SecondaryID TEXT NOT NULL,
DateTimeCreated TEXT NOT NULL, DateTimeCreated TEXT NOT NULL,

View File

@@ -18,7 +18,7 @@ from typing import Dict, List, Tuple, Any, Optional
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app") INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
sys.path.extend([f"{INSTALL_PATH}/server"]) sys.path.extend([f"{INSTALL_PATH}/server"])
from logger import mylog from logger import mylog # noqa: E402 [flake8 lint suppression]
class SafeConditionBuilder: class SafeConditionBuilder:
@@ -494,8 +494,6 @@ class SafeConditionBuilder:
if logical_op and not self._validate_logical_operator(logical_op): if logical_op and not self._validate_logical_operator(logical_op):
raise ValueError(f"Invalid logical operator: {logical_op}") raise ValueError(f"Invalid logical operator: {logical_op}")
# Parse values from the IN clause
values = []
# Simple regex to extract quoted values # Simple regex to extract quoted values
value_pattern = r"'([^']*)'" value_pattern = r"'([^']*)'"
matches = re.findall(value_pattern, values_str) matches = re.findall(value_pattern, values_str)

View File

@@ -7,25 +7,22 @@ import os
import re import re
import unicodedata import unicodedata
import subprocess import subprocess
import pytz
import json import json
import requests import requests
import base64 import base64
import hashlib import hashlib
import random import random
import email
import string import string
import ipaddress import ipaddress
import conf import conf
from const import * from const import applicationPath, fullConfPath, fullDbPath, dbPath, confPath, apiPath
from logger import mylog, logResult from logger import mylog, logResult
# Register NetAlertX directories using runtime configuration # Register NetAlertX directories using runtime configuration
INSTALL_PATH = applicationPath INSTALL_PATH = applicationPath
# ------------------------------------------------------------------------------- # -------------------------------------------------------------------------------
# File system permission handling # File system permission handling
# ------------------------------------------------------------------------------- # -------------------------------------------------------------------------------
@@ -58,12 +55,6 @@ def checkPermissionsOK():
return (confR_access, dbR_access) return (confR_access, dbR_access)
# -------------------------------------------------------------------------------
def fixPermissions():
# Try fixing access rights if needed
chmodCommands = []
# ------------------------------------------------------------------------------- # -------------------------------------------------------------------------------
def initialiseFile(pathToCheck, defaultFile): def initialiseFile(pathToCheck, defaultFile):
# if file not readable (missing?) try to copy over the backed-up (default) one # if file not readable (missing?) try to copy over the backed-up (default) one
@@ -71,9 +62,7 @@ def initialiseFile(pathToCheck, defaultFile):
mylog( mylog(
"none", "none",
[ [
"[Setup] (" "[Setup] (" + pathToCheck + ") file is not readable or missing. Trying to copy over the default one."
+ pathToCheck
+ ") file is not readable or missing. Trying to copy over the default one."
], ],
) )
try: try:
@@ -89,22 +78,14 @@ def initialiseFile(pathToCheck, defaultFile):
mylog( mylog(
"none", "none",
[ [
"[Setup] ⚠ ERROR copying (" "[Setup] ⚠ ERROR copying (" + defaultFile + ") to (" + pathToCheck + "). Make sure the app has Read & Write access to the parent directory."
+ defaultFile
+ ") to ("
+ pathToCheck
+ "). Make sure the app has Read & Write access to the parent directory."
], ],
) )
else: else:
mylog( mylog(
"none", "none",
[ [
"[Setup] (" "[Setup] (" + defaultFile + ") copied over successfully to (" + pathToCheck + ")."
+ defaultFile
+ ") copied over successfully to ("
+ pathToCheck
+ ")."
], ],
) )
@@ -116,10 +97,7 @@ def initialiseFile(pathToCheck, defaultFile):
mylog( mylog(
"none", "none",
[ [
"[Setup] ⚠ ERROR copying (" "[Setup] ⚠ ERROR copying (" + defaultFile + "). Make sure the app has Read & Write access to " + pathToCheck
+ defaultFile
+ "). Make sure the app has Read & Write access to "
+ pathToCheck
], ],
) )
mylog("none", [e.output]) mylog("none", [e.output])
@@ -130,16 +108,13 @@ def filePermissions():
# check and initialize .conf # check and initialize .conf
(confR_access, dbR_access) = checkPermissionsOK() # Initial check (confR_access, dbR_access) = checkPermissionsOK() # Initial check
if confR_access == False: if confR_access is False:
initialiseFile(fullConfPath, f"{INSTALL_PATH}/back/app.conf") initialiseFile(fullConfPath, f"{INSTALL_PATH}/back/app.conf")
# check and initialize .db # check and initialize .db
if dbR_access == False: if dbR_access is False:
initialiseFile(fullDbPath, f"{INSTALL_PATH}/back/app.db") initialiseFile(fullDbPath, f"{INSTALL_PATH}/back/app.db")
# last attempt
fixPermissions()
# ------------------------------------------------------------------------------- # -------------------------------------------------------------------------------
# File manipulation methods # File manipulation methods
@@ -292,7 +267,7 @@ def get_setting_value(key):
value = setting_value_to_python_type(set_type, set_value) value = setting_value_to_python_type(set_type, set_value)
else: else:
value = setting_value_to_python_type(set_type, str(set_value)) value = setting_value_to_python_type(set_type, str(set_value))
SETTINGS_SECONDARYCACHE[key] = value SETTINGS_SECONDARYCACHE[key] = value
return value return value
@@ -382,7 +357,7 @@ def setting_value_to_python_type(set_type, set_value):
if isinstance(set_value, str): if isinstance(set_value, str):
try: try:
value = json.loads(set_value.replace("'", "\"")) value = json.loads(set_value.replace("'", "\""))
except json.JSONDecodeError as e: except json.JSONDecodeError as e:
mylog( mylog(
"none", "none",
@@ -413,17 +388,12 @@ def setting_value_to_python_type(set_type, set_value):
value = set_value value = set_value
elif ( elif (
dataType == "string" dataType == "string" and elementType == "input" and any(opt.get("readonly") == "true" for opt in elementOptions)
and elementType == "input"
and any(opt.get("readonly") == "true" for opt in elementOptions)
): ):
value = reverseTransformers(str(set_value), transformers) value = reverseTransformers(str(set_value), transformers)
elif ( elif (
dataType == "string" dataType == "string" and elementType == "input" and any(opt.get("type") == "password" for opt in elementOptions) and "sha256" in transformers
and elementType == "input"
and any(opt.get("type") == "password" for opt in elementOptions)
and "sha256" in transformers
): ):
value = hashlib.sha256(set_value.encode()).hexdigest() value = hashlib.sha256(set_value.encode()).hexdigest()
@@ -602,23 +572,23 @@ def normalize_string(text):
# ------------------------------------------------------------------------------- # -------------------------------------------------------------------------------
# ------------------------------------------------------------------------------------------- # # -------------------------------------------------------------------------------------------
def is_random_mac(mac: str) -> bool: # def is_random_mac(mac: str) -> bool:
"""Determine if a MAC address is random, respecting user-defined prefixes not to mark as random.""" # """Determine if a MAC address is random, respecting user-defined prefixes not to mark as random."""
is_random = mac[1].upper() in ["2", "6", "A", "E"] # is_random = mac[1].upper() in ["2", "6", "A", "E"]
# Get prefixes from settings # # Get prefixes from settings
prefixes = get_setting_value("UI_NOT_RANDOM_MAC") # prefixes = get_setting_value("UI_NOT_RANDOM_MAC")
# If detected as random, make sure it doesn't start with a prefix the user wants to exclude # # If detected as random, make sure it doesn't start with a prefix the user wants to exclude
if is_random: # if is_random:
for prefix in prefixes: # for prefix in prefixes:
if mac.upper().startswith(prefix.upper()): # if mac.upper().startswith(prefix.upper()):
is_random = False # is_random = False
break # break
return is_random # return is_random
# ------------------------------------------------------------------------------------------- # -------------------------------------------------------------------------------------------
@@ -653,6 +623,7 @@ def extract_ip_addresses(text):
# ------------------------------------------------------------------------------- # -------------------------------------------------------------------------------
# Helper function to determine if a MAC address is random # Helper function to determine if a MAC address is random
def is_random_mac(mac): def is_random_mac(mac):
"""Determine if a MAC address is random, respecting user-defined prefixes not to mark as random."""
# Check if second character matches "2", "6", "A", "E" (case insensitive) # Check if second character matches "2", "6", "A", "E" (case insensitive)
is_random = mac[1].upper() in ["2", "6", "A", "E"] is_random = mac[1].upper() in ["2", "6", "A", "E"]
@@ -773,7 +744,6 @@ def getBuildTimeStampAndVersion():
return tuple(results) return tuple(results)
# ------------------------------------------------------------------------------- # -------------------------------------------------------------------------------
def checkNewVersion(): def checkNewVersion():
mylog("debug", ["[Version check] Checking if new version available"]) mylog("debug", ["[Version check] Checking if new version available"])

View File

@@ -8,9 +8,9 @@ import shutil
import re import re
# Register NetAlertX libraries # Register NetAlertX libraries
import conf import conf
from const import fullConfPath, applicationPath, fullConfFolder, default_tz from const import fullConfPath, fullConfFolder, default_tz
from helper import getBuildTimeStampAndVersion, fixPermissions, collect_lang_strings, updateSubnets, isJsonObject, setting_value_to_python_type, get_setting_value, generate_random_string from helper import getBuildTimeStampAndVersion, fixPermissions, collect_lang_strings, updateSubnets, generate_random_string
from utils.datetime_utils import timeNowDB from utils.datetime_utils import timeNowDB
from app_state import updateState from app_state import updateState
from logger import mylog from logger import mylog
@@ -19,7 +19,6 @@ from scheduler import schedule_class
from plugin import plugin_manager, print_plugin_info from plugin import plugin_manager, print_plugin_info
from utils.plugin_utils import get_plugins_configs, get_set_value_for_init from utils.plugin_utils import get_plugins_configs, get_set_value_for_init
from messaging.in_app import write_notification from messaging.in_app import write_notification
from utils.crypto_utils import get_random_bytes
# =============================================================================== # ===============================================================================
# Initialise user defined values # Initialise user defined values
@@ -59,7 +58,7 @@ def ccd(
result = default result = default
# Use existing value if already supplied, otherwise default value is used # Use existing value if already supplied, otherwise default value is used
if forceDefault == False and key in config_dir: if forceDefault is False and key in config_dir:
result = config_dir[key] result = config_dir[key]
# Single quotes might break SQL queries, replacing them # Single quotes might break SQL queries, replacing them
@@ -216,7 +215,7 @@ def importConfigs(pm, db, all_plugins):
[], [],
c_d, c_d,
"Loaded plugins", "Loaded plugins",
'{"dataType":"array","elements":[{"elementType":"select","elementHasInputValue":1,"elementOptions":[{"multiple":"true","ordeable":"true"}],"transformers":[]},{"elementType":"button","elementOptions":[{"sourceSuffixes":[]},{"separator":""},{"cssClasses":"col-xs-12"},{"onClick":"selectChange(this)"},{"getStringKey":"Gen_Change"}],"transformers":[]}]}', '{"dataType":"array","elements":[{"elementType":"select","elementHasInputValue":1,"elementOptions":[{"multiple":"true","ordeable":"true"}],"transformers":[]},{"elementType":"button","elementOptions":[{"sourceSuffixes":[]},{"separator":""},{"cssClasses":"col-xs-12"},{"onClick":"selectChange(this)"},{"getStringKey":"Gen_Change"}],"transformers":[]}]}', # noqa: E501
"[]", "[]",
"General", "General",
) )
@@ -234,7 +233,7 @@ def importConfigs(pm, db, all_plugins):
["192.168.1.0/24 --interface=eth1", "192.168.1.0/24 --interface=eth0"], ["192.168.1.0/24 --interface=eth1", "192.168.1.0/24 --interface=eth0"],
c_d, c_d,
"Subnets to scan", "Subnets to scan",
"""{"dataType": "array","elements": [{"elementType": "input","elementOptions": [{"placeholder": "192.168.1.0/24 --interface=eth1"},{"suffix": "_in"},{"cssClasses": "col-sm-10"},{"prefillValue": "null"}],"transformers": []},{"elementType": "button","elementOptions": [{"sourceSuffixes": ["_in"]},{"separator": ""},{"cssClasses": "col-xs-12"},{"onClick": "addList(this, false)"},{"getStringKey": "Gen_Add"}],"transformers": []},{"elementType": "select","elementHasInputValue": 1,"elementOptions": [{"multiple": "true"},{"readonly": "true"},{"editable": "true"}],"transformers": []},{"elementType": "button","elementOptions": [{"sourceSuffixes": []},{"separator": ""},{"cssClasses": "col-xs-6"},{"onClick": "removeAllOptions(this)"},{"getStringKey": "Gen_Remove_All"}],"transformers": []},{"elementType": "button","elementOptions": [{"sourceSuffixes": []},{"separator": ""},{"cssClasses": "col-xs-6"},{"onClick": "removeFromList(this)"},{"getStringKey": "Gen_Remove_Last"}],"transformers": []}]}""", """{"dataType": "array","elements": [{"elementType": "input","elementOptions": [{"placeholder": "192.168.1.0/24 --interface=eth1"},{"suffix": "_in"},{"cssClasses": "col-sm-10"},{"prefillValue": "null"}],"transformers": []},{"elementType": "button","elementOptions": [{"sourceSuffixes": ["_in"]},{"separator": ""},{"cssClasses": "col-xs-12"},{"onClick": "addList(this, false)"},{"getStringKey": "Gen_Add"}],"transformers": []},{"elementType": "select","elementHasInputValue": 1,"elementOptions": [{"multiple": "true"},{"readonly": "true"},{"editable": "true"}],"transformers": []},{"elementType": "button","elementOptions": [{"sourceSuffixes": []},{"separator": ""},{"cssClasses": "col-xs-6"},{"onClick": "removeAllOptions(this)"},{"getStringKey": "Gen_Remove_All"}],"transformers": []},{"elementType": "button","elementOptions": [{"sourceSuffixes": []},{"separator": ""},{"cssClasses": "col-xs-6"},{"onClick": "removeFromList(this)"},{"getStringKey": "Gen_Remove_Last"}],"transformers": []}]}""", # 
noqa: E501 - inline JSON
"[]", "[]",
"General", "General",
) )
@@ -356,7 +355,7 @@ def importConfigs(pm, db, all_plugins):
], ],
c_d, c_d,
"Network device types", "Network device types",
'{"dataType":"array","elements":[{"elementType":"input","elementOptions":[{"placeholder":"Enter value"},{"suffix":"_in"},{"cssClasses":"col-sm-10"},{"prefillValue":"null"}],"transformers":[]},{"elementType":"button","elementOptions":[{"sourceSuffixes":["_in"]},{"separator":""},{"cssClasses":"col-xs-12"},{"onClick":"addList(this,false)"},{"getStringKey":"Gen_Add"}],"transformers":[]},{"elementType":"select", "elementHasInputValue":1,"elementOptions":[{"multiple":"true"},{"readonly":"true"},{"editable":"true"}],"transformers":[]},{"elementType":"button","elementOptions":[{"sourceSuffixes":[]},{"separator":""},{"cssClasses":"col-xs-6"},{"onClick":"removeAllOptions(this)"},{"getStringKey":"Gen_Remove_All"}],"transformers":[]},{"elementType":"button","elementOptions":[{"sourceSuffixes":[]},{"separator":""},{"cssClasses":"col-xs-6"},{"onClick":"removeFromList(this)"},{"getStringKey":"Gen_Remove_Last"}],"transformers":[]}]}', '{"dataType":"array","elements":[{"elementType":"input","elementOptions":[{"placeholder":"Enter value"},{"suffix":"_in"},{"cssClasses":"col-sm-10"},{"prefillValue":"null"}],"transformers":[]},{"elementType":"button","elementOptions":[{"sourceSuffixes":["_in"]},{"separator":""},{"cssClasses":"col-xs-12"},{"onClick":"addList(this,false)"},{"getStringKey":"Gen_Add"}],"transformers":[]},{"elementType":"select", "elementHasInputValue":1,"elementOptions":[{"multiple":"true"},{"readonly":"true"},{"editable":"true"}],"transformers":[]},{"elementType":"button","elementOptions":[{"sourceSuffixes":[]},{"separator":""},{"cssClasses":"col-xs-6"},{"onClick":"removeAllOptions(this)"},{"getStringKey":"Gen_Remove_All"}],"transformers":[]},{"elementType":"button","elementOptions":[{"sourceSuffixes":[]},{"separator":""},{"cssClasses":"col-xs-6"},{"onClick":"removeFromList(this)"},{"getStringKey":"Gen_Remove_Last"}],"transformers":[]}]}', # noqa: E501 - inline JSON
"[]", "[]",
"General", "General",
) )
@@ -374,7 +373,7 @@ def importConfigs(pm, db, all_plugins):
"t_" + generate_random_string(20), "t_" + generate_random_string(20),
c_d, c_d,
"API token", "API token",
'{"dataType": "string","elements": [{"elementType": "input","elementHasInputValue": 1,"elementOptions": [{ "cssClasses": "col-xs-12" }],"transformers": []},{"elementType": "button","elementOptions": [{ "getStringKey": "Gen_Generate" },{ "customParams": "API_TOKEN" },{ "onClick": "generateApiToken(this, 20)" },{ "cssClasses": "col-xs-12" }],"transformers": []}]}', '{"dataType": "string","elements": [{"elementType": "input","elementHasInputValue": 1,"elementOptions": [{ "cssClasses": "col-xs-12" }],"transformers": []},{"elementType": "button","elementOptions": [{ "getStringKey": "Gen_Generate" },{ "customParams": "API_TOKEN" },{ "onClick": "generateApiToken(this, 20)" },{ "cssClasses": "col-xs-12" }],"transformers": []}]}', # noqa: E501 - inline JSON
"[]", "[]",
"General", "General",
) )
@@ -386,7 +385,7 @@ def importConfigs(pm, db, all_plugins):
c_d, c_d,
"Language Interface", "Language Interface",
'{"dataType":"string", "elements": [{"elementType" : "select", "elementOptions" : [] ,"transformers": []}]}', '{"dataType":"string", "elements": [{"elementType" : "select", "elementOptions" : [] ,"transformers": []}]}',
"['English (en_us)', 'Arabic (ar_ar)', 'Catalan (ca_ca)', 'Czech (cs_cz)', 'German (de_de)', 'Spanish (es_es)', 'Farsi (fa_fa)', 'French (fr_fr)', 'Italian (it_it)', 'Japanese (ja_jp)', 'Norwegian (nb_no)', 'Polish (pl_pl)', 'Portuguese (pt_br)', 'Portuguese (pt_pt)', 'Russian (ru_ru)', 'Swedish (sv_sv)', 'Turkish (tr_tr)', 'Ukrainian (uk_ua)', 'Chinese (zh_cn)']", "['English (en_us)', 'Arabic (ar_ar)', 'Catalan (ca_ca)', 'Czech (cs_cz)', 'German (de_de)', 'Spanish (es_es)', 'Farsi (fa_fa)', 'French (fr_fr)', 'Italian (it_it)', 'Japanese (ja_jp)', 'Norwegian (nb_no)', 'Polish (pl_pl)', 'Portuguese (pt_br)', 'Portuguese (pt_pt)', 'Russian (ru_ru)', 'Swedish (sv_sv)', 'Turkish (tr_tr)', 'Ukrainian (uk_ua)', 'Chinese (zh_cn)']", # noqa: E501 - inline JSON
"UI", "UI",
) )
@@ -483,9 +482,7 @@ def importConfigs(pm, db, all_plugins):
# only include loaded plugins, and the ones that are enabled # only include loaded plugins, and the ones that are enabled
if ( if (
pref in conf.LOADED_PLUGINS pref in conf.LOADED_PLUGINS or plugin_run != "disabled" or plugin_run is None
or plugin_run != "disabled"
or plugin_run is None
): ):
print_plugin_info(plugin, ["display_name", "description"]) print_plugin_info(plugin, ["display_name", "description"])
@@ -524,9 +521,7 @@ def importConfigs(pm, db, all_plugins):
if "popupForm" in option: if "popupForm" in option:
for popup_entry in option["popupForm"]: for popup_entry in option["popupForm"]:
popup_pref = ( popup_pref = (
key key + "_popupform_" + popup_entry.get("function", "")
+ "_popupform_"
+ popup_entry.get("function", "")
) )
stringSqlParams = collect_lang_strings( stringSqlParams = collect_lang_strings(
popup_entry, popup_pref, stringSqlParams popup_entry, popup_pref, stringSqlParams
@@ -606,7 +601,7 @@ def importConfigs(pm, db, all_plugins):
# Loop through settings_override dictionary # Loop through settings_override dictionary
for setting_name, value in settings_override.items(): for setting_name, value in settings_override.items():
# Ensure the value is treated as a string and passed directly # Ensure the value is treated as a string and passed directly
if isinstance(value, str) == False: if isinstance(value, str) is False:
value = str(value) value = str(value)
# Log the value being passed # Log the value being passed
@@ -669,23 +664,31 @@ def importConfigs(pm, db, all_plugins):
# ----------------- # -----------------
# HANDLE APP was upgraded message - clear cache # HANDLE APP was upgraded message - clear cache
# Check if app was upgraded # Check if app was upgraded
buildTimestamp, new_version = getBuildTimeStampAndVersion() buildTimestamp, new_version = getBuildTimeStampAndVersion()
prev_version = conf.VERSION if conf.VERSION != '' else "unknown" prev_version = conf.VERSION if conf.VERSION != '' else "unknown"
mylog('debug', [f"[Config] buildTimestamp | prev_version | .VERSION file: '{buildTimestamp}|{prev_version}|{new_version}'"]) mylog('debug', [f"[Config] buildTimestamp | prev_version | .VERSION file: '{buildTimestamp}|{prev_version}|{new_version}'"])
if str(prev_version) != str(new_version): if str(prev_version) != str(new_version):
mylog('none', ['[Config] App upgraded 🚀']) mylog('none', ['[Config] App upgraded 🚀'])
# ccd(key, default, config_dir, name, inputtype, options, group, events=None, desc="", setJsonMetadata=None, overrideTemplate=None, forceDefault=False) # ccd(key, default, config_dir, name, inputtype, options, group, events=None, desc="", setJsonMetadata=None, overrideTemplate=None, forceDefault=False)
ccd('VERSION', new_version , c_d, '_KEEP_', '_KEEP_', '_KEEP_', '_KEEP_', None, "_KEEP_", None, None, True) ccd('VERSION', new_version , c_d, '_KEEP_', '_KEEP_', '_KEEP_', '_KEEP_', None, "_KEEP_", None, None, True)
write_notification(f'[Upgrade] : App upgraded from <code>{prev_version}</code> to <code>{new_version}</code> 🚀 Please clear the cache: <ol> <li>Click OK below</li> <li>Clear the browser cache (shift + browser refresh button)</li> <li> Clear app cache with the <i class="fa-solid fa-rotate"></i> (reload) button in the header</li><li>Go to Settings and click Save</li> </ol> Check out new features and what has changed in the <a href="https://github.com/jokob-sk/NetAlertX/releases" target="_blank">📓 release notes</a>.', 'interrupt', timeNowDB()) write_notification(f'[Upgrade]: App upgraded from <code>{prev_version}</code> to \
<code>{new_version}</ code> 🚀 Please clear the cache: \
<ol> <li>Click OK below</li> <li>Clear the browser cache (shift + \
browser refresh button)</li> <li> Clear app cache with the <i class="fa-solid fa-rotate"></i> \
(reload) button in the header</li><li>Go to Settings and click Save</li> </ol>\
Check out new features and what has changed in the \
<a href="https://github.com/jokob-sk/NetAlertX/releases" target="_blank">📓 release notes</a>.',
'interrupt',
timeNowDB()
)
# ----------------- # -----------------
# Initialization finished, update DB and API endpoints # Initialization finished, update DB and API endpoints
@@ -717,13 +720,13 @@ def importConfigs(pm, db, all_plugins):
# settingsImported = None (timestamp), # settingsImported = None (timestamp),
# showSpinner = False (1/0), # showSpinner = False (1/0),
# graphQLServerStarted = 1 (1/0)) # graphQLServerStarted = 1 (1/0))
updateState("Config imported", conf.lastImportedConfFile, conf.lastImportedConfFile, False, 1, None, None, new_version) updateState("Config imported", conf.lastImportedConfFile, conf.lastImportedConfFile, False, 1, None, None, new_version)
msg = '[Config] Imported new settings config' msg = '[Config] Imported new settings config'
mylog('minimal', msg) mylog('minimal', msg)
# front end app log loggging # front end app log loggging
write_notification(msg, 'info', timeNowDB()) write_notification(msg, 'info', timeNowDB())
return pm, all_plugins, True return pm, all_plugins, True

View File

@@ -1,19 +1,14 @@
import sys import sys
import io import io
import datetime # import datetime
import threading import threading
import queue import queue
import logging import logging
from zoneinfo import ZoneInfo # from zoneinfo import ZoneInfo
# Register NetAlertX directories
INSTALL_PATH="/app"
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
# NetAlertX imports # NetAlertX imports
import conf import conf
from const import * from const import logPath
from utils.datetime_utils import timeNowTZ from utils.datetime_utils import timeNowTZ

View File

@@ -11,13 +11,9 @@ from flask import jsonify
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app") INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
sys.path.extend([f"{INSTALL_PATH}/server"]) sys.path.extend([f"{INSTALL_PATH}/server"])
from const import apiPath from const import apiPath # noqa: E402 [flake8 lint suppression]
from logger import mylog from logger import mylog # noqa: E402 [flake8 lint suppression]
from utils.datetime_utils import timeNowDB # noqa: E402 [flake8 lint suppression]
import conf
from const import applicationPath, logPath, apiPath, confFileName, reportTemplatesPath
from logger import mylog
from utils.datetime_utils import timeNowDB
NOTIFICATION_API_FILE = apiPath + 'user_notifications.json' NOTIFICATION_API_FILE = apiPath + 'user_notifications.json'
@@ -38,7 +34,7 @@ def write_notification(content, level="alert", timestamp=None):
None None
""" """
if timestamp is None: if timestamp is None:
timestamp = timeNowDB() timestamp = timeNowDB()
# Generate GUID # Generate GUID
guid = str(uuid.uuid4()) guid = str(uuid.uuid4())

View File

@@ -18,12 +18,12 @@ import sys
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app") INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
sys.path.extend([f"{INSTALL_PATH}/server"]) sys.path.extend([f"{INSTALL_PATH}/server"])
from helper import ( from helper import ( # noqa: E402 [flake8 lint suppression]
get_setting_value, get_setting_value,
) )
from logger import mylog from logger import mylog # noqa: E402 [flake8 lint suppression]
from db.sql_safe_builder import create_safe_condition_builder from db.sql_safe_builder import create_safe_condition_builder # noqa: E402 [flake8 lint suppression]
from utils.datetime_utils import get_timezone_offset from utils.datetime_utils import get_timezone_offset # noqa: E402 [flake8 lint suppression]
# =============================================================================== # ===============================================================================
# REPORTING # REPORTING
@@ -56,14 +56,14 @@ def get_notifications(db):
WHERE eve_PendingAlertEmail = 1 AND eve_EventType not in ('Device Down', 'Down Reconnected', 'New Device' ) AND eve_MAC IN WHERE eve_PendingAlertEmail = 1 AND eve_EventType not in ('Device Down', 'Down Reconnected', 'New Device' ) AND eve_MAC IN
( (
SELECT devMac FROM Devices WHERE devAlertEvents = 0 SELECT devMac FROM Devices WHERE devAlertEvents = 0
)""") )""")
# Disable down/down reconnected notifications on devices where devAlertDown is disabled # Disable down/down reconnected notifications on devices where devAlertDown is disabled
sql.execute("""UPDATE Events SET eve_PendingAlertEmail = 0 sql.execute("""UPDATE Events SET eve_PendingAlertEmail = 0
WHERE eve_PendingAlertEmail = 1 AND eve_EventType in ('Device Down', 'Down Reconnected') AND eve_MAC IN WHERE eve_PendingAlertEmail = 1 AND eve_EventType in ('Device Down', 'Down Reconnected') AND eve_MAC IN
( (
SELECT devMac FROM Devices WHERE devAlertDown = 0 SELECT devMac FROM Devices WHERE devAlertDown = 0
)""") )""")
sections = get_setting_value("NTFPRCS_INCLUDED_SECTIONS") sections = get_setting_value("NTFPRCS_INCLUDED_SECTIONS")
@@ -79,20 +79,32 @@ def get_notifications(db):
safe_condition, parameters = condition_builder.get_safe_condition_legacy( safe_condition, parameters = condition_builder.get_safe_condition_legacy(
new_dev_condition_setting new_dev_condition_setting
) )
sqlQuery = """SELECT eve_MAC as MAC, eve_DateTime as Datetime, devLastIP as IP, eve_EventType as "Event Type", devName as "Device name", devComments as Comments FROM Events_Devices sqlQuery = """SELECT
WHERE eve_PendingAlertEmail = 1 eve_MAC as MAC,
eve_DateTime as Datetime,
devLastIP as IP,
eve_EventType as "Event Type",
devName as "Device name",
devComments as Comments FROM Events_Devices
WHERE eve_PendingAlertEmail = 1
AND eve_EventType = 'New Device' {} AND eve_EventType = 'New Device' {}
ORDER BY eve_DateTime""".format(safe_condition) ORDER BY eve_DateTime""".format(safe_condition)
except Exception as e: except Exception as e:
mylog( mylog(
"verbose", "verbose",
["[Notification] Error building safe condition for new devices: ", e], ["[Notification] Error building safe condition for new devices: ", e],
) )
# Fall back to safe default (no additional conditions) # Fall back to safe default (no additional conditions)
sqlQuery = """SELECT eve_MAC as MAC, eve_DateTime as Datetime, devLastIP as IP, eve_EventType as "Event Type", devName as "Device name", devComments as Comments FROM Events_Devices sqlQuery = """SELECT
WHERE eve_PendingAlertEmail = 1 eve_MAC as MAC,
eve_DateTime as Datetime,
devLastIP as IP,
eve_EventType as "Event Type",
devName as "Device name",
devComments as Comments FROM Events_Devices
WHERE eve_PendingAlertEmail = 1
AND eve_EventType = 'New Device' AND eve_EventType = 'New Device'
ORDER BY eve_DateTime""" ORDER BY eve_DateTime"""
parameters = {} parameters = {}
mylog("debug", ["[Notification] new_devices SQL query: ", sqlQuery]) mylog("debug", ["[Notification] new_devices SQL query: ", sqlQuery])
@@ -114,17 +126,17 @@ def get_notifications(db):
minutes = int(get_setting_value("NTFPRCS_alert_down_time") or 0) minutes = int(get_setting_value("NTFPRCS_alert_down_time") or 0)
tz_offset = get_timezone_offset() tz_offset = get_timezone_offset()
sqlQuery = f""" sqlQuery = f"""
SELECT devName, eve_MAC, devVendor, eve_IP, eve_DateTime, eve_EventType SELECT devName, eve_MAC, devVendor, eve_IP, eve_DateTime, eve_EventType
FROM Events_Devices AS down_events FROM Events_Devices AS down_events
WHERE eve_PendingAlertEmail = 1 WHERE eve_PendingAlertEmail = 1
AND down_events.eve_EventType = 'Device Down' AND down_events.eve_EventType = 'Device Down'
AND eve_DateTime < datetime('now', '-{minutes} minutes', '{tz_offset}') AND eve_DateTime < datetime('now', '-{minutes} minutes', '{tz_offset}')
AND NOT EXISTS ( AND NOT EXISTS (
SELECT 1 SELECT 1
FROM Events AS connected_events FROM Events AS connected_events
WHERE connected_events.eve_MAC = down_events.eve_MAC WHERE connected_events.eve_MAC = down_events.eve_MAC
AND connected_events.eve_EventType = 'Connected' AND connected_events.eve_EventType = 'Connected'
AND connected_events.eve_DateTime > down_events.eve_DateTime AND connected_events.eve_DateTime > down_events.eve_DateTime
) )
ORDER BY down_events.eve_DateTime; ORDER BY down_events.eve_DateTime;
""" """
@@ -181,20 +193,32 @@ def get_notifications(db):
safe_condition, parameters = condition_builder.get_safe_condition_legacy( safe_condition, parameters = condition_builder.get_safe_condition_legacy(
event_condition_setting event_condition_setting
) )
sqlQuery = """SELECT eve_MAC as MAC, eve_DateTime as Datetime, devLastIP as IP, eve_EventType as "Event Type", devName as "Device name", devComments as Comments FROM Events_Devices sqlQuery = """SELECT
WHERE eve_PendingAlertEmail = 1 eve_MAC as MAC,
eve_DateTime as Datetime,
devLastIP as IP,
eve_EventType as "Event Type",
devName as "Device name",
devComments as Comments FROM Events_Devices
WHERE eve_PendingAlertEmail = 1
AND eve_EventType IN ('Connected', 'Down Reconnected', 'Disconnected','IP Changed') {} AND eve_EventType IN ('Connected', 'Down Reconnected', 'Disconnected','IP Changed') {}
ORDER BY eve_DateTime""".format(safe_condition) ORDER BY eve_DateTime""".format(safe_condition)
except Exception as e: except Exception as e:
mylog( mylog(
"verbose", "verbose",
["[Notification] Error building safe condition for events: ", e], ["[Notification] Error building safe condition for events: ", e],
) )
# Fall back to safe default (no additional conditions) # Fall back to safe default (no additional conditions)
sqlQuery = """SELECT eve_MAC as MAC, eve_DateTime as Datetime, devLastIP as IP, eve_EventType as "Event Type", devName as "Device name", devComments as Comments FROM Events_Devices sqlQuery = """SELECT
WHERE eve_PendingAlertEmail = 1 eve_MAC as MAC,
eve_DateTime as Datetime,
devLastIP as IP,
eve_EventType as "Event Type",
devName as "Device name",
devComments as Comments FROM Events_Devices
WHERE eve_PendingAlertEmail = 1
AND eve_EventType IN ('Connected', 'Down Reconnected', 'Disconnected','IP Changed') AND eve_EventType IN ('Connected', 'Down Reconnected', 'Disconnected','IP Changed')
ORDER BY eve_DateTime""" ORDER BY eve_DateTime"""
parameters = {} parameters = {}
mylog("debug", ["[Notification] events SQL query: ", sqlQuery]) mylog("debug", ["[Notification] events SQL query: ", sqlQuery])
@@ -208,7 +232,17 @@ def get_notifications(db):
if "plugins" in sections: if "plugins" in sections:
# Compose Plugins Section # Compose Plugins Section
sqlQuery = """SELECT Plugin, Object_PrimaryId, Object_SecondaryId, DateTimeChanged, Watched_Value1, Watched_Value2, Watched_Value3, Watched_Value4, Status from Plugins_Events""" sqlQuery = """SELECT
Plugin,
Object_PrimaryId,
Object_SecondaryId,
DateTimeChanged,
Watched_Value1,
Watched_Value2,
Watched_Value3,
Watched_Value4,
Status
from Plugins_Events"""
# Get the events as JSON # Get the events as JSON
json_obj = db.get_table_as_json(sqlQuery) json_obj = db.get_table_as_json(sqlQuery)

View File

@@ -1,13 +1,12 @@
import json import json
import uuid import uuid
import socket import socket
import subprocess
from yattag import indent from yattag import indent
from json2table import convert from json2table import convert
# Register NetAlertX modules # Register NetAlertX modules
import conf import conf
from const import applicationPath, logPath, apiPath, reportTemplatesPath from const import logPath, apiPath, reportTemplatesPath
from logger import mylog, Logger from logger import mylog, Logger
from helper import ( from helper import (
generate_mac_links, generate_mac_links,
@@ -62,11 +61,7 @@ class NotificationInstance:
# Check if nothing to report, end # Check if nothing to report, end
if ( if (
JSON["new_devices"] == [] JSON["new_devices"] == [] and JSON["down_devices"] == [] and JSON["events"] == [] and JSON["plugins"] == [] and JSON["down_reconnected"] == []
and JSON["down_devices"] == []
and JSON["events"] == []
and JSON["plugins"] == []
and JSON["down_reconnected"] == []
): ):
self.HasNotifications = False self.HasNotifications = False
else: else:
@@ -88,8 +83,6 @@ class NotificationInstance:
# else: # else:
# mylog('debug', ['[Notification] notiStruc:', json.dumps(notiStruc.__dict__, indent=4)]) # mylog('debug', ['[Notification] notiStruc:', json.dumps(notiStruc.__dict__, indent=4)])
Text = ""
HTML = ""
template_file_path = reportTemplatesPath + "report_template.html" template_file_path = reportTemplatesPath + "report_template.html"
# Open text Template # Open text Template
@@ -274,7 +267,7 @@ class NotificationInstance:
# Clear the Pending Email flag from all events and devices # Clear the Pending Email flag from all events and devices
def clearPendingEmailFlag(self): def clearPendingEmailFlag(self):
# Clean Pending Alert Events # Clean Pending Alert Events
self.db.sql.execute(""" self.db.sql.execute("""
UPDATE Devices SET devLastNotification = ? UPDATE Devices SET devLastNotification = ?
WHERE devMac IN ( WHERE devMac IN (

View File

@@ -100,7 +100,7 @@ class UserEventsQueueInstance:
if not action or not isinstance(action, str): if not action or not isinstance(action, str):
msg = "[UserEventsQueueInstance] Invalid or missing action" msg = "[UserEventsQueueInstance] Invalid or missing action"
mylog('none', [msg]) mylog('none', [msg])
return False, msg return False, msg
try: try:
@@ -109,15 +109,11 @@ class UserEventsQueueInstance:
msg = f'[UserEventsQueueInstance] Action "{action}" added to the execution queue.' msg = f'[UserEventsQueueInstance] Action "{action}" added to the execution queue.'
mylog('minimal', [msg]) mylog('minimal', [msg])
return True, msg return True, msg
except Exception as e: except Exception as e:
msg = f"[UserEventsQueueInstance] ERROR Failed to write to {self.log_file}: {e}" msg = f"[UserEventsQueueInstance] ERROR Failed to write to {self.log_file}: {e}"
mylog('none', [msg]) mylog('none', [msg])
return False, msg return False, msg

View File

@@ -9,12 +9,21 @@ from concurrent.futures import ThreadPoolExecutor, as_completed
# Register NetAlertX modules # Register NetAlertX modules
import conf import conf
from const import pluginsPath, logPath, applicationPath, reportTemplatesPath from const import pluginsPath, logPath, applicationPath, reportTemplatesPath
from logger import mylog, Logger from logger import mylog, Logger
from helper import get_file_content, write_file, get_setting, get_setting_value from helper import get_file_content, get_setting, get_setting_value
from utils.datetime_utils import timeNowTZ, timeNowDB from utils.datetime_utils import timeNowTZ, timeNowDB
from app_state import updateState from app_state import updateState
from api import update_api from api import update_api
from utils.plugin_utils import logEventStatusCounts, get_plugin_string, get_plugin_setting_obj, print_plugin_info, list_to_csv, combine_plugin_objects, resolve_wildcards_arr, handle_empty, custom_plugin_decoder, decode_and_rename_files from utils.plugin_utils import (
logEventStatusCounts,
get_plugin_setting_obj,
print_plugin_info,
list_to_csv,
combine_plugin_objects,
resolve_wildcards_arr,
handle_empty,
decode_and_rename_files
)
from models.notification_instance import NotificationInstance from models.notification_instance import NotificationInstance
from messaging.in_app import write_notification from messaging.in_app import write_notification
from models.user_events_queue_instance import UserEventsQueueInstance from models.user_events_queue_instance import UserEventsQueueInstance
@@ -57,13 +66,7 @@ class plugin_manager:
# Header # Header
updateState("Run: Plugins") updateState("Run: Plugins")
mylog( mylog("debug", f"[Plugins] Check if any plugins need to be executed on run type: {runType}")
"debug",
[
"[Plugins] Check if any plugins need to be executed on run type: ",
runType,
],
)
for plugin in self.all_plugins: for plugin in self.all_plugins:
shouldRun = False shouldRun = False
@@ -72,7 +75,7 @@ class plugin_manager:
# 🔹 Lookup RUN setting from cache instead of calling get_plugin_setting_obj each time # 🔹 Lookup RUN setting from cache instead of calling get_plugin_setting_obj each time
run_setting = self._cache["settings"].get(prefix, {}).get("RUN") run_setting = self._cache["settings"].get(prefix, {}).get("RUN")
if run_setting != None and run_setting["value"] == runType: if run_setting is not None and run_setting["value"] == runType:
if runType != "schedule": if runType != "schedule":
shouldRun = True shouldRun = True
elif runType == "schedule": elif runType == "schedule":
@@ -91,10 +94,7 @@ class plugin_manager:
# 🔹 CMD also retrieved from cache # 🔹 CMD also retrieved from cache
cmd_setting = self._cache["settings"].get(prefix, {}).get("CMD") cmd_setting = self._cache["settings"].get(prefix, {}).get("CMD")
mylog( mylog("debug", f"[Plugins] CMD: {cmd_setting["value"] if cmd_setting else None}")
"debug",
["[Plugins] CMD: ", cmd_setting["value"] if cmd_setting else None],
)
execute_plugin(self.db, self.all_plugins, plugin) execute_plugin(self.db, self.all_plugins, plugin)
@@ -130,13 +130,7 @@ class plugin_manager:
mylog("debug", ["[check_and_run_user_event] User Execution Queue is empty"]) mylog("debug", ["[check_and_run_user_event] User Execution Queue is empty"])
return # Exit early if the log file is empty return # Exit early if the log file is empty
else: else:
mylog( mylog("debug", "[check_and_run_user_event] Process User Execution Queue:" + ", ".join(map(str, lines)))
"debug",
[
"[check_and_run_user_event] Process User Execution Queue:"
+ ", ".join(map(str, lines))
],
)
for line in lines: for line in lines:
# Extract event name and parameters from the log line # Extract event name and parameters from the log line
@@ -160,15 +154,7 @@ class plugin_manager:
update_api(self.db, self.all_plugins, False, param.split(","), True) update_api(self.db, self.all_plugins, False, param.split(","), True)
else: else:
mylog( mylog("minimal", f"[check_and_run_user_event] WARNING: Unhandled event in execution queue: {event} | {param}")
"minimal",
[
"[check_and_run_user_event] WARNING: Unhandled event in execution queue: ",
event,
" | ",
param,
],
)
execution_log.finalize_event( execution_log.finalize_event(
event event
) # Finalize unknown events to remove them ) # Finalize unknown events to remove them
@@ -183,9 +169,9 @@ class plugin_manager:
# ------------------------------------------------------------------------------- # -------------------------------------------------------------------------------
def handle_run(self, runType): def handle_run(self, runType):
mylog('minimal', ['[', timeNowDB(), '] START Run: ', runType]) mylog('minimal', ['[', timeNowDB(), '] START Run: ', runType])
# run the plugin # run the plugin
for plugin in self.all_plugins: for plugin in self.all_plugins:
if plugin["unique_prefix"] == runType: if plugin["unique_prefix"] == runType:
@@ -201,7 +187,7 @@ class plugin_manager:
pluginsStates={pluginName: current_plugin_state.get(pluginName, {})} pluginsStates={pluginName: current_plugin_state.get(pluginName, {})}
) )
mylog('minimal', ['[', timeNowDB(), '] END Run: ', runType]) mylog('minimal', ['[', timeNowDB(), '] END Run: ', runType])
return return
@@ -210,7 +196,7 @@ class plugin_manager:
mylog("minimal", ["[", timeNowTZ(), "] [Test] START Test: ", runType]) mylog("minimal", ["[", timeNowTZ(), "] [Test] START Test: ", runType])
mylog('minimal', ['[', timeNowDB(), '] [Test] START Test: ', runType]) mylog('minimal', ['[', timeNowDB(), '] [Test] START Test: ', runType])
# Prepare test samples # Prepare test samples
sample_json = json.loads( sample_json = json.loads(
get_file_content(reportTemplatesPath + "webhook_json_sample.json") get_file_content(reportTemplatesPath + "webhook_json_sample.json")
@@ -312,7 +298,7 @@ class plugin_param:
if param["type"] == "setting": if param["type"] == "setting":
inputValue = get_setting(param["value"]) inputValue = get_setting(param["value"])
if inputValue != None: if inputValue is not None:
setVal = inputValue["setValue"] # setting value setVal = inputValue["setValue"] # setting value
setTyp = inputValue["setType"] # setting type setTyp = inputValue["setType"] # setting type
@@ -337,9 +323,7 @@ class plugin_param:
resolved = list_to_csv(setVal) resolved = list_to_csv(setVal)
else: else:
mylog( mylog("none", "[Plugins] ⚠ ERROR: Parameter probably not converted.")
"none", ["[Plugins] ⚠ ERROR: Parameter probably not converted."]
)
return json.dumps(setVal) return json.dumps(setVal)
# Get SQL result # Get SQL result
@@ -390,15 +374,10 @@ def run_plugin(command, set_RUN_TIMEOUT, plugin):
) )
except subprocess.CalledProcessError as e: except subprocess.CalledProcessError as e:
mylog("none", [e.output]) mylog("none", [e.output])
mylog("none", ["[Plugins] ⚠ ERROR - enable LOG_LEVEL=debug and check logs"]) mylog("none", "[Plugins] ⚠ ERROR - enable LOG_LEVEL=debug and check logs")
return None return None
except subprocess.TimeoutExpired: except subprocess.TimeoutExpired:
mylog( mylog("none", f"[Plugins] ⚠ ERROR - TIMEOUT - the plugin {plugin['unique_prefix']} forcefully terminated as timeout reached. Increase TIMEOUT setting and scan interval.")
"none",
[
f"[Plugins] ⚠ ERROR - TIMEOUT - the plugin {plugin['unique_prefix']} forcefully terminated as timeout reached. Increase TIMEOUT setting and scan interval."
],
)
return None return None
@@ -411,11 +390,11 @@ def execute_plugin(db, all_plugins, plugin):
set = get_plugin_setting_obj(plugin, "CMD") set = get_plugin_setting_obj(plugin, "CMD")
# handle missing "function":"CMD" setting # handle missing "function":"CMD" setting
if set == None: if set is None:
return return
set_CMD = set["value"] set_CMD = set["value"]
# Replace hardcoded /app paths with environment-aware path # Replace hardcoded /app paths with environment-aware path
if "/app/front/plugins" in set_CMD: if "/app/front/plugins" in set_CMD:
set_CMD = set_CMD.replace("/app/front/plugins", str(pluginsPath)) set_CMD = set_CMD.replace("/app/front/plugins", str(pluginsPath))
@@ -441,13 +420,8 @@ def execute_plugin(db, all_plugins, plugin):
for param in plugin["params"]: for param in plugin["params"]:
tempParam = plugin_param(param, plugin, db) tempParam = plugin_param(param, plugin, db)
if tempParam.resolved == None: if tempParam.resolved is None:
mylog( mylog("none", f'[Plugins] The parameter "name":"{tempParam.name}" for "value": {tempParam.value} was resolved as None')
"none",
[
f'[Plugins] The parameter "name":"{tempParam.name}" for "value": {tempParam.value} was resolved as None'
],
)
else: else:
# params.append( [param["name"], resolved] ) # params.append( [param["name"], resolved] )
@@ -456,14 +430,9 @@ def execute_plugin(db, all_plugins, plugin):
if tempParam.multiplyTimeout: if tempParam.multiplyTimeout:
set_RUN_TIMEOUT = set_RUN_TIMEOUT * tempParam.paramValuesCount set_RUN_TIMEOUT = set_RUN_TIMEOUT * tempParam.paramValuesCount
mylog( mylog("debug", f'[Plugins] The parameter "name":"{param["name"]}" will multiply timeout {tempParam.paramValuesCount}x. Total timeout: {set_RUN_TIMEOUT}s')
"debug",
[
f'[Plugins] The parameter "name":"{param["name"]}" will multiply the timeout {tempParam.paramValuesCount} times. Total timeout: {set_RUN_TIMEOUT}s'
],
)
mylog("debug", ["[Plugins] Timeout: ", set_RUN_TIMEOUT]) mylog("debug", f"[Plugins] Timeout: {set_RUN_TIMEOUT}")
# build SQL query parameters to insert into the DB # build SQL query parameters to insert into the DB
sqlParams = [] sqlParams = []
@@ -475,8 +444,8 @@ def execute_plugin(db, all_plugins, plugin):
command = resolve_wildcards_arr(set_CMD.split(), params) command = resolve_wildcards_arr(set_CMD.split(), params)
# Execute command # Execute command
mylog("verbose", ["[Plugins] Executing: ", set_CMD]) mylog("verbose", f"[Plugins] Executing: {set_CMD}")
mylog("debug", ["[Plugins] Resolved : ", command]) mylog("debug", f"[Plugins] Resolved : {command}")
# Using ThreadPoolExecutor to handle concurrent subprocesses # Using ThreadPoolExecutor to handle concurrent subprocesses
with ThreadPoolExecutor(max_workers=5) as executor: with ThreadPoolExecutor(max_workers=5) as executor:
@@ -521,12 +490,7 @@ def execute_plugin(db, all_plugins, plugin):
columns = line.split("|") columns = line.split("|")
# There have to be 9 or 13 columns # There have to be 9 or 13 columns
if len(columns) not in [9, 13]: if len(columns) not in [9, 13]:
mylog( mylog("none", f"[Plugins] Wrong number of input values, must be 9 or 13, got {len(columns)} from: {line}")
"none",
[
f"[Plugins] Wrong number of input values, must be 9 or 13, got {len(columns)} from: {line}"
],
)
continue # Skip lines with incorrect number of columns continue # Skip lines with incorrect number of columns
# Common part of the SQL parameters # Common part of the SQL parameters
@@ -581,9 +545,7 @@ def execute_plugin(db, all_plugins, plugin):
# keep current instance log file, delete all from other nodes # keep current instance log file, delete all from other nodes
if filename != "last_result.log" and os.path.exists(full_path): if filename != "last_result.log" and os.path.exists(full_path):
os.remove(full_path) # DEBUG:TODO uncomment 🐛 os.remove(full_path) # DEBUG:TODO uncomment 🐛
mylog( mylog("verbose", f"[Plugins] Processed and deleted file: {full_path} ")
"verbose", [f"[Plugins] Processed and deleted file: {full_path} "]
)
# app-db-query # app-db-query
if plugin["data_source"] == "app-db-query": if plugin["data_source"] == "app-db-query":
@@ -591,7 +553,7 @@ def execute_plugin(db, all_plugins, plugin):
q = set_CMD.replace("{s-quote}", "'") q = set_CMD.replace("{s-quote}", "'")
# Execute command # Execute command
mylog("verbose", ["[Plugins] Executing: ", q]) mylog("verbose", f"[Plugins] Executing: {q}")
# set_CMD should contain a SQL query # set_CMD should contain a SQL query
arr = db.get_sql_array(q) arr = db.get_sql_array(q)
@@ -650,7 +612,7 @@ def execute_plugin(db, all_plugins, plugin):
# Append the final parameters to sqlParams # Append the final parameters to sqlParams
sqlParams.append(tuple(base_params)) sqlParams.append(tuple(base_params))
else: else:
mylog("none", ["[Plugins] Skipped invalid sql result"]) mylog("none", "[Plugins] Skipped invalid sql result")
# app-db-query # app-db-query
if plugin["data_source"] == "sqlite-db-query": if plugin["data_source"] == "sqlite-db-query":
@@ -659,19 +621,14 @@ def execute_plugin(db, all_plugins, plugin):
q = set_CMD.replace("{s-quote}", "'") q = set_CMD.replace("{s-quote}", "'")
# Execute command # Execute command
mylog("verbose", ["[Plugins] Executing: ", q]) mylog("verbose", f"[Plugins] Executing: {q}")
# ------- necessary settings check -------- # ------- necessary settings check --------
set = get_plugin_setting_obj(plugin, "DB_PATH") set = get_plugin_setting_obj(plugin, "DB_PATH")
# handle missing "function":"DB_PATH" setting # handle missing "function":"DB_PATH" setting
if set == None: if set is None:
mylog( mylog("none", "[Plugins] ⚠ ERROR: DB_PATH setting for plugin type sqlite-db-query missing.")
"none",
[
"[Plugins] ⚠ ERROR: DB_PATH setting for plugin type sqlite-db-query missing."
],
)
return return
fullSqlitePath = set["value"] fullSqlitePath = set["value"]
@@ -679,25 +636,14 @@ def execute_plugin(db, all_plugins, plugin):
# try attaching the sqlite DB # try attaching the sqlite DB
try: try:
sql.execute( sql.execute(
"ATTACH DATABASE '" "ATTACH DATABASE '" + fullSqlitePath + "' AS EXTERNAL_" + plugin["unique_prefix"]
+ fullSqlitePath
+ "' AS EXTERNAL_"
+ plugin["unique_prefix"]
) )
arr = db.get_sql_array(q) arr = db.get_sql_array(q)
sql.execute("DETACH DATABASE EXTERNAL_" + plugin["unique_prefix"]) sql.execute("DETACH DATABASE EXTERNAL_" + plugin["unique_prefix"])
except sqlite3.Error as e: except sqlite3.Error as e:
mylog( mylog("none", f"[Plugins] ⚠ ERROR: DB_PATH setting ({fullSqlitePath}) for plugin {plugin['unique_prefix']}. Did you mount it correctly?")
"none", mylog("none", f"[Plugins] ⚠ ERROR: ATTACH DATABASE failed with SQL ERROR: {e}")
[
f"[Plugins] ⚠ ERROR: DB_PATH setting ({fullSqlitePath}) for plugin {plugin['unique_prefix']}. Did you mount it correctly?"
],
)
mylog(
"none",
["[Plugins] ⚠ ERROR: ATTACH DATABASE failed with SQL ERROR: ", e],
)
return return
for row in arr: for row in arr:
@@ -748,24 +694,14 @@ def execute_plugin(db, all_plugins, plugin):
# Append the final parameters to sqlParams # Append the final parameters to sqlParams
sqlParams.append(tuple(base_params)) sqlParams.append(tuple(base_params))
else: else:
mylog("none", ["[Plugins] Skipped invalid sql result"]) mylog("none", "[Plugins] Skipped invalid sql result")
# check if the subprocess / SQL query failed / there was no valid output # check if the subprocess / SQL query failed / there was no valid output
if len(sqlParams) == 0: if len(sqlParams) == 0:
mylog( mylog("none", f'[Plugins] No output received from the plugin "{plugin["unique_prefix"]}"')
"none",
[
f'[Plugins] No output received from the plugin "{plugin["unique_prefix"]}"'
],
)
else: else:
mylog( mylog("verbose", f"[Plugins] SUCCESS for {plugin['unique_prefix']} received {len(sqlParams)} entries")
"verbose",
[
f"[Plugins] SUCCESS for {plugin['unique_prefix']} received {len(sqlParams)} entries"
],
)
# mylog('debug', ['[Plugins] sqlParam entries: ', sqlParams]) # mylog('debug', ['[Plugins] sqlParam entries: ', sqlParams])
# create objects # create objects
@@ -782,12 +718,7 @@ def execute_plugin(db, all_plugins, plugin):
# check if we need to update devices api endpoint as well to prevent long user waits on Loading... # check if we need to update devices api endpoint as well to prevent long user waits on Loading...
userUpdatedDevices = UserEventsQueueInstance().has_update_devices() userUpdatedDevices = UserEventsQueueInstance().has_update_devices()
mylog( mylog("verbose", f"[Plugins] Should I update API (userUpdatedDevices): {userUpdatedDevices}")
"verbose",
[
f"[Plugins] Should I update API (userUpdatedDevices): {userUpdatedDevices}"
],
)
if userUpdatedDevices: if userUpdatedDevices:
endpoints += ["devices"] endpoints += ["devices"]
@@ -807,7 +738,7 @@ def process_plugin_events(db, plugin, plugEventsArr):
pluginPref = plugin["unique_prefix"] pluginPref = plugin["unique_prefix"]
mylog("verbose", ["[Plugins] Processing : ", pluginPref]) mylog("verbose", f"[Plugins] Processing : {pluginPref}")
try: try:
# Begin a transaction # Begin a transaction
@@ -827,20 +758,8 @@ def process_plugin_events(db, plugin, plugEventsArr):
for eve in plugEventsArr: for eve in plugEventsArr:
pluginEvents.append(plugin_object_class(plugin, eve)) pluginEvents.append(plugin_object_class(plugin, eve))
mylog( mylog("debug", f"[Plugins] Existing objects from Plugins_Objects: {len(pluginObjects)}")
"debug", mylog("debug", f"[Plugins] Logged events from the plugin run : {len(pluginEvents)}")
[
"[Plugins] Existing objects from Plugins_Objects: ",
len(pluginObjects),
],
)
mylog(
"debug",
[
"[Plugins] Logged events from the plugin run : ",
len(pluginEvents),
],
)
# Loop thru all current events and update the status to "exists" if the event matches an existing object # Loop thru all current events and update the status to "exists" if the event matches an existing object
index = 0 index = 0
@@ -857,8 +776,7 @@ def process_plugin_events(db, plugin, plugEventsArr):
if tmpObjFromEvent.status == "exists": if tmpObjFromEvent.status == "exists":
# compare hash of the changed watched columns for uniqueness - make sure you compare the values with the same idsHash before checking watchedHash # compare hash of the changed watched columns for uniqueness - make sure you compare the values with the same idsHash before checking watchedHash
if any( if any(
x.idsHash == tmpObjFromEvent.idsHash x.idsHash == tmpObjFromEvent.idsHash and x.watchedHash != tmpObjFromEvent.watchedHash
and x.watchedHash != tmpObjFromEvent.watchedHash
for x in pluginObjects for x in pluginObjects
): ):
pluginEvents[index].status = "watched-changed" pluginEvents[index].status = "watched-changed"
@@ -879,7 +797,7 @@ def process_plugin_events(db, plugin, plugEventsArr):
# if wasn't missing before, mark as changed # if wasn't missing before, mark as changed
if tmpObj.status != "missing-in-last-scan": if tmpObj.status != "missing-in-last-scan":
tmpObj.changed = timeNowDB() tmpObj.changed = timeNowDB()
tmpObj.status = "missing-in-last-scan" tmpObj.status = "missing-in-last-scan"
# mylog('debug', [f'[Plugins] Missing from last scan (PrimaryID | SecondaryID): {tmpObj.primaryId} | {tmpObj.secondaryId}']) # mylog('debug', [f'[Plugins] Missing from last scan (PrimaryID | SecondaryID): {tmpObj.primaryId} | {tmpObj.secondaryId}'])
# Merge existing plugin objects with newly discovered ones and update existing ones with new values # Merge existing plugin objects with newly discovered ones and update existing ones with new values
@@ -955,25 +873,17 @@ def process_plugin_events(db, plugin, plugEventsArr):
# combine all DB insert and update events into one for history # combine all DB insert and update events into one for history
history_to_insert.append(values) history_to_insert.append(values)
mylog("debug", ["[Plugins] pluginEvents count: ", len(pluginEvents)]) mylog("debug", f"[Plugins] pluginEvents count: {len(pluginEvents)}")
mylog("debug", ["[Plugins] pluginObjects count: ", len(pluginObjects)]) mylog("debug", f"[Plugins] pluginObjects count: {len(pluginObjects)}")
mylog( mylog("debug", f"[Plugins] events_to_insert count: {len(events_to_insert)}")
"debug", ["[Plugins] events_to_insert count: ", len(events_to_insert)] mylog("debug", f"[Plugins] history_to_insert count: {len(history_to_insert)}")
) mylog("debug", f"[Plugins] objects_to_insert count: {len(objects_to_insert)}")
mylog( mylog("debug", f"[Plugins] objects_to_update count: {len(objects_to_update)}")
"debug", ["[Plugins] history_to_insert count: ", len(history_to_insert)]
)
mylog(
"debug", ["[Plugins] objects_to_insert count: ", len(objects_to_insert)]
)
mylog(
"debug", ["[Plugins] objects_to_update count: ", len(objects_to_update)]
)
mylog("trace", ["[Plugins] objects_to_update: ", objects_to_update]) mylog("trace", f"[Plugins] objects_to_update: {objects_to_update}")
mylog("trace", ["[Plugins] events_to_insert: ", events_to_insert]) mylog("trace", f"[Plugins] events_to_insert: {events_to_insert}")
mylog("trace", ["[Plugins] history_to_insert: ", history_to_insert]) mylog("trace", f"[Plugins] history_to_insert: {history_to_insert}")
logEventStatusCounts("pluginEvents", pluginEvents) logEventStatusCounts("pluginEvents", pluginEvents)
logEventStatusCounts("pluginObjects", pluginObjects) logEventStatusCounts("pluginObjects", pluginObjects)
@@ -982,12 +892,12 @@ def process_plugin_events(db, plugin, plugEventsArr):
if objects_to_insert: if objects_to_insert:
sql.executemany( sql.executemany(
""" """
INSERT INTO Plugins_Objects INSERT INTO Plugins_Objects
("Plugin", "Object_PrimaryID", "Object_SecondaryID", "DateTimeCreated", ("Plugin", "Object_PrimaryID", "Object_SecondaryID", "DateTimeCreated",
"DateTimeChanged", "Watched_Value1", "Watched_Value2", "Watched_Value3", "DateTimeChanged", "Watched_Value1", "Watched_Value2", "Watched_Value3",
"Watched_Value4", "Status", "Extra", "UserData", "ForeignKey", "SyncHubNodeName", "Watched_Value4", "Status", "Extra", "UserData", "ForeignKey", "SyncHubNodeName",
"HelpVal1", "HelpVal2", "HelpVal3", "HelpVal4", "HelpVal1", "HelpVal2", "HelpVal3", "HelpVal4",
"ObjectGUID") "ObjectGUID")
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
""", """,
objects_to_insert, objects_to_insert,
@@ -998,10 +908,10 @@ def process_plugin_events(db, plugin, plugEventsArr):
sql.executemany( sql.executemany(
""" """
UPDATE Plugins_Objects UPDATE Plugins_Objects
SET "Plugin" = ?, "Object_PrimaryID" = ?, "Object_SecondaryID" = ?, "DateTimeCreated" = ?, SET "Plugin" = ?, "Object_PrimaryID" = ?, "Object_SecondaryID" = ?, "DateTimeCreated" = ?,
"DateTimeChanged" = ?, "Watched_Value1" = ?, "Watched_Value2" = ?, "Watched_Value3" = ?, "DateTimeChanged" = ?, "Watched_Value1" = ?, "Watched_Value2" = ?, "Watched_Value3" = ?,
"Watched_Value4" = ?, "Status" = ?, "Extra" = ?, "UserData" = ?, "ForeignKey" = ?, "SyncHubNodeName" = ?, "Watched_Value4" = ?, "Status" = ?, "Extra" = ?, "UserData" = ?, "ForeignKey" = ?, "SyncHubNodeName" = ?,
"HelpVal1" = ?, "HelpVal2" = ?, "HelpVal3" = ?, "HelpVal4" = ?, "HelpVal1" = ?, "HelpVal2" = ?, "HelpVal3" = ?, "HelpVal4" = ?,
"ObjectGUID" = ? "ObjectGUID" = ?
WHERE "Index" = ? WHERE "Index" = ?
""", """,
@@ -1012,12 +922,12 @@ def process_plugin_events(db, plugin, plugEventsArr):
if events_to_insert: if events_to_insert:
sql.executemany( sql.executemany(
""" """
INSERT INTO Plugins_Events INSERT INTO Plugins_Events
("Plugin", "Object_PrimaryID", "Object_SecondaryID", "DateTimeCreated", ("Plugin", "Object_PrimaryID", "Object_SecondaryID", "DateTimeCreated",
"DateTimeChanged", "Watched_Value1", "Watched_Value2", "Watched_Value3", "DateTimeChanged", "Watched_Value1", "Watched_Value2", "Watched_Value3",
"Watched_Value4", "Status", "Extra", "UserData", "ForeignKey", "SyncHubNodeName", "Watched_Value4", "Status", "Extra", "UserData", "ForeignKey", "SyncHubNodeName",
"HelpVal1", "HelpVal2", "HelpVal3", "HelpVal4", "HelpVal1", "HelpVal2", "HelpVal3", "HelpVal4",
"ObjectGUID") "ObjectGUID")
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
""", """,
events_to_insert, events_to_insert,
@@ -1027,12 +937,12 @@ def process_plugin_events(db, plugin, plugEventsArr):
if history_to_insert: if history_to_insert:
sql.executemany( sql.executemany(
""" """
INSERT INTO Plugins_History INSERT INTO Plugins_History
("Plugin", "Object_PrimaryID", "Object_SecondaryID", "DateTimeCreated", ("Plugin", "Object_PrimaryID", "Object_SecondaryID", "DateTimeCreated",
"DateTimeChanged", "Watched_Value1", "Watched_Value2", "Watched_Value3", "DateTimeChanged", "Watched_Value1", "Watched_Value2", "Watched_Value3",
"Watched_Value4", "Status", "Extra", "UserData", "ForeignKey", "SyncHubNodeName", "Watched_Value4", "Status", "Extra", "UserData", "ForeignKey", "SyncHubNodeName",
"HelpVal1", "HelpVal2", "HelpVal3", "HelpVal4", "HelpVal1", "HelpVal2", "HelpVal3", "HelpVal4",
"ObjectGUID") "ObjectGUID")
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
""", """,
history_to_insert, history_to_insert,
@@ -1044,7 +954,7 @@ def process_plugin_events(db, plugin, plugEventsArr):
except Exception as e: except Exception as e:
# Rollback the transaction in case of an error # Rollback the transaction in case of an error
conn.rollback() conn.rollback()
mylog("none", ["[Plugins] ⚠ ERROR: ", e]) mylog("none", f"[Plugins] ⚠ ERROR: {e}")
raise e raise e
# Perform database table mapping if enabled for the plugin # Perform database table mapping if enabled for the plugin
@@ -1056,7 +966,7 @@ def process_plugin_events(db, plugin, plugEventsArr):
dbTable = plugin["mapped_to_table"] dbTable = plugin["mapped_to_table"]
# Log a debug message indicating the mapping of objects to the database table. # Log a debug message indicating the mapping of objects to the database table.
mylog("debug", ["[Plugins] Mapping objects to database table: ", dbTable]) mylog("debug", f"[Plugins] Mapping objects to database table: {dbTable}")
# Initialize lists to hold mapped column names, columnsStr, and valuesStr for SQL query. # Initialize lists to hold mapped column names, columnsStr, and valuesStr for SQL query.
mappedCols = [] mappedCols = []
@@ -1121,8 +1031,7 @@ def process_plugin_events(db, plugin, plugEventsArr):
# Check if there's a default value specified for this column in the JSON. # Check if there's a default value specified for this column in the JSON.
if ( if (
"mapped_to_column_data" in col "mapped_to_column_data" in col and "value" in col["mapped_to_column_data"]
and "value" in col["mapped_to_column_data"]
): ):
tmpList.append(col["mapped_to_column_data"]["value"]) tmpList.append(col["mapped_to_column_data"]["value"])
@@ -1133,8 +1042,8 @@ def process_plugin_events(db, plugin, plugEventsArr):
q = f"INSERT OR IGNORE INTO {dbTable} ({columnsStr}) VALUES ({valuesStr})" q = f"INSERT OR IGNORE INTO {dbTable} ({columnsStr}) VALUES ({valuesStr})"
# Log a debug message showing the generated SQL query for mapping. # Log a debug message showing the generated SQL query for mapping.
mylog("debug", ["[Plugins] SQL query for mapping: ", q]) mylog("debug", f"[Plugins] SQL query for mapping: {q}")
mylog("debug", ["[Plugins] SQL sqlParams for mapping: ", sqlParams]) mylog("debug", f"[Plugins] SQL sqlParams for mapping: {sqlParams}")
# Execute the SQL query using 'sql.executemany()' and the 'sqlParams' list of tuples. # Execute the SQL query using 'sql.executemany()' and the 'sqlParams' list of tuples.
# This will insert multiple rows into the database in one go. # This will insert multiple rows into the database in one go.

View File

@@ -1,14 +1,6 @@
import sys
import subprocess import subprocess
import os import os
import re import re
import datetime
from dateutil import parser
# Register NetAlertX directories
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
sys.path.extend([f"{INSTALL_PATH}/server"])
from helper import get_setting_value, check_IP_format from helper import get_setting_value, check_IP_format
from utils.datetime_utils import timeNowDB, normalizeTimeStamp from utils.datetime_utils import timeNowDB, normalizeTimeStamp
from logger import mylog, Logger from logger import mylog, Logger
@@ -44,7 +36,7 @@ def exclude_ignored_devices(db):
# Join conditions and prepare the query # Join conditions and prepare the query
conditions_str = " OR ".join(conditions) conditions_str = " OR ".join(conditions)
if conditions_str: if conditions_str:
query = f"""DELETE FROM CurrentScan WHERE query = f"""DELETE FROM CurrentScan WHERE
1=1 1=1
AND ( AND (
{conditions_str} {conditions_str}
@@ -57,22 +49,23 @@ def exclude_ignored_devices(db):
sql.execute(query) sql.execute(query)
#-------------------------------------------------------------------------------
def update_devices_data_from_scan (db): # -------------------------------------------------------------------------------
sql = db.sql #TO-DO def update_devices_data_from_scan(db):
sql = db.sql # TO-DO
startTime = timeNowDB() startTime = timeNowDB()
# Update Last Connection # Update Last Connection
mylog("debug", "[Update Devices] 1 Last Connection") mylog("debug", "[Update Devices] 1 Last Connection")
sql.execute(f"""UPDATE Devices SET devLastConnection = '{startTime}', sql.execute(f"""UPDATE Devices SET devLastConnection = '{startTime}',
devPresentLastScan = 1 devPresentLastScan = 1
WHERE EXISTS (SELECT 1 FROM CurrentScan WHERE EXISTS (SELECT 1 FROM CurrentScan
WHERE devMac = cur_MAC) """) WHERE devMac = cur_MAC) """)
# Clean no active devices # Clean no active devices
mylog("debug", "[Update Devices] 2 Clean no active devices") mylog("debug", "[Update Devices] 2 Clean no active devices")
sql.execute("""UPDATE Devices SET devPresentLastScan = 0 sql.execute("""UPDATE Devices SET devPresentLastScan = 0
WHERE NOT EXISTS (SELECT 1 FROM CurrentScan WHERE NOT EXISTS (SELECT 1 FROM CurrentScan
WHERE devMac = cur_MAC) """) WHERE devMac = cur_MAC) """)
# Update IP # Update IP
@@ -103,7 +96,7 @@ def update_devices_data_from_scan (db):
FROM CurrentScan FROM CurrentScan
WHERE Devices.devMac = CurrentScan.cur_MAC WHERE Devices.devMac = CurrentScan.cur_MAC
) )
WHERE WHERE
(devVendor IS NULL OR devVendor IN ("", "null", "(unknown)", "(Unknown)")) (devVendor IS NULL OR devVendor IN ("", "null", "(unknown)", "(Unknown)"))
AND EXISTS ( AND EXISTS (
SELECT 1 SELECT 1
@@ -116,12 +109,12 @@ def update_devices_data_from_scan (db):
sql.execute("""UPDATE Devices sql.execute("""UPDATE Devices
SET devParentPort = ( SET devParentPort = (
SELECT cur_Port SELECT cur_Port
FROM CurrentScan FROM CurrentScan
WHERE Devices.devMac = CurrentScan.cur_MAC WHERE Devices.devMac = CurrentScan.cur_MAC
) )
WHERE WHERE
(devParentPort IS NULL OR devParentPort IN ("", "null", "(unknown)", "(Unknown)")) (devParentPort IS NULL OR devParentPort IN ("", "null", "(unknown)", "(Unknown)"))
AND AND
EXISTS ( EXISTS (
SELECT 1 SELECT 1
FROM CurrentScan FROM CurrentScan
@@ -139,9 +132,9 @@ def update_devices_data_from_scan (db):
FROM CurrentScan FROM CurrentScan
WHERE Devices.devMac = CurrentScan.cur_MAC WHERE Devices.devMac = CurrentScan.cur_MAC
) )
WHERE WHERE
(devParentMAC IS NULL OR devParentMAC IN ("", "null", "(unknown)", "(Unknown)")) (devParentMAC IS NULL OR devParentMAC IN ("", "null", "(unknown)", "(Unknown)"))
AND AND
EXISTS ( EXISTS (
SELECT 1 SELECT 1
FROM CurrentScan FROM CurrentScan
@@ -161,7 +154,7 @@ def update_devices_data_from_scan (db):
FROM CurrentScan FROM CurrentScan
WHERE Devices.devMac = CurrentScan.cur_MAC WHERE Devices.devMac = CurrentScan.cur_MAC
) )
WHERE WHERE
(devSite IS NULL OR devSite IN ("", "null")) (devSite IS NULL OR devSite IN ("", "null"))
AND EXISTS ( AND EXISTS (
SELECT 1 SELECT 1
@@ -178,7 +171,7 @@ def update_devices_data_from_scan (db):
FROM CurrentScan FROM CurrentScan
WHERE Devices.devMac = CurrentScan.cur_MAC WHERE Devices.devMac = CurrentScan.cur_MAC
) )
WHERE WHERE
(devSSID IS NULL OR devSSID IN ("", "null")) (devSSID IS NULL OR devSSID IN ("", "null"))
AND EXISTS ( AND EXISTS (
SELECT 1 SELECT 1
@@ -195,7 +188,7 @@ def update_devices_data_from_scan (db):
FROM CurrentScan FROM CurrentScan
WHERE Devices.devMac = CurrentScan.cur_MAC WHERE Devices.devMac = CurrentScan.cur_MAC
) )
WHERE WHERE
(devType IS NULL OR devType IN ("", "null")) (devType IS NULL OR devType IN ("", "null"))
AND EXISTS ( AND EXISTS (
SELECT 1 SELECT 1
@@ -208,17 +201,17 @@ def update_devices_data_from_scan (db):
mylog("debug", "[Update Devices] - (if not empty) cur_Name -> (if empty) devName") mylog("debug", "[Update Devices] - (if not empty) cur_Name -> (if empty) devName")
sql.execute(""" UPDATE Devices sql.execute(""" UPDATE Devices
SET devName = COALESCE(( SET devName = COALESCE((
SELECT cur_Name SELECT cur_Name
FROM CurrentScan FROM CurrentScan
WHERE cur_MAC = devMac WHERE cur_MAC = devMac
AND cur_Name IS NOT NULL AND cur_Name IS NOT NULL
AND cur_Name <> 'null' AND cur_Name <> 'null'
AND cur_Name <> '' AND cur_Name <> ''
), devName) ), devName)
WHERE (devName IN ('(unknown)', '(name not found)', '') WHERE (devName IN ('(unknown)', '(name not found)', '')
OR devName IS NULL) OR devName IS NULL)
AND EXISTS ( AND EXISTS (
SELECT 1 SELECT 1
FROM CurrentScan FROM CurrentScan
WHERE cur_MAC = devMac WHERE cur_MAC = devMac
AND cur_Name IS NOT NULL AND cur_Name IS NOT NULL
@@ -425,9 +418,9 @@ def print_scan_stats(db):
mylog("verbose", f" {row['cur_ScanMethod']}: {row['scan_method_count']}") mylog("verbose", f" {row['cur_ScanMethod']}: {row['scan_method_count']}")
#------------------------------------------------------------------------------- # -------------------------------------------------------------------------------
def create_new_devices (db): def create_new_devices(db):
sql = db.sql # TO-DO sql = db.sql # TO-DO
startTime = timeNowDB() startTime = timeNowDB()
# Insert events for new devices from CurrentScan (not yet in Devices) # Insert events for new devices from CurrentScan (not yet in Devices)
@@ -474,36 +467,36 @@ def create_new_devices (db):
mylog("debug", "[New Devices] 2 Create devices") mylog("debug", "[New Devices] 2 Create devices")
# default New Device values preparation # default New Device values preparation
newDevColumns = """devAlertEvents, newDevColumns = """devAlertEvents,
devAlertDown, devAlertDown,
devPresentLastScan, devPresentLastScan,
devIsArchived, devIsArchived,
devIsNew, devIsNew,
devSkipRepeated, devSkipRepeated,
devScan, devScan,
devOwner, devOwner,
devFavorite, devFavorite,
devGroup, devGroup,
devComments, devComments,
devLogEvents, devLogEvents,
devLocation, devLocation,
devCustomProps, devCustomProps,
devParentRelType, devParentRelType,
devReqNicsOnline devReqNicsOnline
""" """
newDevDefaults = f"""{get_setting_value("NEWDEV_devAlertEvents")}, newDevDefaults = f"""{get_setting_value("NEWDEV_devAlertEvents")},
{get_setting_value("NEWDEV_devAlertDown")}, {get_setting_value("NEWDEV_devAlertDown")},
{get_setting_value("NEWDEV_devPresentLastScan")}, {get_setting_value("NEWDEV_devPresentLastScan")},
{get_setting_value("NEWDEV_devIsArchived")}, {get_setting_value("NEWDEV_devIsArchived")},
{get_setting_value("NEWDEV_devIsNew")}, {get_setting_value("NEWDEV_devIsNew")},
{get_setting_value("NEWDEV_devSkipRepeated")}, {get_setting_value("NEWDEV_devSkipRepeated")},
{get_setting_value("NEWDEV_devScan")}, {get_setting_value("NEWDEV_devScan")},
'{sanitize_SQL_input(get_setting_value("NEWDEV_devOwner"))}', '{sanitize_SQL_input(get_setting_value("NEWDEV_devOwner"))}',
{get_setting_value("NEWDEV_devFavorite")}, {get_setting_value("NEWDEV_devFavorite")},
'{sanitize_SQL_input(get_setting_value("NEWDEV_devGroup"))}', '{sanitize_SQL_input(get_setting_value("NEWDEV_devGroup"))}',
'{sanitize_SQL_input(get_setting_value("NEWDEV_devComments"))}', '{sanitize_SQL_input(get_setting_value("NEWDEV_devComments"))}',
{get_setting_value("NEWDEV_devLogEvents")}, {get_setting_value("NEWDEV_devLogEvents")},
'{sanitize_SQL_input(get_setting_value("NEWDEV_devLocation"))}', '{sanitize_SQL_input(get_setting_value("NEWDEV_devLocation"))}',
'{sanitize_SQL_input(get_setting_value("NEWDEV_devCustomProps"))}', '{sanitize_SQL_input(get_setting_value("NEWDEV_devCustomProps"))}',
'{sanitize_SQL_input(get_setting_value("NEWDEV_devParentRelType"))}', '{sanitize_SQL_input(get_setting_value("NEWDEV_devParentRelType"))}',
@@ -511,7 +504,7 @@ def create_new_devices (db):
""" """
# Fetch data from CurrentScan skipping ignored devices by IP and MAC # Fetch data from CurrentScan skipping ignored devices by IP and MAC
query = """SELECT cur_MAC, cur_Name, cur_Vendor, cur_ScanMethod, cur_IP, cur_SyncHubNodeName, cur_NetworkNodeMAC, cur_PORT, cur_NetworkSite, cur_SSID, cur_Type query = """SELECT cur_MAC, cur_Name, cur_Vendor, cur_ScanMethod, cur_IP, cur_SyncHubNodeName, cur_NetworkNodeMAC, cur_PORT, cur_NetworkSite, cur_SSID, cur_Type
FROM CurrentScan """ FROM CurrentScan """
mylog("debug", f"[New Devices] Collecting New Devices Query: {query}") mylog("debug", f"[New Devices] Collecting New Devices Query: {query}")
@@ -554,40 +547,40 @@ def create_new_devices (db):
) )
# Preparing the individual insert statement # Preparing the individual insert statement
sqlQuery = f"""INSERT OR IGNORE INTO Devices sqlQuery = f"""INSERT OR IGNORE INTO Devices
( (
devMac, devMac,
devName, devName,
devVendor, devVendor,
devLastIP, devLastIP,
devFirstConnection, devFirstConnection,
devLastConnection, devLastConnection,
devSyncHubNode, devSyncHubNode,
devGUID, devGUID,
devParentMAC, devParentMAC,
devParentPort, devParentPort,
devSite, devSite,
devSSID, devSSID,
devType, devType,
devSourcePlugin, devSourcePlugin,
{newDevColumns} {newDevColumns}
) )
VALUES VALUES
( (
'{sanitize_SQL_input(cur_MAC)}', '{sanitize_SQL_input(cur_MAC)}',
'{sanitize_SQL_input(cur_Name)}', '{sanitize_SQL_input(cur_Name)}',
'{sanitize_SQL_input(cur_Vendor)}', '{sanitize_SQL_input(cur_Vendor)}',
'{sanitize_SQL_input(cur_IP)}', '{sanitize_SQL_input(cur_IP)}',
?, ?,
?, ?,
'{sanitize_SQL_input(cur_SyncHubNodeName)}', '{sanitize_SQL_input(cur_SyncHubNodeName)}',
{sql_generateGuid}, {sql_generateGuid},
'{sanitize_SQL_input(cur_NetworkNodeMAC)}', '{sanitize_SQL_input(cur_NetworkNodeMAC)}',
'{sanitize_SQL_input(cur_PORT)}', '{sanitize_SQL_input(cur_PORT)}',
'{sanitize_SQL_input(cur_NetworkSite)}', '{sanitize_SQL_input(cur_NetworkSite)}',
'{sanitize_SQL_input(cur_SSID)}', '{sanitize_SQL_input(cur_SSID)}',
'{sanitize_SQL_input(cur_Type)}', '{sanitize_SQL_input(cur_Type)}',
'{sanitize_SQL_input(cur_ScanMethod)}', '{sanitize_SQL_input(cur_ScanMethod)}',
{newDevDefaults} {newDevDefaults}
)""" )"""
@@ -598,7 +591,8 @@ def create_new_devices (db):
mylog("debug", "[New Devices] New Devices end") mylog("debug", "[New Devices] New Devices end")
db.commitDB() db.commitDB()
#-------------------------------------------------------------------------------
# -------------------------------------------------------------------------------
# Check if plugins data changed # Check if plugins data changed
def check_plugin_data_changed(pm, plugins_to_check): def check_plugin_data_changed(pm, plugins_to_check):
""" """
@@ -630,7 +624,7 @@ def check_plugin_data_changed(pm, plugins_to_check):
for plugin_name in plugins_to_check: for plugin_name in plugins_to_check:
last_data_change = pm.plugin_states.get(plugin_name, {}).get("lastDataChange") last_data_change = pm.plugin_states.get(plugin_name, {}).get("lastDataChange")
last_data_check = pm.plugin_checks.get(plugin_name, "") last_data_check = pm.plugin_checks.get(plugin_name, "")
if not last_data_change: if not last_data_change:
@@ -639,13 +633,13 @@ def check_plugin_data_changed(pm, plugins_to_check):
# Normalize and validate last_changed timestamp # Normalize and validate last_changed timestamp
last_changed_ts = normalizeTimeStamp(last_data_change) last_changed_ts = normalizeTimeStamp(last_data_change)
if last_changed_ts == None: if last_changed_ts is None:
mylog('none', f'[check_plugin_data_changed] Unexpected last_data_change timestamp for {plugin_name} (input|output): ({last_data_change}|{last_changed_ts})') mylog('none', f'[check_plugin_data_changed] Unexpected last_data_change timestamp for {plugin_name} (input|output): ({last_data_change}|{last_changed_ts})')
# Normalize and validate last_data_check timestamp # Normalize and validate last_data_check timestamp
last_data_check_ts = normalizeTimeStamp(last_data_check) last_data_check_ts = normalizeTimeStamp(last_data_check)
if last_data_check_ts == None: if last_data_check_ts is None:
mylog('none', f'[check_plugin_data_changed] Unexpected last_data_check timestamp for {plugin_name} (input|output): ({last_data_check}|{last_data_check_ts})') mylog('none', f'[check_plugin_data_changed] Unexpected last_data_check timestamp for {plugin_name} (input|output): ({last_data_check}|{last_data_check_ts})')
# Track which plugins have newer state than last_checked # Track which plugins have newer state than last_checked
@@ -660,15 +654,19 @@ def check_plugin_data_changed(pm, plugins_to_check):
# Continue if changes detected # Continue if changes detected
for p in plugins_changed: for p in plugins_changed:
mylog('debug', f'[check_plugin_data_changed] {p} changed (last_data_change|last_data_check): ({pm.plugin_states.get(p, {}).get("lastDataChange")}|{pm.plugin_checks.get(p)})') mylog(
'debug',
f'[check_plugin_data_changed] {p} changed (last_data_change|last_data_check): ({pm.plugin_states.get(p, {}).get("lastDataChange")}|{pm.plugin_checks.get(p)})'
)
return True return True
#-------------------------------------------------------------------------------
# -------------------------------------------------------------------------------
def update_devices_names(pm): def update_devices_names(pm):
# --- Short-circuit if no name-resolution plugin has changed --- # --- Short-circuit if no name-resolution plugin has changed ---
if check_plugin_data_changed(pm, ["DIGSCAN", "NSLOOKUP", "NBTSCAN", "AVAHISCAN"]) == False: if check_plugin_data_changed(pm, ["DIGSCAN", "NSLOOKUP", "NBTSCAN", "AVAHISCAN"]) is False:
mylog('debug', '[Update Device Name] No relevant plugin changes since last check.') mylog('debug', '[Update Device Name] No relevant plugin changes since last check.')
return return
@@ -676,8 +674,8 @@ def update_devices_names(pm):
sql = pm.db.sql sql = pm.db.sql
resolver = NameResolver(pm.db) resolver = NameResolver(pm.db)
device_handler = DeviceInstance(pm.db) device_handler = DeviceInstance(pm.db)
nameNotFound = "(name not found)" nameNotFound = "(name not found)"
# Define resolution strategies in priority order # Define resolution strategies in priority order
@@ -722,8 +720,7 @@ def update_devices_names(pm):
# If a valid result is found, record it and stop further attempts # If a valid result is found, record it and stop further attempts
if ( if (
newFQDN not in [nameNotFound, "", "localhost."] newFQDN not in [nameNotFound, "", "localhost."] and " communications error to " not in newFQDN
and " communications error to " not in newFQDN
): ):
foundStats[label] += 1 foundStats[label] += 1
@@ -750,14 +747,14 @@ def update_devices_names(pm):
) )
# Try resolving both name and FQDN # Try resolving both name and FQDN
recordsToUpdate, recordsNotFound, foundStats, notFound = resolve_devices( recordsToUpdate, recordsNotFound, fs, notFound = resolve_devices(
unknownDevices unknownDevices
) )
# Log summary # Log summary
mylog( mylog(
"verbose", "verbose",
f"[Update Device Name] Names Found (DIGSCAN/AVAHISCAN/NSLOOKUP/NBTSCAN): {len(recordsToUpdate)} ({foundStats['DIGSCAN']}/{foundStats['AVAHISCAN']}/{foundStats['NSLOOKUP']}/{foundStats['NBTSCAN']})", f"[Update Device Name] Names Found (DIGSCAN/AVAHISCAN/NSLOOKUP/NBTSCAN): {len(recordsToUpdate)} ({fs['DIGSCAN']}/{fs['AVAHISCAN']}/{fs['NSLOOKUP']}/{fs['NBTSCAN']})",
) )
mylog("verbose", f"[Update Device Name] Names Not Found : {notFound}") mylog("verbose", f"[Update Device Name] Names Not Found : {notFound}")
@@ -780,16 +777,14 @@ def update_devices_names(pm):
) )
# Try resolving only FQDN # Try resolving only FQDN
recordsToUpdate, _, foundStats, notFound = resolve_devices( recordsToUpdate, _, fs, notFound = resolve_devices(
allDevices, resolve_both_name_and_fqdn=False allDevices, resolve_both_name_and_fqdn=False
) )
# Log summary # Log summary
mylog( mylog(
"verbose", "verbose",
f"[Update FQDN] Names Found (DIGSCAN/AVAHISCAN/NSLOOKUP/NBTSCAN): {len(recordsToUpdate)}"+ f"[Update FQDN] Names Found (DIGSCAN/AVAHISCAN/NSLOOKUP/NBTSCAN): {len(recordsToUpdate)}({fs['DIGSCAN']}/{fs['AVAHISCAN']}/{fs['NSLOOKUP']}/{fs['NBTSCAN']})",
f"({foundStats['DIGSCAN']}/{foundStats['AVAHISCAN']}/{foundStats['NSLOOKUP']}"+
f"/{foundStats['NBTSCAN']})",
) )
mylog("verbose", f"[Update FQDN] Names Not Found : {notFound}") mylog("verbose", f"[Update FQDN] Names Not Found : {notFound}")
@@ -803,7 +798,7 @@ def update_devices_names(pm):
# --- Step 3: Log last checked time --- # --- Step 3: Log last checked time ---
# After resolving names, update last checked # After resolving names, update last checked
pm.plugin_checks = {"DIGSCAN": timeNowDB(), "AVAHISCAN": timeNowDB(), "NSLOOKUP": timeNowDB(), "NBTSCAN": timeNowDB() } pm.plugin_checks = {"DIGSCAN": timeNowDB(), "AVAHISCAN": timeNowDB(), "NSLOOKUP": timeNowDB(), "NBTSCAN": timeNowDB()}
# ------------------------------------------------------------------------------- # -------------------------------------------------------------------------------
@@ -901,7 +896,6 @@ def query_MAC_vendor(pMAC):
# Search vendor in HW Vendors DB # Search vendor in HW Vendors DB
mac_start_string6 = mac[0:6] mac_start_string6 = mac[0:6]
mac_start_string9 = mac[0:9]
try: try:
with open(filePath, "r") as f: with open(filePath, "r") as f:

View File

@@ -1,16 +1,13 @@
import sys
import os import os
import re import re
import json import json
import base64 import base64
from pathlib import Path from pathlib import Path
from typing import Optional, Tuple from typing import Optional, Tuple
from logger import mylog
# Register NetAlertX directories # Register NetAlertX directories
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app") INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
sys.path.extend([f"{INSTALL_PATH}/server"])
from logger import mylog
# Load MAC/device-type/icon rules from external file # Load MAC/device-type/icon rules from external file
MAC_TYPE_ICON_PATH = Path(f"{INSTALL_PATH}/back/device_heuristics_rules.json") MAC_TYPE_ICON_PATH = Path(f"{INSTALL_PATH}/back/device_heuristics_rules.json")
@@ -83,7 +80,7 @@ def match_vendor(vendor: str, default_type: str, default_icon: str) -> Tuple[str
for pattern in patterns: for pattern in patterns:
# Only apply fallback when no MAC prefix is specified # Only apply fallback when no MAC prefix is specified
mac_prefix = pattern.get("mac_prefix", "") # mac_prefix = pattern.get("mac_prefix", "")
vendor_pattern = pattern.get("vendor", "").lower() vendor_pattern = pattern.get("vendor", "").lower()
if vendor_pattern and vendor_pattern in vendor_lc: if vendor_pattern and vendor_pattern in vendor_lc:

View File

@@ -1,11 +1,4 @@
import sys
import os
import re import re
# Register NetAlertX directories
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
sys.path.extend([f"{INSTALL_PATH}/server"])
from logger import mylog from logger import mylog
from helper import get_setting_value from helper import get_setting_value
@@ -31,7 +24,7 @@ class NameResolver:
# Check by MAC # Check by MAC
sql.execute(f""" sql.execute(f"""
SELECT Watched_Value2 FROM Plugins_Objects SELECT Watched_Value2 FROM Plugins_Objects
WHERE Plugin = '{plugin}' AND Object_PrimaryID = '{pMAC}' WHERE Plugin = '{plugin}' AND Object_PrimaryID = '{pMAC}'
""") """)
result = sql.fetchall() result = sql.fetchall()
@@ -42,9 +35,9 @@ class NameResolver:
# Check name by IP if enabled # Check name by IP if enabled
if get_setting_value('NEWDEV_IP_MATCH_NAME'): if get_setting_value('NEWDEV_IP_MATCH_NAME'):
sql.execute(f""" sql.execute(f"""
SELECT Watched_Value2 FROM Plugins_Objects SELECT Watched_Value2 FROM Plugins_Objects
WHERE Plugin = '{plugin}' AND Object_SecondaryID = '{pIP}' WHERE Plugin = '{plugin}' AND Object_SecondaryID = '{pIP}'
""") """)
result = sql.fetchall() result = sql.fetchall()

View File

@@ -1,10 +1,3 @@
import sys
import os
# Register NetAlertX directories
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
sys.path.extend([f"{INSTALL_PATH}/server"])
from scan.device_handling import ( from scan.device_handling import (
create_new_devices, create_new_devices,
print_scan_stats, print_scan_stats,
@@ -14,7 +7,7 @@ from scan.device_handling import (
) )
from helper import get_setting_value from helper import get_setting_value
from db.db_helper import print_table_schema from db.db_helper import print_table_schema
from utils.datetime_utils import timeNowDB, timeNowTZ from utils.datetime_utils import timeNowDB
from logger import mylog, Logger from logger import mylog, Logger
from messaging.reporting import skip_repeated_notifications from messaging.reporting import skip_repeated_notifications
@@ -133,20 +126,20 @@ def create_sessions_snapshot(db):
db.commitDB() db.commitDB()
#------------------------------------------------------------------------------- # -------------------------------------------------------------------------------
def insert_events (db): def insert_events(db):
sql = db.sql #TO-DO sql = db.sql # TO-DO
startTime = timeNowDB() startTime = timeNowDB()
# Check device down # Check device down
mylog("debug", "[Events] - 1 - Devices down") mylog("debug", "[Events] - 1 - Devices down")
sql.execute(f"""INSERT INTO Events (eve_MAC, eve_IP, eve_DateTime, sql.execute(f"""INSERT INTO Events (eve_MAC, eve_IP, eve_DateTime,
eve_EventType, eve_AdditionalInfo, eve_EventType, eve_AdditionalInfo,
eve_PendingAlertEmail) eve_PendingAlertEmail)
SELECT devMac, devLastIP, '{startTime}', 'Device Down', '', 1 SELECT devMac, devLastIP, '{startTime}', 'Device Down', '', 1
FROM Devices FROM Devices
WHERE devAlertDown != 0 WHERE devAlertDown != 0
AND devPresentLastScan = 1 AND devPresentLastScan = 1
AND NOT EXISTS (SELECT 1 FROM CurrentScan AND NOT EXISTS (SELECT 1 FROM CurrentScan
WHERE devMac = cur_MAC WHERE devMac = cur_MAC
) """) ) """)
@@ -156,15 +149,15 @@ def insert_events (db):
sql.execute(f""" INSERT INTO Events (eve_MAC, eve_IP, eve_DateTime, sql.execute(f""" INSERT INTO Events (eve_MAC, eve_IP, eve_DateTime,
eve_EventType, eve_AdditionalInfo, eve_EventType, eve_AdditionalInfo,
eve_PendingAlertEmail) eve_PendingAlertEmail)
SELECT DISTINCT c.cur_MAC, c.cur_IP, '{startTime}', SELECT DISTINCT c.cur_MAC, c.cur_IP, '{startTime}',
CASE CASE
WHEN last_event.eve_EventType = 'Device Down' and last_event.eve_PendingAlertEmail = 0 THEN 'Down Reconnected' WHEN last_event.eve_EventType = 'Device Down' and last_event.eve_PendingAlertEmail = 0 THEN 'Down Reconnected'
ELSE 'Connected' ELSE 'Connected'
END, END,
'', '',
1 1
FROM CurrentScan AS c FROM CurrentScan AS c
LEFT JOIN LatestEventsPerMAC AS last_event ON c.cur_MAC = last_event.eve_MAC LEFT JOIN LatestEventsPerMAC AS last_event ON c.cur_MAC = last_event.eve_MAC
WHERE last_event.devPresentLastScan = 0 OR last_event.eve_MAC IS NULL WHERE last_event.devPresentLastScan = 0 OR last_event.eve_MAC IS NULL
""") """)
@@ -190,7 +183,7 @@ def insert_events (db):
SELECT cur_MAC, cur_IP, '{startTime}', 'IP Changed', SELECT cur_MAC, cur_IP, '{startTime}', 'IP Changed',
'Previous IP: '|| devLastIP, devAlertEvents 'Previous IP: '|| devLastIP, devAlertEvents
FROM Devices, CurrentScan FROM Devices, CurrentScan
WHERE devMac = cur_MAC WHERE devMac = cur_MAC
AND devLastIP <> cur_IP """) AND devLastIP <> cur_IP """)
mylog("debug", "[Events] - Events end") mylog("debug", "[Events] - Events end")

View File

@@ -1,49 +1,43 @@
#!/usr/bin/env python # !/usr/bin/env python
import os # from datetime import datetime
import pathlib
import sys
from datetime import datetime
from dateutil import parser from dateutil import parser
import datetime import datetime
import re import re
import pytz import pytz
from pytz import timezone
from typing import Union from typing import Union
from zoneinfo import ZoneInfo from zoneinfo import ZoneInfo
import email.utils import email.utils
# Register NetAlertX directories
INSTALL_PATH="/app"
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
import conf import conf
from const import * # from const import *
# -------------------------------------------------------------------------------
#-------------------------------------------------------------------------------
# DateTime # DateTime
#------------------------------------------------------------------------------- # -------------------------------------------------------------------------------
DATETIME_PATTERN = "%Y-%m-%d %H:%M:%S" DATETIME_PATTERN = "%Y-%m-%d %H:%M:%S"
DATETIME_REGEX = re.compile(r'^\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}$') DATETIME_REGEX = re.compile(r'^\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}$')
def timeNowTZ(): def timeNowTZ():
if conf.tz: if conf.tz:
return datetime.datetime.now(conf.tz).replace(microsecond=0) return datetime.datetime.now(conf.tz).replace(microsecond=0)
else: else:
return datetime.datetime.now().replace(microsecond=0) return datetime.datetime.now().replace(microsecond=0)
def timeNow(): def timeNow():
return datetime.datetime.now().replace(microsecond=0) return datetime.datetime.now().replace(microsecond=0)
def get_timezone_offset():
def get_timezone_offset():
now = datetime.datetime.now(conf.tz) now = datetime.datetime.now(conf.tz)
offset_hours = now.utcoffset().total_seconds() / 3600 offset_hours = now.utcoffset().total_seconds() / 3600
offset_formatted = "{:+03d}:{:02d}".format(int(offset_hours), int((offset_hours % 1) * 60)) offset_formatted = "{:+03d}:{:02d}".format(int(offset_hours), int((offset_hours % 1) * 60))
return offset_formatted return offset_formatted
def timeNowDB(local=True): def timeNowDB(local=True):
""" """
Return the current time (local or UTC) as ISO 8601 for DB storage. Return the current time (local or UTC) as ISO 8601 for DB storage.
@@ -67,9 +61,9 @@ def timeNowDB(local=True):
return datetime.datetime.now(datetime.UTC).strftime(DATETIME_PATTERN) return datetime.datetime.now(datetime.UTC).strftime(DATETIME_PATTERN)
#------------------------------------------------------------------------------- # -------------------------------------------------------------------------------
# Date and time methods # Date and time methods
#------------------------------------------------------------------------------- # -------------------------------------------------------------------------------
def normalizeTimeStamp(inputTimeStamp): def normalizeTimeStamp(inputTimeStamp):
""" """
@@ -91,7 +85,7 @@ def normalizeTimeStamp(inputTimeStamp):
# Epoch timestamp (integer or float) # Epoch timestamp (integer or float)
if isinstance(inputTimeStamp, (int, float)): if isinstance(inputTimeStamp, (int, float)):
try: try:
return datetime.datetime.fromtimestamp(inputTimeStamp) return datetime.datetime.fromtimestamp(inputTimeStamp)
except (OSError, OverflowError, ValueError): except (OSError, OverflowError, ValueError):
return None return None
@@ -125,6 +119,7 @@ def format_date_iso(date1: str) -> str:
dt = datetime.datetime.fromisoformat(date1) if isinstance(date1, str) else date1 dt = datetime.datetime.fromisoformat(date1) if isinstance(date1, str) else date1
return dt.isoformat() return dt.isoformat()
# ------------------------------------------------------------------------------------------- # -------------------------------------------------------------------------------------------
def format_event_date(date_str: str, event_type: str) -> str: def format_event_date(date_str: str, event_type: str) -> str:
"""Format event date with fallback rules.""" """Format event date with fallback rules."""
@@ -135,6 +130,7 @@ def format_event_date(date_str: str, event_type: str) -> str:
else: else:
return "<still connected>" return "<still connected>"
# ------------------------------------------------------------------------------------------- # -------------------------------------------------------------------------------------------
def ensure_datetime(dt: Union[str, datetime.datetime, None]) -> datetime.datetime: def ensure_datetime(dt: Union[str, datetime.datetime, None]) -> datetime.datetime:
if dt is None: if dt is None:
@@ -157,6 +153,7 @@ def parse_datetime(dt_str):
except ValueError: except ValueError:
return None return None
def format_date(date_str: str) -> str: def format_date(date_str: str) -> str:
try: try:
dt = parse_datetime(date_str) dt = parse_datetime(date_str)
@@ -168,13 +165,14 @@ def format_date(date_str: str) -> str:
except (ValueError, AttributeError, TypeError): except (ValueError, AttributeError, TypeError):
return "invalid" return "invalid"
def format_date_diff(date1, date2, tz_name): def format_date_diff(date1, date2, tz_name):
""" """
Return difference between two datetimes as 'Xd HH:MM'. Return difference between two datetimes as 'Xd HH:MM'.
Uses app timezone if datetime is naive. Uses app timezone if datetime is naive.
date2 can be None (uses now). date2 can be None (uses now).
""" """
# Get timezone from settings # Get timezone from settings
tz = pytz.timezone(tz_name) tz = pytz.timezone(tz_name)
def parse_dt(dt): def parse_dt(dt):
@@ -184,8 +182,8 @@ def format_date_diff(date1, date2, tz_name):
try: try:
dt_parsed = email.utils.parsedate_to_datetime(dt) dt_parsed = email.utils.parsedate_to_datetime(dt)
except (ValueError, TypeError): except (ValueError, TypeError):
# fallback: parse ISO string # fallback: parse ISO string
dt_parsed = datetime.datetime.fromisoformat(dt) dt_parsed = datetime.datetime.fromisoformat(dt)
# convert naive GMT/UTC to app timezone # convert naive GMT/UTC to app timezone
if dt_parsed.tzinfo is None: if dt_parsed.tzinfo is None:
dt_parsed = tz.localize(dt_parsed) dt_parsed = tz.localize(dt_parsed)
@@ -208,4 +206,4 @@ def format_date_diff(date1, date2, tz_name):
"hours": hours, "hours": hours,
"minutes": minutes, "minutes": minutes,
"total_minutes": total_minutes "total_minutes": total_minutes
} }

View File

@@ -1,6 +1,6 @@
import os import os
import json import json
from collections import namedtuple
import conf import conf
from logger import mylog from logger import mylog
from utils.crypto_utils import decrypt_data from utils.crypto_utils import decrypt_data
@@ -220,9 +220,7 @@ def get_plugins_configs(loadAll):
# Load all plugins if `loadAll` is True, the plugin is in the enabled list, # Load all plugins if `loadAll` is True, the plugin is in the enabled list,
# or no specific plugins are enabled (enabledPlugins is empty) # or no specific plugins are enabled (enabledPlugins is empty)
if ( if (
loadAll loadAll or plugJson["unique_prefix"] in enabledPlugins or enabledPlugins == []
or plugJson["unique_prefix"] in enabledPlugins
or enabledPlugins == []
): ):
# Load the contents of the config.json file as a JSON object and append it to pluginsList # Load the contents of the config.json file as a JSON object and append it to pluginsList
pluginsList.append(plugJson) pluginsList.append(plugJson)

View File

@@ -1,11 +1,4 @@
import sqlite3 import sqlite3
import os
import sys
# Register NetAlertX directories
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
sys.path.extend([f"{INSTALL_PATH}/server"])
from logger import mylog, Logger from logger import mylog, Logger
from helper import get_setting_value from helper import get_setting_value
from models.device_instance import DeviceInstance from models.device_instance import DeviceInstance
@@ -15,7 +8,6 @@ from models.plugin_object_instance import PluginObjectInstance
Logger(get_setting_value("LOG_LEVEL")) Logger(get_setting_value("LOG_LEVEL"))
class Action: class Action:
"""Base class for all actions.""" """Base class for all actions."""

View File

@@ -1,10 +1,3 @@
import os
import sys
# Register NetAlertX directories
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
sys.path.extend([f"{INSTALL_PATH}/server"])
from helper import get_setting_value from helper import get_setting_value
from logger import Logger from logger import Logger
from const import sql_generateGuid from const import sql_generateGuid
@@ -96,11 +89,11 @@ class AppEvent_obj:
"ObjectPrimaryID" TEXT, "ObjectPrimaryID" TEXT,
"ObjectSecondaryID" TEXT, "ObjectSecondaryID" TEXT,
"ObjectForeignKey" TEXT, "ObjectForeignKey" TEXT,
"ObjectIndex" TEXT, "ObjectIndex" TEXT,
"ObjectIsNew" BOOLEAN, "ObjectIsNew" BOOLEAN,
"ObjectIsArchived" BOOLEAN, "ObjectIsArchived" BOOLEAN,
"ObjectStatusColumn" TEXT, "ObjectStatusColumn" TEXT,
"ObjectStatus" TEXT, "ObjectStatus" TEXT,
"AppEventType" TEXT, "AppEventType" TEXT,
"Helper1" TEXT, "Helper1" TEXT,
"Helper2" TEXT, "Helper2" TEXT,
@@ -117,11 +110,11 @@ class AppEvent_obj:
CREATE TRIGGER IF NOT EXISTS "{trigger_name}" CREATE TRIGGER IF NOT EXISTS "{trigger_name}"
AFTER {event.upper()} ON "{table_name}" AFTER {event.upper()} ON "{table_name}"
WHEN NOT EXISTS ( WHEN NOT EXISTS (
SELECT 1 FROM AppEvents SELECT 1 FROM AppEvents
WHERE AppEventProcessed = 0 WHERE AppEventProcessed = 0
AND ObjectType = '{table_name}' AND ObjectType = '{table_name}'
AND ObjectGUID = {manage_prefix(config["fields"]["ObjectGUID"], event)} AND ObjectGUID = {manage_prefix(config["fields"]["ObjectGUID"], event)}
AND ObjectStatus = {manage_prefix(config["fields"]["ObjectStatus"], event)} AND ObjectStatus = {manage_prefix(config["fields"]["ObjectStatus"], event)}
AND AppEventType = '{event.lower()}' AND AppEventType = '{event.lower()}'
) )
BEGIN BEGIN
@@ -142,10 +135,10 @@ class AppEvent_obj:
"AppEventType" "AppEventType"
) )
VALUES ( VALUES (
{sql_generateGuid}, {sql_generateGuid},
DATETIME('now'), DATETIME('now'),
FALSE, FALSE,
'{table_name}', '{table_name}',
{manage_prefix(config["fields"]["ObjectGUID"], event)}, -- ObjectGUID {manage_prefix(config["fields"]["ObjectGUID"], event)}, -- ObjectGUID
{manage_prefix(config["fields"]["ObjectPrimaryID"], event)}, -- ObjectPrimaryID {manage_prefix(config["fields"]["ObjectPrimaryID"], event)}, -- ObjectPrimaryID
{manage_prefix(config["fields"]["ObjectSecondaryID"], event)}, -- ObjectSecondaryID {manage_prefix(config["fields"]["ObjectSecondaryID"], event)}, -- ObjectSecondaryID

View File

@@ -1,12 +1,5 @@
import re import re
import json import json
import os
import sys
# Register NetAlertX directories
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
sys.path.extend([f"{INSTALL_PATH}/server"])
from logger import mylog, Logger from logger import mylog, Logger
from helper import get_setting_value from helper import get_setting_value

View File

@@ -1,22 +1,17 @@
import json import json
import os
import sys
# Register NetAlertX directories
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
sys.path.extend([f"{INSTALL_PATH}/server"])
from const import fullConfFolder from const import fullConfFolder
from logger import mylog, Logger from logger import mylog, Logger
from helper import get_setting_value from helper import get_setting_value
# Make sure log level is initialized correctly
Logger(get_setting_value("LOG_LEVEL"))
from workflows.triggers import Trigger from workflows.triggers import Trigger
from workflows.conditions import ConditionGroup from workflows.conditions import ConditionGroup
from workflows.actions import DeleteObjectAction, RunPluginAction, UpdateFieldAction from workflows.actions import DeleteObjectAction, RunPluginAction, UpdateFieldAction
# Make sure log level is initialized correctly
Logger(get_setting_value("LOG_LEVEL"))
class WorkflowManager: class WorkflowManager:
def __init__(self, db): def __init__(self, db):
self.db = db self.db = db

View File

@@ -1,11 +1,4 @@
import json import json
import os
import sys
# Register NetAlertX directories
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
sys.path.extend([f"{INSTALL_PATH}/server"])
from logger import mylog, Logger from logger import mylog, Logger
from helper import get_setting_value from helper import get_setting_value
from database import get_array_from_sql_rows from database import get_array_from_sql_rows
@@ -28,8 +21,7 @@ class Trigger:
self.event_type = triggerJson["event_type"] self.event_type = triggerJson["event_type"]
self.event = event # Store the triggered event context, if provided self.event = event # Store the triggered event context, if provided
self.triggered = ( self.triggered = (
self.object_type == event["ObjectType"] self.object_type == event["ObjectType"] and self.event_type == event["AppEventType"]
and self.event_type == event["AppEventType"]
) )
mylog( mylog(
@@ -53,9 +45,9 @@ class Trigger:
raise ValueError(m) raise ValueError(m)
query = f""" query = f"""
SELECT * FROM SELECT * FROM
{db_table} {db_table}
WHERE {refField} = '{event["ObjectGUID"]}' WHERE {refField} = '{event["ObjectGUID"]}'
""" """
mylog("debug", [query]) mylog("debug", [query])

View File

@@ -1 +1 @@
""" tests for NetAlertX """ """ tests for NetAlertX """

View File

@@ -7,9 +7,9 @@ import pytest
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app') INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"]) sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from helper import get_setting_value from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
from utils.datetime_utils import timeNowDB from utils.datetime_utils import timeNowDB # noqa: E402 [flake8 lint suppression]
from api_server.api_server_start import app from api_server.api_server_start import app # noqa: E402 [flake8 lint suppression]
@pytest.fixture(scope="session") @pytest.fixture(scope="session")
@@ -26,7 +26,7 @@ def client():
@pytest.fixture(scope="session") @pytest.fixture(scope="session")
def test_mac(): def test_mac():
# Generate a unique MAC for each test run # Generate a unique MAC for each test run
return "AA:BB:CC:" + ":".join(f"{random.randint(0,255):02X}" for _ in range(3)) return "AA:BB:CC:" + ":".join(f"{random.randint(0, 255):02X}" for _ in range(3))
def auth_headers(token): def auth_headers(token):

View File

@@ -1,17 +1,17 @@
import sys import sys
import pathlib # import pathlib
import sqlite3 # import sqlite3
import random import random
import string # import string
import uuid # import uuid
import os import os
import pytest import pytest
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app") INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"]) sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from helper import get_setting_value from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
from api_server.api_server_start import app from api_server.api_server_start import app # noqa: E402 [flake8 lint suppression]
@pytest.fixture(scope="session") @pytest.fixture(scope="session")
@@ -28,7 +28,7 @@ def client():
@pytest.fixture @pytest.fixture
def test_mac(): def test_mac():
# Generate a unique MAC for each test run # Generate a unique MAC for each test run
return "AA:BB:CC:" + ":".join(f"{random.randint(0,255):02X}" for _ in range(3)) return "AA:BB:CC:" + ":".join(f"{random.randint(0, 255):02X}" for _ in range(3))
def auth_headers(token): def auth_headers(token):
@@ -38,7 +38,6 @@ def auth_headers(token):
def test_create_device(client, api_token, test_mac): def test_create_device(client, api_token, test_mac):
payload = { payload = {
"createNew": True, "createNew": True,
"devType": "Test Device",
"devOwner": "Unit Test", "devOwner": "Unit Test",
"devType": "Router", "devType": "Router",
"devVendor": "TestVendor", "devVendor": "TestVendor",
@@ -103,7 +102,7 @@ def test_copy_device(client, api_token, test_mac):
# Step 2: Generate a target MAC # Step 2: Generate a target MAC
target_mac = "AA:BB:CC:" + ":".join( target_mac = "AA:BB:CC:" + ":".join(
f"{random.randint(0,255):02X}" for _ in range(3) f"{random.randint(0, 255):02X}" for _ in range(3)
) )
# Step 3: Copy device # Step 3: Copy device

View File

@@ -1,32 +1,36 @@
import sys import sys
import pathlib # import pathlib
import sqlite3 # import sqlite3
import base64 import base64
import random import random
import string # import string
import uuid # import uuid
import os import os
import pytest import pytest
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app') INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"]) sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from helper import get_setting_value from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
from api_server.api_server_start import app from api_server.api_server_start import app # noqa: E402 [flake8 lint suppression]
@pytest.fixture(scope="session") @pytest.fixture(scope="session")
def api_token(): def api_token():
return get_setting_value("API_TOKEN") return get_setting_value("API_TOKEN")
@pytest.fixture @pytest.fixture
def client(): def client():
with app.test_client() as client: with app.test_client() as client:
yield client yield client
@pytest.fixture @pytest.fixture
def test_mac(): def test_mac():
# Generate a unique MAC for each test run # Generate a unique MAC for each test run
return "AA:BB:CC:" + ":".join(f"{random.randint(0,255):02X}" for _ in range(3)) return "AA:BB:CC:" + ":".join(f"{random.randint(0, 255):02X}" for _ in range(3))
def auth_headers(token): def auth_headers(token):
return {"Authorization": f"Bearer {token}"} return {"Authorization": f"Bearer {token}"}
@@ -40,12 +44,13 @@ def create_dummy(client, api_token, test_mac):
"devType": "Router", "devType": "Router",
"devVendor": "TestVendor", "devVendor": "TestVendor",
} }
resp = client.post(f"/device/{test_mac}", json=payload, headers=auth_headers(api_token)) client.post(f"/device/{test_mac}", json=payload, headers=auth_headers(api_token))
def test_get_all_devices(client, api_token, test_mac): def test_get_all_devices(client, api_token, test_mac):
# Ensure there is at least one device # Ensure there is at least one device
create_dummy(client, api_token, test_mac) create_dummy(client, api_token, test_mac)
# Fetch all devices # Fetch all devices
resp = client.get("/devices", headers=auth_headers(api_token)) resp = client.get("/devices", headers=auth_headers(api_token))
assert resp.status_code == 200 assert resp.status_code == 200
@@ -59,7 +64,7 @@ def test_get_all_devices(client, api_token, test_mac):
def test_delete_devices_with_macs(client, api_token, test_mac): def test_delete_devices_with_macs(client, api_token, test_mac):
# First create device so it exists # First create device so it exists
create_dummy(client, api_token, test_mac) create_dummy(client, api_token, test_mac)
client.post(f"/device/{test_mac}", json={"createNew": True}, headers=auth_headers(api_token)) client.post(f"/device/{test_mac}", json={"createNew": True}, headers=auth_headers(api_token))
# Delete by MAC # Delete by MAC
@@ -67,6 +72,7 @@ def test_delete_devices_with_macs(client, api_token, test_mac):
assert resp.status_code == 200 assert resp.status_code == 200
assert resp.json.get("success") is True assert resp.json.get("success") is True
def test_delete_all_empty_macs(client, api_token): def test_delete_all_empty_macs(client, api_token):
resp = client.delete("/devices/empty-macs", headers=auth_headers(api_token)) resp = client.delete("/devices/empty-macs", headers=auth_headers(api_token))
assert resp.status_code == 200 assert resp.status_code == 200
@@ -79,6 +85,7 @@ def test_delete_unknown_devices(client, api_token):
assert resp.status_code == 200 assert resp.status_code == 200
assert resp.json.get("success") is True assert resp.json.get("success") is True
def test_export_devices_csv(client, api_token, test_mac): def test_export_devices_csv(client, api_token, test_mac):
# Create a device first # Create a device first
create_dummy(client, api_token, test_mac) create_dummy(client, api_token, test_mac)
@@ -92,6 +99,7 @@ def test_export_devices_csv(client, api_token, test_mac):
# CSV should contain test_mac # CSV should contain test_mac
assert test_mac in resp.data.decode() assert test_mac in resp.data.decode()
def test_export_devices_json(client, api_token, test_mac): def test_export_devices_json(client, api_token, test_mac):
# Create a device first # Create a device first
create_dummy(client, api_token, test_mac) create_dummy(client, api_token, test_mac)
@@ -101,7 +109,7 @@ def test_export_devices_json(client, api_token, test_mac):
assert resp.status_code == 200 assert resp.status_code == 200
assert resp.is_json assert resp.is_json
data = resp.get_json() data = resp.get_json()
assert any(dev.get("devMac") == test_mac for dev in data["data"]) assert any(dev.get("devMac") == test_mac for dev in data["data"])
def test_export_devices_invalid_format(client, api_token): def test_export_devices_invalid_format(client, api_token):
@@ -143,6 +151,7 @@ def test_export_import_cycle_base64(client, api_token, test_mac):
assert resp.json.get("inserted") >= 1 assert resp.json.get("inserted") >= 1
assert resp.json.get("skipped_lines") == [] assert resp.json.get("skipped_lines") == []
def test_devices_totals(client, api_token, test_mac): def test_devices_totals(client, api_token, test_mac):
# 1. Create a dummy device # 1. Create a dummy device
create_dummy(client, api_token, test_mac) create_dummy(client, api_token, test_mac)
@@ -189,9 +198,10 @@ def test_devices_by_status(client, api_token, test_mac):
assert fav_data is not None assert fav_data is not None
assert "&#9733" in fav_data["title"] assert "&#9733" in fav_data["title"]
def test_delete_test_devices(client, api_token, test_mac): def test_delete_test_devices(client, api_token, test_mac):
# Delete by MAC # Delete by MAC
resp = client.delete("/devices", json={"macs": ["AA:BB:CC:*"]}, headers=auth_headers(api_token)) resp = client.delete("/devices", json={"macs": ["AA:BB:CC:*"]}, headers=auth_headers(api_token))
assert resp.status_code == 200 assert resp.status_code == 200
assert resp.json.get("success") is True assert resp.json.get("success") is True

View File

@@ -1,37 +1,38 @@
import sys import sys
import pathlib
import sqlite3
import random
import string
import uuid
import os import os
import pytest import pytest
from datetime import datetime, timedelta import random
from datetime import timedelta
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app') INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"]) sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from helper import get_setting_value from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
from utils.datetime_utils import timeNowTZ from utils.datetime_utils import timeNowTZ # noqa: E402 [flake8 lint suppression]
from api_server.api_server_start import app from api_server.api_server_start import app # noqa: E402 [flake8 lint suppression]
@pytest.fixture(scope="session") @pytest.fixture(scope="session")
def api_token(): def api_token():
return get_setting_value("API_TOKEN") return get_setting_value("API_TOKEN")
@pytest.fixture @pytest.fixture
def client(): def client():
with app.test_client() as client: with app.test_client() as client:
yield client yield client
@pytest.fixture @pytest.fixture
def test_mac(): def test_mac():
# Generate a unique MAC for each test run # Generate a unique MAC for each test run
return "AA:BB:CC:" + ":".join(f"{random.randint(0,255):02X}" for _ in range(3)) return "AA:BB:CC:" + ":".join(f"{random.randint(0, 255):02X}" for _ in range(3))
def auth_headers(token): def auth_headers(token):
return {"Authorization": f"Bearer {token}"} return {"Authorization": f"Bearer {token}"}
def create_event(client, api_token, mac, event="UnitTest Event", days_old=None): def create_event(client, api_token, mac, event="UnitTest Event", days_old=None):
payload = {"ip": "0.0.0.0", "event_type": event} payload = {"ip": "0.0.0.0", "event_type": event}
@@ -43,10 +44,12 @@ def create_event(client, api_token, mac, event="UnitTest Event", days_old=None):
return client.post(f"/events/create/{mac}", json=payload, headers=auth_headers(api_token)) return client.post(f"/events/create/{mac}", json=payload, headers=auth_headers(api_token))
def list_events(client, api_token, mac=None): def list_events(client, api_token, mac=None):
url = "/events" if mac is None else f"/events?mac={mac}" url = "/events" if mac is None else f"/events?mac={mac}"
return client.get(url, headers=auth_headers(api_token)) return client.get(url, headers=auth_headers(api_token))
def test_create_event(client, api_token, test_mac): def test_create_event(client, api_token, test_mac):
# create event # create event
resp = create_event(client, api_token, test_mac) resp = create_event(client, api_token, test_mac)
@@ -82,6 +85,7 @@ def test_delete_events_for_mac(client, api_token, test_mac):
assert resp.status_code == 200 assert resp.status_code == 200
assert len(resp.json.get("events", [])) == 0 assert len(resp.json.get("events", [])) == 0
def test_get_events_totals(client, api_token): def test_get_events_totals(client, api_token):
# 1. Request totals with default period # 1. Request totals with default period
resp = client.get( resp = client.get(
@@ -108,7 +112,6 @@ def test_get_events_totals(client, api_token):
assert len(data_month) == 6 assert len(data_month) == 6
def test_delete_all_events(client, api_token, test_mac): def test_delete_all_events(client, api_token, test_mac):
# create two events # create two events
create_event(client, api_token, test_mac) create_event(client, api_token, test_mac)
@@ -146,5 +149,3 @@ def test_delete_events_dynamic_days(client, api_token, test_mac):
events = resp.get_json().get("events", []) events = resp.get_json().get("events", [])
mac_events = [ev for ev in events if ev.get("eve_MAC") == test_mac] mac_events = [ev for ev in events if ev.get("eve_MAC") == test_mac]
assert len(mac_events) == 1 assert len(mac_events) == 1

View File

@@ -1,31 +1,30 @@
import sys import sys
import pathlib
import sqlite3
import random import random
import string
import uuid
import pytest import pytest
from datetime import datetime, timedelta
INSTALL_PATH = "/app" INSTALL_PATH = "/app"
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"]) sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from helper import get_setting_value from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
from api_server.api_server_start import app from api_server.api_server_start import app # noqa: E402 [flake8 lint suppression]
@pytest.fixture(scope="session") @pytest.fixture(scope="session")
def api_token(): def api_token():
return get_setting_value("API_TOKEN") return get_setting_value("API_TOKEN")
@pytest.fixture @pytest.fixture
def client(): def client():
with app.test_client() as client: with app.test_client() as client:
yield client yield client
@pytest.fixture @pytest.fixture
def test_mac(): def test_mac():
# Generate a unique MAC for each test run # Generate a unique MAC for each test run
return "AA:BB:CC:" + ":".join(f"{random.randint(0,255):02X}" for _ in range(3)) return "AA:BB:CC:" + ":".join(f"{random.randint(0, 255):02X}" for _ in range(3))
def auth_headers(token): def auth_headers(token):
return {"Authorization": f"Bearer {token}"} return {"Authorization": f"Bearer {token}"}
@@ -37,6 +36,7 @@ def test_graphql_debug_get(client):
assert resp.status_code == 200 assert resp.status_code == 200
assert resp.data.decode() == "NetAlertX GraphQL server running." assert resp.data.decode() == "NetAlertX GraphQL server running."
def test_graphql_post_unauthorized(client): def test_graphql_post_unauthorized(client):
"""POST /graphql without token should return 401""" """POST /graphql without token should return 401"""
query = {"query": "{ devices { devName devMac } }"} query = {"query": "{ devices { devName devMac } }"}
@@ -47,13 +47,14 @@ def test_graphql_post_unauthorized(client):
# --- DEVICES TESTS --- # --- DEVICES TESTS ---
def test_graphql_post_devices(client, api_token): def test_graphql_post_devices(client, api_token):
"""POST /graphql with a valid token should return device data""" """POST /graphql with a valid token should return device data"""
query = { query = {
"query": """ "query": """
{ {
devices { devices {
devices { devices {
devGUID devGUID
devGroup devGroup
devIsRandomMac devIsRandomMac
@@ -77,8 +78,8 @@ def test_graphql_post_devices(client, api_token):
assert isinstance(data["devices"]["devices"], list) assert isinstance(data["devices"]["devices"], list)
assert isinstance(data["devices"]["count"], int) assert isinstance(data["devices"]["count"], int)
# --- SETTINGS TESTS ---
# --- SETTINGS TESTS ---
def test_graphql_post_settings(client, api_token): def test_graphql_post_settings(client, api_token):
"""POST /graphql should return settings data""" """POST /graphql should return settings data"""
query = { query = {
@@ -97,8 +98,8 @@ def test_graphql_post_settings(client, api_token):
assert "settings" in data assert "settings" in data
assert isinstance(data["settings"]["settings"], list) assert isinstance(data["settings"]["settings"], list)
# --- LANGSTRINGS TESTS ---
# --- LANGSTRINGS TESTS ---
def test_graphql_post_langstrings_specific(client, api_token): def test_graphql_post_langstrings_specific(client, api_token):
"""Retrieve a specific langString in a given language""" """Retrieve a specific langString in a given language"""
query = { query = {
@@ -167,4 +168,4 @@ def test_graphql_post_langstrings_all_languages(client, api_token):
assert data["enStrings"]["count"] >= 1 assert data["enStrings"]["count"] >= 1
assert data["deStrings"]["count"] >= 1 assert data["deStrings"]["count"] >= 1
# Ensure langCode matches # Ensure langCode matches
assert all(e["langCode"] == "en_us" for e in data["enStrings"]["langStrings"]) assert all(e["langCode"] == "en_us" for e in data["enStrings"]["langStrings"])

View File

@@ -1,17 +1,13 @@
import sys import sys
import pathlib
import sqlite3
import random import random
import string
import uuid
import os import os
import pytest import pytest
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app") INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"]) sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from helper import get_setting_value from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
from api_server.api_server_start import app from api_server.api_server_start import app # noqa: E402 [flake8 lint suppression]
@pytest.fixture(scope="session") @pytest.fixture(scope="session")
@@ -28,7 +24,7 @@ def client():
@pytest.fixture @pytest.fixture
def test_mac(): def test_mac():
# Generate a unique MAC for each test run # Generate a unique MAC for each test run
return "AA:BB:CC:" + ":".join(f"{random.randint(0,255):02X}" for _ in range(3)) return "AA:BB:CC:" + ":".join(f"{random.randint(0, 255):02X}" for _ in range(3))
def auth_headers(token): def auth_headers(token):
@@ -36,6 +32,6 @@ def auth_headers(token):
def test_delete_history(client, api_token): def test_delete_history(client, api_token):
resp = client.delete(f"/history", headers=auth_headers(api_token)) resp = client.delete("/history", headers=auth_headers(api_token))
assert resp.status_code == 200 assert resp.status_code == 200
assert resp.json.get("success") is True assert resp.json.get("success") is True

View File

@@ -5,8 +5,9 @@ import pytest
INSTALL_PATH = "/app" INSTALL_PATH = "/app"
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"]) sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from helper import get_setting_value from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
from api_server.api_server_start import app from api_server.api_server_start import app # noqa: E402 [flake8 lint suppression]
# ---------------------------- # ----------------------------
# Fixtures # Fixtures
@@ -15,14 +16,17 @@ from api_server.api_server_start import app
def api_token(): def api_token():
return get_setting_value("API_TOKEN") return get_setting_value("API_TOKEN")
@pytest.fixture @pytest.fixture
def client(): def client():
with app.test_client() as client: with app.test_client() as client:
yield client yield client
def auth_headers(token): def auth_headers(token):
return {"Authorization": f"Bearer {token}"} return {"Authorization": f"Bearer {token}"}
# ---------------------------- # ----------------------------
# Logs Endpoint Tests # Logs Endpoint Tests
# ---------------------------- # ----------------------------
@@ -31,16 +35,18 @@ def test_clean_log(client, api_token):
assert resp.status_code == 200 assert resp.status_code == 200
assert resp.json.get("success") is True assert resp.json.get("success") is True
def test_clean_log_not_allowed(client, api_token): def test_clean_log_not_allowed(client, api_token):
resp = client.delete("/logs?file=not_allowed.log", headers=auth_headers(api_token)) resp = client.delete("/logs?file=not_allowed.log", headers=auth_headers(api_token))
assert resp.status_code == 400 assert resp.status_code == 400
assert resp.json.get("success") is False assert resp.json.get("success") is False
# ---------------------------- # ----------------------------
# Execution Queue Endpoint Tests # Execution Queue Endpoint Tests
# ---------------------------- # ----------------------------
def test_add_to_execution_queue(client, api_token): def test_add_to_execution_queue(client, api_token):
action_name = f"test_action_{random.randint(0,9999)}" action_name = f"test_action_{random.randint(0, 9999)}"
resp = client.post( resp = client.post(
"/logs/add-to-execution-queue", "/logs/add-to-execution-queue",
json={"action": action_name}, json={"action": action_name},
@@ -50,6 +56,7 @@ def test_add_to_execution_queue(client, api_token):
assert resp.json.get("success") is True assert resp.json.get("success") is True
assert action_name in resp.json.get("message", "") assert action_name in resp.json.get("message", "")
def test_add_to_execution_queue_missing_action(client, api_token): def test_add_to_execution_queue_missing_action(client, api_token):
resp = client.post( resp = client.post(
"/logs/add-to-execution-queue", "/logs/add-to-execution-queue",

View File

@@ -1,11 +1,8 @@
# ----------------------------- # -----------------------------
# In-app notifications tests with cleanup # In-app notifications tests with cleanup
# ----------------------------- # -----------------------------
import json
import random import random
import string import string
import uuid
import pytest import pytest
import os import os
import sys import sys
@@ -14,26 +11,31 @@ import sys
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app') INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"]) sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from api_server.api_server_start import app from api_server.api_server_start import app # noqa: E402 [flake8 lint suppression]
from messaging.in_app import NOTIFICATION_API_FILE # Import the path to notifications file from messaging.in_app import NOTIFICATION_API_FILE # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
@pytest.fixture(scope="session") @pytest.fixture(scope="session")
def api_token(): def api_token():
return get_setting_value("API_TOKEN") return get_setting_value("API_TOKEN")
@pytest.fixture @pytest.fixture
def client(): def client():
with app.test_client() as client: with app.test_client() as client:
yield client yield client
def auth_headers(token): def auth_headers(token):
return {"Authorization": f"Bearer {token}"} return {"Authorization": f"Bearer {token}"}
@pytest.fixture @pytest.fixture
def random_content(): def random_content():
return "Test Notification " + "".join(random.choices(string.ascii_letters + string.digits, k=6)) return "Test Notification " + "".join(random.choices(string.ascii_letters + string.digits, k=6))
@pytest.fixture @pytest.fixture
def notification_guid(client, api_token, random_content): def notification_guid(client, api_token, random_content):
# Write a notification and return its GUID # Write a notification and return its GUID
@@ -50,6 +52,7 @@ def notification_guid(client, api_token, random_content):
assert guid is not None assert guid is not None
return guid return guid
@pytest.fixture(autouse=True) @pytest.fixture(autouse=True)
def cleanup_notifications(): def cleanup_notifications():
# Runs before and after each test # Runs before and after each test
@@ -70,6 +73,7 @@ def cleanup_notifications():
with open(NOTIFICATION_API_FILE, "w") as f: with open(NOTIFICATION_API_FILE, "w") as f:
f.write(backup) f.write(backup)
# ----------------------------- # -----------------------------
def test_write_notification(client, api_token, random_content): def test_write_notification(client, api_token, random_content):
resp = client.post( resp = client.post(
@@ -80,6 +84,7 @@ def test_write_notification(client, api_token, random_content):
assert resp.status_code == 200 assert resp.status_code == 200
assert resp.json.get("success") is True assert resp.json.get("success") is True
def test_get_unread_notifications(client, api_token, random_content): def test_get_unread_notifications(client, api_token, random_content):
client.post("/messaging/in-app/write", json={"content": random_content}, headers=auth_headers(api_token)) client.post("/messaging/in-app/write", json={"content": random_content}, headers=auth_headers(api_token))
resp = client.get("/messaging/in-app/unread", headers=auth_headers(api_token)) resp = client.get("/messaging/in-app/unread", headers=auth_headers(api_token))
@@ -87,22 +92,26 @@ def test_get_unread_notifications(client, api_token, random_content):
notifications = resp.json notifications = resp.json
assert any(n["content"] == random_content for n in notifications) assert any(n["content"] == random_content for n in notifications)
def test_mark_all_notifications_read(client, api_token, random_content): def test_mark_all_notifications_read(client, api_token, random_content):
client.post("/messaging/in-app/write", json={"content": random_content}, headers=auth_headers(api_token)) client.post("/messaging/in-app/write", json={"content": random_content}, headers=auth_headers(api_token))
resp = client.post("/messaging/in-app/read/all", headers=auth_headers(api_token)) resp = client.post("/messaging/in-app/read/all", headers=auth_headers(api_token))
assert resp.status_code == 200 assert resp.status_code == 200
assert resp.json.get("success") is True assert resp.json.get("success") is True
def test_mark_single_notification_read(client, api_token, notification_guid): def test_mark_single_notification_read(client, api_token, notification_guid):
resp = client.post(f"/messaging/in-app/read/{notification_guid}", headers=auth_headers(api_token)) resp = client.post(f"/messaging/in-app/read/{notification_guid}", headers=auth_headers(api_token))
assert resp.status_code == 200 assert resp.status_code == 200
assert resp.json.get("success") is True assert resp.json.get("success") is True
def test_delete_single_notification(client, api_token, notification_guid): def test_delete_single_notification(client, api_token, notification_guid):
resp = client.delete(f"/messaging/in-app/delete/{notification_guid}", headers=auth_headers(api_token)) resp = client.delete(f"/messaging/in-app/delete/{notification_guid}", headers=auth_headers(api_token))
assert resp.status_code == 200 assert resp.status_code == 200
assert resp.json.get("success") is True assert resp.json.get("success") is True
def test_delete_all_notifications(client, api_token, random_content): def test_delete_all_notifications(client, api_token, random_content):
# Add a notification first # Add a notification first
client.post("/messaging/in-app/write", json={"content": random_content}, headers=auth_headers(api_token)) client.post("/messaging/in-app/write", json={"content": random_content}, headers=auth_headers(api_token))

View File

@@ -1,32 +1,31 @@
import sys import sys
import pathlib
import sqlite3
import base64
import random import random
import string
import uuid
import os import os
import pytest import pytest
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app') INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"]) sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from helper import get_setting_value from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
from api_server.api_server_start import app from api_server.api_server_start import app # noqa: E402 [flake8 lint suppression]
@pytest.fixture(scope="session") @pytest.fixture(scope="session")
def api_token(): def api_token():
return get_setting_value("API_TOKEN") return get_setting_value("API_TOKEN")
@pytest.fixture @pytest.fixture
def client(): def client():
with app.test_client() as client: with app.test_client() as client:
yield client yield client
@pytest.fixture @pytest.fixture
def test_mac(): def test_mac():
# Generate a unique MAC for each test run # Generate a unique MAC for each test run
return "AA:BB:CC:" + ":".join(f"{random.randint(0,255):02X}" for _ in range(3)) return "AA:BB:CC:" + ":".join(f"{random.randint(0, 255):02X}" for _ in range(3))
def auth_headers(token): def auth_headers(token):
return {"Authorization": f"Bearer {token}"} return {"Authorization": f"Bearer {token}"}
@@ -40,7 +39,8 @@ def create_dummy(client, api_token, test_mac):
"devType": "Router", "devType": "Router",
"devVendor": "TestVendor", "devVendor": "TestVendor",
} }
resp = client.post(f"/device/{test_mac}", json=payload, headers=auth_headers(api_token)) client.post(f"/device/{test_mac}", json=payload, headers=auth_headers(api_token))
def test_wakeonlan_device(client, api_token, test_mac): def test_wakeonlan_device(client, api_token, test_mac):
# 1. Ensure at least one device exists # 1. Ensure at least one device exists
@@ -73,6 +73,7 @@ def test_wakeonlan_device(client, api_token, test_mac):
assert data.get("success") is True assert data.get("success") is True
assert "WOL packet sent" in data.get("message", "") assert "WOL packet sent" in data.get("message", "")
def test_speedtest_endpoint(client, api_token): def test_speedtest_endpoint(client, api_token):
# 1. Call the speedtest endpoint # 1. Call the speedtest endpoint
resp = client.get("/nettools/speedtest", headers=auth_headers(api_token)) resp = client.get("/nettools/speedtest", headers=auth_headers(api_token))
@@ -92,7 +93,8 @@ def test_speedtest_endpoint(client, api_token):
assert isinstance(data["output"], list) assert isinstance(data["output"], list)
# Optionally check that output lines are strings # Optionally check that output lines are strings
assert all(isinstance(line, str) for line in data["output"]) assert all(isinstance(line, str) for line in data["output"])
def test_traceroute_device(client, api_token, test_mac): def test_traceroute_device(client, api_token, test_mac):
# 1. Ensure at least one device exists # 1. Ensure at least one device exists
create_dummy(client, api_token, test_mac) create_dummy(client, api_token, test_mac)
@@ -127,6 +129,7 @@ def test_traceroute_device(client, api_token, test_mac):
assert "output" in data assert "output" in data
assert isinstance(data["output"], str) assert isinstance(data["output"], str)
@pytest.mark.parametrize("ip,expected_status", [ @pytest.mark.parametrize("ip,expected_status", [
("8.8.8.8", 200), ("8.8.8.8", 200),
("256.256.256.256", 400), # Invalid IP ("256.256.256.256", 400), # Invalid IP
@@ -147,6 +150,7 @@ def test_nslookup_endpoint(client, api_token, ip, expected_status):
assert data.get("success") is False assert data.get("success") is False
assert "error" in data assert "error" in data
@pytest.mark.parametrize("ip,mode,expected_status", [ @pytest.mark.parametrize("ip,mode,expected_status", [
("127.0.0.1", "fast", 200), ("127.0.0.1", "fast", 200),
pytest.param("127.0.0.1", "normal", 200, marks=pytest.mark.feature_complete), pytest.param("127.0.0.1", "normal", 200, marks=pytest.mark.feature_complete),
@@ -172,6 +176,7 @@ def test_nmap_endpoint(client, api_token, ip, mode, expected_status):
assert data.get("success") is False assert data.get("success") is False
assert "error" in data assert "error" in data
def test_nslookup_unauthorized(client): def test_nslookup_unauthorized(client):
# No auth headers # No auth headers
resp = client.post("/nettools/nslookup", json={"devLastIP": "8.8.8.8"}) resp = client.post("/nettools/nslookup", json={"devLastIP": "8.8.8.8"})
@@ -180,6 +185,7 @@ def test_nslookup_unauthorized(client):
assert data.get("success") is False assert data.get("success") is False
assert data.get("error") == "Forbidden" assert data.get("error") == "Forbidden"
def test_nmap_unauthorized(client): def test_nmap_unauthorized(client):
# No auth headers # No auth headers
resp = client.post("/nettools/nmap", json={"scan": "127.0.0.1", "mode": "fast"}) resp = client.post("/nettools/nmap", json={"scan": "127.0.0.1", "mode": "fast"})
@@ -201,4 +207,4 @@ def test_internet_info_endpoint(client, api_token):
# Handle errors, e.g., curl failure # Handle errors, e.g., curl failure
assert data.get("success") is False assert data.get("success") is False
assert "error" in data assert "error" in data
assert "details" in data assert "details" in data

View File

@@ -1,9 +1,5 @@
import sys import sys
import pathlib
import sqlite3
import random import random
import string
import uuid
import os import os
import pytest import pytest
from datetime import datetime, timedelta from datetime import datetime, timedelta
@@ -11,31 +7,35 @@ from datetime import datetime, timedelta
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app') INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"]) sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from helper import get_setting_value from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
from utils.datetime_utils import timeNowTZ, timeNowDB from utils.datetime_utils import timeNowTZ, timeNowDB # noqa: E402 [flake8 lint suppression]
from api_server.api_server_start import app from api_server.api_server_start import app # noqa: E402 [flake8 lint suppression]
@pytest.fixture(scope="session") @pytest.fixture(scope="session")
def api_token(): def api_token():
return get_setting_value("API_TOKEN") return get_setting_value("API_TOKEN")
@pytest.fixture @pytest.fixture
def client(): def client():
with app.test_client() as client: with app.test_client() as client:
yield client yield client
@pytest.fixture @pytest.fixture
def test_mac(): def test_mac():
# Generate a unique MAC for each test run # Generate a unique MAC for each test run
return "AA:BB:CC:" + ":".join(f"{random.randint(0,255):02X}" for _ in range(3)) return "AA:BB:CC:" + ":".join(f"{random.randint(0, 255):02X}" for _ in range(3))
def auth_headers(token): def auth_headers(token):
return {"Authorization": f"Bearer {token}"} return {"Authorization": f"Bearer {token}"}
def test_create_device(client, api_token, test_mac): def test_create_device(client, api_token, test_mac):
payload = { payload = {
"createNew": True, "createNew": True,
"devType": "Test Device",
"devOwner": "Unit Test", "devOwner": "Unit Test",
"devType": "Router", "devType": "Router",
"devVendor": "TestVendor", "devVendor": "TestVendor",
@@ -129,7 +129,7 @@ def test_device_session_events(client, api_token, test_mac):
# 2. Fetch session events with default type ('all') and period ('7 days') # 2. Fetch session events with default type ('all') and period ('7 days')
resp = client.get( resp = client.get(
f"/sessions/session-events?type=all&period=7 days", "/sessions/session-events?type=all&period=7 days",
headers=auth_headers(api_token) headers=auth_headers(api_token)
) )
assert resp.status_code == 200 assert resp.status_code == 200
@@ -159,6 +159,7 @@ def test_device_session_events(client, api_token, test_mac):
sessions = resp_sessions.json["data"] sessions = resp_sessions.json["data"]
assert isinstance(sessions, list) assert isinstance(sessions, list)
# ----------------------------- # -----------------------------
def test_delete_session(client, api_token, test_mac): def test_delete_session(client, api_token, test_mac):
# First create session # First create session
@@ -180,15 +181,12 @@ def test_delete_session(client, api_token, test_mac):
assert not any(ses["ses_MAC"] == test_mac for ses in sessions) assert not any(ses["ses_MAC"] == test_mac for ses in sessions)
def test_get_sessions_calendar(client, api_token, test_mac): def test_get_sessions_calendar(client, api_token, test_mac):
""" """
Test the /sessions/calendar endpoint. Test the /sessions/calendar endpoint.
Creates session and ensures the calendar output is correct. Creates session and ensures the calendar output is correct.
Cleans up test sessions after test. Cleans up test sessions after test.
""" """
# --- Setup: create two sessions for the test MAC --- # --- Setup: create two sessions for the test MAC ---
now = timeNowTZ() now = timeNowTZ()
start1 = (now - timedelta(days=2)).isoformat(timespec="seconds") start1 = (now - timedelta(days=2)).isoformat(timespec="seconds")
@@ -256,4 +254,4 @@ def test_get_sessions_calendar(client, api_token, test_mac):
assert "<still connected>" in ses["tooltip"], f"End is None but session not marked as still connected: {ses}" assert "<still connected>" in ses["tooltip"], f"End is None but session not marked as still connected: {ses}"
# --- Cleanup: delete all test sessions for this MAC --- # --- Cleanup: delete all test sessions for this MAC ---
client.delete(f"/sessions/delete?mac={test_mac}", headers=auth_headers(api_token)) client.delete(f"/sessions/delete?mac={test_mac}", headers=auth_headers(api_token))

View File

@@ -1,36 +1,36 @@
import sys import sys
import pathlib
import sqlite3
import random import random
import string
import uuid
import os import os
import pytest import pytest
from datetime import datetime, timedelta
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app') INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"]) sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from helper import get_setting_value from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
from api_server.api_server_start import app from api_server.api_server_start import app # noqa: E402 [flake8 lint suppression]
@pytest.fixture(scope="session") @pytest.fixture(scope="session")
def api_token(): def api_token():
return get_setting_value("API_TOKEN") return get_setting_value("API_TOKEN")
@pytest.fixture @pytest.fixture
def client(): def client():
with app.test_client() as client: with app.test_client() as client:
yield client yield client
@pytest.fixture @pytest.fixture
def test_mac(): def test_mac():
# Generate a unique MAC for each test run # Generate a unique MAC for each test run
return "AA:BB:CC:" + ":".join(f"{random.randint(0,255):02X}" for _ in range(3)) return "AA:BB:CC:" + ":".join(f"{random.randint(0, 255):02X}" for _ in range(3))
def auth_headers(token): def auth_headers(token):
return {"Authorization": f"Bearer {token}"} return {"Authorization": f"Bearer {token}"}
def test_get_setting_unauthorized(client): def test_get_setting_unauthorized(client):
resp = client.get("/settings/API_TOKEN") # no auth header resp = client.get("/settings/API_TOKEN") # no auth header
assert resp.status_code == 403 assert resp.status_code == 403

View File

@@ -6,16 +6,17 @@ Tests the fix for Issue #1210 - compound conditions with multiple AND/OR clauses
import sys import sys
import pytest import pytest
import os
from unittest.mock import MagicMock from unittest.mock import MagicMock
# Mock the logger module before importing SafeConditionBuilder # Mock the logger module before importing SafeConditionBuilder
sys.modules['logger'] = MagicMock() sys.modules['logger'] = MagicMock()
# Add parent directory to path for imports # Add parent directory to path for imports
import os
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..')) sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))
from server.db.sql_safe_builder import SafeConditionBuilder from server.db.sql_safe_builder import SafeConditionBuilder # noqa: E402 [flake8 lint suppression]
@pytest.fixture @pytest.fixture
@@ -100,6 +101,7 @@ def test_multiple_or_clauses(builder):
assert 'Device2' in param_values assert 'Device2' in param_values
assert 'Device3' in param_values assert 'Device3' in param_values
def test_mixed_and_or_clauses(builder): def test_mixed_and_or_clauses(builder):
"""Test mixed AND/OR logical operators.""" """Test mixed AND/OR logical operators."""
condition = "AND devName = 'Device1' OR devName = 'Device2' AND devFavorite = '1'" condition = "AND devName = 'Device1' OR devName = 'Device2' AND devFavorite = '1'"

View File

@@ -137,7 +137,7 @@ def test_unicode_support(builder, unicode_str):
@pytest.mark.parametrize("case", [ @pytest.mark.parametrize("case", [
"", " ", "AND devName = ''", "AND devName = 'a'", "AND devName = '" + "x"*500 + "'" "", " ", "AND devName = ''", "AND devName = 'a'", "AND devName = '" + "x" * 500 + "'"
]) ])
def test_edge_cases(builder, case): def test_edge_cases(builder, case):
try: try:

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python3 # !/usr/bin/env python3
""" """
Comprehensive SQL Injection Prevention Tests for NetAlertX Comprehensive SQL Injection Prevention Tests for NetAlertX
@@ -15,7 +15,7 @@ sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', 'server'))
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', 'server', 'db')) sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', 'server', 'db'))
# Now import our module # Now import our module
from sql_safe_builder import SafeConditionBuilder from sql_safe_builder import SafeConditionBuilder # noqa: E402 [flake8 lint suppression]
@pytest.fixture @pytest.fixture
@@ -28,7 +28,7 @@ def test_sql_injection_attempt_single_quote(builder):
"""Test that single quote injection attempts are blocked.""" """Test that single quote injection attempts are blocked."""
malicious_input = "'; DROP TABLE users; --" malicious_input = "'; DROP TABLE users; --"
condition, params = builder.get_safe_condition_legacy(malicious_input) condition, params = builder.get_safe_condition_legacy(malicious_input)
# Should return empty condition when invalid # Should return empty condition when invalid
assert condition == "" assert condition == ""
assert params == {} assert params == {}
@@ -38,7 +38,7 @@ def test_sql_injection_attempt_union(builder):
"""Test that UNION injection attempts are blocked.""" """Test that UNION injection attempts are blocked."""
malicious_input = "1' UNION SELECT * FROM passwords --" malicious_input = "1' UNION SELECT * FROM passwords --"
condition, params = builder.get_safe_condition_legacy(malicious_input) condition, params = builder.get_safe_condition_legacy(malicious_input)
# Should return empty condition when invalid # Should return empty condition when invalid
assert condition == "" assert condition == ""
assert params == {} assert params == {}
@@ -48,7 +48,7 @@ def test_sql_injection_attempt_or_true(builder):
"""Test that OR 1=1 injection attempts are blocked.""" """Test that OR 1=1 injection attempts are blocked."""
malicious_input = "' OR '1'='1" malicious_input = "' OR '1'='1"
condition, params = builder.get_safe_condition_legacy(malicious_input) condition, params = builder.get_safe_condition_legacy(malicious_input)
# Should return empty condition when invalid # Should return empty condition when invalid
assert condition == "" assert condition == ""
assert params == {} assert params == {}
@@ -58,7 +58,7 @@ def test_valid_simple_condition(builder):
"""Test that valid simple conditions are handled correctly.""" """Test that valid simple conditions are handled correctly."""
valid_input = "AND devName = 'Test Device'" valid_input = "AND devName = 'Test Device'"
condition, params = builder.get_safe_condition_legacy(valid_input) condition, params = builder.get_safe_condition_legacy(valid_input)
# Should create parameterized query # Should create parameterized query
assert "AND devName = :" in condition assert "AND devName = :" in condition
assert len(params) == 1 assert len(params) == 1
@@ -69,7 +69,7 @@ def test_empty_condition(builder):
"""Test that empty conditions are handled safely.""" """Test that empty conditions are handled safely."""
empty_input = "" empty_input = ""
condition, params = builder.get_safe_condition_legacy(empty_input) condition, params = builder.get_safe_condition_legacy(empty_input)
# Should return empty condition # Should return empty condition
assert condition == "" assert condition == ""
assert params == {} assert params == {}
@@ -79,7 +79,7 @@ def test_whitespace_only_condition(builder):
"""Test that whitespace-only conditions are handled safely.""" """Test that whitespace-only conditions are handled safely."""
whitespace_input = " \n\t " whitespace_input = " \n\t "
condition, params = builder.get_safe_condition_legacy(whitespace_input) condition, params = builder.get_safe_condition_legacy(whitespace_input)
# Should return empty condition # Should return empty condition
assert condition == "" assert condition == ""
assert params == {} assert params == {}
@@ -90,7 +90,7 @@ def test_multiple_conditions_valid(builder):
# Test with a single condition first (our current parser handles single conditions well) # Test with a single condition first (our current parser handles single conditions well)
valid_input = "AND devName = 'Device1'" valid_input = "AND devName = 'Device1'"
condition, params = builder.get_safe_condition_legacy(valid_input) condition, params = builder.get_safe_condition_legacy(valid_input)
# Should create parameterized query # Should create parameterized query
assert "devName = :" in condition assert "devName = :" in condition
assert len(params) == 1 assert len(params) == 1
@@ -101,7 +101,7 @@ def test_disallowed_column_name(builder):
"""Test that non-whitelisted column names are rejected.""" """Test that non-whitelisted column names are rejected."""
invalid_input = "AND malicious_column = 'value'" invalid_input = "AND malicious_column = 'value'"
condition, params = builder.get_safe_condition_legacy(invalid_input) condition, params = builder.get_safe_condition_legacy(invalid_input)
# Should return empty condition when column not in whitelist # Should return empty condition when column not in whitelist
assert condition == "" assert condition == ""
assert params == {} assert params == {}
@@ -111,7 +111,7 @@ def test_disallowed_operator(builder):
"""Test that non-whitelisted operators are rejected.""" """Test that non-whitelisted operators are rejected."""
invalid_input = "AND devName SOUNDS LIKE 'test'" invalid_input = "AND devName SOUNDS LIKE 'test'"
condition, params = builder.get_safe_condition_legacy(invalid_input) condition, params = builder.get_safe_condition_legacy(invalid_input)
# Should return empty condition when operator not allowed # Should return empty condition when operator not allowed
assert condition == "" assert condition == ""
assert params == {} assert params == {}
@@ -121,7 +121,7 @@ def test_nested_select_attempt(builder):
"""Test that nested SELECT attempts are blocked.""" """Test that nested SELECT attempts are blocked."""
malicious_input = "AND devName IN (SELECT password FROM users)" malicious_input = "AND devName IN (SELECT password FROM users)"
condition, params = builder.get_safe_condition_legacy(malicious_input) condition, params = builder.get_safe_condition_legacy(malicious_input)
# Should return empty condition when nested SELECT detected # Should return empty condition when nested SELECT detected
assert condition == "" assert condition == ""
assert params == {} assert params == {}
@@ -131,7 +131,7 @@ def test_hex_encoding_attempt(builder):
"""Test that hex-encoded injection attempts are blocked.""" """Test that hex-encoded injection attempts are blocked."""
malicious_input = "AND 0x44524f50205441424c45" malicious_input = "AND 0x44524f50205441424c45"
condition, params = builder.get_safe_condition_legacy(malicious_input) condition, params = builder.get_safe_condition_legacy(malicious_input)
# Should return empty condition when hex encoding detected # Should return empty condition when hex encoding detected
assert condition == "" assert condition == ""
assert params == {} assert params == {}
@@ -141,7 +141,7 @@ def test_comment_injection_attempt(builder):
"""Test that comment injection attempts are handled.""" """Test that comment injection attempts are handled."""
malicious_input = "AND devName = 'test' /* comment */ --" malicious_input = "AND devName = 'test' /* comment */ --"
condition, params = builder.get_safe_condition_legacy(malicious_input) condition, params = builder.get_safe_condition_legacy(malicious_input)
# Comments should be stripped and condition validated # Comments should be stripped and condition validated
if condition: if condition:
assert "/*" not in condition assert "/*" not in condition
@@ -152,7 +152,7 @@ def test_special_placeholder_replacement(builder):
"""Test that {s-quote} placeholder is safely replaced.""" """Test that {s-quote} placeholder is safely replaced."""
input_with_placeholder = "AND devName = {s-quote}Test{s-quote}" input_with_placeholder = "AND devName = {s-quote}Test{s-quote}"
condition, params = builder.get_safe_condition_legacy(input_with_placeholder) condition, params = builder.get_safe_condition_legacy(input_with_placeholder)
# Should handle placeholder safely # Should handle placeholder safely
if condition: if condition:
assert "{s-quote}" not in condition assert "{s-quote}" not in condition
@@ -163,7 +163,7 @@ def test_null_byte_injection(builder):
"""Test that null byte injection attempts are blocked.""" """Test that null byte injection attempts are blocked."""
malicious_input = "AND devName = 'test\x00' DROP TABLE --" malicious_input = "AND devName = 'test\x00' DROP TABLE --"
condition, params = builder.get_safe_condition_legacy(malicious_input) condition, params = builder.get_safe_condition_legacy(malicious_input)
# Null bytes should be sanitized # Null bytes should be sanitized
if condition: if condition:
assert "\x00" not in condition assert "\x00" not in condition
@@ -178,7 +178,7 @@ def test_build_condition_with_allowed_values(builder):
{"column": "devName", "operator": "LIKE", "value": "%test%"} {"column": "devName", "operator": "LIKE", "value": "%test%"}
] ]
condition, params = builder.build_condition(conditions, "AND") condition, params = builder.build_condition(conditions, "AND")
# Should create valid parameterized condition # Should create valid parameterized condition
assert "eve_EventType = :" in condition assert "eve_EventType = :" in condition
assert "devName LIKE :" in condition assert "devName LIKE :" in condition
@@ -191,7 +191,7 @@ def test_build_condition_with_invalid_column(builder):
{"column": "invalid_column", "operator": "=", "value": "test"} {"column": "invalid_column", "operator": "=", "value": "test"}
] ]
condition, params = builder.build_condition(conditions) condition, params = builder.build_condition(conditions)
# Should return empty when invalid column # Should return empty when invalid column
assert condition == "" assert condition == ""
assert params == {} assert params == {}
@@ -204,7 +204,7 @@ def test_case_variations_injection(builder):
"oR 1=1", "oR 1=1",
"UnIoN SeLeCt * FrOm users" "UnIoN SeLeCt * FrOm users"
] ]
for malicious_input in malicious_inputs: for malicious_input in malicious_inputs:
condition, params = builder.get_safe_condition_legacy(malicious_input) condition, params = builder.get_safe_condition_legacy(malicious_input)
# Should handle case variations safely # Should handle case variations safely
@@ -217,7 +217,7 @@ def test_time_based_injection_attempt(builder):
"""Test that time-based injection attempts are blocked.""" """Test that time-based injection attempts are blocked."""
malicious_input = "AND IF(1=1, SLEEP(5), 0)" malicious_input = "AND IF(1=1, SLEEP(5), 0)"
condition, params = builder.get_safe_condition_legacy(malicious_input) condition, params = builder.get_safe_condition_legacy(malicious_input)
# Should return empty condition when SQL functions detected # Should return empty condition when SQL functions detected
assert condition == "" assert condition == ""
assert params == {} assert params == {}
@@ -227,7 +227,7 @@ def test_stacked_queries_attempt(builder):
"""Test that stacked query attempts are blocked.""" """Test that stacked query attempts are blocked."""
malicious_input = "'; INSERT INTO admin VALUES ('hacker', 'password'); --" malicious_input = "'; INSERT INTO admin VALUES ('hacker', 'password'); --"
condition, params = builder.get_safe_condition_legacy(malicious_input) condition, params = builder.get_safe_condition_legacy(malicious_input)
# Should return empty condition when semicolon detected # Should return empty condition when semicolon detected
assert condition == "" assert condition == ""
assert params == {} assert params == {}

View File

@@ -13,16 +13,15 @@ import unittest
import sqlite3 import sqlite3
import tempfile import tempfile
import os import os
from unittest.mock import Mock, patch, MagicMock from unittest.mock import Mock, patch
# Add the server directory to the path for imports # Add the server directory to the path for imports
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app') INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/server"]) sys.path.extend([f"{INSTALL_PATH}/server"])
sys.path.append('/home/dell/coding/bash/10x-agentic-setup/netalertx-sql-fix/server') sys.path.append('/home/dell/coding/bash/10x-agentic-setup/netalertx-sql-fix/server')
from db.sql_safe_builder import SafeConditionBuilder, create_safe_condition_builder from db.sql_safe_builder import SafeConditionBuilder # noqa: E402 [flake8 lint suppression]
from database import DB from messaging.reporting import get_notifications # noqa: E402 [flake8 lint suppression]
from messaging.reporting import get_notifications
class TestSafeConditionBuilder(unittest.TestCase): class TestSafeConditionBuilder(unittest.TestCase):
@@ -83,7 +82,7 @@ class TestSafeConditionBuilder(unittest.TestCase):
def test_build_simple_condition_valid(self): def test_build_simple_condition_valid(self):
"""Test building valid simple conditions.""" """Test building valid simple conditions."""
sql, params = self.builder._build_simple_condition('AND', 'devName', '=', 'TestDevice') sql, params = self.builder._build_simple_condition('AND', 'devName', '=', 'TestDevice')
self.assertIn('AND devName = :param_', sql) self.assertIn('AND devName = :param_', sql)
self.assertEqual(len(params), 1) self.assertEqual(len(params), 1)
self.assertIn('TestDevice', params.values()) self.assertIn('TestDevice', params.values())
@@ -92,20 +91,20 @@ class TestSafeConditionBuilder(unittest.TestCase):
"""Test that invalid column names are rejected.""" """Test that invalid column names are rejected."""
with self.assertRaises(ValueError) as context: with self.assertRaises(ValueError) as context:
self.builder._build_simple_condition('AND', 'invalid_column', '=', 'value') self.builder._build_simple_condition('AND', 'invalid_column', '=', 'value')
self.assertIn('Invalid column name', str(context.exception)) self.assertIn('Invalid column name', str(context.exception))
def test_build_simple_condition_invalid_operator(self): def test_build_simple_condition_invalid_operator(self):
"""Test that invalid operators are rejected.""" """Test that invalid operators are rejected."""
with self.assertRaises(ValueError) as context: with self.assertRaises(ValueError) as context:
self.builder._build_simple_condition('AND', 'devName', 'UNION', 'value') self.builder._build_simple_condition('AND', 'devName', 'UNION', 'value')
self.assertIn('Invalid operator', str(context.exception)) self.assertIn('Invalid operator', str(context.exception))
def test_build_in_condition_valid(self): def test_build_in_condition_valid(self):
"""Test building valid IN conditions.""" """Test building valid IN conditions."""
sql, params = self.builder._build_in_condition('AND', 'eve_EventType', 'IN', "'Connected', 'Disconnected'") sql, params = self.builder._build_in_condition('AND', 'eve_EventType', 'IN', "'Connected', 'Disconnected'")
self.assertIn('AND eve_EventType IN', sql) self.assertIn('AND eve_EventType IN', sql)
self.assertEqual(len(params), 2) self.assertEqual(len(params), 2)
self.assertIn('Connected', params.values()) self.assertIn('Connected', params.values())
@@ -114,7 +113,7 @@ class TestSafeConditionBuilder(unittest.TestCase):
def test_build_null_condition(self): def test_build_null_condition(self):
"""Test building NULL check conditions.""" """Test building NULL check conditions."""
sql, params = self.builder._build_null_condition('AND', 'devComments', 'IS NULL') sql, params = self.builder._build_null_condition('AND', 'devComments', 'IS NULL')
self.assertEqual(sql, 'AND devComments IS NULL') self.assertEqual(sql, 'AND devComments IS NULL')
self.assertEqual(len(params), 0) self.assertEqual(len(params), 0)
@@ -154,7 +153,7 @@ class TestSafeConditionBuilder(unittest.TestCase):
def test_device_name_filter(self): def test_device_name_filter(self):
"""Test the device name filter helper method.""" """Test the device name filter helper method."""
sql, params = self.builder.build_device_name_filter("TestDevice") sql, params = self.builder.build_device_name_filter("TestDevice")
self.assertIn('AND devName = :device_name_', sql) self.assertIn('AND devName = :device_name_', sql)
self.assertIn('TestDevice', params.values()) self.assertIn('TestDevice', params.values())
@@ -162,14 +161,13 @@ class TestSafeConditionBuilder(unittest.TestCase):
"""Test the event type filter helper method.""" """Test the event type filter helper method."""
event_types = ['Connected', 'Disconnected'] event_types = ['Connected', 'Disconnected']
sql, params = self.builder.build_event_type_filter(event_types) sql, params = self.builder.build_event_type_filter(event_types)
self.assertIn('AND eve_EventType IN', sql) self.assertIn('AND eve_EventType IN', sql)
self.assertEqual(len(params), 2) self.assertEqual(len(params), 2)
self.assertIn('Connected', params.values()) self.assertIn('Connected', params.values())
self.assertIn('Disconnected', params.values()) self.assertIn('Disconnected', params.values())
class TestDatabaseParameterSupport(unittest.TestCase): class TestDatabaseParameterSupport(unittest.TestCase):
"""Test that database layer supports parameterized queries.""" """Test that database layer supports parameterized queries."""
@@ -177,7 +175,7 @@ class TestDatabaseParameterSupport(unittest.TestCase):
"""Set up test database.""" """Set up test database."""
self.temp_db = tempfile.NamedTemporaryFile(delete=False, suffix='.db') self.temp_db = tempfile.NamedTemporaryFile(delete=False, suffix='.db')
self.temp_db.close() self.temp_db.close()
# Create test database # Create test database
self.conn = sqlite3.connect(self.temp_db.name) self.conn = sqlite3.connect(self.temp_db.name)
self.conn.execute('''CREATE TABLE test_table ( self.conn.execute('''CREATE TABLE test_table (
@@ -197,23 +195,23 @@ class TestDatabaseParameterSupport(unittest.TestCase):
def test_parameterized_query_execution(self): def test_parameterized_query_execution(self):
"""Test that parameterized queries work correctly.""" """Test that parameterized queries work correctly."""
cursor = self.conn.cursor() cursor = self.conn.cursor()
# Test named parameters # Test named parameters
cursor.execute("SELECT * FROM test_table WHERE name = :name", {'name': 'test1'}) cursor.execute("SELECT * FROM test_table WHERE name = :name", {'name': 'test1'})
results = cursor.fetchall() results = cursor.fetchall()
self.assertEqual(len(results), 1) self.assertEqual(len(results), 1)
self.assertEqual(results[0][1], 'test1') self.assertEqual(results[0][1], 'test1')
def test_parameterized_query_prevents_injection(self): def test_parameterized_query_prevents_injection(self):
"""Test that parameterized queries prevent SQL injection.""" """Test that parameterized queries prevent SQL injection."""
cursor = self.conn.cursor() cursor = self.conn.cursor()
# This should not cause SQL injection # This should not cause SQL injection
malicious_input = "'; DROP TABLE test_table; --" malicious_input = "'; DROP TABLE test_table; --"
cursor.execute("SELECT * FROM test_table WHERE name = :name", {'name': malicious_input}) cursor.execute("SELECT * FROM test_table WHERE name = :name", {'name': malicious_input})
results = cursor.fetchall() # results = cursor.fetchall()
# The table should still exist and be queryable # The table should still exist and be queryable
cursor.execute("SELECT COUNT(*) FROM test_table") cursor.execute("SELECT COUNT(*) FROM test_table")
count = cursor.fetchone()[0] count = cursor.fetchone()[0]
@@ -228,7 +226,7 @@ class TestReportingSecurityIntegration(unittest.TestCase):
self.mock_db = Mock() self.mock_db = Mock()
self.mock_db.sql = Mock() self.mock_db.sql = Mock()
self.mock_db.get_table_as_json = Mock() self.mock_db.get_table_as_json = Mock()
# Mock successful JSON response # Mock successful JSON response
mock_json_obj = Mock() mock_json_obj = Mock()
mock_json_obj.columnNames = ['MAC', 'Datetime', 'IP', 'Event Type', 'Device name', 'Comments'] mock_json_obj.columnNames = ['MAC', 'Datetime', 'IP', 'Event Type', 'Device name', 'Comments']
@@ -245,7 +243,7 @@ class TestReportingSecurityIntegration(unittest.TestCase):
}.get(key, '') }.get(key, '')
# Call the function # Call the function
result = get_notifications(self.mock_db) get_notifications(self.mock_db)
# Verify that get_table_as_json was called with parameters # Verify that get_table_as_json was called with parameters
self.mock_db.get_table_as_json.assert_called() self.mock_db.get_table_as_json.assert_called()
@@ -265,7 +263,6 @@ class TestReportingSecurityIntegration(unittest.TestCase):
# Ensure the parameter dict has the correct value (using actual param name) # Ensure the parameter dict has the correct value (using actual param name)
self.assertEqual(list(params.values())[0], "TestDevice") self.assertEqual(list(params.values())[0], "TestDevice")
@patch('messaging.reporting.get_setting_value') @patch('messaging.reporting.get_setting_value')
def test_events_section_security(self, mock_get_setting): def test_events_section_security(self, mock_get_setting):
"""Test that events section uses safe SQL building.""" """Test that events section uses safe SQL building."""
@@ -276,7 +273,7 @@ class TestReportingSecurityIntegration(unittest.TestCase):
}.get(key, '') }.get(key, '')
# Call the function # Call the function
result = get_notifications(self.mock_db) get_notifications(self.mock_db)
# Verify that get_table_as_json was called with parameters # Verify that get_table_as_json was called with parameters
self.mock_db.get_table_as_json.assert_called() self.mock_db.get_table_as_json.assert_called()
@@ -291,7 +288,7 @@ class TestReportingSecurityIntegration(unittest.TestCase):
}.get(key, '') }.get(key, '')
# Call the function - should not raise an exception # Call the function - should not raise an exception
result = get_notifications(self.mock_db) get_notifications(self.mock_db)
# Should still call get_table_as_json (with safe fallback query) # Should still call get_table_as_json (with safe fallback query)
self.mock_db.get_table_as_json.assert_called() self.mock_db.get_table_as_json.assert_called()
@@ -306,7 +303,7 @@ class TestReportingSecurityIntegration(unittest.TestCase):
}.get(key, '') }.get(key, '')
# Call the function # Call the function
result = get_notifications(self.mock_db) get_notifications(self.mock_db)
# Should call get_table_as_json # Should call get_table_as_json
self.mock_db.get_table_as_json.assert_called() self.mock_db.get_table_as_json.assert_called()
@@ -322,12 +319,12 @@ class TestSecurityBenchmarks(unittest.TestCase):
def test_performance_simple_condition(self): def test_performance_simple_condition(self):
"""Test performance of simple condition building.""" """Test performance of simple condition building."""
import time import time
start_time = time.time() start_time = time.time()
for _ in range(1000): for _ in range(1000):
sql, params = self.builder.build_safe_condition("AND devName = 'TestDevice'") sql, params = self.builder.build_safe_condition("AND devName = 'TestDevice'")
end_time = time.time() end_time = time.time()
execution_time = end_time - start_time execution_time = end_time - start_time
self.assertLess(execution_time, 1.0, "Simple condition building should be fast") self.assertLess(execution_time, 1.0, "Simple condition building should be fast")
@@ -339,7 +336,7 @@ class TestSecurityBenchmarks(unittest.TestCase):
self.skipTest("psutil not available") self.skipTest("psutil not available")
return return
import os import os
process = psutil.Process(os.getpid()) process = psutil.Process(os.getpid())
initial_memory = process.memory_info().rss initial_memory = process.memory_info().rss
@@ -350,7 +347,7 @@ class TestSecurityBenchmarks(unittest.TestCase):
final_memory = process.memory_info().rss final_memory = process.memory_info().rss
memory_increase = final_memory - initial_memory memory_increase = final_memory - initial_memory
# Memory increase should be reasonable (less than 10MB) # Memory increase should be reasonable (less than 10MB)
self.assertLess(memory_increase, 10 * 1024 * 1024, "Memory usage should be reasonable") self.assertLess(memory_increase, 10 * 1024 * 1024, "Memory usage should be reasonable")
@@ -376,4 +373,4 @@ class TestSecurityBenchmarks(unittest.TestCase):
if __name__ == '__main__': if __name__ == '__main__':
# Run the test suite # Run the test suite
unittest.main(verbosity=2) unittest.main(verbosity=2)

Some files were not shown because too many files have changed in this diff Show More