BE: linting fixes

Signed-off-by: jokob-sk <jokob.sk@gmail.com>
This commit is contained in:
jokob-sk
2025-11-22 13:14:06 +11:00
parent f0abd500d9
commit 5c14b34a8b
104 changed files with 2163 additions and 2199 deletions

View File

@@ -1,6 +1,6 @@
import json
import os
import sys
def merge_translations(main_file, other_files):
# Load main file
@@ -30,10 +30,14 @@ def merge_translations(main_file, other_files):
json.dump(data, f, indent=4, ensure_ascii=False)
f.truncate()
if __name__ == "__main__":
    # Merge every secondary translation file into the primary one.
    current_path = os.path.dirname(os.path.abspath(__file__))
    # language codes can be found here: http://www.lingoes.net/en/translator/langcode.htm
    # "en_us.json" has to be first! (it is the merge target for all others)
    json_files = ["en_us.json", "ar_ar.json", "ca_ca.json", "cs_cz.json", "de_de.json",
                  "es_es.json", "fa_fa.json", "fr_fr.json", "it_it.json", "ja_jp.json",
                  "nb_no.json", "pl_pl.json", "pt_br.json", "pt_pt.json", "ru_ru.json",
                  "sv_sv.json", "tr_tr.json", "uk_ua.json", "zh_cn.json"]
    # Resolve each file name relative to this script's directory.
    file_paths = [os.path.join(current_path, file) for file in json_files]
    merge_translations(file_paths[0], file_paths[1:])

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python
# !/usr/bin/env python
import os
import sys
@@ -8,12 +8,12 @@ from pytz import timezone
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from const import logPath
from plugin_helper import Plugin_Objects
from logger import mylog, Logger
from helper import get_setting_value
from const import logPath # noqa: E402, E261 [flake8 lint suppression]
from plugin_helper import Plugin_Objects # noqa: E402, E261 [flake8 lint suppression]
from logger import mylog, Logger # noqa: E402, E261 [flake8 lint suppression]
from helper import get_setting_value # noqa: E402, E261 [flake8 lint suppression]
import conf
import conf # noqa: E402, E261 [flake8 lint suppression]
# Make sure the TIMEZONE for logging is correct
conf.tz = timezone(get_setting_value('TIMEZONE'))
@@ -32,7 +32,6 @@ RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
plugin_objects = Plugin_Objects(RESULT_FILE)
def main():
mylog('verbose', [f'[{pluginName}] In script'])
@@ -78,6 +77,7 @@ def main():
return 0
# retrieve data
def get_device_data(some_setting):
@@ -116,5 +116,6 @@ def get_device_data(some_setting):
# Return the data to be detected by the main application
return device_data
if __name__ == '__main__':
main()

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python
# !/usr/bin/env python
# Just a testing library plugin for development purposes
import os
import sys
@@ -11,10 +11,10 @@ INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
# NetAlertX modules
from const import logPath
from plugin_helper import Plugin_Objects
from logger import mylog
from helper import get_setting_value
from const import logPath # noqa: E402 [flake8 lint suppression]
from plugin_helper import Plugin_Objects # noqa: E402 [flake8 lint suppression]
from logger import mylog # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
pluginName = 'TESTONLY'
@@ -28,10 +28,7 @@ plugin_objects = Plugin_Objects(RESULT_FILE)
md5_hash = hashlib.md5()
# globals
def main():
# START
mylog('verbose', [f'[{pluginName}] In script'])
@@ -43,7 +40,6 @@ def main():
# result = cleanDeviceName(str, True)
regexes = get_setting_value('NEWDEV_NAME_CLEANUP_REGEX')
print(regexes)
subnets = get_setting_value('SCAN_SUBNETS')
@@ -57,16 +53,12 @@ def main():
mylog('trace', ["[cleanDeviceName] name after regex : " + str])
mylog('debug', ["[cleanDeviceName] output: " + str])
# SPACE FOR TESTING 🔼
# END
mylog('verbose', [f'[{pluginName}] result "{str}"'])
# -------------INIT---------------------
if __name__ == '__main__':
sys.exit(main())

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python
# !/usr/bin/env python
import json
import subprocess
@@ -9,15 +9,15 @@ import sys
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
import conf
from const import confFileName, logPath
from utils.datetime_utils import timeNowDB
from plugin_helper import Plugin_Objects
from logger import mylog, Logger
from helper import get_setting_value
from models.notification_instance import NotificationInstance
from database import DB
from pytz import timezone
import conf # noqa: E402 [flake8 lint suppression]
from const import confFileName, logPath # noqa: E402 [flake8 lint suppression]
from utils.datetime_utils import timeNowDB # noqa: E402 [flake8 lint suppression]
from plugin_helper import Plugin_Objects # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
from models.notification_instance import NotificationInstance # noqa: E402 [flake8 lint suppression]
from database import DB # noqa: E402 [flake8 lint suppression]
from pytz import timezone # noqa: E402 [flake8 lint suppression]
# Make sure the TIMEZONE for logging is correct
conf.tz = timezone(get_setting_value("TIMEZONE"))
@@ -35,7 +35,7 @@ def main():
mylog("verbose", [f"[{pluginName}](publisher) In script"])
# Check if basic config settings supplied
if check_config() == False:
if check_config() is False:
mylog(
"none",
[
@@ -80,8 +80,7 @@ def main():
# -------------------------------------------------------------------------------
def check_config():
if get_setting_value("APPRISE_HOST") == "" or (
get_setting_value("APPRISE_URL") == ""
and get_setting_value("APPRISE_TAG") == ""
get_setting_value("APPRISE_URL") == "" and get_setting_value("APPRISE_TAG") == ""
):
return False
else:

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python
# !/usr/bin/env python
import os
import sys
import re
@@ -16,15 +16,15 @@ INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
# NetAlertX modules
import conf
from const import confFileName, logPath
from plugin_helper import Plugin_Objects
from utils.datetime_utils import timeNowDB
from logger import mylog, Logger
from helper import get_setting_value, hide_email
from models.notification_instance import NotificationInstance
from database import DB
from pytz import timezone
import conf # noqa: E402 [flake8 lint suppression]
from const import confFileName, logPath # noqa: E402 [flake8 lint suppression]
from plugin_helper import Plugin_Objects # noqa: E402 [flake8 lint suppression]
from utils.datetime_utils import timeNowDB # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value, hide_email # noqa: E402 [flake8 lint suppression]
from models.notification_instance import NotificationInstance # noqa: E402 [flake8 lint suppression]
from database import DB # noqa: E402 [flake8 lint suppression]
from pytz import timezone # noqa: E402 [flake8 lint suppression]
# Make sure the TIMEZONE for logging is correct
conf.tz = timezone(get_setting_value('TIMEZONE'))
@@ -38,13 +38,12 @@ LOG_PATH = logPath + '/plugins'
RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
def main():
mylog('verbose', [f'[{pluginName}](publisher) In script'])
# Check if basic config settings supplied
if check_config() == False:
if check_config() is False:
mylog('none', [f'[{pluginName}] ⚠ ERROR: Publisher notification gateway not set up correctly. Check your {confFileName} {pluginName}_* variables.'])
return
@@ -72,7 +71,6 @@ def main():
# mylog('verbose', [f'[{pluginName}] SMTP_REPORT_TO: ', get_setting_value("SMTP_REPORT_TO")])
# mylog('verbose', [f'[{pluginName}] SMTP_REPORT_FROM: ', get_setting_value("SMTP_REPORT_FROM")])
# Process the new notifications (see the Notifications DB table for structure or check the /php/server/query_json.php?file=table_notifications.json endpoint)
for notification in new_notifications:
@@ -93,8 +91,9 @@ def main():
plugin_objects.write_result_file()
#-------------------------------------------------------------------------------
def check_config ():
# -------------------------------------------------------------------------------
def check_config():
server = get_setting_value('SMTP_SERVER')
report_to = get_setting_value("SMTP_REPORT_TO")
@@ -106,12 +105,19 @@ def check_config ():
else:
return True
#-------------------------------------------------------------------------------
# -------------------------------------------------------------------------------
def send(pHTML, pText):
mylog('debug', [f'[{pluginName}] SMTP_REPORT_TO: {hide_email(str(get_setting_value("SMTP_REPORT_TO")))} SMTP_USER: {hide_email(str(get_setting_value("SMTP_USER")))}'])
subject, from_email, to_email, message_html, message_text = sanitize_email_content(str(get_setting_value("SMTP_SUBJECT")), get_setting_value("SMTP_REPORT_FROM"), get_setting_value("SMTP_REPORT_TO"), pHTML, pText)
subject, from_email, to_email, message_html, message_text = sanitize_email_content(
str(get_setting_value("SMTP_SUBJECT")),
get_setting_value("SMTP_REPORT_FROM"),
get_setting_value("SMTP_REPORT_TO"),
pHTML,
pText
)
emails = []
@@ -134,8 +140,8 @@ def send(pHTML, pText):
msg['To'] = mail_addr
msg['Date'] = formatdate(localtime=True)
msg.attach (MIMEText (message_text, 'plain'))
msg.attach (MIMEText (message_html, 'html'))
msg.attach(MIMEText(message_text, 'plain'))
msg.attach(MIMEText(message_html, 'html'))
# Set a timeout for the SMTP connection (in seconds)
smtp_timeout = 30
@@ -144,12 +150,12 @@ def send(pHTML, pText):
if get_setting_value("LOG_LEVEL") == 'debug':
send_email(msg,smtp_timeout)
send_email(msg, smtp_timeout)
else:
try:
send_email(msg,smtp_timeout)
send_email(msg, smtp_timeout)
except smtplib.SMTPAuthenticationError as e:
mylog('none', [' ERROR: Couldn\'t connect to the SMTP server (SMTPAuthenticationError)'])
@@ -166,8 +172,9 @@ def send(pHTML, pText):
mylog('none', [' ERROR: Are you sure you need SMTP_FORCE_SSL enabled? Check your SMTP provider docs.'])
mylog('none', [' ERROR: ', str(e)])
# ----------------------------------------------------------------------------------
def send_email(msg,smtp_timeout):
def send_email(msg, smtp_timeout):
# Send mail
if get_setting_value('SMTP_FORCE_SSL'):
mylog('debug', ['SMTP_FORCE_SSL == True so using .SMTP_SSL()'])
@@ -182,10 +189,10 @@ def send_email(msg,smtp_timeout):
mylog('debug', ['SMTP_FORCE_SSL == False so using .SMTP()'])
if get_setting_value("SMTP_PORT") == 0:
mylog('debug', ['SMTP_PORT == 0 so sending .SMTP(SMTP_SERVER)'])
smtp_connection = smtplib.SMTP (get_setting_value('SMTP_SERVER'))
smtp_connection = smtplib.SMTP(get_setting_value('SMTP_SERVER'))
else:
mylog('debug', ['SMTP_PORT == 0 so sending .SMTP(SMTP_SERVER, SMTP_PORT)'])
smtp_connection = smtplib.SMTP (get_setting_value('SMTP_SERVER'), get_setting_value('SMTP_PORT'))
smtp_connection = smtplib.SMTP(get_setting_value('SMTP_SERVER'), get_setting_value('SMTP_PORT'))
mylog('debug', ['Setting SMTP debug level'])
@@ -193,7 +200,7 @@ def send_email(msg,smtp_timeout):
if get_setting_value('LOG_LEVEL') == 'debug':
smtp_connection.set_debuglevel(1)
mylog('debug', [ 'Sending .ehlo()'])
mylog('debug', ['Sending .ehlo()'])
smtp_connection.ehlo()
if not get_setting_value('SMTP_SKIP_TLS'):
@@ -203,12 +210,13 @@ def send_email(msg,smtp_timeout):
smtp_connection.ehlo()
if not get_setting_value('SMTP_SKIP_LOGIN'):
mylog('debug', ['SMTP_SKIP_LOGIN == False so sending .login()'])
smtp_connection.login (get_setting_value('SMTP_USER'), get_setting_value('SMTP_PASS'))
smtp_connection.login(get_setting_value('SMTP_USER'), get_setting_value('SMTP_PASS'))
mylog('debug', ['Sending .sendmail()'])
smtp_connection.sendmail (get_setting_value("SMTP_REPORT_FROM"), get_setting_value("SMTP_REPORT_TO"), msg.as_string())
smtp_connection.sendmail(get_setting_value("SMTP_REPORT_FROM"), get_setting_value("SMTP_REPORT_TO"), msg.as_string())
smtp_connection.quit()
# ----------------------------------------------------------------------------------
def sanitize_email_content(subject, from_email, to_email, message_html, message_text):
# Validate and sanitize subject
@@ -229,6 +237,7 @@ def sanitize_email_content(subject, from_email, to_email, message_html, message_
return subject, from_email, to_email, message_html, message_text
# ----------------------------------------------------------------------------------
if __name__ == '__main__':
sys.exit(main())

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python
# !/usr/bin/env python
import json
import os
@@ -18,15 +18,14 @@ INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
# NetAlertX modules
import conf
from const import confFileName, logPath
from utils.plugin_utils import getPluginObject
from plugin_helper import Plugin_Objects
from logger import mylog, Logger
import conf # noqa: E402 [flake8 lint suppression]
from const import confFileName, logPath # noqa: E402 [flake8 lint suppression]
from utils.plugin_utils import getPluginObject # noqa: E402 [flake8 lint suppression]
from plugin_helper import Plugin_Objects # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value, bytes_to_string, \
sanitize_string, normalize_string
from utils.datetime_utils import timeNowDB
from database import DB, get_device_stats
sanitize_string, normalize_string # noqa: E402 [flake8 lint suppression]
from database import DB, get_device_stats # noqa: E402 [flake8 lint suppression]
# Make sure the TIMEZONE for logging is correct
@@ -303,6 +302,7 @@ def publish_mqtt(mqtt_client, topic, message):
time.sleep(0.1)
return True
# ------------------------------------------------------------------------------
# Create a generic device for overal stats
def create_generic_device(mqtt_client, deviceId, deviceName):
@@ -434,7 +434,6 @@ def mqtt_start(db):
if not mqtt_connected_to_broker:
mqtt_client = mqtt_create_client()
deviceName = get_setting_value('MQTT_DEVICE_NAME')
deviceId = get_setting_value('MQTT_DEVICE_ID')
@@ -449,7 +448,9 @@ def mqtt_start(db):
row = get_device_stats(db)
# Publish (wrap into {} and remove last ',' from above)
publish_mqtt(mqtt_client, f"{topic_root}/sensor/{deviceId}/state",
publish_mqtt(
mqtt_client,
f"{topic_root}/sensor/{deviceId}/state",
{
"online": row[0],
"down": row[1],
@@ -466,11 +467,11 @@ def mqtt_start(db):
# Specific devices processing
# Get all devices
devices = db.read(get_setting_value('MQTT_DEVICES_SQL').replace('{s-quote}',"'"))
devices = db.read(get_setting_value('MQTT_DEVICES_SQL').replace('{s-quote}', "'"))
sec_delay = len(devices) * int(get_setting_value('MQTT_DELAY_SEC'))*5
sec_delay = len(devices) * int(get_setting_value('MQTT_DELAY_SEC')) * 5
mylog('verbose', [f"[{pluginName}] Estimated delay: ", (sec_delay), 's ', '(', round(sec_delay/60, 1), 'min)'])
mylog('verbose', [f"[{pluginName}] Estimated delay: ", (sec_delay), 's ', '(', round(sec_delay / 60, 1), 'min)'])
for device in devices:
@@ -515,7 +516,9 @@ def mqtt_start(db):
# create and update is_present sensor
sensorConfig = create_sensor(mqtt_client, deviceId, devDisplayName, 'binary_sensor', 'is_present', 'wifi', device["devMac"])
publish_mqtt(mqtt_client, sensorConfig.state_topic,
publish_mqtt(
mqtt_client,
sensorConfig.state_topic,
{
"is_present": to_binary_sensor(str(device["devPresentLastScan"]))
}

View File

@@ -1,5 +1,5 @@
#!/usr/bin/env python
# !/usr/bin/env python
import json
import os
@@ -11,15 +11,15 @@ from base64 import b64encode
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
import conf
from const import confFileName, logPath
from plugin_helper import Plugin_Objects, handleEmpty
from utils.datetime_utils import timeNowDB
from logger import mylog, Logger
from helper import get_setting_value
from models.notification_instance import NotificationInstance
from database import DB
from pytz import timezone
import conf # noqa: E402 [flake8 lint suppression]
from const import confFileName, logPath # noqa: E402 [flake8 lint suppression]
from plugin_helper import Plugin_Objects, handleEmpty # noqa: E402 [flake8 lint suppression]
from utils.datetime_utils import timeNowDB # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
from models.notification_instance import NotificationInstance # noqa: E402 [flake8 lint suppression]
from database import DB # noqa: E402 [flake8 lint suppression]
from pytz import timezone # noqa: E402 [flake8 lint suppression]
# Make sure the TIMEZONE for logging is correct
conf.tz = timezone(get_setting_value('TIMEZONE'))
@@ -33,13 +33,12 @@ LOG_PATH = logPath + '/plugins'
RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
def main():
mylog('verbose', [f'[{pluginName}](publisher) In script'])
# Check if basic config settings supplied
if check_config() == False:
if check_config() is False:
mylog('none', [f'[{pluginName}] ⚠ ERROR: Publisher notification gateway not set up correctly. Check your {confFileName} {pluginName}_* variables.'])
return
@@ -77,15 +76,15 @@ def main():
plugin_objects.write_result_file()
#-------------------------------------------------------------------------------
# -------------------------------------------------------------------------------
def check_config():
    """Return True when the NTFY notification gateway is configured.

    Both NTFY_HOST and NTFY_TOPIC must be non-empty strings; an empty
    value for either means notifications cannot be delivered.
    """
    # Return the boolean condition directly instead of the redundant
    # `if cond: return False else: return True` form.
    return (get_setting_value('NTFY_HOST') != ''
            and get_setting_value('NTFY_TOPIC') != '')
#-------------------------------------------------------------------------------
# -------------------------------------------------------------------------------
def send(html, text):
response_text = ''
@@ -100,7 +99,7 @@ def send(html, text):
# prepare request headers
headers = {
"Title": "NetAlertX Notification",
"Actions": "view, Open Dashboard, "+ get_setting_value('REPORT_DASHBOARD_URL'),
"Actions": "view, Open Dashboard, " + get_setting_value('REPORT_DASHBOARD_URL'),
"Priority": get_setting_value('NTFY_PRIORITY'),
"Tags": "warning"
}
@@ -116,11 +115,13 @@ def send(html, text):
# call NTFY service
try:
response = requests.post("{}/{}".format( get_setting_value('NTFY_HOST'),
response = requests.post("{}/{}".format(
get_setting_value('NTFY_HOST'),
get_setting_value('NTFY_TOPIC')),
data = text,
headers = headers,
verify = verify_ssl)
verify = verify_ssl
)
response_status_code = response.status_code
@@ -142,4 +143,3 @@ def send(html, text):
if __name__ == '__main__':
sys.exit(main())

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python3
# !/usr/bin/env python3
import conf
from const import confFileName, logPath
from pytz import timezone
@@ -12,12 +12,12 @@ import requests
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from plugin_helper import Plugin_Objects, handleEmpty # noqa: E402
from logger import mylog, Logger # noqa: E402
from helper import get_setting_value, hide_string # noqa: E402
from utils.datetime_utils import timeNowDB
from models.notification_instance import NotificationInstance # noqa: E402
from database import DB # noqa: E402
from plugin_helper import Plugin_Objects, handleEmpty # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value, hide_string # noqa: E402 [flake8 lint suppression]
from utils.datetime_utils import timeNowDB # noqa: E402 [flake8 lint suppression]
from models.notification_instance import NotificationInstance # noqa: E402 [flake8 lint suppression]
from database import DB # noqa: E402 [flake8 lint suppression]
# Make sure the TIMEZONE for logging is correct
conf.tz = timezone(get_setting_value("TIMEZONE"))

View File

@@ -1,5 +1,5 @@
#!/usr/bin/env python
# !/usr/bin/env python
import json
import os
@@ -10,15 +10,15 @@ import requests
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
import conf
from const import confFileName, logPath
from plugin_helper import Plugin_Objects, handleEmpty
from logger import mylog, Logger
from helper import get_setting_value, hide_string
from utils.datetime_utils import timeNowDB
from models.notification_instance import NotificationInstance
from database import DB
from pytz import timezone
import conf # noqa: E402 [flake8 lint suppression]
from const import confFileName, logPath # noqa: E402 [flake8 lint suppression]
from plugin_helper import Plugin_Objects, handleEmpty # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value, hide_string # noqa: E402 [flake8 lint suppression]
from utils.datetime_utils import timeNowDB # noqa: E402 [flake8 lint suppression]
from models.notification_instance import NotificationInstance # noqa: E402 [flake8 lint suppression]
from database import DB # noqa: E402 [flake8 lint suppression]
from pytz import timezone # noqa: E402 [flake8 lint suppression]
# Make sure the TIMEZONE for logging is correct
conf.tz = timezone(get_setting_value('TIMEZONE'))
@@ -32,13 +32,12 @@ LOG_PATH = logPath + '/plugins'
RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
def main():
mylog('verbose', [f'[{pluginName}](publisher) In script'])
# Check if basic config settings supplied
if check_config() == False:
if check_config() is False:
mylog('none', [f'[{pluginName}] ⚠ ERROR: Publisher notification gateway not set up correctly. Check your {confFileName} {pluginName}_* variables.'])
return
@@ -76,8 +75,7 @@ def main():
plugin_objects.write_result_file()
#-------------------------------------------------------------------------------
# -------------------------------------------------------------------------------
def send(text):
response_text = ''
@@ -87,7 +85,6 @@ def send(text):
mylog('verbose', [f'[{pluginName}] PUSHSAFER_TOKEN: "{hide_string(token)}"'])
try:
url = 'https://www.pushsafer.com/api'
post_fields = {
@@ -103,10 +100,8 @@ def send(text):
"k" : token,
}
response = requests.post(url, data=post_fields)
response_status_code = response.status_code
# Check if the request was successful (status code 200)
if response_status_code == 200:
response_text = response.text # This captures the response body/message
@@ -120,21 +115,17 @@ def send(text):
return response_text, response_status_code
return response_text, response_status_code
#-------------------------------------------------------------------------------
# -------------------------------------------------------------------------------
def check_config():
    """Return True when PUSHSAFER_TOKEN has been configured.

    The shipped default value is the placeholder string 'ApiKey'; any
    other value is treated as a real token.
    """
    # Direct boolean return replaces the redundant if/else-on-booleans.
    return get_setting_value('PUSHSAFER_TOKEN') != 'ApiKey'
# -------------------------------------------------------
if __name__ == '__main__':
sys.exit(main())

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python
# !/usr/bin/env python
import subprocess
import os
@@ -8,15 +8,15 @@ import sys
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
import conf
from const import confFileName, logPath
from plugin_helper import Plugin_Objects
from utils.datetime_utils import timeNowDB
from logger import mylog, Logger
from helper import get_setting_value
from models.notification_instance import NotificationInstance
from database import DB
from pytz import timezone
import conf # noqa: E402 [flake8 lint suppression]
from const import confFileName, logPath # noqa: E402 [flake8 lint suppression]
from plugin_helper import Plugin_Objects # noqa: E402 [flake8 lint suppression]
from utils.datetime_utils import timeNowDB # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
from models.notification_instance import NotificationInstance # noqa: E402 [flake8 lint suppression]
from database import DB # noqa: E402 [flake8 lint suppression]
from pytz import timezone # noqa: E402 [flake8 lint suppression]
# Make sure the TIMEZONE for logging is correct
conf.tz = timezone(get_setting_value('TIMEZONE'))
@@ -30,13 +30,11 @@ LOG_PATH = logPath + '/plugins'
RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
def main():
mylog('verbose', [f'[{pluginName}](publisher) In script'])
# Check if basic config settings supplied
if check_config() == False:
if check_config() is False:
mylog('none', [
f'[{pluginName}] ⚠ ERROR: Publisher notification gateway not set up correctly. Check your {confFileName} {pluginName}_* variables.'])
return

View File

@@ -1,5 +1,5 @@
#!/usr/bin/env python
# !/usr/bin/env python
import json
import subprocess
@@ -13,15 +13,15 @@ INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
import conf
from const import logPath, confFileName
from plugin_helper import Plugin_Objects, handleEmpty
from utils.datetime_utils import timeNowDB
from logger import mylog, Logger
from helper import get_setting_value, write_file
from models.notification_instance import NotificationInstance
from database import DB
from pytz import timezone
import conf # noqa: E402 [flake8 lint suppression]
from const import logPath, confFileName # noqa: E402 [flake8 lint suppression]
from plugin_helper import Plugin_Objects, handleEmpty # noqa: E402 [flake8 lint suppression]
from utils.datetime_utils import timeNowDB # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value, write_file # noqa: E402 [flake8 lint suppression]
from models.notification_instance import NotificationInstance # noqa: E402 [flake8 lint suppression]
from database import DB # noqa: E402 [flake8 lint suppression]
from pytz import timezone # noqa: E402 [flake8 lint suppression]
# Make sure the TIMEZONE for logging is correct
conf.tz = timezone(get_setting_value('TIMEZONE'))
@@ -35,13 +35,12 @@ LOG_PATH = logPath + '/plugins'
RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
def main():
mylog('verbose', [f'[{pluginName}](publisher) In script'])
# Check if basic config settings supplied
if check_config() == False:
if check_config() is False:
mylog('none', [f'[{pluginName}] ⚠ ERROR: Publisher notification gateway not set up correctly. Check your {confFileName} {pluginName}_* variables.'])
return
@@ -62,7 +61,11 @@ def main():
for notification in new_notifications:
# Send notification
response_stdout, response_stderr = send(notification["Text"], notification["HTML"], notification["JSON"])
response_stdout, response_stderr = send(
notification["Text"],
notification["HTML"],
notification["JSON"]
)
# Log result
plugin_objects.add_object(
@@ -79,16 +82,16 @@ def main():
plugin_objects.write_result_file()
#-------------------------------------------------------------------------------
# -------------------------------------------------------------------------------
def check_config():
    """Return True when a webhook target URL has been configured.

    An empty WEBHOOK_URL setting means the publisher has no endpoint
    to post to and must not run.
    """
    # Direct boolean return replaces the redundant if/else-on-booleans.
    return get_setting_value('WEBHOOK_URL') != ''
#-------------------------------------------------------------------------------
def send (text_data, html_data, json_data):
# -------------------------------------------------------------------------------
def send(text_data, html_data, json_data):
response_stderr = ''
response_stdout = ''
@@ -139,7 +142,10 @@ def send (text_data, html_data, json_data):
payloadData = text_data
# Define slack-compatible payload
_json_payload = { "text": payloadData } if payloadType == 'text' else {
if payloadType == 'text':
_json_payload = {"text": payloadData}
else:
_json_payload = {
"username": "NetAlertX",
"text": "There are new notifications",
"attachments": [{
@@ -150,22 +156,22 @@ def send (text_data, html_data, json_data):
}
# DEBUG - Write the json payload into a log file for debugging
write_file (logPath + '/webhook_payload.json', json.dumps(_json_payload))
write_file(logPath + '/webhook_payload.json', json.dumps(_json_payload))
# Using the Slack-Compatible Webhook endpoint for Discord so that the same payload can be used for both
# Consider: curl has the ability to load in data to POST from a file + piping
if(endpointUrl.startswith('https://discord.com/api/webhooks/') and not endpointUrl.endswith("/slack")):
if (endpointUrl.startswith('https://discord.com/api/webhooks/') and not endpointUrl.endswith("/slack")):
_WEBHOOK_URL = f"{endpointUrl}/slack"
curlParams = ["curl","-i","-H", "Content-Type:application/json" ,"-d", json.dumps(_json_payload), _WEBHOOK_URL]
curlParams = ["curl", "-i", "-H", "Content-Type:application/json", "-d", json.dumps(_json_payload), _WEBHOOK_URL]
else:
_WEBHOOK_URL = endpointUrl
curlParams = ["curl","-i","-X", requestMethod , "-H", "Content-Type:application/json", "-d", json.dumps(_json_payload), _WEBHOOK_URL]
curlParams = ["curl", "-i", "-X", requestMethod , "-H", "Content-Type:application/json", "-d", json.dumps(_json_payload), _WEBHOOK_URL]
# Add HMAC signature if configured
if(secret != ''):
if (secret != ''):
h = hmac.new(secret.encode("UTF-8"), json.dumps(_json_payload, separators=(',', ':')).encode(), hashlib.sha256).hexdigest()
curlParams.insert(4,"-H")
curlParams.insert(5,f"X-Webhook-Signature: sha256={h}")
curlParams.insert(4, "-H")
curlParams.insert(5, f"X-Webhook-Signature: sha256={h}")
try:
# Execute CURL call
@@ -179,18 +185,15 @@ def send (text_data, html_data, json_data):
mylog('debug', [f'[{pluginName}] stdout: ', response_stdout])
mylog('debug', [f'[{pluginName}] stderr: ', response_stderr])
except subprocess.CalledProcessError as e:
# An error occurred, handle it
mylog('none', [f'[{pluginName}] ⚠ ERROR: ', e.output])
response_stderr = e.output
return response_stdout, response_stderr
# -------------------------------------------------------
if __name__ == '__main__':
sys.exit(main())

View File

@@ -1,7 +1,6 @@
#!/usr/bin/env python
# !/usr/bin/env python
import os
import time
import pathlib
import argparse
import sys
import re
@@ -9,16 +8,16 @@ import base64
import subprocess
# Register NetAlertX directories
INSTALL_PATH="/app"
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from database import DB
from plugin_helper import Plugin_Objects, handleEmpty
from logger import mylog, Logger, append_line_to_file
from helper import get_setting_value
from const import logPath, applicationPath
import conf
from pytz import timezone
from database import DB # noqa: E402 [flake8 lint suppression]
from plugin_helper import Plugin_Objects, handleEmpty # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
from const import logPath # noqa: E402 [flake8 lint suppression]
import conf # noqa: E402 [flake8 lint suppression]
from pytz import timezone # noqa: E402 [flake8 lint suppression]
# Make sure the TIMEZONE for logging is correct
conf.tz = timezone(get_setting_value("TIMEZONE"))

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python
# !/usr/bin/env python
import os
import sys
@@ -6,17 +6,16 @@ INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
pluginName = "ASUSWRT"
import asyncio
import aiohttp
import conf
from asusrouter import AsusData, AsusRouter
from asusrouter.modules.connection import ConnectionState
from const import logPath
from helper import get_setting_value
from logger import Logger, mylog
from plugin_helper import (Plugin_Objects, handleEmpty)
from pytz import timezone
import asyncio # noqa: E402 [flake8 lint suppression]
import aiohttp # noqa: E402 [flake8 lint suppression]
import conf # noqa: E402 [flake8 lint suppression]
from asusrouter import AsusData, AsusRouter # noqa: E402 [flake8 lint suppression]
from asusrouter.modules.connection import ConnectionState # noqa: E402 [flake8 lint suppression]
from const import logPath # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
from logger import Logger, mylog # noqa: E402 [flake8 lint suppression]
from plugin_helper import (Plugin_Objects, handleEmpty) # noqa: E402 [flake8 lint suppression]
from pytz import timezone # noqa: E402 [flake8 lint suppression]
conf.tz = timezone(get_setting_value("TIMEZONE"))

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python3
# !/usr/bin/env python3
import os
import sys
import socket
@@ -8,14 +8,14 @@ from zeroconf import Zeroconf
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from plugin_helper import Plugin_Objects
from logger import mylog, Logger
from const import logPath
from helper import get_setting_value
from database import DB
from models.device_instance import DeviceInstance
import conf
from pytz import timezone
from plugin_helper import Plugin_Objects # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from const import logPath # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
from database import DB # noqa: E402 [flake8 lint suppression]
from models.device_instance import DeviceInstance # noqa: E402 [flake8 lint suppression]
import conf # noqa: E402 [flake8 lint suppression]
from pytz import timezone # noqa: E402 [flake8 lint suppression]
# Configure timezone and logging
conf.tz = timezone(get_setting_value("TIMEZONE"))
@@ -67,7 +67,7 @@ def resolve_mdns_name(ip: str, timeout: int = 5) -> str:
hostname = socket.getnameinfo((ip, 0), socket.NI_NAMEREQD)[0]
zeroconf.close()
if hostname and hostname != ip:
mylog("debug", [f"[{pluginName}] Found mDNS name: {hostname}"])
mylog("debug", [f"[{pluginName}] Found mDNS name (rev_name): {hostname} ({rev_name})"])
return hostname
except Exception as e:
mylog("debug", [f"[{pluginName}] Zeroconf lookup failed for {ip}: {e}"])

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python
# !/usr/bin/env python
import os
import argparse
@@ -11,11 +11,11 @@ from datetime import datetime
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from logger import mylog, Logger
from helper import get_setting_value
from const import logPath, fullDbPath
import conf
from pytz import timezone
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
from const import logPath, fullDbPath # noqa: E402 [flake8 lint suppression]
import conf # noqa: E402 [flake8 lint suppression]
from pytz import timezone # noqa: E402 [flake8 lint suppression]
# Make sure the TIMEZONE for logging is correct
conf.tz = timezone(get_setting_value('TIMEZONE'))
@@ -29,6 +29,7 @@ LOG_PATH = logPath + '/plugins'
LOG_FILE = os.path.join(LOG_PATH, f'script.{pluginName}.log')
RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
def main():
# the script expects a parameter in the format of devices=device1,device2,...
@@ -72,7 +73,7 @@ def main():
csv_writer = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_MINIMAL)
# Wrap the header values in double quotes and write the header row
csv_writer.writerow([ '"' + col + '"' for col in columns])
csv_writer.writerow(['"' + col + '"' for col in columns])
# Fetch and write data rows
for row in cursor.fetchall():
@@ -96,8 +97,8 @@ def main():
return 0
#===============================================================================
# ===============================================================================
# BEGIN
#===============================================================================
# ===============================================================================
if __name__ == '__main__':
main()

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python
# !/usr/bin/env python
import os
import sys
@@ -8,11 +8,11 @@ import sqlite3
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from logger import mylog, Logger
from helper import get_setting_value
from const import logPath, fullDbPath
import conf
from pytz import timezone
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
from const import logPath, fullDbPath # noqa: E402 [flake8 lint suppression]
import conf # noqa: E402 [flake8 lint suppression]
from pytz import timezone # noqa: E402 [flake8 lint suppression]
# Make sure the TIMEZONE for logging is correct
conf.tz = timezone(get_setting_value("TIMEZONE"))

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python
# !/usr/bin/env python
import os
import argparse
@@ -9,11 +9,11 @@ import subprocess
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from logger import mylog, Logger
from helper import get_setting_value, check_IP_format
from const import logPath
import conf
from pytz import timezone
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value, check_IP_format # noqa: E402 [flake8 lint suppression]
from const import logPath # noqa: E402 [flake8 lint suppression]
import conf # noqa: E402 [flake8 lint suppression]
from pytz import timezone # noqa: E402 [flake8 lint suppression]
# Make sure the TIMEZONE for logging is correct
conf.tz = timezone(get_setting_value('TIMEZONE'))
@@ -28,7 +28,6 @@ LOG_FILE = os.path.join(LOG_PATH, f'script.{pluginName}.log')
RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
def main():
mylog('verbose', [f'[{pluginName}] In script'])
@@ -41,7 +40,6 @@ def main():
parser.add_argument('DDNS_PASSWORD', action="store", help="Password for Dynamic DNS (DDNS) authentication")
parser.add_argument('DDNS_DOMAIN', action="store", help="Dynamic DNS (DDNS) domain name")
values = parser.parse_args()
PREV_IP = values.prev_ip.split('=')[1]
@@ -51,17 +49,17 @@ def main():
DDNS_DOMAIN = values.DDNS_DOMAIN.split('=')[1]
# perform the new IP lookup and DDNS tasks if enabled
ddns_update( DDNS_UPDATE_URL, DDNS_USER, DDNS_PASSWORD, DDNS_DOMAIN, PREV_IP)
ddns_update(DDNS_UPDATE_URL, DDNS_USER, DDNS_PASSWORD, DDNS_DOMAIN, PREV_IP)
mylog('verbose', [f'[{pluginName}] Finished '])
return 0
#===============================================================================
# ===============================================================================
# INTERNET IP CHANGE
#===============================================================================
def ddns_update ( DDNS_UPDATE_URL, DDNS_USER, DDNS_PASSWORD, DDNS_DOMAIN, PREV_IP ):
# ===============================================================================
def ddns_update(DDNS_UPDATE_URL, DDNS_USER, DDNS_PASSWORD, DDNS_DOMAIN, PREV_IP):
# Update DDNS record if enabled and IP is different
# Get Dynamic DNS IP
@@ -78,11 +76,10 @@ def ddns_update ( DDNS_UPDATE_URL, DDNS_USER, DDNS_PASSWORD, DDNS_DOMAIN, PREV_I
# Check DNS Change
if dns_IP != PREV_IP :
mylog('none', [f'[{pluginName}] Updating Dynamic DNS IP'])
message = set_dynamic_DNS_IP (DDNS_UPDATE_URL, DDNS_USER, DDNS_PASSWORD, DDNS_DOMAIN)
message = set_dynamic_DNS_IP(DDNS_UPDATE_URL, DDNS_USER, DDNS_PASSWORD, DDNS_DOMAIN)
mylog('none', [f'[{pluginName}] ', message])
# plugin_objects = Plugin_Objects(RESULT_FILE)
# plugin_objects.add_object(
# primaryId = 'Internet', # MAC (Device Name)
# secondaryId = new_internet_IP, # IP Address
@@ -96,15 +93,15 @@ def ddns_update ( DDNS_UPDATE_URL, DDNS_USER, DDNS_PASSWORD, DDNS_DOMAIN, PREV_I
# plugin_objects.write_result_file()
#-------------------------------------------------------------------------------
def get_dynamic_DNS_IP (DDNS_DOMAIN):
# -------------------------------------------------------------------------------
def get_dynamic_DNS_IP(DDNS_DOMAIN):
# Using supplied DNS server
dig_args = ['dig', '+short', DDNS_DOMAIN]
try:
# try runnning a subprocess
dig_output = subprocess.check_output (dig_args, universal_newlines=True)
dig_output = subprocess.check_output(dig_args, universal_newlines=True)
mylog('none', [f'[{pluginName}] DIG output :', dig_output])
except subprocess.CalledProcessError as e:
# An error occured, handle it
@@ -112,7 +109,7 @@ def get_dynamic_DNS_IP (DDNS_DOMAIN):
dig_output = '' # probably no internet
# Check result is an IP
IP = check_IP_format (dig_output)
IP = check_IP_format(dig_output)
# Handle invalid response
if IP == '':
@@ -120,28 +117,27 @@ def get_dynamic_DNS_IP (DDNS_DOMAIN):
return IP
#-------------------------------------------------------------------------------
def set_dynamic_DNS_IP (DDNS_UPDATE_URL, DDNS_USER, DDNS_PASSWORD, DDNS_DOMAIN):
# -------------------------------------------------------------------------------
def set_dynamic_DNS_IP(DDNS_UPDATE_URL, DDNS_USER, DDNS_PASSWORD, DDNS_DOMAIN):
try:
# try runnning a subprocess
# Update Dynamic IP
curl_output = subprocess.check_output (['curl',
curl_output = subprocess.check_output([
'curl',
'-s',
DDNS_UPDATE_URL +
'username=' + DDNS_USER +
'&password=' + DDNS_PASSWORD +
'&hostname=' + DDNS_DOMAIN],
DDNS_UPDATE_URL + 'username=' + DDNS_USER + '&password=' + DDNS_PASSWORD + '&hostname=' + DDNS_DOMAIN],
universal_newlines=True)
except subprocess.CalledProcessError as e:
# An error occured, handle it
mylog('none', [f'[{pluginName}] ⚠ ERROR - ',e.output])
mylog('none', [f'[{pluginName}] ⚠ ERROR - ', e.output])
curl_output = ""
return curl_output
#===============================================================================
# ===============================================================================
# BEGIN
#===============================================================================
# ===============================================================================
if __name__ == '__main__':
main()

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python
# !/usr/bin/env python
from __future__ import unicode_literals
import argparse
@@ -10,13 +10,13 @@ import chardet
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from plugin_helper import Plugin_Objects, handleEmpty, is_mac
from logger import mylog, Logger
from dhcp_leases import DhcpLeases
from helper import get_setting_value
import conf
from const import logPath
from pytz import timezone
from plugin_helper import Plugin_Objects, handleEmpty, is_mac # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from dhcp_leases import DhcpLeases # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
import conf # noqa: E402 [flake8 lint suppression]
from const import logPath # noqa: E402 [flake8 lint suppression]
from pytz import timezone # noqa: E402 [flake8 lint suppression]
# Make sure the TIMEZONE for logging is correct
conf.tz = timezone(get_setting_value('TIMEZONE'))
@@ -24,15 +24,13 @@ conf.tz = timezone(get_setting_value('TIMEZONE'))
# Make sure log level is initialized correctly
Logger(get_setting_value('LOG_LEVEL'))
pluginName= 'DHCPLSS'
pluginName = 'DHCPLSS'
LOG_PATH = logPath + '/plugins'
LOG_FILE = os.path.join(LOG_PATH, f'script.{pluginName}.log')
RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
# -------------------------------------------------------------
def main():
mylog('verbose', [f'[{pluginName}] In script'])
@@ -40,7 +38,12 @@ def main():
last_run_logfile.write("")
parser = argparse.ArgumentParser(description='Import devices from dhcp.leases files')
parser.add_argument('paths', action="store", help="absolute dhcp.leases file paths to check separated by ','")
parser.add_argument(
'paths',
action="store",
help="absolute dhcp.leases file paths to check separated by ','"
)
values = parser.parse_args()
plugin_objects = Plugin_Objects(RESULT_FILE)
@@ -52,6 +55,7 @@ def main():
plugin_objects.write_result_file()
# -------------------------------------------------------------
def get_entries(path, plugin_objects):
@@ -122,5 +126,6 @@ def get_entries(path, plugin_objects):
)
return plugin_objects
if __name__ == '__main__':
main()

View File

@@ -1,9 +1,8 @@
#!/usr/bin/env python
# !/usr/bin/env python
# Based on the work of https://github.com/leiweibau/Pi.Alert
import subprocess
import os
from datetime import datetime
import sys
@@ -11,12 +10,12 @@ import sys
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from plugin_helper import Plugin_Objects, Plugin_Object
from logger import mylog, Logger
from helper import get_setting_value
import conf
from pytz import timezone
from const import logPath
from plugin_helper import Plugin_Objects, Plugin_Object # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
import conf # noqa: E402 [flake8 lint suppression]
from pytz import timezone # noqa: E402 [flake8 lint suppression]
from const import logPath # noqa: E402 [flake8 lint suppression]
# Make sure the TIMEZONE for logging is correct
@@ -31,6 +30,7 @@ LOG_PATH = logPath + '/plugins'
LOG_FILE = os.path.join(LOG_PATH, f'script.{pluginName}.log')
RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
def main():
mylog('verbose', ['[DHCPSRVS] In script'])
@@ -101,5 +101,6 @@ def main():
except Exception as e:
mylog('verbose', ['[DHCPSRVS] Error in main:', str(e)])
if __name__ == '__main__':
main()

View File

@@ -1,5 +1,4 @@
#!/usr/bin/env python
# !/usr/bin/env python
import os
import sys
import subprocess
@@ -8,14 +7,14 @@ import subprocess
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from plugin_helper import Plugin_Objects
from logger import mylog, Logger
from const import logPath
from helper import get_setting_value
from database import DB
from models.device_instance import DeviceInstance
import conf
from pytz import timezone
from plugin_helper import Plugin_Objects # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from const import logPath # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
from database import DB # noqa: E402 [flake8 lint suppression]
from models.device_instance import DeviceInstance # noqa: E402 [flake8 lint suppression]
import conf # noqa: E402 [flake8 lint suppression]
from pytz import timezone # noqa: E402 [flake8 lint suppression]
# Make sure the TIMEZONE for logging is correct
conf.tz = timezone(get_setting_value('TIMEZONE'))
@@ -65,7 +64,6 @@ def main():
if domain_name != '':
plugin_objects.add_object(
# "MAC", "IP", "Server", "Name"
primaryId = device['devMac'],
secondaryId = device['devLastIP'],
watched1 = dns_server,
@@ -73,19 +71,20 @@ def main():
watched3 = '',
watched4 = '',
extra = '',
foreignKey = device['devMac'])
foreignKey = device['devMac']
)
plugin_objects.write_result_file()
mylog('verbose', [f'[{pluginName}] Script finished'])
return 0
#===============================================================================
# ===============================================================================
# Execute scan
#===============================================================================
def execute_name_lookup (ip, timeout):
# ===============================================================================
def execute_name_lookup(ip, timeout):
"""
Execute the DIG command on IP.
"""
@@ -99,7 +98,13 @@ def execute_name_lookup (ip, timeout):
mylog('verbose', [f'[{pluginName}] DEBUG CMD :', args])
# try runnning a subprocess with a forced (timeout) in case the subprocess hangs
output = subprocess.check_output (args, universal_newlines=True, stderr=subprocess.STDOUT, timeout=(timeout), text=True).strip()
output = subprocess.check_output(
args,
universal_newlines=True,
stderr=subprocess.STDOUT,
timeout=(timeout),
text=True
).strip()
mylog('verbose', [f'[{pluginName}] DEBUG OUTPUT : {output}'])
@@ -123,6 +128,6 @@ def execute_name_lookup (ip, timeout):
return '', ''
if __name__ == '__main__':
main()

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python
# !/usr/bin/env python
import os
import sys
@@ -17,11 +17,11 @@ from aiofreepybox.exceptions import NotOpenError, AuthorizationError
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from plugin_helper import Plugin_Objects
from logger import mylog, Logger
from const import logPath
from helper import get_setting_value
import conf
from plugin_helper import Plugin_Objects # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from const import logPath # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
import conf # noqa: E402 [flake8 lint suppression]
# Make sure the TIMEZONE for logging is correct
conf.tz = timezone(get_setting_value("TIMEZONE"))
@@ -79,6 +79,7 @@ def map_device_type(type: str):
mylog("minimal", [f"[{pluginName}] Unknown device type: {type}"])
return device_type_map["other"]
async def get_device_data(api_version: int, api_address: str, api_port: int):
# ensure existence of db path
config_base = Path(os.getenv("NETALERTX_CONFIG", "/data/config"))

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python
# !/usr/bin/env python
# test script by running:
# tbc
@@ -11,14 +11,14 @@ import re
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from plugin_helper import Plugin_Objects
from logger import mylog, Logger
from helper import get_setting_value
from const import logPath
from database import DB
from models.device_instance import DeviceInstance
import conf
from pytz import timezone
from plugin_helper import Plugin_Objects # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
from const import logPath # noqa: E402 [flake8 lint suppression]
from database import DB # noqa: E402 [flake8 lint suppression]
from models.device_instance import DeviceInstance # noqa: E402 [flake8 lint suppression]
import conf # noqa: E402 [flake8 lint suppression]
from pytz import timezone # noqa: E402 [flake8 lint suppression]
# Make sure the TIMEZONE for logging is correct
conf.tz = timezone(get_setting_value('TIMEZONE'))
@@ -33,12 +33,10 @@ LOG_FILE = os.path.join(LOG_PATH, f'script.{pluginName}.log')
RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
def main():
mylog('verbose', [f'[{pluginName}] In script'])
timeout = get_setting_value('ICMP_RUN_TIMEOUT')
args = get_setting_value('ICMP_ARGS')
in_regex = get_setting_value('ICMP_IN_REGEX')
@@ -65,7 +63,6 @@ def main():
if regex_pattern.match(device['devLastIP'])
]
mylog('verbose', [f'[{pluginName}] Devices to PING: {len(filtered_devices)}'])
for device in filtered_devices:
@@ -73,30 +70,30 @@ def main():
mylog('verbose', [f"[{pluginName}] ip: {device['devLastIP']} is_online: {is_online}"])
if is_online:
plugin_objects.add_object(
# "MAC", "IP", "Name", "Output"
primaryId = device['devMac'],
secondaryId = device['devLastIP'],
watched1 = device['devName'],
watched2 = output.replace('\n',''),
watched2 = output.replace('\n', ''),
watched3 = '',
watched4 = '',
extra = '',
foreignKey = device['devMac'])
foreignKey = device['devMac']
)
plugin_objects.write_result_file()
mylog('verbose', [f'[{pluginName}] Script finished'])
return 0
#===============================================================================
# ===============================================================================
# Execute scan
#===============================================================================
def execute_scan (ip, timeout, args):
# ===============================================================================
def execute_scan(ip, timeout, args):
"""
Execute the ICMP command on IP.
"""
@@ -108,12 +105,18 @@ def execute_scan (ip, timeout, args):
try:
# try runnning a subprocess with a forced (timeout) in case the subprocess hangs
output = subprocess.check_output (icmp_args, universal_newlines=True, stderr=subprocess.STDOUT, timeout=(timeout), text=True)
output = subprocess.check_output(
icmp_args,
universal_newlines=True,
stderr=subprocess.STDOUT,
timeout=(timeout),
text=True
)
mylog('verbose', [f'[{pluginName}] DEBUG OUTPUT : {output}'])
# Parse output using case-insensitive regular expressions
#Synology-NAS:/# ping -i 0.5 -c 3 -W 8 -w 9 192.168.1.82
# Synology-NAS:/# ping -i 0.5 -c 3 -W 8 -w 9 192.168.1.82
# PING 192.168.1.82 (192.168.1.82): 56 data bytes
# 64 bytes from 192.168.1.82: seq=0 ttl=64 time=0.080 ms
# 64 bytes from 192.168.1.82: seq=1 ttl=64 time=0.081 ms
@@ -157,10 +160,8 @@ def execute_scan (ip, timeout, args):
return False, output
#===============================================================================
# ===============================================================================
# BEGIN
#===============================================================================
# ===============================================================================
if __name__ == '__main__':
main()

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python
# !/usr/bin/env python
import os
import time
@@ -11,13 +11,13 @@ import re
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from plugin_helper import Plugin_Objects
from utils.datetime_utils import timeNowDB
from logger import mylog, Logger, append_line_to_file
from helper import check_IP_format, get_setting_value
from const import logPath
import conf
from pytz import timezone
from plugin_helper import Plugin_Objects # noqa: E402 [flake8 lint suppression]
from utils.datetime_utils import timeNowDB # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger, append_line_to_file # noqa: E402 [flake8 lint suppression]
from helper import check_IP_format, get_setting_value # noqa: E402 [flake8 lint suppression]
from const import logPath # noqa: E402 [flake8 lint suppression]
import conf # noqa: E402 [flake8 lint suppression]
from pytz import timezone # noqa: E402 [flake8 lint suppression]
# Make sure the TIMEZONE for logging is correct
conf.tz = timezone(get_setting_value('TIMEZONE'))
@@ -31,9 +31,9 @@ LOG_PATH = logPath + '/plugins'
LOG_FILE = os.path.join(LOG_PATH, f'script.{pluginName}.log')
RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
no_internet_ip = '0.0.0.0'
def main():
mylog('verbose', [f'[{pluginName}] In script'])
@@ -60,10 +60,10 @@ def main():
for i in range(INTRNT_RETRIES + 1):
new_internet_IP, cmd_output = check_internet_IP( PREV_IP, DIG_GET_IP_ARG)
new_internet_IP, cmd_output = check_internet_IP(PREV_IP, DIG_GET_IP_ARG)
if new_internet_IP == no_internet_ip:
time.sleep(1*i) # Exponential backoff strategy
time.sleep(1 * i) # Exponential backoff strategy
else:
retries_needed = i
break
@@ -74,7 +74,7 @@ def main():
mylog('verbose', [f'[{pluginName}] Curl Fallback (new_internet_IP|cmd_output): {new_internet_IP} | {cmd_output}'])
# logging
append_line_to_file (logPath + '/IP_changes.log', '['+str(timeNowDB()) +']\t'+ new_internet_IP +'\n')
append_line_to_file(logPath + '/IP_changes.log', '[' + str(timeNowDB()) + ']\t' + new_internet_IP + '\n')
plugin_objects = Plugin_Objects(RESULT_FILE)
@@ -82,11 +82,12 @@ def main():
primaryId = 'Internet', # MAC (Device Name)
secondaryId = new_internet_IP, # IP Address
watched1 = f'Previous IP: {PREV_IP}',
watched2 = cmd_output.replace('\n',''),
watched2 = cmd_output.replace('\n', ''),
watched3 = retries_needed,
watched4 = 'Gateway',
extra = f'Previous IP: {PREV_IP}',
foreignKey = 'Internet')
foreignKey = 'Internet'
)
plugin_objects.write_result_file()
@@ -95,10 +96,10 @@ def main():
return 0
#===============================================================================
# ===============================================================================
# INTERNET IP CHANGE
#===============================================================================
def check_internet_IP ( PREV_IP, DIG_GET_IP_ARG ):
# ===============================================================================
def check_internet_IP(PREV_IP, DIG_GET_IP_ARG):
# Get Internet IP
mylog('verbose', [f'[{pluginName}] - Retrieving Internet IP'])
@@ -117,22 +118,22 @@ def check_internet_IP ( PREV_IP, DIG_GET_IP_ARG ):
return internet_IP, cmd_output
#-------------------------------------------------------------------------------
def get_internet_IP (DIG_GET_IP_ARG):
# -------------------------------------------------------------------------------
def get_internet_IP(DIG_GET_IP_ARG):
cmd_output = ''
# Using 'dig'
dig_args = ['dig', '+short'] + DIG_GET_IP_ARG.strip().split()
try:
cmd_output = subprocess.check_output (dig_args, universal_newlines=True)
cmd_output = subprocess.check_output(dig_args, universal_newlines=True)
mylog('verbose', [f'[{pluginName}] DIG result : {cmd_output}'])
except subprocess.CalledProcessError as e:
mylog('verbose', [e.output])
cmd_output = '' # no internet
# Check result is an IP
IP = check_IP_format (cmd_output)
IP = check_IP_format(cmd_output)
# Handle invalid response
if IP == '':
@@ -140,7 +141,8 @@ def get_internet_IP (DIG_GET_IP_ARG):
return IP, cmd_output
#-------------------------------------------------------------------------------
# -------------------------------------------------------------------------------
def fallback_check_ip():
"""Fallback mechanism using `curl ifconfig.me/ip`."""
try:
@@ -155,8 +157,9 @@ def fallback_check_ip():
mylog('none', [f'[{pluginName}] Fallback curl exception: {e}'])
return no_internet_ip, f'Fallback via curl exception: "{e}"'
#===============================================================================
# ===============================================================================
# BEGIN
#===============================================================================
# ===============================================================================
if __name__ == '__main__':
main()

View File

@@ -1,6 +1,5 @@
#!/usr/bin/env python
# !/usr/bin/env python
import argparse
import os
import sys
import speedtest
@@ -9,13 +8,13 @@ import speedtest
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from plugin_helper import Plugin_Objects
from utils.datetime_utils import timeNowDB
from logger import mylog, Logger
from helper import get_setting_value
import conf
from pytz import timezone
from const import logPath
from plugin_helper import Plugin_Objects # noqa: E402 [flake8 lint suppression]
from utils.datetime_utils import timeNowDB # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
import conf # noqa: E402 [flake8 lint suppression]
from pytz import timezone # noqa: E402 [flake8 lint suppression]
from const import logPath # noqa: E402 [flake8 lint suppression]
# Make sure the TIMEZONE for logging is correct
conf.tz = timezone(get_setting_value('TIMEZONE'))
@@ -28,13 +27,11 @@ pluginName = 'INTRSPD'
LOG_PATH = logPath + '/plugins'
RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
def main():
mylog('verbose', ['[INTRSPD] In script'])
parser = argparse.ArgumentParser(description='Speedtest Plugin for NetAlertX')
values = parser.parse_args()
plugin_objects = Plugin_Objects(RESULT_FILE)
speedtest_result = run_speedtest()
plugin_objects.add_object(
@@ -49,6 +46,7 @@ def main():
)
plugin_objects.write_result_file()
def run_speedtest():
try:
st = speedtest.Speedtest(secure=True)
@@ -69,5 +67,6 @@ def run_speedtest():
'upload_speed': -1,
}
if __name__ == '__main__':
sys.exit(main())

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python
# !/usr/bin/env python
import os
import sys
@@ -11,11 +11,11 @@ from functools import reduce
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from plugin_helper import Plugin_Objects
from logger import mylog, Logger
from const import logPath
from helper import get_setting_value
import conf
from plugin_helper import Plugin_Objects # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from const import logPath # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
import conf # noqa: E402 [flake8 lint suppression]
# Make sure the TIMEZONE for logging is correct
conf.tz = timezone(get_setting_value('TIMEZONE'))
@@ -34,7 +34,6 @@ RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
plugin_objects = Plugin_Objects(RESULT_FILE)
def main():
mylog('verbose', [f'[{pluginName}] In script'])
@@ -66,10 +65,10 @@ def main():
# The following are always unknown
watched1 = device['hostname'], # don't use these --> handleEmpty(device['hostname']),
watched2 = device['vendor'], # handleEmpty(device['vendor']),
watched3 = device['device_type'], # handleEmpty(device['device_type']),
watched2 = device['vendor'], # don't use these --> handleEmpty(device['vendor']),
watched3 = device['device_type'], # don't use these --> handleEmpty(device['device_type']),
extra = '',
foreignKey = "" #device['mac']
foreignKey = "" # device['mac']
# helpVal1 = "Something1", # Optional Helper values to be passed for mapping into the app
# helpVal2 = "Something1", # If you need to use even only 1, add the remaining ones too
# helpVal3 = "Something1", # and set them to 'null'. Check the the docs for details:
@@ -83,6 +82,7 @@ def main():
return 0
def parse_neighbors(raw_neighbors: list[str]):
neighbors = []
for line in raw_neighbors:
@@ -111,6 +111,7 @@ def is_multicast(ip):
prefixes = ['ff', '224', '231', '232', '233', '234', '238', '239']
return reduce(lambda acc, prefix: acc or ip.startswith(prefix), prefixes, False)
# retrieve data
def get_neighbors(interfaces):
@@ -136,11 +137,11 @@ def get_neighbors(interfaces):
mylog('verbose', [f'[{pluginName}] Scanning interface succeded: "{interface}"'])
except subprocess.CalledProcessError as e:
# An error occurred, handle it
mylog('verbose', [f'[{pluginName}] Scanning interface failed: "{interface}"'])
error_type = type(e).__name__ # Capture the error type
mylog('verbose', [f'[{pluginName}] Scanning interface failed: "{interface}" ({error_type})'])
return results
if __name__ == '__main__':
main()

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python
# !/usr/bin/env python
import os
import sys
@@ -7,18 +7,18 @@ INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
pluginName = 'LUCIRPC'
from plugin_helper import Plugin_Objects
from logger import mylog, Logger
from helper import get_setting_value
from const import logPath
import conf
from pytz import timezone
from plugin_helper import Plugin_Objects # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
from const import logPath # noqa: E402 [flake8 lint suppression]
import conf # noqa: E402 [flake8 lint suppression]
from pytz import timezone # noqa: E402 [flake8 lint suppression]
try:
from openwrt_luci_rpc import OpenWrtRpc
except:
mylog('error', [f'[{pluginName}] Failed import openwrt_luci_rpc'])
exit()
except ImportError as e:
mylog('error', [f'[{pluginName}] Failed import openwrt_luci_rpc: {e}'])
exit(1)
conf.tz = timezone(get_setting_value('TIMEZONE'))
@@ -30,6 +30,7 @@ RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
plugin_objects = Plugin_Objects(RESULT_FILE)
def main():
mylog('verbose', [f'[{pluginName}] start script.'])
@@ -59,6 +60,7 @@ def main():
return 0
def get_device_data():
router = OpenWrtRpc(
get_setting_value("LUCIRPC_host"),
@@ -76,5 +78,6 @@ def get_device_data():
device_data = router.get_all_connected_devices(only_reachable=get_setting_value("LUCIRPC_only_reachable"))
return device_data
if __name__ == '__main__':
main()

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python
# !/usr/bin/env python
import os
import sys
@@ -8,12 +8,12 @@ from collections import deque
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from logger import mylog, Logger
from helper import get_setting_value
from const import logPath
from messaging.in_app import remove_old
import conf
from pytz import timezone
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
from const import logPath # noqa: E402 [flake8 lint suppression]
from messaging.in_app import remove_old # noqa: E402 [flake8 lint suppression]
import conf # noqa: E402 [flake8 lint suppression]
from pytz import timezone # noqa: E402 [flake8 lint suppression]
# Make sure the TIMEZONE for logging is correct
conf.tz = timezone(get_setting_value('TIMEZONE'))
@@ -28,7 +28,6 @@ LOG_FILE = os.path.join(LOG_PATH, f'script.{pluginName}.log')
RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
def main():
mylog('verbose', [f'[{pluginName}] In script'])
@@ -65,8 +64,8 @@ def main():
return 0
#===============================================================================
# ===============================================================================
# BEGIN
#===============================================================================
# ===============================================================================
if __name__ == '__main__':
main()

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python
# !/usr/bin/env python
import os
import sys
@@ -7,14 +7,14 @@ import sys
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from plugin_helper import Plugin_Objects
from logger import mylog, Logger
from helper import get_setting_value
from const import logPath
import conf
from pytz import timezone
from librouteros import connect
from librouteros.exceptions import TrapError
from plugin_helper import Plugin_Objects # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
from const import logPath # noqa: E402 [flake8 lint suppression]
import conf # noqa: E402 [flake8 lint suppression]
from pytz import timezone # noqa: E402 [flake8 lint suppression]
from librouteros import connect # noqa: E402 [flake8 lint suppression]
from librouteros.exceptions import TrapError # noqa: E402 [flake8 lint suppression]
# Make sure the TIMEZONE for logging is correct
conf.tz = timezone(get_setting_value('TIMEZONE'))
@@ -29,7 +29,6 @@ LOG_FILE = os.path.join(LOG_PATH, f'script.{pluginName}.log')
RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
def main():
mylog('verbose', [f'[{pluginName}] In script'])
@@ -72,7 +71,10 @@ def get_entries(plugin_objects: Plugin_Objects) -> Plugin_Objects:
status = lease.get('status')
device_name = comment or host_name or "(unknown)"
mylog('verbose', [f"ID: {lease_id}, Address: {address}, MAC Address: {mac_address}, Host Name: {host_name}, Comment: {comment}, Last Seen: {last_seen}, Status: {status}"])
mylog(
'verbose',
[f"ID: {lease_id}, Address: {address}, MAC Address: {mac_address}, Host Name: {host_name}, Comment: {comment}, Last Seen: {last_seen}, Status: {status}"]
)
if (status == "bound"):
plugin_objects.add_object(
@@ -96,8 +98,8 @@ def get_entries(plugin_objects: Plugin_Objects) -> Plugin_Objects:
return plugin_objects
#===============================================================================
# ===============================================================================
# BEGIN
#===============================================================================
# ===============================================================================
if __name__ == '__main__':
main()

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python
# !/usr/bin/env python
import os
import sys
@@ -8,14 +8,14 @@ import subprocess
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from plugin_helper import Plugin_Objects
from logger import mylog, Logger
from const import logPath
from helper import get_setting_value
from database import DB
from models.device_instance import DeviceInstance
import conf
from pytz import timezone
from plugin_helper import Plugin_Objects # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from const import logPath # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
from database import DB # noqa: E402 [flake8 lint suppression]
from models.device_instance import DeviceInstance # noqa: E402 [flake8 lint suppression]
import conf # noqa: E402 [flake8 lint suppression]
from pytz import timezone # noqa: E402 [flake8 lint suppression]
# Make sure the TIMEZONE for logging is correct
conf.tz = timezone(get_setting_value('TIMEZONE'))
@@ -34,7 +34,6 @@ RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
plugin_objects = Plugin_Objects(RESULT_FILE)
def main():
mylog('verbose', [f'[{pluginName}] In script'])
@@ -75,19 +74,20 @@ def main():
watched3 = '',
watched4 = '',
extra = '',
foreignKey = device['devMac'])
foreignKey = device['devMac']
)
plugin_objects.write_result_file()
mylog('verbose', [f'[{pluginName}] Script finished'])
return 0
#===============================================================================
# ===============================================================================
# Execute scan
#===============================================================================
def execute_name_lookup (ip, timeout):
# ===============================================================================
def execute_name_lookup(ip, timeout):
"""
Execute the NBTSCAN command on IP.
"""
@@ -101,7 +101,13 @@ def execute_name_lookup (ip, timeout):
mylog('verbose', [f'[{pluginName}] DEBUG CMD :', args])
# try running a subprocess with a forced (timeout) in case the subprocess hangs
output = subprocess.check_output (args, universal_newlines=True, stderr=subprocess.STDOUT, timeout=(timeout), text=True)
output = subprocess.check_output(
args,
universal_newlines=True,
stderr=subprocess.STDOUT,
timeout=(timeout),
text=True
)
mylog('verbose', [f'[{pluginName}] DEBUG OUTPUT : {output}'])
@@ -112,7 +118,6 @@ def execute_name_lookup (ip, timeout):
lines = output.splitlines()
# Look for the first line containing a valid NetBIOS name entry
index = 0
for line in lines:
if 'Doing NBT name scan' not in line and ip in line:
# Split the line and extract the primary NetBIOS name
@@ -122,7 +127,6 @@ def execute_name_lookup (ip, timeout):
else:
mylog('verbose', [f'[{pluginName}] ⚠ ERROR - Unexpected output format: {line}'])
mylog('verbose', [f'[{pluginName}] Domain Name: {domain_name}'])
return domain_name, dns_server
@@ -144,6 +148,9 @@ def execute_name_lookup (ip, timeout):
return '', ''
# ===============================================================================
# BEGIN
# ===============================================================================
if __name__ == '__main__':
main()

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python
# !/usr/bin/env python
# test script by running:
# tbc
@@ -13,13 +13,12 @@ import nmap
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from plugin_helper import Plugin_Objects
from logger import mylog, Logger
from helper import get_setting_value
from const import logPath
from database import DB
import conf
from pytz import timezone
from plugin_helper import Plugin_Objects # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
from const import logPath # noqa: E402 [flake8 lint suppression]
import conf # noqa: E402 [flake8 lint suppression]
from pytz import timezone # noqa: E402 [flake8 lint suppression]
# Make sure the TIMEZONE for logging is correct
conf.tz = timezone(get_setting_value('TIMEZONE'))
@@ -46,7 +45,6 @@ def main():
mylog('verbose', [f'[{pluginName}] subnets: ', subnets])
# Initialize the Plugin obj output file
plugin_objects = Plugin_Objects(RESULT_FILE)
@@ -65,18 +63,19 @@ def main():
watched3 = device['interface'],
watched4 = '',
extra = '',
foreignKey = device['mac'])
foreignKey = device['mac']
)
plugin_objects.write_result_file()
mylog('verbose', [f'[{pluginName}] Script finished'])
return 0
#===============================================================================
# ===============================================================================
# Execute scan
#===============================================================================
# ===============================================================================
def execute_scan(subnets_list, timeout, fakeMac, args):
devices_list = []
@@ -103,13 +102,12 @@ def execute_scan(subnets_list, timeout, fakeMac, args):
return devices_list
def execute_scan_on_interface (interface, timeout, args):
def execute_scan_on_interface(interface, timeout, args):
# Remove unsupported VLAN flags
interface = re.sub(r'--vlan=\S+', '', interface).strip()
# Prepare command arguments
scan_args = args.split() + interface.replace('--interface=','-e ').split()
scan_args = args.split() + interface.replace('--interface=', '-e ').split()
mylog('verbose', [f'[{pluginName}] scan_args: ', scan_args])
@@ -138,7 +136,6 @@ def parse_nmap_xml(xml_output, interface, fakeMac):
ip = nm[host]['addresses']['ipv4'] if 'ipv4' in nm[host]['addresses'] else ''
mac = nm[host]['addresses']['mac'] if 'mac' in nm[host]['addresses'] else ''
mylog('verbose', [f'[{pluginName}] nm[host]: ', nm[host]])
vendor = ''
@@ -148,10 +145,8 @@ def parse_nmap_xml(xml_output, interface, fakeMac):
for key, value in nm[host]['vendor'].items():
vendor = value
break
# Log debug information
mylog('verbose', [f"[{pluginName}] Hostname: {hostname}, IP: {ip}, MAC: {mac}, Vendor: {vendor}"])
@@ -172,7 +167,6 @@ def parse_nmap_xml(xml_output, interface, fakeMac):
# MAC or IP missing
mylog('verbose', [f"[{pluginName}] Skipping: {hostname}, IP or MAC missing, or NMAPDEV_GENERATE_MAC setting not enabled"])
except Exception as e:
mylog('verbose', [f"[{pluginName}] Error parsing nmap XML: ", str(e)])
@@ -184,12 +178,13 @@ def string_to_mac_hash(input_string):
sha256_hash = hashlib.sha256(input_string.encode()).hexdigest()
# Take the first 12 characters of the hash and format as a MAC address
mac_hash = ':'.join(sha256_hash[i:i+2] for i in range(0, 12, 2))
mac_hash = ':'.join(sha256_hash[i:i + 2] for i in range(0, 12, 2))
return mac_hash
#===============================================================================
# ===============================================================================
# BEGIN
#===============================================================================
# ===============================================================================
if __name__ == '__main__':
main()

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python
# !/usr/bin/env python
import os
import argparse
@@ -9,13 +9,13 @@ import subprocess
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from plugin_helper import Plugin_Objects
from logger import mylog, Logger, append_line_to_file
from utils.datetime_utils import timeNowDB
from helper import get_setting_value
from const import logPath
import conf
from pytz import timezone
from plugin_helper import Plugin_Objects # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger, append_line_to_file # noqa: E402 [flake8 lint suppression]
from utils.datetime_utils import timeNowDB # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
from const import logPath # noqa: E402 [flake8 lint suppression]
import conf # noqa: E402 [flake8 lint suppression]
from pytz import timezone # noqa: E402 [flake8 lint suppression]
# Make sure the TIMEZONE for logging is correct
conf.tz = timezone(get_setting_value('TIMEZONE'))
@@ -32,7 +32,8 @@ RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
# Initialize the Plugin obj output file
plugin_objects = Plugin_Objects(RESULT_FILE)
#-------------------------------------------------------------------------------
# -------------------------------------------------------------------------------
def main():
parser = argparse.ArgumentParser(
description='Scan ports of devices specified by IP addresses'
@@ -100,8 +101,8 @@ def main():
plugin_objects.write_result_file()
#-------------------------------------------------------------------------------
# -------------------------------------------------------------------------------
class nmap_entry:
def __init__(self, ip, mac, time, port, state, service, name = '', extra = '', index = 0):
self.ip = ip
@@ -112,10 +113,10 @@ class nmap_entry:
self.service = service
self.extra = extra
self.index = index
self.hash = str(mac) + str(port)+ str(state)+ str(service)
self.hash = str(mac) + str(port) + str(state) + str(service)
#-------------------------------------------------------------------------------
# -------------------------------------------------------------------------------
def parse_kv_args(raw_args):
"""
Converts ['ips=a,b,c', 'macs=x,y,z', 'timeout=5'] to a dict.
@@ -136,7 +137,8 @@ def parse_kv_args(raw_args):
return parsed
#-------------------------------------------------------------------------------
# -------------------------------------------------------------------------------
def safe_split_list(value, keyname):
"""Split comma list safely and ensure no empty items."""
items = [x.strip() for x in value.split(',') if x.strip()]
@@ -144,7 +146,8 @@ def safe_split_list(value, keyname):
mylog('none', [f"[{pluginName}] Scan: {keyname} list is empty or invalid"])
return items
#-------------------------------------------------------------------------------
# -------------------------------------------------------------------------------
def performNmapScan(deviceIPs, deviceMACs, timeoutSec, args):
"""
run nmap scan on a list of devices
@@ -154,15 +157,12 @@ def performNmapScan(deviceIPs, deviceMACs, timeoutSec, args):
# collect ports / new Nmap Entries
newEntriesTmp = []
if len(deviceIPs) > 0:
devTotal = len(deviceIPs)
mylog('verbose', [f'[{pluginName}] Scan: Nmap for max ', str(timeoutSec), 's ('+ str(round(int(timeoutSec) / 60, 1)) +'min) per device'])
mylog('verbose', ["[NMAP Scan] Estimated max delay: ", (devTotal * int(timeoutSec)), 's ', '(', round((devTotal * int(timeoutSec))/60,1) , 'min)' ])
mylog('verbose', [f'[{pluginName}] Scan: Nmap for max ', str(timeoutSec), 's (' + str(round(int(timeoutSec) / 60, 1)) + 'min) per device'])
mylog('verbose', ["[NMAP Scan] Estimated max delay: ", (devTotal * int(timeoutSec)), 's ', '(', round((devTotal * int(timeoutSec)) / 60, 1) , 'min)'])
devIndex = 0
for ip in deviceIPs:
@@ -171,32 +171,34 @@ def performNmapScan(deviceIPs, deviceMACs, timeoutSec, args):
# prepare arguments from user supplied ones
nmapArgs = ['nmap'] + args.split() + [ip]
progress = ' (' + str(devIndex+1) + '/' + str(devTotal) + ')'
progress = ' (' + str(devIndex + 1) + '/' + str(devTotal) + ')'
try:
# try running a subprocess with a forced (timeout) in case the subprocess hangs
output = subprocess.check_output (nmapArgs, universal_newlines=True, stderr=subprocess.STDOUT, timeout=(float(timeoutSec)))
output = subprocess.check_output(
nmapArgs,
universal_newlines=True,
stderr=subprocess.STDOUT,
timeout=(float(timeoutSec))
)
except subprocess.CalledProcessError as e:
# An error occurred, handle it
mylog('none', ["[NMAP Scan] " ,e.output])
mylog('none', ["[NMAP Scan] ", e.output])
mylog('none', ["[NMAP Scan] ⚠ ERROR - Nmap Scan - check logs", progress])
except subprocess.TimeoutExpired:
mylog('verbose', [f'[{pluginName}] Nmap TIMEOUT - the process forcefully terminated as timeout reached for ', ip, progress])
if output == "": # check if the subprocess failed
mylog('minimal', [f'[{pluginName}] Nmap FAIL for ', ip, progress ,' check logs for details'])
mylog('minimal', [f'[{pluginName}] Nmap FAIL for ', ip, progress, ' check logs for details'])
else:
mylog('verbose', [f'[{pluginName}] Nmap SUCCESS for ', ip, progress])
# check the last run output
newLines = output.split('\n')
# regular logging
for line in newLines:
append_line_to_file (logPath + '/app_nmap.log', line +'\n')
append_line_to_file(logPath + '/app_nmap.log', line + '\n')
index = 0
startCollecting = False
@@ -204,7 +206,7 @@ def performNmapScan(deviceIPs, deviceMACs, timeoutSec, args):
newPortsPerDevice = 0
for line in newLines:
if 'Starting Nmap' in line:
if len(newLines) > index+1 and 'Note: Host seems down' in newLines[index+1]:
if len(newLines) > index + 1 and 'Note: Host seems down' in newLines[index + 1]:
break # this entry is empty
elif 'PORT' in line and 'STATE' in line and 'SERVICE' in line:
startCollecting = True
@@ -216,22 +218,16 @@ def performNmapScan(deviceIPs, deviceMACs, timeoutSec, args):
elif 'Nmap done' in line:
duration = line.split('scanned in ')[1]
mylog('verbose', [f'[{pluginName}] {newPortsPerDevice} ports found on {deviceMACs[devIndex]}'])
mylog('verbose', [f'[{pluginName}] {newPortsPerDevice} ports found on {deviceMACs[devIndex]} after {duration}'])
index += 1
devIndex += 1
#end for loop
return newEntriesTmp
#===============================================================================
# ===============================================================================
# BEGIN
#===============================================================================
# ===============================================================================
if __name__ == '__main__':
main()

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python
# !/usr/bin/env python
# test script by running:
# tbc
@@ -11,14 +11,14 @@ import re
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from plugin_helper import Plugin_Objects
from logger import mylog, Logger
from helper import get_setting_value
from const import logPath
from database import DB
from models.device_instance import DeviceInstance
import conf
from pytz import timezone
from plugin_helper import Plugin_Objects # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
from const import logPath # noqa: E402 [flake8 lint suppression]
from database import DB # noqa: E402 [flake8 lint suppression]
from models.device_instance import DeviceInstance # noqa: E402 [flake8 lint suppression]
import conf # noqa: E402 [flake8 lint suppression]
from pytz import timezone # noqa: E402 [flake8 lint suppression]
# Make sure the TIMEZONE for logging is correct
conf.tz = timezone(get_setting_value('TIMEZONE'))
@@ -33,12 +33,10 @@ LOG_FILE = os.path.join(LOG_PATH, f'script.{pluginName}.log')
RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
def main():
mylog('verbose', [f'[{pluginName}] In script'])
timeout = get_setting_value('NSLOOKUP_RUN_TIMEOUT')
# Create a database connection
@@ -75,19 +73,20 @@ def main():
watched3 = '',
watched4 = '',
extra = '',
foreignKey = device['devMac'])
foreignKey = device['devMac']
)
plugin_objects.write_result_file()
mylog('verbose', [f'[{pluginName}] Script finished'])
return 0
#===============================================================================
# ===============================================================================
# Execute scan
#===============================================================================
def execute_nslookup (ip, timeout):
# ===============================================================================
def execute_nslookup(ip, timeout):
"""
Execute the NSLOOKUP command on IP.
"""
@@ -99,7 +98,13 @@ def execute_nslookup (ip, timeout):
try:
# try running a subprocess with a forced (timeout) in case the subprocess hangs
output = subprocess.check_output (nslookup_args, universal_newlines=True, stderr=subprocess.STDOUT, timeout=(timeout), text=True)
output = subprocess.check_output(
nslookup_args,
universal_newlines=True,
stderr=subprocess.STDOUT,
timeout=(timeout),
text=True
)
domain_name = ''
dns_server = ''
@@ -110,7 +115,6 @@ def execute_nslookup (ip, timeout):
domain_pattern = re.compile(r'name\s*=\s*([^\s]+)', re.IGNORECASE)
server_pattern = re.compile(r'Server:\s+(.+)', re.IGNORECASE)
domain_match = domain_pattern.search(output)
server_match = server_pattern.search(output)
@@ -136,19 +140,15 @@ def execute_nslookup (ip, timeout):
except subprocess.TimeoutExpired:
mylog('verbose', [f'[{pluginName}] TIMEOUT - the process forcefully terminated as timeout reached'])
if output == "": # check if the subprocess failed
tmp = 1 # can't have empty
# mylog('verbose', [f'[{pluginName}] Scan: FAIL - check logs'])
else:
if output != "": # check if the subprocess failed
mylog('verbose', [f'[{pluginName}] Scan: SUCCESS'])
return '', ''
#===============================================================================
# ===============================================================================
# BEGIN
#===============================================================================
# ===============================================================================
if __name__ == '__main__':
main()

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python
# !/usr/bin/env python
__author__ = "ffsb"
__version__ = "0.1" # initial
__version__ = "0.2" # added logic to retry omada api call once as it seems to sometimes fail for some reasons, and error handling logic...
@@ -15,10 +15,9 @@ __version__ = "1.3" # fix detection of the default gateway IP address that woul
# try to identify and populate their connections by switch/accesspoints and ports/SSID
# try to differentiate root bridges from accessory
#
# sample code to update unbound on opnsense - for reference...
# curl -X POST -d '{"host":{"enabled":"1","hostname":"test","domain":"testdomain.com","rr":"A","mxprio":"","mx":"","server":"10.0.1.1","description":""}}' -H "Content-Type: application/json" -k -u $OPNS_KEY:$OPNS_SECRET https://$IPFW/api/unbound/settings/AddHostOverride
# curl -X POST -d '{"host":{"enabled":"1","hostname":"test","domain":"testdomain.com","rr":"A","mxprio":"","mx":"","server":"10.0.1.1","description":""}}'\
# -H "Content-Type: application/json" -k -u $OPNS_KEY:$OPNS_SECRET https://$IPFW/api/unbound/settings/AddHostOverride
#
import os
import sys
@@ -35,12 +34,12 @@ import multiprocessing
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from plugin_helper import Plugin_Objects
from logger import mylog, Logger
from const import logPath
from helper import get_setting_value
from pytz import timezone
import conf
from plugin_helper import Plugin_Objects # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from const import logPath # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
from pytz import timezone # noqa: E402 [flake8 lint suppression]
import conf # noqa: E402 [flake8 lint suppression]
# Make sure the TIMEZONE for logging is correct
conf.tz = timezone(get_setting_value('TIMEZONE'))
@@ -87,8 +86,6 @@ cMAC, cIP, cNAME, cSWITCH_AP, cPORT_SSID = range(5)
OMDLOGLEVEL = "debug"
#
# translate MAC address from standard ieee model to ietf draft
# AA-BB-CC-DD-EE-FF to aa:bb:cc:dd:ee:ff
# tplink adheres to ieee, Nax adheres to ietf
@@ -142,7 +139,7 @@ def callomada(myargs):
try:
mf = io.StringIO()
with redirect_stdout(mf):
bar = omada(myargs)
omada(myargs)
omada_output = mf.getvalue()
except Exception:
mylog(
@@ -207,8 +204,7 @@ def add_uplink(
# Determine port to uplink
if (
device_data_bymac[switch_mac].get(TYPE) == "Switch"
and device_data_bymac[uplink_mac].get(TYPE) == "Switch"
device_data_bymac[switch_mac].get(TYPE) == "Switch" and device_data_bymac[uplink_mac].get(TYPE) == "Switch"
):
port_to_uplink = port_byswitchmac_byclientmac.get(switch_mac, {}).get(uplink_mac)
if port_to_uplink is None:
@@ -223,9 +219,7 @@ def add_uplink(
# Recursively add uplinks for linked devices
for link in sadevices_linksbymac.get(switch_mac, []):
if (
link in device_data_bymac
and device_data_bymac[link].get(SWITCH_AP) in [None, "null"]
and device_data_bymac[switch_mac].get(TYPE) == "Switch"
link in device_data_bymac and device_data_bymac[link].get(SWITCH_AP) in [None, "null"] and device_data_bymac[switch_mac].get(TYPE) == "Switch"
):
add_uplink(
switch_mac,
@@ -236,7 +230,6 @@ def add_uplink(
)
# ----------------------------------------------
# Main initialization
def main():
@@ -369,7 +362,6 @@ def get_omada_devices_details(msadevice_data):
mswitch_dump = callomada(["-t", "myomada", "switch", "-d", mthisswitch])
else:
mswitch_detail = ""
nswitch_dump = ""
return mswitch_detail, mswitch_dump
@@ -414,7 +406,6 @@ def get_device_data(omada_clients_output, switches_and_aps, device_handler):
# 17:27:10 [<unique_prefix>] token: "['1A-2B-3C-4D-5E-6F', '192.168.0.217', '1A-2B-3C-4D-5E-6F', '17', '40-AE-30-A5-A7-50, 'Switch']"
# constants
sadevices_macbyname = {}
sadevices_macbymac = {}
sadevices_linksbymac = {}
port_byswitchmac_byclientmac = {}
device_data_bymac = {}
@@ -556,11 +547,11 @@ def get_device_data(omada_clients_output, switches_and_aps, device_handler):
#
naxname = real_naxname
if real_naxname != None:
if real_naxname is not None:
if "(" in real_naxname:
# removing parenthesis and domains from the name
naxname = real_naxname.split("(")[0]
if naxname != None and "." in naxname:
if naxname is not None and "." in naxname:
naxname = naxname.split(".")[0]
if naxname in (None, "null", ""):
naxname = (

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python
# !/usr/bin/env python
"""
This plugin imports devices and clients from Omada Controller using their OpenAPI.
@@ -25,7 +25,6 @@ import sys
import urllib3
import requests
import time
import datetime
import pytz
from datetime import datetime
@@ -35,11 +34,11 @@ from typing import Literal, Any, Dict
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from plugin_helper import Plugin_Objects, is_typical_router_ip, is_mac
from logger import mylog, Logger
from const import logPath
from helper import get_setting_value
import conf
from plugin_helper import Plugin_Objects, is_typical_router_ip, is_mac # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from const import logPath # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
import conf # noqa: E402 [flake8 lint suppression]
# Make sure the TIMEZONE for logging is correct
conf.tz = pytz.timezone(get_setting_value('TIMEZONE'))
@@ -176,7 +175,10 @@ class OmadaHelper:
# If it's not a gateway try to assign parent node MAC
if data.get("type", "") != "gateway":
parent_mac = OmadaHelper.normalize_mac(data.get("uplinkDeviceMac"))
entry["parent_node_mac_address"] = parent_mac.get("response_result") if isinstance(parent_mac, dict) and parent_mac.get("response_type") == "success" else ""
resp_type = parent_mac.get("response_type")
entry["parent_node_mac_address"] = parent_mac.get("response_result") if isinstance(parent_mac, dict) and resp_type == "success" else ""
# Applicable only for CLIENT
if input_type == "client":
@@ -185,15 +187,26 @@ class OmadaHelper:
# Try to assign parent node MAC and PORT/SSID to the CLIENT
if data.get("connectDevType", "") == "gateway":
parent_mac = OmadaHelper.normalize_mac(data.get("gatewayMac"))
entry["parent_node_mac_address"] = parent_mac.get("response_result") if isinstance(parent_mac, dict) and parent_mac.get("response_type") == "success" else ""
resp_type = parent_mac.get("response_type")
entry["parent_node_mac_address"] = parent_mac.get("response_result") if isinstance(parent_mac, dict) and resp_type == "success" else ""
entry["parent_node_port"] = data.get("port", "")
elif data.get("connectDevType", "") == "switch":
parent_mac = OmadaHelper.normalize_mac(data.get("switchMac"))
entry["parent_node_mac_address"] = parent_mac.get("response_result") if isinstance(parent_mac, dict) and parent_mac.get("response_type") == "success" else ""
resp_type = parent_mac.get("response_type")
entry["parent_node_mac_address"] = parent_mac.get("response_result") if isinstance(parent_mac, dict) and resp_type == "success" else ""
entry["parent_node_port"] = data.get("port", "")
elif data.get("connectDevType", "") == "ap":
parent_mac = OmadaHelper.normalize_mac(data.get("apMac"))
entry["parent_node_mac_address"] = parent_mac.get("response_result") if isinstance(parent_mac, dict) and parent_mac.get("response_type") == "success" else ""
resp_type = parent_mac.get("response_type")
entry["parent_node_mac_address"] = parent_mac.get("response_result") if isinstance(parent_mac, dict) and resp_type == "success" else ""
entry["parent_node_ssid"] = data.get("ssid", "")
# Add the entry to the result
@@ -253,7 +266,7 @@ class OmadaAPI:
"""Return request headers."""
headers = {"Content-type": "application/json"}
# Add access token to header if requested and available
if include_auth == True:
if include_auth is True:
if not self.access_token:
OmadaHelper.debug("No access token available for headers")
else:

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python
# !/usr/bin/env python
"""
NetAlertX plugin: PIHOLEAPI
Imports devices from Pi-hole v6 API (Network endpoints) into NetAlertX plugin results.
@@ -17,12 +17,12 @@ sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
pluginName = 'PIHOLEAPI'
from plugin_helper import Plugin_Objects, is_mac
from logger import mylog, Logger
from helper import get_setting_value
from const import logPath
import conf
from pytz import timezone
from plugin_helper import Plugin_Objects, is_mac # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
from const import logPath # noqa: E402 [flake8 lint suppression]
import conf # noqa: E402 [flake8 lint suppression]
from pytz import timezone # noqa: E402 [flake8 lint suppression]
# Setup timezone & logger using standard NAX helpers
conf.tz = timezone(get_setting_value('TIMEZONE'))

View File

@@ -5,18 +5,18 @@ import os
import re
import base64
import json
from datetime import datetime
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.append(f"{INSTALL_PATH}/front/plugins")
sys.path.append(f'{INSTALL_PATH}/server')
from logger import mylog, Logger
from utils.datetime_utils import timeNowDB
from const import default_tz, fullConfPath
from logger import mylog # noqa: E402 [flake8 lint suppression]
from utils.datetime_utils import timeNowDB # noqa: E402 [flake8 lint suppression]
from const import default_tz, fullConfPath # noqa: E402 [flake8 lint suppression]
#-------------------------------------------------------------------------------
# -------------------------------------------------------------------------------
def read_config_file():
"""
retuns dict on the config file key:value pairs
@@ -25,8 +25,8 @@ def read_config_file():
filename = fullConfPath
print('[plugin_helper] reading config file')
# load the variables from .conf
with open(filename, "r") as file:
code = compile(file.read(), filename, "exec")
@@ -42,6 +42,7 @@ if timeZoneSetting not in all_timezones:
timeZoneSetting = default_tz
timeZone = pytz.timezone(timeZoneSetting)
# -------------------------------------------------------------------
# Sanitizes plugin output
def handleEmpty(input):
@@ -55,6 +56,7 @@ def handleEmpty(input):
input = input.replace('\n', '') # Removing new lines
return input
# -------------------------------------------------------------------
# Sanitizes string
def rmBadChars(input):
@@ -64,6 +66,7 @@ def rmBadChars(input):
return input
# -------------------------------------------------------------------
# check if this is a router IP
def is_typical_router_ip(ip_address):
@@ -82,6 +85,7 @@ def is_typical_router_ip(ip_address):
return ip_address in common_router_ips
# -------------------------------------------------------------------
# Check if a valid MAC address
def is_mac(input):
@@ -94,6 +98,7 @@ def is_mac(input):
return isMac
# -------------------------------------------------------------------
def decodeBase64(inputParamBase64):
@@ -102,14 +107,11 @@ def decodeBase64(inputParamBase64):
print('[Plugins] Helper base64 input: ')
print(input)
# Extract the base64-encoded subnet information from the first element
# The format of the element is assumed to be like 'param=b<base64-encoded-data>'.
# Printing the extracted base64-encoded information.
mylog('debug', ['[Plugins] Helper base64 inputParamBase64: ', inputParamBase64])
# Decode the base64-encoded subnet information to get the actual subnet information in ASCII format.
result = base64.b64decode(inputParamBase64).decode('ascii')
@@ -118,6 +120,7 @@ def decodeBase64(inputParamBase64):
return result
# -------------------------------------------------------------------
def decode_settings_base64(encoded_str, convert_types=True):
"""
@@ -180,6 +183,7 @@ def normalize_mac(mac):
return normalized_mac
# -------------------------------------------------------------------
class Plugin_Object:
"""
@@ -243,6 +247,7 @@ class Plugin_Object:
)
return line
class Plugin_Objects:
"""
Plugin_Objects is the class that manages and holds all the objects created by the plugin.
@@ -303,7 +308,3 @@ class Plugin_Objects:
def __len__(self):
return len(self.objects)

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python
# !/usr/bin/env python
from __future__ import unicode_literals
import subprocess
@@ -10,12 +10,12 @@ import sys
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from plugin_helper import Plugin_Objects, handleEmpty, normalize_mac
from logger import mylog, Logger
from helper import get_setting_value
from const import logPath
import conf
from pytz import timezone
from plugin_helper import Plugin_Objects, handleEmpty, normalize_mac # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
from const import logPath # noqa: E402 [flake8 lint suppression]
import conf # noqa: E402 [flake8 lint suppression]
from pytz import timezone # noqa: E402 [flake8 lint suppression]
# Make sure the TIMEZONE for logging is correct
conf.tz = timezone(get_setting_value('TIMEZONE'))
@@ -28,7 +28,6 @@ pluginName = "SNMPDSC"
LOG_PATH = logPath + '/plugins'
RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
# Workflow
def main():
mylog('verbose', ['[SNMPDSC] In script '])
@@ -36,9 +35,13 @@ def main():
# init global variables
global snmpWalkCmds
parser = argparse.ArgumentParser(description='This plugin is used to discover devices via the arp table(s) of a RFC1213 compliant router or switch.')
parser.add_argument('routers', action="store", help="IP(s) of routers, separated by comma (,) if passing multiple")
parser.add_argument(
'routers',
action="store",
help="IP(s) of routers, separated by comma (,) if passing multiple"
)
values = parser.parse_args()
timeoutSetting = get_setting_value("SNMPDSC_RUN_TIMEOUT")
@@ -46,8 +49,7 @@ def main():
plugin_objects = Plugin_Objects(RESULT_FILE)
if values.routers:
snmpWalkCmds = values.routers.split('=')[1].replace('\'','')
snmpWalkCmds = values.routers.split('=')[1].replace('\'', '')
if ',' in snmpWalkCmds:
commands = snmpWalkCmds.split(',')
@@ -63,7 +65,12 @@ def main():
probes = 1 # N probes
for _ in range(probes):
output = subprocess.check_output (snmpwalkArgs, universal_newlines=True, stderr=subprocess.STDOUT, timeout=(timeoutSetting))
output = subprocess.check_output(
snmpwalkArgs,
universal_newlines=True,
stderr=subprocess.STDOUT,
timeout=(timeoutSetting)
)
mylog('verbose', ['[SNMPDSC] output: ', output])
@@ -95,7 +102,6 @@ def main():
else:
mylog('verbose', ['[SNMPDSC] ipStr does not seem to contain a valid IP:', ipStr])
elif line.startswith('ipNetToMediaPhysAddress'):
# Format: snmpwalk -OXsq output
parts = line.split()
@@ -120,7 +126,6 @@ def main():
plugin_objects.write_result_file()
# BEGIN
if __name__ == '__main__':
main()

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python
# !/usr/bin/env python
import os
import sys
@@ -12,16 +12,16 @@ import base64
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from plugin_helper import Plugin_Objects
from utils.plugin_utils import get_plugins_configs, decode_and_rename_files
from logger import mylog, Logger
from const import fullDbPath, logPath
from helper import get_setting_value
from utils.datetime_utils import timeNowDB
from utils.crypto_utils import encrypt_data
from messaging.in_app import write_notification
import conf
from pytz import timezone
from plugin_helper import Plugin_Objects # noqa: E402 [flake8 lint suppression]
from utils.plugin_utils import get_plugins_configs, decode_and_rename_files # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from const import fullDbPath, logPath # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
from utils.datetime_utils import timeNowDB # noqa: E402 [flake8 lint suppression]
from utils.crypto_utils import encrypt_data # noqa: E402 [flake8 lint suppression]
from messaging.in_app import write_notification # noqa: E402 [flake8 lint suppression]
import conf # noqa: E402 [flake8 lint suppression]
from pytz import timezone # noqa: E402 [flake8 lint suppression]
# Make sure the TIMEZONE for logging is correct
conf.tz = timezone(get_setting_value('TIMEZONE'))
@@ -102,7 +102,6 @@ def main():
else:
mylog('verbose', [f'[{pluginName}] {file_path} not found'])
# PUSHING/SENDING devices
if send_devices:
@@ -150,7 +149,6 @@ def main():
if lggr.isAbove('verbose'):
write_notification(message, 'info', timeNowDB())
# Process any received data for the Device DB table (ONLY JSON)
# Create the file path
@@ -210,7 +208,6 @@ def main():
cursor.execute(f'SELECT devMac FROM Devices WHERE devMac IN ({placeholders})', tuple(unique_mac_addresses))
existing_mac_addresses = set(row[0] for row in cursor.fetchall())
# insert devices into the last_result.log and thus CurrentScan table to manage state
for device in device_data:
# only insert devices taht were online and skip the root node to prevent IP flipping on the hub
@@ -258,7 +255,6 @@ def main():
mylog('verbose', [message])
write_notification(message, 'info', timeNowDB())
# Commit and close the connection
conn.commit()
conn.close()
@@ -268,6 +264,7 @@ def main():
return 0
# ------------------------------------------------------------------
# Data retrieval methods
api_endpoints = [
@@ -275,6 +272,7 @@ api_endpoints = [
"/plugins/sync/hub.php" # Legacy PHP endpoint
]
# send data to the HUB
def send_data(api_token, file_content, encryption_key, file_path, node_name, pref, hub_url):
"""Send encrypted data to HUB, preferring /sync endpoint and falling back to PHP version."""
@@ -345,6 +343,5 @@ def get_data(api_token, node_url):
return ""
if __name__ == '__main__':
main()

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python
# !/usr/bin/env python
import os
import sys
@@ -10,12 +10,11 @@ from unifi_sm_api.api import SiteManagerAPI
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from plugin_helper import Plugin_Objects, decode_settings_base64
from logger import mylog, Logger
from const import logPath
from helper import get_setting_value
import conf
from plugin_helper import Plugin_Objects, decode_settings_base64 # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from const import logPath # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
import conf # noqa: E402 [flake8 lint suppression]
# Make sure the TIMEZONE for logging is correct
conf.tz = timezone(get_setting_value('TIMEZONE'))
@@ -75,7 +74,7 @@ def main():
watched1 = device['dev_name'], # name
watched2 = device['dev_type'], # device_type (AP/Switch etc)
watched3 = device['dev_connected'], # connectedAt or empty
watched4 = device['dev_parent_mac'],# parent_mac or "Internet"
watched4 = device['dev_parent_mac'], # parent_mac or "Internet"
extra = '',
foreignKey = device['dev_mac']
)
@@ -87,6 +86,7 @@ def main():
return 0
# retrieve data
def get_device_data(site, api):
device_data = []

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python
# !/usr/bin/env python
# Inspired by https://github.com/stevehoek/Pi.Alert
from __future__ import unicode_literals
@@ -14,12 +14,12 @@ from pyunifi.controller import Controller
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from plugin_helper import Plugin_Objects, rmBadChars, is_typical_router_ip, is_mac
from logger import mylog, Logger
from helper import get_setting_value, normalize_string
import conf
from pytz import timezone
from const import logPath
from plugin_helper import Plugin_Objects, rmBadChars, is_typical_router_ip, is_mac # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value, normalize_string # noqa: E402 [flake8 lint suppression]
import conf # noqa: E402 [flake8 lint suppression]
from pytz import timezone # noqa: E402 [flake8 lint suppression]
from const import logPath # noqa: E402 [flake8 lint suppression]
# Make sure the TIMEZONE for logging is correct
conf.tz = timezone(get_setting_value('TIMEZONE'))
@@ -37,15 +37,10 @@ LOCK_FILE = os.path.join(LOG_PATH, f'full_run.{pluginName}.lock')
urllib3.disable_warnings(InsecureRequestWarning)
# Workflow
def main():
mylog('verbose', [f'[{pluginName}] In script'])
# init global variables
global UNIFI_USERNAME, UNIFI_PASSWORD, UNIFI_HOST, UNIFI_SITES, PORT, VERIFYSSL, VERSION, FULL_IMPORT
@@ -65,11 +60,10 @@ def main():
plugin_objects.write_result_file()
mylog('verbose', [f'[{pluginName}] Scan finished, found {len(plugin_objects)} devices'])
# .............................................
# .............................................
def get_entries(plugin_objects: Plugin_Objects) -> Plugin_Objects:
global VERIFYSSL
@@ -79,7 +73,6 @@ def get_entries(plugin_objects: Plugin_Objects) -> Plugin_Objects:
mylog('verbose', [f'[{pluginName}] sites: {UNIFI_SITES}'])
if (VERIFYSSL.upper() == "TRUE"):
VERIFYSSL = True
else:
@@ -154,11 +147,9 @@ def get_entries(plugin_objects: Plugin_Objects) -> Plugin_Objects:
mylog('verbose', [f'[{pluginName}] Found {len(plugin_objects)} Users'])
mylog('verbose', [f'[{pluginName}] check if Lock file needs to be modified'])
set_lock_file_value(FULL_IMPORT, lock_file_value)
mylog('verbose', [f'[{pluginName}] Found {len(plugin_objects)} Clients overall'])
return plugin_objects
@@ -185,7 +176,7 @@ def collect_details(device_type, devices, online_macs, processed_macs, plugin_ob
parentMac = 'Internet'
# Add object only if not processed
if macTmp not in processed_macs and ( status == 1 or force_import is True ):
if macTmp not in processed_macs and (status == 1 or force_import is True):
plugin_objects.add_object(
primaryId=macTmp,
secondaryId=ipTmp,
@@ -204,6 +195,7 @@ def collect_details(device_type, devices, online_macs, processed_macs, plugin_ob
else:
mylog('verbose', [f'[{pluginName}] Skipping, not a valid MAC address: {macTmp}'])
# -----------------------------------------------------------------------------
def get_unifi_val(obj, key, default='null'):
if isinstance(obj, dict):
@@ -212,7 +204,7 @@ def get_unifi_val(obj, key, default='null'):
for k, v in obj.items():
if isinstance(v, dict):
result = get_unifi_val(v, key, default)
if result not in ['','None', None, 'null']:
if result not in ['', 'None', None, 'null']:
return result
mylog('trace', [f'[{pluginName}] Value not found for key "{key}" in obj "{json.dumps(obj)}"'])
@@ -226,6 +218,7 @@ def get_name(*names: str) -> str:
return rmBadChars(name)
return 'null'
# -----------------------------------------------------------------------------
def get_parent_mac(*macs: str) -> str:
for mac in macs:
@@ -233,6 +226,7 @@ def get_parent_mac(*macs: str) -> str:
return mac
return 'null'
# -----------------------------------------------------------------------------
def get_port(*ports: str) -> str:
for port in ports:
@@ -240,12 +234,6 @@ def get_port(*ports: str) -> str:
return port
return 'null'
# -----------------------------------------------------------------------------
def get_port(*macs: str) -> str:
for mac in macs:
if mac and mac != 'null':
return mac
return 'null'
# -----------------------------------------------------------------------------
def get_ip(*ips: str) -> str:
@@ -286,15 +274,16 @@ def read_lock_file() -> bool:
# -----------------------------------------------------------------------------
def check_full_run_state(config_value: str, lock_file_value: bool) -> bool:
if config_value == 'always' or (config_value == 'once' and lock_file_value == False):
if config_value == 'always' or (config_value == 'once' and lock_file_value is False):
mylog('verbose', [f'[{pluginName}] Full import needs to be done: config_value: {config_value} and lock_file_value: {lock_file_value}'])
return True
else:
mylog('verbose', [f'[{pluginName}] Full import NOT needed: config_value: {config_value} and lock_file_value: {lock_file_value}'])
return False
#===============================================================================
# ===============================================================================
# BEGIN
#===============================================================================
# ===============================================================================
if __name__ == '__main__':
main()

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python
# !/usr/bin/env python
import os
import sys
@@ -9,13 +9,13 @@ import sqlite3
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from plugin_helper import Plugin_Objects, handleEmpty
from logger import mylog, Logger
from helper import get_setting_value
from const import logPath, applicationPath, fullDbPath
from scan.device_handling import query_MAC_vendor
import conf
from pytz import timezone
from plugin_helper import Plugin_Objects, handleEmpty # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
from const import logPath, applicationPath, fullDbPath # noqa: E402 [flake8 lint suppression]
from scan.device_handling import query_MAC_vendor # noqa: E402 [flake8 lint suppression]
import conf # noqa: E402 [flake8 lint suppression]
from pytz import timezone # noqa: E402 [flake8 lint suppression]
# Make sure the TIMEZONE for logging is correct
conf.tz = timezone(get_setting_value('TIMEZONE'))
@@ -25,11 +25,11 @@ Logger(get_setting_value('LOG_LEVEL'))
pluginName = 'VNDRPDT'
LOG_PATH = logPath + '/plugins'
LOG_FILE = os.path.join(LOG_PATH, f'script.{pluginName}.log')
RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
def main():
mylog('verbose', ['[VNDRPDT] In script'])
@@ -48,9 +48,10 @@ def main():
return 0
#===============================================================================
# ===============================================================================
# Update device vendors database
#===============================================================================
# ===============================================================================
def update_vendor_database():
# Update vendors DB (iab oui)
@@ -60,30 +61,29 @@ def update_vendor_database():
# Execute command
try:
# try runnning a subprocess safely
update_output = subprocess.check_output (update_args)
subprocess.check_output(update_args)
except subprocess.CalledProcessError as e:
# An error occured, handle it
mylog('verbose', [' FAILED: Updating vendors DB, set LOG_LEVEL=debug for more info'])
mylog('verbose', [e.output])
# ------------------------------------------------------------------------------
# resolve missing vendors
def update_vendors (dbPath, plugin_objects):
def update_vendors(dbPath, plugin_objects):
# Connect to the App SQLite database
conn = sqlite3.connect(dbPath)
sql = conn.cursor()
# Initialize variables
recordsToUpdate = []
ignored = 0
notFound = 0
mylog('verbose', [' Searching devices vendor'])
# Get devices without a vendor
sql.execute ("""SELECT
sql.execute("""SELECT
devMac,
devLastIP,
devName,
@@ -103,7 +103,7 @@ def update_vendors (dbPath, plugin_objects):
# All devices loop
for device in devices:
# Search vendor in HW Vendors DB
vendor = query_MAC_vendor (device[0])
vendor = query_MAC_vendor(device[0])
if vendor == -1 :
notFound += 1
elif vendor == -2 :
@@ -124,15 +124,13 @@ def update_vendors (dbPath, plugin_objects):
mylog('verbose', [" Devices Ignored : ", ignored])
mylog('verbose', [" Devices with missing vendor : ", len(devices)])
mylog('verbose', [" Vendors Not Found : ", notFound])
mylog('verbose', [" Vendors updated : ", len(plugin_objects) ])
mylog('verbose', [" Vendors updated : ", len(plugin_objects)])
return plugin_objects
#===============================================================================
# ===============================================================================
# BEGIN
#===============================================================================
# ===============================================================================
if __name__ == '__main__':
main()

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python
# !/usr/bin/env python
import os
import sys
@@ -9,13 +9,13 @@ from wakeonlan import send_magic_packet
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from plugin_helper import Plugin_Objects
from logger import mylog, Logger
from const import logPath
from helper import get_setting_value
from database import DB
from models.device_instance import DeviceInstance
import conf
from plugin_helper import Plugin_Objects # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from const import logPath # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
from database import DB # noqa: E402 [flake8 lint suppression]
from models.device_instance import DeviceInstance # noqa: E402 [flake8 lint suppression]
import conf # noqa: E402 [flake8 lint suppression]
# Make sure the TIMEZONE for logging is correct
conf.tz = timezone(get_setting_value('TIMEZONE'))
@@ -34,7 +34,6 @@ RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
plugin_objects = Plugin_Objects(RESULT_FILE)
def main():
mylog('none', [f'[{pluginName}] In script'])
@@ -95,6 +94,7 @@ def main():
return 0
# wake
def execute(port, ip, mac, name):
@@ -113,5 +113,6 @@ def execute(port, ip, mac, name):
# Return the data result
return result
if __name__ == '__main__':
main()

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python
# !/usr/bin/env python
# Based on the work of https://github.com/leiweibau/Pi.Alert
import requests
@@ -12,12 +12,12 @@ from urllib3.exceptions import InsecureRequestWarning
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from plugin_helper import Plugin_Objects
from const import logPath
from helper import get_setting_value
import conf
from pytz import timezone
from logger import mylog, Logger
from plugin_helper import Plugin_Objects # noqa: E402 [flake8 lint suppression]
from const import logPath # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
import conf # noqa: E402 [flake8 lint suppression]
from pytz import timezone # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
# Make sure the TIMEZONE for logging is correct
conf.tz = timezone(get_setting_value('TIMEZONE'))
@@ -30,16 +30,15 @@ pluginName = 'WEBMON'
LOG_PATH = logPath + '/plugins'
RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
mylog('verbose', [f'[{pluginName}] In script'])
def main():
values = get_setting_value('WEBMON_urls_to_check')
mylog('verbose', [f'[{pluginName}] Checking URLs: {values}'])
if len(values) > 0:
plugin_objects = Plugin_Objects(RESULT_FILE)
# plugin_objects = service_monitoring(values.urls.split('=')[1].split(','), plugin_objects)
@@ -48,6 +47,7 @@ def main():
else:
return
def check_services_health(site):
mylog('verbose', [f'[{pluginName}] Checking {site}'])
@@ -79,6 +79,7 @@ def check_services_health(site):
return status, latency
def service_monitoring(urls, plugin_objects):
for site in urls:
status, latency = check_services_health(site)
@@ -94,7 +95,6 @@ def service_monitoring(urls, plugin_objects):
)
return plugin_objects
if __name__ == '__main__':
sys.exit(main())

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python3
# !/usr/bin/env python3
import os
import sys

View File

@@ -1,5 +1,5 @@
#!/usr/bin/env python3
# !/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
NetAlertX-New-Devices-Checkmk-Script
@@ -19,6 +19,7 @@ import subprocess
import json
import os
def check_new_devices():
# Get API path from environment variable, fallback to /tmp/api
api_path = os.environ.get('NETALERTX_API', '/tmp/api')
@@ -73,6 +74,6 @@ def check_new_devices():
)
print(f"1 NetAlertX_New_Devices - WARNING - Found {len(new_devices)} new device(s): {device_list_str}")
if __name__ == "__main__":
check_new_devices()

View File

@@ -1,8 +1,8 @@
#!/usr/bin/env python3
# !/usr/bin/env python3
import subprocess
import sys
import os
def run_sqlite_command(command):
# Use environment variable with fallback
db_path = os.path.join(
@@ -19,6 +19,7 @@ def run_sqlite_command(command):
print(f"Error executing command: {e}")
return None
def check_and_clean_device():
while True:
print("\nDevice Cleanup Tool")
@@ -113,5 +114,6 @@ def check_and_clean_device():
else:
print("\nInvalid option, please try again")
if __name__ == "__main__":
check_and_clean_device()

View File

@@ -1,6 +1,5 @@
#!/usr/bin/env python3
import paramiko
import re
from datetime import datetime
import argparse
import sys
@@ -8,6 +7,9 @@ from pathlib import Path
import time
import logging
logger = None
def setup_logging(debug=False):
"""Configure logging based on debug flag."""
level = logging.DEBUG if debug else logging.INFO
@@ -18,6 +20,7 @@ def setup_logging(debug=False):
)
return logging.getLogger(__name__)
def parse_timestamp(date_str):
"""Convert OPNsense timestamp to Unix epoch time."""
try:
@@ -27,7 +30,7 @@ def parse_timestamp(date_str):
dt = datetime.strptime(clean_date, '%Y/%m/%d %H:%M:%S')
return int(dt.timestamp())
except Exception as e:
logger.error(f"Failed to parse timestamp: {date_str}")
logger.error(f"Failed to parse timestamp: {date_str} ({e})")
return None
@@ -39,8 +42,14 @@ def get_lease_file(hostname, username, password=None, key_filename=None, port=22
try:
logger.debug(f"Attempting to connect to {hostname}:{port} as {username}")
ssh.connect(hostname, port=port, username=username,
password=password, key_filename=key_filename)
ssh.connect(
hostname,
port=port,
username=username,
password=password,
key_filename=key_filename
)
# Get an interactive shell session
logger.debug("Opening interactive SSH channel")
@@ -75,10 +84,23 @@ def get_lease_file(hostname, username, password=None, key_filename=None, port=22
# Clean up the output by removing the command echo and shell prompts
lines = output.split('\n')
# Remove first line (command echo) and any lines containing shell prompts
cleaned_lines = [line for line in lines
if not line.strip().startswith(command.strip())
and not line.strip().endswith('> ')
and not line.strip().endswith('# ')]
# cleaned_lines = [line for line in lines
# if not line.strip().startswith(command.strip()) and not line.strip().endswith('> ') and not line.strip().endswith('# ')]
cmd = command.strip()
cleaned_lines = []
for line in lines:
stripped = line.strip()
if stripped.startswith(cmd):
continue
if stripped.endswith('> '):
continue
if stripped.endswith('# '):
continue
cleaned_lines.append(line)
cleaned_output = '\n'.join(cleaned_lines)
logger.debug(f"Final cleaned output length: {len(cleaned_output)} characters")
@@ -156,9 +178,7 @@ def parse_lease_file(lease_content):
# Filter only active leases
active_leases = [lease for lease in leases
if lease.get('state') == 'active'
and 'mac' in lease
and 'ip' in lease]
if lease.get('state') == 'active' and 'mac' in lease and 'ip' in lease]
logger.debug(f"Found {len(active_leases)} active leases out of {len(leases)} total leases")
logger.debug("Active leases:")
@@ -206,6 +226,7 @@ def convert_to_dnsmasq(leases):
return dnsmasq_lines
def main():
parser = argparse.ArgumentParser(description='Convert OPNsense DHCP leases to dnsmasq format')
parser.add_argument('--host', required=True, help='OPNsense hostname or IP')
@@ -219,6 +240,7 @@ def main():
args = parser.parse_args()
# Setup logging
global logger
logger = setup_logging(args.debug)
try:
@@ -255,5 +277,6 @@ def main():
logger.error(f"Error: {str(e)}")
sys.exit(1)
if __name__ == '__main__':
main()

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python
# !/usr/bin/env python
#
# -------------------------------------------------------------------------------
# NetAlertX v2.70 / 2021-02-01
@@ -22,7 +22,7 @@ from pathlib import Path
# Register NetAlertX modules
import conf
from const import *
from const import fullConfPath, sql_new_devices
from logger import mylog
from helper import filePermissions
from utils.datetime_utils import timeNowTZ
@@ -111,7 +111,7 @@ def main():
loop_start_time = conf.loop_start_time # TODO fix
# Handle plugins executed ONCE
if conf.plugins_once_run == False:
if conf.plugins_once_run is False:
pm.run_plugin_scripts("once")
conf.plugins_once_run = True
@@ -146,7 +146,7 @@ def main():
processScan = updateState("Check scan").processScan
mylog("debug", [f"[MAIN] processScan: {processScan}"])
if processScan == True:
if processScan is True:
mylog("debug", "[MAIN] start processing scan results")
process_scan(db)
updateState("Scan processed", None, None, None, None, False)

View File

@@ -1,3 +1,4 @@
# !/usr/bin/env python
import json
import time
import threading
@@ -145,8 +146,7 @@ class api_endpoint_class:
self.needsUpdate = True
# Only update changeDetectedWhen if it hasn't been set recently
if not self.changeDetectedWhen or current_time > (
self.changeDetectedWhen
+ datetime.timedelta(seconds=self.debounce_interval)
self.changeDetectedWhen + datetime.timedelta(seconds=self.debounce_interval)
):
self.changeDetectedWhen = (
current_time # Set timestamp for change detection
@@ -164,8 +164,7 @@ class api_endpoint_class:
self.needsUpdate = True
# Only update changeDetectedWhen if it hasn't been set recently
if not self.changeDetectedWhen or current_time > (
self.changeDetectedWhen
+ datetime.timedelta(seconds=self.debounce_interval)
self.changeDetectedWhen + datetime.timedelta(seconds=self.debounce_interval)
):
self.changeDetectedWhen = (
current_time # Initialize timestamp for new endpoint
@@ -180,17 +179,15 @@ class api_endpoint_class:
current_time = timeNowTZ()
# Debugging info to understand the issue
# mylog('debug', [f'[API] api_endpoint_class: {self.fileName} is_ad_hoc_user_event {self.is_ad_hoc_user_event} last_update_time={self.last_update_time}, debounce time={self.last_update_time + datetime.timedelta(seconds=self.debounce_interval)}.'])
# mylog('debug', [f'[API] api_endpoint_class: {self.fileName} is_ad_hoc_user_event
# {self.is_ad_hoc_user_event} last_update_time={self.last_update_time},
# debounce time={self.last_update_time + datetime.timedelta(seconds=self.debounce_interval)}.'])
# Only attempt to write if the debounce time has passed
if forceUpdate == True or (
self.needsUpdate
and (
self.changeDetectedWhen is None
or current_time
> (
self.changeDetectedWhen
+ datetime.timedelta(seconds=self.debounce_interval)
if forceUpdate is True or (
self.needsUpdate and (
self.changeDetectedWhen is None or current_time > (
self.changeDetectedWhen + datetime.timedelta(seconds=self.debounce_interval)
)
)
):

View File

@@ -9,25 +9,68 @@ from flask_cors import CORS
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
sys.path.extend([f"{INSTALL_PATH}/server"])
from logger import mylog
from helper import get_setting_value
from db.db_helper import get_date_from_period
from app_state import updateState
from logger import mylog # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
from db.db_helper import get_date_from_period # noqa: E402 [flake8 lint suppression]
from app_state import updateState # noqa: E402 [flake8 lint suppression]
from .graphql_endpoint import devicesSchema
from .device_endpoint import get_device_data, set_device_data, delete_device, delete_device_events, reset_device_props, copy_device, update_device_column
from .devices_endpoint import get_all_devices, delete_unknown_devices, delete_all_with_empty_macs, delete_devices, export_devices, import_csv, devices_totals, devices_by_status
from .events_endpoint import delete_events, delete_events_older_than, get_events, create_event, get_events_totals
from .history_endpoint import delete_online_history
from .prometheus_endpoint import get_metric_stats
from .sessions_endpoint import get_sessions, delete_session, create_session, get_sessions_calendar, get_device_sessions, get_session_events
from .nettools_endpoint import wakeonlan, traceroute, speedtest, nslookup, nmap_scan, internet_info
from .dbquery_endpoint import read_query, write_query, update_query, delete_query
from .sync_endpoint import handle_sync_post, handle_sync_get
from .logs_endpoint import clean_log
from models.user_events_queue_instance import UserEventsQueueInstance
from messaging.in_app import write_notification, mark_all_notifications_read, delete_notifications, get_unread_notifications, delete_notification, mark_notification_as_read
from .graphql_endpoint import devicesSchema # noqa: E402 [flake8 lint suppression]
from .device_endpoint import ( # noqa: E402 [flake8 lint suppression]
get_device_data,
set_device_data,
delete_device,
delete_device_events,
reset_device_props,
copy_device,
update_device_column
)
from .devices_endpoint import ( # noqa: E402 [flake8 lint suppression]
get_all_devices,
delete_unknown_devices,
delete_all_with_empty_macs,
delete_devices,
export_devices,
import_csv,
devices_totals,
devices_by_status
)
from .events_endpoint import ( # noqa: E402 [flake8 lint suppression]
delete_events,
delete_events_older_than,
get_events,
create_event,
get_events_totals
)
from .history_endpoint import delete_online_history # noqa: E402 [flake8 lint suppression]
from .prometheus_endpoint import get_metric_stats # noqa: E402 [flake8 lint suppression]
from .sessions_endpoint import ( # noqa: E402 [flake8 lint suppression]
get_sessions,
delete_session,
create_session,
get_sessions_calendar,
get_device_sessions,
get_session_events
)
from .nettools_endpoint import ( # noqa: E402 [flake8 lint suppression]
wakeonlan,
traceroute,
speedtest,
nslookup,
nmap_scan,
internet_info
)
from .dbquery_endpoint import read_query, write_query, update_query, delete_query # noqa: E402 [flake8 lint suppression]
from .sync_endpoint import handle_sync_post, handle_sync_get # noqa: E402 [flake8 lint suppression]
from .logs_endpoint import clean_log # noqa: E402 [flake8 lint suppression]
from models.user_events_queue_instance import UserEventsQueueInstance # noqa: E402 [flake8 lint suppression]
from messaging.in_app import ( # noqa: E402 [flake8 lint suppression]
write_notification,
mark_all_notifications_read,
delete_notifications,
get_unread_notifications,
delete_notification,
mark_notification_as_read
)
# Flask application
app = Flask(__name__)
@@ -50,6 +93,7 @@ CORS(
allow_headers=["Authorization", "Content-Type"],
)
# -------------------------------------------------------------------
# Custom handler for 404 - Route not found
# -------------------------------------------------------------------
@@ -363,7 +407,13 @@ def dbquery_update():
data = request.get_json() or {}
required = ["columnName", "id", "dbtable", "columns", "values"]
if not all(data.get(k) for k in required):
return jsonify({"success": False, "message": "ERROR: Missing parameters", "error": "Missing required 'columnName', 'id', 'dbtable', 'columns', or 'values' query parameter"}), 400
return jsonify(
{
"success": False,
"message": "ERROR: Missing parameters",
"error": "Missing required 'columnName', 'id', 'dbtable', 'columns', or 'values' query parameter"
}
), 400
return update_query(
column_name=data["columnName"],
@@ -418,6 +468,7 @@ def api_clean_log():
return clean_log(file)
@app.route("/logs/add-to-execution-queue", methods=["POST"])
def api_add_to_execution_queue():
@@ -673,6 +724,7 @@ def api_mark_notification_read(guid):
else:
return jsonify({"success": False, "message": "ERROR", "error": result.get("error")}), 500
# --------------------------
# SYNC endpoint
# --------------------------

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python
# !/usr/bin/env python
import os
import base64
@@ -9,7 +9,7 @@ from flask import jsonify
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from database import get_temp_db_connection
from database import get_temp_db_connection # noqa: E402 [flake8 lint suppression]
def read_query(raw_sql_b64):

View File

@@ -1,18 +1,17 @@
#!/usr/bin/env python
# !/usr/bin/env python
import os
import sys
from datetime import datetime
from flask import jsonify, request
# Register NetAlertX directories
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from database import get_temp_db_connection
from helper import is_random_mac, get_setting_value
from utils.datetime_utils import timeNowDB, format_date
from db.db_helper import row_to_json, get_date_from_period
from database import get_temp_db_connection # noqa: E402 [flake8 lint suppression]
from helper import is_random_mac, get_setting_value # noqa: E402 [flake8 lint suppression]
from utils.datetime_utils import timeNowDB, format_date # noqa: E402 [flake8 lint suppression]
from db.db_helper import row_to_json, get_date_from_period # noqa: E402 [flake8 lint suppression]
# --------------------------
# Device Endpoints Functions

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python
# !/usr/bin/env python
import os
import base64
@@ -14,16 +14,13 @@ from logger import mylog
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from database import get_temp_db_connection
from db.db_helper import get_table_json, get_device_condition_by_status
from utils.datetime_utils import format_date
from database import get_temp_db_connection # noqa: E402 [flake8 lint suppression]
from db.db_helper import get_table_json, get_device_condition_by_status # noqa: E402 [flake8 lint suppression]
# --------------------------
# Device Endpoints Functions
# --------------------------
def get_all_devices():
"""Retrieve all devices from the database."""
conn = get_temp_db_connection()
@@ -139,7 +136,6 @@ def export_devices(export_format):
def import_csv(file_storage=None):
data = ""
skipped = []
error = None
# 1. Try JSON `content` (base64-encoded CSV)
if request.is_json and request.json.get("content"):

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python
# !/usr/bin/env python
import os
import sys
@@ -9,10 +9,10 @@ from flask import jsonify
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from database import get_temp_db_connection
from helper import is_random_mac, mylog
from db.db_helper import row_to_json, get_date_from_period
from utils.datetime_utils import format_date, format_date_iso, format_event_date, ensure_datetime
from database import get_temp_db_connection # noqa: E402 [flake8 lint suppression]
from helper import mylog # noqa: E402 [flake8 lint suppression]
from db.db_helper import row_to_json, get_date_from_period # noqa: E402 [flake8 lint suppression]
from utils.datetime_utils import ensure_datetime # noqa: E402 [flake8 lint suppression]
# --------------------------

View File

@@ -1,5 +1,7 @@
import graphene
from graphene import ObjectType, String, Int, Boolean, List, Field, InputObjectType, Argument
from graphene import (
ObjectType, String, Int, Boolean, List, Field, InputObjectType, Argument
)
import json
import sys
import os
@@ -8,9 +10,9 @@ import os
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
sys.path.extend([f"{INSTALL_PATH}/server"])
from logger import mylog
from const import apiPath
from helper import (
from logger import mylog # noqa: E402 [flake8 lint suppression]
from const import apiPath # noqa: E402 [flake8 lint suppression]
from helper import ( # noqa: E402 [flake8 lint suppression]
is_random_mac,
get_number_of_children,
format_ip_long,
@@ -113,10 +115,12 @@ class SettingResult(ObjectType):
# --- LANGSTRINGS ---
# In-memory cache for lang strings
_langstrings_cache = {} # caches lists per file (core JSON or plugin)
_langstrings_cache_mtime = {} # tracks last modified times
# LangString ObjectType
class LangString(ObjectType):
langCode = String()
@@ -128,6 +132,7 @@ class LangStringResult(ObjectType):
langStrings = List(LangString)
count = Int()
# Define Query Type with Pagination Support
class Query(ObjectType):
# --- DEVICES ---
@@ -184,31 +189,39 @@ class Query(ObjectType):
if (device.get("devParentRelType") not in hidden_relationships)
]
devices_data = [
device
for device in devices_data
if (
(
device["devPresentLastScan"] == 1
and "online" in allowed_statuses
filtered = []
for device in devices_data:
is_online = (
device["devPresentLastScan"] == 1 and "online" in allowed_statuses
)
or (device["devIsNew"] == 1 and "new" in allowed_statuses)
or (
device["devPresentLastScan"] == 0
and device["devAlertDown"]
and "down" in allowed_statuses
is_new = (
device["devIsNew"] == 1 and "new" in allowed_statuses
)
or (
device["devPresentLastScan"] == 0
and "offline" in allowed_statuses
is_down = (
device["devPresentLastScan"] == 0 and device["devAlertDown"] and "down" in allowed_statuses
)
and device["devIsArchived"] == 0
or (
device["devIsArchived"] == 1
and "archived" in allowed_statuses
is_offline = (
device["devPresentLastScan"] == 0 and "offline" in allowed_statuses
)
is_archived = (
device["devIsArchived"] == 1 and "archived" in allowed_statuses
)
]
# Matches if not archived and status matches OR it is archived and allowed
matches = (
(is_online or is_new or is_down or is_offline) and device["devIsArchived"] == 0
) or is_archived
if matches:
filtered.append(device)
devices_data = filtered
elif status == "connected":
devices_data = [
device
@@ -257,8 +270,7 @@ class Query(ObjectType):
devices_data = [
device
for device in devices_data
if str(device.get(filter.filterColumn, "")).lower()
== str(filter.filterValue).lower()
if str(device.get(filter.filterColumn, "")).lower() == str(filter.filterValue).lower()
]
# Search data if a search term is provided
@@ -437,11 +449,11 @@ class Query(ObjectType):
if en_fallback:
langStrings[i] = en_fallback[0]
mylog('trace', f'[graphql_schema] Collected {len(langStrings)} language strings '
f'(langCode={langCode}, key={langStringKey}, fallback_to_en={fallback_to_en})')
mylog('trace', f'[graphql_schema] Collected {len(langStrings)} language strings (langCode={langCode}, key={langStringKey}, fallback_to_en={fallback_to_en})')
return LangStringResult(langStrings=langStrings, count=len(langStrings))
# helps sorting inconsistent dataset mixed integers and strings
def mixed_type_sort_key(value):
if value is None or value == "":

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python
# !/usr/bin/env python
import os
import sys
@@ -8,7 +8,7 @@ from flask import jsonify
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from database import get_temp_db_connection
from database import get_temp_db_connection # noqa: E402 [flake8 lint suppression]
# --------------------------------------------------

View File

@@ -3,18 +3,18 @@ import sys
from flask import jsonify
# Register NetAlertX directories
INSTALL_PATH="/app"
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from const import logPath
from logger import mylog, Logger
from helper import get_setting_value
from utils.datetime_utils import timeNowDB
from messaging.in_app import write_notification
from const import logPath # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
from messaging.in_app import write_notification # noqa: E402 [flake8 lint suppression]
# Make sure log level is initialized correctly
Logger(get_setting_value('LOG_LEVEL'))
def clean_log(log_file):
"""
Purge the content of an allowed log file within the /app/log/ directory.
@@ -55,4 +55,3 @@ def clean_log(log_file):
mylog('none', [msg])
write_notification(msg, 'interrupt')
return jsonify({"success": False, "message": msg}), 500

View File

@@ -6,8 +6,8 @@ import os
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
sys.path.extend([f"{INSTALL_PATH}/server"])
from logger import mylog
from const import apiPath
from logger import mylog # noqa: E402 [flake8 lint suppression]
from const import apiPath # noqa: E402 [flake8 lint suppression]
def escape_label_value(val):

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python
# !/usr/bin/env python
import os
import sqlite3
@@ -9,10 +9,10 @@ from flask import jsonify
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from database import get_temp_db_connection
from helper import is_random_mac, get_setting_value, mylog, format_ip_long
from db.db_helper import row_to_json, get_date_from_period
from utils.datetime_utils import format_date_iso, format_event_date, format_date_diff, parse_datetime, format_date
from database import get_temp_db_connection # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value, format_ip_long # noqa: E402 [flake8 lint suppression]
from db.db_helper import get_date_from_period # noqa: E402 [flake8 lint suppression]
from utils.datetime_utils import format_date_iso, format_event_date, format_date_diff, format_date # noqa: E402 [flake8 lint suppression]
# --------------------------
@@ -162,8 +162,7 @@ def get_sessions_calendar(start_date, end_date):
# Determine color
if (
row["ses_EventTypeConnection"] == "<missing event>"
or row["ses_EventTypeDisconnection"] == "<missing event>"
row["ses_EventTypeConnection"] == "<missing event>" or row["ses_EventTypeDisconnection"] == "<missing event>"
):
color = "#f39c12"
elif row["ses_StillConnected"] == 1:
@@ -337,8 +336,7 @@ def get_session_events(event_type, period_date):
sql = sql_events
elif event_type == "sessions":
sql = (
sql_sessions
+ f"""
sql_sessions + f"""
WHERE (
ses_DateTimeConnection >= {period_date}
OR ses_DateTimeDisconnection >= {period_date}
@@ -348,8 +346,7 @@ def get_session_events(event_type, period_date):
)
elif event_type == "missing":
sql = (
sql_sessions
+ f"""
sql_sessions + f"""
WHERE (
(ses_DateTimeConnection IS NULL AND ses_DateTimeDisconnection >= {period_date})
OR (ses_DateTimeDisconnection IS NULL AND ses_StillConnected = 0 AND ses_DateTimeConnection >= {period_date})

View File

@@ -1,7 +1,7 @@
import os
import json
from const import *
from const import applicationPath, apiPath
from logger import mylog
from helper import checkNewVersion
from utils.datetime_utils import timeNowDB, timeNow
@@ -32,14 +32,17 @@ class app_state_class:
isNewVersionChecked (int): Timestamp of last version check.
"""
def __init__(self, currentState=None,
def __init__(
self,
currentState=None,
settingsSaved=None,
settingsImported=None,
showSpinner=None,
graphQLServerStarted=0,
processScan=False,
pluginsStates=None,
appVersion=None):
appVersion=None
):
"""
Initialize the application state, optionally overwriting previous values.
@@ -182,14 +185,16 @@ def updateState(newState = None,
Returns:
app_state_class: Updated state object.
"""
return app_state_class( newState,
return app_state_class(
newState,
settingsSaved,
settingsImported,
showSpinner,
graphQLServerStarted,
processScan,
pluginsStates,
appVersion)
appVersion
)
# -------------------------------------------------------------------------------

View File

@@ -180,7 +180,7 @@ class DB:
# Init the AppEvent database table
AppEvent_obj(self)
# #-------------------------------------------------------------------------------
# # -------------------------------------------------------------------------------
# def get_table_as_json(self, sqlQuery):
# # mylog('debug',[ '[Database] - get_table_as_json - Query: ', sqlQuery])

View File

@@ -6,8 +6,8 @@ import os
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
sys.path.extend([f"{INSTALL_PATH}/server"])
from helper import if_byte_then_to_str
from logger import mylog
from helper import if_byte_then_to_str # noqa: E402 [flake8 lint suppression]
from logger import mylog # noqa: E402 [flake8 lint suppression]
# -------------------------------------------------------------------------------

View File

@@ -5,8 +5,8 @@ import os
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
sys.path.extend([f"{INSTALL_PATH}/server"])
from logger import mylog
from messaging.in_app import write_notification
from logger import mylog # noqa: E402 [flake8 lint suppression]
from messaging.in_app import write_notification # noqa: E402 [flake8 lint suppression]
def ensure_column(sql, table: str, column_name: str, column_type: str) -> bool:

View File

@@ -18,7 +18,7 @@ from typing import Dict, List, Tuple, Any, Optional
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
sys.path.extend([f"{INSTALL_PATH}/server"])
from logger import mylog
from logger import mylog # noqa: E402 [flake8 lint suppression]
class SafeConditionBuilder:
@@ -494,8 +494,6 @@ class SafeConditionBuilder:
if logical_op and not self._validate_logical_operator(logical_op):
raise ValueError(f"Invalid logical operator: {logical_op}")
# Parse values from the IN clause
values = []
# Simple regex to extract quoted values
value_pattern = r"'([^']*)'"
matches = re.findall(value_pattern, values_str)

View File

@@ -7,25 +7,22 @@ import os
import re
import unicodedata
import subprocess
import pytz
import json
import requests
import base64
import hashlib
import random
import email
import string
import ipaddress
import conf
from const import *
from const import applicationPath, fullConfPath, fullDbPath, dbPath, confPath, apiPath
from logger import mylog, logResult
# Register NetAlertX directories using runtime configuration
INSTALL_PATH = applicationPath
# -------------------------------------------------------------------------------
# File system permission handling
# -------------------------------------------------------------------------------
@@ -58,12 +55,6 @@ def checkPermissionsOK():
return (confR_access, dbR_access)
# -------------------------------------------------------------------------------
def fixPermissions():
# Try fixing access rights if needed
chmodCommands = []
# -------------------------------------------------------------------------------
def initialiseFile(pathToCheck, defaultFile):
# if file not readable (missing?) try to copy over the backed-up (default) one
@@ -71,9 +62,7 @@ def initialiseFile(pathToCheck, defaultFile):
mylog(
"none",
[
"[Setup] ("
+ pathToCheck
+ ") file is not readable or missing. Trying to copy over the default one."
"[Setup] (" + pathToCheck + ") file is not readable or missing. Trying to copy over the default one."
],
)
try:
@@ -89,22 +78,14 @@ def initialiseFile(pathToCheck, defaultFile):
mylog(
"none",
[
"[Setup] ⚠ ERROR copying ("
+ defaultFile
+ ") to ("
+ pathToCheck
+ "). Make sure the app has Read & Write access to the parent directory."
"[Setup] ⚠ ERROR copying (" + defaultFile + ") to (" + pathToCheck + "). Make sure the app has Read & Write access to the parent directory."
],
)
else:
mylog(
"none",
[
"[Setup] ("
+ defaultFile
+ ") copied over successfully to ("
+ pathToCheck
+ ")."
"[Setup] (" + defaultFile + ") copied over successfully to (" + pathToCheck + ")."
],
)
@@ -116,10 +97,7 @@ def initialiseFile(pathToCheck, defaultFile):
mylog(
"none",
[
"[Setup] ⚠ ERROR copying ("
+ defaultFile
+ "). Make sure the app has Read & Write access to "
+ pathToCheck
"[Setup] ⚠ ERROR copying (" + defaultFile + "). Make sure the app has Read & Write access to " + pathToCheck
],
)
mylog("none", [e.output])
@@ -130,16 +108,13 @@ def filePermissions():
# check and initialize .conf
(confR_access, dbR_access) = checkPermissionsOK() # Initial check
if confR_access == False:
if confR_access is False:
initialiseFile(fullConfPath, f"{INSTALL_PATH}/back/app.conf")
# check and initialize .db
if dbR_access == False:
if dbR_access is False:
initialiseFile(fullDbPath, f"{INSTALL_PATH}/back/app.db")
# last attempt
fixPermissions()
# -------------------------------------------------------------------------------
# File manipulation methods
@@ -413,17 +388,12 @@ def setting_value_to_python_type(set_type, set_value):
value = set_value
elif (
dataType == "string"
and elementType == "input"
and any(opt.get("readonly") == "true" for opt in elementOptions)
dataType == "string" and elementType == "input" and any(opt.get("readonly") == "true" for opt in elementOptions)
):
value = reverseTransformers(str(set_value), transformers)
elif (
dataType == "string"
and elementType == "input"
and any(opt.get("type") == "password" for opt in elementOptions)
and "sha256" in transformers
dataType == "string" and elementType == "input" and any(opt.get("type") == "password" for opt in elementOptions) and "sha256" in transformers
):
value = hashlib.sha256(set_value.encode()).hexdigest()
@@ -602,23 +572,23 @@ def normalize_string(text):
# -------------------------------------------------------------------------------
# -------------------------------------------------------------------------------------------
def is_random_mac(mac: str) -> bool:
"""Determine if a MAC address is random, respecting user-defined prefixes not to mark as random."""
# # -------------------------------------------------------------------------------------------
# def is_random_mac(mac: str) -> bool:
# """Determine if a MAC address is random, respecting user-defined prefixes not to mark as random."""
is_random = mac[1].upper() in ["2", "6", "A", "E"]
# is_random = mac[1].upper() in ["2", "6", "A", "E"]
# Get prefixes from settings
prefixes = get_setting_value("UI_NOT_RANDOM_MAC")
# # Get prefixes from settings
# prefixes = get_setting_value("UI_NOT_RANDOM_MAC")
# If detected as random, make sure it doesn't start with a prefix the user wants to exclude
if is_random:
for prefix in prefixes:
if mac.upper().startswith(prefix.upper()):
is_random = False
break
# # If detected as random, make sure it doesn't start with a prefix the user wants to exclude
# if is_random:
# for prefix in prefixes:
# if mac.upper().startswith(prefix.upper()):
# is_random = False
# break
return is_random
# return is_random
# -------------------------------------------------------------------------------------------
@@ -653,6 +623,7 @@ def extract_ip_addresses(text):
# -------------------------------------------------------------------------------
# Helper function to determine if a MAC address is random
def is_random_mac(mac):
"""Determine if a MAC address is random, respecting user-defined prefixes not to mark as random."""
# Check if second character matches "2", "6", "A", "E" (case insensitive)
is_random = mac[1].upper() in ["2", "6", "A", "E"]
@@ -773,7 +744,6 @@ def getBuildTimeStampAndVersion():
return tuple(results)
# -------------------------------------------------------------------------------
def checkNewVersion():
mylog("debug", ["[Version check] Checking if new version available"])

View File

@@ -9,8 +9,8 @@ import re
# Register NetAlertX libraries
import conf
from const import fullConfPath, applicationPath, fullConfFolder, default_tz
from helper import getBuildTimeStampAndVersion, fixPermissions, collect_lang_strings, updateSubnets, isJsonObject, setting_value_to_python_type, get_setting_value, generate_random_string
from const import fullConfPath, fullConfFolder, default_tz
from helper import getBuildTimeStampAndVersion, fixPermissions, collect_lang_strings, updateSubnets, generate_random_string
from utils.datetime_utils import timeNowDB
from app_state import updateState
from logger import mylog
@@ -19,7 +19,6 @@ from scheduler import schedule_class
from plugin import plugin_manager, print_plugin_info
from utils.plugin_utils import get_plugins_configs, get_set_value_for_init
from messaging.in_app import write_notification
from utils.crypto_utils import get_random_bytes
# ===============================================================================
# Initialise user defined values
@@ -59,7 +58,7 @@ def ccd(
result = default
# Use existing value if already supplied, otherwise default value is used
if forceDefault == False and key in config_dir:
if forceDefault is False and key in config_dir:
result = config_dir[key]
# Single quotes might break SQL queries, replacing them
@@ -216,7 +215,7 @@ def importConfigs(pm, db, all_plugins):
[],
c_d,
"Loaded plugins",
'{"dataType":"array","elements":[{"elementType":"select","elementHasInputValue":1,"elementOptions":[{"multiple":"true","ordeable":"true"}],"transformers":[]},{"elementType":"button","elementOptions":[{"sourceSuffixes":[]},{"separator":""},{"cssClasses":"col-xs-12"},{"onClick":"selectChange(this)"},{"getStringKey":"Gen_Change"}],"transformers":[]}]}',
'{"dataType":"array","elements":[{"elementType":"select","elementHasInputValue":1,"elementOptions":[{"multiple":"true","ordeable":"true"}],"transformers":[]},{"elementType":"button","elementOptions":[{"sourceSuffixes":[]},{"separator":""},{"cssClasses":"col-xs-12"},{"onClick":"selectChange(this)"},{"getStringKey":"Gen_Change"}],"transformers":[]}]}', # noqa: E501
"[]",
"General",
)
@@ -234,7 +233,7 @@ def importConfigs(pm, db, all_plugins):
["192.168.1.0/24 --interface=eth1", "192.168.1.0/24 --interface=eth0"],
c_d,
"Subnets to scan",
"""{"dataType": "array","elements": [{"elementType": "input","elementOptions": [{"placeholder": "192.168.1.0/24 --interface=eth1"},{"suffix": "_in"},{"cssClasses": "col-sm-10"},{"prefillValue": "null"}],"transformers": []},{"elementType": "button","elementOptions": [{"sourceSuffixes": ["_in"]},{"separator": ""},{"cssClasses": "col-xs-12"},{"onClick": "addList(this, false)"},{"getStringKey": "Gen_Add"}],"transformers": []},{"elementType": "select","elementHasInputValue": 1,"elementOptions": [{"multiple": "true"},{"readonly": "true"},{"editable": "true"}],"transformers": []},{"elementType": "button","elementOptions": [{"sourceSuffixes": []},{"separator": ""},{"cssClasses": "col-xs-6"},{"onClick": "removeAllOptions(this)"},{"getStringKey": "Gen_Remove_All"}],"transformers": []},{"elementType": "button","elementOptions": [{"sourceSuffixes": []},{"separator": ""},{"cssClasses": "col-xs-6"},{"onClick": "removeFromList(this)"},{"getStringKey": "Gen_Remove_Last"}],"transformers": []}]}""",
"""{"dataType": "array","elements": [{"elementType": "input","elementOptions": [{"placeholder": "192.168.1.0/24 --interface=eth1"},{"suffix": "_in"},{"cssClasses": "col-sm-10"},{"prefillValue": "null"}],"transformers": []},{"elementType": "button","elementOptions": [{"sourceSuffixes": ["_in"]},{"separator": ""},{"cssClasses": "col-xs-12"},{"onClick": "addList(this, false)"},{"getStringKey": "Gen_Add"}],"transformers": []},{"elementType": "select","elementHasInputValue": 1,"elementOptions": [{"multiple": "true"},{"readonly": "true"},{"editable": "true"}],"transformers": []},{"elementType": "button","elementOptions": [{"sourceSuffixes": []},{"separator": ""},{"cssClasses": "col-xs-6"},{"onClick": "removeAllOptions(this)"},{"getStringKey": "Gen_Remove_All"}],"transformers": []},{"elementType": "button","elementOptions": [{"sourceSuffixes": []},{"separator": ""},{"cssClasses": "col-xs-6"},{"onClick": "removeFromList(this)"},{"getStringKey": "Gen_Remove_Last"}],"transformers": []}]}""", # noqa: E501 - inline JSON
"[]",
"General",
)
@@ -356,7 +355,7 @@ def importConfigs(pm, db, all_plugins):
],
c_d,
"Network device types",
'{"dataType":"array","elements":[{"elementType":"input","elementOptions":[{"placeholder":"Enter value"},{"suffix":"_in"},{"cssClasses":"col-sm-10"},{"prefillValue":"null"}],"transformers":[]},{"elementType":"button","elementOptions":[{"sourceSuffixes":["_in"]},{"separator":""},{"cssClasses":"col-xs-12"},{"onClick":"addList(this,false)"},{"getStringKey":"Gen_Add"}],"transformers":[]},{"elementType":"select", "elementHasInputValue":1,"elementOptions":[{"multiple":"true"},{"readonly":"true"},{"editable":"true"}],"transformers":[]},{"elementType":"button","elementOptions":[{"sourceSuffixes":[]},{"separator":""},{"cssClasses":"col-xs-6"},{"onClick":"removeAllOptions(this)"},{"getStringKey":"Gen_Remove_All"}],"transformers":[]},{"elementType":"button","elementOptions":[{"sourceSuffixes":[]},{"separator":""},{"cssClasses":"col-xs-6"},{"onClick":"removeFromList(this)"},{"getStringKey":"Gen_Remove_Last"}],"transformers":[]}]}',
'{"dataType":"array","elements":[{"elementType":"input","elementOptions":[{"placeholder":"Enter value"},{"suffix":"_in"},{"cssClasses":"col-sm-10"},{"prefillValue":"null"}],"transformers":[]},{"elementType":"button","elementOptions":[{"sourceSuffixes":["_in"]},{"separator":""},{"cssClasses":"col-xs-12"},{"onClick":"addList(this,false)"},{"getStringKey":"Gen_Add"}],"transformers":[]},{"elementType":"select", "elementHasInputValue":1,"elementOptions":[{"multiple":"true"},{"readonly":"true"},{"editable":"true"}],"transformers":[]},{"elementType":"button","elementOptions":[{"sourceSuffixes":[]},{"separator":""},{"cssClasses":"col-xs-6"},{"onClick":"removeAllOptions(this)"},{"getStringKey":"Gen_Remove_All"}],"transformers":[]},{"elementType":"button","elementOptions":[{"sourceSuffixes":[]},{"separator":""},{"cssClasses":"col-xs-6"},{"onClick":"removeFromList(this)"},{"getStringKey":"Gen_Remove_Last"}],"transformers":[]}]}', # noqa: E501 - inline JSON
"[]",
"General",
)
@@ -374,7 +373,7 @@ def importConfigs(pm, db, all_plugins):
"t_" + generate_random_string(20),
c_d,
"API token",
'{"dataType": "string","elements": [{"elementType": "input","elementHasInputValue": 1,"elementOptions": [{ "cssClasses": "col-xs-12" }],"transformers": []},{"elementType": "button","elementOptions": [{ "getStringKey": "Gen_Generate" },{ "customParams": "API_TOKEN" },{ "onClick": "generateApiToken(this, 20)" },{ "cssClasses": "col-xs-12" }],"transformers": []}]}',
'{"dataType": "string","elements": [{"elementType": "input","elementHasInputValue": 1,"elementOptions": [{ "cssClasses": "col-xs-12" }],"transformers": []},{"elementType": "button","elementOptions": [{ "getStringKey": "Gen_Generate" },{ "customParams": "API_TOKEN" },{ "onClick": "generateApiToken(this, 20)" },{ "cssClasses": "col-xs-12" }],"transformers": []}]}', # noqa: E501 - inline JSON
"[]",
"General",
)
@@ -386,7 +385,7 @@ def importConfigs(pm, db, all_plugins):
c_d,
"Language Interface",
'{"dataType":"string", "elements": [{"elementType" : "select", "elementOptions" : [] ,"transformers": []}]}',
"['English (en_us)', 'Arabic (ar_ar)', 'Catalan (ca_ca)', 'Czech (cs_cz)', 'German (de_de)', 'Spanish (es_es)', 'Farsi (fa_fa)', 'French (fr_fr)', 'Italian (it_it)', 'Japanese (ja_jp)', 'Norwegian (nb_no)', 'Polish (pl_pl)', 'Portuguese (pt_br)', 'Portuguese (pt_pt)', 'Russian (ru_ru)', 'Swedish (sv_sv)', 'Turkish (tr_tr)', 'Ukrainian (uk_ua)', 'Chinese (zh_cn)']",
"['English (en_us)', 'Arabic (ar_ar)', 'Catalan (ca_ca)', 'Czech (cs_cz)', 'German (de_de)', 'Spanish (es_es)', 'Farsi (fa_fa)', 'French (fr_fr)', 'Italian (it_it)', 'Japanese (ja_jp)', 'Norwegian (nb_no)', 'Polish (pl_pl)', 'Portuguese (pt_br)', 'Portuguese (pt_pt)', 'Russian (ru_ru)', 'Swedish (sv_sv)', 'Turkish (tr_tr)', 'Ukrainian (uk_ua)', 'Chinese (zh_cn)']", # noqa: E501 - inline JSON
"UI",
)
@@ -483,9 +482,7 @@ def importConfigs(pm, db, all_plugins):
# only include loaded plugins, and the ones that are enabled
if (
pref in conf.LOADED_PLUGINS
or plugin_run != "disabled"
or plugin_run is None
pref in conf.LOADED_PLUGINS or plugin_run != "disabled" or plugin_run is None
):
print_plugin_info(plugin, ["display_name", "description"])
@@ -524,9 +521,7 @@ def importConfigs(pm, db, all_plugins):
if "popupForm" in option:
for popup_entry in option["popupForm"]:
popup_pref = (
key
+ "_popupform_"
+ popup_entry.get("function", "")
key + "_popupform_" + popup_entry.get("function", "")
)
stringSqlParams = collect_lang_strings(
popup_entry, popup_pref, stringSqlParams
@@ -606,7 +601,7 @@ def importConfigs(pm, db, all_plugins):
# Loop through settings_override dictionary
for setting_name, value in settings_override.items():
# Ensure the value is treated as a string and passed directly
if isinstance(value, str) == False:
if isinstance(value, str) is False:
value = str(value)
# Log the value being passed
@@ -684,8 +679,16 @@ def importConfigs(pm, db, all_plugins):
# ccd(key, default, config_dir, name, inputtype, options, group, events=None, desc="", setJsonMetadata=None, overrideTemplate=None, forceDefault=False)
ccd('VERSION', new_version , c_d, '_KEEP_', '_KEEP_', '_KEEP_', '_KEEP_', None, "_KEEP_", None, None, True)
write_notification(f'[Upgrade] : App upgraded from <code>{prev_version}</code> to <code>{new_version}</code> 🚀 Please clear the cache: <ol> <li>Click OK below</li> <li>Clear the browser cache (shift + browser refresh button)</li> <li> Clear app cache with the <i class="fa-solid fa-rotate"></i> (reload) button in the header</li><li>Go to Settings and click Save</li> </ol> Check out new features and what has changed in the <a href="https://github.com/jokob-sk/NetAlertX/releases" target="_blank">📓 release notes</a>.', 'interrupt', timeNowDB())
write_notification(f'[Upgrade]: App upgraded from <code>{prev_version}</code> to \
<code>{new_version}</ code> 🚀 Please clear the cache: \
<ol> <li>Click OK below</li> <li>Clear the browser cache (shift + \
browser refresh button)</li> <li> Clear app cache with the <i class="fa-solid fa-rotate"></i> \
(reload) button in the header</li><li>Go to Settings and click Save</li> </ol>\
Check out new features and what has changed in the \
<a href="https://github.com/jokob-sk/NetAlertX/releases" target="_blank">📓 release notes</a>.',
'interrupt',
timeNowDB()
)
# -----------------
# Initialization finished, update DB and API endpoints

View File

@@ -1,19 +1,14 @@
import sys
import io
import datetime
# import datetime
import threading
import queue
import logging
from zoneinfo import ZoneInfo
# Register NetAlertX directories
INSTALL_PATH="/app"
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
# from zoneinfo import ZoneInfo
# NetAlertX imports
import conf
from const import *
from const import logPath
from utils.datetime_utils import timeNowTZ

View File

@@ -11,13 +11,9 @@ from flask import jsonify
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
sys.path.extend([f"{INSTALL_PATH}/server"])
from const import apiPath
from logger import mylog
import conf
from const import applicationPath, logPath, apiPath, confFileName, reportTemplatesPath
from logger import mylog
from utils.datetime_utils import timeNowDB
from const import apiPath # noqa: E402 [flake8 lint suppression]
from logger import mylog # noqa: E402 [flake8 lint suppression]
from utils.datetime_utils import timeNowDB # noqa: E402 [flake8 lint suppression]
NOTIFICATION_API_FILE = apiPath + 'user_notifications.json'

View File

@@ -18,12 +18,12 @@ import sys
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
sys.path.extend([f"{INSTALL_PATH}/server"])
from helper import (
from helper import ( # noqa: E402 [flake8 lint suppression]
get_setting_value,
)
from logger import mylog
from db.sql_safe_builder import create_safe_condition_builder
from utils.datetime_utils import get_timezone_offset
from logger import mylog # noqa: E402 [flake8 lint suppression]
from db.sql_safe_builder import create_safe_condition_builder # noqa: E402 [flake8 lint suppression]
from utils.datetime_utils import get_timezone_offset # noqa: E402 [flake8 lint suppression]
# ===============================================================================
# REPORTING
@@ -79,7 +79,13 @@ def get_notifications(db):
safe_condition, parameters = condition_builder.get_safe_condition_legacy(
new_dev_condition_setting
)
sqlQuery = """SELECT eve_MAC as MAC, eve_DateTime as Datetime, devLastIP as IP, eve_EventType as "Event Type", devName as "Device name", devComments as Comments FROM Events_Devices
sqlQuery = """SELECT
eve_MAC as MAC,
eve_DateTime as Datetime,
devLastIP as IP,
eve_EventType as "Event Type",
devName as "Device name",
devComments as Comments FROM Events_Devices
WHERE eve_PendingAlertEmail = 1
AND eve_EventType = 'New Device' {}
ORDER BY eve_DateTime""".format(safe_condition)
@@ -89,7 +95,13 @@ def get_notifications(db):
["[Notification] Error building safe condition for new devices: ", e],
)
# Fall back to safe default (no additional conditions)
sqlQuery = """SELECT eve_MAC as MAC, eve_DateTime as Datetime, devLastIP as IP, eve_EventType as "Event Type", devName as "Device name", devComments as Comments FROM Events_Devices
sqlQuery = """SELECT
eve_MAC as MAC,
eve_DateTime as Datetime,
devLastIP as IP,
eve_EventType as "Event Type",
devName as "Device name",
devComments as Comments FROM Events_Devices
WHERE eve_PendingAlertEmail = 1
AND eve_EventType = 'New Device'
ORDER BY eve_DateTime"""
@@ -181,7 +193,13 @@ def get_notifications(db):
safe_condition, parameters = condition_builder.get_safe_condition_legacy(
event_condition_setting
)
sqlQuery = """SELECT eve_MAC as MAC, eve_DateTime as Datetime, devLastIP as IP, eve_EventType as "Event Type", devName as "Device name", devComments as Comments FROM Events_Devices
sqlQuery = """SELECT
eve_MAC as MAC,
eve_DateTime as Datetime,
devLastIP as IP,
eve_EventType as "Event Type",
devName as "Device name",
devComments as Comments FROM Events_Devices
WHERE eve_PendingAlertEmail = 1
AND eve_EventType IN ('Connected', 'Down Reconnected', 'Disconnected','IP Changed') {}
ORDER BY eve_DateTime""".format(safe_condition)
@@ -191,7 +209,13 @@ def get_notifications(db):
["[Notification] Error building safe condition for events: ", e],
)
# Fall back to safe default (no additional conditions)
sqlQuery = """SELECT eve_MAC as MAC, eve_DateTime as Datetime, devLastIP as IP, eve_EventType as "Event Type", devName as "Device name", devComments as Comments FROM Events_Devices
sqlQuery = """SELECT
eve_MAC as MAC,
eve_DateTime as Datetime,
devLastIP as IP,
eve_EventType as "Event Type",
devName as "Device name",
devComments as Comments FROM Events_Devices
WHERE eve_PendingAlertEmail = 1
AND eve_EventType IN ('Connected', 'Down Reconnected', 'Disconnected','IP Changed')
ORDER BY eve_DateTime"""
@@ -208,7 +232,17 @@ def get_notifications(db):
if "plugins" in sections:
# Compose Plugins Section
sqlQuery = """SELECT Plugin, Object_PrimaryId, Object_SecondaryId, DateTimeChanged, Watched_Value1, Watched_Value2, Watched_Value3, Watched_Value4, Status from Plugins_Events"""
sqlQuery = """SELECT
Plugin,
Object_PrimaryId,
Object_SecondaryId,
DateTimeChanged,
Watched_Value1,
Watched_Value2,
Watched_Value3,
Watched_Value4,
Status
from Plugins_Events"""
# Get the events as JSON
json_obj = db.get_table_as_json(sqlQuery)

View File

@@ -1,13 +1,12 @@
import json
import uuid
import socket
import subprocess
from yattag import indent
from json2table import convert
# Register NetAlertX modules
import conf
from const import applicationPath, logPath, apiPath, reportTemplatesPath
from const import logPath, apiPath, reportTemplatesPath
from logger import mylog, Logger
from helper import (
generate_mac_links,
@@ -62,11 +61,7 @@ class NotificationInstance:
# Check if nothing to report, end
if (
JSON["new_devices"] == []
and JSON["down_devices"] == []
and JSON["events"] == []
and JSON["plugins"] == []
and JSON["down_reconnected"] == []
JSON["new_devices"] == [] and JSON["down_devices"] == [] and JSON["events"] == [] and JSON["plugins"] == [] and JSON["down_reconnected"] == []
):
self.HasNotifications = False
else:
@@ -88,8 +83,6 @@ class NotificationInstance:
# else:
# mylog('debug', ['[Notification] notiStruc:', json.dumps(notiStruc.__dict__, indent=4)])
Text = ""
HTML = ""
template_file_path = reportTemplatesPath + "report_template.html"
# Open text Template

View File

@@ -117,7 +117,3 @@ class UserEventsQueueInstance:
mylog('none', [msg])
return False, msg

View File

@@ -10,11 +10,20 @@ from concurrent.futures import ThreadPoolExecutor, as_completed
import conf
from const import pluginsPath, logPath, applicationPath, reportTemplatesPath
from logger import mylog, Logger
from helper import get_file_content, write_file, get_setting, get_setting_value
from helper import get_file_content, get_setting, get_setting_value
from utils.datetime_utils import timeNowTZ, timeNowDB
from app_state import updateState
from api import update_api
from utils.plugin_utils import logEventStatusCounts, get_plugin_string, get_plugin_setting_obj, print_plugin_info, list_to_csv, combine_plugin_objects, resolve_wildcards_arr, handle_empty, custom_plugin_decoder, decode_and_rename_files
from utils.plugin_utils import (
logEventStatusCounts,
get_plugin_setting_obj,
print_plugin_info,
list_to_csv,
combine_plugin_objects,
resolve_wildcards_arr,
handle_empty,
decode_and_rename_files
)
from models.notification_instance import NotificationInstance
from messaging.in_app import write_notification
from models.user_events_queue_instance import UserEventsQueueInstance
@@ -57,13 +66,7 @@ class plugin_manager:
# Header
updateState("Run: Plugins")
mylog(
"debug",
[
"[Plugins] Check if any plugins need to be executed on run type: ",
runType,
],
)
mylog("debug", f"[Plugins] Check if any plugins need to be executed on run type: {runType}")
for plugin in self.all_plugins:
shouldRun = False
@@ -72,7 +75,7 @@ class plugin_manager:
# 🔹 Lookup RUN setting from cache instead of calling get_plugin_setting_obj each time
run_setting = self._cache["settings"].get(prefix, {}).get("RUN")
if run_setting != None and run_setting["value"] == runType:
if run_setting is not None and run_setting["value"] == runType:
if runType != "schedule":
shouldRun = True
elif runType == "schedule":
@@ -91,10 +94,7 @@ class plugin_manager:
# 🔹 CMD also retrieved from cache
cmd_setting = self._cache["settings"].get(prefix, {}).get("CMD")
mylog(
"debug",
["[Plugins] CMD: ", cmd_setting["value"] if cmd_setting else None],
)
mylog("debug", f"[Plugins] CMD: {cmd_setting["value"] if cmd_setting else None}")
execute_plugin(self.db, self.all_plugins, plugin)
@@ -130,13 +130,7 @@ class plugin_manager:
mylog("debug", ["[check_and_run_user_event] User Execution Queue is empty"])
return # Exit early if the log file is empty
else:
mylog(
"debug",
[
"[check_and_run_user_event] Process User Execution Queue:"
+ ", ".join(map(str, lines))
],
)
mylog("debug", "[check_and_run_user_event] Process User Execution Queue:" + ", ".join(map(str, lines)))
for line in lines:
# Extract event name and parameters from the log line
@@ -160,15 +154,7 @@ class plugin_manager:
update_api(self.db, self.all_plugins, False, param.split(","), True)
else:
mylog(
"minimal",
[
"[check_and_run_user_event] WARNING: Unhandled event in execution queue: ",
event,
" | ",
param,
],
)
mylog("minimal", f"[check_and_run_user_event] WARNING: Unhandled event in execution queue: {event} | {param}")
execution_log.finalize_event(
event
) # Finalize unknown events to remove them
@@ -312,7 +298,7 @@ class plugin_param:
if param["type"] == "setting":
inputValue = get_setting(param["value"])
if inputValue != None:
if inputValue is not None:
setVal = inputValue["setValue"] # setting value
setTyp = inputValue["setType"] # setting type
@@ -337,9 +323,7 @@ class plugin_param:
resolved = list_to_csv(setVal)
else:
mylog(
"none", ["[Plugins] ⚠ ERROR: Parameter probably not converted."]
)
mylog("none", "[Plugins] ⚠ ERROR: Parameter probably not converted.")
return json.dumps(setVal)
# Get SQL result
@@ -390,15 +374,10 @@ def run_plugin(command, set_RUN_TIMEOUT, plugin):
)
except subprocess.CalledProcessError as e:
mylog("none", [e.output])
mylog("none", ["[Plugins] ⚠ ERROR - enable LOG_LEVEL=debug and check logs"])
mylog("none", "[Plugins] ⚠ ERROR - enable LOG_LEVEL=debug and check logs")
return None
except subprocess.TimeoutExpired:
mylog(
"none",
[
f"[Plugins] ⚠ ERROR - TIMEOUT - the plugin {plugin['unique_prefix']} forcefully terminated as timeout reached. Increase TIMEOUT setting and scan interval."
],
)
mylog("none", f"[Plugins] ⚠ ERROR - TIMEOUT - the plugin {plugin['unique_prefix']} forcefully terminated as timeout reached. Increase TIMEOUT setting and scan interval.")
return None
@@ -411,7 +390,7 @@ def execute_plugin(db, all_plugins, plugin):
set = get_plugin_setting_obj(plugin, "CMD")
# handle missing "function":"CMD" setting
if set == None:
if set is None:
return
set_CMD = set["value"]
@@ -441,13 +420,8 @@ def execute_plugin(db, all_plugins, plugin):
for param in plugin["params"]:
tempParam = plugin_param(param, plugin, db)
if tempParam.resolved == None:
mylog(
"none",
[
f'[Plugins] The parameter "name":"{tempParam.name}" for "value": {tempParam.value} was resolved as None'
],
)
if tempParam.resolved is None:
mylog("none", f'[Plugins] The parameter "name":"{tempParam.name}" for "value": {tempParam.value} was resolved as None')
else:
# params.append( [param["name"], resolved] )
@@ -456,14 +430,9 @@ def execute_plugin(db, all_plugins, plugin):
if tempParam.multiplyTimeout:
set_RUN_TIMEOUT = set_RUN_TIMEOUT * tempParam.paramValuesCount
mylog(
"debug",
[
f'[Plugins] The parameter "name":"{param["name"]}" will multiply the timeout {tempParam.paramValuesCount} times. Total timeout: {set_RUN_TIMEOUT}s'
],
)
mylog("debug", f'[Plugins] The parameter "name":"{param["name"]}" will multiply timeout {tempParam.paramValuesCount}x. Total timeout: {set_RUN_TIMEOUT}s')
mylog("debug", ["[Plugins] Timeout: ", set_RUN_TIMEOUT])
mylog("debug", f"[Plugins] Timeout: {set_RUN_TIMEOUT}")
# build SQL query parameters to insert into the DB
sqlParams = []
@@ -475,8 +444,8 @@ def execute_plugin(db, all_plugins, plugin):
command = resolve_wildcards_arr(set_CMD.split(), params)
# Execute command
mylog("verbose", ["[Plugins] Executing: ", set_CMD])
mylog("debug", ["[Plugins] Resolved : ", command])
mylog("verbose", f"[Plugins] Executing: {set_CMD}")
mylog("debug", f"[Plugins] Resolved : {command}")
# Using ThreadPoolExecutor to handle concurrent subprocesses
with ThreadPoolExecutor(max_workers=5) as executor:
@@ -521,12 +490,7 @@ def execute_plugin(db, all_plugins, plugin):
columns = line.split("|")
# There have to be 9 or 13 columns
if len(columns) not in [9, 13]:
mylog(
"none",
[
f"[Plugins] Wrong number of input values, must be 9 or 13, got {len(columns)} from: {line}"
],
)
mylog("none", f"[Plugins] Wrong number of input values, must be 9 or 13, got {len(columns)} from: {line}")
continue # Skip lines with incorrect number of columns
# Common part of the SQL parameters
@@ -581,9 +545,7 @@ def execute_plugin(db, all_plugins, plugin):
# keep current instance log file, delete all from other nodes
if filename != "last_result.log" and os.path.exists(full_path):
os.remove(full_path) # DEBUG:TODO uncomment 🐛
mylog(
"verbose", [f"[Plugins] Processed and deleted file: {full_path} "]
)
mylog("verbose", f"[Plugins] Processed and deleted file: {full_path} ")
# app-db-query
if plugin["data_source"] == "app-db-query":
@@ -591,7 +553,7 @@ def execute_plugin(db, all_plugins, plugin):
q = set_CMD.replace("{s-quote}", "'")
# Execute command
mylog("verbose", ["[Plugins] Executing: ", q])
mylog("verbose", f"[Plugins] Executing: {q}")
# set_CMD should contain a SQL query
arr = db.get_sql_array(q)
@@ -650,7 +612,7 @@ def execute_plugin(db, all_plugins, plugin):
# Append the final parameters to sqlParams
sqlParams.append(tuple(base_params))
else:
mylog("none", ["[Plugins] Skipped invalid sql result"])
mylog("none", "[Plugins] Skipped invalid sql result")
# app-db-query
if plugin["data_source"] == "sqlite-db-query":
@@ -659,19 +621,14 @@ def execute_plugin(db, all_plugins, plugin):
q = set_CMD.replace("{s-quote}", "'")
# Execute command
mylog("verbose", ["[Plugins] Executing: ", q])
mylog("verbose", f"[Plugins] Executing: {q}")
# ------- necessary settings check --------
set = get_plugin_setting_obj(plugin, "DB_PATH")
# handle missing "function":"DB_PATH" setting
if set == None:
mylog(
"none",
[
"[Plugins] ⚠ ERROR: DB_PATH setting for plugin type sqlite-db-query missing."
],
)
if set is None:
mylog("none", "[Plugins] ⚠ ERROR: DB_PATH setting for plugin type sqlite-db-query missing.")
return
fullSqlitePath = set["value"]
@@ -679,25 +636,14 @@ def execute_plugin(db, all_plugins, plugin):
# try attaching the sqlite DB
try:
sql.execute(
"ATTACH DATABASE '"
+ fullSqlitePath
+ "' AS EXTERNAL_"
+ plugin["unique_prefix"]
"ATTACH DATABASE '" + fullSqlitePath + "' AS EXTERNAL_" + plugin["unique_prefix"]
)
arr = db.get_sql_array(q)
sql.execute("DETACH DATABASE EXTERNAL_" + plugin["unique_prefix"])
except sqlite3.Error as e:
mylog(
"none",
[
f"[Plugins] ⚠ ERROR: DB_PATH setting ({fullSqlitePath}) for plugin {plugin['unique_prefix']}. Did you mount it correctly?"
],
)
mylog(
"none",
["[Plugins] ⚠ ERROR: ATTACH DATABASE failed with SQL ERROR: ", e],
)
mylog("none", f"[Plugins] ⚠ ERROR: DB_PATH setting ({fullSqlitePath}) for plugin {plugin['unique_prefix']}. Did you mount it correctly?")
mylog("none", f"[Plugins] ⚠ ERROR: ATTACH DATABASE failed with SQL ERROR: {e}")
return
for row in arr:
@@ -748,24 +694,14 @@ def execute_plugin(db, all_plugins, plugin):
# Append the final parameters to sqlParams
sqlParams.append(tuple(base_params))
else:
mylog("none", ["[Plugins] Skipped invalid sql result"])
mylog("none", "[Plugins] Skipped invalid sql result")
# check if the subprocess / SQL query failed / there was no valid output
if len(sqlParams) == 0:
mylog(
"none",
[
f'[Plugins] No output received from the plugin "{plugin["unique_prefix"]}"'
],
)
mylog("none", f'[Plugins] No output received from the plugin "{plugin["unique_prefix"]}"')
else:
mylog(
"verbose",
[
f"[Plugins] SUCCESS for {plugin['unique_prefix']} received {len(sqlParams)} entries"
],
)
mylog("verbose", f"[Plugins] SUCCESS for {plugin['unique_prefix']} received {len(sqlParams)} entries")
# mylog('debug', ['[Plugins] sqlParam entries: ', sqlParams])
# create objects
@@ -782,12 +718,7 @@ def execute_plugin(db, all_plugins, plugin):
# check if we need to update devices api endpoint as well to prevent long user waits on Loading...
userUpdatedDevices = UserEventsQueueInstance().has_update_devices()
mylog(
"verbose",
[
f"[Plugins] Should I update API (userUpdatedDevices): {userUpdatedDevices}"
],
)
mylog("verbose", f"[Plugins] Should I update API (userUpdatedDevices): {userUpdatedDevices}")
if userUpdatedDevices:
endpoints += ["devices"]
@@ -807,7 +738,7 @@ def process_plugin_events(db, plugin, plugEventsArr):
pluginPref = plugin["unique_prefix"]
mylog("verbose", ["[Plugins] Processing : ", pluginPref])
mylog("verbose", f"[Plugins] Processing : {pluginPref}")
try:
# Begin a transaction
@@ -827,20 +758,8 @@ def process_plugin_events(db, plugin, plugEventsArr):
for eve in plugEventsArr:
pluginEvents.append(plugin_object_class(plugin, eve))
mylog(
"debug",
[
"[Plugins] Existing objects from Plugins_Objects: ",
len(pluginObjects),
],
)
mylog(
"debug",
[
"[Plugins] Logged events from the plugin run : ",
len(pluginEvents),
],
)
mylog("debug", f"[Plugins] Existing objects from Plugins_Objects: {len(pluginObjects)}")
mylog("debug", f"[Plugins] Logged events from the plugin run : {len(pluginEvents)}")
# Loop thru all current events and update the status to "exists" if the event matches an existing object
index = 0
@@ -857,8 +776,7 @@ def process_plugin_events(db, plugin, plugEventsArr):
if tmpObjFromEvent.status == "exists":
# compare hash of the changed watched columns for uniqueness - make sure you compare the values with the same idsHash before checking watchedHash
if any(
x.idsHash == tmpObjFromEvent.idsHash
and x.watchedHash != tmpObjFromEvent.watchedHash
x.idsHash == tmpObjFromEvent.idsHash and x.watchedHash != tmpObjFromEvent.watchedHash
for x in pluginObjects
):
pluginEvents[index].status = "watched-changed"
@@ -955,25 +873,17 @@ def process_plugin_events(db, plugin, plugEventsArr):
# combine all DB insert and update events into one for history
history_to_insert.append(values)
mylog("debug", ["[Plugins] pluginEvents count: ", len(pluginEvents)])
mylog("debug", ["[Plugins] pluginObjects count: ", len(pluginObjects)])
mylog("debug", f"[Plugins] pluginEvents count: {len(pluginEvents)}")
mylog("debug", f"[Plugins] pluginObjects count: {len(pluginObjects)}")
mylog(
"debug", ["[Plugins] events_to_insert count: ", len(events_to_insert)]
)
mylog(
"debug", ["[Plugins] history_to_insert count: ", len(history_to_insert)]
)
mylog(
"debug", ["[Plugins] objects_to_insert count: ", len(objects_to_insert)]
)
mylog(
"debug", ["[Plugins] objects_to_update count: ", len(objects_to_update)]
)
mylog("debug", f"[Plugins] events_to_insert count: {len(events_to_insert)}")
mylog("debug", f"[Plugins] history_to_insert count: {len(history_to_insert)}")
mylog("debug", f"[Plugins] objects_to_insert count: {len(objects_to_insert)}")
mylog("debug", f"[Plugins] objects_to_update count: {len(objects_to_update)}")
mylog("trace", ["[Plugins] objects_to_update: ", objects_to_update])
mylog("trace", ["[Plugins] events_to_insert: ", events_to_insert])
mylog("trace", ["[Plugins] history_to_insert: ", history_to_insert])
mylog("trace", f"[Plugins] objects_to_update: {objects_to_update}")
mylog("trace", f"[Plugins] events_to_insert: {events_to_insert}")
mylog("trace", f"[Plugins] history_to_insert: {history_to_insert}")
logEventStatusCounts("pluginEvents", pluginEvents)
logEventStatusCounts("pluginObjects", pluginObjects)
@@ -1044,7 +954,7 @@ def process_plugin_events(db, plugin, plugEventsArr):
except Exception as e:
# Rollback the transaction in case of an error
conn.rollback()
mylog("none", ["[Plugins] ⚠ ERROR: ", e])
mylog("none", f"[Plugins] ⚠ ERROR: {e}")
raise e
# Perform database table mapping if enabled for the plugin
@@ -1056,7 +966,7 @@ def process_plugin_events(db, plugin, plugEventsArr):
dbTable = plugin["mapped_to_table"]
# Log a debug message indicating the mapping of objects to the database table.
mylog("debug", ["[Plugins] Mapping objects to database table: ", dbTable])
mylog("debug", f"[Plugins] Mapping objects to database table: {dbTable}")
# Initialize lists to hold mapped column names, columnsStr, and valuesStr for SQL query.
mappedCols = []
@@ -1121,8 +1031,7 @@ def process_plugin_events(db, plugin, plugEventsArr):
# Check if there's a default value specified for this column in the JSON.
if (
"mapped_to_column_data" in col
and "value" in col["mapped_to_column_data"]
"mapped_to_column_data" in col and "value" in col["mapped_to_column_data"]
):
tmpList.append(col["mapped_to_column_data"]["value"])
@@ -1133,8 +1042,8 @@ def process_plugin_events(db, plugin, plugEventsArr):
q = f"INSERT OR IGNORE INTO {dbTable} ({columnsStr}) VALUES ({valuesStr})"
# Log a debug message showing the generated SQL query for mapping.
mylog("debug", ["[Plugins] SQL query for mapping: ", q])
mylog("debug", ["[Plugins] SQL sqlParams for mapping: ", sqlParams])
mylog("debug", f"[Plugins] SQL query for mapping: {q}")
mylog("debug", f"[Plugins] SQL sqlParams for mapping: {sqlParams}")
# Execute the SQL query using 'sql.executemany()' and the 'sqlParams' list of tuples.
# This will insert multiple rows into the database in one go.

View File

@@ -1,14 +1,6 @@
import sys
import subprocess
import os
import re
import datetime
from dateutil import parser
# Register NetAlertX directories
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
sys.path.extend([f"{INSTALL_PATH}/server"])
from helper import get_setting_value, check_IP_format
from utils.datetime_utils import timeNowDB, normalizeTimeStamp
from logger import mylog, Logger
@@ -57,9 +49,10 @@ def exclude_ignored_devices(db):
sql.execute(query)
#-------------------------------------------------------------------------------
def update_devices_data_from_scan (db):
sql = db.sql #TO-DO
# -------------------------------------------------------------------------------
def update_devices_data_from_scan(db):
sql = db.sql # TO-DO
startTime = timeNowDB()
# Update Last Connection
@@ -425,8 +418,8 @@ def print_scan_stats(db):
mylog("verbose", f" {row['cur_ScanMethod']}: {row['scan_method_count']}")
#-------------------------------------------------------------------------------
def create_new_devices (db):
# -------------------------------------------------------------------------------
def create_new_devices(db):
sql = db.sql # TO-DO
startTime = timeNowDB()
@@ -598,7 +591,8 @@ def create_new_devices (db):
mylog("debug", "[New Devices] New Devices end")
db.commitDB()
#-------------------------------------------------------------------------------
# -------------------------------------------------------------------------------
# Check if plugins data changed
def check_plugin_data_changed(pm, plugins_to_check):
"""
@@ -639,13 +633,13 @@ def check_plugin_data_changed(pm, plugins_to_check):
# Normalize and validate last_changed timestamp
last_changed_ts = normalizeTimeStamp(last_data_change)
if last_changed_ts == None:
if last_changed_ts is None:
mylog('none', f'[check_plugin_data_changed] Unexpected last_data_change timestamp for {plugin_name} (input|output): ({last_data_change}|{last_changed_ts})')
# Normalize and validate last_data_check timestamp
last_data_check_ts = normalizeTimeStamp(last_data_check)
if last_data_check_ts == None:
if last_data_check_ts is None:
mylog('none', f'[check_plugin_data_changed] Unexpected last_data_check timestamp for {plugin_name} (input|output): ({last_data_check}|{last_data_check_ts})')
# Track which plugins have newer state than last_checked
@@ -660,15 +654,19 @@ def check_plugin_data_changed(pm, plugins_to_check):
# Continue if changes detected
for p in plugins_changed:
mylog('debug', f'[check_plugin_data_changed] {p} changed (last_data_change|last_data_check): ({pm.plugin_states.get(p, {}).get("lastDataChange")}|{pm.plugin_checks.get(p)})')
mylog(
'debug',
f'[check_plugin_data_changed] {p} changed (last_data_change|last_data_check): ({pm.plugin_states.get(p, {}).get("lastDataChange")}|{pm.plugin_checks.get(p)})'
)
return True
#-------------------------------------------------------------------------------
# -------------------------------------------------------------------------------
def update_devices_names(pm):
# --- Short-circuit if no name-resolution plugin has changed ---
if check_plugin_data_changed(pm, ["DIGSCAN", "NSLOOKUP", "NBTSCAN", "AVAHISCAN"]) == False:
if check_plugin_data_changed(pm, ["DIGSCAN", "NSLOOKUP", "NBTSCAN", "AVAHISCAN"]) is False:
mylog('debug', '[Update Device Name] No relevant plugin changes since last check.')
return
@@ -722,8 +720,7 @@ def update_devices_names(pm):
# If a valid result is found, record it and stop further attempts
if (
newFQDN not in [nameNotFound, "", "localhost."]
and " communications error to " not in newFQDN
newFQDN not in [nameNotFound, "", "localhost."] and " communications error to " not in newFQDN
):
foundStats[label] += 1
@@ -750,14 +747,14 @@ def update_devices_names(pm):
)
# Try resolving both name and FQDN
recordsToUpdate, recordsNotFound, foundStats, notFound = resolve_devices(
recordsToUpdate, recordsNotFound, fs, notFound = resolve_devices(
unknownDevices
)
# Log summary
mylog(
"verbose",
f"[Update Device Name] Names Found (DIGSCAN/AVAHISCAN/NSLOOKUP/NBTSCAN): {len(recordsToUpdate)} ({foundStats['DIGSCAN']}/{foundStats['AVAHISCAN']}/{foundStats['NSLOOKUP']}/{foundStats['NBTSCAN']})",
f"[Update Device Name] Names Found (DIGSCAN/AVAHISCAN/NSLOOKUP/NBTSCAN): {len(recordsToUpdate)} ({fs['DIGSCAN']}/{fs['AVAHISCAN']}/{fs['NSLOOKUP']}/{fs['NBTSCAN']})",
)
mylog("verbose", f"[Update Device Name] Names Not Found : {notFound}")
@@ -780,16 +777,14 @@ def update_devices_names(pm):
)
# Try resolving only FQDN
recordsToUpdate, _, foundStats, notFound = resolve_devices(
recordsToUpdate, _, fs, notFound = resolve_devices(
allDevices, resolve_both_name_and_fqdn=False
)
# Log summary
mylog(
"verbose",
f"[Update FQDN] Names Found (DIGSCAN/AVAHISCAN/NSLOOKUP/NBTSCAN): {len(recordsToUpdate)}"+
f"({foundStats['DIGSCAN']}/{foundStats['AVAHISCAN']}/{foundStats['NSLOOKUP']}"+
f"/{foundStats['NBTSCAN']})",
f"[Update FQDN] Names Found (DIGSCAN/AVAHISCAN/NSLOOKUP/NBTSCAN): {len(recordsToUpdate)}({fs['DIGSCAN']}/{fs['AVAHISCAN']}/{fs['NSLOOKUP']}/{fs['NBTSCAN']})",
)
mylog("verbose", f"[Update FQDN] Names Not Found : {notFound}")
@@ -803,7 +798,7 @@ def update_devices_names(pm):
# --- Step 3: Log last checked time ---
# After resolving names, update last checked
pm.plugin_checks = {"DIGSCAN": timeNowDB(), "AVAHISCAN": timeNowDB(), "NSLOOKUP": timeNowDB(), "NBTSCAN": timeNowDB() }
pm.plugin_checks = {"DIGSCAN": timeNowDB(), "AVAHISCAN": timeNowDB(), "NSLOOKUP": timeNowDB(), "NBTSCAN": timeNowDB()}
# -------------------------------------------------------------------------------
@@ -901,7 +896,6 @@ def query_MAC_vendor(pMAC):
# Search vendor in HW Vendors DB
mac_start_string6 = mac[0:6]
mac_start_string9 = mac[0:9]
try:
with open(filePath, "r") as f:

View File

@@ -1,16 +1,13 @@
import sys
import os
import re
import json
import base64
from pathlib import Path
from typing import Optional, Tuple
from logger import mylog
# Register NetAlertX directories
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
sys.path.extend([f"{INSTALL_PATH}/server"])
from logger import mylog
# Load MAC/device-type/icon rules from external file
MAC_TYPE_ICON_PATH = Path(f"{INSTALL_PATH}/back/device_heuristics_rules.json")
@@ -83,7 +80,7 @@ def match_vendor(vendor: str, default_type: str, default_icon: str) -> Tuple[str
for pattern in patterns:
# Only apply fallback when no MAC prefix is specified
mac_prefix = pattern.get("mac_prefix", "")
# mac_prefix = pattern.get("mac_prefix", "")
vendor_pattern = pattern.get("vendor", "").lower()
if vendor_pattern and vendor_pattern in vendor_lc:

View File

@@ -1,11 +1,4 @@
import sys
import os
import re
# Register NetAlertX directories
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
sys.path.extend([f"{INSTALL_PATH}/server"])
from logger import mylog
from helper import get_setting_value

View File

@@ -1,10 +1,3 @@
import sys
import os
# Register NetAlertX directories
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
sys.path.extend([f"{INSTALL_PATH}/server"])
from scan.device_handling import (
create_new_devices,
print_scan_stats,
@@ -14,7 +7,7 @@ from scan.device_handling import (
)
from helper import get_setting_value
from db.db_helper import print_table_schema
from utils.datetime_utils import timeNowDB, timeNowTZ
from utils.datetime_utils import timeNowDB
from logger import mylog, Logger
from messaging.reporting import skip_repeated_notifications
@@ -133,9 +126,9 @@ def create_sessions_snapshot(db):
db.commitDB()
#-------------------------------------------------------------------------------
def insert_events (db):
sql = db.sql #TO-DO
# -------------------------------------------------------------------------------
def insert_events(db):
sql = db.sql # TO-DO
startTime = timeNowDB()
# Check device down

View File

@@ -1,49 +1,43 @@
#!/usr/bin/env python
# !/usr/bin/env python
import os
import pathlib
import sys
from datetime import datetime
# from datetime import datetime
from dateutil import parser
import datetime
import re
import pytz
from pytz import timezone
from typing import Union
from zoneinfo import ZoneInfo
import email.utils
# Register NetAlertX directories
INSTALL_PATH="/app"
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
import conf
from const import *
# from const import *
#-------------------------------------------------------------------------------
# -------------------------------------------------------------------------------
# DateTime
#-------------------------------------------------------------------------------
# -------------------------------------------------------------------------------
DATETIME_PATTERN = "%Y-%m-%d %H:%M:%S"
DATETIME_REGEX = re.compile(r'^\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}$')
def timeNowTZ():
    """Return the current local time, truncated to whole seconds.

    Uses the application's configured timezone (``conf.tz``) when one is
    set; otherwise falls back to the system's naive local time.
    """
    zone = conf.tz
    current = datetime.datetime.now(zone) if zone else datetime.datetime.now()
    return current.replace(microsecond=0)
def timeNow():
    """Return the current naive local time with microseconds zeroed."""
    stamp = datetime.datetime.now()
    return stamp.replace(microsecond=0)
def get_timezone_offset():
    """Return the configured timezone's current UTC offset as a '+HH:MM' string.

    NOTE(review): assumes ``conf.tz`` is a valid tzinfo; if it were None the
    resulting naive datetime's ``utcoffset()`` returns None and the division
    below raises — confirm conf.tz is always set before this is called.
    """
    # Timezone-aware "now" in the app's configured timezone.
    now = datetime.datetime.now(conf.tz)
    # Offset from UTC expressed in (possibly fractional) hours, e.g. 5.5.
    offset_hours = now.utcoffset().total_seconds() / 3600
    # Signed zero-padded hours plus minutes, e.g. "+10:00" or "+05:30".
    # NOTE(review): int() truncates toward zero while "% 1" is always
    # non-negative in Python, so an offset strictly between -1:00 and 0:00
    # would format with a "+00" hour — verify such half-offsets cannot occur.
    offset_formatted = "{:+03d}:{:02d}".format(int(offset_hours), int((offset_hours % 1) * 60))
    return offset_formatted
def timeNowDB(local=True):
"""
Return the current time (local or UTC) as ISO 8601 for DB storage.
@@ -67,9 +61,9 @@ def timeNowDB(local=True):
return datetime.datetime.now(datetime.UTC).strftime(DATETIME_PATTERN)
#-------------------------------------------------------------------------------
# -------------------------------------------------------------------------------
# Date and time methods
#-------------------------------------------------------------------------------
# -------------------------------------------------------------------------------
def normalizeTimeStamp(inputTimeStamp):
"""
@@ -125,6 +119,7 @@ def format_date_iso(date1: str) -> str:
dt = datetime.datetime.fromisoformat(date1) if isinstance(date1, str) else date1
return dt.isoformat()
# -------------------------------------------------------------------------------------------
def format_event_date(date_str: str, event_type: str) -> str:
"""Format event date with fallback rules."""
@@ -135,6 +130,7 @@ def format_event_date(date_str: str, event_type: str) -> str:
else:
return "<still connected>"
# -------------------------------------------------------------------------------------------
def ensure_datetime(dt: Union[str, datetime.datetime, None]) -> datetime.datetime:
if dt is None:
@@ -157,6 +153,7 @@ def parse_datetime(dt_str):
except ValueError:
return None
def format_date(date_str: str) -> str:
try:
dt = parse_datetime(date_str)
@@ -168,6 +165,7 @@ def format_date(date_str: str) -> str:
except (ValueError, AttributeError, TypeError):
return "invalid"
def format_date_diff(date1, date2, tz_name):
"""
Return difference between two datetimes as 'Xd HH:MM'.

View File

@@ -1,6 +1,6 @@
import os
import json
from collections import namedtuple
import conf
from logger import mylog
from utils.crypto_utils import decrypt_data
@@ -220,9 +220,7 @@ def get_plugins_configs(loadAll):
# Load all plugins if `loadAll` is True, the plugin is in the enabled list,
# or no specific plugins are enabled (enabledPlugins is empty)
if (
loadAll
or plugJson["unique_prefix"] in enabledPlugins
or enabledPlugins == []
loadAll or plugJson["unique_prefix"] in enabledPlugins or enabledPlugins == []
):
# Load the contents of the config.json file as a JSON object and append it to pluginsList
pluginsList.append(plugJson)

View File

@@ -1,11 +1,4 @@
import sqlite3
import os
import sys
# Register NetAlertX directories
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
sys.path.extend([f"{INSTALL_PATH}/server"])
from logger import mylog, Logger
from helper import get_setting_value
from models.device_instance import DeviceInstance
@@ -15,7 +8,6 @@ from models.plugin_object_instance import PluginObjectInstance
Logger(get_setting_value("LOG_LEVEL"))
class Action:
"""Base class for all actions."""

View File

@@ -1,10 +1,3 @@
import os
import sys
# Register NetAlertX directories
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
sys.path.extend([f"{INSTALL_PATH}/server"])
from helper import get_setting_value
from logger import Logger
from const import sql_generateGuid

View File

@@ -1,12 +1,5 @@
import re
import json
import os
import sys
# Register NetAlertX directories
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
sys.path.extend([f"{INSTALL_PATH}/server"])
from logger import mylog, Logger
from helper import get_setting_value

View File

@@ -1,22 +1,17 @@
import json
import os
import sys
# Register NetAlertX directories
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
sys.path.extend([f"{INSTALL_PATH}/server"])
from const import fullConfFolder
from logger import mylog, Logger
from helper import get_setting_value
# Make sure log level is initialized correctly
Logger(get_setting_value("LOG_LEVEL"))
from workflows.triggers import Trigger
from workflows.conditions import ConditionGroup
from workflows.actions import DeleteObjectAction, RunPluginAction, UpdateFieldAction
# Make sure log level is initialized correctly
Logger(get_setting_value("LOG_LEVEL"))
class WorkflowManager:
def __init__(self, db):
self.db = db

View File

@@ -1,11 +1,4 @@
import json
import os
import sys
# Register NetAlertX directories
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
sys.path.extend([f"{INSTALL_PATH}/server"])
from logger import mylog, Logger
from helper import get_setting_value
from database import get_array_from_sql_rows
@@ -28,8 +21,7 @@ class Trigger:
self.event_type = triggerJson["event_type"]
self.event = event # Store the triggered event context, if provided
self.triggered = (
self.object_type == event["ObjectType"]
and self.event_type == event["AppEventType"]
self.object_type == event["ObjectType"] and self.event_type == event["AppEventType"]
)
mylog(

View File

@@ -7,9 +7,9 @@ import pytest
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from helper import get_setting_value
from utils.datetime_utils import timeNowDB
from api_server.api_server_start import app
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
from utils.datetime_utils import timeNowDB # noqa: E402 [flake8 lint suppression]
from api_server.api_server_start import app # noqa: E402 [flake8 lint suppression]
@pytest.fixture(scope="session")
@@ -26,7 +26,7 @@ def client():
@pytest.fixture(scope="session")
def test_mac():
# Generate a unique MAC for each test run
return "AA:BB:CC:" + ":".join(f"{random.randint(0,255):02X}" for _ in range(3))
return "AA:BB:CC:" + ":".join(f"{random.randint(0, 255):02X}" for _ in range(3))
def auth_headers(token):

View File

@@ -1,17 +1,17 @@
import sys
import pathlib
import sqlite3
# import pathlib
# import sqlite3
import random
import string
import uuid
# import string
# import uuid
import os
import pytest
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from helper import get_setting_value
from api_server.api_server_start import app
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
from api_server.api_server_start import app # noqa: E402 [flake8 lint suppression]
@pytest.fixture(scope="session")
@@ -28,7 +28,7 @@ def client():
@pytest.fixture
def test_mac():
# Generate a unique MAC for each test run
return "AA:BB:CC:" + ":".join(f"{random.randint(0,255):02X}" for _ in range(3))
return "AA:BB:CC:" + ":".join(f"{random.randint(0, 255):02X}" for _ in range(3))
def auth_headers(token):
@@ -38,7 +38,6 @@ def auth_headers(token):
def test_create_device(client, api_token, test_mac):
payload = {
"createNew": True,
"devType": "Test Device",
"devOwner": "Unit Test",
"devType": "Router",
"devVendor": "TestVendor",
@@ -103,7 +102,7 @@ def test_copy_device(client, api_token, test_mac):
# Step 2: Generate a target MAC
target_mac = "AA:BB:CC:" + ":".join(
f"{random.randint(0,255):02X}" for _ in range(3)
f"{random.randint(0, 255):02X}" for _ in range(3)
)
# Step 3: Copy device

View File

@@ -1,32 +1,36 @@
import sys
import pathlib
import sqlite3
# import pathlib
# import sqlite3
import base64
import random
import string
import uuid
# import string
# import uuid
import os
import pytest
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from helper import get_setting_value
from api_server.api_server_start import app
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
from api_server.api_server_start import app # noqa: E402 [flake8 lint suppression]
@pytest.fixture(scope="session")
def api_token():
return get_setting_value("API_TOKEN")
@pytest.fixture
def client():
with app.test_client() as client:
yield client
@pytest.fixture
def test_mac():
# Generate a unique MAC for each test run
return "AA:BB:CC:" + ":".join(f"{random.randint(0,255):02X}" for _ in range(3))
return "AA:BB:CC:" + ":".join(f"{random.randint(0, 255):02X}" for _ in range(3))
def auth_headers(token):
return {"Authorization": f"Bearer {token}"}
@@ -40,7 +44,8 @@ def create_dummy(client, api_token, test_mac):
"devType": "Router",
"devVendor": "TestVendor",
}
resp = client.post(f"/device/{test_mac}", json=payload, headers=auth_headers(api_token))
client.post(f"/device/{test_mac}", json=payload, headers=auth_headers(api_token))
def test_get_all_devices(client, api_token, test_mac):
# Ensure there is at least one device
@@ -67,6 +72,7 @@ def test_delete_devices_with_macs(client, api_token, test_mac):
assert resp.status_code == 200
assert resp.json.get("success") is True
def test_delete_all_empty_macs(client, api_token):
resp = client.delete("/devices/empty-macs", headers=auth_headers(api_token))
assert resp.status_code == 200
@@ -79,6 +85,7 @@ def test_delete_unknown_devices(client, api_token):
assert resp.status_code == 200
assert resp.json.get("success") is True
def test_export_devices_csv(client, api_token, test_mac):
# Create a device first
create_dummy(client, api_token, test_mac)
@@ -92,6 +99,7 @@ def test_export_devices_csv(client, api_token, test_mac):
# CSV should contain test_mac
assert test_mac in resp.data.decode()
def test_export_devices_json(client, api_token, test_mac):
# Create a device first
create_dummy(client, api_token, test_mac)
@@ -143,6 +151,7 @@ def test_export_import_cycle_base64(client, api_token, test_mac):
assert resp.json.get("inserted") >= 1
assert resp.json.get("skipped_lines") == []
def test_devices_totals(client, api_token, test_mac):
# 1. Create a dummy device
create_dummy(client, api_token, test_mac)
@@ -189,6 +198,7 @@ def test_devices_by_status(client, api_token, test_mac):
assert fav_data is not None
assert "&#9733" in fav_data["title"]
def test_delete_test_devices(client, api_token, test_mac):
# Delete by MAC

View File

@@ -1,37 +1,38 @@
import sys
import pathlib
import sqlite3
import random
import string
import uuid
import os
import pytest
from datetime import datetime, timedelta
import random
from datetime import timedelta
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from helper import get_setting_value
from utils.datetime_utils import timeNowTZ
from api_server.api_server_start import app
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
from utils.datetime_utils import timeNowTZ # noqa: E402 [flake8 lint suppression]
from api_server.api_server_start import app # noqa: E402 [flake8 lint suppression]
@pytest.fixture(scope="session")
def api_token():
return get_setting_value("API_TOKEN")
@pytest.fixture
def client():
with app.test_client() as client:
yield client
@pytest.fixture
def test_mac():
# Generate a unique MAC for each test run
return "AA:BB:CC:" + ":".join(f"{random.randint(0,255):02X}" for _ in range(3))
return "AA:BB:CC:" + ":".join(f"{random.randint(0, 255):02X}" for _ in range(3))
def auth_headers(token):
return {"Authorization": f"Bearer {token}"}
def create_event(client, api_token, mac, event="UnitTest Event", days_old=None):
payload = {"ip": "0.0.0.0", "event_type": event}
@@ -43,10 +44,12 @@ def create_event(client, api_token, mac, event="UnitTest Event", days_old=None):
return client.post(f"/events/create/{mac}", json=payload, headers=auth_headers(api_token))
def list_events(client, api_token, mac=None):
url = "/events" if mac is None else f"/events?mac={mac}"
return client.get(url, headers=auth_headers(api_token))
def test_create_event(client, api_token, test_mac):
# create event
resp = create_event(client, api_token, test_mac)
@@ -82,6 +85,7 @@ def test_delete_events_for_mac(client, api_token, test_mac):
assert resp.status_code == 200
assert len(resp.json.get("events", [])) == 0
def test_get_events_totals(client, api_token):
# 1. Request totals with default period
resp = client.get(
@@ -108,7 +112,6 @@ def test_get_events_totals(client, api_token):
assert len(data_month) == 6
def test_delete_all_events(client, api_token, test_mac):
# create two events
create_event(client, api_token, test_mac)
@@ -146,5 +149,3 @@ def test_delete_events_dynamic_days(client, api_token, test_mac):
events = resp.get_json().get("events", [])
mac_events = [ev for ev in events if ev.get("eve_MAC") == test_mac]
assert len(mac_events) == 1

View File

@@ -1,31 +1,30 @@
import sys
import pathlib
import sqlite3
import random
import string
import uuid
import pytest
from datetime import datetime, timedelta
INSTALL_PATH = "/app"
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from helper import get_setting_value
from api_server.api_server_start import app
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
from api_server.api_server_start import app # noqa: E402 [flake8 lint suppression]
@pytest.fixture(scope="session")
def api_token():
return get_setting_value("API_TOKEN")
@pytest.fixture
def client():
with app.test_client() as client:
yield client
@pytest.fixture
def test_mac():
# Generate a unique MAC for each test run
return "AA:BB:CC:" + ":".join(f"{random.randint(0,255):02X}" for _ in range(3))
return "AA:BB:CC:" + ":".join(f"{random.randint(0, 255):02X}" for _ in range(3))
def auth_headers(token):
return {"Authorization": f"Bearer {token}"}
@@ -37,6 +36,7 @@ def test_graphql_debug_get(client):
assert resp.status_code == 200
assert resp.data.decode() == "NetAlertX GraphQL server running."
def test_graphql_post_unauthorized(client):
"""POST /graphql without token should return 401"""
query = {"query": "{ devices { devName devMac } }"}
@@ -47,6 +47,7 @@ def test_graphql_post_unauthorized(client):
# --- DEVICES TESTS ---
def test_graphql_post_devices(client, api_token):
"""POST /graphql with a valid token should return device data"""
query = {
@@ -77,8 +78,8 @@ def test_graphql_post_devices(client, api_token):
assert isinstance(data["devices"]["devices"], list)
assert isinstance(data["devices"]["count"], int)
# --- SETTINGS TESTS ---
# --- SETTINGS TESTS ---
def test_graphql_post_settings(client, api_token):
"""POST /graphql should return settings data"""
query = {
@@ -97,8 +98,8 @@ def test_graphql_post_settings(client, api_token):
assert "settings" in data
assert isinstance(data["settings"]["settings"], list)
# --- LANGSTRINGS TESTS ---
# --- LANGSTRINGS TESTS ---
def test_graphql_post_langstrings_specific(client, api_token):
"""Retrieve a specific langString in a given language"""
query = {

View File

@@ -1,17 +1,13 @@
import sys
import pathlib
import sqlite3
import random
import string
import uuid
import os
import pytest
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from helper import get_setting_value
from api_server.api_server_start import app
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
from api_server.api_server_start import app # noqa: E402 [flake8 lint suppression]
@pytest.fixture(scope="session")
@@ -28,7 +24,7 @@ def client():
@pytest.fixture
def test_mac():
# Generate a unique MAC for each test run
return "AA:BB:CC:" + ":".join(f"{random.randint(0,255):02X}" for _ in range(3))
return "AA:BB:CC:" + ":".join(f"{random.randint(0, 255):02X}" for _ in range(3))
def auth_headers(token):
@@ -36,6 +32,6 @@ def auth_headers(token):
def test_delete_history(client, api_token):
resp = client.delete(f"/history", headers=auth_headers(api_token))
resp = client.delete("/history", headers=auth_headers(api_token))
assert resp.status_code == 200
assert resp.json.get("success") is True

View File

@@ -5,8 +5,9 @@ import pytest
INSTALL_PATH = "/app"
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from helper import get_setting_value
from api_server.api_server_start import app
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
from api_server.api_server_start import app # noqa: E402 [flake8 lint suppression]
# ----------------------------
# Fixtures
@@ -15,14 +16,17 @@ from api_server.api_server_start import app
def api_token():
return get_setting_value("API_TOKEN")
@pytest.fixture
def client():
with app.test_client() as client:
yield client
def auth_headers(token):
return {"Authorization": f"Bearer {token}"}
# ----------------------------
# Logs Endpoint Tests
# ----------------------------
@@ -31,16 +35,18 @@ def test_clean_log(client, api_token):
assert resp.status_code == 200
assert resp.json.get("success") is True
def test_clean_log_not_allowed(client, api_token):
resp = client.delete("/logs?file=not_allowed.log", headers=auth_headers(api_token))
assert resp.status_code == 400
assert resp.json.get("success") is False
# ----------------------------
# Execution Queue Endpoint Tests
# ----------------------------
def test_add_to_execution_queue(client, api_token):
action_name = f"test_action_{random.randint(0,9999)}"
action_name = f"test_action_{random.randint(0, 9999)}"
resp = client.post(
"/logs/add-to-execution-queue",
json={"action": action_name},
@@ -50,6 +56,7 @@ def test_add_to_execution_queue(client, api_token):
assert resp.json.get("success") is True
assert action_name in resp.json.get("message", "")
def test_add_to_execution_queue_missing_action(client, api_token):
resp = client.post(
"/logs/add-to-execution-queue",

View File

@@ -1,11 +1,8 @@
# -----------------------------
# In-app notifications tests with cleanup
# -----------------------------
import json
import random
import string
import uuid
import pytest
import os
import sys
@@ -14,26 +11,31 @@ import sys
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from api_server.api_server_start import app
from messaging.in_app import NOTIFICATION_API_FILE # Import the path to notifications file
from helper import get_setting_value
from api_server.api_server_start import app # noqa: E402 [flake8 lint suppression]
from messaging.in_app import NOTIFICATION_API_FILE # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
@pytest.fixture(scope="session")
def api_token():
return get_setting_value("API_TOKEN")
@pytest.fixture
def client():
with app.test_client() as client:
yield client
def auth_headers(token):
return {"Authorization": f"Bearer {token}"}
@pytest.fixture
def random_content():
return "Test Notification " + "".join(random.choices(string.ascii_letters + string.digits, k=6))
@pytest.fixture
def notification_guid(client, api_token, random_content):
# Write a notification and return its GUID
@@ -50,6 +52,7 @@ def notification_guid(client, api_token, random_content):
assert guid is not None
return guid
@pytest.fixture(autouse=True)
def cleanup_notifications():
# Runs before and after each test
@@ -70,6 +73,7 @@ def cleanup_notifications():
with open(NOTIFICATION_API_FILE, "w") as f:
f.write(backup)
# -----------------------------
def test_write_notification(client, api_token, random_content):
resp = client.post(
@@ -80,6 +84,7 @@ def test_write_notification(client, api_token, random_content):
assert resp.status_code == 200
assert resp.json.get("success") is True
def test_get_unread_notifications(client, api_token, random_content):
client.post("/messaging/in-app/write", json={"content": random_content}, headers=auth_headers(api_token))
resp = client.get("/messaging/in-app/unread", headers=auth_headers(api_token))
@@ -87,22 +92,26 @@ def test_get_unread_notifications(client, api_token, random_content):
notifications = resp.json
assert any(n["content"] == random_content for n in notifications)
def test_mark_all_notifications_read(client, api_token, random_content):
client.post("/messaging/in-app/write", json={"content": random_content}, headers=auth_headers(api_token))
resp = client.post("/messaging/in-app/read/all", headers=auth_headers(api_token))
assert resp.status_code == 200
assert resp.json.get("success") is True
def test_mark_single_notification_read(client, api_token, notification_guid):
resp = client.post(f"/messaging/in-app/read/{notification_guid}", headers=auth_headers(api_token))
assert resp.status_code == 200
assert resp.json.get("success") is True
def test_delete_single_notification(client, api_token, notification_guid):
resp = client.delete(f"/messaging/in-app/delete/{notification_guid}", headers=auth_headers(api_token))
assert resp.status_code == 200
assert resp.json.get("success") is True
def test_delete_all_notifications(client, api_token, random_content):
# Add a notification first
client.post("/messaging/in-app/write", json={"content": random_content}, headers=auth_headers(api_token))

View File

@@ -1,32 +1,31 @@
import sys
import pathlib
import sqlite3
import base64
import random
import string
import uuid
import os
import pytest
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from helper import get_setting_value
from api_server.api_server_start import app
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
from api_server.api_server_start import app # noqa: E402 [flake8 lint suppression]
@pytest.fixture(scope="session")
def api_token():
return get_setting_value("API_TOKEN")
@pytest.fixture
def client():
with app.test_client() as client:
yield client
@pytest.fixture
def test_mac():
# Generate a unique MAC for each test run
return "AA:BB:CC:" + ":".join(f"{random.randint(0,255):02X}" for _ in range(3))
return "AA:BB:CC:" + ":".join(f"{random.randint(0, 255):02X}" for _ in range(3))
def auth_headers(token):
return {"Authorization": f"Bearer {token}"}
@@ -40,7 +39,8 @@ def create_dummy(client, api_token, test_mac):
"devType": "Router",
"devVendor": "TestVendor",
}
resp = client.post(f"/device/{test_mac}", json=payload, headers=auth_headers(api_token))
client.post(f"/device/{test_mac}", json=payload, headers=auth_headers(api_token))
def test_wakeonlan_device(client, api_token, test_mac):
# 1. Ensure at least one device exists
@@ -73,6 +73,7 @@ def test_wakeonlan_device(client, api_token, test_mac):
assert data.get("success") is True
assert "WOL packet sent" in data.get("message", "")
def test_speedtest_endpoint(client, api_token):
# 1. Call the speedtest endpoint
resp = client.get("/nettools/speedtest", headers=auth_headers(api_token))
@@ -93,6 +94,7 @@ def test_speedtest_endpoint(client, api_token):
# Optionally check that output lines are strings
assert all(isinstance(line, str) for line in data["output"])
def test_traceroute_device(client, api_token, test_mac):
# 1. Ensure at least one device exists
create_dummy(client, api_token, test_mac)
@@ -127,6 +129,7 @@ def test_traceroute_device(client, api_token, test_mac):
assert "output" in data
assert isinstance(data["output"], str)
@pytest.mark.parametrize("ip,expected_status", [
("8.8.8.8", 200),
("256.256.256.256", 400), # Invalid IP
@@ -147,6 +150,7 @@ def test_nslookup_endpoint(client, api_token, ip, expected_status):
assert data.get("success") is False
assert "error" in data
@pytest.mark.parametrize("ip,mode,expected_status", [
("127.0.0.1", "fast", 200),
pytest.param("127.0.0.1", "normal", 200, marks=pytest.mark.feature_complete),
@@ -172,6 +176,7 @@ def test_nmap_endpoint(client, api_token, ip, mode, expected_status):
assert data.get("success") is False
assert "error" in data
def test_nslookup_unauthorized(client):
# No auth headers
resp = client.post("/nettools/nslookup", json={"devLastIP": "8.8.8.8"})
@@ -180,6 +185,7 @@ def test_nslookup_unauthorized(client):
assert data.get("success") is False
assert data.get("error") == "Forbidden"
def test_nmap_unauthorized(client):
# No auth headers
resp = client.post("/nettools/nmap", json={"scan": "127.0.0.1", "mode": "fast"})

View File

@@ -1,9 +1,5 @@
import sys
import pathlib
import sqlite3
import random
import string
import uuid
import os
import pytest
from datetime import datetime, timedelta
@@ -11,31 +7,35 @@ from datetime import datetime, timedelta
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from helper import get_setting_value
from utils.datetime_utils import timeNowTZ, timeNowDB
from api_server.api_server_start import app
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
from utils.datetime_utils import timeNowTZ, timeNowDB # noqa: E402 [flake8 lint suppression]
from api_server.api_server_start import app # noqa: E402 [flake8 lint suppression]
@pytest.fixture(scope="session")
def api_token():
return get_setting_value("API_TOKEN")
@pytest.fixture
def client():
with app.test_client() as client:
yield client
@pytest.fixture
def test_mac():
# Generate a unique MAC for each test run
return "AA:BB:CC:" + ":".join(f"{random.randint(0,255):02X}" for _ in range(3))
return "AA:BB:CC:" + ":".join(f"{random.randint(0, 255):02X}" for _ in range(3))
def auth_headers(token):
return {"Authorization": f"Bearer {token}"}
def test_create_device(client, api_token, test_mac):
payload = {
"createNew": True,
"devType": "Test Device",
"devOwner": "Unit Test",
"devType": "Router",
"devVendor": "TestVendor",
@@ -129,7 +129,7 @@ def test_device_session_events(client, api_token, test_mac):
# 2. Fetch session events with default type ('all') and period ('7 days')
resp = client.get(
f"/sessions/session-events?type=all&period=7 days",
"/sessions/session-events?type=all&period=7 days",
headers=auth_headers(api_token)
)
assert resp.status_code == 200
@@ -159,6 +159,7 @@ def test_device_session_events(client, api_token, test_mac):
sessions = resp_sessions.json["data"]
assert isinstance(sessions, list)
# -----------------------------
def test_delete_session(client, api_token, test_mac):
# First create session
@@ -180,15 +181,12 @@ def test_delete_session(client, api_token, test_mac):
assert not any(ses["ses_MAC"] == test_mac for ses in sessions)
def test_get_sessions_calendar(client, api_token, test_mac):
"""
Test the /sessions/calendar endpoint.
Creates session and ensures the calendar output is correct.
Cleans up test sessions after test.
"""
# --- Setup: create two sessions for the test MAC ---
now = timeNowTZ()
start1 = (now - timedelta(days=2)).isoformat(timespec="seconds")

View File

@@ -1,36 +1,36 @@
import sys
import pathlib
import sqlite3
import random
import string
import uuid
import os
import pytest
from datetime import datetime, timedelta
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from helper import get_setting_value
from api_server.api_server_start import app
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
from api_server.api_server_start import app # noqa: E402 [flake8 lint suppression]
@pytest.fixture(scope="session")
def api_token():
return get_setting_value("API_TOKEN")
@pytest.fixture
def client():
with app.test_client() as client:
yield client
@pytest.fixture
def test_mac():
# Generate a unique MAC for each test run
return "AA:BB:CC:" + ":".join(f"{random.randint(0,255):02X}" for _ in range(3))
return "AA:BB:CC:" + ":".join(f"{random.randint(0, 255):02X}" for _ in range(3))
def auth_headers(token):
return {"Authorization": f"Bearer {token}"}
def test_get_setting_unauthorized(client):
resp = client.get("/settings/API_TOKEN") # no auth header
assert resp.status_code == 403

View File

@@ -6,16 +6,17 @@ Tests the fix for Issue #1210 - compound conditions with multiple AND/OR clauses
import sys
import pytest
import os
from unittest.mock import MagicMock
# Mock the logger module before importing SafeConditionBuilder
sys.modules['logger'] = MagicMock()
# Add parent directory to path for imports
import os
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))
from server.db.sql_safe_builder import SafeConditionBuilder
from server.db.sql_safe_builder import SafeConditionBuilder # noqa: E402 [flake8 lint suppression]
@pytest.fixture
@@ -100,6 +101,7 @@ def test_multiple_or_clauses(builder):
assert 'Device2' in param_values
assert 'Device3' in param_values
def test_mixed_and_or_clauses(builder):
"""Test mixed AND/OR logical operators."""
condition = "AND devName = 'Device1' OR devName = 'Device2' AND devFavorite = '1'"

View File

@@ -137,7 +137,7 @@ def test_unicode_support(builder, unicode_str):
@pytest.mark.parametrize("case", [
"", " ", "AND devName = ''", "AND devName = 'a'", "AND devName = '" + "x"*500 + "'"
"", " ", "AND devName = ''", "AND devName = 'a'", "AND devName = '" + "x" * 500 + "'"
])
def test_edge_cases(builder, case):
try:

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python3
#!/usr/bin/env python3
"""
Comprehensive SQL Injection Prevention Tests for NetAlertX
@@ -15,7 +15,7 @@ sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', 'server'))
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', 'server', 'db'))
# Now import our module
from sql_safe_builder import SafeConditionBuilder
from sql_safe_builder import SafeConditionBuilder # noqa: E402 [flake8 lint suppression]
@pytest.fixture

View File

@@ -13,16 +13,15 @@ import unittest
import sqlite3
import tempfile
import os
from unittest.mock import Mock, patch, MagicMock
from unittest.mock import Mock, patch
# Add the server directory to the path for imports
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/server"])
sys.path.append('/home/dell/coding/bash/10x-agentic-setup/netalertx-sql-fix/server')
from db.sql_safe_builder import SafeConditionBuilder, create_safe_condition_builder
from database import DB
from messaging.reporting import get_notifications
from db.sql_safe_builder import SafeConditionBuilder # noqa: E402 [flake8 lint suppression]
from messaging.reporting import get_notifications # noqa: E402 [flake8 lint suppression]
class TestSafeConditionBuilder(unittest.TestCase):
@@ -169,7 +168,6 @@ class TestSafeConditionBuilder(unittest.TestCase):
self.assertIn('Disconnected', params.values())
class TestDatabaseParameterSupport(unittest.TestCase):
"""Test that database layer supports parameterized queries."""
@@ -212,7 +210,7 @@ class TestDatabaseParameterSupport(unittest.TestCase):
# This should not cause SQL injection
malicious_input = "'; DROP TABLE test_table; --"
cursor.execute("SELECT * FROM test_table WHERE name = :name", {'name': malicious_input})
results = cursor.fetchall()
# results = cursor.fetchall()
# The table should still exist and be queryable
cursor.execute("SELECT COUNT(*) FROM test_table")
@@ -245,7 +243,7 @@ class TestReportingSecurityIntegration(unittest.TestCase):
}.get(key, '')
# Call the function
result = get_notifications(self.mock_db)
get_notifications(self.mock_db)
# Verify that get_table_as_json was called with parameters
self.mock_db.get_table_as_json.assert_called()
@@ -265,7 +263,6 @@ class TestReportingSecurityIntegration(unittest.TestCase):
# Ensure the parameter dict has the correct value (using actual param name)
self.assertEqual(list(params.values())[0], "TestDevice")
@patch('messaging.reporting.get_setting_value')
def test_events_section_security(self, mock_get_setting):
"""Test that events section uses safe SQL building."""
@@ -276,7 +273,7 @@ class TestReportingSecurityIntegration(unittest.TestCase):
}.get(key, '')
# Call the function
result = get_notifications(self.mock_db)
get_notifications(self.mock_db)
# Verify that get_table_as_json was called with parameters
self.mock_db.get_table_as_json.assert_called()
@@ -291,7 +288,7 @@ class TestReportingSecurityIntegration(unittest.TestCase):
}.get(key, '')
# Call the function - should not raise an exception
result = get_notifications(self.mock_db)
get_notifications(self.mock_db)
# Should still call get_table_as_json (with safe fallback query)
self.mock_db.get_table_as_json.assert_called()
@@ -306,7 +303,7 @@ class TestReportingSecurityIntegration(unittest.TestCase):
}.get(key, '')
# Call the function
result = get_notifications(self.mock_db)
get_notifications(self.mock_db)
# Should call get_table_as_json
self.mock_db.get_table_as_json.assert_called()

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python3
#!/usr/bin/env python3
"""
Pytest-based Mount Diagnostic Tests for NetAlertX

View File

@@ -139,10 +139,10 @@ def _run_container(
# Copy the script content and run it
script_path = pathlib.Path("install/production-filesystem/entrypoint.d/99-ports-available.sh")
with script_path.open('r', encoding='utf-8') as f:
script_content = f.read()
script_cont = f.read()
# Use printf to avoid shell interpretation issues
script = f"printf '%s\\n' '{script_content.replace(chr(39), chr(39)+chr(92)+chr(39)+chr(39))}' > /tmp/ports-check.sh && chmod +x /tmp/ports-check.sh && sh /tmp/ports-check.sh"
script = f"printf '%s\\n' '{script_cont.replace(chr(39), chr(39) + chr(92) + chr(39) + chr(39))}' > /tmp/ports-check.sh && chmod +x /tmp/ports-check.sh && sh /tmp/ports-check.sh" # noqa: E501 - inline script
cmd.extend(["--entrypoint", "/bin/sh", IMAGE, "-c", script])
print(f"\n--- DOCKER CMD ---\n{' '.join(cmd)}\n--- END CMD ---\n")
@@ -157,8 +157,7 @@ def _run_container(
# Combine and clean stdout and stderr
stdouterr = (
re.sub(r'\x1b\[[0-9;]*m', '', result.stdout or '') +
re.sub(r'\x1b\[[0-9;]*m', '', result.stderr or '')
re.sub(r'\x1b\[[0-9;]*m', '', result.stdout or '') + re.sub(r'\x1b\[[0-9;]*m', '', result.stderr or '')
)
result.output = stdouterr
print(f"\n--- CONTAINER stdout ---\n{result.stdout}")

Some files were not shown because too many files have changed in this diff Show More