BE: linting fixes

Signed-off-by: jokob-sk <jokob.sk@gmail.com>
jokob-sk
2025-11-22 13:14:06 +11:00
parent f0abd500d9
commit 5c14b34a8b
104 changed files with 2163 additions and 2199 deletions

View File

@@ -1,6 +1,6 @@
import json
import os
import sys
def merge_translations(main_file, other_files):
# Load main file
@@ -30,10 +30,14 @@ def merge_translations(main_file, other_files):
json.dump(data, f, indent=4, ensure_ascii=False)
f.truncate()
if __name__ == "__main__":
current_path = os.path.dirname(os.path.abspath(__file__))
# language codes can be found here: http://www.lingoes.net/en/translator/langcode.htm
# "en_us.json" has to be first!
json_files = [ "en_us.json", "ar_ar.json", "ca_ca.json", "cs_cz.json", "de_de.json", "es_es.json", "fa_fa.json", "fr_fr.json", "it_it.json", "ja_jp.json", "nb_no.json", "pl_pl.json", "pt_br.json", "pt_pt.json", "ru_ru.json", "sv_sv.json", "tr_tr.json", "uk_ua.json", "zh_cn.json"]
# "en_us.json" has to be first!
json_files = ["en_us.json", "ar_ar.json", "ca_ca.json", "cs_cz.json", "de_de.json",
"es_es.json", "fa_fa.json", "fr_fr.json", "it_it.json", "ja_jp.json",
"nb_no.json", "pl_pl.json", "pt_br.json", "pt_pt.json", "ru_ru.json",
"sv_sv.json", "tr_tr.json", "uk_ua.json", "zh_cn.json"]
file_paths = [os.path.join(current_path, file) for file in json_files]
merge_translations(file_paths[0], file_paths[1:])
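
The f.truncate() call after json.dump above implies the translation files are opened in r+ mode and rewritten in place. A minimal sketch of that read-update-truncate pattern, with hypothetical file names and a simplified merge step standing in for the real logic:

import json

def fill_missing_keys(main_file, other_file):
    # Load the reference language (keys every translation should have)
    with open(main_file, encoding="utf-8") as f:
        main = json.load(f)

    # Rewrite the translation in place: read, update, seek back, dump, truncate
    with open(other_file, "r+", encoding="utf-8") as f:
        data = json.load(f)
        for key, value in main.items():
            data.setdefault(key, value)  # fall back to the English string
        f.seek(0)
        json.dump(data, f, indent=4, ensure_ascii=False)
        f.truncate()  # drop leftover bytes if the new JSON is shorter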

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python
# !/usr/bin/env python
import os
import sys
@@ -8,12 +8,12 @@ from pytz import timezone
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from const import logPath
from plugin_helper import Plugin_Objects
from logger import mylog, Logger
from helper import get_setting_value
from const import logPath # noqa: E402, E261 [flake8 lint suppression]
from plugin_helper import Plugin_Objects # noqa: E402, E261 [flake8 lint suppression]
from logger import mylog, Logger # noqa: E402, E261 [flake8 lint suppression]
from helper import get_setting_value # noqa: E402, E261 [flake8 lint suppression]
import conf
import conf # noqa: E402, E261 [flake8 lint suppression]
# Make sure the TIMEZONE for logging is correct
conf.tz = timezone(get_setting_value('TIMEZONE'))
@@ -32,9 +32,8 @@ RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
plugin_objects = Plugin_Objects(RESULT_FILE)
def main():
mylog('verbose', [f'[{pluginName}] In script'])
mylog('verbose', [f'[{pluginName}] In script'])
# Retrieve configuration settings
some_setting = get_setting_value('SYNC_plugins')
@@ -47,14 +46,14 @@ def main():
# Process the data into native application tables
if len(device_data) > 0:
# insert devices into the last_result.log
# make sure the below mapping is mapped in config.json, for example:
# insert devices into the last_result.log
# make sure the below mapping is mapped in config.json, for example:
# "database_column_definitions": [
# {
# "column": "Object_PrimaryID", <--------- the value I save into primaryId
# "mapped_to_column": "cur_MAC", <--------- gets inserted into the CurrentScan DB
# table column cur_MAC
#
#
for device in device_data:
plugin_objects.add_object(
primaryId = device['mac_address'],
@@ -65,11 +64,11 @@ def main():
watched4 = device['last_seen'],
extra = '',
foreignKey = device['mac_address']
# helpVal1 = "Something1", # Optional Helper values to be passed for mapping into the app
# helpVal2 = "Something1", # If you need to use even only 1, add the remaining ones too
# helpVal1 = "Something1", # Optional Helper values to be passed for mapping into the app
# helpVal2 = "Something1", # If you need to use even only 1, add the remaining ones too
# helpVal3 = "Something1", # and set them to 'null'. Check the docs for details:
# helpVal4 = "Something1", # https://github.com/jokob-sk/NetAlertX/blob/main/docs/PLUGINS_DEV.md
)
)
mylog('verbose', [f'[{pluginName}] New entries: "{len(device_data)}"'])
@@ -78,14 +77,15 @@ def main():
return 0
# retrieve data
def get_device_data(some_setting):
device_data = []
# do some processing, call external APIs, and return a device_data list
# ...
#
#
# Sample data for testing purposes, you can adjust the processing in main() as needed
# ... before adding it to the plugin_objects.add_object(...)
device_data = [
@@ -113,8 +113,9 @@ def get_device_data(some_setting):
}
]
# Return the data to be detected by the main application
# Return the data to be detected by the main application
return device_data
if __name__ == '__main__':
main()
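
The # noqa: E402 suppressions above address flake8's E402 rule (module-level import not at top of file). The pattern is unavoidable in these plugins because sys.path has to be extended before the NetAlertX modules can resolve, so the imports necessarily follow executable code. A minimal illustration:

import os
import sys

# Paths must be registered before the app's own modules are importable,
# which forces the imports below to sit after executable statements.
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])

from logger import mylog  # noqa: E402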

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python
# !/usr/bin/env python
# Just a testing library plugin for development purposes
import os
import sys
@@ -11,10 +11,10 @@ INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
# NetAlertX modules
from const import logPath
from plugin_helper import Plugin_Objects
from logger import mylog
from helper import get_setting_value
from const import logPath # noqa: E402 [flake8 lint suppression]
from plugin_helper import Plugin_Objects # noqa: E402 [flake8 lint suppression]
from logger import mylog # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
pluginName = 'TESTONLY'
@@ -28,14 +28,11 @@ plugin_objects = Plugin_Objects(RESULT_FILE)
md5_hash = hashlib.md5()
# globals
def main():
# START
mylog('verbose', [f'[{pluginName}] In script'])
mylog('verbose', [f'[{pluginName}] In script'])
# SPACE FOR TESTING 🔽
str = "ABC-MBP._another.localdomain."
@@ -43,28 +40,23 @@ def main():
# result = cleanDeviceName(str, True)
regexes = get_setting_value('NEWDEV_NAME_CLEANUP_REGEX')
print(regexes)
subnets = get_setting_value('SCAN_SUBNETS')
print(subnets)
for rgx in regexes:
for rgx in regexes:
mylog('trace', ["[cleanDeviceName] applying regex : " + rgx])
mylog('trace', ["[cleanDeviceName] name before regex : " + str])
str = re.sub(rgx, "", str)
mylog('trace', ["[cleanDeviceName] name after regex : " + str])
mylog('debug', ["[cleanDeviceName] output: " + str])
# SPACE FOR TESTING 🔼
# END
mylog('verbose', [f'[{pluginName}] result "{str}"'])
mylog('verbose', [f'[{pluginName}] result "{str}"'])
# -------------INIT---------------------
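
The test above applies each NEWDEV_NAME_CLEANUP_REGEX entry with re.sub while reusing the builtin name str for the working value. A sketch of the same cleanup without the shadowing, using a hypothetical regex list:

import re

def clean_device_name(name, regexes):
    # Strip each configured pattern in turn, e.g. a trailing mDNS suffix
    for rgx in regexes:
        name = re.sub(rgx, "", name)
    return name

print(clean_device_name("ABC-MBP._another.localdomain.",
                        [r"\._another\.localdomain\.$"]))  # -> ABC-MBP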

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python
# !/usr/bin/env python
import json
import subprocess
@@ -9,15 +9,15 @@ import sys
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
import conf
from const import confFileName, logPath
from utils.datetime_utils import timeNowDB
from plugin_helper import Plugin_Objects
from logger import mylog, Logger
from helper import get_setting_value
from models.notification_instance import NotificationInstance
from database import DB
from pytz import timezone
import conf # noqa: E402 [flake8 lint suppression]
from const import confFileName, logPath # noqa: E402 [flake8 lint suppression]
from utils.datetime_utils import timeNowDB # noqa: E402 [flake8 lint suppression]
from plugin_helper import Plugin_Objects # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
from models.notification_instance import NotificationInstance # noqa: E402 [flake8 lint suppression]
from database import DB # noqa: E402 [flake8 lint suppression]
from pytz import timezone # noqa: E402 [flake8 lint suppression]
# Make sure the TIMEZONE for logging is correct
conf.tz = timezone(get_setting_value("TIMEZONE"))
@@ -35,7 +35,7 @@ def main():
mylog("verbose", [f"[{pluginName}](publisher) In script"])
# Check if basic config settings supplied
if check_config() == False:
if check_config() is False:
mylog(
"none",
[
@@ -65,9 +65,9 @@ def main():
# Log result
plugin_objects.add_object(
primaryId = pluginName,
secondaryId = timeNowDB(),
secondaryId = timeNowDB(),
watched1 = notification["GUID"],
watched2 = result,
watched2 = result,
watched3 = 'null',
watched4 = 'null',
extra = 'null',
@@ -80,8 +80,7 @@ def main():
# -------------------------------------------------------------------------------
def check_config():
if get_setting_value("APPRISE_HOST") == "" or (
get_setting_value("APPRISE_URL") == ""
and get_setting_value("APPRISE_TAG") == ""
get_setting_value("APPRISE_URL") == "" and get_setting_value("APPRISE_TAG") == ""
):
return False
else:
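
Several files in this commit replace check_config() == False with check_config() is False, which satisfies flake8's E712 but is not a blanket equivalent: is False matches only the bool singleton. A small illustration of the distinction:

def check_config():
    # A hypothetical check that signals "not configured"
    # with a falsy value rather than the bool False
    return None

print(check_config() is False)  # False: None is not the bool singleton
print(not check_config())       # True: `not` treats any falsy value as failure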

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python
# !/usr/bin/env python
import os
import sys
import re
@@ -16,15 +16,15 @@ INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
# NetAlertX modules
import conf
from const import confFileName, logPath
from plugin_helper import Plugin_Objects
from utils.datetime_utils import timeNowDB
from logger import mylog, Logger
from helper import get_setting_value, hide_email
from models.notification_instance import NotificationInstance
from database import DB
from pytz import timezone
import conf # noqa: E402 [flake8 lint suppression]
from const import confFileName, logPath # noqa: E402 [flake8 lint suppression]
from plugin_helper import Plugin_Objects # noqa: E402 [flake8 lint suppression]
from utils.datetime_utils import timeNowDB # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value, hide_email # noqa: E402 [flake8 lint suppression]
from models.notification_instance import NotificationInstance # noqa: E402 [flake8 lint suppression]
from database import DB # noqa: E402 [flake8 lint suppression]
from pytz import timezone # noqa: E402 [flake8 lint suppression]
# Make sure the TIMEZONE for logging is correct
conf.tz = timezone(get_setting_value('TIMEZONE'))
@@ -38,13 +38,12 @@ LOG_PATH = logPath + '/plugins'
RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
def main():
mylog('verbose', [f'[{pluginName}](publisher) In script'])
mylog('verbose', [f'[{pluginName}](publisher) In script'])
# Check if basic config settings supplied
if check_config() == False:
if check_config() is False:
mylog('none', [f'[{pluginName}] ⚠ ERROR: Publisher notification gateway not set up correctly. Check your {confFileName} {pluginName}_* variables.'])
return
@@ -61,7 +60,7 @@ def main():
# Retrieve new notifications
new_notifications = notifications.getNew()
# mylog('verbose', [f'[{pluginName}] new_notifications: ', new_notifications])
# mylog('verbose', [f'[{pluginName}] new_notifications: ', new_notifications])
mylog('verbose', [f'[{pluginName}] SMTP_SERVER: ', get_setting_value("SMTP_SERVER")])
mylog('verbose', [f'[{pluginName}] SMTP_PORT: ', get_setting_value("SMTP_PORT")])
mylog('verbose', [f'[{pluginName}] SMTP_SKIP_LOGIN: ', get_setting_value("SMTP_SKIP_LOGIN")])
@@ -72,19 +71,18 @@ def main():
# mylog('verbose', [f'[{pluginName}] SMTP_REPORT_TO: ', get_setting_value("SMTP_REPORT_TO")])
# mylog('verbose', [f'[{pluginName}] SMTP_REPORT_FROM: ', get_setting_value("SMTP_REPORT_FROM")])
# Process the new notifications (see the Notifications DB table for structure or check the /php/server/query_json.php?file=table_notifications.json endpoint)
for notification in new_notifications:
# Send notification
result = send(notification["HTML"], notification["Text"])
result = send(notification["HTML"], notification["Text"])
# Log result
plugin_objects.add_object(
primaryId = pluginName,
secondaryId = timeNowDB(),
secondaryId = timeNowDB(),
watched1 = notification["GUID"],
watched2 = result,
watched2 = result,
watched3 = 'null',
watched4 = 'null',
extra = 'null',
@@ -93,25 +91,33 @@ def main():
plugin_objects.write_result_file()
#-------------------------------------------------------------------------------
def check_config ():
# -------------------------------------------------------------------------------
def check_config():
server = get_setting_value('SMTP_SERVER')
report_to = get_setting_value("SMTP_REPORT_TO")
report_from = get_setting_value("SMTP_REPORT_FROM")
if server == '' or report_from == '' or report_to == '':
mylog('none', [f'[Email Check Config] ⚠ ERROR: Email service not set up correctly. Check your {confFileName} SMTP_*, SMTP_REPORT_FROM and SMTP_REPORT_TO variables.'])
return False
else:
return True
#-------------------------------------------------------------------------------
# -------------------------------------------------------------------------------
def send(pHTML, pText):
mylog('debug', [f'[{pluginName}] SMTP_REPORT_TO: {hide_email(str(get_setting_value("SMTP_REPORT_TO")))} SMTP_USER: {hide_email(str(get_setting_value("SMTP_USER")))}'])
subject, from_email, to_email, message_html, message_text = sanitize_email_content(str(get_setting_value("SMTP_SUBJECT")), get_setting_value("SMTP_REPORT_FROM"), get_setting_value("SMTP_REPORT_TO"), pHTML, pText)
subject, from_email, to_email, message_html, message_text = sanitize_email_content(
str(get_setting_value("SMTP_SUBJECT")),
get_setting_value("SMTP_REPORT_FROM"),
get_setting_value("SMTP_REPORT_TO"),
pHTML,
pText
)
emails = []
@@ -132,10 +138,10 @@ def send(pHTML, pText):
msg['Subject'] = subject
msg['From'] = from_email
msg['To'] = mail_addr
msg['Date'] = formatdate(localtime=True)
msg['Date'] = formatdate(localtime=True)
msg.attach (MIMEText (message_text, 'plain'))
msg.attach (MIMEText (message_html, 'html'))
msg.attach(MIMEText(message_text, 'plain'))
msg.attach(MIMEText(message_html, 'html'))
# Set a timeout for the SMTP connection (in seconds)
smtp_timeout = 30
@@ -144,30 +150,31 @@ def send(pHTML, pText):
if get_setting_value("LOG_LEVEL") == 'debug':
send_email(msg,smtp_timeout)
send_email(msg, smtp_timeout)
else:
try:
send_email(msg,smtp_timeout)
except smtplib.SMTPAuthenticationError as e:
send_email(msg, smtp_timeout)
except smtplib.SMTPAuthenticationError as e:
mylog('none', [' ERROR: Couldn\'t connect to the SMTP server (SMTPAuthenticationError)'])
mylog('none', [' ERROR: Double-check your SMTP_USER and SMTP_PASS settings.)'])
mylog('none', [' ERROR: ', str(e)])
except smtplib.SMTPServerDisconnected as e:
except smtplib.SMTPServerDisconnected as e:
mylog('none', [' ERROR: Couldn\'t connect to the SMTP server (SMTPServerDisconnected)'])
mylog('none', [' ERROR: ', str(e)])
except socket.gaierror as e:
except socket.gaierror as e:
mylog('none', [' ERROR: Could not resolve hostname (socket.gaierror)'])
mylog('none', [' ERROR: ', str(e)])
except ssl.SSLError as e:
mylog('none', [' ERROR: ', str(e)])
except ssl.SSLError as e:
mylog('none', [' ERROR: Could not establish SSL connection (ssl.SSLError)'])
mylog('none', [' ERROR: Are you sure you need SMTP_FORCE_SSL enabled? Check your SMTP provider docs.'])
mylog('none', [' ERROR: ', str(e)])
mylog('none', [' ERROR: ', str(e)])
# ----------------------------------------------------------------------------------
def send_email(msg,smtp_timeout):
def send_email(msg, smtp_timeout):
# Send mail
if get_setting_value('SMTP_FORCE_SSL'):
mylog('debug', ['SMTP_FORCE_SSL == True so using .SMTP_SSL()'])
@@ -182,10 +189,10 @@ def send_email(msg,smtp_timeout):
mylog('debug', ['SMTP_FORCE_SSL == False so using .SMTP()'])
if get_setting_value("SMTP_PORT") == 0:
mylog('debug', ['SMTP_PORT == 0 so sending .SMTP(SMTP_SERVER)'])
smtp_connection = smtplib.SMTP (get_setting_value('SMTP_SERVER'))
smtp_connection = smtplib.SMTP(get_setting_value('SMTP_SERVER'))
else:
mylog('debug', ['SMTP_PORT == 0 so sending .SMTP(SMTP_SERVER, SMTP_PORT)'])
smtp_connection = smtplib.SMTP (get_setting_value('SMTP_SERVER'), get_setting_value('SMTP_PORT'))
smtp_connection = smtplib.SMTP(get_setting_value('SMTP_SERVER'), get_setting_value('SMTP_PORT'))
mylog('debug', ['Setting SMTP debug level'])
@@ -193,7 +200,7 @@ def send_email(msg,smtp_timeout):
if get_setting_value('LOG_LEVEL') == 'debug':
smtp_connection.set_debuglevel(1)
mylog('debug', [ 'Sending .ehlo()'])
mylog('debug', ['Sending .ehlo()'])
smtp_connection.ehlo()
if not get_setting_value('SMTP_SKIP_TLS'):
@@ -203,12 +210,13 @@ def send_email(msg,smtp_timeout):
smtp_connection.ehlo()
if not get_setting_value('SMTP_SKIP_LOGIN'):
mylog('debug', ['SMTP_SKIP_LOGIN == False so sending .login()'])
smtp_connection.login (get_setting_value('SMTP_USER'), get_setting_value('SMTP_PASS'))
smtp_connection.login(get_setting_value('SMTP_USER'), get_setting_value('SMTP_PASS'))
mylog('debug', ['Sending .sendmail()'])
smtp_connection.sendmail (get_setting_value("SMTP_REPORT_FROM"), get_setting_value("SMTP_REPORT_TO"), msg.as_string())
smtp_connection.sendmail(get_setting_value("SMTP_REPORT_FROM"), get_setting_value("SMTP_REPORT_TO"), msg.as_string())
smtp_connection.quit()
# ----------------------------------------------------------------------------------
def sanitize_email_content(subject, from_email, to_email, message_html, message_text):
# Validate and sanitize subject
@@ -229,6 +237,7 @@ def sanitize_email_content(subject, from_email, to_email, message_html, message_
return subject, from_email, to_email, message_html, message_text
# ----------------------------------------------------------------------------------
if __name__ == '__main__':
sys.exit(main())
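
send_email above branches between smtplib.SMTP_SSL, plain smtplib.SMTP, and an optional STARTTLS upgrade. A condensed sketch of that branching with the same force-SSL/skip-TLS semantics; the function name and parameters here are illustrative, not the plugin's API:

import smtplib

def send_plain_email(server, port, user, password, msg,
                     force_ssl=False, skip_tls=False, timeout=30):
    if force_ssl:
        # Implicit TLS from the first byte
        conn = smtplib.SMTP_SSL(server, port, timeout=timeout)
    else:
        # Plain connection, optionally upgraded via STARTTLS
        conn = smtplib.SMTP(server, port, timeout=timeout)
        conn.ehlo()
        if not skip_tls:
            conn.starttls()
            conn.ehlo()
    conn.login(user, password)
    conn.send_message(msg)  # msg must carry From/To/Subject headers
    conn.quit()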

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python
# !/usr/bin/env python
import json
import os
@@ -18,15 +18,14 @@ INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
# NetAlertX modules
import conf
from const import confFileName, logPath
from utils.plugin_utils import getPluginObject
from plugin_helper import Plugin_Objects
from logger import mylog, Logger
import conf # noqa: E402 [flake8 lint suppression]
from const import confFileName, logPath # noqa: E402 [flake8 lint suppression]
from utils.plugin_utils import getPluginObject # noqa: E402 [flake8 lint suppression]
from plugin_helper import Plugin_Objects # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value, bytes_to_string, \
sanitize_string, normalize_string
from utils.datetime_utils import timeNowDB
from database import DB, get_device_stats
sanitize_string, normalize_string # noqa: E402 [flake8 lint suppression]
from database import DB, get_device_stats # noqa: E402 [flake8 lint suppression]
# Make sure the TIMEZONE for logging is correct
@@ -287,11 +286,11 @@ def publish_mqtt(mqtt_client, topic, message):
# mylog('verbose', [f"[{pluginName}] mqtt_client.is_connected(): {mqtt_client.is_connected()} "])
result = mqtt_client.publish(
topic=topic,
payload=message,
qos=qos,
retain=True,
)
topic=topic,
payload=message,
qos=qos,
retain=True,
)
status = result[0]
@@ -303,6 +302,7 @@ def publish_mqtt(mqtt_client, topic, message):
time.sleep(0.1)
return True
# ------------------------------------------------------------------------------
# Create a generic device for overall stats
def create_generic_device(mqtt_client, deviceId, deviceName):
@@ -434,7 +434,6 @@ def mqtt_start(db):
if not mqtt_connected_to_broker:
mqtt_client = mqtt_create_client()
deviceName = get_setting_value('MQTT_DEVICE_NAME')
deviceId = get_setting_value('MQTT_DEVICE_ID')
@@ -449,16 +448,18 @@ def mqtt_start(db):
row = get_device_stats(db)
# Publish (wrap into {} and remove last ',' from above)
publish_mqtt(mqtt_client, f"{topic_root}/sensor/{deviceId}/state",
{
"online": row[0],
"down": row[1],
"all": row[2],
"archived": row[3],
"new": row[4],
"unknown": row[5]
}
)
publish_mqtt(
mqtt_client,
f"{topic_root}/sensor/{deviceId}/state",
{
"online": row[0],
"down": row[1],
"all": row[2],
"archived": row[3],
"new": row[4],
"unknown": row[5]
}
)
# Generate device-specific MQTT messages if enabled
if get_setting_value('MQTT_SEND_DEVICES'):
@@ -466,11 +467,11 @@ def mqtt_start(db):
# Specific devices processing
# Get all devices
devices = db.read(get_setting_value('MQTT_DEVICES_SQL').replace('{s-quote}',"'"))
devices = db.read(get_setting_value('MQTT_DEVICES_SQL').replace('{s-quote}', "'"))
sec_delay = len(devices) * int(get_setting_value('MQTT_DELAY_SEC'))*5
sec_delay = len(devices) * int(get_setting_value('MQTT_DELAY_SEC')) * 5
mylog('verbose', [f"[{pluginName}] Estimated delay: ", (sec_delay), 's ', '(', round(sec_delay/60, 1), 'min)'])
mylog('verbose', [f"[{pluginName}] Estimated delay: ", (sec_delay), 's ', '(', round(sec_delay / 60, 1), 'min)'])
for device in devices:
@@ -495,27 +496,29 @@ def mqtt_start(db):
# handle device_tracker
# IMPORTANT: shared payload - device_tracker attributes and individual sensors
devJson = {
"last_ip": device["devLastIP"],
"is_new": str(device["devIsNew"]),
"alert_down": str(device["devAlertDown"]),
"vendor": sanitize_string(device["devVendor"]),
"mac_address": str(device["devMac"]),
"model": devDisplayName,
"last_connection": prepTimeStamp(str(device["devLastConnection"])),
"first_connection": prepTimeStamp(str(device["devFirstConnection"])),
"sync_node": device["devSyncHubNode"],
"group": device["devGroup"],
"location": device["devLocation"],
"network_parent_mac": device["devParentMAC"],
"network_parent_name": next((dev["devName"] for dev in devices if dev["devMAC"] == device["devParentMAC"]), "")
}
"last_ip": device["devLastIP"],
"is_new": str(device["devIsNew"]),
"alert_down": str(device["devAlertDown"]),
"vendor": sanitize_string(device["devVendor"]),
"mac_address": str(device["devMac"]),
"model": devDisplayName,
"last_connection": prepTimeStamp(str(device["devLastConnection"])),
"first_connection": prepTimeStamp(str(device["devFirstConnection"])),
"sync_node": device["devSyncHubNode"],
"group": device["devGroup"],
"location": device["devLocation"],
"network_parent_mac": device["devParentMAC"],
"network_parent_name": next((dev["devName"] for dev in devices if dev["devMAC"] == device["devParentMAC"]), "")
}
# bulk update device sensors in home assistant
publish_mqtt(mqtt_client, sensorConfig.state_topic, devJson) # REQUIRED, DON'T DELETE
# create and update is_present sensor
sensorConfig = create_sensor(mqtt_client, deviceId, devDisplayName, 'binary_sensor', 'is_present', 'wifi', device["devMac"])
publish_mqtt(mqtt_client, sensorConfig.state_topic,
publish_mqtt(
mqtt_client,
sensorConfig.state_topic,
{
"is_present": to_binary_sensor(str(device["devPresentLastScan"]))
}
@@ -547,7 +550,7 @@ def to_binary_sensor(input):
elif isinstance(input, bool) and input:
return "ON"
elif isinstance(input, str) and input == "1":
return "ON"
return "ON"
elif isinstance(input, bytes) and bytes_to_string(input) == "1":
return "ON"
return "OFF"

View File

@@ -1,5 +1,5 @@
#!/usr/bin/env python
# !/usr/bin/env python
import json
import os
@@ -11,15 +11,15 @@ from base64 import b64encode
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
import conf
from const import confFileName, logPath
from plugin_helper import Plugin_Objects, handleEmpty
from utils.datetime_utils import timeNowDB
from logger import mylog, Logger
from helper import get_setting_value
from models.notification_instance import NotificationInstance
from database import DB
from pytz import timezone
import conf # noqa: E402 [flake8 lint suppression]
from const import confFileName, logPath # noqa: E402 [flake8 lint suppression]
from plugin_helper import Plugin_Objects, handleEmpty # noqa: E402 [flake8 lint suppression]
from utils.datetime_utils import timeNowDB # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
from models.notification_instance import NotificationInstance # noqa: E402 [flake8 lint suppression]
from database import DB # noqa: E402 [flake8 lint suppression]
from pytz import timezone # noqa: E402 [flake8 lint suppression]
# Make sure the TIMEZONE for logging is correct
conf.tz = timezone(get_setting_value('TIMEZONE'))
@@ -33,13 +33,12 @@ LOG_PATH = logPath + '/plugins'
RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
def main():
mylog('verbose', [f'[{pluginName}](publisher) In script'])
mylog('verbose', [f'[{pluginName}](publisher) In script'])
# Check if basic config settings supplied
if check_config() == False:
if check_config() is False:
mylog('none', [f'[{pluginName}] ⚠ ERROR: Publisher notification gateway not set up correctly. Check your {confFileName} {pluginName}_* variables.'])
return
@@ -65,9 +64,9 @@ def main():
# Log result
plugin_objects.add_object(
primaryId = pluginName,
secondaryId = timeNowDB(),
secondaryId = timeNowDB(),
watched1 = notification["GUID"],
watched2 = handleEmpty(response_text),
watched2 = handleEmpty(response_text),
watched3 = response_status_code,
watched4 = 'null',
extra = 'null',
@@ -77,15 +76,15 @@ def main():
plugin_objects.write_result_file()
#-------------------------------------------------------------------------------
# -------------------------------------------------------------------------------
def check_config():
if get_setting_value('NTFY_HOST') == '' or get_setting_value('NTFY_TOPIC') == '':
if get_setting_value('NTFY_HOST') == '' or get_setting_value('NTFY_TOPIC') == '':
return False
else:
return True
#-------------------------------------------------------------------------------
# -------------------------------------------------------------------------------
def send(html, text):
response_text = ''
@@ -100,7 +99,7 @@ def send(html, text):
# prepare request headers
headers = {
"Title": "NetAlertX Notification",
"Actions": "view, Open Dashboard, "+ get_setting_value('REPORT_DASHBOARD_URL'),
"Actions": "view, Open Dashboard, " + get_setting_value('REPORT_DASHBOARD_URL'),
"Priority": get_setting_value('NTFY_PRIORITY'),
"Tags": "warning"
}
@@ -109,37 +108,38 @@ def send(html, text):
if token != '':
headers["Authorization"] = "Bearer {}".format(token)
elif user != "" and pwd != "":
# Generate hash for basic auth
# Generate hash for basic auth
basichash = b64encode(bytes(user + ':' + pwd, "utf-8")).decode("ascii")
# add authorization header with hash
# add authorization header with hash
headers["Authorization"] = "Basic {}".format(basichash)
# call NTFY service
try:
response = requests.post("{}/{}".format( get_setting_value('NTFY_HOST'),
get_setting_value('NTFY_TOPIC')),
data = text,
headers = headers,
verify = verify_ssl)
response = requests.post("{}/{}".format(
get_setting_value('NTFY_HOST'),
get_setting_value('NTFY_TOPIC')),
data = text,
headers = headers,
verify = verify_ssl
)
response_status_code = response.status_code
# Check if the request was successful (status code 200)
if response_status_code == 200:
response_text = response.text # This captures the response body/message
response_text = response.text # This captures the response body/message
else:
response_text = json.dumps(response.text)
response_text = json.dumps(response.text)
except requests.exceptions.RequestException as e:
except requests.exceptions.RequestException as e:
mylog('none', [f'[{pluginName}] ⚠ ERROR: ', e])
response_text = e
return response_text, response_status_code
return response_text, response_status_code
return response_text, response_status_code
if __name__ == '__main__':
sys.exit(main())
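
The header construction above picks Bearer auth when a token is configured and otherwise falls back to HTTP basic auth built with b64encode. A self-contained sketch of that selection; the host and topic are placeholders:

from base64 import b64encode

import requests

def ntfy_headers(token, user, pwd):
    headers = {"Title": "NetAlertX Notification", "Tags": "warning"}
    if token != '':
        headers["Authorization"] = "Bearer {}".format(token)
    elif user != '' and pwd != '':
        # ntfy also accepts HTTP basic auth
        basichash = b64encode(bytes(user + ':' + pwd, "utf-8")).decode("ascii")
        headers["Authorization"] = "Basic {}".format(basichash)
    return headers

requests.post("https://ntfy.example.com/mytopic",   # placeholder host/topic
              data="device online",
              headers=ntfy_headers('', 'user', 'pwd'))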

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python3
# !/usr/bin/env python3
import conf
from const import confFileName, logPath
from pytz import timezone
@@ -12,12 +12,12 @@ import requests
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from plugin_helper import Plugin_Objects, handleEmpty # noqa: E402
from logger import mylog, Logger # noqa: E402
from helper import get_setting_value, hide_string # noqa: E402
from utils.datetime_utils import timeNowDB
from models.notification_instance import NotificationInstance # noqa: E402
from database import DB # noqa: E402
from plugin_helper import Plugin_Objects, handleEmpty # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value, hide_string # noqa: E402 [flake8 lint suppression]
from utils.datetime_utils import timeNowDB # noqa: E402 [flake8 lint suppression]
from models.notification_instance import NotificationInstance # noqa: E402 [flake8 lint suppression]
from database import DB # noqa: E402 [flake8 lint suppression]
# Make sure the TIMEZONE for logging is correct
conf.tz = timezone(get_setting_value("TIMEZONE"))

View File

@@ -1,5 +1,5 @@
#!/usr/bin/env python
# !/usr/bin/env python
import json
import os
@@ -10,15 +10,15 @@ import requests
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
import conf
from const import confFileName, logPath
from plugin_helper import Plugin_Objects, handleEmpty
from logger import mylog, Logger
from helper import get_setting_value, hide_string
from utils.datetime_utils import timeNowDB
from models.notification_instance import NotificationInstance
from database import DB
from pytz import timezone
import conf # noqa: E402 [flake8 lint suppression]
from const import confFileName, logPath # noqa: E402 [flake8 lint suppression]
from plugin_helper import Plugin_Objects, handleEmpty # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value, hide_string # noqa: E402 [flake8 lint suppression]
from utils.datetime_utils import timeNowDB # noqa: E402 [flake8 lint suppression]
from models.notification_instance import NotificationInstance # noqa: E402 [flake8 lint suppression]
from database import DB # noqa: E402 [flake8 lint suppression]
from pytz import timezone # noqa: E402 [flake8 lint suppression]
# Make sure the TIMEZONE for logging is correct
conf.tz = timezone(get_setting_value('TIMEZONE'))
@@ -32,13 +32,12 @@ LOG_PATH = logPath + '/plugins'
RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
def main():
mylog('verbose', [f'[{pluginName}](publisher) In script'])
mylog('verbose', [f'[{pluginName}](publisher) In script'])
# Check if basic config settings supplied
if check_config() == False:
if check_config() is False:
mylog('none', [f'[{pluginName}] ⚠ ERROR: Publisher notification gateway not set up correctly. Check your {confFileName} {pluginName}_* variables.'])
return
@@ -59,14 +58,14 @@ def main():
for notification in new_notifications:
# Send notification
response_text, response_status_code = send(notification["Text"])
response_text, response_status_code = send(notification["Text"])
# Log result
plugin_objects.add_object(
primaryId = pluginName,
secondaryId = timeNowDB(),
secondaryId = timeNowDB(),
watched1 = notification["GUID"],
watched2 = handleEmpty(response_text),
watched2 = handleEmpty(response_text),
watched3 = response_status_code,
watched4 = 'null',
extra = 'null',
@@ -76,8 +75,7 @@ def main():
plugin_objects.write_result_file()
#-------------------------------------------------------------------------------
# -------------------------------------------------------------------------------
def send(text):
response_text = ''
@@ -85,8 +83,7 @@ def send(text):
token = get_setting_value('PUSHSAFER_TOKEN')
mylog('verbose', [f'[{pluginName}] PUSHSAFER_TOKEN: "{hide_string(token)}"'])
mylog('verbose', [f'[{pluginName}] PUSHSAFER_TOKEN: "{hide_string(token)}"'])
try:
url = 'https://www.pushsafer.com/api'
@@ -101,40 +98,34 @@ def send(text):
"u" : get_setting_value('REPORT_DASHBOARD_URL'),
"ut" : 'Open NetAlertX',
"k" : token,
}
}
response = requests.post(url, data=post_fields)
response_status_code = response.status_code
# Check if the request was successful (status code 200)
if response_status_code == 200:
response_text = response.text # This captures the response body/message
response_text = response.text # This captures the response body/message
else:
response_text = json.dumps(response.text)
response_text = json.dumps(response.text)
except requests.exceptions.RequestException as e:
except requests.exceptions.RequestException as e:
mylog('none', [f'[{pluginName}] ⚠ ERROR: ', e])
response_text = e
return response_text, response_status_code
return response_text, response_status_code
return response_text, response_status_code
#-------------------------------------------------------------------------------
# -------------------------------------------------------------------------------
def check_config():
if get_setting_value('PUSHSAFER_TOKEN') == 'ApiKey':
return False
else:
return True
if get_setting_value('PUSHSAFER_TOKEN') == 'ApiKey':
return False
else:
return True
# -------------------------------------------------------
if __name__ == '__main__':
sys.exit(main())

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python
# !/usr/bin/env python
import subprocess
import os
@@ -8,15 +8,15 @@ import sys
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
import conf
from const import confFileName, logPath
from plugin_helper import Plugin_Objects
from utils.datetime_utils import timeNowDB
from logger import mylog, Logger
from helper import get_setting_value
from models.notification_instance import NotificationInstance
from database import DB
from pytz import timezone
import conf # noqa: E402 [flake8 lint suppression]
from const import confFileName, logPath # noqa: E402 [flake8 lint suppression]
from plugin_helper import Plugin_Objects # noqa: E402 [flake8 lint suppression]
from utils.datetime_utils import timeNowDB # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
from models.notification_instance import NotificationInstance # noqa: E402 [flake8 lint suppression]
from database import DB # noqa: E402 [flake8 lint suppression]
from pytz import timezone # noqa: E402 [flake8 lint suppression]
# Make sure the TIMEZONE for logging is correct
conf.tz = timezone(get_setting_value('TIMEZONE'))
@@ -30,13 +30,11 @@ LOG_PATH = logPath + '/plugins'
RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
def main():
mylog('verbose', [f'[{pluginName}](publisher) In script'])
# Check if basic config settings supplied
if check_config() == False:
if check_config() is False:
mylog('none', [
f'[{pluginName}] ⚠ ERROR: Publisher notification gateway not set up correctly. Check your {confFileName} {pluginName}_* variables.'])
return

View File

@@ -1,5 +1,5 @@
#!/usr/bin/env python
# !/usr/bin/env python
import json
import subprocess
@@ -13,15 +13,15 @@ INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
import conf
from const import logPath, confFileName
from plugin_helper import Plugin_Objects, handleEmpty
from utils.datetime_utils import timeNowDB
from logger import mylog, Logger
from helper import get_setting_value, write_file
from models.notification_instance import NotificationInstance
from database import DB
from pytz import timezone
import conf # noqa: E402 [flake8 lint suppression]
from const import logPath, confFileName # noqa: E402 [flake8 lint suppression]
from plugin_helper import Plugin_Objects, handleEmpty # noqa: E402 [flake8 lint suppression]
from utils.datetime_utils import timeNowDB # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value, write_file # noqa: E402 [flake8 lint suppression]
from models.notification_instance import NotificationInstance # noqa: E402 [flake8 lint suppression]
from database import DB # noqa: E402 [flake8 lint suppression]
from pytz import timezone # noqa: E402 [flake8 lint suppression]
# Make sure the TIMEZONE for logging is correct
conf.tz = timezone(get_setting_value('TIMEZONE'))
@@ -35,13 +35,12 @@ LOG_PATH = logPath + '/plugins'
RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
def main():
mylog('verbose', [f'[{pluginName}](publisher) In script'])
mylog('verbose', [f'[{pluginName}](publisher) In script'])
# Check if basic config settings supplied
if check_config() == False:
if check_config() is False:
mylog('none', [f'[{pluginName}] ⚠ ERROR: Publisher notification gateway not set up correctly. Check your {confFileName} {pluginName}_* variables.'])
return
@@ -62,15 +61,19 @@ def main():
for notification in new_notifications:
# Send notification
response_stdout, response_stderr = send(notification["Text"], notification["HTML"], notification["JSON"])
response_stdout, response_stderr = send(
notification["Text"],
notification["HTML"],
notification["JSON"]
)
# Log result
plugin_objects.add_object(
primaryId = pluginName,
secondaryId = timeNowDB(),
secondaryId = timeNowDB(),
watched1 = notification["GUID"],
watched2 = handleEmpty(response_stdout),
watched3 = handleEmpty(response_stderr),
watched2 = handleEmpty(response_stdout),
watched3 = handleEmpty(response_stderr),
watched4 = 'null',
extra = 'null',
foreignKey = notification["GUID"]
@@ -79,16 +82,16 @@ def main():
plugin_objects.write_result_file()
#-------------------------------------------------------------------------------
# -------------------------------------------------------------------------------
def check_config():
if get_setting_value('WEBHOOK_URL') == '':
return False
else:
return True
#-------------------------------------------------------------------------------
if get_setting_value('WEBHOOK_URL') == '':
return False
else:
return True
def send (text_data, html_data, json_data):
# -------------------------------------------------------------------------------
def send(text_data, html_data, json_data):
response_stderr = ''
response_stdout = ''
@@ -102,9 +105,9 @@ def send (text_data, html_data, json_data):
# use data type based on specified payload type
if payloadType == 'json':
# In this code, the truncate_json function is used to recursively traverse the JSON object
# and remove nodes that exceed the size limit. It checks the size of each node's JSON representation
# using json.dumps and includes only the nodes that are within the limit.
# In this code, the truncate_json function is used to recursively traverse the JSON object
# and remove nodes that exceed the size limit. It checks the size of each node's JSON representation
# using json.dumps and includes only the nodes that are within the limit.
json_str = json.dumps(json_data)
if len(json_str) <= limit:
@@ -127,45 +130,48 @@ def send (text_data, html_data, json_data):
return obj
payloadData = truncate_json(json_data)
if payloadType == 'html':
if payloadType == 'html':
if len(html_data) > limit:
payloadData = html_data[:limit] + " <h1>(text was truncated)</h1>"
else:
payloadData = html_data
if payloadType == 'text':
if payloadType == 'text':
if len(text_data) > limit:
payloadData = text_data[:limit] + " (text was truncated)"
else:
payloadData = text_data
# Define slack-compatible payload
_json_payload = { "text": payloadData } if payloadType == 'text' else {
"username": "NetAlertX",
"text": "There are new notifications",
"attachments": [{
"title": "NetAlertX Notifications",
"title_link": get_setting_value('REPORT_DASHBOARD_URL'),
"text": payloadData
}]
}
if payloadType == 'text':
_json_payload = {"text": payloadData}
else:
_json_payload = {
"username": "NetAlertX",
"text": "There are new notifications",
"attachments": [{
"title": "NetAlertX Notifications",
"title_link": get_setting_value('REPORT_DASHBOARD_URL'),
"text": payloadData
}]
}
# DEBUG - Write the json payload into a log file for debugging
write_file (logPath + '/webhook_payload.json', json.dumps(_json_payload))
write_file(logPath + '/webhook_payload.json', json.dumps(_json_payload))
# Using the Slack-Compatible Webhook endpoint for Discord so that the same payload can be used for both
# Consider: curl has the ability to load in data to POST from a file + piping
if(endpointUrl.startswith('https://discord.com/api/webhooks/') and not endpointUrl.endswith("/slack")):
if (endpointUrl.startswith('https://discord.com/api/webhooks/') and not endpointUrl.endswith("/slack")):
_WEBHOOK_URL = f"{endpointUrl}/slack"
curlParams = ["curl","-i","-H", "Content-Type:application/json" ,"-d", json.dumps(_json_payload), _WEBHOOK_URL]
curlParams = ["curl", "-i", "-H", "Content-Type:application/json", "-d", json.dumps(_json_payload), _WEBHOOK_URL]
else:
_WEBHOOK_URL = endpointUrl
curlParams = ["curl","-i","-X", requestMethod , "-H", "Content-Type:application/json", "-d", json.dumps(_json_payload), _WEBHOOK_URL]
curlParams = ["curl", "-i", "-X", requestMethod , "-H", "Content-Type:application/json", "-d", json.dumps(_json_payload), _WEBHOOK_URL]
# Add HMAC signature if configured
if(secret != ''):
if (secret != ''):
h = hmac.new(secret.encode("UTF-8"), json.dumps(_json_payload, separators=(',', ':')).encode(), hashlib.sha256).hexdigest()
curlParams.insert(4,"-H")
curlParams.insert(5,f"X-Webhook-Signature: sha256={h}")
curlParams.insert(4, "-H")
curlParams.insert(5, f"X-Webhook-Signature: sha256={h}")
try:
# Execute CURL call
@@ -173,13 +179,11 @@ def send (text_data, html_data, json_data):
result = subprocess.run(curlParams, capture_output=True, text=True)
response_stderr = result.stderr
response_stdout = result.stdout
response_stdout = result.stdout
# Write stdout and stderr into .log files for debugging if needed
mylog('debug', [f'[{pluginName}] stdout: ', response_stdout])
mylog('debug', [f'[{pluginName}] stderr: ', response_stderr])
mylog('debug', [f'[{pluginName}] stderr: ', response_stderr])
except subprocess.CalledProcessError as e:
# An error occurred, handle it
@@ -187,10 +191,9 @@ def send (text_data, html_data, json_data):
response_stderr = e.output
return response_stdout, response_stderr
return response_stdout, response_stderr
# -------------------------------------------------------
# -------------------------------------------------------
if __name__ == '__main__':
sys.exit(main())
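
The sender above signs the compact-separator JSON body and attaches the digest as X-Webhook-Signature: sha256=<hexdigest>. A hedged sketch of the check a receiving endpoint might perform against that header:

import hashlib
import hmac
import json

def verify_signature(secret, payload, header_value):
    # Same canonical form the sender uses: compact separators
    body = json.dumps(payload, separators=(',', ':')).encode()
    expected = hmac.new(secret.encode("UTF-8"), body, hashlib.sha256).hexdigest()
    # compare_digest avoids leaking timing information
    return hmac.compare_digest("sha256=" + expected, header_value)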

View File

@@ -1,7 +1,6 @@
#!/usr/bin/env python
# !/usr/bin/env python
import os
import time
import pathlib
import argparse
import sys
import re
@@ -9,16 +8,16 @@ import base64
import subprocess
# Register NetAlertX directories
INSTALL_PATH="/app"
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from database import DB
from plugin_helper import Plugin_Objects, handleEmpty
from logger import mylog, Logger, append_line_to_file
from helper import get_setting_value
from const import logPath, applicationPath
import conf
from pytz import timezone
from database import DB # noqa: E402 [flake8 lint suppression]
from plugin_helper import Plugin_Objects, handleEmpty # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
from const import logPath # noqa: E402 [flake8 lint suppression]
import conf # noqa: E402 [flake8 lint suppression]
from pytz import timezone # noqa: E402 [flake8 lint suppression]
# Make sure the TIMEZONE for logging is correct
conf.tz = timezone(get_setting_value("TIMEZONE"))

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python
# !/usr/bin/env python
import os
import sys
@@ -6,17 +6,16 @@ INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
pluginName = "ASUSWRT"
import asyncio
import aiohttp
import conf
from asusrouter import AsusData, AsusRouter
from asusrouter.modules.connection import ConnectionState
from const import logPath
from helper import get_setting_value
from logger import Logger, mylog
from plugin_helper import (Plugin_Objects, handleEmpty)
from pytz import timezone
import asyncio # noqa: E402 [flake8 lint suppression]
import aiohttp # noqa: E402 [flake8 lint suppression]
import conf # noqa: E402 [flake8 lint suppression]
from asusrouter import AsusData, AsusRouter # noqa: E402 [flake8 lint suppression]
from asusrouter.modules.connection import ConnectionState # noqa: E402 [flake8 lint suppression]
from const import logPath # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
from logger import Logger, mylog # noqa: E402 [flake8 lint suppression]
from plugin_helper import (Plugin_Objects, handleEmpty) # noqa: E402 [flake8 lint suppression]
from pytz import timezone # noqa: E402 [flake8 lint suppression]
conf.tz = timezone(get_setting_value("TIMEZONE"))

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python3
# !/usr/bin/env python3
import os
import sys
import socket
@@ -8,14 +8,14 @@ from zeroconf import Zeroconf
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from plugin_helper import Plugin_Objects
from logger import mylog, Logger
from const import logPath
from helper import get_setting_value
from database import DB
from models.device_instance import DeviceInstance
import conf
from pytz import timezone
from plugin_helper import Plugin_Objects # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from const import logPath # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
from database import DB # noqa: E402 [flake8 lint suppression]
from models.device_instance import DeviceInstance # noqa: E402 [flake8 lint suppression]
import conf # noqa: E402 [flake8 lint suppression]
from pytz import timezone # noqa: E402 [flake8 lint suppression]
# Configure timezone and logging
conf.tz = timezone(get_setting_value("TIMEZONE"))
@@ -67,7 +67,7 @@ def resolve_mdns_name(ip: str, timeout: int = 5) -> str:
hostname = socket.getnameinfo((ip, 0), socket.NI_NAMEREQD)[0]
zeroconf.close()
if hostname and hostname != ip:
mylog("debug", [f"[{pluginName}] Found mDNS name: {hostname}"])
mylog("debug", [f"[{pluginName}] Found mDNS name (rev_name): {hostname} ({rev_name})"])
return hostname
except Exception as e:
mylog("debug", [f"[{pluginName}] Zeroconf lookup failed for {ip}: {e}"])
@@ -89,7 +89,7 @@ def main():
timeout = get_setting_value("AVAHISCAN_RUN_TIMEOUT")
use_mock = "--mockdata" in sys.argv
if use_mock:
mylog("verbose", [f"[{pluginName}] Running in MOCK mode"])
devices = [
@@ -137,4 +137,4 @@ def main():
# Entrypoint
# =============================================================================
if __name__ == "__main__":
main()
main()
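
resolve_mdns_name above leans on socket.getnameinfo with NI_NAMEREQD, which raises instead of echoing the IP back when no name can be resolved. A stripped-down sketch of that lookup, without the surrounding Zeroconf setup:

import socket

def reverse_lookup(ip):
    try:
        # NI_NAMEREQD forces a gaierror when no PTR/mDNS name exists
        hostname, _ = socket.getnameinfo((ip, 0), socket.NI_NAMEREQD)
        return hostname
    except socket.gaierror:
        return None

print(reverse_lookup("192.168.1.1"))  # e.g. "router.localdomain" or None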

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python
# !/usr/bin/env python
import os
import argparse
@@ -11,11 +11,11 @@ from datetime import datetime
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from logger import mylog, Logger
from helper import get_setting_value
from const import logPath, fullDbPath
import conf
from pytz import timezone
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
from const import logPath, fullDbPath # noqa: E402 [flake8 lint suppression]
import conf # noqa: E402 [flake8 lint suppression]
from pytz import timezone # noqa: E402 [flake8 lint suppression]
# Make sure the TIMEZONE for logging is correct
conf.tz = timezone(get_setting_value('TIMEZONE'))
@@ -29,6 +29,7 @@ LOG_PATH = logPath + '/plugins'
LOG_FILE = os.path.join(LOG_PATH, f'script.{pluginName}.log')
RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
def main():
# the script expects a parameter in the format of devices=device1,device2,...
@@ -44,7 +45,7 @@ def main():
else:
overwrite = False
mylog('verbose', ['[CSVBCKP] In script'])
mylog('verbose', ['[CSVBCKP] In script'])
# Connect to the App database
conn = sqlite3.connect(fullDbPath)
@@ -64,7 +65,7 @@ def main():
fullPath = os.path.join(values.location.split('=')[1], filename)
mylog('verbose', ['[CSVBCKP] Writing file ', fullPath])
mylog('verbose', ['[CSVBCKP] Writing file ', fullPath])
# Create a CSV file in the specified location
with open(fullPath, 'w', newline='') as csvfile:
@@ -72,7 +73,7 @@ def main():
csv_writer = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_MINIMAL)
# Wrap the header values in double quotes and write the header row
csv_writer.writerow([ '"' + col + '"' for col in columns])
csv_writer.writerow(['"' + col + '"' for col in columns])
# Fetch and write data rows
for row in cursor.fetchall():
@@ -96,8 +97,8 @@ def main():
return 0
#===============================================================================
# ===============================================================================
# BEGIN
#===============================================================================
# ===============================================================================
if __name__ == '__main__':
main()
main()

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python
# !/usr/bin/env python
import os
import sys
@@ -8,11 +8,11 @@ import sqlite3
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from logger import mylog, Logger
from helper import get_setting_value
from const import logPath, fullDbPath
import conf
from pytz import timezone
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
from const import logPath, fullDbPath # noqa: E402 [flake8 lint suppression]
import conf # noqa: E402 [flake8 lint suppression]
from pytz import timezone # noqa: E402 [flake8 lint suppression]
# Make sure the TIMEZONE for logging is correct
conf.tz = timezone(get_setting_value("TIMEZONE"))
@@ -81,7 +81,7 @@ def cleanup_database(
)
cursor.execute(
"""DELETE from Online_History where "Index" not in (
SELECT "Index" from Online_History
SELECT "Index" from Online_History
order by Scan_Date desc limit 150)"""
)
@@ -94,7 +94,7 @@ def cleanup_database(
],
)
cursor.execute(
f"""DELETE FROM Events
f"""DELETE FROM Events
WHERE eve_DateTime <= date('now', '-{str(DAYS_TO_KEEP_EVENTS)} day')"""
)
# -----------------------------------------------------
@@ -107,11 +107,11 @@ def cleanup_database(
)
# Build the SQL query to delete entries that exceed the limit per unique "Plugin" column entry
delete_query = f"""DELETE FROM Plugins_History
delete_query = f"""DELETE FROM Plugins_History
WHERE "Index" NOT IN (
SELECT "Index"
FROM (
SELECT "Index",
SELECT "Index",
ROW_NUMBER() OVER(PARTITION BY "Plugin" ORDER BY DateTimeChanged DESC) AS row_num
FROM Plugins_History
) AS ranked_objects
@@ -133,11 +133,11 @@ def cleanup_database(
)
# Build the SQL query to delete entries
delete_query = f"""DELETE FROM Notifications
delete_query = f"""DELETE FROM Notifications
WHERE "Index" NOT IN (
SELECT "Index"
FROM (
SELECT "Index",
SELECT "Index",
ROW_NUMBER() OVER(PARTITION BY "Notifications" ORDER BY DateTimeCreated DESC) AS row_num
FROM Notifications
) AS ranked_objects
@@ -153,11 +153,11 @@ def cleanup_database(
mylog("verbose", [f"[{pluginName}] Trim AppEvents to less than {histCount}"])
# Build the SQL query to delete entries
delete_query = f"""DELETE FROM AppEvents
delete_query = f"""DELETE FROM AppEvents
WHERE "Index" NOT IN (
SELECT "Index"
FROM (
SELECT "Index",
SELECT "Index",
ROW_NUMBER() OVER(PARTITION BY "AppEvents" ORDER BY DateTimeCreated DESC) AS row_num
FROM AppEvents
) AS ranked_objects
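
The trim queries above all follow the same keep-newest-N pattern: rank rows per group with ROW_NUMBER() OVER (PARTITION BY ... ORDER BY ... DESC), then delete everything outside the kept set. A runnable sketch against a throwaway SQLite table; the schema is illustrative:

import sqlite3

conn = sqlite3.connect(":memory:")  # window functions need SQLite >= 3.25
conn.executescript("""
    CREATE TABLE Plugins_History (
        "Index" INTEGER PRIMARY KEY,
        Plugin TEXT,
        DateTimeChanged TEXT
    );
""")

keep = 2  # rows to retain per plugin
conn.execute(f"""DELETE FROM Plugins_History
    WHERE "Index" NOT IN (
        SELECT "Index"
        FROM (
            SELECT "Index",
                   ROW_NUMBER() OVER(
                       PARTITION BY Plugin
                       ORDER BY DateTimeChanged DESC) AS row_num
            FROM Plugins_History
        ) AS ranked_objects
        WHERE row_num <= {keep}
    )""")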

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python
# !/usr/bin/env python
import os
import argparse
@@ -9,11 +9,11 @@ import subprocess
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from logger import mylog, Logger
from helper import get_setting_value, check_IP_format
from const import logPath
import conf
from pytz import timezone
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value, check_IP_format # noqa: E402 [flake8 lint suppression]
from const import logPath # noqa: E402 [flake8 lint suppression]
import conf # noqa: E402 [flake8 lint suppression]
from pytz import timezone # noqa: E402 [flake8 lint suppression]
# Make sure the TIMEZONE for logging is correct
conf.tz = timezone(get_setting_value('TIMEZONE'))
@@ -28,91 +28,88 @@ LOG_FILE = os.path.join(LOG_PATH, f'script.{pluginName}.log')
RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
def main():
mylog('verbose', [f'[{pluginName}] In script'])
mylog('verbose', [f'[{pluginName}] In script'])
parser = argparse.ArgumentParser(description='Check internet connectivity and IP')
parser.add_argument('prev_ip', action="store", help="Previous IP address to compare against the current IP")
parser.add_argument('prev_ip', action="store", help="Previous IP address to compare against the current IP")
parser.add_argument('DDNS_UPDATE_URL', action="store", help="URL for updating Dynamic DNS (DDNS)")
parser.add_argument('DDNS_USER', action="store", help="Username for Dynamic DNS (DDNS) authentication")
parser.add_argument('DDNS_PASSWORD', action="store", help="Password for Dynamic DNS (DDNS) authentication")
parser.add_argument('DDNS_DOMAIN', action="store", help="Dynamic DNS (DDNS) domain name")
values = parser.parse_args()
PREV_IP = values.prev_ip.split('=')[1]
PREV_IP = values.prev_ip.split('=')[1]
DDNS_UPDATE_URL = values.DDNS_UPDATE_URL.split('=')[1]
DDNS_USER = values.DDNS_USER.split('=')[1]
DDNS_PASSWORD = values.DDNS_PASSWORD.split('=')[1]
DDNS_DOMAIN = values.DDNS_DOMAIN.split('=')[1]
DDNS_DOMAIN = values.DDNS_DOMAIN.split('=')[1]
# perform the new IP lookup and DDNS tasks if enabled
ddns_update( DDNS_UPDATE_URL, DDNS_USER, DDNS_PASSWORD, DDNS_DOMAIN, PREV_IP)
ddns_update(DDNS_UPDATE_URL, DDNS_USER, DDNS_PASSWORD, DDNS_DOMAIN, PREV_IP)
mylog('verbose', [f'[{pluginName}] Finished '])
mylog('verbose', [f'[{pluginName}] Finished '])
return 0
#===============================================================================
# ===============================================================================
# INTERNET IP CHANGE
#===============================================================================
def ddns_update ( DDNS_UPDATE_URL, DDNS_USER, DDNS_PASSWORD, DDNS_DOMAIN, PREV_IP ):
# ===============================================================================
def ddns_update(DDNS_UPDATE_URL, DDNS_USER, DDNS_PASSWORD, DDNS_DOMAIN, PREV_IP):
# Update DDNS record if enabled and IP is different
# Get Dynamic DNS IP
mylog('verbose', [f'[{pluginName}] Retrieving Dynamic DNS IP'])
dns_IP = get_dynamic_DNS_IP(DDNS_DOMAIN)
# Check Dynamic DNS IP
if dns_IP == "" or dns_IP == "0.0.0.0" :
mylog('none', [f'[{pluginName}] Error retrieving Dynamic DNS IP'])
mylog('none', [f'[{pluginName}] Error retrieving Dynamic DNS IP'])
mylog('none', [f'[{pluginName}] ', dns_IP])
# Check DNS Change
if dns_IP != PREV_IP :
mylog('none', [f'[{pluginName}] Updating Dynamic DNS IP'])
message = set_dynamic_DNS_IP (DDNS_UPDATE_URL, DDNS_USER, DDNS_PASSWORD, DDNS_DOMAIN)
mylog('none', [f'[{pluginName}] ', message])
message = set_dynamic_DNS_IP(DDNS_UPDATE_URL, DDNS_USER, DDNS_PASSWORD, DDNS_DOMAIN)
mylog('none', [f'[{pluginName}] ', message])
# plugin_objects = Plugin_Objects(RESULT_FILE)
# plugin_objects = Plugin_Objects(RESULT_FILE)
# plugin_objects.add_object(
# primaryId = 'Internet', # MAC (Device Name)
# secondaryId = new_internet_IP, # IP Address
# secondaryId = new_internet_IP, # IP Address
# watched1 = f'Previous IP: {PREV_IP}',
# watched2 = '',
# watched3 = '',
# watched3 = '',
# watched4 = '',
# extra = f'Previous IP: {PREV_IP}',
# extra = f'Previous IP: {PREV_IP}',
# foreignKey = 'Internet')
# plugin_objects.write_result_file()
# plugin_objects.write_result_file()
#-------------------------------------------------------------------------------
def get_dynamic_DNS_IP (DDNS_DOMAIN):
# -------------------------------------------------------------------------------
def get_dynamic_DNS_IP(DDNS_DOMAIN):
# Using supplied DNS server
dig_args = ['dig', '+short', DDNS_DOMAIN]
try:
# try running a subprocess
dig_output = subprocess.check_output (dig_args, universal_newlines=True)
dig_output = subprocess.check_output(dig_args, universal_newlines=True)
mylog('none', [f'[{pluginName}] DIG output :', dig_output])
except subprocess.CalledProcessError as e:
# An error occurred, handle it
mylog('none', [f'[{pluginName}] ⚠ ERROR - ', e.output])
dig_output = '' # probably no internet
dig_output = '' # probably no internet
# Check result is an IP
IP = check_IP_format (dig_output)
IP = check_IP_format(dig_output)
# Handle invalid response
if IP == '':
@@ -120,28 +117,27 @@ def get_dynamic_DNS_IP (DDNS_DOMAIN):
return IP
#-------------------------------------------------------------------------------
def set_dynamic_DNS_IP (DDNS_UPDATE_URL, DDNS_USER, DDNS_PASSWORD, DDNS_DOMAIN):
# -------------------------------------------------------------------------------
def set_dynamic_DNS_IP(DDNS_UPDATE_URL, DDNS_USER, DDNS_PASSWORD, DDNS_DOMAIN):
try:
# try running a subprocess
# Update Dynamic IP
curl_output = subprocess.check_output (['curl',
'-s',
DDNS_UPDATE_URL +
'username=' + DDNS_USER +
'&password=' + DDNS_PASSWORD +
'&hostname=' + DDNS_DOMAIN],
universal_newlines=True)
curl_output = subprocess.check_output([
'curl',
'-s',
DDNS_UPDATE_URL + 'username=' + DDNS_USER + '&password=' + DDNS_PASSWORD + '&hostname=' + DDNS_DOMAIN],
universal_newlines=True)
except subprocess.CalledProcessError as e:
# An error occurred, handle it
mylog('none', [f'[{pluginName}] ⚠ ERROR - ',e.output])
curl_output = ""
mylog('none', [f'[{pluginName}] ⚠ ERROR - ', e.output])
curl_output = ""
return curl_output
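# Shape of the assembled request (hypothetical values): the code concatenates the
# query parameters directly, so DDNS_UPDATE_URL must already end in '?' or '&', e.g.
# https://ddns.example.com/update?username=me&password=secret&hostname=home.example.com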
#===============================================================================
# ===============================================================================
# BEGIN
#===============================================================================
# ===============================================================================
if __name__ == '__main__':
main()

View File

@@ -1,22 +1,22 @@
#!/usr/bin/env python
# !/usr/bin/env python
from __future__ import unicode_literals
import argparse
import os
import sys
import chardet
# Register NetAlertX directories
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from plugin_helper import Plugin_Objects, handleEmpty, is_mac
from logger import mylog, Logger
from dhcp_leases import DhcpLeases
from helper import get_setting_value
import conf
from const import logPath
from pytz import timezone
from plugin_helper import Plugin_Objects, handleEmpty, is_mac # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from dhcp_leases import DhcpLeases # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
import conf # noqa: E402 [flake8 lint suppression]
from const import logPath # noqa: E402 [flake8 lint suppression]
from pytz import timezone # noqa: E402 [flake8 lint suppression]
# Make sure the TIMEZONE for logging is correct
conf.tz = timezone(get_setting_value('TIMEZONE'))
@@ -24,34 +24,38 @@ conf.tz = timezone(get_setting_value('TIMEZONE'))
# Make sure log level is initialized correctly
Logger(get_setting_value('LOG_LEVEL'))
pluginName= 'DHCPLSS'
pluginName = 'DHCPLSS'
LOG_PATH = logPath + '/plugins'
LOG_FILE = os.path.join(LOG_PATH, f'script.{pluginName}.log')
RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
# -------------------------------------------------------------
def main():
mylog('verbose', [f'[{pluginName}] In script'])
last_run_logfile = open(RESULT_FILE, 'a')
last_run_logfile.write("")
parser = argparse.ArgumentParser(description='Import devices from dhcp.leases files')
parser.add_argument('paths', action="store", help="absolute dhcp.leases file paths to check separated by ','")
parser.add_argument(
'paths',
action="store",
help="absolute dhcp.leases file paths to check separated by ','"
)
values = parser.parse_args()
plugin_objects = Plugin_Objects(RESULT_FILE)
if values.paths:
for path in values.paths.split('=')[1].split(','):
plugin_objects = get_entries(path, plugin_objects)
mylog('verbose', [f'[{pluginName}] {len(plugin_objects)} Entries found in "{path}"'])
plugin_objects.write_result_file()
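# Example invocation matching the 'paths=' parsing above (script name and file
# paths are hypothetical):
# python3 dhcplss.py paths=/etc/pihole/dhcp.leases,/var/lib/misc/dnsmasq.leases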
# -------------------------------------------------------------
def get_entries(path, plugin_objects):
@@ -66,7 +70,7 @@ def get_entries(path, plugin_objects):
# Use the detected encoding
encoding = result['encoding']
# Order: MAC, IP, IsActive, NAME, Hardware
# Handle pihole-specific dhcp.leases files
if 'pihole' in path:
with open(path, 'r', encoding=encoding, errors='replace') as f:
@@ -111,9 +115,9 @@ def get_entries(path, plugin_objects):
if is_mac(lease.ethernet):
plugin_objects.add_object(
primaryId = handleEmpty(lease.ethernet),
secondaryId = handleEmpty(lease.ip),
watched1 = handleEmpty(lease.active),
watched2 = handleEmpty(lease.hostname),
watched3 = handleEmpty(lease.hardware),
watched4 = handleEmpty(lease.binding_state),
@@ -122,5 +126,6 @@ def get_entries(path, plugin_objects):
)
return plugin_objects
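# For reference, an ISC dhcpd lease block of the kind DhcpLeases parses
# (illustrative values only):
# lease 192.168.1.50 {
#   binding state active;
#   hardware ethernet aa:bb:cc:dd:ee:ff;
#   client-hostname "laptop";
# }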
if __name__ == '__main__':
main()

View File

@@ -1,9 +1,8 @@
#!/usr/bin/env python
# !/usr/bin/env python
# Based on the work of https://github.com/leiweibau/Pi.Alert
import subprocess
import os
from datetime import datetime
import sys
@@ -11,12 +10,12 @@ import sys
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from plugin_helper import Plugin_Objects, Plugin_Object
from logger import mylog, Logger
from helper import get_setting_value
import conf
from pytz import timezone
from const import logPath
from plugin_helper import Plugin_Objects, Plugin_Object # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
import conf # noqa: E402 [flake8 lint suppression]
from pytz import timezone # noqa: E402 [flake8 lint suppression]
from const import logPath # noqa: E402 [flake8 lint suppression]
# Make sure the TIMEZONE for logging is correct
@@ -31,13 +30,14 @@ LOG_PATH = logPath + '/plugins'
LOG_FILE = os.path.join(LOG_PATH, f'script.{pluginName}.log')
RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
def main():
mylog('verbose', ['[DHCPSRVS] In script'])
last_run_logfile = open(RESULT_FILE, 'a')
last_run_logfile.write("")
plugin_objects = Plugin_Objects(RESULT_FILE)
timeoutSec = get_setting_value('DHCPSRVS_RUN_TIMEOUT')
@@ -46,10 +46,10 @@ def main():
try:
# Number of DHCP discovery probes to send
dhcp_probes = 1
# Initialize a list to store output lines from the scan
newLines = []
for _ in range(dhcp_probes):
output = subprocess.check_output(nmapArgs, universal_newlines=True, stderr=subprocess.STDOUT, timeout=timeoutSec)
newLines += output.split("\n")
@@ -57,9 +57,9 @@ def main():
newEntries = []
for line in newLines:
mylog('verbose', [f'[DHCPSRVS] Processing line: {line} '])
if 'Response ' in line and ' of ' in line:
newEntries.append(Plugin_Object())
elif 'Server Identifier' in line:
@@ -85,7 +85,7 @@ def main():
newEntries[-1].extra += ',' + newVal
for e in newEntries:
plugin_objects.add_object(
primaryId=e.primaryId,
secondaryId=e.secondaryId,
@@ -101,5 +101,6 @@ def main():
except Exception as e:
mylog('verbose', ['[DHCPSRVS] Error in main:', str(e)])
if __name__ == '__main__':
main()

View File

@@ -1,5 +1,4 @@
#!/usr/bin/env python
# !/usr/bin/env python
import os
import sys
import subprocess
@@ -8,14 +7,14 @@ import subprocess
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from plugin_helper import Plugin_Objects
from logger import mylog, Logger
from const import logPath
from helper import get_setting_value
from database import DB
from models.device_instance import DeviceInstance
import conf
from pytz import timezone
from plugin_helper import Plugin_Objects # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from const import logPath # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
from database import DB # noqa: E402 [flake8 lint suppression]
from models.device_instance import DeviceInstance # noqa: E402 [flake8 lint suppression]
import conf # noqa: E402 [flake8 lint suppression]
from pytz import timezone # noqa: E402 [flake8 lint suppression]
# Make sure the TIMEZONE for logging is correct
conf.tz = timezone(get_setting_value('TIMEZONE'))
@@ -35,7 +34,7 @@ plugin_objects = Plugin_Objects(RESULT_FILE)
def main():
mylog('verbose', [f'[{pluginName}] In script'])
timeout = get_setting_value('DIGSCAN_RUN_TIMEOUT')
@@ -50,13 +49,13 @@ def main():
device_handler = DeviceInstance(db)
# Retrieve devices
if get_setting_value("REFRESH_FQDN"):
if get_setting_value("REFRESH_FQDN"):
devices = device_handler.getAll()
else:
devices = device_handler.getUnknown()
mylog('verbose', [f'[{pluginName}] Devices count: {len(devices)}'])
# TEST - below is a WINDOWS host IP
# execute_name_lookup('192.168.1.121', timeout)
@@ -65,27 +64,27 @@ def main():
if domain_name != '':
plugin_objects.add_object(
# "MAC", "IP", "Server", "Name"
primaryId = device['devMac'],
secondaryId = device['devLastIP'],
watched1 = dns_server,
watched2 = domain_name,
watched3 = '',
watched4 = '',
extra = '',
foreignKey = device['devMac'])
primaryId = device['devMac'],
secondaryId = device['devLastIP'],
watched1 = dns_server,
watched2 = domain_name,
watched3 = '',
watched4 = '',
extra = '',
foreignKey = device['devMac']
)
plugin_objects.write_result_file()
mylog('verbose', [f'[{pluginName}] Script finished'])
return 0
#===============================================================================
# ===============================================================================
# Execute scan
#===============================================================================
def execute_name_lookup (ip, timeout):
# ===============================================================================
def execute_name_lookup(ip, timeout):
"""
Execute the DIG command on IP.
"""
@@ -97,32 +96,38 @@ def execute_name_lookup (ip, timeout):
try:
mylog('verbose', [f'[{pluginName}] DEBUG CMD :', args])
# try running a subprocess with a forced (timeout) in case the subprocess hangs
output = subprocess.check_output (args, universal_newlines=True, stderr=subprocess.STDOUT, timeout=(timeout), text=True).strip()
output = subprocess.check_output(
args,
universal_newlines=True,
stderr=subprocess.STDOUT,
timeout=(timeout),
text=True
).strip()
mylog('verbose', [f'[{pluginName}] DEBUG OUTPUT : {output}'])
domain_name = output
dns_server = ''
mylog('verbose', [f'[{pluginName}] Domain Name: {domain_name}'])
return domain_name, dns_server
except subprocess.CalledProcessError as e:
mylog('verbose', [f'[{pluginName}] ⚠ ERROR - {e.output}'])
except subprocess.TimeoutExpired:
mylog('verbose', [f'[{pluginName}] TIMEOUT - the process forcefully terminated as timeout reached'])
mylog('verbose', [f'[{pluginName}] ⚠ ERROR - {e.output}'])
if output == "": # check if the subprocess failed
mylog('verbose', [f'[{pluginName}] Scan: FAIL - check logs'])
else:
except subprocess.TimeoutExpired:
mylog('verbose', [f'[{pluginName}] TIMEOUT - the process forcefully terminated as timeout reached'])
if output == "": # check if the subprocess failed
mylog('verbose', [f'[{pluginName}] Scan: FAIL - check logs'])
else:
mylog('verbose', [f'[{pluginName}] Scan: SUCCESS'])
return '', ''
if __name__ == '__main__':
main()

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python
# !/usr/bin/env python
import os
import sys
@@ -17,11 +17,11 @@ from aiofreepybox.exceptions import NotOpenError, AuthorizationError
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from plugin_helper import Plugin_Objects
from logger import mylog, Logger
from const import logPath
from helper import get_setting_value
import conf
from plugin_helper import Plugin_Objects # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from const import logPath # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
import conf # noqa: E402 [flake8 lint suppression]
# Make sure the TIMEZONE for logging is correct
conf.tz = timezone(get_setting_value("TIMEZONE"))
@@ -79,6 +79,7 @@ def map_device_type(type: str):
mylog("minimal", [f"[{pluginName}] Unknown device type: {type}"])
return device_type_map["other"]
async def get_device_data(api_version: int, api_address: str, api_port: int):
# ensure existence of db path
config_base = Path(os.getenv("NETALERTX_CONFIG", "/data/config"))

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python
# !/usr/bin/env python
# test script by running:
# tbc
@@ -11,14 +11,14 @@ import re
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from plugin_helper import Plugin_Objects
from logger import mylog, Logger
from helper import get_setting_value
from const import logPath
from database import DB
from models.device_instance import DeviceInstance
import conf
from pytz import timezone
from plugin_helper import Plugin_Objects # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
from const import logPath # noqa: E402 [flake8 lint suppression]
from database import DB # noqa: E402 [flake8 lint suppression]
from models.device_instance import DeviceInstance # noqa: E402 [flake8 lint suppression]
import conf # noqa: E402 [flake8 lint suppression]
from pytz import timezone # noqa: E402 [flake8 lint suppression]
# Make sure the TIMEZONE for logging is correct
conf.tz = timezone(get_setting_value('TIMEZONE'))
@@ -33,16 +33,14 @@ LOG_FILE = os.path.join(LOG_PATH, f'script.{pluginName}.log')
RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
def main():
mylog('verbose', [f'[{pluginName}] In script'])
timeout = get_setting_value('ICMP_RUN_TIMEOUT')
args = get_setting_value('ICMP_ARGS')
in_regex = get_setting_value('ICMP_IN_REGEX')
# Create a database connection
db = DB() # instance of class DB
db.open()
@@ -61,46 +59,45 @@ def main():
# Filter devices based on the regex match
filtered_devices = [
device for device in all_devices
if regex_pattern.match(device['devLastIP'])
]
mylog('verbose', [f'[{pluginName}] Devices to PING: {len(filtered_devices)}'])
for device in filtered_devices:
is_online, output = execute_scan(device['devLastIP'], timeout, args)
mylog('verbose', [f"[{pluginName}] ip: {device['devLastIP']} is_online: {is_online}"])
if is_online:
plugin_objects.add_object(
# "MAC", "IP", "Name", "Output"
primaryId = device['devMac'],
secondaryId = device['devLastIP'],
watched1 = device['devName'],
watched2 = output.replace('\n',''),
watched3 = '',
watched4 = '',
extra = '',
foreignKey = device['devMac'])
# "MAC", "IP", "Name", "Output"
primaryId = device['devMac'],
secondaryId = device['devLastIP'],
watched1 = device['devName'],
watched2 = output.replace('\n', ''),
watched3 = '',
watched4 = '',
extra = '',
foreignKey = device['devMac']
)
plugin_objects.write_result_file()
mylog('verbose', [f'[{pluginName}] Script finished'])
return 0
#===============================================================================
# ===============================================================================
# Execute scan
#===============================================================================
def execute_scan (ip, timeout, args):
# ===============================================================================
def execute_scan(ip, timeout, args):
"""
Execute the ICMP command on IP.
"""
icmp_args = ['ping'] + args.split() + [ip]
# Execute command
@@ -108,12 +105,18 @@ def execute_scan (ip, timeout, args):
try:
# try running a subprocess with a forced (timeout) in case the subprocess hangs
output = subprocess.check_output (icmp_args, universal_newlines=True, stderr=subprocess.STDOUT, timeout=(timeout), text=True)
output = subprocess.check_output(
icmp_args,
universal_newlines=True,
stderr=subprocess.STDOUT,
timeout=(timeout),
text=True
)
mylog('verbose', [f'[{pluginName}] DEBUG OUTPUT : {output}'])
# Parse output using case-insensitive regular expressions
#Synology-NAS:/# ping -i 0.5 -c 3 -W 8 -w 9 192.168.1.82
# Synology-NAS:/# ping -i 0.5 -c 3 -W 8 -w 9 192.168.1.82
# PING 192.168.1.82 (192.168.1.82): 56 data bytes
# 64 bytes from 192.168.1.82: seq=0 ttl=64 time=0.080 ms
# 64 bytes from 192.168.1.82: seq=1 ttl=64 time=0.081 ms
@@ -130,7 +133,7 @@ def execute_scan (ip, timeout, args):
# --- 192.168.1.92 ping statistics ---
# 3 packets transmitted, 0 packets received, 100% packet loss
# TODO: parse output and return True if online, False if Offline (100% packet loss, bad address)
is_online = True
# Check for 0% packet loss in the output
@@ -145,22 +148,20 @@ def execute_scan (ip, timeout, args):
except subprocess.CalledProcessError as e:
# An error occurred, handle it
mylog('verbose', [f'[{pluginName}] ⚠ ERROR - check logs'])
mylog('verbose', [f'[{pluginName}]', e.output])
return False, output
except subprocess.TimeoutExpired:
mylog('verbose', [f'[{pluginName}] TIMEOUT - the process forcefully terminated as timeout reached'])
return False, output
return False, output
#===============================================================================
# ===============================================================================
# BEGIN
#===============================================================================
# ===============================================================================
if __name__ == '__main__':
main()

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python
# !/usr/bin/env python
import os
import time
@@ -11,13 +11,13 @@ import re
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from plugin_helper import Plugin_Objects
from utils.datetime_utils import timeNowDB
from logger import mylog, Logger, append_line_to_file
from helper import check_IP_format, get_setting_value
from const import logPath
import conf
from pytz import timezone
from plugin_helper import Plugin_Objects # noqa: E402 [flake8 lint suppression]
from utils.datetime_utils import timeNowDB # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger, append_line_to_file # noqa: E402 [flake8 lint suppression]
from helper import check_IP_format, get_setting_value # noqa: E402 [flake8 lint suppression]
from const import logPath # noqa: E402 [flake8 lint suppression]
import conf # noqa: E402 [flake8 lint suppression]
from pytz import timezone # noqa: E402 [flake8 lint suppression]
# Make sure the TIMEZONE for logging is correct
conf.tz = timezone(get_setting_value('TIMEZONE'))
@@ -31,39 +31,39 @@ LOG_PATH = logPath + '/plugins'
LOG_FILE = os.path.join(LOG_PATH, f'script.{pluginName}.log')
RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
no_internet_ip = '0.0.0.0'
def main():
mylog('verbose', [f'[{pluginName}] In script'])
parser = argparse.ArgumentParser(description='Check internet connectivity and IP')
parser.add_argument('prev_ip', action="store", help="Previous IP address to compare against the current IP")
parser.add_argument('DIG_GET_IP_ARG', action="store", help="Arguments for the 'dig' command to retrieve the IP address") # unused
values = parser.parse_args()
PREV_IP = values.prev_ip.split('=')[1]
DIG_GET_IP_ARG = get_setting_value("INTRNT_DIG_GET_IP_ARG")
new_internet_IP = no_internet_ip
mylog('verbose', [f'[{pluginName}] INTRNT_DIG_GET_IP_ARG: ', DIG_GET_IP_ARG])
# METHOD 1: dig
# perform the new IP lookup N times specified by the INTRNT_TRIES setting
INTRNT_RETRIES = get_setting_value("INTRNT_RETRIES")
retries_needed = 0
for i in range(INTRNT_RETRIES + 1):
new_internet_IP, cmd_output = check_internet_IP( PREV_IP, DIG_GET_IP_ARG)
new_internet_IP, cmd_output = check_internet_IP(PREV_IP, DIG_GET_IP_ARG)
if new_internet_IP == no_internet_ip:
time.sleep(1*i) # back off progressively between retries
time.sleep(1 * i)  # back off progressively between retries
else:
retries_needed = i
break
@@ -71,68 +71,69 @@ def main():
# METHOD 2: curl
if new_internet_IP == no_internet_ip:
new_internet_IP, cmd_output = fallback_check_ip()
mylog('verbose', [f'[{pluginName}] Curl Fallback (new_internet_IP|cmd_output): {new_internet_IP} | {cmd_output}'])
# logging
append_line_to_file (logPath + '/IP_changes.log', '['+str(timeNowDB()) +']\t'+ new_internet_IP +'\n')
append_line_to_file(logPath + '/IP_changes.log', '[' + str(timeNowDB()) + ']\t' + new_internet_IP + '\n')
plugin_objects = Plugin_Objects(RESULT_FILE)
plugin_objects.add_object(
primaryId = 'Internet', # MAC (Device Name)
secondaryId = new_internet_IP, # IP Address
watched1 = f'Previous IP: {PREV_IP}',
watched2 = cmd_output.replace('\n',''),
watched3 = retries_needed,
watched2 = cmd_output.replace('\n', ''),
watched3 = retries_needed,
watched4 = 'Gateway',
extra = f'Previous IP: {PREV_IP}',
foreignKey = 'Internet')
extra = f'Previous IP: {PREV_IP}',
foreignKey = 'Internet'
)
plugin_objects.write_result_file()
mylog('verbose', [f'[{pluginName}] Finished '])
return 0
#===============================================================================
# ===============================================================================
# INTERNET IP CHANGE
#===============================================================================
def check_internet_IP ( PREV_IP, DIG_GET_IP_ARG ):
# ===============================================================================
def check_internet_IP(PREV_IP, DIG_GET_IP_ARG):
# Get Internet IP
mylog('verbose', [f'[{pluginName}] - Retrieving Internet IP'])
internet_IP, cmd_output = get_internet_IP(DIG_GET_IP_ARG)
mylog('verbose', [f'[{pluginName}] Current internet_IP : {internet_IP}'])
# Check previously stored IP
previous_IP = no_internet_ip
if PREV_IP is not None and len(PREV_IP) > 0 :
previous_IP = PREV_IP
mylog('verbose', [f'[{pluginName}] previous_IP : {previous_IP}'])
return internet_IP, cmd_output
#-------------------------------------------------------------------------------
def get_internet_IP (DIG_GET_IP_ARG):
# -------------------------------------------------------------------------------
def get_internet_IP(DIG_GET_IP_ARG):
cmd_output = ''
# Using 'dig'
dig_args = ['dig', '+short'] + DIG_GET_IP_ARG.strip().split()
try:
cmd_output = subprocess.check_output (dig_args, universal_newlines=True)
mylog('verbose', [f'[{pluginName}] DIG result : {cmd_output}'])
cmd_output = subprocess.check_output(dig_args, universal_newlines=True)
mylog('verbose', [f'[{pluginName}] DIG result : {cmd_output}'])
except subprocess.CalledProcessError as e:
mylog('verbose', [e.output])
cmd_output = '' # no internet
# Check result is an IP
IP = check_IP_format (cmd_output)
IP = check_IP_format(cmd_output)
# Handle invalid response
if IP == '':
@@ -140,7 +141,8 @@ def get_internet_IP (DIG_GET_IP_ARG):
return IP, cmd_output
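# Worked example (illustrative): a cmd_output of "203.0.113.7\n" should come back
# from check_IP_format() as '203.0.113.7'; anything non-IP yields IP == '' and is
# handled by the invalid-response branch above.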
#-------------------------------------------------------------------------------
# -------------------------------------------------------------------------------
def fallback_check_ip():
"""Fallback mechanism using `curl ifconfig.me/ip`."""
try:
@@ -155,8 +157,9 @@ def fallback_check_ip():
mylog('none', [f'[{pluginName}] Fallback curl exception: {e}'])
return no_internet_ip, f'Fallback via curl exception: "{e}"'
#===============================================================================
# ===============================================================================
# BEGIN
#===============================================================================
# ===============================================================================
if __name__ == '__main__':
main()

View File

@@ -1,6 +1,5 @@
#!/usr/bin/env python
# !/usr/bin/env python
import argparse
import os
import sys
import speedtest
@@ -9,13 +8,13 @@ import speedtest
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from plugin_helper import Plugin_Objects
from utils.datetime_utils import timeNowDB
from logger import mylog, Logger
from helper import get_setting_value
import conf
from pytz import timezone
from const import logPath
from plugin_helper import Plugin_Objects # noqa: E402 [flake8 lint suppression]
from utils.datetime_utils import timeNowDB # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
import conf # noqa: E402 [flake8 lint suppression]
from pytz import timezone # noqa: E402 [flake8 lint suppression]
from const import logPath # noqa: E402 [flake8 lint suppression]
# Make sure the TIMEZONE for logging is correct
conf.tz = timezone(get_setting_value('TIMEZONE'))
@@ -28,18 +27,16 @@ pluginName = 'INTRSPD'
LOG_PATH = logPath + '/plugins'
RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
def main():
mylog('verbose', ['[INTRSPD] In script'])
parser = argparse.ArgumentParser(description='Speedtest Plugin for NetAlertX')
values = parser.parse_args()
def main():
mylog('verbose', ['[INTRSPD] In script'])
plugin_objects = Plugin_Objects(RESULT_FILE)
speedtest_result = run_speedtest()
plugin_objects.add_object(
primaryId = 'Speedtest',
secondaryId = timeNowDB(),
watched1 = speedtest_result['download_speed'],
watched2 = speedtest_result['upload_speed'],
watched3 = 'null',
@@ -49,25 +46,27 @@ def main():
)
plugin_objects.write_result_file()
def run_speedtest():
try:
st = speedtest.Speedtest(secure=True)
st.get_best_server()
download_speed = round(st.download() / 10**6, 2) # Convert to Mbps
upload_speed = round(st.upload() / 10**6, 2) # Convert to Mbps
mylog('verbose', [f"[INTRSPD] Result (down|up): {str(download_speed)} Mbps|{upload_speed} Mbps"])
mylog('verbose', [f"[INTRSPD] Result (down|up): {str(download_speed)} Mbps|{upload_speed} Mbps"])
return {
'download_speed': download_speed,
'upload_speed': upload_speed,
}
except Exception as e:
mylog('verbose', [f"[INTRSPD] Error running speedtest: {str(e)}"])
mylog('verbose', [f"[INTRSPD] Error running speedtest: {str(e)}"])
return {
'download_speed': -1,
'upload_speed': -1,
}
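# Worked example: speedtest reports bits per second, so a raw st.download() of
# 23_500_000 becomes round(23_500_000 / 10**6, 2) == 23.5 Mbps; the -1/-1 pair
# above signals a failed measurement to the caller.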
if __name__ == '__main__':
sys.exit(main())

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python
# !/usr/bin/env python
import os
import sys
@@ -11,11 +11,11 @@ from functools import reduce
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from plugin_helper import Plugin_Objects
from logger import mylog, Logger
from const import logPath
from helper import get_setting_value
import conf
from plugin_helper import Plugin_Objects # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from const import logPath # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
import conf # noqa: E402 [flake8 lint suppression]
# Make sure the TIMEZONE for logging is correct
conf.tz = timezone(get_setting_value('TIMEZONE'))
@@ -34,9 +34,8 @@ RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
plugin_objects = Plugin_Objects(RESULT_FILE)
def main():
mylog('verbose', [f'[{pluginName}] In script'])
# Retrieve configuration settings
SCAN_SUBNETS = get_setting_value('SCAN_SUBNETS')
@@ -48,33 +47,33 @@ def main():
entry.split('--interface=')[-1].strip() for entry in SCAN_SUBNETS if '--interface=' in entry
)
mylog('verbose', [f'[{pluginName}] Interfaces value: "{interfaces}"'])
# retrieve data
raw_neighbors = get_neighbors(interfaces)
neighbors = parse_neighbors(raw_neighbors)
# Process the data into native application tables
if len(neighbors) > 0:
for device in neighbors:
plugin_objects.add_object(
primaryId = device['mac'],
secondaryId = device['ip'],
watched4 = device['last_seen'],
# The following are always unknown
watched1 = device['hostname'], # don't use these --> handleEmpty(device['hostname']),
watched2 = device['vendor'], # handleEmpty(device['vendor']),
watched3 = device['device_type'], # handleEmpty(device['device_type']),
extra = '',
foreignKey = "" #device['mac']
# helpVal1 = "Something1", # Optional Helper values to be passed for mapping into the app
# helpVal2 = "Something1", # If you need to use even only 1, add the remaining ones too
# helpVal3 = "Something1", # and set them to 'null'. Check the docs for details:
# helpVal4 = "Something1", # https://github.com/jokob-sk/NetAlertX/blob/main/docs/PLUGINS_DEV.md
)
# The following are always unknown
watched1 = device['hostname'], # don't use these --> handleEmpty(device['hostname']),
watched2 = device['vendor'], # don't use these --> handleEmpty(device['vendor']),
watched3 = device['device_type'], # don't use these --> handleEmpty(device['device_type']),
extra = '',
foreignKey = "" # device['mac']
# helpVal1 = "Something1", # Optional Helper values to be passed for mapping into the app
# helpVal2 = "Something1", # If you need to use even only 1, add the remaining ones too
# helpVal3 = "Something1", # and set them to 'null'. Check the docs for details:
# helpVal4 = "Something1", # https://github.com/jokob-sk/NetAlertX/blob/main/docs/PLUGINS_DEV.md
)
mylog('verbose', [f'[{pluginName}] New entries: "{len(neighbors)}"'])
@@ -83,13 +82,14 @@ def main():
return 0
def parse_neighbors(raw_neighbors: list[str]):
neighbors = []
for line in raw_neighbors:
if "lladdr" in line and "REACHABLE" in line:
# Known data
fields = line.split()
if not is_multicast(fields[0]):
# mylog('verbose', [f'[{pluginName}] adding ip {fields[0]}"'])
neighbor = {}
@@ -101,9 +101,9 @@ def parse_neighbors(raw_neighbors: list[str]):
neighbor['hostname'] = '(unknown)'
neighbor['vendor'] = '(unknown)'
neighbor['device_type'] = '(unknown)'
neighbors.append(neighbor)
return neighbors
@@ -111,6 +111,7 @@ def is_multicast(ip):
prefixes = ['ff', '224', '231', '232', '233', '234', '238', '239']
return reduce(lambda acc, prefix: acc or ip.startswith(prefix), prefixes, False)
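# Equivalent, arguably clearer spelling of the reduce() above:
# any(ip.startswith(prefix) for prefix in prefixes)
# e.g. is_multicast('ff02::1') -> True, is_multicast('192.168.1.5') -> False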
# retrieve data
def get_neighbors(interfaces):
@@ -119,7 +120,7 @@ def get_neighbors(interfaces):
for interface in interfaces.split(","):
try:
# Ping all IPv6 devices in multicast to trigger NDP
mylog('verbose', [f'[{pluginName}] Pinging on interface: "{interface}"'])
command = f"ping ff02::1%{interface} -c 2".split()
@@ -136,11 +137,11 @@ def get_neighbors(interfaces):
mylog('verbose', [f'[{pluginName}] Scanning interface succeeded: "{interface}"'])
except subprocess.CalledProcessError as e:
# An error occurred, handle it
mylog('verbose', [f'[{pluginName}] Scanning interface failed: "{interface}"'])
error_type = type(e).__name__ # Capture the error type
mylog('verbose', [f'[{pluginName}] Scanning interface failed: "{interface}" ({error_type})'])
return results
if __name__ == '__main__':
main()

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python
# !/usr/bin/env python
import os
import sys
@@ -7,18 +7,18 @@ INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
pluginName = 'LUCIRPC'
from plugin_helper import Plugin_Objects
from logger import mylog, Logger
from helper import get_setting_value
from const import logPath
import conf
from pytz import timezone
from plugin_helper import Plugin_Objects # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
from const import logPath # noqa: E402 [flake8 lint suppression]
import conf # noqa: E402 [flake8 lint suppression]
from pytz import timezone # noqa: E402 [flake8 lint suppression]
try:
from openwrt_luci_rpc import OpenWrtRpc
except:
mylog('error', [f'[{pluginName}] Failed import openwrt_luci_rpc'])
exit()
except ImportError as e:
mylog('error', [f'[{pluginName}] Failed import openwrt_luci_rpc: {e}'])
exit(1)
conf.tz = timezone(get_setting_value('TIMEZONE'))
@@ -30,13 +30,14 @@ RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
plugin_objects = Plugin_Objects(RESULT_FILE)
def main():
mylog('verbose', [f'[{pluginName}] start script.'])
device_data = get_device_data()
for entry in device_data:
mylog('verbose', [f'[{pluginName}] found: ', str(entry.mac).lower()])
name = str(entry.hostname)
@@ -45,36 +46,38 @@ def main():
plugin_objects.add_object(
primaryId = str(entry.mac).lower(),
secondaryId = entry.ip,
watched1 = entry.host,
watched2 = name,
watched3 = "",
watched4 = "",
extra = pluginName,
foreignKey = str(entry.mac).lower())
plugin_objects.write_result_file()
mylog('verbose', [f'[{pluginName}] Script finished'])
return 0
def get_device_data():
router = OpenWrtRpc(
get_setting_value("LUCIRPC_host"),
get_setting_value("LUCIRPC_user"),
get_setting_value("LUCIRPC_password"),
get_setting_value("LUCIRPC_ssl"),
get_setting_value("LUCIRPC_user"),
get_setting_value("LUCIRPC_password"),
get_setting_value("LUCIRPC_ssl"),
get_setting_value("LUCIRPC_verify_ssl")
)
if router.is_logged_in():
mylog('verbose', [f'[{pluginName}] login successful.'])
else:
mylog('error', [f'[{pluginName}] login failed.'])
device_data = router.get_all_connected_devices(only_reachable=get_setting_value("LUCIRPC_only_reachable"))
return device_data
if __name__ == '__main__':
main()

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python
# !/usr/bin/env python
import os
import sys
@@ -8,12 +8,12 @@ from collections import deque
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from logger import mylog, Logger
from helper import get_setting_value
from const import logPath
from messaging.in_app import remove_old
import conf
from pytz import timezone
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
from const import logPath # noqa: E402 [flake8 lint suppression]
from messaging.in_app import remove_old # noqa: E402 [flake8 lint suppression]
import conf # noqa: E402 [flake8 lint suppression]
from pytz import timezone # noqa: E402 [flake8 lint suppression]
# Make sure the TIMEZONE for logging is correct
conf.tz = timezone(get_setting_value('TIMEZONE'))
@@ -28,10 +28,9 @@ LOG_FILE = os.path.join(LOG_PATH, f'script.{pluginName}.log')
RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
def main():
mylog('verbose', [f'[{pluginName}] In script'])
MAINT_LOG_LENGTH = int(get_setting_value('MAINT_LOG_LENGTH'))
MAINT_NOTI_LENGTH = int(get_setting_value('MAINT_NOTI_LENGTH'))
@@ -39,7 +38,7 @@ def main():
# Check if set
if MAINT_LOG_LENGTH != 0:
mylog('verbose', [f'[{pluginName}] Cleaning file'])
logFile = logPath + "/app.log"
@@ -54,19 +53,19 @@ def main():
with open(logFile, 'w') as file:
# Write the last N lines back to the file
file.writelines(lines_to_keep)
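# lines_to_keep is built earlier in this function (not shown in this hunk); a
# minimal sketch of that tail-keep step, assuming the deque import at the top of
# this file is what builds it:
# with open(logFile) as f:
#     lines_to_keep = deque(f, maxlen=MAINT_LOG_LENGTH)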
mylog('verbose', [f'[{pluginName}] Cleanup finished'])
# Check if set
if MAINT_NOTI_LENGTH != 0:
mylog('verbose', [f'[{pluginName}] Cleaning in-app notification history'])
remove_old(MAINT_NOTI_LENGTH)
return 0
#===============================================================================
# ===============================================================================
# BEGIN
#===============================================================================
# ===============================================================================
if __name__ == '__main__':
main()

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python
# !/usr/bin/env python
import os
import sys
@@ -7,14 +7,14 @@ import sys
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from plugin_helper import Plugin_Objects
from logger import mylog, Logger
from helper import get_setting_value
from const import logPath
import conf
from pytz import timezone
from librouteros import connect
from librouteros.exceptions import TrapError
from plugin_helper import Plugin_Objects # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
from const import logPath # noqa: E402 [flake8 lint suppression]
import conf # noqa: E402 [flake8 lint suppression]
from pytz import timezone # noqa: E402 [flake8 lint suppression]
from librouteros import connect # noqa: E402 [flake8 lint suppression]
from librouteros.exceptions import TrapError # noqa: E402 [flake8 lint suppression]
# Make sure the TIMEZONE for logging is correct
conf.tz = timezone(get_setting_value('TIMEZONE'))
@@ -29,7 +29,6 @@ LOG_FILE = os.path.join(LOG_PATH, f'script.{pluginName}.log')
RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
def main():
mylog('verbose', [f'[{pluginName}] In script'])
@@ -49,7 +48,7 @@ def main():
plugin_objects = get_entries(plugin_objects)
plugin_objects.write_result_file()
mylog('verbose', [f'[{pluginName}] Scan finished, found {len(plugin_objects)} devices'])
@@ -58,10 +57,10 @@ def get_entries(plugin_objects: Plugin_Objects) -> Plugin_Objects:
try:
# connect router
api = connect(username=MT_USER, password=MT_PASS, host=MT_HOST, port=MT_PORT)
# get dhcp leases
leases = api('/ip/dhcp-server/lease/print')
for lease in leases:
lease_id = lease.get('.id')
address = lease.get('address')
@@ -71,8 +70,11 @@ def get_entries(plugin_objects: Plugin_Objects) -> Plugin_Objects:
last_seen = lease.get('last-seen')
status = lease.get('status')
device_name = comment or host_name or "(unknown)"
mylog('verbose', [f"ID: {lease_id}, Address: {address}, MAC Address: {mac_address}, Host Name: {host_name}, Comment: {comment}, Last Seen: {last_seen}, Status: {status}"])
mylog(
'verbose',
[f"ID: {lease_id}, Address: {address}, MAC Address: {mac_address}, Host Name: {host_name}, Comment: {comment}, Last Seen: {last_seen}, Status: {status}"]
)
if (status == "bound"):
plugin_objects.add_object(
@@ -83,7 +85,7 @@ def get_entries(plugin_objects: Plugin_Objects) -> Plugin_Objects:
watched3 = host_name,
watched4 = last_seen,
extra = '',
helpVal1 = comment,
foreignKey = mac_address)
except TrapError as e:
@@ -91,13 +93,13 @@ def get_entries(plugin_objects: Plugin_Objects) -> Plugin_Objects:
except Exception as e:
mylog('error', [f"Failed to connect to MikroTik API: {e}"])
mylog('verbose', [f'[{pluginName}] Script finished'])
return plugin_objects
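# Illustrative lease record as yielded by '/ip/dhcp-server/lease/print'
# (hypothetical values; keys match the lease.get() calls above):
# {'.id': '*1A', 'address': '192.168.88.10', 'mac-address': 'AA:BB:CC:DD:EE:FF',
#  'host-name': 'laptop', 'comment': 'kids laptop', 'last-seen': '1m10s', 'status': 'bound'}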
#===============================================================================
# ===============================================================================
# BEGIN
#===============================================================================
# ===============================================================================
if __name__ == '__main__':
main()

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python
# !/usr/bin/env python
import os
import sys
@@ -8,14 +8,14 @@ import subprocess
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from plugin_helper import Plugin_Objects
from logger import mylog, Logger
from const import logPath
from helper import get_setting_value
from database import DB
from models.device_instance import DeviceInstance
import conf
from pytz import timezone
from plugin_helper import Plugin_Objects # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from const import logPath # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
from database import DB # noqa: E402 [flake8 lint suppression]
from models.device_instance import DeviceInstance # noqa: E402 [flake8 lint suppression]
import conf # noqa: E402 [flake8 lint suppression]
from pytz import timezone # noqa: E402 [flake8 lint suppression]
# Make sure the TIMEZONE for logging is correct
conf.tz = timezone(get_setting_value('TIMEZONE'))
@@ -34,9 +34,8 @@ RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
plugin_objects = Plugin_Objects(RESULT_FILE)
def main():
mylog('verbose', [f'[{pluginName}] In script'])
# timeout = get_setting_value('NBLOOKUP_RUN_TIMEOUT')
timeout = 20
@@ -52,13 +51,13 @@ def main():
device_handler = DeviceInstance(db)
# Retrieve devices
if get_setting_value("REFRESH_FQDN"):
if get_setting_value("REFRESH_FQDN"):
devices = device_handler.getAll()
else:
devices = device_handler.getUnknown()
mylog('verbose', [f'[{pluginName}] Devices count: {len(devices)}'])
# TEST - below is a WINDOWS host IP
# execute_name_lookup('192.168.1.121', timeout)
@@ -67,31 +66,32 @@ def main():
if domain_name != '':
plugin_objects.add_object(
# "MAC", "IP", "Server", "Name"
primaryId = device['devMac'],
secondaryId = device['devLastIP'],
watched1 = dns_server,
watched2 = domain_name,
watched3 = '',
watched4 = '',
extra = '',
foreignKey = device['devMac'])
# "MAC", "IP", "Server", "Name"
primaryId = device['devMac'],
secondaryId = device['devLastIP'],
watched1 = dns_server,
watched2 = domain_name,
watched3 = '',
watched4 = '',
extra = '',
foreignKey = device['devMac']
)
plugin_objects.write_result_file()
mylog('verbose', [f'[{pluginName}] Script finished'])
return 0
#===============================================================================
# ===============================================================================
# Execute scan
#===============================================================================
def execute_name_lookup (ip, timeout):
# ===============================================================================
def execute_name_lookup(ip, timeout):
"""
Execute the NBTSCAN command on IP.
"""
args = ['nbtscan', ip]
# Execute command
@@ -99,20 +99,25 @@ def execute_name_lookup (ip, timeout):
try:
mylog('verbose', [f'[{pluginName}] DEBUG CMD :', args])
# try running a subprocess with a forced (timeout) in case the subprocess hangs
output = subprocess.check_output (args, universal_newlines=True, stderr=subprocess.STDOUT, timeout=(timeout), text=True)
output = subprocess.check_output(
args,
universal_newlines=True,
stderr=subprocess.STDOUT,
timeout=(timeout),
text=True
)
mylog('verbose', [f'[{pluginName}] DEBUG OUTPUT : {output}'])
domain_name = ''
dns_server = ''
# Split the output into lines
lines = output.splitlines()
# Look for the first line containing a valid NetBIOS name entry
index = 0
for line in lines:
if 'Doing NBT name scan' not in line and ip in line:
# Split the line and extract the primary NetBIOS name
@@ -121,7 +126,6 @@ def execute_name_lookup (ip, timeout):
domain_name = parts[1]
else:
mylog('verbose', [f'[{pluginName}] ⚠ ERROR - Unexpected output format: {line}'])
mylog('verbose', [f'[{pluginName}] Domain Name: {domain_name}'])
@@ -132,18 +136,21 @@ def execute_name_lookup (ip, timeout):
# if "NXDOMAIN" in e.output:
# mylog('verbose', [f'[{pluginName}]', f"No PTR record found for IP: {ip}"])
# else:
mylog('verbose', [f'[{pluginName}] ⚠ ERROR - {e.output}'])
except subprocess.TimeoutExpired:
mylog('verbose', [f'[{pluginName}] TIMEOUT - the process forcefully terminated as timeout reached'])
mylog('verbose', [f'[{pluginName}] ⚠ ERROR - {e.output}'])
if output == "": # check if the subprocess failed
mylog('verbose', [f'[{pluginName}] Scan: FAIL - check logs'])
else:
except subprocess.TimeoutExpired:
mylog('verbose', [f'[{pluginName}] TIMEOUT - the process forcefully terminated as timeout reached'])
if output == "": # check if the subprocess failed
mylog('verbose', [f'[{pluginName}] Scan: FAIL - check logs'])
else:
mylog('verbose', [f'[{pluginName}] Scan: SUCCESS'])
return '', ''
# ===============================================================================
# BEGIN
# ===============================================================================
if __name__ == '__main__':
main()

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python
# !/usr/bin/env python
# test script by running:
# tbc
@@ -7,19 +7,18 @@ import subprocess
import sys
import hashlib
import re
import nmap
import nmap
# Register NetAlertX directories
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from plugin_helper import Plugin_Objects
from logger import mylog, Logger
from helper import get_setting_value
from const import logPath
from database import DB
import conf
from pytz import timezone
from plugin_helper import Plugin_Objects # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
from const import logPath # noqa: E402 [flake8 lint suppression]
import conf # noqa: E402 [flake8 lint suppression]
from pytz import timezone # noqa: E402 [flake8 lint suppression]
# Make sure the TIMEZONE for logging is correct
conf.tz = timezone(get_setting_value('TIMEZONE'))
@@ -37,46 +36,46 @@ RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
def main():
mylog('verbose', [f'[{pluginName}] In script'])
timeout = get_setting_value('NMAPDEV_RUN_TIMEOUT')
fakeMac = get_setting_value('NMAPDEV_FAKE_MAC')
subnets = get_setting_value('SCAN_SUBNETS')
args = get_setting_value('NMAPDEV_ARGS')
mylog('verbose', [f'[{pluginName}] subnets: ', subnets])
# Initialize the Plugin obj output file
plugin_objects = Plugin_Objects(RESULT_FILE)
unique_devices = execute_scan(subnets, timeout, fakeMac, args)
mylog('verbose', [f'[{pluginName}] Devices found: {len(unique_devices)}'])
for device in unique_devices:
plugin_objects.add_object(
# "MAC", "IP", "Name", "Vendor", "Interface"
primaryId = device['mac'].lower(),
secondaryId = device['ip'],
watched1 = device['name'],
watched2 = device['vendor'],
watched3 = device['interface'],
watched4 = '',
extra = '',
foreignKey = device['mac'])
# "MAC", "IP", "Name", "Vendor", "Interface"
primaryId = device['mac'].lower(),
secondaryId = device['ip'],
watched1 = device['name'],
watched2 = device['vendor'],
watched3 = device['interface'],
watched4 = '',
extra = '',
foreignKey = device['mac']
)
plugin_objects.write_result_file()
mylog('verbose', [f'[{pluginName}] Script finished'])
return 0
#===============================================================================
# ===============================================================================
# Execute scan
#===============================================================================
# ===============================================================================
def execute_scan(subnets_list, timeout, fakeMac, args):
devices_list = []
@@ -103,22 +102,21 @@ def execute_scan(subnets_list, timeout, fakeMac, args):
return devices_list
def execute_scan_on_interface (interface, timeout, args):
# Remove unsupported VLAN flags
def execute_scan_on_interface(interface, timeout, args):
# Remove unsupported VLAN flags
interface = re.sub(r'--vlan=\S+', '', interface).strip()
# Prepare command arguments
scan_args = args.split() + interface.replace('--interface=','-e ').split()
scan_args = args.split() + interface.replace('--interface=', '-e ').split()
mylog('verbose', [f'[{pluginName}] scan_args: ', scan_args])
try:
result = subprocess.check_output(scan_args, universal_newlines=True)
except subprocess.CalledProcessError as e:
error_type = type(e).__name__
result = ""
mylog('verbose', [f'[{pluginName}] ERROR: ', error_type])
return result
@@ -130,28 +128,25 @@ def parse_nmap_xml(xml_output, interface, fakeMac):
nm = nmap.PortScanner()
nm.analyse_nmap_xml_scan(xml_output)
mylog('verbose', [f'[{pluginName}] Number of hosts: ', len(nm.all_hosts())])
for host in nm.all_hosts():
hostname = nm[host].hostname() or '(unknown)'
ip = nm[host]['addresses']['ipv4'] if 'ipv4' in nm[host]['addresses'] else ''
mac = nm[host]['addresses']['mac'] if 'mac' in nm[host]['addresses'] else ''
mylog('verbose', [f'[{pluginName}] nm[host]: ', nm[host]])
vendor = ''
if nm[host]['vendor']:
mylog('verbose', [f'[{pluginName}] entry: ', nm[host]['vendor']])
for key, value in nm[host]['vendor'].items():
vendor = value
break
# Log debug information
mylog('verbose', [f"[{pluginName}] Hostname: {hostname}, IP: {ip}, MAC: {mac}, Vendor: {vendor}"])
@@ -172,24 +167,24 @@ def parse_nmap_xml(xml_output, interface, fakeMac):
# MAC or IP missing
mylog('verbose', [f"[{pluginName}] Skipping: {hostname}, IP or MAC missing, or NMAPDEV_GENERATE_MAC setting not enabled"])
except Exception as e:
mylog('verbose', [f"[{pluginName}] Error parsing nmap XML: ", str(e)])
return devices_list
def string_to_mac_hash(input_string):
# Calculate a hash using SHA-256
sha256_hash = hashlib.sha256(input_string.encode()).hexdigest()
# Take the first 12 characters of the hash and format as a MAC address
mac_hash = ':'.join(sha256_hash[i:i+2] for i in range(0, 12, 2))
mac_hash = ':'.join(sha256_hash[i:i + 2] for i in range(0, 12, 2))
return mac_hash
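# Worked example (structure only; the hex digits depend on the actual SHA-256):
# string_to_mac_hash('eth0|192.168.1.23') -> an 'a1:b2:c3:d4:e5:f6'-style string,
# i.e. the first 12 hex characters of the digest grouped into six octets.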
#===============================================================================
# ===============================================================================
# BEGIN
#===============================================================================
# ===============================================================================
if __name__ == '__main__':
main()

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python
# !/usr/bin/env python
import os
import argparse
@@ -9,13 +9,13 @@ import subprocess
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from plugin_helper import Plugin_Objects
from logger import mylog, Logger, append_line_to_file
from utils.datetime_utils import timeNowDB
from helper import get_setting_value
from const import logPath
import conf
from pytz import timezone
from plugin_helper import Plugin_Objects # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger, append_line_to_file # noqa: E402 [flake8 lint suppression]
from utils.datetime_utils import timeNowDB # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
from const import logPath # noqa: E402 [flake8 lint suppression]
import conf # noqa: E402 [flake8 lint suppression]
from pytz import timezone # noqa: E402 [flake8 lint suppression]
# Make sure the TIMEZONE for logging is correct
conf.tz = timezone(get_setting_value('TIMEZONE'))
@@ -32,7 +32,8 @@ RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
# Initialize the Plugin obj output file
plugin_objects = Plugin_Objects(RESULT_FILE)
#-------------------------------------------------------------------------------
# -------------------------------------------------------------------------------
def main():
parser = argparse.ArgumentParser(
description='Scan ports of devices specified by IP addresses'
@@ -85,7 +86,7 @@ def main():
mylog('verbose', [f'[{pluginName}] Total number of ports found by NMAP: ', len(entries)])
for entry in entries:
plugin_objects.add_object(
primaryId = entry.mac, # MAC (Device Name)
@@ -94,14 +95,14 @@ def main():
watched2 = entry.service,
watched3 = entry.ip + ":" + entry.port,
watched4 = "",
extra = entry.extra,
foreignKey = entry.mac
)
plugin_objects.write_result_file()
#-------------------------------------------------------------------------------
# -------------------------------------------------------------------------------
class nmap_entry:
def __init__(self, ip, mac, time, port, state, service, name = '', extra = '', index = 0):
self.ip = ip
@@ -109,13 +110,13 @@ class nmap_entry:
self.time = time
self.port = port
self.state = state
self.service = service
self.extra = extra
self.index = index
self.hash = str(mac) + str(port)+ str(state)+ str(service)
self.hash = str(mac) + str(port) + str(state) + str(service)
#-------------------------------------------------------------------------------
# -------------------------------------------------------------------------------
def parse_kv_args(raw_args):
"""
Converts ['ips=a,b,c', 'macs=x,y,z', 'timeout=5'] to a dict.
@@ -125,26 +126,28 @@ def parse_kv_args(raw_args):
for item in raw_args:
if '=' not in item:
mylog('none', [f"[{pluginName}] Scan: Invalid parameter (missing '='): {item}"])
mylog('none', [f"[{pluginName}] Scan: Invalid parameter (missing '='): {item}"])
key, value = item.split('=', 1)
if key in parsed:
mylog('none', [f"[{pluginName}] Scan: Duplicate parameter supplied: {key}"])
mylog('none', [f"[{pluginName}] Scan: Duplicate parameter supplied: {key}"])
parsed[key] = value
return parsed
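# Worked example:
# parse_kv_args(['ips=10.0.0.5,10.0.0.9', 'timeout=5'])
# -> {'ips': '10.0.0.5,10.0.0.9', 'timeout': '5'}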
#-------------------------------------------------------------------------------
# -------------------------------------------------------------------------------
def safe_split_list(value, keyname):
"""Split comma list safely and ensure no empty items."""
items = [x.strip() for x in value.split(',') if x.strip()]
if not items:
mylog('none', [f"[{pluginName}] Scan: {keyname} list is empty or invalid"])
mylog('none', [f"[{pluginName}] Scan: {keyname} list is empty or invalid"])
return items
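# Worked example: safe_split_list('80, 443, ,8080', 'ports') -> ['80', '443', '8080'];
# an all-empty value logs the warning above and still returns [].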
#-------------------------------------------------------------------------------
# -------------------------------------------------------------------------------
def performNmapScan(deviceIPs, deviceMACs, timeoutSec, args):
"""
run nmap scan on a list of devices
@@ -154,15 +157,12 @@ def performNmapScan(deviceIPs, deviceMACs, timeoutSec, args):
# collect ports / new Nmap Entries
newEntriesTmp = []
if len(deviceIPs) > 0:
if len(deviceIPs) > 0:
devTotal = len(deviceIPs)
mylog('verbose', [f'[{pluginName}] Scan: Nmap for max ', str(timeoutSec), 's ('+ str(round(int(timeoutSec) / 60, 1)) +'min) per device'])
mylog('verbose', ["[NMAP Scan] Estimated max delay: ", (devTotal * int(timeoutSec)), 's ', '(', round((devTotal * int(timeoutSec))/60,1) , 'min)' ])
mylog('verbose', [f'[{pluginName}] Scan: Nmap for max ', str(timeoutSec), 's (' + str(round(int(timeoutSec) / 60, 1)) + 'min) per device'])
mylog('verbose', ["[NMAP Scan] Estimated max delay: ", (devTotal * int(timeoutSec)), 's ', '(', round((devTotal * int(timeoutSec)) / 60, 1) , 'min)'])
devIndex = 0
for ip in deviceIPs:
@@ -171,67 +171,63 @@ def performNmapScan(deviceIPs, deviceMACs, timeoutSec, args):
# prepare arguments from user supplied ones
nmapArgs = ['nmap'] + args.split() + [ip]
progress = ' (' + str(devIndex+1) + '/' + str(devTotal) + ')'
progress = ' (' + str(devIndex + 1) + '/' + str(devTotal) + ')'
try:
# try running a subprocess with a forced timeout in case the subprocess hangs
output = subprocess.check_output (nmapArgs, universal_newlines=True, stderr=subprocess.STDOUT, timeout=(float(timeoutSec)))
output = subprocess.check_output(
nmapArgs,
universal_newlines=True,
stderr=subprocess.STDOUT,
timeout=(float(timeoutSec))
)
except subprocess.CalledProcessError as e:
# An error occurred, handle it
mylog('none', ["[NMAP Scan] " ,e.output])
mylog('none', ["[NMAP Scan] ⚠ ERROR - Nmap Scan - check logs", progress])
mylog('none', ["[NMAP Scan] ", e.output])
mylog('none', ["[NMAP Scan] ⚠ ERROR - Nmap Scan - check logs", progress])
except subprocess.TimeoutExpired:
mylog('verbose', [f'[{pluginName}] Nmap TIMEOUT - the process was forcefully terminated as the timeout was reached for ', ip, progress])
mylog('verbose', [f'[{pluginName}] Nmap TIMEOUT - the process was forcefully terminated as the timeout was reached for ', ip, progress])
if output == "": # check if the subprocess failed
mylog('minimal', [f'[{pluginName}] Nmap FAIL for ', ip, progress ,' check logs for details'])
else:
if output == "": # check if the subprocess failed
mylog('minimal', [f'[{pluginName}] Nmap FAIL for ', ip, progress, ' check logs for details'])
else:
mylog('verbose', [f'[{pluginName}] Nmap SUCCESS for ', ip, progress])
# check the last run output
# check the last run output
newLines = output.split('\n')
# regular logging
for line in newLines:
append_line_to_file (logPath + '/app_nmap.log', line +'\n')
append_line_to_file(logPath + '/app_nmap.log', line + '\n')
index = 0
startCollecting = False
duration = ""
duration = ""
newPortsPerDevice = 0
for line in newLines:
for line in newLines:
if 'Starting Nmap' in line:
if len(newLines) > index+1 and 'Note: Host seems down' in newLines[index+1]:
break # this entry is empty
if len(newLines) > index + 1 and 'Note: Host seems down' in newLines[index + 1]:
break # this entry is empty
elif 'PORT' in line and 'STATE' in line and 'SERVICE' in line:
startCollecting = True
elif 'PORT' in line and 'STATE' in line and 'SERVICE' in line:
startCollecting = False # end reached
elif startCollecting and len(line.split()) == 3:
elif 'PORT' in line and 'STATE' in line and 'SERVICE' in line:
startCollecting = False # end reached
elif startCollecting and len(line.split()) == 3:
newEntriesTmp.append(nmap_entry(ip, deviceMACs[devIndex], timeNowDB(), line.split()[0], line.split()[1], line.split()[2]))
newPortsPerDevice += 1
elif 'Nmap done' in line:
duration = line.split('scanned in ')[1]
mylog('verbose', [f'[{pluginName}] {newPortsPerDevice} ports found on {deviceMACs[devIndex]}'])
duration = line.split('scanned in ')[1]
mylog('verbose', [f'[{pluginName}] {newPortsPerDevice} ports found on {deviceMACs[devIndex]} after {duration}'])
index += 1
devIndex += 1
# end for loop
return newEntriesTmp
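# For reference, the loop above consumes standard nmap output shaped like this (illustrative):
#   Starting Nmap 7.93 ( https://nmap.org ) at 2025-11-22 13:14 AEDT
#   PORT     STATE SERVICE
#   22/tcp   open  ssh
#   80/tcp   open  http
#   Nmap done: 1 IP address (1 host up) scanned in 2.05 seconds
# Every three-token line after the PORT/STATE/SERVICE header becomes one nmap_entry.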
#===============================================================================
# ===============================================================================
# BEGIN
#===============================================================================
# ===============================================================================
if __name__ == '__main__':
main()

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python
# !/usr/bin/env python
# test script by running:
# tbc
@@ -11,14 +11,14 @@ import re
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from plugin_helper import Plugin_Objects
from logger import mylog, Logger
from helper import get_setting_value
from const import logPath
from database import DB
from models.device_instance import DeviceInstance
import conf
from pytz import timezone
from plugin_helper import Plugin_Objects # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
from const import logPath # noqa: E402 [flake8 lint suppression]
from database import DB # noqa: E402 [flake8 lint suppression]
from models.device_instance import DeviceInstance # noqa: E402 [flake8 lint suppression]
import conf # noqa: E402 [flake8 lint suppression]
from pytz import timezone # noqa: E402 [flake8 lint suppression]
# Make sure the TIMEZONE for logging is correct
conf.tz = timezone(get_setting_value('TIMEZONE'))
@@ -33,11 +33,9 @@ LOG_FILE = os.path.join(LOG_PATH, f'script.{pluginName}.log')
RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
def main():
mylog('verbose', [f'[{pluginName}] In script'])
mylog('verbose', [f'[{pluginName}] In script'])
timeout = get_setting_value('NSLOOKUP_RUN_TIMEOUT')
@@ -52,13 +50,13 @@ def main():
device_handler = DeviceInstance(db)
# Retrieve devices
if get_setting_value("REFRESH_FQDN"):
if get_setting_value("REFRESH_FQDN"):
devices = device_handler.getAll()
else:
else:
devices = device_handler.getUnknown()
mylog('verbose', [f'[{pluginName}] Devices count: {len(devices)}'])
mylog('verbose', [f'[{pluginName}] Devices count: {len(devices)}'])
# TEST - below is a WINDOWS host IP
# execute_name_lookup('192.168.1.121', timeout)
@@ -67,31 +65,32 @@ def main():
if domain_name != '':
plugin_objects.add_object(
# "MAC", "IP", "Server", "Name"
primaryId = device['devMac'],
secondaryId = device['devLastIP'],
watched1 = dns_server,
watched2 = domain_name,
watched3 = '',
watched4 = '',
extra = '',
foreignKey = device['devMac'])
# "MAC", "IP", "Server", "Name"
primaryId = device['devMac'],
secondaryId = device['devLastIP'],
watched1 = dns_server,
watched2 = domain_name,
watched3 = '',
watched4 = '',
extra = '',
foreignKey = device['devMac']
)
plugin_objects.write_result_file()
mylog('verbose', [f'[{pluginName}] Script finished'])
mylog('verbose', [f'[{pluginName}] Script finished'])
return 0
#===============================================================================
# ===============================================================================
# Execute scan
#===============================================================================
def execute_nslookup (ip, timeout):
# ===============================================================================
def execute_nslookup(ip, timeout):
"""
Execute the NSLOOKUP command on IP.
"""
nslookup_args = ['nslookup', ip]
# Execute command
@@ -99,7 +98,13 @@ def execute_nslookup (ip, timeout):
try:
# try running a subprocess with a forced timeout in case the subprocess hangs
output = subprocess.check_output (nslookup_args, universal_newlines=True, stderr=subprocess.STDOUT, timeout=(timeout), text=True)
output = subprocess.check_output(
nslookup_args,
universal_newlines=True,
stderr=subprocess.STDOUT,
timeout=(timeout),
text=True
)
domain_name = ''
dns_server = ''
@@ -110,8 +115,7 @@ def execute_nslookup (ip, timeout):
domain_pattern = re.compile(r'name\s*=\s*([^\s]+)', re.IGNORECASE)
server_pattern = re.compile(r'Server:\s+(.+)', re.IGNORECASE)
domain_match = domain_pattern.search(output)
domain_match = domain_pattern.search(output)
server_match = server_pattern.search(output)
if domain_match:
@@ -131,24 +135,20 @@ def execute_nslookup (ip, timeout):
else:
mylog('verbose', [f'[{pluginName}]', e.output])
# Handle other errors here
# mylog('verbose', [f'[{pluginName}] ⚠ ERROR - check logs'])
except subprocess.TimeoutExpired:
mylog('verbose', [f'[{pluginName}] TIMEOUT - the process was forcefully terminated as the timeout was reached'])
# mylog('verbose', [f'[{pluginName}] ⚠ ERROR - check logs'])
except subprocess.TimeoutExpired:
mylog('verbose', [f'[{pluginName}] TIMEOUT - the process was forcefully terminated as the timeout was reached'])
if output != "": # check if the subprocess failed
if output == "": # check if the subprocess failed
tmp = 1 # can't have empty
# mylog('verbose', [f'[{pluginName}] Scan: FAIL - check logs'])
else:
mylog('verbose', [f'[{pluginName}] Scan: SUCCESS'])
return '', ''
return '', ''
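# For reference, the domain/server regexes above are matched against reverse-lookup
# output shaped like this (illustrative):
#   Server:         192.168.1.1
#   Address:        192.168.1.1#53
#   121.1.168.192.in-addr.arpa      name = host-121.local.
# domain_pattern captures 'host-121.local.' and server_pattern the resolver line.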
#===============================================================================
# ===============================================================================
# BEGIN
#===============================================================================
# ===============================================================================
if __name__ == '__main__':
main()
main()

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python
# !/usr/bin/env python
__author__ = "ffsb"
__version__ = "0.1" # initial
__version__ = "0.2" # added logic to retry omada api call once as it seems to sometimes fail for some reasons, and error handling logic...
@@ -15,10 +15,9 @@ __version__ = "1.3" # fix detection of the default gateway IP address that woul
# try to identify and populate their connections by switch/access points and ports/SSID
# try to differentiate root bridges from accessory
#
# sample code to update unbound on opnsense - for reference...
# curl -X POST -d '{"host":{"enabled":"1","hostname":"test","domain":"testdomain.com","rr":"A","mxprio":"","mx":"","server":"10.0.1.1","description":""}}' -H "Content-Type: application/json" -k -u $OPNS_KEY:$OPNS_SECRET https://$IPFW/api/unbound/settings/AddHostOverride
# curl -X POST -d '{"host":{"enabled":"1","hostname":"test","domain":"testdomain.com","rr":"A","mxprio":"","mx":"","server":"10.0.1.1","description":""}}'\
# -H "Content-Type: application/json" -k -u $OPNS_KEY:$OPNS_SECRET https://$IPFW/api/unbound/settings/AddHostOverride
#
import os
import sys
@@ -35,12 +34,12 @@ import multiprocessing
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from plugin_helper import Plugin_Objects
from logger import mylog, Logger
from const import logPath
from helper import get_setting_value
from pytz import timezone
import conf
from plugin_helper import Plugin_Objects # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from const import logPath # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
from pytz import timezone # noqa: E402 [flake8 lint suppression]
import conf # noqa: E402 [flake8 lint suppression]
# Make sure the TIMEZONE for logging is correct
conf.tz = timezone(get_setting_value('TIMEZONE'))
@@ -87,8 +86,6 @@ cMAC, cIP, cNAME, cSWITCH_AP, cPORT_SSID = range(5)
OMDLOGLEVEL = "debug"
#
# translate MAC address from standard ieee model to ietf draft
# AA-BB-CC-DD-EE-FF to aa:bb:cc:dd:ee:ff
# tplink adheres to ieee, Nax adheres to ietf
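# A minimal sketch of that conversion (the plugin's ieee2ietf_mac_formater may differ in details):
#   def ieee2ietf_mac_formater(mac):
#       # "AA-BB-CC-DD-EE-FF" -> "aa:bb:cc:dd:ee:ff"
#       return mac.strip().lower().replace("-", ":")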
@@ -142,7 +139,7 @@ def callomada(myargs):
try:
mf = io.StringIO()
with redirect_stdout(mf):
bar = omada(myargs)
omada(myargs)
omada_output = mf.getvalue()
except Exception:
mylog(
@@ -190,12 +187,12 @@ def add_uplink(
if switch_mac not in device_data_bymac:
mylog("none", [f"[{pluginName}] switch_mac '{switch_mac}' not found in device_data_bymac"])
return
# Ensure SWITCH_AP key exists in the dictionary
if SWITCH_AP not in device_data_bymac[switch_mac]:
mylog("none", [f"[{pluginName}] Missing key '{SWITCH_AP}' in device_data_bymac[{switch_mac}]"])
return
# Check if uplink should be added
if device_data_bymac[switch_mac][SWITCH_AP] in [None, "null"]:
device_data_bymac[switch_mac][SWITCH_AP] = uplink_mac
@@ -204,11 +201,10 @@ def add_uplink(
if uplink_mac not in device_data_bymac:
mylog("none", [f"[{pluginName}] uplink_mac '{uplink_mac}' not found in device_data_bymac"])
return
# Determine port to uplink
if (
device_data_bymac[switch_mac].get(TYPE) == "Switch"
and device_data_bymac[uplink_mac].get(TYPE) == "Switch"
device_data_bymac[switch_mac].get(TYPE) == "Switch" and device_data_bymac[uplink_mac].get(TYPE) == "Switch"
):
port_to_uplink = port_byswitchmac_byclientmac.get(switch_mac, {}).get(uplink_mac)
if port_to_uplink is None:
@@ -216,16 +212,14 @@ def add_uplink(
return
else:
port_to_uplink = device_data_bymac[uplink_mac].get(PORT_SSID)
# Assign port to switch_mac
device_data_bymac[switch_mac][PORT_SSID] = port_to_uplink
# Recursively add uplinks for linked devices
for link in sadevices_linksbymac.get(switch_mac, []):
if (
link in device_data_bymac
and device_data_bymac[link].get(SWITCH_AP) in [None, "null"]
and device_data_bymac[switch_mac].get(TYPE) == "Switch"
link in device_data_bymac and device_data_bymac[link].get(SWITCH_AP) in [None, "null"] and device_data_bymac[switch_mac].get(TYPE) == "Switch"
):
add_uplink(
switch_mac,
@@ -236,7 +230,6 @@ def add_uplink(
)
# ----------------------------------------------
# Main initialization
def main():
@@ -324,16 +317,16 @@ def main():
)
mymac = ieee2ietf_mac_formater(device[MAC])
plugin_objects.add_object(
primaryId=mymac, # MAC
secondaryId=device[IP], # IP
watched1=device[NAME], # NAME/HOSTNAME
watched2=ParentNetworkNode, # PARENT NETWORK NODE MAC
watched3=myport, # PORT
watched4=myssid, # SSID
primaryId=mymac, # MAC
secondaryId=device[IP], # IP
watched1=device[NAME], # NAME/HOSTNAME
watched2=ParentNetworkNode, # PARENT NETWORK NODE MAC
watched3=myport, # PORT
watched4=myssid, # SSID
extra=device[TYPE],
# omada_site, # SITENAME (cur_NetworkSite) or VENDOR (cur_Vendor) (PICK one and adjust config.json -> "column": "Extra")
foreignKey=device[MAC].lower().replace("-", ":"),
) # usually MAC
) # usually MAC
mylog(
"verbose",
@@ -369,7 +362,6 @@ def get_omada_devices_details(msadevice_data):
mswitch_dump = callomada(["-t", "myomada", "switch", "-d", mthisswitch])
else:
mswitch_detail = ""
nswitch_dump = ""
return mswitch_detail, mswitch_dump
@@ -414,7 +406,6 @@ def get_device_data(omada_clients_output, switches_and_aps, device_handler):
# 17:27:10 [<unique_prefix>] token: "['1A-2B-3C-4D-5E-6F', '192.168.0.217', '1A-2B-3C-4D-5E-6F', '17', '40-AE-30-A5-A7-50, 'Switch']"
# constants
sadevices_macbyname = {}
sadevices_macbymac = {}
sadevices_linksbymac = {}
port_byswitchmac_byclientmac = {}
device_data_bymac = {}
@@ -427,7 +418,7 @@ def get_device_data(omada_clients_output, switches_and_aps, device_handler):
def run_command(command, index):
result = subprocess.run(command, capture_output=True, text=True, shell=True)
return str(index), result.stdout.strip()
myindex, command_output = run_command(command, 2)
mylog('verbose', [f'[{pluginName}] command={command} index={myindex} results={command_output}'])
"""
@@ -556,11 +547,11 @@ def get_device_data(omada_clients_output, switches_and_aps, device_handler):
#
naxname = real_naxname
if real_naxname != None:
if real_naxname is not None:
if "(" in real_naxname:
# removing parenthesis and domains from the name
naxname = real_naxname.split("(")[0]
if naxname != None and "." in naxname:
if naxname is not None and "." in naxname:
naxname = naxname.split(".")[0]
if naxname in (None, "null", ""):
naxname = (

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python
# !/usr/bin/env python
"""
This plugin imports devices and clients from Omada Controller using their OpenAPI.
@@ -25,7 +25,6 @@ import sys
import urllib3
import requests
import time
import datetime
import pytz
from datetime import datetime
@@ -35,11 +34,11 @@ from typing import Literal, Any, Dict
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from plugin_helper import Plugin_Objects, is_typical_router_ip, is_mac
from logger import mylog, Logger
from const import logPath
from helper import get_setting_value
import conf
from plugin_helper import Plugin_Objects, is_typical_router_ip, is_mac # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from const import logPath # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
import conf # noqa: E402 [flake8 lint suppression]
# Make sure the TIMEZONE for logging is correct
conf.tz = pytz.timezone(get_setting_value('TIMEZONE'))
@@ -176,7 +175,10 @@ class OmadaHelper:
# If it's not a gateway try to assign parent node MAC
if data.get("type", "") != "gateway":
parent_mac = OmadaHelper.normalize_mac(data.get("uplinkDeviceMac"))
entry["parent_node_mac_address"] = parent_mac.get("response_result") if isinstance(parent_mac, dict) and parent_mac.get("response_type") == "success" else ""
resp_type = parent_mac.get("response_type")
entry["parent_node_mac_address"] = parent_mac.get("response_result") if isinstance(parent_mac, dict) and resp_type == "success" else ""
# Applicable only for CLIENT
if input_type == "client":
@@ -185,15 +187,26 @@ class OmadaHelper:
# Try to assign parent node MAC and PORT/SSID to the CLIENT
if data.get("connectDevType", "") == "gateway":
parent_mac = OmadaHelper.normalize_mac(data.get("gatewayMac"))
entry["parent_node_mac_address"] = parent_mac.get("response_result") if isinstance(parent_mac, dict) and parent_mac.get("response_type") == "success" else ""
resp_type = parent_mac.get("response_type")
entry["parent_node_mac_address"] = parent_mac.get("response_result") if isinstance(parent_mac, dict) and resp_type == "success" else ""
entry["parent_node_port"] = data.get("port", "")
elif data.get("connectDevType", "") == "switch":
parent_mac = OmadaHelper.normalize_mac(data.get("switchMac"))
entry["parent_node_mac_address"] = parent_mac.get("response_result") if isinstance(parent_mac, dict) and parent_mac.get("response_type") == "success" else ""
resp_type = parent_mac.get("response_type")
entry["parent_node_mac_address"] = parent_mac.get("response_result") if isinstance(parent_mac, dict) and resp_type == "success" else ""
entry["parent_node_port"] = data.get("port", "")
elif data.get("connectDevType", "") == "ap":
parent_mac = OmadaHelper.normalize_mac(data.get("apMac"))
entry["parent_node_mac_address"] = parent_mac.get("response_result") if isinstance(parent_mac, dict) and parent_mac.get("response_type") == "success" else ""
resp_type = parent_mac.get("response_type")
entry["parent_node_mac_address"] = parent_mac.get("response_result") if isinstance(parent_mac, dict) and resp_type == "success" else ""
entry["parent_node_ssid"] = data.get("ssid", "")
# Add the entry to the result
@@ -253,7 +266,7 @@ class OmadaAPI:
"""Return request headers."""
headers = {"Content-type": "application/json"}
# Add access token to header if requested and available
if include_auth == True:
if include_auth is True:
if not self.access_token:
OmadaHelper.debug("No access token available for headers")
else:
@@ -368,7 +381,7 @@ class OmadaAPI:
# Failed site population
OmadaHelper.debug(f"Site population response: {response}")
return OmadaHelper.response("error", f"Site population failed - error: {response.get('response_message', 'Not provided')}")
return OmadaHelper.response("error", f"Site population failed - error: {response.get('response_message', 'Not provided')}")
def requested_sites(self) -> list:
"""Returns sites requested by user."""

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python
# !/usr/bin/env python
"""
NetAlertX plugin: PIHOLEAPI
Imports devices from Pi-hole v6 API (Network endpoints) into NetAlertX plugin results.
@@ -17,12 +17,12 @@ sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
pluginName = 'PIHOLEAPI'
from plugin_helper import Plugin_Objects, is_mac
from logger import mylog, Logger
from helper import get_setting_value
from const import logPath
import conf
from pytz import timezone
from plugin_helper import Plugin_Objects, is_mac # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
from const import logPath # noqa: E402 [flake8 lint suppression]
import conf # noqa: E402 [flake8 lint suppression]
from pytz import timezone # noqa: E402 [flake8 lint suppression]
# Setup timezone & logger using standard NAX helpers
conf.tz = timezone(get_setting_value('TIMEZONE'))
@@ -179,7 +179,7 @@ def get_pihole_network_devices():
resp = requests.get(PIHOLEAPI_URL + 'api/network/devices', headers=headers, params=params, verify=PIHOLEAPI_VERIFY_SSL, timeout=PIHOLEAPI_RUN_TIMEOUT)
resp.raise_for_status()
data = resp.json()
mylog('debug', [f'[{pluginName}] Pi-hole API returned data: {json.dumps(data)}'])
except Exception as e:
@@ -267,7 +267,7 @@ def main():
for entry in device_entries:
if is_mac(entry['mac']):
# Map to Plugin_Objects fields
# Map to Plugin_Objects fields
mylog('verbose', [f'[{pluginName}] found: {entry["name"]}|{entry["mac"]}|{entry["ip"]}'])
plugin_objects.add_object(

View File

@@ -5,18 +5,18 @@ import os
import re
import base64
import json
from datetime import datetime
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.append(f"{INSTALL_PATH}/front/plugins")
sys.path.append(f'{INSTALL_PATH}/server')
sys.path.append(f'{INSTALL_PATH}/server')
from logger import mylog, Logger
from utils.datetime_utils import timeNowDB
from const import default_tz, fullConfPath
from logger import mylog # noqa: E402 [flake8 lint suppression]
from utils.datetime_utils import timeNowDB # noqa: E402 [flake8 lint suppression]
from const import default_tz, fullConfPath # noqa: E402 [flake8 lint suppression]
#-------------------------------------------------------------------------------
# -------------------------------------------------------------------------------
def read_config_file():
"""
returns a dict of the config file key:value pairs
@@ -25,15 +25,15 @@ def read_config_file():
filename = fullConfPath
print('[plugin_helper] reading config file')
# load the variables from .conf
with open(filename, "r") as file:
code = compile(file.read(), filename, "exec")
confDict = {} # config dictionary
confDict = {} # config dictionary
exec(code, {"__builtins__": {}}, confDict)
return confDict
return confDict
configFile = read_config_file()
@@ -42,6 +42,7 @@ if timeZoneSetting not in all_timezones:
timeZoneSetting = default_tz
timeZone = pytz.timezone(timeZoneSetting)
# -------------------------------------------------------------------
# Sanitizes plugin output
def handleEmpty(input):
@@ -54,70 +55,72 @@ def handleEmpty(input):
input = re.sub(r'[^\x00-\x7F]+', ' ', input)
input = input.replace('\n', '') # Removing new lines
return input
# -------------------------------------------------------------------
# Sanitizes string
def rmBadChars(input):
input = handleEmpty(input)
input = input.replace("'", '_') # Removing ' (single quotes)
return input
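# e.g. rmBadChars("John's iPhone") -> "John_s iPhone"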
# -------------------------------------------------------------------
# check if this is a router IP
def is_typical_router_ip(ip_address):
# List of common default gateway IP addresses
common_router_ips = [
"192.168.0.1", "192.168.1.1", "192.168.1.254", "192.168.0.254",
"10.0.0.1", "10.1.1.1", "192.168.2.1", "192.168.10.1", "192.168.11.1",
"192.168.100.1", "192.168.101.1", "192.168.123.254", "192.168.223.1",
"192.168.31.1", "192.168.8.1", "192.168.254.254", "192.168.50.1",
"192.168.3.1", "192.168.4.1", "192.168.5.1", "192.168.9.1",
"192.168.15.1", "192.168.16.1", "192.168.20.1", "192.168.30.1",
"192.168.42.1", "192.168.62.1", "192.168.178.1", "192.168.1.1",
"192.168.1.254", "192.168.0.1", "192.168.0.10", "192.168.0.100",
"192.168.0.254"
]
return ip_address in common_router_ips
# List of common default gateway IP addresses
common_router_ips = [
"192.168.0.1", "192.168.1.1", "192.168.1.254", "192.168.0.254",
"10.0.0.1", "10.1.1.1", "192.168.2.1", "192.168.10.1", "192.168.11.1",
"192.168.100.1", "192.168.101.1", "192.168.123.254", "192.168.223.1",
"192.168.31.1", "192.168.8.1", "192.168.254.254", "192.168.50.1",
"192.168.3.1", "192.168.4.1", "192.168.5.1", "192.168.9.1",
"192.168.15.1", "192.168.16.1", "192.168.20.1", "192.168.30.1",
"192.168.42.1", "192.168.62.1", "192.168.178.1", "192.168.1.1",
"192.168.1.254", "192.168.0.1", "192.168.0.10", "192.168.0.100",
"192.168.0.254"
]
return ip_address in common_router_ips
# -------------------------------------------------------------------
# Check if a valid MAC address
def is_mac(input):
input_str = str(input).lower() # Convert to string and lowercase so non-string values won't raise errors
isMac = bool(re.match("[0-9a-f]{2}([-:]?)[0-9a-f]{2}(\\1[0-9a-f]{2}){4}$", input_str))
if not isMac: # If it's not a MAC address, log the input
mylog('verbose', [f'[is_mac] not a MAC: {input_str}'])
return isMac
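# The back-reference \1 forces one consistent separator (illustrative):
#   is_mac('aa:bb:cc:dd:ee:ff')  -> True
#   is_mac('AA-BB-CC-DD-EE-FF')  -> True   (lowercased before matching)
#   is_mac('aabbccddeeff')       -> True   (empty separator)
#   is_mac('aa:bb-cc:dd:ee:ff')  -> False  (mixed separators)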
# -------------------------------------------------------------------
def decodeBase64(inputParamBase64):
# Printing the input list to check its content.
mylog('debug', ['[Plugins] Helper base64 input: ', input])
print('[Plugins] Helper base64 input: ')
print(input)
# Extract the base64-encoded subnet information from the first element
# The format of the element is assumed to be like 'param=b<base64-encoded-data>'.
mylog('debug', ['[Plugins] Helper base64 input: ', inputParamBase64])
print('[Plugins] Helper base64 input: ')
print(inputParamBase64)
# Extract the base64-encoded subnet information from the first element
# The format of the element is assumed to be like 'param=b<base64-encoded-data>'.
# Printing the extracted base64-encoded information.
mylog('debug', ['[Plugins] Helper base64 inputParamBase64: ', inputParamBase64])
mylog('debug', ['[Plugins] Helper base64 inputParamBase64: ', inputParamBase64])
# Decode the base64-encoded subnet information to get the actual subnet information in ASCII format.
result = base64.b64decode(inputParamBase64).decode('ascii')
# Print the decoded subnet information.
mylog('debug', ['[Plugins] Helper base64 result: ', result])
mylog('debug', ['[Plugins] Helper base64 result: ', result])
return result
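# A minimal round trip (illustrative):
#   base64.b64encode(b'192.168.1.0/24 --interface=eth0').decode()
#   -> 'MTkyLjE2OC4xLjAvMjQgLS1pbnRlcmZhY2U9ZXRoMA=='
#   decodeBase64('MTkyLjE2OC4xLjAvMjQgLS1pbnRlcmZhY2U9ZXRoMA==')
#   -> '192.168.1.0/24 --interface=eth0'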
# -------------------------------------------------------------------
def decode_settings_base64(encoded_str, convert_types=True):
"""
@@ -167,7 +170,7 @@ def decode_settings_base64(encoded_str, convert_types=True):
def normalize_mac(mac):
# Split the MAC address by colon (:) or hyphen (-) and convert each part to uppercase
parts = mac.upper().split(':')
# If the MAC address is split by hyphen instead of colon
if len(parts) == 1:
parts = mac.upper().split('-')
@@ -177,14 +180,15 @@ def normalize_mac(mac):
# Join the parts with colon (:)
normalized_mac = ':'.join(normalized_parts)
return normalized_mac
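# e.g. (illustrative):
#   normalize_mac('1a-2b-3c-4d-5e-6f') -> '1A:2B:3C:4D:5E:6F'
#   normalize_mac('1a:2b:3c:4d:5e:6f') -> '1A:2B:3C:4D:5E:6F'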
# -------------------------------------------------------------------
class Plugin_Object:
"""
"""
Plugin_Object class to manage one object introduced by the plugin.
An object typically is a device but could also be a website or something
An object typically is a device but could also be a website or something
else that is monitored by the plugin.
"""
@@ -222,8 +226,8 @@ class Plugin_Object:
self.helpVal4 = helpVal4 or ""
def write(self):
"""
Write the object details as a string in the
"""
Write the object details as a string in the
format required to write the result file.
"""
line = "{}|{}|{}|{}|{}|{}|{}|{}|{}|{}|{}|{}|{}\n".format(
@@ -243,6 +247,7 @@ class Plugin_Object:
)
return line
class Plugin_Objects:
"""
Plugin_Objects is the class that manages and holds all the objects created by the plugin.
@@ -303,7 +308,3 @@ class Plugin_Objects:
def __len__(self):
return len(self.objects)

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python
# !/usr/bin/env python
from __future__ import unicode_literals
import subprocess
@@ -10,12 +10,12 @@ import sys
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from plugin_helper import Plugin_Objects, handleEmpty, normalize_mac
from logger import mylog, Logger
from helper import get_setting_value
from const import logPath
import conf
from pytz import timezone
from plugin_helper import Plugin_Objects, handleEmpty, normalize_mac # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
from const import logPath # noqa: E402 [flake8 lint suppression]
import conf # noqa: E402 [flake8 lint suppression]
from pytz import timezone # noqa: E402 [flake8 lint suppression]
# Make sure the TIMEZONE for logging is correct
conf.tz = timezone(get_setting_value('TIMEZONE'))
@@ -28,53 +28,60 @@ pluginName = "SNMPDSC"
LOG_PATH = logPath + '/plugins'
RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
# Workflow
def main():
mylog('verbose', ['[SNMPDSC] In script '])
def main():
mylog('verbose', ['[SNMPDSC] In script '])
# init global variables
global snmpWalkCmds
parser = argparse.ArgumentParser(description='This plugin is used to discover devices via the arp table(s) of a RFC1213 compliant router or switch.')
parser.add_argument('routers', action="store", help="IP(s) of routers, separated by comma (,) if passing multiple")
parser = argparse.ArgumentParser(description='This plugin is used to discover devices via the arp table(s) of a RFC1213 compliant router or switch.')
parser.add_argument(
'routers',
action="store",
help="IP(s) of routers, separated by comma (,) if passing multiple"
)
values = parser.parse_args()
timeoutSetting = get_setting_value("SNMPDSC_RUN_TIMEOUT")
plugin_objects = Plugin_Objects(RESULT_FILE)
if values.routers:
snmpWalkCmds = values.routers.split('=')[1].replace('\'','')
if values.routers:
snmpWalkCmds = values.routers.split('=')[1].replace('\'', '')
if ',' in snmpWalkCmds:
commands = snmpWalkCmds.split(',')
else:
commands = [snmpWalkCmds]
for cmd in commands:
mylog('verbose', ['[SNMPDSC] Router snmpwalk command: ', cmd])
mylog('verbose', ['[SNMPDSC] Router snmpwalk command: ', cmd])
# split the string, remove white spaces around each item, and exclude any empty strings
snmpwalkArgs = [arg.strip() for arg in cmd.split(' ') if arg.strip()]
# Execute N probes and insert in list
probes = 1 # N probes
for _ in range(probes):
output = subprocess.check_output (snmpwalkArgs, universal_newlines=True, stderr=subprocess.STDOUT, timeout=(timeoutSetting))
mylog('verbose', ['[SNMPDSC] output: ', output])
for _ in range(probes):
output = subprocess.check_output(
snmpwalkArgs,
universal_newlines=True,
stderr=subprocess.STDOUT,
timeout=(timeoutSetting)
)
mylog('verbose', ['[SNMPDSC] output: ', output])
lines = output.split('\n')
for line in lines:
for line in lines:
tmpSplt = line.split('"')
tmpSplt = line.split('"')
if len(tmpSplt) == 3:
ipStr = tmpSplt[0].split('.')[-4:] # Get the last 4 elements to extract the IP
macStr = tmpSplt[1].strip().split(' ') # Remove leading/trailing spaces from MAC
@@ -82,19 +89,18 @@ def main():
macAddress = ':'.join(macStr)
ipAddress = '.'.join(ipStr)
mylog('verbose', [f'[SNMPDSC] IP: {ipAddress} MAC: {macAddress}'])
mylog('verbose', [f'[SNMPDSC] IP: {ipAddress} MAC: {macAddress}'])
plugin_objects.add_object(
primaryId = handleEmpty(macAddress),
secondaryId = handleEmpty(ipAddress.strip()), # Remove leading/trailing spaces from IP
secondaryId = handleEmpty(ipAddress.strip()), # Remove leading/trailing spaces from IP
watched1 = '(unknown)',
watched2 = handleEmpty(snmpwalkArgs[6]), # router IP
extra = handleEmpty(line),
foreignKey = handleEmpty(macAddress) # Use the primary ID as the foreign key
)
else:
mylog('verbose', ['[SNMPDSC] ipStr does not seem to contain a valid IP:', ipStr])
mylog('verbose', ['[SNMPDSC] ipStr does not seem to contain a valid IP:', ipStr])
elif line.startswith('ipNetToMediaPhysAddress'):
# Format: snmpwalk -OXsq output
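# e.g. (assumed shape): ipNetToMediaPhysAddress[4][192.168.1.5] "aa bb cc dd ee ff "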
@@ -115,12 +121,11 @@ def main():
foreignKey = handleEmpty(macAddress)
)
mylog('verbose', ['[SNMPDSC] Entries found: ', len(plugin_objects)])
mylog('verbose', ['[SNMPDSC] Entries found: ', len(plugin_objects)])
plugin_objects.write_result_file()
# BEGIN
if __name__ == '__main__':
if __name__ == '__main__':
main()

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python
# !/usr/bin/env python
import os
import sys
@@ -12,16 +12,16 @@ import base64
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from plugin_helper import Plugin_Objects
from utils.plugin_utils import get_plugins_configs, decode_and_rename_files
from logger import mylog, Logger
from const import fullDbPath, logPath
from helper import get_setting_value
from utils.datetime_utils import timeNowDB
from utils.crypto_utils import encrypt_data
from messaging.in_app import write_notification
import conf
from pytz import timezone
from plugin_helper import Plugin_Objects # noqa: E402 [flake8 lint suppression]
from utils.plugin_utils import get_plugins_configs, decode_and_rename_files # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from const import fullDbPath, logPath # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
from utils.datetime_utils import timeNowDB # noqa: E402 [flake8 lint suppression]
from utils.crypto_utils import encrypt_data # noqa: E402 [flake8 lint suppression]
from messaging.in_app import write_notification # noqa: E402 [flake8 lint suppression]
import conf # noqa: E402 [flake8 lint suppression]
from pytz import timezone # noqa: E402 [flake8 lint suppression]
# Make sure the TIMEZONE for logging is correct
conf.tz = timezone(get_setting_value('TIMEZONE'))
@@ -41,21 +41,21 @@ plugin_objects = Plugin_Objects(RESULT_FILE)
def main():
mylog('verbose', [f'[{pluginName}] In script'])
mylog('verbose', [f'[{pluginName}] In script'])
# Retrieve configuration settings
plugins_to_sync = get_setting_value('SYNC_plugins')
api_token = get_setting_value('API_TOKEN')
api_token = get_setting_value('API_TOKEN')
encryption_key = get_setting_value('SYNC_encryption_key')
hub_url = get_setting_value('SYNC_hub_url')
node_name = get_setting_value('SYNC_node_name')
send_devices = get_setting_value('SYNC_devices')
pull_nodes = get_setting_value('SYNC_nodes')
# variables to determine operation mode
is_hub = False
is_node = False
# Check if api_token set
if not api_token:
mylog('verbose', [f'[{pluginName}] ⚠ ERROR api_token not defined - quitting.'])
@@ -63,23 +63,23 @@ def main():
# check if this is a hub or a node
if len(hub_url) > 0 and (send_devices or plugins_to_sync):
is_node = True
mylog('verbose', [f'[{pluginName}] Mode 1: PUSH (NODE) - This is a NODE as SYNC_hub_url, SYNC_devices or SYNC_plugins are set'])
if len(pull_nodes) > 0:
is_node = True
mylog('verbose', [f'[{pluginName}] Mode 1: PUSH (NODE) - This is a NODE as SYNC_hub_url, SYNC_devices or SYNC_plugins are set'])
if len(pull_nodes) > 0:
is_hub = True
mylog('verbose', [f'[{pluginName}] Mode 2: PULL (HUB) - This is a HUB as SYNC_nodes is set'])
mylog('verbose', [f'[{pluginName}] Mode 2: PULL (HUB) - This is a HUB as SYNC_nodes is set'])
# Mode 1: PUSH/SEND (NODE)
# Mode 1: PUSH/SEND (NODE)
if is_node:
# PUSHING/SENDING Plugins
# PUSHING/SENDING Plugins
# Get all plugin configurations
all_plugins = get_plugins_configs(False)
mylog('verbose', [f'[{pluginName}] plugins_to_sync {plugins_to_sync}'])
for plugin in all_plugins:
pref = plugin["unique_prefix"]
pref = plugin["unique_prefix"]
index = 0
if pref in plugins_to_sync:
@@ -100,9 +100,8 @@ def main():
send_data(api_token, file_content, encryption_key, file_path, node_name, pref, hub_url)
else:
mylog('verbose', [f'[{pluginName}] {file_path} not found'])
mylog('verbose', [f'[{pluginName}] {file_path} not found'])
# PUSHING/SENDING devices
if send_devices:
@@ -117,27 +116,27 @@ def main():
mylog('verbose', [f'[{pluginName}] Sending file_content: "{file_content}"'])
send_data(api_token, file_content, encryption_key, file_path, node_name, pref, hub_url)
else:
mylog('verbose', [f'[{pluginName}] SYNC_hub_url not defined, skipping posting "Devices" data'])
mylog('verbose', [f'[{pluginName}] SYNC_hub_url not defined, skipping posting "Devices" data'])
else:
mylog('verbose', [f'[{pluginName}] SYNC_hub_url not defined, skipping posting "Plugins" and "Devices" data'])
mylog('verbose', [f'[{pluginName}] SYNC_hub_url not defined, skipping posting "Plugins" and "Devices" data'])
# Mode 2: PULL/GET (HUB)
# PULLING DEVICES
# PULLING DEVICES
file_prefix = 'last_result'
# pull data from nodes if specified
if is_hub:
for node_url in pull_nodes:
response_json = get_data(api_token, node_url)
# Extract node_name and base64 data
node_name = response_json.get('node_name', 'unknown_node')
data_base64 = response_json.get('data_base64', '')
# Decode base64 data
decoded_data = base64.b64decode(data_base64)
# Create log file name using node name
log_file_name = f'{file_prefix}.{node_name}.log'
@@ -148,18 +147,17 @@ def main():
message = f'[{pluginName}] Device data from node "{node_name}" written to {log_file_name}'
mylog('verbose', [message])
if lggr.isAbove('verbose'):
write_notification(message, 'info', timeNowDB())
write_notification(message, 'info', timeNowDB())
# Process any received data for the Device DB table (ONLY JSON)
# Create the file path
# Get all "last_result" files from the sync folder, decode, rename them, and get the list of files
files_to_process = decode_and_rename_files(LOG_PATH, file_prefix)
if len(files_to_process) > 0:
mylog('verbose', [f'[{pluginName}] Mode 3: RECEIVE (HUB) - This is a HUB as received data found'])
mylog('verbose', [f'[{pluginName}] Mode 3: RECEIVE (HUB) - This is a HUB as received data found'])
# Connect to the App database
conn = sqlite3.connect(fullDbPath)
@@ -176,24 +174,24 @@ def main():
# only process received .log files, skipping the one logging the progress of this plugin
if file_name != 'last_result.log':
mylog('verbose', [f'[{pluginName}] Processing: "{file_name}"'])
# make sure the file has the correct name (e.g. last_result.encoded.Node_1.1.log) to skip any other plugin files
if len(file_name.split('.')) > 2:
# Extract node name from either last_result.decoded.Node_1.1.log or last_result.Node_1.log
parts = file_name.split('.')
# If decoded/encoded file, node name is at index 2; otherwise at index 1
syncHubNodeName = parts[2] if 'decoded' in file_name or 'encoded' in file_name else parts[1]
syncHubNodeName = parts[2] if 'decoded' in file_name or 'encoded' in file_name else parts[1]
file_path = f"{LOG_PATH}/{file_name}"
with open(file_path, 'r') as f:
data = json.load(f)
for device in data['data']:
if device['devMac'] not in unique_mac_addresses:
device['devSyncHubNode'] = syncHubNodeName
unique_mac_addresses.add(device['devMac'])
device_data.append(device)
device_data.append(device)
# Rename the file to "processed_" + current name
new_file_name = f"processed_{file_name}"
new_file_path = os.path.join(LOG_PATH, new_file_name)
@@ -209,7 +207,6 @@ def main():
placeholders = ', '.join('?' for _ in unique_mac_addresses)
cursor.execute(f'SELECT devMac FROM Devices WHERE devMac IN ({placeholders})', tuple(unique_mac_addresses))
existing_mac_addresses = set(row[0] for row in cursor.fetchall())
# insert devices into the last_result.log and thus CurrentScan table to manage state
for device in device_data:
@@ -228,7 +225,7 @@ def main():
# Filter out existing devices
new_devices = [device for device in device_data if device['devMac'] not in existing_mac_addresses]
# Remove 'rowid' key if it exists
# Remove 'rowid' key if it exists
for device in new_devices:
device.pop('rowid', None)
device.pop('devStatus', None)
@@ -257,7 +254,6 @@ def main():
mylog('verbose', [message])
write_notification(message, 'info', timeNowDB())
# Commit and close the connection
conn.commit()
@@ -268,6 +264,7 @@ def main():
return 0
# ------------------------------------------------------------------
# Data retrieval methods
api_endpoints = [
@@ -275,6 +272,7 @@ api_endpoints = [
"/plugins/sync/hub.php" # Legacy PHP endpoint
]
# send data to the HUB
def send_data(api_token, file_content, encryption_key, file_path, node_name, pref, hub_url):
"""Send encrypted data to HUB, preferring /sync endpoint and falling back to PHP version."""
@@ -345,6 +343,5 @@ def get_data(api_token, node_url):
return ""
if __name__ == '__main__':
main()

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python
# !/usr/bin/env python
import os
import sys
@@ -10,12 +10,11 @@ from unifi_sm_api.api import SiteManagerAPI
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from plugin_helper import Plugin_Objects, decode_settings_base64
from logger import mylog, Logger
from const import logPath
from helper import get_setting_value
import conf
from plugin_helper import Plugin_Objects, decode_settings_base64 # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from const import logPath # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
import conf # noqa: E402 [flake8 lint suppression]
# Make sure the TIMEZONE for logging is correct
conf.tz = timezone(get_setting_value('TIMEZONE'))
@@ -35,13 +34,13 @@ plugin_objects = Plugin_Objects(RESULT_FILE)
def main():
mylog('verbose', [f'[{pluginName}] In script'])
mylog('verbose', [f'[{pluginName}] In script'])
# Retrieve configuration settings
unifi_sites_configs = get_setting_value('UNIFIAPI_sites')
mylog('verbose', [f'[{pluginName}] number of unifi_sites_configs: {len(unifi_sites_configs)}'])
for site_config in unifi_sites_configs:
siteDict = decode_settings_base64(site_config)
@@ -50,11 +49,11 @@ def main():
mylog('none', [f'[{pluginName}] Connecting to: {siteDict["UNIFIAPI_site_name"]}'])
api = SiteManagerAPI(
api_key=siteDict["UNIFIAPI_api_key"],
version=siteDict["UNIFIAPI_api_version"],
base_url=siteDict["UNIFIAPI_base_url"],
verify_ssl=siteDict["UNIFIAPI_verify_ssl"]
)
api_key=siteDict["UNIFIAPI_api_key"],
version=siteDict["UNIFIAPI_api_version"],
base_url=siteDict["UNIFIAPI_base_url"],
verify_ssl=siteDict["UNIFIAPI_verify_ssl"]
)
sites_resp = api.get_sites()
sites = sites_resp.get("data", [])
@@ -67,18 +66,18 @@ def main():
# Process the data into native application tables
if len(device_data) > 0:
# insert devices into the last_result.log
# insert devices into the last_result.log
for device in device_data:
plugin_objects.add_object(
primaryId = device['dev_mac'], # mac
secondaryId = device['dev_ip'], # IP
watched1 = device['dev_name'], # name
watched2 = device['dev_type'], # device_type (AP/Switch etc)
watched3 = device['dev_connected'], # connectedAt or empty
watched4 = device['dev_parent_mac'],# parent_mac or "Internet"
extra = '',
foreignKey = device['dev_mac']
)
plugin_objects.add_object(
primaryId = device['dev_mac'], # mac
secondaryId = device['dev_ip'], # IP
watched1 = device['dev_name'], # name
watched2 = device['dev_type'], # device_type (AP/Switch etc)
watched3 = device['dev_connected'], # connectedAt or empty
watched4 = device['dev_parent_mac'], # parent_mac or "Internet"
extra = '',
foreignKey = device['dev_mac']
)
mylog('verbose', [f'[{pluginName}] New entries: "{len(device_data)}"'])
@@ -87,6 +86,7 @@ def main():
return 0
# retrieve data
def get_device_data(site, api):
device_data = []
@@ -146,8 +146,8 @@ def get_device_data(site, api):
dev_parent_mac = resolve_parent_mac(uplinkDeviceId)
device_data.append({
"dev_mac": dev_mac,
"dev_ip": dev_ip,
"dev_mac": dev_mac,
"dev_ip": dev_ip,
"dev_name": dev_name,
"dev_type": dev_type,
"dev_connected": dev_connected,

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python
# !/usr/bin/env python
# Inspired by https://github.com/stevehoek/Pi.Alert
from __future__ import unicode_literals
@@ -14,12 +14,12 @@ from pyunifi.controller import Controller
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from plugin_helper import Plugin_Objects, rmBadChars, is_typical_router_ip, is_mac
from logger import mylog, Logger
from helper import get_setting_value, normalize_string
import conf
from pytz import timezone
from const import logPath
from plugin_helper import Plugin_Objects, rmBadChars, is_typical_router_ip, is_mac # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value, normalize_string # noqa: E402 [flake8 lint suppression]
import conf # noqa: E402 [flake8 lint suppression]
from pytz import timezone # noqa: E402 [flake8 lint suppression]
from const import logPath # noqa: E402 [flake8 lint suppression]
# Make sure the TIMEZONE for logging is correct
conf.tz = timezone(get_setting_value('TIMEZONE'))
@@ -37,21 +37,16 @@ LOCK_FILE = os.path.join(LOG_PATH, f'full_run.{pluginName}.lock')
urllib3.disable_warnings(InsecureRequestWarning)
# Workflow
def main():
mylog('verbose', [f'[{pluginName}] In script'])
# init global variables
global UNIFI_USERNAME, UNIFI_PASSWORD, UNIFI_HOST, UNIFI_SITES, PORT, VERIFYSSL, VERSION, FULL_IMPORT
# parse output
plugin_objects = Plugin_Objects(RESULT_FILE)
plugin_objects = Plugin_Objects(RESULT_FILE)
UNIFI_USERNAME = get_setting_value("UNFIMP_username")
UNIFI_PASSWORD = get_setting_value("UNFIMP_password")
UNIFI_HOST = get_setting_value("UNFIMP_host")
@@ -64,12 +59,11 @@ def main():
plugin_objects = get_entries(plugin_objects)
plugin_objects.write_result_file()
mylog('verbose', [f'[{pluginName}] Scan finished, found {len(plugin_objects)} devices'])
# .............................................
# .............................................
def get_entries(plugin_objects: Plugin_Objects) -> Plugin_Objects:
global VERIFYSSL
@@ -79,27 +73,26 @@ def get_entries(plugin_objects: Plugin_Objects) -> Plugin_Objects:
mylog('verbose', [f'[{pluginName}] sites: {UNIFI_SITES}'])
if (VERIFYSSL.upper() == "TRUE"):
VERIFYSSL = True
else:
VERIFYSSL = False
# mylog('verbose', [f'[{pluginName}] sites: {sites}'])
for site in UNIFI_SITES:
mylog('verbose', [f'[{pluginName}] site: {site}'])
c = Controller(
UNIFI_HOST,
UNIFI_USERNAME,
UNIFI_PASSWORD,
port=PORT,
version=VERSION,
ssl_verify=VERIFYSSL,
UNIFI_HOST,
UNIFI_USERNAME,
UNIFI_PASSWORD,
port=PORT,
version=VERSION,
ssl_verify=VERIFYSSL,
site_id=site)
online_macs = set()
processed_macs = []
@@ -114,7 +107,7 @@ def get_entries(plugin_objects: Plugin_Objects) -> Plugin_Objects:
plugin_objects=plugin_objects,
device_label='client',
device_vendor="",
force_import=True # These are online clients, force import
force_import=True # These are online clients, force import
)
mylog('verbose', [f'[{pluginName}] Found {len(plugin_objects)} Online Devices'])
@@ -154,11 +147,9 @@ def get_entries(plugin_objects: Plugin_Objects) -> Plugin_Objects:
mylog('verbose', [f'[{pluginName}] Found {len(plugin_objects)} Users'])
mylog('verbose', [f'[{pluginName}] check if Lock file needs to be modified'])
set_lock_file_value(FULL_IMPORT, lock_file_value)
mylog('verbose', [f'[{pluginName}] Found {len(plugin_objects)} Clients overall'])
return plugin_objects
@@ -173,19 +164,19 @@ def collect_details(device_type, devices, online_macs, processed_macs, plugin_ob
name = get_name(get_unifi_val(device, 'name'), get_unifi_val(device, 'hostname'))
ipTmp = get_ip(get_unifi_val(device, 'lan_ip'), get_unifi_val(device, 'last_ip'), get_unifi_val(device, 'fixed_ip'), get_unifi_val(device, 'ip'))
macTmp = device['mac']
# continue only if valid MAC address
if is_mac(macTmp):
status = 1 if macTmp in online_macs else device.get('state', 0)
deviceType = device_type.get(device.get('type'), '')
parentMac = get_parent_mac(get_unifi_val(device, 'uplink_mac'), get_unifi_val(device, 'ap_mac'), get_unifi_val(device, 'sw_mac'))
# override parent MAC if this is a router
if parentMac == 'null' and is_typical_router_ip(ipTmp):
parentMac = 'Internet'
parentMac = 'Internet'
# Add object only if not processed
if macTmp not in processed_macs and ( status == 1 or force_import is True ):
if macTmp not in processed_macs and (status == 1 or force_import is True):
plugin_objects.add_object(
primaryId=macTmp,
secondaryId=ipTmp,
@@ -203,7 +194,8 @@ def collect_details(device_type, devices, online_macs, processed_macs, plugin_ob
processed_macs.append(macTmp)
else:
mylog('verbose', [f'[{pluginName}] Skipping, not a valid MAC address: {macTmp}'])
# -----------------------------------------------------------------------------
def get_unifi_val(obj, key, default='null'):
if isinstance(obj, dict):
@@ -212,9 +204,9 @@ def get_unifi_val(obj, key, default='null'):
for k, v in obj.items():
if isinstance(v, dict):
result = get_unifi_val(v, key, default)
if result not in ['','None', None, 'null']:
if result not in ['', 'None', None, 'null']:
return result
mylog('trace', [f'[{pluginName}] Value not found for key "{key}" in obj "{json.dumps(obj)}"'])
return default
@@ -226,6 +218,7 @@ def get_name(*names: str) -> str:
return rmBadChars(name)
return 'null'
# -----------------------------------------------------------------------------
def get_parent_mac(*macs: str) -> str:
for mac in macs:
@@ -233,6 +226,7 @@ def get_parent_mac(*macs: str) -> str:
return mac
return 'null'
# -----------------------------------------------------------------------------
def get_port(*ports: str) -> str:
for port in ports:
@@ -240,12 +234,6 @@ def get_port(*ports: str) -> str:
return port
return 'null'
# -----------------------------------------------------------------------------
def get_port(*macs: str) -> str:
for mac in macs:
if mac and mac != 'null':
return mac
return 'null'
# -----------------------------------------------------------------------------
def get_ip(*ips: str) -> str:
@@ -271,7 +259,7 @@ def set_lock_file_value(config_value: str, lock_file_value: bool) -> None:
mylog('verbose', [f'[{pluginName}] Setting lock value for "full import" to {out}'])
with open(LOCK_FILE, 'w') as lock_file:
lock_file.write(str(out))
lock_file.write(str(out))
# -----------------------------------------------------------------------------
@@ -286,15 +274,16 @@ def read_lock_file() -> bool:
# -----------------------------------------------------------------------------
def check_full_run_state(config_value: str, lock_file_value: bool) -> bool:
if config_value == 'always' or (config_value == 'once' and lock_file_value == False):
if config_value == 'always' or (config_value == 'once' and lock_file_value is False):
mylog('verbose', [f'[{pluginName}] Full import needs to be done: config_value: {config_value} and lock_file_value: {lock_file_value}'])
return True
else:
mylog('verbose', [f'[{pluginName}] Full import NOT needed: config_value: {config_value} and lock_file_value: {lock_file_value}'])
return False
#===============================================================================
# ===============================================================================
# BEGIN
#===============================================================================
# ===============================================================================
if __name__ == '__main__':
main()

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python
# !/usr/bin/env python
import os
import sys
@@ -9,13 +9,13 @@ import sqlite3
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from plugin_helper import Plugin_Objects, handleEmpty
from logger import mylog, Logger
from helper import get_setting_value
from const import logPath, applicationPath, fullDbPath
from scan.device_handling import query_MAC_vendor
import conf
from pytz import timezone
from plugin_helper import Plugin_Objects, handleEmpty # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
from const import logPath, applicationPath, fullDbPath # noqa: E402 [flake8 lint suppression]
from scan.device_handling import query_MAC_vendor # noqa: E402 [flake8 lint suppression]
import conf # noqa: E402 [flake8 lint suppression]
from pytz import timezone # noqa: E402 [flake8 lint suppression]
# Make sure the TIMEZONE for logging is correct
conf.tz = timezone(get_setting_value('TIMEZONE'))
@@ -25,17 +25,17 @@ Logger(get_setting_value('LOG_LEVEL'))
pluginName = 'VNDRPDT'
LOG_PATH = logPath + '/plugins'
LOG_FILE = os.path.join(LOG_PATH, f'script.{pluginName}.log')
RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
def main():
mylog('verbose', ['[VNDRPDT] In script'])
def main():
# Get newest DB
update_vendor_database()
mylog('verbose', ['[VNDRPDT] In script'])
# Get newest DB
update_vendor_database()
# Resolve missing vendors
plugin_objects = Plugin_Objects(RESULT_FILE)
@@ -43,67 +43,67 @@ def main():
plugin_objects = update_vendors(fullDbPath, plugin_objects)
plugin_objects.write_result_file()
mylog('verbose', ['[VNDRPDT] Update complete'])
mylog('verbose', ['[VNDRPDT] Update complete'])
return 0
#===============================================================================
# ===============================================================================
# Update device vendors database
#===============================================================================
# ===============================================================================
def update_vendor_database():
# Update vendors DB (iab oui)
mylog('verbose', [' Updating vendors DB (iab & oui)'])
mylog('verbose', [' Updating vendors DB (iab & oui)'])
update_args = ['sh', applicationPath + '/services/update_vendors.sh']
# Execute command
# Execute command
try:
# try running a subprocess safely
update_output = subprocess.check_output (update_args)
subprocess.check_output(update_args)
except subprocess.CalledProcessError as e:
# An error occurred, handle it
mylog('verbose', [' FAILED: Updating vendors DB, set LOG_LEVEL=debug for more info'])
mylog('verbose', [e.output])
mylog('verbose', [' FAILED: Updating vendors DB, set LOG_LEVEL=debug for more info'])
mylog('verbose', [e.output])
# ------------------------------------------------------------------------------
# resolve missing vendors
def update_vendors (dbPath, plugin_objects):
def update_vendors(dbPath, plugin_objects):
# Connect to the App SQLite database
conn = sqlite3.connect(dbPath)
sql = conn.cursor()
# Initialize variables
recordsToUpdate = []
ignored = 0
notFound = 0
mylog('verbose', [' Searching devices vendor'])
mylog('verbose', [' Searching devices vendor'])
# Get devices without a vendor
sql.execute ("""SELECT
devMac,
devLastIP,
devName,
devVendor
sql.execute("""SELECT
devMac,
devLastIP,
devName,
devVendor
FROM Devices
WHERE devVendor = '(unknown)'
OR devVendor = '(Unknown)'
WHERE devVendor = '(unknown)'
OR devVendor = '(Unknown)'
OR devVendor = ''
OR devVendor IS NULL
""")
devices = sql.fetchall()
conn.commit()
devices = sql.fetchall()
conn.commit()
# Close the database connection
conn.close()
conn.close()
# All devices loop
for device in devices:
# Search vendor in HW Vendors DB
vendor = query_MAC_vendor (device[0])
vendor = query_MAC_vendor(device[0])
if vendor == -1 :
notFound += 1
elif vendor == -2 :
@@ -112,27 +112,25 @@ def update_vendors (dbPath, plugin_objects):
plugin_objects.add_object(
primaryId = handleEmpty(device[0]), # MAC (Device Name)
secondaryId = handleEmpty(device[1]), # IP Address (always 0.0.0.0)
watched1 = handleEmpty(vendor),
watched1 = handleEmpty(vendor),
watched2 = handleEmpty(device[2]), # Device name
watched3 = "",
watched4 = "",
extra = "",
foreignKey = handleEmpty(device[0])
)
# Print log
extra = "",
foreignKey = handleEmpty(device[0])
)
# Print log
mylog('verbose', [" Devices Ignored : ", ignored])
mylog('verbose', [" Devices with missing vendor : ", len(devices)])
mylog('verbose', [" Vendors Not Found : ", notFound])
mylog('verbose', [" Vendors updated : ", len(plugin_objects) ])
mylog('verbose', [" Vendors updated : ", len(plugin_objects)])
return plugin_objects
#===============================================================================
# ===============================================================================
# BEGIN
#===============================================================================
# ===============================================================================
if __name__ == '__main__':
main()
main()

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python
# !/usr/bin/env python
import os
import sys
@@ -9,13 +9,13 @@ from wakeonlan import send_magic_packet
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from plugin_helper import Plugin_Objects
from logger import mylog, Logger
from const import logPath
from helper import get_setting_value
from database import DB
from models.device_instance import DeviceInstance
import conf
from plugin_helper import Plugin_Objects # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from const import logPath # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
from database import DB # noqa: E402 [flake8 lint suppression]
from models.device_instance import DeviceInstance # noqa: E402 [flake8 lint suppression]
import conf # noqa: E402 [flake8 lint suppression]
# Make sure the TIMEZONE for logging is correct
conf.tz = timezone(get_setting_value('TIMEZONE'))
@@ -34,9 +34,8 @@ RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
plugin_objects = Plugin_Objects(RESULT_FILE)
def main():
mylog('none', [f'[{pluginName}] In script'])
mylog('none', [f'[{pluginName}] In script'])
# Retrieve configuration settings
broadcast_ips = get_setting_value('WOL_broadcast_ips')
@@ -58,7 +57,7 @@ def main():
devices_to_wake = device_handler.getOffline()
elif 'down' in devices_to_wake:
devices_to_wake = device_handler.getDown()
else:
@@ -89,15 +88,16 @@ def main():
# log result
plugin_objects.write_result_file()
else:
mylog('none', [f'[{pluginName}] No devices to wake'])
mylog('none', [f'[{pluginName}] No devices to wake'])
mylog('none', [f'[{pluginName}] Script finished'])
mylog('none', [f'[{pluginName}] Script finished'])
return 0
# wake
def execute(port, ip, mac, name):
result = 'null'
try:
# Send the magic packet to wake up the device
@@ -105,7 +105,7 @@ def execute(port, ip, mac, name):
mylog('verbose', [f'[{pluginName}] Magic packet sent to {mac} ({name})'])
result = 'success'
except Exception as e:
result = str(e)
mylog('verbose', [f'[{pluginName}] Failed to send magic packet to {mac} ({name}): {e}'])
@@ -113,5 +113,6 @@ def execute(port, ip, mac, name):
# Return the data result
return result
if __name__ == '__main__':
main()
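
# The plugin ultimately delegates to wakeonlan's send_magic_packet; a quick
# usage sketch (MAC, broadcast address and port below are placeholders):

from wakeonlan import send_magic_packet

# broadcast a magic packet for one MAC; library defaults are 255.255.255.255:9
send_magic_packet('aa:bb:cc:dd:ee:ff', ip_address='192.168.1.255', port=9)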

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python
# !/usr/bin/env python
# Based on the work of https://github.com/leiweibau/Pi.Alert
import requests
@@ -12,12 +12,12 @@ from urllib3.exceptions import InsecureRequestWarning
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from plugin_helper import Plugin_Objects
from const import logPath
from helper import get_setting_value
import conf
from pytz import timezone
from logger import mylog, Logger
from plugin_helper import Plugin_Objects # noqa: E402 [flake8 lint suppression]
from const import logPath # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
import conf # noqa: E402 [flake8 lint suppression]
from pytz import timezone # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
# Make sure the TIMEZONE for logging is correct
conf.tz = timezone(get_setting_value('TIMEZONE'))
@@ -30,15 +30,14 @@ pluginName = 'WEBMON'
LOG_PATH = logPath + '/plugins'
RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
mylog('verbose', [f'[{pluginName}] In script'])
def main():
values = get_setting_value('WEBMON_urls_to_check')
mylog('verbose', [f'[{pluginName}] Checking URLs: {values}'])
if len(values) > 0:
plugin_objects = Plugin_Objects(RESULT_FILE)
@@ -48,12 +47,13 @@ def main():
else:
return
def check_services_health(site):
mylog('verbose', [f'[{pluginName}] Checking {site}'])
urllib3.disable_warnings(InsecureRequestWarning)
try:
resp = requests.get(site, verify=False, timeout=get_setting_value('WEBMON_RUN_TIMEOUT'), headers={"User-Agent": "NetAlertX"})
latency = resp.elapsed.total_seconds()
@@ -79,12 +79,13 @@ def check_services_health(site):
return status, latency
def service_monitoring(urls, plugin_objects):
for site in urls:
status, latency = check_services_health(site)
plugin_objects.add_object(
primaryId=site,
secondaryId='null',
secondaryId='null',
watched1=status,
watched2=latency,
watched3='null',
@@ -94,7 +95,6 @@ def service_monitoring(urls, plugin_objects):
)
return plugin_objects
if __name__ == '__main__':
sys.exit(main())
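
# The health check above leans on requests' elapsed attribute for latency; a
# minimal sketch, assuming a hypothetical URL and a fixed timeout:

import requests
import urllib3
from urllib3.exceptions import InsecureRequestWarning

urllib3.disable_warnings(InsecureRequestWarning)

resp = requests.get('https://example.com', verify=False, timeout=10,
                    headers={'User-Agent': 'NetAlertX'})
# elapsed measures the time from sending the request to receiving the headers
print(resp.status_code, resp.elapsed.total_seconds())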

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python3
# !/usr/bin/env python3
import os
import sys

View File

@@ -1,5 +1,5 @@
#!/usr/bin/env python3
# !/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
NetAlertX-New-Devices-Checkmk-Script
@@ -19,11 +19,12 @@ import subprocess
import json
import os
def check_new_devices():
# Get API path from environment variable, fallback to /tmp/api
api_path = os.environ.get('NETALERTX_API', '/tmp/api')
table_devices_path = f'{api_path}/table_devices.json'
try:
# Fetch the JSON file from the Docker container
result = subprocess.run(
@@ -73,6 +74,6 @@ def check_new_devices():
)
print(f"1 NetAlertX_New_Devices - WARNING - Found {len(new_devices)} new device(s): {device_list_str}")
if __name__ == "__main__":
check_new_devices()
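
# The print statements follow Checkmk's local-check line format,
# "<state> <service_name> <perfdata> <summary>" with states 0/1/2/3 for
# OK/WARNING/CRITICAL/UNKNOWN; a minimal sketch of an OK line:

# "-" in the perfdata slot means the check reports no metrics
print("0 NetAlertX_New_Devices - OK - No new devices found")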

View File

@@ -1,8 +1,8 @@
#!/usr/bin/env python3
# !/usr/bin/env python3
import subprocess
import sys
import os
def run_sqlite_command(command):
# Use environment variable with fallback
db_path = os.path.join(
@@ -19,18 +19,19 @@ def run_sqlite_command(command):
print(f"Error executing command: {e}")
return None
def check_and_clean_device():
while True:
print("\nDevice Cleanup Tool")
print("1. Check/Clean by MAC address")
print("2. Check/Clean by IP address")
print("3. Exit")
choice = input("\nSelect option (1-3): ")
if choice == "1":
mac = input("Enter MAC address (format: xx:xx:xx:xx:xx:xx): ").lower()
# Check all tables for MAC
tables_checks = [
f"SELECT 'Events' as source, * FROM Events WHERE eve_MAC='{mac}'",
@@ -40,14 +41,14 @@ def check_and_clean_device():
f"SELECT 'AppEvents' as source, * FROM AppEvents WHERE ObjectPrimaryID LIKE '%{mac}%' OR ObjectSecondaryID LIKE '%{mac}%'",
f"SELECT 'Plugins_Objects' as source, * FROM Plugins_Objects WHERE Object_PrimaryID LIKE '%{mac}%'"
]
found = False
for check in tables_checks:
result = run_sqlite_command(check)
if result and result.strip():
found = True
print(f"\nFound entries:\n{result}")
if found:
confirm = input("\nWould you like to clean these entries? (y/n): ")
if confirm.lower() == 'y':
@@ -60,16 +61,16 @@ def check_and_clean_device():
f"DELETE FROM AppEvents WHERE ObjectPrimaryID LIKE '%{mac}%' OR ObjectSecondaryID LIKE '%{mac}%'",
f"DELETE FROM Plugins_Objects WHERE Object_PrimaryID LIKE '%{mac}%'"
]
for delete in deletes:
run_sqlite_command(delete)
print("Cleanup completed!")
else:
print("\nNo entries found for this MAC address")
elif choice == "2":
ip = input("Enter IP address (format: xxx.xxx.xxx.xxx): ")
# Check all tables for IP
tables_checks = [
f"SELECT 'Events' as source, * FROM Events WHERE eve_IP='{ip}'",
@@ -79,14 +80,14 @@ def check_and_clean_device():
f"SELECT 'AppEvents' as source, * FROM AppEvents WHERE ObjectSecondaryID LIKE '%{ip}%'",
f"SELECT 'Plugins_Objects' as source, * FROM Plugins_Objects WHERE Object_SecondaryID LIKE '%{ip}%'"
]
found = False
for check in tables_checks:
result = run_sqlite_command(check)
if result and result.strip():
found = True
print(f"\nFound entries:\n{result}")
if found:
confirm = input("\nWould you like to clean these entries? (y/n): ")
if confirm.lower() == 'y':
@@ -99,19 +100,20 @@ def check_and_clean_device():
f"DELETE FROM AppEvents WHERE ObjectSecondaryID LIKE '%{ip}%'",
f"DELETE FROM Plugins_Objects WHERE Object_SecondaryID LIKE '%{ip}%'"
]
for delete in deletes:
run_sqlite_command(delete)
print("Cleanup completed!")
else:
print("\nNo entries found for this IP address")
elif choice == "3":
print("\nExiting...")
break
else:
print("\nInvalid option, please try again")
if __name__ == "__main__":
check_and_clean_device()
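
# The tool shells out to the sqlite3 CLI with interpolated input, which is
# tolerable for an interactive maintenance script; for completeness, a
# parameterized variant via Python's sqlite3 module (database path and MAC
# below are placeholders):

import sqlite3

conn = sqlite3.connect('/app/db/app.db')  # placeholder path
cur = conn.cursor()
# '?' placeholders let sqlite3 bind the value safely instead of string formatting
cur.execute("DELETE FROM Events WHERE eve_MAC = ?", ('aa:bb:cc:dd:ee:ff',))
conn.commit()
conn.close()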

View File

@@ -1,6 +1,5 @@
#!/usr/bin/env python3
import paramiko
import re
from datetime import datetime
import argparse
import sys
@@ -8,6 +7,9 @@ from pathlib import Path
import time
import logging
logger = None
def setup_logging(debug=False):
"""Configure logging based on debug flag."""
level = logging.DEBUG if debug else logging.INFO
@@ -18,6 +20,7 @@ def setup_logging(debug=False):
)
return logging.getLogger(__name__)
def parse_timestamp(date_str):
"""Convert OPNsense timestamp to Unix epoch time."""
try:
@@ -27,7 +30,7 @@ def parse_timestamp(date_str):
dt = datetime.strptime(clean_date, '%Y/%m/%d %H:%M:%S')
return int(dt.timestamp())
except Exception as e:
logger.error(f"Failed to parse timestamp: {date_str}")
logger.error(f"Failed to parse timestamp: {date_str} ({e})")
return None
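
# A quick usage sketch of the strptime format handled above (the sample
# timestamp is made up):

from datetime import datetime

dt = datetime.strptime('2025/01/31 08:15:00', '%Y/%m/%d %H:%M:%S')
print(int(dt.timestamp()))  # Unix epoch seconds (local-time interpretation)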
@@ -39,8 +42,14 @@ def get_lease_file(hostname, username, password=None, key_filename=None, port=22
try:
logger.debug(f"Attempting to connect to {hostname}:{port} as {username}")
ssh.connect(hostname, port=port, username=username,
password=password, key_filename=key_filename)
ssh.connect(
hostname,
port=port,
username=username,
password=password,
key_filename=key_filename
)
# Get an interactive shell session
logger.debug("Opening interactive SSH channel")
@@ -75,10 +84,23 @@ def get_lease_file(hostname, username, password=None, key_filename=None, port=22
# Clean up the output by removing the command echo and shell prompts
lines = output.split('\n')
# Remove first line (command echo) and any lines containing shell prompts
cleaned_lines = [line for line in lines
if not line.strip().startswith(command.strip())
and not line.strip().endswith('> ')
and not line.strip().endswith('# ')]
# cleaned_lines = [line for line in lines
# if not line.strip().startswith(command.strip()) and not line.strip().endswith('> ') and not line.strip().endswith('# ')]
cmd = command.strip()
cleaned_lines = []
for line in lines:
stripped = line.strip()
if stripped.startswith(cmd):
continue
if stripped.endswith('> '):
continue
if stripped.endswith('# '):
continue
cleaned_lines.append(line)
cleaned_output = '\n'.join(cleaned_lines)
logger.debug(f"Final cleaned output length: {len(cleaned_output)} characters")
@@ -156,9 +178,7 @@ def parse_lease_file(lease_content):
# Filter only active leases
active_leases = [lease for lease in leases
if lease.get('state') == 'active'
and 'mac' in lease
and 'ip' in lease]
if lease.get('state') == 'active' and 'mac' in lease and 'ip' in lease]
logger.debug(f"Found {len(active_leases)} active leases out of {len(leases)} total leases")
logger.debug("Active leases:")
@@ -206,6 +226,7 @@ def convert_to_dnsmasq(leases):
return dnsmasq_lines
def main():
parser = argparse.ArgumentParser(description='Convert OPNsense DHCP leases to dnsmasq format')
parser.add_argument('--host', required=True, help='OPNsense hostname or IP')
@@ -219,6 +240,7 @@ def main():
args = parser.parse_args()
# Setup logging
global logger
logger = setup_logging(args.debug)
try:
@@ -255,5 +277,6 @@ def main():
logger.error(f"Error: {str(e)}")
sys.exit(1)
if __name__ == '__main__':
main()
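
# For context, dnsmasq's lease file uses one whitespace-separated record per
# lease; a sketch of the target line shape (all field values are made up):

# <expiry-epoch> <MAC> <IP> <hostname> <client-id>
print('1738311300 aa:bb:cc:dd:ee:ff 192.168.1.50 mylaptop *')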

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python
# !/usr/bin/env python
#
# -------------------------------------------------------------------------------
# NetAlertX v2.70 / 2021-02-01
@@ -22,9 +22,9 @@ from pathlib import Path
# Register NetAlertX modules
import conf
from const import *
from logger import mylog
from helper import filePermissions
from const import fullConfPath, sql_new_devices
from logger import mylog
from helper import filePermissions
from utils.datetime_utils import timeNowTZ
from app_state import updateState
from api import update_api
@@ -48,12 +48,12 @@ main structure of NetAlertX
Initialise All
Rename old settings
start Loop forever
initialise loop
initialise loop
(re)import config
(re)import plugin config
run plugins (once)
run frontend events
update API
update API
run plugins (scheduled)
processing scan results
run plugins (after Scan)
@@ -111,7 +111,7 @@ def main():
loop_start_time = conf.loop_start_time # TODO fix
# Handle plugins executed ONCE
if conf.plugins_once_run == False:
if conf.plugins_once_run is False:
pm.run_plugin_scripts("once")
conf.plugins_once_run = True
@@ -146,7 +146,7 @@ def main():
processScan = updateState("Check scan").processScan
mylog("debug", [f"[MAIN] processScan: {processScan}"])
if processScan == True:
if processScan is True:
mylog("debug", "[MAIN] start processing scan results")
process_scan(db)
updateState("Scan processed", None, None, None, None, False)

View File

@@ -1,3 +1,4 @@
# !/usr/bin/env python
import json
import time
import threading
@@ -145,8 +146,7 @@ class api_endpoint_class:
self.needsUpdate = True
# Only update changeDetectedWhen if it hasn't been set recently
if not self.changeDetectedWhen or current_time > (
self.changeDetectedWhen
+ datetime.timedelta(seconds=self.debounce_interval)
self.changeDetectedWhen + datetime.timedelta(seconds=self.debounce_interval)
):
self.changeDetectedWhen = (
current_time # Set timestamp for change detection
@@ -164,8 +164,7 @@ class api_endpoint_class:
self.needsUpdate = True
# Only update changeDetectedWhen if it hasn't been set recently
if not self.changeDetectedWhen or current_time > (
self.changeDetectedWhen
+ datetime.timedelta(seconds=self.debounce_interval)
self.changeDetectedWhen + datetime.timedelta(seconds=self.debounce_interval)
):
self.changeDetectedWhen = (
current_time # Initialize timestamp for new endpoint
@@ -180,17 +179,15 @@ class api_endpoint_class:
current_time = timeNowTZ()
# Debugging info to understand the issue
# mylog('debug', [f'[API] api_endpoint_class: {self.fileName} is_ad_hoc_user_event {self.is_ad_hoc_user_event} last_update_time={self.last_update_time}, debounce time={self.last_update_time + datetime.timedelta(seconds=self.debounce_interval)}.'])
# mylog('debug', [f'[API] api_endpoint_class: {self.fileName} is_ad_hoc_user_event
# {self.is_ad_hoc_user_event} last_update_time={self.last_update_time},
# debounce time={self.last_update_time + datetime.timedelta(seconds=self.debounce_interval)}.'])
# Only attempt to write if the debounce time has passed
if forceUpdate == True or (
self.needsUpdate
and (
self.changeDetectedWhen is None
or current_time
> (
self.changeDetectedWhen
+ datetime.timedelta(seconds=self.debounce_interval)
if forceUpdate is True or (
self.needsUpdate and (
self.changeDetectedWhen is None or current_time > (
self.changeDetectedWhen + datetime.timedelta(seconds=self.debounce_interval)
)
)
):
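
# The condition above implements a debounce gate; the same logic as a small
# standalone sketch (the function and parameter names are illustrative):

import datetime

def should_write(needs_update, change_detected_when, debounce_seconds, now, force=False):
    # Write when forced, or when a pending change has no timestamp yet
    # or has aged past the debounce window.
    return force or (
        needs_update and (
            change_detected_when is None
            or now > change_detected_when + datetime.timedelta(seconds=debounce_seconds)
        )
    )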

View File

@@ -9,25 +9,68 @@ from flask_cors import CORS
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
sys.path.extend([f"{INSTALL_PATH}/server"])
from logger import mylog
from helper import get_setting_value
from db.db_helper import get_date_from_period
from app_state import updateState
from logger import mylog # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
from db.db_helper import get_date_from_period # noqa: E402 [flake8 lint suppression]
from app_state import updateState # noqa: E402 [flake8 lint suppression]
from .graphql_endpoint import devicesSchema
from .device_endpoint import get_device_data, set_device_data, delete_device, delete_device_events, reset_device_props, copy_device, update_device_column
from .devices_endpoint import get_all_devices, delete_unknown_devices, delete_all_with_empty_macs, delete_devices, export_devices, import_csv, devices_totals, devices_by_status
from .events_endpoint import delete_events, delete_events_older_than, get_events, create_event, get_events_totals
from .history_endpoint import delete_online_history
from .prometheus_endpoint import get_metric_stats
from .sessions_endpoint import get_sessions, delete_session, create_session, get_sessions_calendar, get_device_sessions, get_session_events
from .nettools_endpoint import wakeonlan, traceroute, speedtest, nslookup, nmap_scan, internet_info
from .dbquery_endpoint import read_query, write_query, update_query, delete_query
from .sync_endpoint import handle_sync_post, handle_sync_get
from .logs_endpoint import clean_log
from models.user_events_queue_instance import UserEventsQueueInstance
from messaging.in_app import write_notification, mark_all_notifications_read, delete_notifications, get_unread_notifications, delete_notification, mark_notification_as_read
from .graphql_endpoint import devicesSchema # noqa: E402 [flake8 lint suppression]
from .device_endpoint import ( # noqa: E402 [flake8 lint suppression]
get_device_data,
set_device_data,
delete_device,
delete_device_events,
reset_device_props,
copy_device,
update_device_column
)
from .devices_endpoint import ( # noqa: E402 [flake8 lint suppression]
get_all_devices,
delete_unknown_devices,
delete_all_with_empty_macs,
delete_devices,
export_devices,
import_csv,
devices_totals,
devices_by_status
)
from .events_endpoint import ( # noqa: E402 [flake8 lint suppression]
delete_events,
delete_events_older_than,
get_events,
create_event,
get_events_totals
)
from .history_endpoint import delete_online_history # noqa: E402 [flake8 lint suppression]
from .prometheus_endpoint import get_metric_stats # noqa: E402 [flake8 lint suppression]
from .sessions_endpoint import ( # noqa: E402 [flake8 lint suppression]
get_sessions,
delete_session,
create_session,
get_sessions_calendar,
get_device_sessions,
get_session_events
)
from .nettools_endpoint import ( # noqa: E402 [flake8 lint suppression]
wakeonlan,
traceroute,
speedtest,
nslookup,
nmap_scan,
internet_info
)
from .dbquery_endpoint import read_query, write_query, update_query, delete_query # noqa: E402 [flake8 lint suppression]
from .sync_endpoint import handle_sync_post, handle_sync_get # noqa: E402 [flake8 lint suppression]
from .logs_endpoint import clean_log # noqa: E402 [flake8 lint suppression]
from models.user_events_queue_instance import UserEventsQueueInstance # noqa: E402 [flake8 lint suppression]
from messaging.in_app import ( # noqa: E402 [flake8 lint suppression]
write_notification,
mark_all_notifications_read,
delete_notifications,
get_unread_notifications,
delete_notification,
mark_notification_as_read
)
# Flask application
app = Flask(__name__)
@@ -50,13 +93,14 @@ CORS(
allow_headers=["Authorization", "Content-Type"],
)
# -------------------------------------------------------------------
# Custom handler for 404 - Route not found
# -------------------------------------------------------------------
@app.errorhandler(404)
def not_found(error):
response = {
"success": False,
"success": False,
"error": "API route not found",
"message": f"The requested URL {error.description if hasattr(error, 'description') else ''} was not found on the server.",
}
@@ -200,7 +244,7 @@ def api_get_devices():
def api_delete_devices():
if not is_authorized():
return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403
macs = request.json.get("macs") if request.is_json else None
return delete_devices(macs)
@@ -338,7 +382,7 @@ def dbquery_read():
if not raw_sql_b64:
return jsonify({"success": False, "message": "ERROR: Missing parameters", "error": "rawSql is required"}), 400
return read_query(raw_sql_b64)
@@ -350,7 +394,7 @@ def dbquery_write():
data = request.get_json() or {}
raw_sql_b64 = data.get("rawSql")
if not raw_sql_b64:
return jsonify({"success": False, "message": "ERROR: Missing parameters", "error": "rawSql is required"}), 400
return jsonify({"success": False, "message": "ERROR: Missing parameters", "error": "rawSql is required"}), 400
return write_query(raw_sql_b64)
@@ -363,7 +407,13 @@ def dbquery_update():
data = request.get_json() or {}
required = ["columnName", "id", "dbtable", "columns", "values"]
if not all(data.get(k) for k in required):
return jsonify({"success": False, "message": "ERROR: Missing parameters", "error": "Missing required 'columnName', 'id', 'dbtable', 'columns', or 'values' query parameter"}), 400
return jsonify(
{
"success": False,
"message": "ERROR: Missing parameters",
"error": "Missing required 'columnName', 'id', 'dbtable', 'columns', or 'values' query parameter"
}
), 400
return update_query(
column_name=data["columnName"],
@@ -418,12 +468,13 @@ def api_clean_log():
return clean_log(file)
@app.route("/logs/add-to-execution-queue", methods=["POST"])
def api_add_to_execution_queue():
if not is_authorized():
return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403
queue = UserEventsQueueInstance()
# Get JSON payload safely
@@ -499,7 +550,7 @@ def api_delete_old_events(days: int):
"""
if not is_authorized():
return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403
return delete_events_older_than(days)
@@ -619,7 +670,7 @@ def api_write_notification():
if not content:
return jsonify({"success": False, "message": "ERROR: Missing parameters", "error": "Missing content"}), 400
write_notification(content, level)
return jsonify({"success": True})
@@ -672,7 +723,8 @@ def api_mark_notification_read(guid):
return jsonify({"success": True})
else:
return jsonify({"success": False, "message": "ERROR", "error": result.get("error")}), 500
# --------------------------
# SYNC endpoint
# --------------------------

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python
# !/usr/bin/env python
import os
import base64
@@ -9,7 +9,7 @@ from flask import jsonify
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from database import get_temp_db_connection
from database import get_temp_db_connection # noqa: E402 [flake8 lint suppression]
def read_query(raw_sql_b64):

View File

@@ -1,18 +1,17 @@
#!/usr/bin/env python
# !/usr/bin/env python
import os
import sys
from datetime import datetime
from flask import jsonify, request
# Register NetAlertX directories
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from database import get_temp_db_connection
from helper import is_random_mac, get_setting_value
from utils.datetime_utils import timeNowDB, format_date
from db.db_helper import row_to_json, get_date_from_period
from database import get_temp_db_connection # noqa: E402 [flake8 lint suppression]
from helper import is_random_mac, get_setting_value # noqa: E402 [flake8 lint suppression]
from utils.datetime_utils import timeNowDB, format_date # noqa: E402 [flake8 lint suppression]
from db.db_helper import row_to_json, get_date_from_period # noqa: E402 [flake8 lint suppression]
# --------------------------
# Device Endpoints Functions
@@ -27,10 +26,10 @@ def get_device_data(mac):
cur = conn.cursor()
now = timeNowDB()
# Special case for new device
if mac.lower() == "new":
device_data = {
"devMac": "",
"devName": "",
@@ -89,10 +88,10 @@ def get_device_data(mac):
ELSE 'Off-line'
END AS devStatus,
(SELECT COUNT(*) FROM Sessions
(SELECT COUNT(*) FROM Sessions
WHERE ses_MAC = d.devMac AND (
ses_DateTimeConnection >= {period_date_sql} OR
ses_DateTimeDisconnection >= {period_date_sql} OR
ses_DateTimeConnection >= {period_date_sql} OR
ses_DateTimeDisconnection >= {period_date_sql} OR
ses_StillConnected = 1
)) AS devSessions,

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python
# !/usr/bin/env python
import os
import base64
@@ -14,16 +14,13 @@ from logger import mylog
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from database import get_temp_db_connection
from db.db_helper import get_table_json, get_device_condition_by_status
from utils.datetime_utils import format_date
from database import get_temp_db_connection # noqa: E402 [flake8 lint suppression]
from db.db_helper import get_table_json, get_device_condition_by_status # noqa: E402 [flake8 lint suppression]
# --------------------------
# Device Endpoints Functions
# --------------------------
def get_all_devices():
"""Retrieve all devices from the database."""
conn = get_temp_db_connection()
@@ -139,7 +136,6 @@ def export_devices(export_format):
def import_csv(file_storage=None):
data = ""
skipped = []
error = None
# 1. Try JSON `content` (base64-encoded CSV)
if request.is_json and request.json.get("content"):

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python
# !/usr/bin/env python
import os
import sys
@@ -9,10 +9,10 @@ from flask import jsonify
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from database import get_temp_db_connection
from helper import is_random_mac, mylog
from db.db_helper import row_to_json, get_date_from_period
from utils.datetime_utils import format_date, format_date_iso, format_event_date, ensure_datetime
from database import get_temp_db_connection # noqa: E402 [flake8 lint suppression]
from helper import mylog # noqa: E402 [flake8 lint suppression]
from db.db_helper import row_to_json, get_date_from_period # noqa: E402 [flake8 lint suppression]
from utils.datetime_utils import ensure_datetime # noqa: E402 [flake8 lint suppression]
# --------------------------
@@ -120,14 +120,14 @@ def get_events_totals(period: str = "7 days"):
cur = conn.cursor()
sql = f"""
SELECT
SELECT
(SELECT COUNT(*) FROM Events WHERE eve_DateTime >= {period_date_sql}) AS all_events,
(SELECT COUNT(*) FROM Sessions WHERE
(SELECT COUNT(*) FROM Sessions WHERE
ses_DateTimeConnection >= {period_date_sql}
OR ses_DateTimeDisconnection >= {period_date_sql}
OR ses_StillConnected = 1
) AS sessions,
(SELECT COUNT(*) FROM Sessions WHERE
(SELECT COUNT(*) FROM Sessions WHERE
(ses_DateTimeConnection IS NULL AND ses_DateTimeDisconnection >= {period_date_sql})
OR (ses_DateTimeDisconnection IS NULL AND ses_StillConnected = 0 AND ses_DateTimeConnection >= {period_date_sql})
) AS missing,

View File

@@ -1,5 +1,7 @@
import graphene
from graphene import ObjectType, String, Int, Boolean, List, Field, InputObjectType, Argument
from graphene import (
ObjectType, String, Int, Boolean, List, Field, InputObjectType, Argument
)
import json
import sys
import os
@@ -8,9 +10,9 @@ import os
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
sys.path.extend([f"{INSTALL_PATH}/server"])
from logger import mylog
from const import apiPath
from helper import (
from logger import mylog # noqa: E402 [flake8 lint suppression]
from const import apiPath # noqa: E402 [flake8 lint suppression]
from helper import ( # noqa: E402 [flake8 lint suppression]
is_random_mac,
get_number_of_children,
format_ip_long,
@@ -111,12 +113,14 @@ class SettingResult(ObjectType):
settings = List(Setting)
count = Int()
# --- LANGSTRINGS ---
# --- LANGSTRINGS ---
# In-memory cache for lang strings
_langstrings_cache = {} # caches lists per file (core JSON or plugin)
_langstrings_cache_mtime = {} # tracks last modified times
# LangString ObjectType
class LangString(ObjectType):
langCode = String()
@@ -128,6 +132,7 @@ class LangStringResult(ObjectType):
langStrings = List(LangString)
count = Int()
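
# The _langstrings_cache dictionaries above implement an mtime-gated reload; a
# minimal self-contained sketch of that pattern (the loader name is hypothetical):

import json
import os

_cache = {}
_cache_mtime = {}

def load_json_cached(path):
    # Re-read the file only when its modification time changes
    mtime = os.path.getmtime(path)
    if _cache_mtime.get(path) != mtime:
        with open(path) as f:
            _cache[path] = json.load(f)
        _cache_mtime[path] = mtime
    return _cache[path]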
# Define Query Type with Pagination Support
class Query(ObjectType):
# --- DEVICES ---
@@ -184,31 +189,39 @@ class Query(ObjectType):
if (device.get("devParentRelType") not in hidden_relationships)
]
devices_data = [
device
for device in devices_data
if (
(
device["devPresentLastScan"] == 1
and "online" in allowed_statuses
)
or (device["devIsNew"] == 1 and "new" in allowed_statuses)
or (
device["devPresentLastScan"] == 0
and device["devAlertDown"]
and "down" in allowed_statuses
)
or (
device["devPresentLastScan"] == 0
and "offline" in allowed_statuses
)
and device["devIsArchived"] == 0
or (
device["devIsArchived"] == 1
and "archived" in allowed_statuses
)
filtered = []
for device in devices_data:
is_online = (
device["devPresentLastScan"] == 1 and "online" in allowed_statuses
)
]
is_new = (
device["devIsNew"] == 1 and "new" in allowed_statuses
)
is_down = (
device["devPresentLastScan"] == 0 and device["devAlertDown"] and "down" in allowed_statuses
)
is_offline = (
device["devPresentLastScan"] == 0 and "offline" in allowed_statuses
)
is_archived = (
device["devIsArchived"] == 1 and "archived" in allowed_statuses
)
# Matches if not archived and status matches OR it is archived and allowed
matches = (
(is_online or is_new or is_down or is_offline) and device["devIsArchived"] == 0
) or is_archived
if matches:
filtered.append(device)
devices_data = filtered
elif status == "connected":
devices_data = [
device
@@ -257,8 +270,7 @@ class Query(ObjectType):
devices_data = [
device
for device in devices_data
if str(device.get(filter.filterColumn, "")).lower()
== str(filter.filterValue).lower()
if str(device.get(filter.filterColumn, "")).lower() == str(filter.filterValue).lower()
]
# Search data if a search term is provided
@@ -340,7 +352,7 @@ class Query(ObjectType):
return SettingResult(settings=settings, count=len(settings))
# --- LANGSTRINGS ---
# --- LANGSTRINGS ---
langStrings = Field(
LangStringResult,
langCode=Argument(String, required=False),
@@ -437,11 +449,11 @@ class Query(ObjectType):
if en_fallback:
langStrings[i] = en_fallback[0]
mylog('trace', f'[graphql_schema] Collected {len(langStrings)} language strings '
f'(langCode={langCode}, key={langStringKey}, fallback_to_en={fallback_to_en})')
mylog('trace', f'[graphql_schema] Collected {len(langStrings)} language strings (langCode={langCode}, key={langStringKey}, fallback_to_en={fallback_to_en})')
return LangStringResult(langStrings=langStrings, count=len(langStrings))
# helps sorting inconsistent dataset mixed integers and strings
def mixed_type_sort_key(value):
if value is None or value == "":

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python
# !/usr/bin/env python
import os
import sys
@@ -8,7 +8,7 @@ from flask import jsonify
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from database import get_temp_db_connection
from database import get_temp_db_connection # noqa: E402 [flake8 lint suppression]
# --------------------------------------------------

View File

@@ -3,18 +3,18 @@ import sys
from flask import jsonify
# Register NetAlertX directories
INSTALL_PATH="/app"
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from const import logPath
from logger import mylog, Logger
from helper import get_setting_value
from utils.datetime_utils import timeNowDB
from messaging.in_app import write_notification
from const import logPath # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
from messaging.in_app import write_notification # noqa: E402 [flake8 lint suppression]
# Make sure log level is initialized correctly
Logger(get_setting_value('LOG_LEVEL'))
def clean_log(log_file):
"""
Purge the content of an allowed log file within the /app/log/ directory.
@@ -55,4 +55,3 @@ def clean_log(log_file):
mylog('none', [msg])
write_notification(msg, 'interrupt')
return jsonify({"success": False, "message": msg}), 500

View File

@@ -6,8 +6,8 @@ import os
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
sys.path.extend([f"{INSTALL_PATH}/server"])
from logger import mylog
from const import apiPath
from logger import mylog # noqa: E402 [flake8 lint suppression]
from const import apiPath # noqa: E402 [flake8 lint suppression]
def escape_label_value(val):

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python
# !/usr/bin/env python
import os
import sqlite3
@@ -9,10 +9,10 @@ from flask import jsonify
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from database import get_temp_db_connection
from helper import is_random_mac, get_setting_value, mylog, format_ip_long
from db.db_helper import row_to_json, get_date_from_period
from utils.datetime_utils import format_date_iso, format_event_date, format_date_diff, parse_datetime, format_date
from database import get_temp_db_connection # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value, format_ip_long # noqa: E402 [flake8 lint suppression]
from db.db_helper import get_date_from_period # noqa: E402 [flake8 lint suppression]
from utils.datetime_utils import format_date_iso, format_event_date, format_date_diff, format_date # noqa: E402 [flake8 lint suppression]
# --------------------------
@@ -33,7 +33,7 @@ def create_session(
cur.execute(
"""
INSERT INTO Sessions (ses_MAC, ses_IP, ses_DateTimeConnection, ses_DateTimeDisconnection,
INSERT INTO Sessions (ses_MAC, ses_IP, ses_DateTimeConnection, ses_DateTimeDisconnection,
ses_EventTypeConnection, ses_EventTypeDisconnection)
VALUES (?, ?, ?, ?, ?, ?)
""",
@@ -105,7 +105,7 @@ def get_sessions_calendar(start_date, end_date):
-- If ses_EventTypeConnection is missing, backfill from last disconnection
-- If ses_EventTypeDisconnection is missing, forward-fill from next connection
SELECT
SELECT
SES1.ses_MAC, SES1.ses_EventTypeConnection, SES1.ses_DateTimeConnection,
SES1.ses_EventTypeDisconnection, SES1.ses_DateTimeDisconnection, SES1.ses_IP,
SES1.ses_AdditionalInfo, SES1.ses_StillConnected,
@@ -113,9 +113,9 @@ def get_sessions_calendar(start_date, end_date):
CASE
WHEN SES1.ses_EventTypeConnection = '<missing event>' THEN
IFNULL(
(SELECT MAX(SES2.ses_DateTimeDisconnection)
FROM Sessions AS SES2
WHERE SES2.ses_MAC = SES1.ses_MAC
(SELECT MAX(SES2.ses_DateTimeDisconnection)
FROM Sessions AS SES2
WHERE SES2.ses_MAC = SES1.ses_MAC
AND SES2.ses_DateTimeDisconnection < SES1.ses_DateTimeDisconnection
AND SES2.ses_DateTimeDisconnection BETWEEN Date(?) AND Date(?)
),
@@ -126,9 +126,9 @@ def get_sessions_calendar(start_date, end_date):
CASE
WHEN SES1.ses_EventTypeDisconnection = '<missing event>' THEN
(SELECT MIN(SES2.ses_DateTimeConnection)
FROM Sessions AS SES2
WHERE SES2.ses_MAC = SES1.ses_MAC
(SELECT MIN(SES2.ses_DateTimeConnection)
FROM Sessions AS SES2
WHERE SES2.ses_MAC = SES1.ses_MAC
AND SES2.ses_DateTimeConnection > SES1.ses_DateTimeConnection
AND SES2.ses_DateTimeConnection BETWEEN Date(?) AND Date(?)
)
@@ -162,8 +162,7 @@ def get_sessions_calendar(start_date, end_date):
# Determine color
if (
row["ses_EventTypeConnection"] == "<missing event>"
or row["ses_EventTypeDisconnection"] == "<missing event>"
row["ses_EventTypeConnection"] == "<missing event>" or row["ses_EventTypeDisconnection"] == "<missing event>"
):
color = "#f39c12"
elif row["ses_StillConnected"] == 1:
@@ -205,7 +204,7 @@ def get_device_sessions(mac, period):
cur = conn.cursor()
sql = f"""
SELECT
SELECT
IFNULL(ses_DateTimeConnection, ses_DateTimeDisconnection) AS ses_DateTimeOrder,
ses_EventTypeConnection,
ses_DateTimeConnection,
@@ -293,7 +292,7 @@ def get_session_events(event_type, period_date):
# Base SQLs
sql_events = f"""
SELECT
SELECT
eve_DateTime AS eve_DateTimeOrder,
devName,
devOwner,
@@ -314,7 +313,7 @@ def get_session_events(event_type, period_date):
"""
sql_sessions = """
SELECT
SELECT
IFNULL(ses_DateTimeConnection, ses_DateTimeDisconnection) AS ses_DateTimeOrder,
devName,
devOwner,
@@ -337,8 +336,7 @@ def get_session_events(event_type, period_date):
sql = sql_events
elif event_type == "sessions":
sql = (
sql_sessions
+ f"""
sql_sessions + f"""
WHERE (
ses_DateTimeConnection >= {period_date}
OR ses_DateTimeDisconnection >= {period_date}
@@ -348,8 +346,7 @@ def get_session_events(event_type, period_date):
)
elif event_type == "missing":
sql = (
sql_sessions
+ f"""
sql_sessions + f"""
WHERE (
(ses_DateTimeConnection IS NULL AND ses_DateTimeDisconnection >= {period_date})
OR (ses_DateTimeDisconnection IS NULL AND ses_StillConnected = 0 AND ses_DateTimeConnection >= {period_date})

View File

@@ -1,7 +1,7 @@
import os
import json
from const import *
from const import applicationPath, apiPath
from logger import mylog
from helper import checkNewVersion
from utils.datetime_utils import timeNowDB, timeNow
@@ -32,14 +32,17 @@ class app_state_class:
isNewVersionChecked (int): Timestamp of last version check.
"""
def __init__(self, currentState=None,
settingsSaved=None,
settingsImported=None,
showSpinner=None,
graphQLServerStarted=0,
processScan=False,
pluginsStates=None,
appVersion=None):
def __init__(
self,
currentState=None,
settingsSaved=None,
settingsImported=None,
showSpinner=None,
graphQLServerStarted=0,
processScan=False,
pluginsStates=None,
appVersion=None
):
"""
Initialize the application state, optionally overwriting previous values.
@@ -62,7 +65,7 @@ class app_state_class:
# Update self
self.lastUpdated = str(timeNowDB())
if os.path.exists(stateFile):
try:
with open(stateFile, "r") as json_file:
@@ -73,7 +76,7 @@ class app_state_class:
)
# Check if the file exists and recover previous values
if previousState != "":
if previousState != "":
self.settingsSaved = previousState.get("settingsSaved", 0)
self.settingsImported = previousState.get("settingsImported", 0)
self.processScan = previousState.get("processScan", False)
@@ -82,9 +85,9 @@ class app_state_class:
self.isNewVersionChecked = previousState.get("isNewVersionChecked", 0)
self.graphQLServerStarted = previousState.get("graphQLServerStarted", 0)
self.currentState = previousState.get("currentState", "Init")
self.pluginsStates = previousState.get("pluginsStates", {})
self.appVersion = previousState.get("appVersion", "")
else: # init first time values
self.pluginsStates = previousState.get("pluginsStates", {})
self.appVersion = previousState.get("appVersion", "")
else: # init first time values
self.settingsSaved = 0
self.settingsImported = 0
self.showSpinner = False
@@ -158,12 +161,12 @@ class app_state_class:
# -------------------------------------------------------------------------------
# method to update the state
def updateState(newState = None,
settingsSaved = None,
settingsImported = None,
showSpinner = None,
graphQLServerStarted = None,
processScan = None,
def updateState(newState = None,
settingsSaved = None,
settingsImported = None,
showSpinner = None,
graphQLServerStarted = None,
processScan = None,
pluginsStates=None,
appVersion=None):
"""
@@ -182,14 +185,16 @@ def updateState(newState = None,
Returns:
app_state_class: Updated state object.
"""
return app_state_class( newState,
settingsSaved,
settingsImported,
showSpinner,
graphQLServerStarted,
processScan,
pluginsStates,
appVersion)
return app_state_class(
newState,
settingsSaved,
settingsImported,
showSpinner,
graphQLServerStarted,
processScan,
pluginsStates,
appVersion
)
# -------------------------------------------------------------------------------

View File

@@ -52,7 +52,7 @@ default_tz = "Europe/Berlin"
# SQL queries
# ===============================================================================
sql_devices_all = """
SELECT
SELECT
rowid,
IFNULL(devMac, '') AS devMac,
IFNULL(devName, '') AS devName,
@@ -88,7 +88,7 @@ sql_devices_all = """
IFNULL(devFQDN, '') AS devFQDN,
IFNULL(devParentRelType, '') AS devParentRelType,
IFNULL(devReqNicsOnline, '') AS devReqNicsOnline,
CASE
CASE
WHEN devIsNew = 1 THEN 'New'
WHEN devPresentLastScan = 1 THEN 'On-line'
WHEN devPresentLastScan = 0 AND devAlertDown != 0 THEN 'Down'
@@ -133,7 +133,7 @@ sql_devices_tiles = """
(SELECT COUNT(*) FROM Devices) AS "all_devices",
-- My Devices count
(SELECT COUNT(*) FROM MyDevicesFilter) AS my_devices
FROM Statuses;
FROM Statuses;
"""
sql_devices_filters = """
SELECT DISTINCT 'devSite' AS columnName, devSite AS columnValue
@@ -164,9 +164,9 @@ sql_devices_filters = """
FROM Devices WHERE devSSID NOT IN ('', 'null') AND devSSID IS NOT NULL
ORDER BY columnName;
"""
sql_devices_stats = """SELECT Online_Devices as online, Down_Devices as down, All_Devices as 'all', Archived_Devices as archived,
(select count(*) from Devices a where devIsNew = 1 ) as new,
(select count(*) from Devices a where devName = '(unknown)' or devName = '(name not found)' ) as unknown
sql_devices_stats = """SELECT Online_Devices as online, Down_Devices as down, All_Devices as 'all', Archived_Devices as archived,
(select count(*) from Devices a where devIsNew = 1 ) as new,
(select count(*) from Devices a where devName = '(unknown)' or devName = '(name not found)' ) as unknown
from Online_History order by Scan_Date desc limit 1"""
sql_events_pending_alert = "SELECT * FROM Events where eve_PendingAlertEmail is not 0"
sql_settings = "SELECT * FROM Settings"
@@ -176,23 +176,23 @@ sql_notifications_all = "SELECT * FROM Notifications"
sql_online_history = "SELECT * FROM Online_History"
sql_plugins_events = "SELECT * FROM Plugins_Events"
sql_plugins_history = "SELECT * FROM Plugins_History ORDER BY DateTimeChanged DESC"
sql_new_devices = """SELECT * FROM (
SELECT eve_IP as devLastIP, eve_MAC as devMac
sql_new_devices = """SELECT * FROM (
SELECT eve_IP as devLastIP, eve_MAC as devMac
FROM Events_Devices
WHERE eve_PendingAlertEmail = 1
AND eve_EventType = 'New Device'
ORDER BY eve_DateTime ) t1
LEFT JOIN
( SELECT devName, devMac as devMac_t2 FROM Devices) t2
LEFT JOIN
( SELECT devName, devMac as devMac_t2 FROM Devices) t2
ON t1.devMac = t2.devMac_t2"""
sql_generateGuid = """
lower(
hex(randomblob(4)) || '-' || hex(randomblob(2)) || '-' || '4' ||
substr(hex( randomblob(2)), 2) || '-' ||
hex(randomblob(4)) || '-' || hex(randomblob(2)) || '-' || '4' ||
substr(hex( randomblob(2)), 2) || '-' ||
substr('AB89', 1 + (abs(random()) % 4) , 1) ||
substr(hex(randomblob(2)), 2) || '-' ||
substr(hex(randomblob(2)), 2) || '-' ||
hex(randomblob(6))
)
"""

View File

@@ -180,7 +180,7 @@ class DB:
# Init the AppEvent database table
AppEvent_obj(self)
# #-------------------------------------------------------------------------------
# # -------------------------------------------------------------------------------
# def get_table_as_json(self, sqlQuery):
# # mylog('debug',[ '[Database] - get_table_as_json - Query: ', sqlQuery])

View File

@@ -6,8 +6,8 @@ import os
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
sys.path.extend([f"{INSTALL_PATH}/server"])
from helper import if_byte_then_to_str
from logger import mylog
from helper import if_byte_then_to_str # noqa: E402 [flake8 lint suppression]
from logger import mylog # noqa: E402 [flake8 lint suppression]
# -------------------------------------------------------------------------------

View File

@@ -5,8 +5,8 @@ import os
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
sys.path.extend([f"{INSTALL_PATH}/server"])
from logger import mylog
from messaging.in_app import write_notification
from logger import mylog # noqa: E402 [flake8 lint suppression]
from messaging.in_app import write_notification # noqa: E402 [flake8 lint suppression]
def ensure_column(sql, table: str, column_name: str, column_type: str) -> bool:
@@ -108,23 +108,23 @@ def ensure_views(sql) -> bool:
- sql: database cursor or connection wrapper (must support execute() and fetchall()).
"""
sql.execute(""" DROP VIEW IF EXISTS Events_Devices;""")
sql.execute(""" CREATE VIEW Events_Devices AS
SELECT *
FROM Events
sql.execute(""" CREATE VIEW Events_Devices AS
SELECT *
FROM Events
LEFT JOIN Devices ON eve_MAC = devMac;
""")
sql.execute(""" DROP VIEW IF EXISTS LatestEventsPerMAC;""")
sql.execute("""CREATE VIEW LatestEventsPerMAC AS
WITH RankedEvents AS (
SELECT
SELECT
e.*,
ROW_NUMBER() OVER (PARTITION BY e.eve_MAC ORDER BY e.eve_DateTime DESC) AS row_num
FROM Events AS e
)
SELECT
e.*,
d.*,
SELECT
e.*,
d.*,
c.*
FROM RankedEvents AS e
LEFT JOIN Devices AS d ON e.eve_MAC = d.devMac
@@ -138,14 +138,14 @@ def ensure_views(sql) -> bool:
sql.execute(""" CREATE VIEW IF NOT EXISTS LatestEventsPerMAC AS
WITH RankedEvents AS (
SELECT
SELECT
e.*,
ROW_NUMBER() OVER (PARTITION BY e.eve_MAC ORDER BY e.eve_DateTime DESC) AS row_num
FROM Events AS e
)
SELECT
e.*,
d.*,
SELECT
e.*,
d.*,
c.*
FROM RankedEvents AS e
LEFT JOIN Devices AS d ON e.eve_MAC = d.devMac
@@ -272,7 +272,7 @@ def ensure_CurrentScan(sql) -> bool:
"""
# 🐛 CurrentScan DEBUG: comment out below when debugging to keep the CurrentScan table after restarts/scan finishes
sql.execute("DROP TABLE IF EXISTS CurrentScan;")
sql.execute(""" CREATE TABLE IF NOT EXISTS CurrentScan (
sql.execute(""" CREATE TABLE IF NOT EXISTS CurrentScan (
cur_MAC STRING(50) NOT NULL COLLATE NOCASE,
cur_IP STRING(50) NOT NULL COLLATE NOCASE,
cur_Vendor STRING(250),
@@ -354,7 +354,7 @@ def ensure_plugins_tables(sql) -> bool:
# Plugin state
sql_Plugins_Objects = """ CREATE TABLE IF NOT EXISTS Plugins_Objects(
"Index" INTEGER,
Plugin TEXT NOT NULL,
Plugin TEXT NOT NULL,
Object_PrimaryID TEXT NOT NULL,
Object_SecondaryID TEXT NOT NULL,
DateTimeCreated TEXT NOT NULL,

View File

@@ -18,7 +18,7 @@ from typing import Dict, List, Tuple, Any, Optional
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
sys.path.extend([f"{INSTALL_PATH}/server"])
from logger import mylog
from logger import mylog # noqa: E402 [flake8 lint suppression]
class SafeConditionBuilder:
@@ -494,8 +494,6 @@ class SafeConditionBuilder:
if logical_op and not self._validate_logical_operator(logical_op):
raise ValueError(f"Invalid logical operator: {logical_op}")
# Parse values from the IN clause
values = []
# Simple regex to extract quoted values
value_pattern = r"'([^']*)'"
matches = re.findall(value_pattern, values_str)

View File

@@ -7,25 +7,22 @@ import os
import re
import unicodedata
import subprocess
import pytz
import json
import requests
import base64
import hashlib
import random
import email
import string
import ipaddress
import conf
from const import *
from const import applicationPath, fullConfPath, fullDbPath, dbPath, confPath, apiPath
from logger import mylog, logResult
# Register NetAlertX directories using runtime configuration
INSTALL_PATH = applicationPath
# -------------------------------------------------------------------------------
# File system permission handling
# -------------------------------------------------------------------------------
@@ -58,12 +55,6 @@ def checkPermissionsOK():
return (confR_access, dbR_access)
# -------------------------------------------------------------------------------
def fixPermissions():
# Try fixing access rights if needed
chmodCommands = []
# -------------------------------------------------------------------------------
def initialiseFile(pathToCheck, defaultFile):
# if file not readable (missing?) try to copy over the backed-up (default) one
@@ -71,9 +62,7 @@ def initialiseFile(pathToCheck, defaultFile):
mylog(
"none",
[
"[Setup] ("
+ pathToCheck
+ ") file is not readable or missing. Trying to copy over the default one."
"[Setup] (" + pathToCheck + ") file is not readable or missing. Trying to copy over the default one."
],
)
try:
@@ -89,22 +78,14 @@ def initialiseFile(pathToCheck, defaultFile):
mylog(
"none",
[
"[Setup] ⚠ ERROR copying ("
+ defaultFile
+ ") to ("
+ pathToCheck
+ "). Make sure the app has Read & Write access to the parent directory."
"[Setup] ⚠ ERROR copying (" + defaultFile + ") to (" + pathToCheck + "). Make sure the app has Read & Write access to the parent directory."
],
)
else:
mylog(
"none",
[
"[Setup] ("
+ defaultFile
+ ") copied over successfully to ("
+ pathToCheck
+ ")."
"[Setup] (" + defaultFile + ") copied over successfully to (" + pathToCheck + ")."
],
)
@@ -116,10 +97,7 @@ def initialiseFile(pathToCheck, defaultFile):
mylog(
"none",
[
"[Setup] ⚠ ERROR copying ("
+ defaultFile
+ "). Make sure the app has Read & Write access to "
+ pathToCheck
"[Setup] ⚠ ERROR copying (" + defaultFile + "). Make sure the app has Read & Write access to " + pathToCheck
],
)
mylog("none", [e.output])
@@ -130,16 +108,13 @@ def filePermissions():
# check and initialize .conf
(confR_access, dbR_access) = checkPermissionsOK() # Initial check
if confR_access == False:
if confR_access is False:
initialiseFile(fullConfPath, f"{INSTALL_PATH}/back/app.conf")
# check and initialize .db
if dbR_access == False:
if dbR_access is False:
initialiseFile(fullDbPath, f"{INSTALL_PATH}/back/app.db")
# last attempt
fixPermissions()
# -------------------------------------------------------------------------------
# File manipulation methods
@@ -292,7 +267,7 @@ def get_setting_value(key):
value = setting_value_to_python_type(set_type, set_value)
else:
value = setting_value_to_python_type(set_type, str(set_value))
SETTINGS_SECONDARYCACHE[key] = value
return value
@@ -382,7 +357,7 @@ def setting_value_to_python_type(set_type, set_value):
if isinstance(set_value, str):
try:
value = json.loads(set_value.replace("'", "\""))
except json.JSONDecodeError as e:
mylog(
"none",
@@ -413,17 +388,12 @@ def setting_value_to_python_type(set_type, set_value):
value = set_value
elif (
dataType == "string"
and elementType == "input"
and any(opt.get("readonly") == "true" for opt in elementOptions)
dataType == "string" and elementType == "input" and any(opt.get("readonly") == "true" for opt in elementOptions)
):
value = reverseTransformers(str(set_value), transformers)
elif (
dataType == "string"
and elementType == "input"
and any(opt.get("type") == "password" for opt in elementOptions)
and "sha256" in transformers
dataType == "string" and elementType == "input" and any(opt.get("type") == "password" for opt in elementOptions) and "sha256" in transformers
):
value = hashlib.sha256(set_value.encode()).hexdigest()
@@ -602,23 +572,23 @@ def normalize_string(text):
# -------------------------------------------------------------------------------
# -------------------------------------------------------------------------------------------
def is_random_mac(mac: str) -> bool:
"""Determine if a MAC address is random, respecting user-defined prefixes not to mark as random."""
# # -------------------------------------------------------------------------------------------
# def is_random_mac(mac: str) -> bool:
# """Determine if a MAC address is random, respecting user-defined prefixes not to mark as random."""
is_random = mac[1].upper() in ["2", "6", "A", "E"]
# is_random = mac[1].upper() in ["2", "6", "A", "E"]
# Get prefixes from settings
prefixes = get_setting_value("UI_NOT_RANDOM_MAC")
# # Get prefixes from settings
# prefixes = get_setting_value("UI_NOT_RANDOM_MAC")
# If detected as random, make sure it doesn't start with a prefix the user wants to exclude
if is_random:
for prefix in prefixes:
if mac.upper().startswith(prefix.upper()):
is_random = False
break
# # If detected as random, make sure it doesn't start with a prefix the user wants to exclude
# if is_random:
# for prefix in prefixes:
# if mac.upper().startswith(prefix.upper()):
# is_random = False
# break
return is_random
# return is_random
# -------------------------------------------------------------------------------------------
@@ -653,6 +623,7 @@ def extract_ip_addresses(text):
# -------------------------------------------------------------------------------
# Helper function to determine if a MAC address is random
def is_random_mac(mac):
"""Determine if a MAC address is random, respecting user-defined prefixes not to mark as random."""
# Check if second character matches "2", "6", "A", "E" (case insensitive)
is_random = mac[1].upper() in ["2", "6", "A", "E"]
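
# The check keys off the locally-administered bit (0x02) in the first octet:
# second hex digits 2, 6, A and E all carry it, which is how OS-randomized MACs
# are flagged. A tiny illustration with made-up addresses:

for mac in ('02:00:00:00:00:01', 'a6:14:4e:11:22:33', '00:1a:2b:3c:4d:5e'):
    print(mac, '-> random:', mac[1].upper() in ['2', '6', 'A', 'E'])
# the first two are locally administered (randomized); the last is globally unique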
@@ -773,7 +744,6 @@ def getBuildTimeStampAndVersion():
return tuple(results)
# -------------------------------------------------------------------------------
def checkNewVersion():
mylog("debug", ["[Version check] Checking if new version available"])

View File

@@ -8,9 +8,9 @@ import shutil
import re
# Register NetAlertX libraries
import conf
from const import fullConfPath, applicationPath, fullConfFolder, default_tz
from helper import getBuildTimeStampAndVersion, fixPermissions, collect_lang_strings, updateSubnets, isJsonObject, setting_value_to_python_type, get_setting_value, generate_random_string
import conf
from const import fullConfPath, fullConfFolder, default_tz
from helper import getBuildTimeStampAndVersion, fixPermissions, collect_lang_strings, updateSubnets, generate_random_string
from utils.datetime_utils import timeNowDB
from app_state import updateState
from logger import mylog
@@ -19,7 +19,6 @@ from scheduler import schedule_class
from plugin import plugin_manager, print_plugin_info
from utils.plugin_utils import get_plugins_configs, get_set_value_for_init
from messaging.in_app import write_notification
from utils.crypto_utils import get_random_bytes
# ===============================================================================
# Initialise user defined values
@@ -59,7 +58,7 @@ def ccd(
result = default
# Use existing value if already supplied, otherwise default value is used
if forceDefault == False and key in config_dir:
if forceDefault is False and key in config_dir:
result = config_dir[key]
# Single quotes might break SQL queries, replacing them
@@ -216,7 +215,7 @@ def importConfigs(pm, db, all_plugins):
[],
c_d,
"Loaded plugins",
'{"dataType":"array","elements":[{"elementType":"select","elementHasInputValue":1,"elementOptions":[{"multiple":"true","ordeable":"true"}],"transformers":[]},{"elementType":"button","elementOptions":[{"sourceSuffixes":[]},{"separator":""},{"cssClasses":"col-xs-12"},{"onClick":"selectChange(this)"},{"getStringKey":"Gen_Change"}],"transformers":[]}]}',
'{"dataType":"array","elements":[{"elementType":"select","elementHasInputValue":1,"elementOptions":[{"multiple":"true","ordeable":"true"}],"transformers":[]},{"elementType":"button","elementOptions":[{"sourceSuffixes":[]},{"separator":""},{"cssClasses":"col-xs-12"},{"onClick":"selectChange(this)"},{"getStringKey":"Gen_Change"}],"transformers":[]}]}', # noqa: E501
"[]",
"General",
)
@@ -234,7 +233,7 @@ def importConfigs(pm, db, all_plugins):
["192.168.1.0/24 --interface=eth1", "192.168.1.0/24 --interface=eth0"],
c_d,
"Subnets to scan",
"""{"dataType": "array","elements": [{"elementType": "input","elementOptions": [{"placeholder": "192.168.1.0/24 --interface=eth1"},{"suffix": "_in"},{"cssClasses": "col-sm-10"},{"prefillValue": "null"}],"transformers": []},{"elementType": "button","elementOptions": [{"sourceSuffixes": ["_in"]},{"separator": ""},{"cssClasses": "col-xs-12"},{"onClick": "addList(this, false)"},{"getStringKey": "Gen_Add"}],"transformers": []},{"elementType": "select","elementHasInputValue": 1,"elementOptions": [{"multiple": "true"},{"readonly": "true"},{"editable": "true"}],"transformers": []},{"elementType": "button","elementOptions": [{"sourceSuffixes": []},{"separator": ""},{"cssClasses": "col-xs-6"},{"onClick": "removeAllOptions(this)"},{"getStringKey": "Gen_Remove_All"}],"transformers": []},{"elementType": "button","elementOptions": [{"sourceSuffixes": []},{"separator": ""},{"cssClasses": "col-xs-6"},{"onClick": "removeFromList(this)"},{"getStringKey": "Gen_Remove_Last"}],"transformers": []}]}""",
"""{"dataType": "array","elements": [{"elementType": "input","elementOptions": [{"placeholder": "192.168.1.0/24 --interface=eth1"},{"suffix": "_in"},{"cssClasses": "col-sm-10"},{"prefillValue": "null"}],"transformers": []},{"elementType": "button","elementOptions": [{"sourceSuffixes": ["_in"]},{"separator": ""},{"cssClasses": "col-xs-12"},{"onClick": "addList(this, false)"},{"getStringKey": "Gen_Add"}],"transformers": []},{"elementType": "select","elementHasInputValue": 1,"elementOptions": [{"multiple": "true"},{"readonly": "true"},{"editable": "true"}],"transformers": []},{"elementType": "button","elementOptions": [{"sourceSuffixes": []},{"separator": ""},{"cssClasses": "col-xs-6"},{"onClick": "removeAllOptions(this)"},{"getStringKey": "Gen_Remove_All"}],"transformers": []},{"elementType": "button","elementOptions": [{"sourceSuffixes": []},{"separator": ""},{"cssClasses": "col-xs-6"},{"onClick": "removeFromList(this)"},{"getStringKey": "Gen_Remove_Last"}],"transformers": []}]}""", # noqa: E501 - inline JSON
"[]",
"General",
)
@@ -356,7 +355,7 @@ def importConfigs(pm, db, all_plugins):
],
c_d,
"Network device types",
'{"dataType":"array","elements":[{"elementType":"input","elementOptions":[{"placeholder":"Enter value"},{"suffix":"_in"},{"cssClasses":"col-sm-10"},{"prefillValue":"null"}],"transformers":[]},{"elementType":"button","elementOptions":[{"sourceSuffixes":["_in"]},{"separator":""},{"cssClasses":"col-xs-12"},{"onClick":"addList(this,false)"},{"getStringKey":"Gen_Add"}],"transformers":[]},{"elementType":"select", "elementHasInputValue":1,"elementOptions":[{"multiple":"true"},{"readonly":"true"},{"editable":"true"}],"transformers":[]},{"elementType":"button","elementOptions":[{"sourceSuffixes":[]},{"separator":""},{"cssClasses":"col-xs-6"},{"onClick":"removeAllOptions(this)"},{"getStringKey":"Gen_Remove_All"}],"transformers":[]},{"elementType":"button","elementOptions":[{"sourceSuffixes":[]},{"separator":""},{"cssClasses":"col-xs-6"},{"onClick":"removeFromList(this)"},{"getStringKey":"Gen_Remove_Last"}],"transformers":[]}]}',
'{"dataType":"array","elements":[{"elementType":"input","elementOptions":[{"placeholder":"Enter value"},{"suffix":"_in"},{"cssClasses":"col-sm-10"},{"prefillValue":"null"}],"transformers":[]},{"elementType":"button","elementOptions":[{"sourceSuffixes":["_in"]},{"separator":""},{"cssClasses":"col-xs-12"},{"onClick":"addList(this,false)"},{"getStringKey":"Gen_Add"}],"transformers":[]},{"elementType":"select", "elementHasInputValue":1,"elementOptions":[{"multiple":"true"},{"readonly":"true"},{"editable":"true"}],"transformers":[]},{"elementType":"button","elementOptions":[{"sourceSuffixes":[]},{"separator":""},{"cssClasses":"col-xs-6"},{"onClick":"removeAllOptions(this)"},{"getStringKey":"Gen_Remove_All"}],"transformers":[]},{"elementType":"button","elementOptions":[{"sourceSuffixes":[]},{"separator":""},{"cssClasses":"col-xs-6"},{"onClick":"removeFromList(this)"},{"getStringKey":"Gen_Remove_Last"}],"transformers":[]}]}', # noqa: E501 - inline JSON
"[]",
"General",
)
@@ -374,7 +373,7 @@ def importConfigs(pm, db, all_plugins):
"t_" + generate_random_string(20),
c_d,
"API token",
'{"dataType": "string","elements": [{"elementType": "input","elementHasInputValue": 1,"elementOptions": [{ "cssClasses": "col-xs-12" }],"transformers": []},{"elementType": "button","elementOptions": [{ "getStringKey": "Gen_Generate" },{ "customParams": "API_TOKEN" },{ "onClick": "generateApiToken(this, 20)" },{ "cssClasses": "col-xs-12" }],"transformers": []}]}',
'{"dataType": "string","elements": [{"elementType": "input","elementHasInputValue": 1,"elementOptions": [{ "cssClasses": "col-xs-12" }],"transformers": []},{"elementType": "button","elementOptions": [{ "getStringKey": "Gen_Generate" },{ "customParams": "API_TOKEN" },{ "onClick": "generateApiToken(this, 20)" },{ "cssClasses": "col-xs-12" }],"transformers": []}]}', # noqa: E501 - inline JSON
"[]",
"General",
)
@@ -386,7 +385,7 @@ def importConfigs(pm, db, all_plugins):
c_d,
"Language Interface",
'{"dataType":"string", "elements": [{"elementType" : "select", "elementOptions" : [] ,"transformers": []}]}',
"['English (en_us)', 'Arabic (ar_ar)', 'Catalan (ca_ca)', 'Czech (cs_cz)', 'German (de_de)', 'Spanish (es_es)', 'Farsi (fa_fa)', 'French (fr_fr)', 'Italian (it_it)', 'Japanese (ja_jp)', 'Norwegian (nb_no)', 'Polish (pl_pl)', 'Portuguese (pt_br)', 'Portuguese (pt_pt)', 'Russian (ru_ru)', 'Swedish (sv_sv)', 'Turkish (tr_tr)', 'Ukrainian (uk_ua)', 'Chinese (zh_cn)']",
"['English (en_us)', 'Arabic (ar_ar)', 'Catalan (ca_ca)', 'Czech (cs_cz)', 'German (de_de)', 'Spanish (es_es)', 'Farsi (fa_fa)', 'French (fr_fr)', 'Italian (it_it)', 'Japanese (ja_jp)', 'Norwegian (nb_no)', 'Polish (pl_pl)', 'Portuguese (pt_br)', 'Portuguese (pt_pt)', 'Russian (ru_ru)', 'Swedish (sv_sv)', 'Turkish (tr_tr)', 'Ukrainian (uk_ua)', 'Chinese (zh_cn)']", # noqa: E501 - inline JSON
"UI",
)
@@ -483,9 +482,7 @@ def importConfigs(pm, db, all_plugins):
# only include loaded plugins, and the ones that are enabled
if (
pref in conf.LOADED_PLUGINS
or plugin_run != "disabled"
or plugin_run is None
pref in conf.LOADED_PLUGINS or plugin_run != "disabled" or plugin_run is None
):
print_plugin_info(plugin, ["display_name", "description"])
@@ -524,9 +521,7 @@ def importConfigs(pm, db, all_plugins):
if "popupForm" in option:
for popup_entry in option["popupForm"]:
popup_pref = (
key
+ "_popupform_"
+ popup_entry.get("function", "")
key + "_popupform_" + popup_entry.get("function", "")
)
stringSqlParams = collect_lang_strings(
popup_entry, popup_pref, stringSqlParams
@@ -606,7 +601,7 @@ def importConfigs(pm, db, all_plugins):
# Loop through settings_override dictionary
for setting_name, value in settings_override.items():
# Ensure the value is treated as a string and passed directly
if isinstance(value, str) == False:
if isinstance(value, str) is False:
value = str(value)
# Log the value being passed
@@ -669,23 +664,31 @@ def importConfigs(pm, db, all_plugins):
# -----------------
# Handle the "app was upgraded" message - clear cache
# Check if app was upgraded
buildTimestamp, new_version = getBuildTimeStampAndVersion()
prev_version = conf.VERSION if conf.VERSION != '' else "unknown"
mylog('debug', [f"[Config] buildTimestamp | prev_version | .VERSION file: '{buildTimestamp}|{prev_version}|{new_version}'"])
if str(prev_version) != str(new_version):
mylog('none', ['[Config] App upgraded 🚀'])
# ccd(key, default, config_dir, name, inputtype, options, group, events=None, desc="", setJsonMetadata=None, overrideTemplate=None, forceDefault=False)
ccd('VERSION', new_version , c_d, '_KEEP_', '_KEEP_', '_KEEP_', '_KEEP_', None, "_KEEP_", None, None, True)
write_notification(f'[Upgrade] : App upgraded from <code>{prev_version}</code> to <code>{new_version}</code> 🚀 Please clear the cache: <ol> <li>Click OK below</li> <li>Clear the browser cache (shift + browser refresh button)</li> <li> Clear app cache with the <i class="fa-solid fa-rotate"></i> (reload) button in the header</li><li>Go to Settings and click Save</li> </ol> Check out new features and what has changed in the <a href="https://github.com/jokob-sk/NetAlertX/releases" target="_blank">📓 release notes</a>.', 'interrupt', timeNowDB())
write_notification(f'[Upgrade]: App upgraded from <code>{prev_version}</code> to \
<code>{new_version}</code> 🚀 Please clear the cache: \
<ol> <li>Click OK below</li> <li>Clear the browser cache (shift + \
browser refresh button)</li> <li> Clear app cache with the <i class="fa-solid fa-rotate"></i> \
(reload) button in the header</li><li>Go to Settings and click Save</li> </ol>\
Check out new features and what has changed in the \
<a href="https://github.com/jokob-sk/NetAlertX/releases" target="_blank">📓 release notes</a>.',
'interrupt',
timeNowDB()
)
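# Editorial sketch (not part of this commit): the backslash continuations above keep
# each following line's leading indentation inside the message string. That is mostly
# harmless here because the notification is rendered as HTML, but for plain-text
# output, adjacent f-string literals wrapped in parentheses avoid embedding the spaces:
upgrade_msg = (
    f'[Upgrade]: App upgraded from <code>{prev_version}</code> to '
    f'<code>{new_version}</code> 🚀 Please clear the cache.'
)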
# -----------------
# Initialization finished, update DB and API endpoints
@@ -717,13 +720,13 @@ def importConfigs(pm, db, all_plugins):
# settingsImported = None (timestamp),
# showSpinner = False (1/0),
# graphQLServerStarted = 1 (1/0))
updateState("Config imported", conf.lastImportedConfFile, conf.lastImportedConfFile, False, 1, None, None, new_version)
updateState("Config imported", conf.lastImportedConfFile, conf.lastImportedConfFile, False, 1, None, None, new_version)
msg = '[Config] Imported new settings config'
mylog('minimal', msg)
# front end app log logging
write_notification(msg, 'info', timeNowDB())
return pm, all_plugins, True

View File

@@ -1,19 +1,14 @@
import sys
import io
import datetime
# import datetime
import threading
import queue
import logging
from zoneinfo import ZoneInfo
# Register NetAlertX directories
INSTALL_PATH="/app"
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
# from zoneinfo import ZoneInfo
# NetAlertX imports
import conf
from const import *
from const import logPath
from utils.datetime_utils import timeNowTZ

View File

@@ -11,13 +11,9 @@ from flask import jsonify
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
sys.path.extend([f"{INSTALL_PATH}/server"])
from const import apiPath
from logger import mylog
import conf
from const import applicationPath, logPath, apiPath, confFileName, reportTemplatesPath
from logger import mylog
from utils.datetime_utils import timeNowDB
from const import apiPath # noqa: E402 [flake8 lint suppression]
from logger import mylog # noqa: E402 [flake8 lint suppression]
from utils.datetime_utils import timeNowDB # noqa: E402 [flake8 lint suppression]
NOTIFICATION_API_FILE = apiPath + 'user_notifications.json'
@@ -38,7 +34,7 @@ def write_notification(content, level="alert", timestamp=None):
None
"""
if timestamp is None:
timestamp = timeNowDB()
# Generate GUID
guid = str(uuid.uuid4())

View File

@@ -18,12 +18,12 @@ import sys
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
sys.path.extend([f"{INSTALL_PATH}/server"])
from helper import (
from helper import ( # noqa: E402 [flake8 lint suppression]
get_setting_value,
)
from logger import mylog
from db.sql_safe_builder import create_safe_condition_builder
from utils.datetime_utils import get_timezone_offset
from logger import mylog # noqa: E402 [flake8 lint suppression]
from db.sql_safe_builder import create_safe_condition_builder # noqa: E402 [flake8 lint suppression]
from utils.datetime_utils import get_timezone_offset # noqa: E402 [flake8 lint suppression]
# ===============================================================================
# REPORTING
@@ -56,14 +56,14 @@ def get_notifications(db):
WHERE eve_PendingAlertEmail = 1 AND eve_EventType not in ('Device Down', 'Down Reconnected', 'New Device' ) AND eve_MAC IN
(
SELECT devMac FROM Devices WHERE devAlertEvents = 0
)""")
)""")
# Disable down/down reconnected notifications on devices where devAlertDown is disabled
sql.execute("""UPDATE Events SET eve_PendingAlertEmail = 0
WHERE eve_PendingAlertEmail = 1 AND eve_EventType in ('Device Down', 'Down Reconnected') AND eve_MAC IN
(
SELECT devMac FROM Devices WHERE devAlertDown = 0
)""")
)""")
sections = get_setting_value("NTFPRCS_INCLUDED_SECTIONS")
@@ -79,20 +79,32 @@ def get_notifications(db):
safe_condition, parameters = condition_builder.get_safe_condition_legacy(
new_dev_condition_setting
)
sqlQuery = """SELECT eve_MAC as MAC, eve_DateTime as Datetime, devLastIP as IP, eve_EventType as "Event Type", devName as "Device name", devComments as Comments FROM Events_Devices
WHERE eve_PendingAlertEmail = 1
sqlQuery = """SELECT
eve_MAC as MAC,
eve_DateTime as Datetime,
devLastIP as IP,
eve_EventType as "Event Type",
devName as "Device name",
devComments as Comments FROM Events_Devices
WHERE eve_PendingAlertEmail = 1
AND eve_EventType = 'New Device' {}
ORDER BY eve_DateTime""".format(safe_condition)
ORDER BY eve_DateTime""".format(safe_condition)
except Exception as e:
mylog(
"verbose",
["[Notification] Error building safe condition for new devices: ", e],
)
# Fall back to safe default (no additional conditions)
sqlQuery = """SELECT eve_MAC as MAC, eve_DateTime as Datetime, devLastIP as IP, eve_EventType as "Event Type", devName as "Device name", devComments as Comments FROM Events_Devices
WHERE eve_PendingAlertEmail = 1
sqlQuery = """SELECT
eve_MAC as MAC,
eve_DateTime as Datetime,
devLastIP as IP,
eve_EventType as "Event Type",
devName as "Device name",
devComments as Comments FROM Events_Devices
WHERE eve_PendingAlertEmail = 1
AND eve_EventType = 'New Device'
ORDER BY eve_DateTime"""
ORDER BY eve_DateTime"""
parameters = {}
mylog("debug", ["[Notification] new_devices SQL query: ", sqlQuery])
@@ -114,17 +126,17 @@ def get_notifications(db):
minutes = int(get_setting_value("NTFPRCS_alert_down_time") or 0)
tz_offset = get_timezone_offset()
sqlQuery = f"""
SELECT devName, eve_MAC, devVendor, eve_IP, eve_DateTime, eve_EventType
FROM Events_Devices AS down_events
WHERE eve_PendingAlertEmail = 1
AND down_events.eve_EventType = 'Device Down'
AND eve_DateTime < datetime('now', '-{minutes} minutes', '{tz_offset}')
AND NOT EXISTS (
SELECT 1
FROM Events AS connected_events
WHERE connected_events.eve_MAC = down_events.eve_MAC
AND connected_events.eve_EventType = 'Connected'
AND connected_events.eve_DateTime > down_events.eve_DateTime
)
ORDER BY down_events.eve_DateTime;
"""
@@ -181,20 +193,32 @@ def get_notifications(db):
safe_condition, parameters = condition_builder.get_safe_condition_legacy(
event_condition_setting
)
sqlQuery = """SELECT eve_MAC as MAC, eve_DateTime as Datetime, devLastIP as IP, eve_EventType as "Event Type", devName as "Device name", devComments as Comments FROM Events_Devices
WHERE eve_PendingAlertEmail = 1
sqlQuery = """SELECT
eve_MAC as MAC,
eve_DateTime as Datetime,
devLastIP as IP,
eve_EventType as "Event Type",
devName as "Device name",
devComments as Comments FROM Events_Devices
WHERE eve_PendingAlertEmail = 1
AND eve_EventType IN ('Connected', 'Down Reconnected', 'Disconnected','IP Changed') {}
ORDER BY eve_DateTime""".format(safe_condition)
ORDER BY eve_DateTime""".format(safe_condition)
except Exception as e:
mylog(
"verbose",
["[Notification] Error building safe condition for events: ", e],
)
# Fall back to safe default (no additional conditions)
sqlQuery = """SELECT eve_MAC as MAC, eve_DateTime as Datetime, devLastIP as IP, eve_EventType as "Event Type", devName as "Device name", devComments as Comments FROM Events_Devices
WHERE eve_PendingAlertEmail = 1
sqlQuery = """SELECT
eve_MAC as MAC,
eve_DateTime as Datetime,
devLastIP as IP,
eve_EventType as "Event Type",
devName as "Device name",
devComments as Comments FROM Events_Devices
WHERE eve_PendingAlertEmail = 1
AND eve_EventType IN ('Connected', 'Down Reconnected', 'Disconnected','IP Changed')
ORDER BY eve_DateTime"""
ORDER BY eve_DateTime"""
parameters = {}
mylog("debug", ["[Notification] events SQL query: ", sqlQuery])
@@ -208,7 +232,17 @@ def get_notifications(db):
if "plugins" in sections:
# Compose Plugins Section
sqlQuery = """SELECT Plugin, Object_PrimaryId, Object_SecondaryId, DateTimeChanged, Watched_Value1, Watched_Value2, Watched_Value3, Watched_Value4, Status from Plugins_Events"""
sqlQuery = """SELECT
Plugin,
Object_PrimaryId,
Object_SecondaryId,
DateTimeChanged,
Watched_Value1,
Watched_Value2,
Watched_Value3,
Watched_Value4,
Status
from Plugins_Events"""
# Get the events as JSON
json_obj = db.get_table_as_json(sqlQuery)

View File

@@ -1,13 +1,12 @@
import json
import uuid
import socket
import subprocess
from yattag import indent
from json2table import convert
# Register NetAlertX modules
import conf
from const import applicationPath, logPath, apiPath, reportTemplatesPath
from const import logPath, apiPath, reportTemplatesPath
from logger import mylog, Logger
from helper import (
generate_mac_links,
@@ -62,11 +61,7 @@ class NotificationInstance:
# Check if nothing to report, end
if (
JSON["new_devices"] == []
and JSON["down_devices"] == []
and JSON["events"] == []
and JSON["plugins"] == []
and JSON["down_reconnected"] == []
JSON["new_devices"] == [] and JSON["down_devices"] == [] and JSON["events"] == [] and JSON["plugins"] == [] and JSON["down_reconnected"] == []
):
self.HasNotifications = False
else:
@@ -88,8 +83,6 @@ class NotificationInstance:
# else:
# mylog('debug', ['[Notification] notiStruc:', json.dumps(notiStruc.__dict__, indent=4)])
Text = ""
HTML = ""
template_file_path = reportTemplatesPath + "report_template.html"
# Open text Template
@@ -274,7 +267,7 @@ class NotificationInstance:
# Clear the Pending Email flag from all events and devices
def clearPendingEmailFlag(self):
# Clean Pending Alert Events
self.db.sql.execute("""
UPDATE Devices SET devLastNotification = ?
WHERE devMac IN (

View File

@@ -100,7 +100,7 @@ class UserEventsQueueInstance:
if not action or not isinstance(action, str):
msg = "[UserEventsQueueInstance] Invalid or missing action"
mylog('none', [msg])
return False, msg
try:
@@ -109,15 +109,11 @@ class UserEventsQueueInstance:
msg = f'[UserEventsQueueInstance] Action "{action}" added to the execution queue.'
mylog('minimal', [msg])
return True, msg
except Exception as e:
msg = f"[UserEventsQueueInstance] ERROR Failed to write to {self.log_file}: {e}"
mylog('none', [msg])
return False, msg
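# Editorial sketch of the validate-append-report pattern above (stand-alone and
# hypothetical; the real method lives on UserEventsQueueInstance and uses mylog):
def add_action(log_file: str, action: str) -> tuple[bool, str]:
    if not action or not isinstance(action, str):
        return False, "Invalid or missing action"
    try:
        with open(log_file, "a") as f:
            f.write(action + "\n")  # one queued action per line
        return True, f'Action "{action}" added to the execution queue.'
    except OSError as e:
        return False, f"ERROR Failed to write to {log_file}: {e}"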

View File

@@ -9,12 +9,21 @@ from concurrent.futures import ThreadPoolExecutor, as_completed
# Register NetAlertX modules
import conf
from const import pluginsPath, logPath, applicationPath, reportTemplatesPath
from logger import mylog, Logger
from helper import get_file_content, write_file, get_setting, get_setting_value
from logger import mylog, Logger
from helper import get_file_content, get_setting, get_setting_value
from utils.datetime_utils import timeNowTZ, timeNowDB
from app_state import updateState
from api import update_api
from utils.plugin_utils import logEventStatusCounts, get_plugin_string, get_plugin_setting_obj, print_plugin_info, list_to_csv, combine_plugin_objects, resolve_wildcards_arr, handle_empty, custom_plugin_decoder, decode_and_rename_files
from utils.plugin_utils import (
logEventStatusCounts,
get_plugin_setting_obj,
print_plugin_info,
list_to_csv,
combine_plugin_objects,
resolve_wildcards_arr,
handle_empty,
decode_and_rename_files
)
from models.notification_instance import NotificationInstance
from messaging.in_app import write_notification
from models.user_events_queue_instance import UserEventsQueueInstance
@@ -57,13 +66,7 @@ class plugin_manager:
# Header
updateState("Run: Plugins")
mylog(
"debug",
[
"[Plugins] Check if any plugins need to be executed on run type: ",
runType,
],
)
mylog("debug", f"[Plugins] Check if any plugins need to be executed on run type: {runType}")
for plugin in self.all_plugins:
shouldRun = False
@@ -72,7 +75,7 @@ class plugin_manager:
# 🔹 Lookup RUN setting from cache instead of calling get_plugin_setting_obj each time
run_setting = self._cache["settings"].get(prefix, {}).get("RUN")
if run_setting != None and run_setting["value"] == runType:
if run_setting is not None and run_setting["value"] == runType:
if runType != "schedule":
shouldRun = True
elif runType == "schedule":
@@ -91,10 +94,7 @@ class plugin_manager:
# 🔹 CMD also retrieved from cache
cmd_setting = self._cache["settings"].get(prefix, {}).get("CMD")
mylog(
"debug",
["[Plugins] CMD: ", cmd_setting["value"] if cmd_setting else None],
)
mylog("debug", f"[Plugins] CMD: {cmd_setting["value"] if cmd_setting else None}")
execute_plugin(self.db, self.all_plugins, plugin)
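# Editorial note with a minimal sketch: an f-string expression may not reuse the
# string's own quote character before Python 3.12 (PEP 701), which is why the CMD
# lookup above alternates quote styles:
d = {"value": "ARPSCAN"}  # illustrative stand-in for cmd_setting
print(f"CMD: {d['value'] if d else None}")  # -> CMD: ARPSCAN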
@@ -130,13 +130,7 @@ class plugin_manager:
mylog("debug", ["[check_and_run_user_event] User Execution Queue is empty"])
return # Exit early if the log file is empty
else:
mylog(
"debug",
[
"[check_and_run_user_event] Process User Execution Queue:"
+ ", ".join(map(str, lines))
],
)
mylog("debug", "[check_and_run_user_event] Process User Execution Queue:" + ", ".join(map(str, lines)))
for line in lines:
# Extract event name and parameters from the log line
@@ -160,15 +154,7 @@ class plugin_manager:
update_api(self.db, self.all_plugins, False, param.split(","), True)
else:
mylog(
"minimal",
[
"[check_and_run_user_event] WARNING: Unhandled event in execution queue: ",
event,
" | ",
param,
],
)
mylog("minimal", f"[check_and_run_user_event] WARNING: Unhandled event in execution queue: {event} | {param}")
execution_log.finalize_event(
event
) # Finalize unknown events to remove them
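# Editorial sketch (an inference from this diff, not NetAlertX's actual logger): the
# conversions above suggest mylog accepts either a single string or a list of parts;
# a shim honoring that contract joins list items before emitting the line:
def mylog_shim(level, message):
    text = "".join(map(str, message)) if isinstance(message, (list, tuple)) else str(message)
    print(f"[{level}] {text}")

mylog_shim("debug", ["count: ", 3])  # [debug] count: 3
mylog_shim("debug", "count: 3")      # [debug] count: 3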
@@ -183,9 +169,9 @@ class plugin_manager:
# -------------------------------------------------------------------------------
def handle_run(self, runType):
mylog('minimal', ['[', timeNowDB(), '] START Run: ', runType])
# run the plugin
for plugin in self.all_plugins:
if plugin["unique_prefix"] == runType:
@@ -201,7 +187,7 @@ class plugin_manager:
pluginsStates={pluginName: current_plugin_state.get(pluginName, {})}
)
mylog('minimal', ['[', timeNowDB(), '] END Run: ', runType])
return
@@ -210,7 +196,7 @@ class plugin_manager:
mylog("minimal", ["[", timeNowTZ(), "] [Test] START Test: ", runType])
mylog('minimal', ['[', timeNowDB(), '] [Test] START Test: ', runType])
# Prepare test samples
sample_json = json.loads(
get_file_content(reportTemplatesPath + "webhook_json_sample.json")
@@ -312,7 +298,7 @@ class plugin_param:
if param["type"] == "setting":
inputValue = get_setting(param["value"])
if inputValue != None:
if inputValue is not None:
setVal = inputValue["setValue"] # setting value
setTyp = inputValue["setType"] # setting type
@@ -337,9 +323,7 @@ class plugin_param:
resolved = list_to_csv(setVal)
else:
mylog(
"none", ["[Plugins] ⚠ ERROR: Parameter probably not converted."]
)
mylog("none", "[Plugins] ⚠ ERROR: Parameter probably not converted.")
return json.dumps(setVal)
# Get SQL result
@@ -390,15 +374,10 @@ def run_plugin(command, set_RUN_TIMEOUT, plugin):
)
except subprocess.CalledProcessError as e:
mylog("none", [e.output])
mylog("none", ["[Plugins] ⚠ ERROR - enable LOG_LEVEL=debug and check logs"])
mylog("none", "[Plugins] ⚠ ERROR - enable LOG_LEVEL=debug and check logs")
return None
except subprocess.TimeoutExpired:
mylog(
"none",
[
f"[Plugins] ⚠ ERROR - TIMEOUT - the plugin {plugin['unique_prefix']} forcefully terminated as timeout reached. Increase TIMEOUT setting and scan interval."
],
)
mylog("none", f"[Plugins] ⚠ ERROR - TIMEOUT - the plugin {plugin['unique_prefix']} forcefully terminated as timeout reached. Increase TIMEOUT setting and scan interval.")
return None
@@ -411,11 +390,11 @@ def execute_plugin(db, all_plugins, plugin):
set = get_plugin_setting_obj(plugin, "CMD")
# handle missing "function":"CMD" setting
if set == None:
if set is None:
return
set_CMD = set["value"]
# Replace hardcoded /app paths with environment-aware path
if "/app/front/plugins" in set_CMD:
set_CMD = set_CMD.replace("/app/front/plugins", str(pluginsPath))
@@ -441,13 +420,8 @@ def execute_plugin(db, all_plugins, plugin):
for param in plugin["params"]:
tempParam = plugin_param(param, plugin, db)
if tempParam.resolved == None:
mylog(
"none",
[
f'[Plugins] The parameter "name":"{tempParam.name}" for "value": {tempParam.value} was resolved as None'
],
)
if tempParam.resolved is None:
mylog("none", f'[Plugins] The parameter "name":"{tempParam.name}" for "value": {tempParam.value} was resolved as None')
else:
# params.append( [param["name"], resolved] )
@@ -456,14 +430,9 @@ def execute_plugin(db, all_plugins, plugin):
if tempParam.multiplyTimeout:
set_RUN_TIMEOUT = set_RUN_TIMEOUT * tempParam.paramValuesCount
mylog(
"debug",
[
f'[Plugins] The parameter "name":"{param["name"]}" will multiply the timeout {tempParam.paramValuesCount} times. Total timeout: {set_RUN_TIMEOUT}s'
],
)
mylog("debug", f'[Plugins] The parameter "name":"{param["name"]}" will multiply timeout {tempParam.paramValuesCount}x. Total timeout: {set_RUN_TIMEOUT}s')
mylog("debug", ["[Plugins] Timeout: ", set_RUN_TIMEOUT])
mylog("debug", f"[Plugins] Timeout: {set_RUN_TIMEOUT}")
# build SQL query parameters to insert into the DB
sqlParams = []
@@ -475,8 +444,8 @@ def execute_plugin(db, all_plugins, plugin):
command = resolve_wildcards_arr(set_CMD.split(), params)
# Execute command
mylog("verbose", ["[Plugins] Executing: ", set_CMD])
mylog("debug", ["[Plugins] Resolved : ", command])
mylog("verbose", f"[Plugins] Executing: {set_CMD}")
mylog("debug", f"[Plugins] Resolved : {command}")
# Using ThreadPoolExecutor to handle concurrent subprocesses
with ThreadPoolExecutor(max_workers=5) as executor:
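# Editorial sketch of the concurrency pattern here (commands are illustrative): each
# plugin command runs on a worker thread while subprocess.run enforces the timeout,
# mirroring the CalledProcessError/TimeoutExpired branches in run_plugin above.
import subprocess
from concurrent.futures import ThreadPoolExecutor, as_completed

def run_one(cmd, timeout):
    try:
        return subprocess.run(cmd, capture_output=True, text=True,
                              timeout=timeout, check=True).stdout
    except (subprocess.CalledProcessError, subprocess.TimeoutExpired):
        return None  # logged and skipped, as in the error handling above

with ThreadPoolExecutor(max_workers=5) as pool:
    futures = [pool.submit(run_one, ["echo", "hi"], 10)]
    results = [f.result() for f in as_completed(futures)]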
@@ -521,12 +490,7 @@ def execute_plugin(db, all_plugins, plugin):
columns = line.split("|")
# There have to be 9 or 13 columns
if len(columns) not in [9, 13]:
mylog(
"none",
[
f"[Plugins] Wrong number of input values, must be 9 or 13, got {len(columns)} from: {line}"
],
)
mylog("none", f"[Plugins] Wrong number of input values, must be 9 or 13, got {len(columns)} from: {line}")
continue # Skip lines with incorrect number of columns
# Common part of the SQL parameters
@@ -581,9 +545,7 @@ def execute_plugin(db, all_plugins, plugin):
# keep current instance log file, delete all from other nodes
if filename != "last_result.log" and os.path.exists(full_path):
os.remove(full_path) # DEBUG:TODO uncomment 🐛
mylog(
"verbose", [f"[Plugins] Processed and deleted file: {full_path} "]
)
mylog("verbose", f"[Plugins] Processed and deleted file: {full_path} ")
# app-db-query
if plugin["data_source"] == "app-db-query":
@@ -591,7 +553,7 @@ def execute_plugin(db, all_plugins, plugin):
q = set_CMD.replace("{s-quote}", "'")
# Execute command
mylog("verbose", ["[Plugins] Executing: ", q])
mylog("verbose", f"[Plugins] Executing: {q}")
# set_CMD should contain a SQL query
arr = db.get_sql_array(q)
@@ -650,7 +612,7 @@ def execute_plugin(db, all_plugins, plugin):
# Append the final parameters to sqlParams
sqlParams.append(tuple(base_params))
else:
mylog("none", ["[Plugins] Skipped invalid sql result"])
mylog("none", "[Plugins] Skipped invalid sql result")
# app-db-query
if plugin["data_source"] == "sqlite-db-query":
@@ -659,19 +621,14 @@ def execute_plugin(db, all_plugins, plugin):
q = set_CMD.replace("{s-quote}", "'")
# Execute command
mylog("verbose", ["[Plugins] Executing: ", q])
mylog("verbose", f"[Plugins] Executing: {q}")
# ------- necessary settings check --------
set = get_plugin_setting_obj(plugin, "DB_PATH")
# handle missing "function":"DB_PATH" setting
if set == None:
mylog(
"none",
[
"[Plugins] ⚠ ERROR: DB_PATH setting for plugin type sqlite-db-query missing."
],
)
if set is None:
mylog("none", "[Plugins] ⚠ ERROR: DB_PATH setting for plugin type sqlite-db-query missing.")
return
fullSqlitePath = set["value"]
@@ -679,25 +636,14 @@ def execute_plugin(db, all_plugins, plugin):
# try attaching the sqlite DB
try:
sql.execute(
"ATTACH DATABASE '"
+ fullSqlitePath
+ "' AS EXTERNAL_"
+ plugin["unique_prefix"]
"ATTACH DATABASE '" + fullSqlitePath + "' AS EXTERNAL_" + plugin["unique_prefix"]
)
arr = db.get_sql_array(q)
sql.execute("DETACH DATABASE EXTERNAL_" + plugin["unique_prefix"])
except sqlite3.Error as e:
mylog(
"none",
[
f"[Plugins] ⚠ ERROR: DB_PATH setting ({fullSqlitePath}) for plugin {plugin['unique_prefix']}. Did you mount it correctly?"
],
)
mylog(
"none",
["[Plugins] ⚠ ERROR: ATTACH DATABASE failed with SQL ERROR: ", e],
)
mylog("none", f"[Plugins] ⚠ ERROR: DB_PATH setting ({fullSqlitePath}) for plugin {plugin['unique_prefix']}. Did you mount it correctly?")
mylog("none", f"[Plugins] ⚠ ERROR: ATTACH DATABASE failed with SQL ERROR: {e}")
return
for row in arr:
@@ -748,24 +694,14 @@ def execute_plugin(db, all_plugins, plugin):
# Append the final parameters to sqlParams
sqlParams.append(tuple(base_params))
else:
mylog("none", ["[Plugins] Skipped invalid sql result"])
mylog("none", "[Plugins] Skipped invalid sql result")
# check if the subprocess / SQL query failed / there was no valid output
if len(sqlParams) == 0:
mylog(
"none",
[
f'[Plugins] No output received from the plugin "{plugin["unique_prefix"]}"'
],
)
mylog("none", f'[Plugins] No output received from the plugin "{plugin["unique_prefix"]}"')
else:
mylog(
"verbose",
[
f"[Plugins] SUCCESS for {plugin['unique_prefix']} received {len(sqlParams)} entries"
],
)
mylog("verbose", f"[Plugins] SUCCESS for {plugin['unique_prefix']} received {len(sqlParams)} entries")
# mylog('debug', ['[Plugins] sqlParam entries: ', sqlParams])
# create objects
@@ -782,12 +718,7 @@ def execute_plugin(db, all_plugins, plugin):
# check if we need to update devices api endpoint as well to prevent long user waits on Loading...
userUpdatedDevices = UserEventsQueueInstance().has_update_devices()
mylog(
"verbose",
[
f"[Plugins] Should I update API (userUpdatedDevices): {userUpdatedDevices}"
],
)
mylog("verbose", f"[Plugins] Should I update API (userUpdatedDevices): {userUpdatedDevices}")
if userUpdatedDevices:
endpoints += ["devices"]
@@ -807,7 +738,7 @@ def process_plugin_events(db, plugin, plugEventsArr):
pluginPref = plugin["unique_prefix"]
mylog("verbose", ["[Plugins] Processing : ", pluginPref])
mylog("verbose", f"[Plugins] Processing : {pluginPref}")
try:
# Begin a transaction
@@ -827,20 +758,8 @@ def process_plugin_events(db, plugin, plugEventsArr):
for eve in plugEventsArr:
pluginEvents.append(plugin_object_class(plugin, eve))
mylog(
"debug",
[
"[Plugins] Existing objects from Plugins_Objects: ",
len(pluginObjects),
],
)
mylog(
"debug",
[
"[Plugins] Logged events from the plugin run : ",
len(pluginEvents),
],
)
mylog("debug", f"[Plugins] Existing objects from Plugins_Objects: {len(pluginObjects)}")
mylog("debug", f"[Plugins] Logged events from the plugin run : {len(pluginEvents)}")
# Loop thru all current events and update the status to "exists" if the event matches an existing object
index = 0
@@ -857,8 +776,7 @@ def process_plugin_events(db, plugin, plugEventsArr):
if tmpObjFromEvent.status == "exists":
# compare hash of the changed watched columns for uniqueness - make sure you compare the values with the same idsHash before checking watchedHash
if any(
x.idsHash == tmpObjFromEvent.idsHash
and x.watchedHash != tmpObjFromEvent.watchedHash
x.idsHash == tmpObjFromEvent.idsHash and x.watchedHash != tmpObjFromEvent.watchedHash
for x in pluginObjects
):
pluginEvents[index].status = "watched-changed"
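# Editorial sketch of the two-hash comparison above (field names are illustrative):
# objects are matched on a hash of their identifying columns; a differing hash over
# the watched columns then marks the event as "watched-changed" instead of "exists".
def ids_hash(o):
    return hash((o["primaryId"], o["secondaryId"]))

def watched_hash(o):
    return hash((o["watched1"], o["watched2"]))

known = {"primaryId": "aa:bb:cc", "secondaryId": "-", "watched1": "192.168.1.5", "watched2": ""}
event = dict(known, watched1="192.168.1.9")
status = ("watched-changed"
          if ids_hash(known) == ids_hash(event) and watched_hash(known) != watched_hash(event)
          else "exists")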
@@ -879,7 +797,7 @@ def process_plugin_events(db, plugin, plugEventsArr):
# if wasn't missing before, mark as changed
if tmpObj.status != "missing-in-last-scan":
tmpObj.changed = timeNowDB()
tmpObj.status = "missing-in-last-scan"
tmpObj.status = "missing-in-last-scan"
# mylog('debug', [f'[Plugins] Missing from last scan (PrimaryID | SecondaryID): {tmpObj.primaryId} | {tmpObj.secondaryId}'])
# Merge existing plugin objects with newly discovered ones and update existing ones with new values
@@ -955,25 +873,17 @@ def process_plugin_events(db, plugin, plugEventsArr):
# combine all DB insert and update events into one for history
history_to_insert.append(values)
mylog("debug", ["[Plugins] pluginEvents count: ", len(pluginEvents)])
mylog("debug", ["[Plugins] pluginObjects count: ", len(pluginObjects)])
mylog("debug", f"[Plugins] pluginEvents count: {len(pluginEvents)}")
mylog("debug", f"[Plugins] pluginObjects count: {len(pluginObjects)}")
mylog(
"debug", ["[Plugins] events_to_insert count: ", len(events_to_insert)]
)
mylog(
"debug", ["[Plugins] history_to_insert count: ", len(history_to_insert)]
)
mylog(
"debug", ["[Plugins] objects_to_insert count: ", len(objects_to_insert)]
)
mylog(
"debug", ["[Plugins] objects_to_update count: ", len(objects_to_update)]
)
mylog("debug", f"[Plugins] events_to_insert count: {len(events_to_insert)}")
mylog("debug", f"[Plugins] history_to_insert count: {len(history_to_insert)}")
mylog("debug", f"[Plugins] objects_to_insert count: {len(objects_to_insert)}")
mylog("debug", f"[Plugins] objects_to_update count: {len(objects_to_update)}")
mylog("trace", ["[Plugins] objects_to_update: ", objects_to_update])
mylog("trace", ["[Plugins] events_to_insert: ", events_to_insert])
mylog("trace", ["[Plugins] history_to_insert: ", history_to_insert])
mylog("trace", f"[Plugins] objects_to_update: {objects_to_update}")
mylog("trace", f"[Plugins] events_to_insert: {events_to_insert}")
mylog("trace", f"[Plugins] history_to_insert: {history_to_insert}")
logEventStatusCounts("pluginEvents", pluginEvents)
logEventStatusCounts("pluginObjects", pluginObjects)
@@ -982,12 +892,12 @@ def process_plugin_events(db, plugin, plugEventsArr):
if objects_to_insert:
sql.executemany(
"""
INSERT INTO Plugins_Objects
("Plugin", "Object_PrimaryID", "Object_SecondaryID", "DateTimeCreated",
"DateTimeChanged", "Watched_Value1", "Watched_Value2", "Watched_Value3",
"Watched_Value4", "Status", "Extra", "UserData", "ForeignKey", "SyncHubNodeName",
"HelpVal1", "HelpVal2", "HelpVal3", "HelpVal4",
"ObjectGUID")
"HelpVal1", "HelpVal2", "HelpVal3", "HelpVal4",
"ObjectGUID")
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
""",
objects_to_insert,
@@ -998,10 +908,10 @@ def process_plugin_events(db, plugin, plugEventsArr):
sql.executemany(
"""
UPDATE Plugins_Objects
SET "Plugin" = ?, "Object_PrimaryID" = ?, "Object_SecondaryID" = ?, "DateTimeCreated" = ?,
"DateTimeChanged" = ?, "Watched_Value1" = ?, "Watched_Value2" = ?, "Watched_Value3" = ?,
"Watched_Value4" = ?, "Status" = ?, "Extra" = ?, "UserData" = ?, "ForeignKey" = ?, "SyncHubNodeName" = ?,
"HelpVal1" = ?, "HelpVal2" = ?, "HelpVal3" = ?, "HelpVal4" = ?,
SET "Plugin" = ?, "Object_PrimaryID" = ?, "Object_SecondaryID" = ?, "DateTimeCreated" = ?,
"DateTimeChanged" = ?, "Watched_Value1" = ?, "Watched_Value2" = ?, "Watched_Value3" = ?,
"Watched_Value4" = ?, "Status" = ?, "Extra" = ?, "UserData" = ?, "ForeignKey" = ?, "SyncHubNodeName" = ?,
"HelpVal1" = ?, "HelpVal2" = ?, "HelpVal3" = ?, "HelpVal4" = ?,
"ObjectGUID" = ?
WHERE "Index" = ?
""",
@@ -1012,12 +922,12 @@ def process_plugin_events(db, plugin, plugEventsArr):
if events_to_insert:
sql.executemany(
"""
INSERT INTO Plugins_Events
("Plugin", "Object_PrimaryID", "Object_SecondaryID", "DateTimeCreated",
"DateTimeChanged", "Watched_Value1", "Watched_Value2", "Watched_Value3",
"Watched_Value4", "Status", "Extra", "UserData", "ForeignKey", "SyncHubNodeName",
"HelpVal1", "HelpVal2", "HelpVal3", "HelpVal4",
"ObjectGUID")
"ObjectGUID")
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
""",
events_to_insert,
@@ -1027,12 +937,12 @@ def process_plugin_events(db, plugin, plugEventsArr):
if history_to_insert:
sql.executemany(
"""
INSERT INTO Plugins_History
("Plugin", "Object_PrimaryID", "Object_SecondaryID", "DateTimeCreated",
"DateTimeChanged", "Watched_Value1", "Watched_Value2", "Watched_Value3",
"Watched_Value4", "Status", "Extra", "UserData", "ForeignKey", "SyncHubNodeName",
"HelpVal1", "HelpVal2", "HelpVal3", "HelpVal4",
"ObjectGUID")
"ObjectGUID")
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
""",
history_to_insert,
@@ -1044,7 +954,7 @@ def process_plugin_events(db, plugin, plugEventsArr):
except Exception as e:
# Rollback the transaction in case of an error
conn.rollback()
mylog("none", ["[Plugins] ⚠ ERROR: ", e])
mylog("none", f"[Plugins] ⚠ ERROR: {e}")
raise e
# Perform database table mapping if enabled for the plugin
@@ -1056,7 +966,7 @@ def process_plugin_events(db, plugin, plugEventsArr):
dbTable = plugin["mapped_to_table"]
# Log a debug message indicating the mapping of objects to the database table.
mylog("debug", ["[Plugins] Mapping objects to database table: ", dbTable])
mylog("debug", f"[Plugins] Mapping objects to database table: {dbTable}")
# Initialize lists to hold mapped column names, columnsStr, and valuesStr for SQL query.
mappedCols = []
@@ -1121,8 +1031,7 @@ def process_plugin_events(db, plugin, plugEventsArr):
# Check if there's a default value specified for this column in the JSON.
if (
"mapped_to_column_data" in col
and "value" in col["mapped_to_column_data"]
"mapped_to_column_data" in col and "value" in col["mapped_to_column_data"]
):
tmpList.append(col["mapped_to_column_data"]["value"])
@@ -1133,8 +1042,8 @@ def process_plugin_events(db, plugin, plugEventsArr):
q = f"INSERT OR IGNORE INTO {dbTable} ({columnsStr}) VALUES ({valuesStr})"
# Log a debug message showing the generated SQL query for mapping.
mylog("debug", ["[Plugins] SQL query for mapping: ", q])
mylog("debug", ["[Plugins] SQL sqlParams for mapping: ", sqlParams])
mylog("debug", f"[Plugins] SQL query for mapping: {q}")
mylog("debug", f"[Plugins] SQL sqlParams for mapping: {sqlParams}")
# Execute the SQL query using 'sql.executemany()' and the 'sqlParams' list of tuples.
# This will insert multiple rows into the database in one go.

View File

@@ -1,14 +1,6 @@
import sys
import subprocess
import os
import re
import datetime
from dateutil import parser
# Register NetAlertX directories
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
sys.path.extend([f"{INSTALL_PATH}/server"])
from helper import get_setting_value, check_IP_format
from utils.datetime_utils import timeNowDB, normalizeTimeStamp
from logger import mylog, Logger
@@ -44,7 +36,7 @@ def exclude_ignored_devices(db):
# Join conditions and prepare the query
conditions_str = " OR ".join(conditions)
if conditions_str:
query = f"""DELETE FROM CurrentScan WHERE
query = f"""DELETE FROM CurrentScan WHERE
1=1
AND (
{conditions_str}
@@ -57,22 +49,23 @@ def exclude_ignored_devices(db):
sql.execute(query)
#-------------------------------------------------------------------------------
def update_devices_data_from_scan (db):
sql = db.sql #TO-DO
# -------------------------------------------------------------------------------
def update_devices_data_from_scan(db):
sql = db.sql # TO-DO
startTime = timeNowDB()
# Update Last Connection
mylog("debug", "[Update Devices] 1 Last Connection")
sql.execute(f"""UPDATE Devices SET devLastConnection = '{startTime}',
devPresentLastScan = 1
WHERE EXISTS (SELECT 1 FROM CurrentScan
WHERE devMac = cur_MAC) """)
# Clean no active devices
mylog("debug", "[Update Devices] 2 Clean no active devices")
sql.execute("""UPDATE Devices SET devPresentLastScan = 0
WHERE NOT EXISTS (SELECT 1 FROM CurrentScan
WHERE devMac = cur_MAC) """)
# Update IP
@@ -103,7 +96,7 @@ def update_devices_data_from_scan (db):
FROM CurrentScan
WHERE Devices.devMac = CurrentScan.cur_MAC
)
WHERE
(devVendor IS NULL OR devVendor IN ("", "null", "(unknown)", "(Unknown)"))
AND EXISTS (
SELECT 1
@@ -116,12 +109,12 @@ def update_devices_data_from_scan (db):
sql.execute("""UPDATE Devices
SET devParentPort = (
SELECT cur_Port
FROM CurrentScan
WHERE Devices.devMac = CurrentScan.cur_MAC
)
WHERE
(devParentPort IS NULL OR devParentPort IN ("", "null", "(unknown)", "(Unknown)"))
AND
EXISTS (
SELECT 1
FROM CurrentScan
@@ -139,9 +132,9 @@ def update_devices_data_from_scan (db):
FROM CurrentScan
WHERE Devices.devMac = CurrentScan.cur_MAC
)
WHERE
(devParentMAC IS NULL OR devParentMAC IN ("", "null", "(unknown)", "(Unknown)"))
AND
EXISTS (
SELECT 1
FROM CurrentScan
@@ -161,7 +154,7 @@ def update_devices_data_from_scan (db):
FROM CurrentScan
WHERE Devices.devMac = CurrentScan.cur_MAC
)
WHERE
(devSite IS NULL OR devSite IN ("", "null"))
AND EXISTS (
SELECT 1
@@ -178,7 +171,7 @@ def update_devices_data_from_scan (db):
FROM CurrentScan
WHERE Devices.devMac = CurrentScan.cur_MAC
)
WHERE
(devSSID IS NULL OR devSSID IN ("", "null"))
AND EXISTS (
SELECT 1
@@ -195,7 +188,7 @@ def update_devices_data_from_scan (db):
FROM CurrentScan
WHERE Devices.devMac = CurrentScan.cur_MAC
)
WHERE
(devType IS NULL OR devType IN ("", "null"))
AND EXISTS (
SELECT 1
@@ -208,17 +201,17 @@ def update_devices_data_from_scan (db):
mylog("debug", "[Update Devices] - (if not empty) cur_Name -> (if empty) devName")
sql.execute(""" UPDATE Devices
SET devName = COALESCE((
SELECT cur_Name
FROM CurrentScan
WHERE cur_MAC = devMac
AND cur_Name IS NOT NULL
AND cur_Name <> 'null'
AND cur_Name <> ''
), devName)
WHERE (devName IN ('(unknown)', '(name not found)', '')
OR devName IS NULL)
AND EXISTS (
SELECT 1
FROM CurrentScan
WHERE cur_MAC = devMac
AND cur_Name IS NOT NULL
@@ -425,9 +418,9 @@ def print_scan_stats(db):
mylog("verbose", f" {row['cur_ScanMethod']}: {row['scan_method_count']}")
#-------------------------------------------------------------------------------
def create_new_devices (db):
sql = db.sql # TO-DO
# -------------------------------------------------------------------------------
def create_new_devices(db):
sql = db.sql # TO-DO
startTime = timeNowDB()
# Insert events for new devices from CurrentScan (not yet in Devices)
@@ -474,36 +467,36 @@ def create_new_devices (db):
mylog("debug", "[New Devices] 2 Create devices")
# default New Device values preparation
newDevColumns = """devAlertEvents,
devAlertDown,
devPresentLastScan,
devIsArchived,
devIsNew,
devSkipRepeated,
devScan,
devOwner,
devFavorite,
devGroup,
devComments,
devLogEvents,
newDevColumns = """devAlertEvents,
devAlertDown,
devPresentLastScan,
devIsArchived,
devIsNew,
devSkipRepeated,
devScan,
devOwner,
devFavorite,
devGroup,
devComments,
devLogEvents,
devLocation,
devCustomProps,
devParentRelType,
devReqNicsOnline
"""
newDevDefaults = f"""{get_setting_value("NEWDEV_devAlertEvents")},
{get_setting_value("NEWDEV_devAlertDown")},
{get_setting_value("NEWDEV_devPresentLastScan")},
{get_setting_value("NEWDEV_devIsArchived")},
{get_setting_value("NEWDEV_devIsNew")},
{get_setting_value("NEWDEV_devSkipRepeated")},
{get_setting_value("NEWDEV_devScan")},
'{sanitize_SQL_input(get_setting_value("NEWDEV_devOwner"))}',
{get_setting_value("NEWDEV_devFavorite")},
'{sanitize_SQL_input(get_setting_value("NEWDEV_devGroup"))}',
'{sanitize_SQL_input(get_setting_value("NEWDEV_devComments"))}',
{get_setting_value("NEWDEV_devLogEvents")},
newDevDefaults = f"""{get_setting_value("NEWDEV_devAlertEvents")},
{get_setting_value("NEWDEV_devAlertDown")},
{get_setting_value("NEWDEV_devPresentLastScan")},
{get_setting_value("NEWDEV_devIsArchived")},
{get_setting_value("NEWDEV_devIsNew")},
{get_setting_value("NEWDEV_devSkipRepeated")},
{get_setting_value("NEWDEV_devScan")},
'{sanitize_SQL_input(get_setting_value("NEWDEV_devOwner"))}',
{get_setting_value("NEWDEV_devFavorite")},
'{sanitize_SQL_input(get_setting_value("NEWDEV_devGroup"))}',
'{sanitize_SQL_input(get_setting_value("NEWDEV_devComments"))}',
{get_setting_value("NEWDEV_devLogEvents")},
'{sanitize_SQL_input(get_setting_value("NEWDEV_devLocation"))}',
'{sanitize_SQL_input(get_setting_value("NEWDEV_devCustomProps"))}',
'{sanitize_SQL_input(get_setting_value("NEWDEV_devParentRelType"))}',
@@ -511,7 +504,7 @@ def create_new_devices (db):
"""
# Fetch data from CurrentScan skipping ignored devices by IP and MAC
query = """SELECT cur_MAC, cur_Name, cur_Vendor, cur_ScanMethod, cur_IP, cur_SyncHubNodeName, cur_NetworkNodeMAC, cur_PORT, cur_NetworkSite, cur_SSID, cur_Type
query = """SELECT cur_MAC, cur_Name, cur_Vendor, cur_ScanMethod, cur_IP, cur_SyncHubNodeName, cur_NetworkNodeMAC, cur_PORT, cur_NetworkSite, cur_SSID, cur_Type
FROM CurrentScan """
mylog("debug", f"[New Devices] Collecting New Devices Query: {query}")
@@ -554,40 +547,40 @@ def create_new_devices (db):
)
# Preparing the individual insert statement
sqlQuery = f"""INSERT OR IGNORE INTO Devices
sqlQuery = f"""INSERT OR IGNORE INTO Devices
(
devMac,
devName,
devVendor,
devLastIP,
devFirstConnection,
devLastConnection,
devSyncHubNode,
devGUID,
devParentMAC,
devParentPort,
devSite,
devSSID,
devType,
devSourcePlugin,
{newDevColumns}
)
VALUES
(
'{sanitize_SQL_input(cur_MAC)}',
'{sanitize_SQL_input(cur_Name)}',
'{sanitize_SQL_input(cur_Vendor)}',
'{sanitize_SQL_input(cur_IP)}',
?,
?,
'{sanitize_SQL_input(cur_SyncHubNodeName)}',
{sql_generateGuid},
'{sanitize_SQL_input(cur_NetworkNodeMAC)}',
'{sanitize_SQL_input(cur_PORT)}',
'{sanitize_SQL_input(cur_NetworkSite)}',
'{sanitize_SQL_input(cur_SSID)}',
'{sanitize_SQL_input(cur_Type)}',
'{sanitize_SQL_input(cur_ScanMethod)}',
{newDevDefaults}
)"""
@@ -598,7 +591,8 @@ def create_new_devices (db):
mylog("debug", "[New Devices] New Devices end")
db.commitDB()
#-------------------------------------------------------------------------------
# -------------------------------------------------------------------------------
# Check if plugins data changed
def check_plugin_data_changed(pm, plugins_to_check):
"""
@@ -630,7 +624,7 @@ def check_plugin_data_changed(pm, plugins_to_check):
for plugin_name in plugins_to_check:
last_data_change = pm.plugin_states.get(plugin_name, {}).get("lastDataChange")
last_data_check = pm.plugin_checks.get(plugin_name, "")
if not last_data_change:
@@ -639,13 +633,13 @@ def check_plugin_data_changed(pm, plugins_to_check):
# Normalize and validate last_changed timestamp
last_changed_ts = normalizeTimeStamp(last_data_change)
if last_changed_ts == None:
if last_changed_ts is None:
mylog('none', f'[check_plugin_data_changed] Unexpected last_data_change timestamp for {plugin_name} (input|output): ({last_data_change}|{last_changed_ts})')
# Normalize and validate last_data_check timestamp
last_data_check_ts = normalizeTimeStamp(last_data_check)
if last_data_check_ts == None:
if last_data_check_ts is None:
mylog('none', f'[check_plugin_data_changed] Unexpected last_data_check timestamp for {plugin_name} (input|output): ({last_data_check}|{last_data_check_ts})')
# Track which plugins have newer state than last_checked
@@ -660,15 +654,19 @@ def check_plugin_data_changed(pm, plugins_to_check):
# Continue if changes detected
for p in plugins_changed:
mylog('debug', f'[check_plugin_data_changed] {p} changed (last_data_change|last_data_check): ({pm.plugin_states.get(p, {}).get("lastDataChange")}|{pm.plugin_checks.get(p)})')
mylog(
'debug',
f'[check_plugin_data_changed] {p} changed (last_data_change|last_data_check): ({pm.plugin_states.get(p, {}).get("lastDataChange")}|{pm.plugin_checks.get(p)})'
)
return True
#-------------------------------------------------------------------------------
# -------------------------------------------------------------------------------
def update_devices_names(pm):
# --- Short-circuit if no name-resolution plugin has changed ---
if check_plugin_data_changed(pm, ["DIGSCAN", "NSLOOKUP", "NBTSCAN", "AVAHISCAN"]) == False:
if check_plugin_data_changed(pm, ["DIGSCAN", "NSLOOKUP", "NBTSCAN", "AVAHISCAN"]) is False:
mylog('debug', '[Update Device Name] No relevant plugin changes since last check.')
return
@@ -676,8 +674,8 @@ def update_devices_names(pm):
sql = pm.db.sql
resolver = NameResolver(pm.db)
device_handler = DeviceInstance(pm.db)
nameNotFound = "(name not found)"
# Define resolution strategies in priority order
@@ -722,8 +720,7 @@ def update_devices_names(pm):
# If a valid result is found, record it and stop further attempts
if (
newFQDN not in [nameNotFound, "", "localhost."]
and " communications error to " not in newFQDN
newFQDN not in [nameNotFound, "", "localhost."] and " communications error to " not in newFQDN
):
foundStats[label] += 1
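# Editorial sketch of the priority loop above (resolver callables are hypothetical):
invalid = {"(name not found)", "", "localhost."}

def resolve_name(mac, ip, resolvers):
    for label, resolve in resolvers:  # ordered by priority, e.g. DIGSCAN first
        candidate = resolve(mac, ip)
        if candidate not in invalid and " communications error to " not in candidate:
            return label, candidate  # first valid hit wins; no further attempts
    return None, "(name not found)"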
@@ -750,14 +747,14 @@ def update_devices_names(pm):
)
# Try resolving both name and FQDN
recordsToUpdate, recordsNotFound, foundStats, notFound = resolve_devices(
recordsToUpdate, recordsNotFound, fs, notFound = resolve_devices(
unknownDevices
)
# Log summary
mylog(
"verbose",
f"[Update Device Name] Names Found (DIGSCAN/AVAHISCAN/NSLOOKUP/NBTSCAN): {len(recordsToUpdate)} ({foundStats['DIGSCAN']}/{foundStats['AVAHISCAN']}/{foundStats['NSLOOKUP']}/{foundStats['NBTSCAN']})",
f"[Update Device Name] Names Found (DIGSCAN/AVAHISCAN/NSLOOKUP/NBTSCAN): {len(recordsToUpdate)} ({fs['DIGSCAN']}/{fs['AVAHISCAN']}/{fs['NSLOOKUP']}/{fs['NBTSCAN']})",
)
mylog("verbose", f"[Update Device Name] Names Not Found : {notFound}")
@@ -780,16 +777,14 @@ def update_devices_names(pm):
)
# Try resolving only FQDN
recordsToUpdate, _, foundStats, notFound = resolve_devices(
recordsToUpdate, _, fs, notFound = resolve_devices(
allDevices, resolve_both_name_and_fqdn=False
)
# Log summary
mylog(
"verbose",
f"[Update FQDN] Names Found (DIGSCAN/AVAHISCAN/NSLOOKUP/NBTSCAN): {len(recordsToUpdate)}"+
f"({foundStats['DIGSCAN']}/{foundStats['AVAHISCAN']}/{foundStats['NSLOOKUP']}"+
f"/{foundStats['NBTSCAN']})",
f"[Update FQDN] Names Found (DIGSCAN/AVAHISCAN/NSLOOKUP/NBTSCAN): {len(recordsToUpdate)}({fs['DIGSCAN']}/{fs['AVAHISCAN']}/{fs['NSLOOKUP']}/{fs['NBTSCAN']})",
)
mylog("verbose", f"[Update FQDN] Names Not Found : {notFound}")
@@ -803,7 +798,7 @@ def update_devices_names(pm):
# --- Step 3: Log last checked time ---
# After resolving names, update last checked
pm.plugin_checks = {"DIGSCAN": timeNowDB(), "AVAHISCAN": timeNowDB(), "NSLOOKUP": timeNowDB(), "NBTSCAN": timeNowDB() }
pm.plugin_checks = {"DIGSCAN": timeNowDB(), "AVAHISCAN": timeNowDB(), "NSLOOKUP": timeNowDB(), "NBTSCAN": timeNowDB()}
# -------------------------------------------------------------------------------
@@ -901,7 +896,6 @@ def query_MAC_vendor(pMAC):
# Search vendor in HW Vendors DB
mac_start_string6 = mac[0:6]
mac_start_string9 = mac[0:9]
try:
with open(filePath, "r") as f:

View File

@@ -1,16 +1,13 @@
import sys
import os
import re
import json
import base64
from pathlib import Path
from typing import Optional, Tuple
from logger import mylog
# Register NetAlertX directories
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
sys.path.extend([f"{INSTALL_PATH}/server"])
from logger import mylog
# Load MAC/device-type/icon rules from external file
MAC_TYPE_ICON_PATH = Path(f"{INSTALL_PATH}/back/device_heuristics_rules.json")
@@ -83,7 +80,7 @@ def match_vendor(vendor: str, default_type: str, default_icon: str) -> Tuple[str
for pattern in patterns:
# Only apply fallback when no MAC prefix is specified
mac_prefix = pattern.get("mac_prefix", "")
# mac_prefix = pattern.get("mac_prefix", "")
vendor_pattern = pattern.get("vendor", "").lower()
if vendor_pattern and vendor_pattern in vendor_lc:

View File

@@ -1,11 +1,4 @@
import sys
import os
import re
# Register NetAlertX directories
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
sys.path.extend([f"{INSTALL_PATH}/server"])
from logger import mylog
from helper import get_setting_value
@@ -31,7 +24,7 @@ class NameResolver:
# Check by MAC
sql.execute(f"""
SELECT Watched_Value2 FROM Plugins_Objects
WHERE Plugin = '{plugin}' AND Object_PrimaryID = '{pMAC}'
""")
result = sql.fetchall()
@@ -42,9 +35,9 @@ class NameResolver:
# Check name by IP if enabled
if get_setting_value('NEWDEV_IP_MATCH_NAME'):
sql.execute(f"""
SELECT Watched_Value2 FROM Plugins_Objects
WHERE Plugin = '{plugin}' AND Object_SecondaryID = '{pIP}'
""")
result = sql.fetchall()

View File

@@ -1,10 +1,3 @@
import sys
import os
# Register NetAlertX directories
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
sys.path.extend([f"{INSTALL_PATH}/server"])
from scan.device_handling import (
create_new_devices,
print_scan_stats,
@@ -14,7 +7,7 @@ from scan.device_handling import (
)
from helper import get_setting_value
from db.db_helper import print_table_schema
from utils.datetime_utils import timeNowDB, timeNowTZ
from utils.datetime_utils import timeNowDB
from logger import mylog, Logger
from messaging.reporting import skip_repeated_notifications
@@ -133,20 +126,20 @@ def create_sessions_snapshot(db):
db.commitDB()
#-------------------------------------------------------------------------------
def insert_events (db):
sql = db.sql #TO-DO
startTime = timeNowDB()
# -------------------------------------------------------------------------------
def insert_events(db):
sql = db.sql # TO-DO
startTime = timeNowDB()
# Check device down
mylog("debug", "[Events] - 1 - Devices down")
sql.execute(f"""INSERT INTO Events (eve_MAC, eve_IP, eve_DateTime,
eve_EventType, eve_AdditionalInfo,
eve_PendingAlertEmail)
SELECT devMac, devLastIP, '{startTime}', 'Device Down', '', 1
FROM Devices
WHERE devAlertDown != 0
AND devPresentLastScan = 1
AND NOT EXISTS (SELECT 1 FROM CurrentScan
WHERE devMac = cur_MAC
) """)
@@ -156,15 +149,15 @@ def insert_events (db):
sql.execute(f""" INSERT INTO Events (eve_MAC, eve_IP, eve_DateTime,
eve_EventType, eve_AdditionalInfo,
eve_PendingAlertEmail)
SELECT DISTINCT c.cur_MAC, c.cur_IP, '{startTime}',
CASE
WHEN last_event.eve_EventType = 'Device Down' and last_event.eve_PendingAlertEmail = 0 THEN 'Down Reconnected'
ELSE 'Connected'
END,
'',
1
FROM CurrentScan AS c
LEFT JOIN LatestEventsPerMAC AS last_event ON c.cur_MAC = last_event.eve_MAC
WHERE last_event.devPresentLastScan = 0 OR last_event.eve_MAC IS NULL
""")
@@ -190,7 +183,7 @@ def insert_events (db):
SELECT cur_MAC, cur_IP, '{startTime}', 'IP Changed',
'Previous IP: '|| devLastIP, devAlertEvents
FROM Devices, CurrentScan
WHERE devMac = cur_MAC
AND devLastIP <> cur_IP """)
mylog("debug", "[Events] - Events end")

View File

@@ -1,49 +1,43 @@
#!/usr/bin/env python
# !/usr/bin/env python
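# Editorial note: a shebang is only honored when the file begins with the two bytes
# "#!"; with the space added above, this line becomes an ordinary comment and the
# script is no longer directly executable, so the original "#!/usr/bin/env python"
# form is required for direct execution.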
import os
import pathlib
import sys
from datetime import datetime
# from datetime import datetime
from dateutil import parser
import datetime
import re
import pytz
from pytz import timezone
from typing import Union
from zoneinfo import ZoneInfo
import email.utils
# Register NetAlertX directories
INSTALL_PATH="/app"
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
import conf
from const import *
# from const import *
#-------------------------------------------------------------------------------
# -------------------------------------------------------------------------------
# DateTime
#-------------------------------------------------------------------------------
# -------------------------------------------------------------------------------
DATETIME_PATTERN = "%Y-%m-%d %H:%M:%S"
DATETIME_REGEX = re.compile(r'^\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}$')
def timeNowTZ():
if conf.tz:
return datetime.datetime.now(conf.tz).replace(microsecond=0)
else:
return datetime.datetime.now().replace(microsecond=0)
def timeNow():
return datetime.datetime.now().replace(microsecond=0)
def get_timezone_offset():
now = datetime.datetime.now(conf.tz)
offset_hours = now.utcoffset().total_seconds() / 3600
offset_formatted = "{:+03d}:{:02d}".format(int(offset_hours), int((offset_hours % 1) * 60))
return offset_formatted
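# Editorial sketch: the formatting above renders a fractional-hour UTC offset as
# '+HH:MM'; Python's modulo keeps the minute part non-negative for negative offsets.
offset_hours = 10.5
print("{:+03d}:{:02d}".format(int(offset_hours), int((offset_hours % 1) * 60)))  # +10:30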
def timeNowDB(local=True):
"""
Return the current time (local or UTC) as ISO 8601 for DB storage.
@@ -67,9 +61,9 @@ def timeNowDB(local=True):
return datetime.datetime.now(datetime.UTC).strftime(DATETIME_PATTERN)
#-------------------------------------------------------------------------------
# -------------------------------------------------------------------------------
# Date and time methods
#-------------------------------------------------------------------------------
# -------------------------------------------------------------------------------
def normalizeTimeStamp(inputTimeStamp):
"""
@@ -91,7 +85,7 @@ def normalizeTimeStamp(inputTimeStamp):
# Epoch timestamp (integer or float)
if isinstance(inputTimeStamp, (int, float)):
try:
return datetime.datetime.fromtimestamp(inputTimeStamp)
except (OSError, OverflowError, ValueError):
return None
@@ -125,6 +119,7 @@ def format_date_iso(date1: str) -> str:
dt = datetime.datetime.fromisoformat(date1) if isinstance(date1, str) else date1
return dt.isoformat()
# -------------------------------------------------------------------------------------------
def format_event_date(date_str: str, event_type: str) -> str:
"""Format event date with fallback rules."""
@@ -135,6 +130,7 @@ def format_event_date(date_str: str, event_type: str) -> str:
else:
return "<still connected>"
# -------------------------------------------------------------------------------------------
def ensure_datetime(dt: Union[str, datetime.datetime, None]) -> datetime.datetime:
if dt is None:
@@ -157,6 +153,7 @@ def parse_datetime(dt_str):
except ValueError:
return None
def format_date(date_str: str) -> str:
try:
dt = parse_datetime(date_str)
@@ -168,13 +165,14 @@ def format_date(date_str: str) -> str:
except (ValueError, AttributeError, TypeError):
return "invalid"
def format_date_diff(date1, date2, tz_name):
"""
Return difference between two datetimes as 'Xd HH:MM'.
Uses app timezone if datetime is naive.
date2 can be None (uses now).
"""
# Get timezone from settings
tz = pytz.timezone(tz_name)
def parse_dt(dt):
@@ -184,8 +182,8 @@ def format_date_diff(date1, date2, tz_name):
try:
dt_parsed = email.utils.parsedate_to_datetime(dt)
except (ValueError, TypeError):
# fallback: parse ISO string
dt_parsed = datetime.datetime.fromisoformat(dt)
# convert naive GMT/UTC to app timezone
if dt_parsed.tzinfo is None:
dt_parsed = tz.localize(dt_parsed)
@@ -208,4 +206,4 @@ def format_date_diff(date1, date2, tz_name):
"hours": hours,
"minutes": minutes,
"total_minutes": total_minutes
}
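# Editorial sketch of the parse fallback inside format_date_diff above: RFC 2822
# strings go through email.utils; anything else falls back to ISO 8601 parsing.
import datetime
import email.utils

def parse_flexible(value):
    try:
        return email.utils.parsedate_to_datetime(value)
    except (ValueError, TypeError):
        return datetime.datetime.fromisoformat(value)

parse_flexible("Sat, 22 Nov 2025 13:14:06 +1100")  # RFC 2822 path
parse_flexible("2025-11-22 13:14:06")              # ISO fallback path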

View File

@@ -1,6 +1,6 @@
import os
import json
from collections import namedtuple
import conf
from logger import mylog
from utils.crypto_utils import decrypt_data
@@ -220,9 +220,7 @@ def get_plugins_configs(loadAll):
# Load all plugins if `loadAll` is True, the plugin is in the enabled list,
# or no specific plugins are enabled (enabledPlugins is empty)
if (
loadAll
or plugJson["unique_prefix"] in enabledPlugins
or enabledPlugins == []
loadAll or plugJson["unique_prefix"] in enabledPlugins or enabledPlugins == []
):
# Load the contents of the config.json file as a JSON object and append it to pluginsList
pluginsList.append(plugJson)

View File

@@ -1,11 +1,4 @@
import sqlite3
import os
import sys
# Register NetAlertX directories
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
sys.path.extend([f"{INSTALL_PATH}/server"])
from logger import mylog, Logger
from helper import get_setting_value
from models.device_instance import DeviceInstance
@@ -15,7 +8,6 @@ from models.plugin_object_instance import PluginObjectInstance
Logger(get_setting_value("LOG_LEVEL"))
class Action:
"""Base class for all actions."""

View File

@@ -1,10 +1,3 @@
import os
import sys
# Register NetAlertX directories
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
sys.path.extend([f"{INSTALL_PATH}/server"])
from helper import get_setting_value
from logger import Logger
from const import sql_generateGuid
@@ -96,11 +89,11 @@ class AppEvent_obj:
"ObjectPrimaryID" TEXT,
"ObjectSecondaryID" TEXT,
"ObjectForeignKey" TEXT,
"ObjectIndex" TEXT,
"ObjectIsNew" BOOLEAN,
"ObjectIsArchived" BOOLEAN,
"ObjectIndex" TEXT,
"ObjectIsNew" BOOLEAN,
"ObjectIsArchived" BOOLEAN,
"ObjectStatusColumn" TEXT,
"ObjectStatus" TEXT,
"ObjectStatus" TEXT,
"AppEventType" TEXT,
"Helper1" TEXT,
"Helper2" TEXT,
@@ -117,11 +110,11 @@ class AppEvent_obj:
CREATE TRIGGER IF NOT EXISTS "{trigger_name}"
AFTER {event.upper()} ON "{table_name}"
WHEN NOT EXISTS (
SELECT 1 FROM AppEvents
WHERE AppEventProcessed = 0
SELECT 1 FROM AppEvents
WHERE AppEventProcessed = 0
AND ObjectType = '{table_name}'
AND ObjectGUID = {manage_prefix(config["fields"]["ObjectGUID"], event)}
AND ObjectStatus = {manage_prefix(config["fields"]["ObjectStatus"], event)}
AND ObjectStatus = {manage_prefix(config["fields"]["ObjectStatus"], event)}
AND AppEventType = '{event.lower()}'
)
BEGIN
@@ -142,10 +135,10 @@ class AppEvent_obj:
"AppEventType"
)
VALUES (
{sql_generateGuid},
DATETIME('now'),
FALSE,
'{table_name}',
{sql_generateGuid},
DATETIME('now'),
FALSE,
'{table_name}',
{manage_prefix(config["fields"]["ObjectGUID"], event)}, -- ObjectGUID
{manage_prefix(config["fields"]["ObjectPrimaryID"], event)}, -- ObjectPrimaryID
{manage_prefix(config["fields"]["ObjectSecondaryID"], event)}, -- ObjectSecondaryID

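To see what the template expands to, here is a hypothetical rendered form for an UPDATE trigger, assuming manage_prefix() resolves the configured fields to NEW."..." references (the devStatus column and the trimmed AppEvents schema are illustrative, not the project's actual schema):

import sqlite3

con = sqlite3.connect(":memory:")
con.executescript("""
CREATE TABLE Devices (devGUID TEXT, devName TEXT, devStatus TEXT);
CREATE TABLE AppEvents (ObjectType TEXT, ObjectGUID TEXT, ObjectStatus TEXT,
                        AppEventType TEXT, AppEventProcessed BOOLEAN);

CREATE TRIGGER trg_devices_update AFTER UPDATE ON Devices
WHEN NOT EXISTS (
    SELECT 1 FROM AppEvents
    WHERE AppEventProcessed = 0
      AND ObjectType = 'Devices'
      AND ObjectGUID = NEW."devGUID"
      AND ObjectStatus = NEW."devStatus"
      AND AppEventType = 'update'
)
BEGIN
    INSERT INTO AppEvents (ObjectType, ObjectGUID, ObjectStatus,
                           AppEventType, AppEventProcessed)
    VALUES ('Devices', NEW."devGUID", NEW."devStatus", 'update', 0);
END;
""")
con.execute("INSERT INTO Devices VALUES ('guid-1', 'router', 'online')")
con.execute("UPDATE Devices SET devName = 'router-2' WHERE devGUID = 'guid-1'")
print(con.execute("SELECT * FROM AppEvents").fetchall())
# the WHEN NOT EXISTS guard suppresses duplicate unprocessed events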
View File

@@ -1,12 +1,5 @@
import re
import json
import os
import sys
# Register NetAlertX directories
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
sys.path.extend([f"{INSTALL_PATH}/server"])
from logger import mylog, Logger
from helper import get_setting_value

View File

@@ -1,22 +1,17 @@
import json
import os
import sys
# Register NetAlertX directories
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
sys.path.extend([f"{INSTALL_PATH}/server"])
from const import fullConfFolder
from logger import mylog, Logger
from helper import get_setting_value
# Make sure log level is initialized correctly
Logger(get_setting_value("LOG_LEVEL"))
from workflows.triggers import Trigger
from workflows.conditions import ConditionGroup
from workflows.actions import DeleteObjectAction, RunPluginAction, UpdateFieldAction
# Make sure log level is initialized correctly
Logger(get_setting_value("LOG_LEVEL"))
class WorkflowManager:
def __init__(self, db):
self.db = db

View File

@@ -1,11 +1,4 @@
import json
import os
import sys
# Register NetAlertX directories
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
sys.path.extend([f"{INSTALL_PATH}/server"])
from logger import mylog, Logger
from helper import get_setting_value
from database import get_array_from_sql_rows
@@ -28,8 +21,7 @@ class Trigger:
self.event_type = triggerJson["event_type"]
self.event = event # Store the triggered event context, if provided
self.triggered = (
self.object_type == event["ObjectType"]
and self.event_type == event["AppEventType"]
self.object_type == event["ObjectType"] and self.event_type == event["AppEventType"]
)
mylog(
@@ -53,9 +45,9 @@ class Trigger:
raise ValueError(m)
query = f"""
SELECT * FROM
SELECT * FROM
{db_table}
WHERE {refField} = '{event["ObjectGUID"]}'
WHERE {refField} = '{event["ObjectGUID"]}'
"""
mylog("debug", [query])

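Because the GUID is interpolated into the SQL string, a parameterized equivalent is worth noting; a sketch of the same lookup with the value bound by the driver (an alternative, not the project's code):

import sqlite3

con = sqlite3.connect(":memory:")
con.execute("CREATE TABLE Devices (devGUID TEXT, devName TEXT)")
con.execute("INSERT INTO Devices VALUES ('guid-1', 'router')")

db_table, ref_field = "Devices", "devGUID"  # identifiers come from trusted config
event = {"ObjectGUID": "guid-1"}

query = f'SELECT * FROM "{db_table}" WHERE "{ref_field}" = ?'
print(con.execute(query, (event["ObjectGUID"],)).fetchall())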
View File

@@ -1 +1 @@
""" tests for NetAlertX """
""" tests for NetAlertX """

View File

@@ -7,9 +7,9 @@ import pytest
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from helper import get_setting_value
from utils.datetime_utils import timeNowDB
from api_server.api_server_start import app
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
from utils.datetime_utils import timeNowDB # noqa: E402 [flake8 lint suppression]
from api_server.api_server_start import app # noqa: E402 [flake8 lint suppression]
@pytest.fixture(scope="session")
@@ -26,7 +26,7 @@ def client():
@pytest.fixture(scope="session")
def test_mac():
# Generate a unique MAC for each test run
return "AA:BB:CC:" + ":".join(f"{random.randint(0,255):02X}" for _ in range(3))
return "AA:BB:CC:" + ":".join(f"{random.randint(0, 255):02X}" for _ in range(3))
def auth_headers(token):

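The test_mac fixture simply randomizes the three trailing octets under a fixed AA:BB:CC prefix, which is also what the wildcard cleanup ("AA:BB:CC:*") later relies on; run standalone:

import random

mac = "AA:BB:CC:" + ":".join(f"{random.randint(0, 255):02X}" for _ in range(3))
print(mac)  # e.g. AA:BB:CC:4F:07:C2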
View File

@@ -1,17 +1,17 @@
import sys
import pathlib
import sqlite3
# import pathlib
# import sqlite3
import random
import string
import uuid
# import string
# import uuid
import os
import pytest
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from helper import get_setting_value
from api_server.api_server_start import app
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
from api_server.api_server_start import app # noqa: E402 [flake8 lint suppression]
@pytest.fixture(scope="session")
@@ -28,7 +28,7 @@ def client():
@pytest.fixture
def test_mac():
# Generate a unique MAC for each test run
return "AA:BB:CC:" + ":".join(f"{random.randint(0,255):02X}" for _ in range(3))
return "AA:BB:CC:" + ":".join(f"{random.randint(0, 255):02X}" for _ in range(3))
def auth_headers(token):
@@ -38,7 +38,6 @@ def auth_headers(token):
def test_create_device(client, api_token, test_mac):
payload = {
"createNew": True,
"devType": "Test Device",
"devOwner": "Unit Test",
"devType": "Router",
"devVendor": "TestVendor",
@@ -103,7 +102,7 @@ def test_copy_device(client, api_token, test_mac):
# Step 2: Generate a target MAC
target_mac = "AA:BB:CC:" + ":".join(
f"{random.randint(0,255):02X}" for _ in range(3)
f"{random.randint(0, 255):02X}" for _ in range(3)
)
# Step 3: Copy device

View File

@@ -1,32 +1,36 @@
import sys
import pathlib
import sqlite3
# import pathlib
# import sqlite3
import base64
import random
import string
import uuid
# import string
# import uuid
import os
import pytest
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from helper import get_setting_value
from api_server.api_server_start import app
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
from api_server.api_server_start import app # noqa: E402 [flake8 lint suppression]
@pytest.fixture(scope="session")
def api_token():
return get_setting_value("API_TOKEN")
@pytest.fixture
def client():
with app.test_client() as client:
yield client
@pytest.fixture
def test_mac():
# Generate a unique MAC for each test run
return "AA:BB:CC:" + ":".join(f"{random.randint(0,255):02X}" for _ in range(3))
return "AA:BB:CC:" + ":".join(f"{random.randint(0, 255):02X}" for _ in range(3))
def auth_headers(token):
return {"Authorization": f"Bearer {token}"}
@@ -40,12 +44,13 @@ def create_dummy(client, api_token, test_mac):
"devType": "Router",
"devVendor": "TestVendor",
}
resp = client.post(f"/device/{test_mac}", json=payload, headers=auth_headers(api_token))
client.post(f"/device/{test_mac}", json=payload, headers=auth_headers(api_token))
def test_get_all_devices(client, api_token, test_mac):
# Ensure there is at least one device
create_dummy(client, api_token, test_mac)
# Fetch all devices
resp = client.get("/devices", headers=auth_headers(api_token))
assert resp.status_code == 200
@@ -59,7 +64,7 @@ def test_get_all_devices(client, api_token, test_mac):
def test_delete_devices_with_macs(client, api_token, test_mac):
# First create device so it exists
create_dummy(client, api_token, test_mac)
client.post(f"/device/{test_mac}", json={"createNew": True}, headers=auth_headers(api_token))
# Delete by MAC
@@ -67,6 +72,7 @@ def test_delete_devices_with_macs(client, api_token, test_mac):
assert resp.status_code == 200
assert resp.json.get("success") is True
def test_delete_all_empty_macs(client, api_token):
resp = client.delete("/devices/empty-macs", headers=auth_headers(api_token))
assert resp.status_code == 200
@@ -79,6 +85,7 @@ def test_delete_unknown_devices(client, api_token):
assert resp.status_code == 200
assert resp.json.get("success") is True
def test_export_devices_csv(client, api_token, test_mac):
# Create a device first
create_dummy(client, api_token, test_mac)
@@ -92,6 +99,7 @@ def test_export_devices_csv(client, api_token, test_mac):
# CSV should contain test_mac
assert test_mac in resp.data.decode()
def test_export_devices_json(client, api_token, test_mac):
# Create a device first
create_dummy(client, api_token, test_mac)
@@ -101,7 +109,7 @@ def test_export_devices_json(client, api_token, test_mac):
assert resp.status_code == 200
assert resp.is_json
data = resp.get_json()
assert any(dev.get("devMac") == test_mac for dev in data["data"])
assert any(dev.get("devMac") == test_mac for dev in data["data"])
def test_export_devices_invalid_format(client, api_token):
@@ -143,6 +151,7 @@ def test_export_import_cycle_base64(client, api_token, test_mac):
assert resp.json.get("inserted") >= 1
assert resp.json.get("skipped_lines") == []
def test_devices_totals(client, api_token, test_mac):
# 1. Create a dummy device
create_dummy(client, api_token, test_mac)
@@ -189,9 +198,10 @@ def test_devices_by_status(client, api_token, test_mac):
assert fav_data is not None
assert "&#9733" in fav_data["title"]
def test_delete_test_devices(client, api_token, test_mac):
# Delete by MAC
resp = client.delete("/devices", json={"macs": ["AA:BB:CC:*"]}, headers=auth_headers(api_token))
assert resp.status_code == 200
assert resp.json.get("success") is True
assert resp.json.get("success") is True

View File

@@ -1,37 +1,38 @@
import sys
import pathlib
import sqlite3
import random
import string
import uuid
import os
import pytest
from datetime import datetime, timedelta
import random
from datetime import timedelta
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from helper import get_setting_value
from utils.datetime_utils import timeNowTZ
from api_server.api_server_start import app
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
from utils.datetime_utils import timeNowTZ # noqa: E402 [flake8 lint suppression]
from api_server.api_server_start import app # noqa: E402 [flake8 lint suppression]
@pytest.fixture(scope="session")
def api_token():
return get_setting_value("API_TOKEN")
@pytest.fixture
def client():
with app.test_client() as client:
yield client
@pytest.fixture
def test_mac():
# Generate a unique MAC for each test run
return "AA:BB:CC:" + ":".join(f"{random.randint(0,255):02X}" for _ in range(3))
return "AA:BB:CC:" + ":".join(f"{random.randint(0, 255):02X}" for _ in range(3))
def auth_headers(token):
return {"Authorization": f"Bearer {token}"}
def create_event(client, api_token, mac, event="UnitTest Event", days_old=None):
payload = {"ip": "0.0.0.0", "event_type": event}
@@ -43,10 +44,12 @@ def create_event(client, api_token, mac, event="UnitTest Event", days_old=None):
return client.post(f"/events/create/{mac}", json=payload, headers=auth_headers(api_token))
def list_events(client, api_token, mac=None):
url = "/events" if mac is None else f"/events?mac={mac}"
return client.get(url, headers=auth_headers(api_token))
def test_create_event(client, api_token, test_mac):
# create event
resp = create_event(client, api_token, test_mac)
@@ -82,6 +85,7 @@ def test_delete_events_for_mac(client, api_token, test_mac):
assert resp.status_code == 200
assert len(resp.json.get("events", [])) == 0
def test_get_events_totals(client, api_token):
# 1. Request totals with default period
resp = client.get(
@@ -108,7 +112,6 @@ def test_get_events_totals(client, api_token):
assert len(data_month) == 6
def test_delete_all_events(client, api_token, test_mac):
# create two events
create_event(client, api_token, test_mac)
@@ -146,5 +149,3 @@ def test_delete_events_dynamic_days(client, api_token, test_mac):
events = resp.get_json().get("events", [])
mac_events = [ev for ev in events if ev.get("eve_MAC") == test_mac]
assert len(mac_events) == 1

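The days_old knob in create_event presumably backdates the event timestamp before posting; a minimal sketch of producing such a payload (the "date" key name is hypothetical):

from datetime import datetime, timedelta

days_old = 3
backdated = (datetime.now() - timedelta(days=days_old)).strftime("%Y-%m-%d %H:%M:%S")
payload = {"ip": "0.0.0.0", "event_type": "UnitTest Event", "date": backdated}
print(payload)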
View File

@@ -1,31 +1,30 @@
import sys
import pathlib
import sqlite3
import random
import string
import uuid
import pytest
from datetime import datetime, timedelta
INSTALL_PATH = "/app"
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from helper import get_setting_value
from api_server.api_server_start import app
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
from api_server.api_server_start import app # noqa: E402 [flake8 lint suppression]
@pytest.fixture(scope="session")
def api_token():
return get_setting_value("API_TOKEN")
@pytest.fixture
def client():
with app.test_client() as client:
yield client
@pytest.fixture
def test_mac():
# Generate a unique MAC for each test run
return "AA:BB:CC:" + ":".join(f"{random.randint(0,255):02X}" for _ in range(3))
return "AA:BB:CC:" + ":".join(f"{random.randint(0, 255):02X}" for _ in range(3))
def auth_headers(token):
return {"Authorization": f"Bearer {token}"}
@@ -37,6 +36,7 @@ def test_graphql_debug_get(client):
assert resp.status_code == 200
assert resp.data.decode() == "NetAlertX GraphQL server running."
def test_graphql_post_unauthorized(client):
"""POST /graphql without token should return 401"""
query = {"query": "{ devices { devName devMac } }"}
@@ -47,13 +47,14 @@ def test_graphql_post_unauthorized(client):
# --- DEVICES TESTS ---
def test_graphql_post_devices(client, api_token):
"""POST /graphql with a valid token should return device data"""
query = {
"query": """
{
devices {
devices {
devices {
devGUID
devGroup
devIsRandomMac
@@ -77,8 +78,8 @@ def test_graphql_post_devices(client, api_token):
assert isinstance(data["devices"]["devices"], list)
assert isinstance(data["devices"]["count"], int)
# --- SETTINGS TESTS ---
# --- SETTINGS TESTS ---
def test_graphql_post_settings(client, api_token):
"""POST /graphql should return settings data"""
query = {
@@ -97,8 +98,8 @@ def test_graphql_post_settings(client, api_token):
assert "settings" in data
assert isinstance(data["settings"]["settings"], list)
# --- LANGSTRINGS TESTS ---
# --- LANGSTRINGS TESTS ---
def test_graphql_post_langstrings_specific(client, api_token):
"""Retrieve a specific langString in a given language"""
query = {
@@ -167,4 +168,4 @@ def test_graphql_post_langstrings_all_languages(client, api_token):
assert data["enStrings"]["count"] >= 1
assert data["deStrings"]["count"] >= 1
# Ensure langCode matches
assert all(e["langCode"] == "en_us" for e in data["enStrings"]["langStrings"])
assert all(e["langCode"] == "en_us" for e in data["enStrings"]["langStrings"])

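Outside Flask's test client, the same devices query can be exercised with any HTTP client; a sketch assuming a locally reachable instance and the Bearer-token scheme used above (host, port, and token are placeholders):

import json
import urllib.request

query = {"query": "{ devices { count devices { devGUID devName devMac } } }"}
req = urllib.request.Request(
    "http://localhost:20211/graphql",  # hypothetical host/port
    data=json.dumps(query).encode(),
    headers={"Authorization": "Bearer <API_TOKEN>",
             "Content-Type": "application/json"},
)
with urllib.request.urlopen(req) as resp:
    print(json.load(resp))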
View File

@@ -1,17 +1,13 @@
import sys
import pathlib
import sqlite3
import random
import string
import uuid
import os
import pytest
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from helper import get_setting_value
from api_server.api_server_start import app
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
from api_server.api_server_start import app # noqa: E402 [flake8 lint suppression]
@pytest.fixture(scope="session")
@@ -28,7 +24,7 @@ def client():
@pytest.fixture
def test_mac():
# Generate a unique MAC for each test run
return "AA:BB:CC:" + ":".join(f"{random.randint(0,255):02X}" for _ in range(3))
return "AA:BB:CC:" + ":".join(f"{random.randint(0, 255):02X}" for _ in range(3))
def auth_headers(token):
@@ -36,6 +32,6 @@ def auth_headers(token):
def test_delete_history(client, api_token):
resp = client.delete(f"/history", headers=auth_headers(api_token))
resp = client.delete("/history", headers=auth_headers(api_token))
assert resp.status_code == 200
assert resp.json.get("success") is True

View File

@@ -5,8 +5,9 @@ import pytest
INSTALL_PATH = "/app"
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from helper import get_setting_value
from api_server.api_server_start import app
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
from api_server.api_server_start import app # noqa: E402 [flake8 lint suppression]
# ----------------------------
# Fixtures
@@ -15,14 +16,17 @@ from api_server.api_server_start import app
def api_token():
return get_setting_value("API_TOKEN")
@pytest.fixture
def client():
with app.test_client() as client:
yield client
def auth_headers(token):
return {"Authorization": f"Bearer {token}"}
# ----------------------------
# Logs Endpoint Tests
# ----------------------------
@@ -31,16 +35,18 @@ def test_clean_log(client, api_token):
assert resp.status_code == 200
assert resp.json.get("success") is True
def test_clean_log_not_allowed(client, api_token):
resp = client.delete("/logs?file=not_allowed.log", headers=auth_headers(api_token))
assert resp.status_code == 400
assert resp.json.get("success") is False
# ----------------------------
# Execution Queue Endpoint Tests
# ----------------------------
def test_add_to_execution_queue(client, api_token):
action_name = f"test_action_{random.randint(0,9999)}"
action_name = f"test_action_{random.randint(0, 9999)}"
resp = client.post(
"/logs/add-to-execution-queue",
json={"action": action_name},
@@ -50,6 +56,7 @@ def test_add_to_execution_queue(client, api_token):
assert resp.json.get("success") is True
assert action_name in resp.json.get("message", "")
def test_add_to_execution_queue_missing_action(client, api_token):
resp = client.post(
"/logs/add-to-execution-queue",

View File

@@ -1,11 +1,8 @@
# -----------------------------
# In-app notifications tests with cleanup
# -----------------------------
import json
import random
import string
import uuid
import pytest
import os
import sys
@@ -14,26 +11,31 @@ import sys
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from api_server.api_server_start import app
from messaging.in_app import NOTIFICATION_API_FILE # Import the path to notifications file
from helper import get_setting_value
from api_server.api_server_start import app # noqa: E402 [flake8 lint suppression]
from messaging.in_app import NOTIFICATION_API_FILE # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
@pytest.fixture(scope="session")
def api_token():
return get_setting_value("API_TOKEN")
@pytest.fixture
def client():
with app.test_client() as client:
yield client
def auth_headers(token):
return {"Authorization": f"Bearer {token}"}
@pytest.fixture
def random_content():
return "Test Notification " + "".join(random.choices(string.ascii_letters + string.digits, k=6))
@pytest.fixture
def notification_guid(client, api_token, random_content):
# Write a notification and return its GUID
@@ -50,6 +52,7 @@ def notification_guid(client, api_token, random_content):
assert guid is not None
return guid
@pytest.fixture(autouse=True)
def cleanup_notifications():
# Runs before and after each test
@@ -70,6 +73,7 @@ def cleanup_notifications():
with open(NOTIFICATION_API_FILE, "w") as f:
f.write(backup)
# -----------------------------
def test_write_notification(client, api_token, random_content):
resp = client.post(
@@ -80,6 +84,7 @@ def test_write_notification(client, api_token, random_content):
assert resp.status_code == 200
assert resp.json.get("success") is True
def test_get_unread_notifications(client, api_token, random_content):
client.post("/messaging/in-app/write", json={"content": random_content}, headers=auth_headers(api_token))
resp = client.get("/messaging/in-app/unread", headers=auth_headers(api_token))
@@ -87,22 +92,26 @@ def test_get_unread_notifications(client, api_token, random_content):
notifications = resp.json
assert any(n["content"] == random_content for n in notifications)
def test_mark_all_notifications_read(client, api_token, random_content):
client.post("/messaging/in-app/write", json={"content": random_content}, headers=auth_headers(api_token))
resp = client.post("/messaging/in-app/read/all", headers=auth_headers(api_token))
assert resp.status_code == 200
assert resp.json.get("success") is True
def test_mark_single_notification_read(client, api_token, notification_guid):
resp = client.post(f"/messaging/in-app/read/{notification_guid}", headers=auth_headers(api_token))
assert resp.status_code == 200
assert resp.json.get("success") is True
def test_delete_single_notification(client, api_token, notification_guid):
resp = client.delete(f"/messaging/in-app/delete/{notification_guid}", headers=auth_headers(api_token))
assert resp.status_code == 200
assert resp.json.get("success") is True
def test_delete_all_notifications(client, api_token, random_content):
# Add a notification first
client.post("/messaging/in-app/write", json={"content": random_content}, headers=auth_headers(api_token))

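The autouse cleanup fixture follows the standard save/yield/restore pattern; in generic form (a sketch with a stand-in path, not the project's NOTIFICATION_API_FILE handling verbatim):

import os
import pytest

STATE_FILE = "/tmp/notifications.json"  # stand-in path

@pytest.fixture(autouse=True)
def preserve_state():
    backup = ""
    if os.path.exists(STATE_FILE):
        with open(STATE_FILE) as f:
            backup = f.read()
    yield  # test body runs here
    with open(STATE_FILE, "w") as f:
        f.write(backup)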
View File

@@ -1,32 +1,31 @@
import sys
import pathlib
import sqlite3
import base64
import random
import string
import uuid
import os
import pytest
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from helper import get_setting_value
from api_server.api_server_start import app
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
from api_server.api_server_start import app # noqa: E402 [flake8 lint suppression]
@pytest.fixture(scope="session")
def api_token():
return get_setting_value("API_TOKEN")
@pytest.fixture
def client():
with app.test_client() as client:
yield client
@pytest.fixture
def test_mac():
# Generate a unique MAC for each test run
return "AA:BB:CC:" + ":".join(f"{random.randint(0,255):02X}" for _ in range(3))
return "AA:BB:CC:" + ":".join(f"{random.randint(0, 255):02X}" for _ in range(3))
def auth_headers(token):
return {"Authorization": f"Bearer {token}"}
@@ -40,7 +39,8 @@ def create_dummy(client, api_token, test_mac):
"devType": "Router",
"devVendor": "TestVendor",
}
resp = client.post(f"/device/{test_mac}", json=payload, headers=auth_headers(api_token))
client.post(f"/device/{test_mac}", json=payload, headers=auth_headers(api_token))
def test_wakeonlan_device(client, api_token, test_mac):
# 1. Ensure at least one device exists
@@ -73,6 +73,7 @@ def test_wakeonlan_device(client, api_token, test_mac):
assert data.get("success") is True
assert "WOL packet sent" in data.get("message", "")
def test_speedtest_endpoint(client, api_token):
# 1. Call the speedtest endpoint
resp = client.get("/nettools/speedtest", headers=auth_headers(api_token))
@@ -92,7 +93,8 @@ def test_speedtest_endpoint(client, api_token):
assert isinstance(data["output"], list)
# Optionally check that output lines are strings
assert all(isinstance(line, str) for line in data["output"])
def test_traceroute_device(client, api_token, test_mac):
# 1. Ensure at least one device exists
create_dummy(client, api_token, test_mac)
@@ -127,6 +129,7 @@ def test_traceroute_device(client, api_token, test_mac):
assert "output" in data
assert isinstance(data["output"], str)
@pytest.mark.parametrize("ip,expected_status", [
("8.8.8.8", 200),
("256.256.256.256", 400), # Invalid IP
@@ -147,6 +150,7 @@ def test_nslookup_endpoint(client, api_token, ip, expected_status):
assert data.get("success") is False
assert "error" in data
@pytest.mark.parametrize("ip,mode,expected_status", [
("127.0.0.1", "fast", 200),
pytest.param("127.0.0.1", "normal", 200, marks=pytest.mark.feature_complete),
@@ -172,6 +176,7 @@ def test_nmap_endpoint(client, api_token, ip, mode, expected_status):
assert data.get("success") is False
assert "error" in data
def test_nslookup_unauthorized(client):
# No auth headers
resp = client.post("/nettools/nslookup", json={"devLastIP": "8.8.8.8"})
@@ -180,6 +185,7 @@ def test_nslookup_unauthorized(client):
assert data.get("success") is False
assert data.get("error") == "Forbidden"
def test_nmap_unauthorized(client):
# No auth headers
resp = client.post("/nettools/nmap", json={"scan": "127.0.0.1", "mode": "fast"})
@@ -201,4 +207,4 @@ def test_internet_info_endpoint(client, api_token):
# Handle errors, e.g., curl failure
assert data.get("success") is False
assert "error" in data
assert "details" in data
assert "details" in data

View File

@@ -1,9 +1,5 @@
import sys
import pathlib
import sqlite3
import random
import string
import uuid
import os
import pytest
from datetime import datetime, timedelta
@@ -11,31 +7,35 @@ from datetime import datetime, timedelta
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from helper import get_setting_value
from utils.datetime_utils import timeNowTZ, timeNowDB
from api_server.api_server_start import app
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
from utils.datetime_utils import timeNowTZ, timeNowDB # noqa: E402 [flake8 lint suppression]
from api_server.api_server_start import app # noqa: E402 [flake8 lint suppression]
@pytest.fixture(scope="session")
def api_token():
return get_setting_value("API_TOKEN")
@pytest.fixture
def client():
with app.test_client() as client:
yield client
@pytest.fixture
def test_mac():
# Generate a unique MAC for each test run
return "AA:BB:CC:" + ":".join(f"{random.randint(0,255):02X}" for _ in range(3))
return "AA:BB:CC:" + ":".join(f"{random.randint(0, 255):02X}" for _ in range(3))
def auth_headers(token):
return {"Authorization": f"Bearer {token}"}
def test_create_device(client, api_token, test_mac):
payload = {
"createNew": True,
"devType": "Test Device",
"devOwner": "Unit Test",
"devType": "Router",
"devVendor": "TestVendor",
@@ -129,7 +129,7 @@ def test_device_session_events(client, api_token, test_mac):
# 2. Fetch session events with default type ('all') and period ('7 days')
resp = client.get(
f"/sessions/session-events?type=all&period=7 days",
"/sessions/session-events?type=all&period=7 days",
headers=auth_headers(api_token)
)
assert resp.status_code == 200
@@ -159,6 +159,7 @@ def test_device_session_events(client, api_token, test_mac):
sessions = resp_sessions.json["data"]
assert isinstance(sessions, list)
# -----------------------------
def test_delete_session(client, api_token, test_mac):
# First create session
@@ -180,15 +181,12 @@ def test_delete_session(client, api_token, test_mac):
assert not any(ses["ses_MAC"] == test_mac for ses in sessions)
def test_get_sessions_calendar(client, api_token, test_mac):
"""
Test the /sessions/calendar endpoint.
Creates sessions and ensures the calendar output is correct.
Cleans up test sessions after test.
"""
# --- Setup: create two sessions for the test MAC ---
now = timeNowTZ()
start1 = (now - timedelta(days=2)).isoformat(timespec="seconds")
@@ -256,4 +254,4 @@ def test_get_sessions_calendar(client, api_token, test_mac):
assert "<still connected>" in ses["tooltip"], f"End is None but session not marked as still connected: {ses}"
# --- Cleanup: delete all test sessions for this MAC ---
client.delete(f"/sessions/delete?mac={test_mac}", headers=auth_headers(api_token))
client.delete(f"/sessions/delete?mac={test_mac}", headers=auth_headers(api_token))

View File

@@ -1,36 +1,36 @@
import sys
import pathlib
import sqlite3
import random
import string
import uuid
import os
import pytest
from datetime import datetime, timedelta
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from helper import get_setting_value
from api_server.api_server_start import app
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
from api_server.api_server_start import app # noqa: E402 [flake8 lint suppression]
@pytest.fixture(scope="session")
def api_token():
return get_setting_value("API_TOKEN")
@pytest.fixture
def client():
with app.test_client() as client:
yield client
@pytest.fixture
def test_mac():
# Generate a unique MAC for each test run
return "AA:BB:CC:" + ":".join(f"{random.randint(0,255):02X}" for _ in range(3))
return "AA:BB:CC:" + ":".join(f"{random.randint(0, 255):02X}" for _ in range(3))
def auth_headers(token):
return {"Authorization": f"Bearer {token}"}
def test_get_setting_unauthorized(client):
resp = client.get("/settings/API_TOKEN") # no auth header
assert resp.status_code == 403

View File

@@ -6,16 +6,17 @@ Tests the fix for Issue #1210 - compound conditions with multiple AND/OR clauses
import sys
import pytest
import os
from unittest.mock import MagicMock
# Mock the logger module before importing SafeConditionBuilder
sys.modules['logger'] = MagicMock()
# Add parent directory to path for imports
import os
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))
from server.db.sql_safe_builder import SafeConditionBuilder
from server.db.sql_safe_builder import SafeConditionBuilder # noqa: E402 [flake8 lint suppression]
@pytest.fixture
@@ -100,6 +101,7 @@ def test_multiple_or_clauses(builder):
assert 'Device2' in param_values
assert 'Device3' in param_values
def test_mixed_and_or_clauses(builder):
"""Test mixed AND/OR logical operators."""
condition = "AND devName = 'Device1' OR devName = 'Device2' AND devFavorite = '1'"

View File

@@ -137,7 +137,7 @@ def test_unicode_support(builder, unicode_str):
@pytest.mark.parametrize("case", [
"", " ", "AND devName = ''", "AND devName = 'a'", "AND devName = '" + "x"*500 + "'"
"", " ", "AND devName = ''", "AND devName = 'a'", "AND devName = '" + "x" * 500 + "'"
])
def test_edge_cases(builder, case):
try:

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python3
# !/usr/bin/env python3
"""
Comprehensive SQL Injection Prevention Tests for NetAlertX
@@ -15,7 +15,7 @@ sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', 'server'))
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', 'server', 'db'))
# Now import our module
from sql_safe_builder import SafeConditionBuilder
from sql_safe_builder import SafeConditionBuilder # noqa: E402 [flake8 lint suppression]
@pytest.fixture
@@ -28,7 +28,7 @@ def test_sql_injection_attempt_single_quote(builder):
"""Test that single quote injection attempts are blocked."""
malicious_input = "'; DROP TABLE users; --"
condition, params = builder.get_safe_condition_legacy(malicious_input)
# Should return empty condition when invalid
assert condition == ""
assert params == {}
@@ -38,7 +38,7 @@ def test_sql_injection_attempt_union(builder):
"""Test that UNION injection attempts are blocked."""
malicious_input = "1' UNION SELECT * FROM passwords --"
condition, params = builder.get_safe_condition_legacy(malicious_input)
# Should return empty condition when invalid
assert condition == ""
assert params == {}
@@ -48,7 +48,7 @@ def test_sql_injection_attempt_or_true(builder):
"""Test that OR 1=1 injection attempts are blocked."""
malicious_input = "' OR '1'='1"
condition, params = builder.get_safe_condition_legacy(malicious_input)
# Should return empty condition when invalid
assert condition == ""
assert params == {}
@@ -58,7 +58,7 @@ def test_valid_simple_condition(builder):
"""Test that valid simple conditions are handled correctly."""
valid_input = "AND devName = 'Test Device'"
condition, params = builder.get_safe_condition_legacy(valid_input)
# Should create parameterized query
assert "AND devName = :" in condition
assert len(params) == 1
@@ -69,7 +69,7 @@ def test_empty_condition(builder):
"""Test that empty conditions are handled safely."""
empty_input = ""
condition, params = builder.get_safe_condition_legacy(empty_input)
# Should return empty condition
assert condition == ""
assert params == {}
@@ -79,7 +79,7 @@ def test_whitespace_only_condition(builder):
"""Test that whitespace-only conditions are handled safely."""
whitespace_input = " \n\t "
condition, params = builder.get_safe_condition_legacy(whitespace_input)
# Should return empty condition
assert condition == ""
assert params == {}
@@ -90,7 +90,7 @@ def test_multiple_conditions_valid(builder):
# Test with a single condition first (our current parser handles single conditions well)
valid_input = "AND devName = 'Device1'"
condition, params = builder.get_safe_condition_legacy(valid_input)
# Should create parameterized query
assert "devName = :" in condition
assert len(params) == 1
@@ -101,7 +101,7 @@ def test_disallowed_column_name(builder):
"""Test that non-whitelisted column names are rejected."""
invalid_input = "AND malicious_column = 'value'"
condition, params = builder.get_safe_condition_legacy(invalid_input)
# Should return empty condition when column not in whitelist
assert condition == ""
assert params == {}
@@ -111,7 +111,7 @@ def test_disallowed_operator(builder):
"""Test that non-whitelisted operators are rejected."""
invalid_input = "AND devName SOUNDS LIKE 'test'"
condition, params = builder.get_safe_condition_legacy(invalid_input)
# Should return empty condition when operator not allowed
assert condition == ""
assert params == {}
@@ -121,7 +121,7 @@ def test_nested_select_attempt(builder):
"""Test that nested SELECT attempts are blocked."""
malicious_input = "AND devName IN (SELECT password FROM users)"
condition, params = builder.get_safe_condition_legacy(malicious_input)
# Should return empty condition when nested SELECT detected
assert condition == ""
assert params == {}
@@ -131,7 +131,7 @@ def test_hex_encoding_attempt(builder):
"""Test that hex-encoded injection attempts are blocked."""
malicious_input = "AND 0x44524f50205441424c45"
condition, params = builder.get_safe_condition_legacy(malicious_input)
# Should return empty condition when hex encoding detected
assert condition == ""
assert params == {}
@@ -141,7 +141,7 @@ def test_comment_injection_attempt(builder):
"""Test that comment injection attempts are handled."""
malicious_input = "AND devName = 'test' /* comment */ --"
condition, params = builder.get_safe_condition_legacy(malicious_input)
# Comments should be stripped and condition validated
if condition:
assert "/*" not in condition
@@ -152,7 +152,7 @@ def test_special_placeholder_replacement(builder):
"""Test that {s-quote} placeholder is safely replaced."""
input_with_placeholder = "AND devName = {s-quote}Test{s-quote}"
condition, params = builder.get_safe_condition_legacy(input_with_placeholder)
# Should handle placeholder safely
if condition:
assert "{s-quote}" not in condition
@@ -163,7 +163,7 @@ def test_null_byte_injection(builder):
"""Test that null byte injection attempts are blocked."""
malicious_input = "AND devName = 'test\x00' DROP TABLE --"
condition, params = builder.get_safe_condition_legacy(malicious_input)
# Null bytes should be sanitized
if condition:
assert "\x00" not in condition
@@ -178,7 +178,7 @@ def test_build_condition_with_allowed_values(builder):
{"column": "devName", "operator": "LIKE", "value": "%test%"}
]
condition, params = builder.build_condition(conditions, "AND")
# Should create valid parameterized condition
assert "eve_EventType = :" in condition
assert "devName LIKE :" in condition
@@ -191,7 +191,7 @@ def test_build_condition_with_invalid_column(builder):
{"column": "invalid_column", "operator": "=", "value": "test"}
]
condition, params = builder.build_condition(conditions)
# Should return empty when invalid column
assert condition == ""
assert params == {}
@@ -204,7 +204,7 @@ def test_case_variations_injection(builder):
"oR 1=1",
"UnIoN SeLeCt * FrOm users"
]
for malicious_input in malicious_inputs:
condition, params = builder.get_safe_condition_legacy(malicious_input)
# Should handle case variations safely
@@ -217,7 +217,7 @@ def test_time_based_injection_attempt(builder):
"""Test that time-based injection attempts are blocked."""
malicious_input = "AND IF(1=1, SLEEP(5), 0)"
condition, params = builder.get_safe_condition_legacy(malicious_input)
# Should return empty condition when SQL functions detected
assert condition == ""
assert params == {}
@@ -227,7 +227,7 @@ def test_stacked_queries_attempt(builder):
"""Test that stacked query attempts are blocked."""
malicious_input = "'; INSERT INTO admin VALUES ('hacker', 'password'); --"
condition, params = builder.get_safe_condition_legacy(malicious_input)
# Should return empty condition when semicolon detected
assert condition == ""
assert params == {}

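The contract these assertions rely on is uniform: valid input yields a parameterized fragment plus its bindings, while anything suspicious yields ('', {}). A usage sketch (assuming the no-argument constructor the fixtures suggest):

from sql_safe_builder import SafeConditionBuilder

builder = SafeConditionBuilder()
condition, params = builder.get_safe_condition_legacy("AND devName = 'Test Device'")
if condition:  # e.g. "AND devName = :param_xxx"
    sql = "SELECT * FROM Devices WHERE 1=1 " + condition
    # cursor.execute(sql, params)  # values bound safely by the driver
else:
    sql = "SELECT * FROM Devices"  # invalid/malicious input: no filter applied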
View File

@@ -13,16 +13,15 @@ import unittest
import sqlite3
import tempfile
import os
from unittest.mock import Mock, patch, MagicMock
from unittest.mock import Mock, patch
# Add the server directory to the path for imports
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/server"])
sys.path.append('/home/dell/coding/bash/10x-agentic-setup/netalertx-sql-fix/server')
from db.sql_safe_builder import SafeConditionBuilder, create_safe_condition_builder
from database import DB
from messaging.reporting import get_notifications
from db.sql_safe_builder import SafeConditionBuilder # noqa: E402 [flake8 lint suppression]
from messaging.reporting import get_notifications # noqa: E402 [flake8 lint suppression]
class TestSafeConditionBuilder(unittest.TestCase):
@@ -83,7 +82,7 @@ class TestSafeConditionBuilder(unittest.TestCase):
def test_build_simple_condition_valid(self):
"""Test building valid simple conditions."""
sql, params = self.builder._build_simple_condition('AND', 'devName', '=', 'TestDevice')
self.assertIn('AND devName = :param_', sql)
self.assertEqual(len(params), 1)
self.assertIn('TestDevice', params.values())
@@ -92,20 +91,20 @@ class TestSafeConditionBuilder(unittest.TestCase):
"""Test that invalid column names are rejected."""
with self.assertRaises(ValueError) as context:
self.builder._build_simple_condition('AND', 'invalid_column', '=', 'value')
self.assertIn('Invalid column name', str(context.exception))
def test_build_simple_condition_invalid_operator(self):
"""Test that invalid operators are rejected."""
with self.assertRaises(ValueError) as context:
self.builder._build_simple_condition('AND', 'devName', 'UNION', 'value')
self.assertIn('Invalid operator', str(context.exception))
def test_build_in_condition_valid(self):
"""Test building valid IN conditions."""
sql, params = self.builder._build_in_condition('AND', 'eve_EventType', 'IN', "'Connected', 'Disconnected'")
self.assertIn('AND eve_EventType IN', sql)
self.assertEqual(len(params), 2)
self.assertIn('Connected', params.values())
@@ -114,7 +113,7 @@ class TestSafeConditionBuilder(unittest.TestCase):
def test_build_null_condition(self):
"""Test building NULL check conditions."""
sql, params = self.builder._build_null_condition('AND', 'devComments', 'IS NULL')
self.assertEqual(sql, 'AND devComments IS NULL')
self.assertEqual(len(params), 0)
@@ -154,7 +153,7 @@ class TestSafeConditionBuilder(unittest.TestCase):
def test_device_name_filter(self):
"""Test the device name filter helper method."""
sql, params = self.builder.build_device_name_filter("TestDevice")
self.assertIn('AND devName = :device_name_', sql)
self.assertIn('TestDevice', params.values())
@@ -162,14 +161,13 @@ class TestSafeConditionBuilder(unittest.TestCase):
"""Test the event type filter helper method."""
event_types = ['Connected', 'Disconnected']
sql, params = self.builder.build_event_type_filter(event_types)
self.assertIn('AND eve_EventType IN', sql)
self.assertEqual(len(params), 2)
self.assertIn('Connected', params.values())
self.assertIn('Disconnected', params.values())
class TestDatabaseParameterSupport(unittest.TestCase):
"""Test that database layer supports parameterized queries."""
@@ -177,7 +175,7 @@ class TestDatabaseParameterSupport(unittest.TestCase):
"""Set up test database."""
self.temp_db = tempfile.NamedTemporaryFile(delete=False, suffix='.db')
self.temp_db.close()
# Create test database
self.conn = sqlite3.connect(self.temp_db.name)
self.conn.execute('''CREATE TABLE test_table (
@@ -197,23 +195,23 @@ class TestDatabaseParameterSupport(unittest.TestCase):
def test_parameterized_query_execution(self):
"""Test that parameterized queries work correctly."""
cursor = self.conn.cursor()
# Test named parameters
cursor.execute("SELECT * FROM test_table WHERE name = :name", {'name': 'test1'})
results = cursor.fetchall()
self.assertEqual(len(results), 1)
self.assertEqual(results[0][1], 'test1')
def test_parameterized_query_prevents_injection(self):
"""Test that parameterized queries prevent SQL injection."""
cursor = self.conn.cursor()
# This should not cause SQL injection
malicious_input = "'; DROP TABLE test_table; --"
cursor.execute("SELECT * FROM test_table WHERE name = :name", {'name': malicious_input})
results = cursor.fetchall()
# results = cursor.fetchall()
# The table should still exist and be queryable
cursor.execute("SELECT COUNT(*) FROM test_table")
count = cursor.fetchone()[0]
@@ -228,7 +226,7 @@ class TestReportingSecurityIntegration(unittest.TestCase):
self.mock_db = Mock()
self.mock_db.sql = Mock()
self.mock_db.get_table_as_json = Mock()
# Mock successful JSON response
mock_json_obj = Mock()
mock_json_obj.columnNames = ['MAC', 'Datetime', 'IP', 'Event Type', 'Device name', 'Comments']
@@ -245,7 +243,7 @@ class TestReportingSecurityIntegration(unittest.TestCase):
}.get(key, '')
# Call the function
result = get_notifications(self.mock_db)
get_notifications(self.mock_db)
# Verify that get_table_as_json was called with parameters
self.mock_db.get_table_as_json.assert_called()
@@ -265,7 +263,6 @@ class TestReportingSecurityIntegration(unittest.TestCase):
# Ensure the parameter dict has the correct value (using actual param name)
self.assertEqual(list(params.values())[0], "TestDevice")
@patch('messaging.reporting.get_setting_value')
def test_events_section_security(self, mock_get_setting):
"""Test that events section uses safe SQL building."""
@@ -276,7 +273,7 @@ class TestReportingSecurityIntegration(unittest.TestCase):
}.get(key, '')
# Call the function
result = get_notifications(self.mock_db)
get_notifications(self.mock_db)
# Verify that get_table_as_json was called with parameters
self.mock_db.get_table_as_json.assert_called()
@@ -291,7 +288,7 @@ class TestReportingSecurityIntegration(unittest.TestCase):
}.get(key, '')
# Call the function - should not raise an exception
result = get_notifications(self.mock_db)
get_notifications(self.mock_db)
# Should still call get_table_as_json (with safe fallback query)
self.mock_db.get_table_as_json.assert_called()
@@ -306,7 +303,7 @@ class TestReportingSecurityIntegration(unittest.TestCase):
}.get(key, '')
# Call the function
result = get_notifications(self.mock_db)
get_notifications(self.mock_db)
# Should call get_table_as_json
self.mock_db.get_table_as_json.assert_called()
@@ -322,12 +319,12 @@ class TestSecurityBenchmarks(unittest.TestCase):
def test_performance_simple_condition(self):
"""Test performance of simple condition building."""
import time
start_time = time.time()
for _ in range(1000):
sql, params = self.builder.build_safe_condition("AND devName = 'TestDevice'")
end_time = time.time()
execution_time = end_time - start_time
self.assertLess(execution_time, 1.0, "Simple condition building should be fast")
@@ -339,7 +336,7 @@ class TestSecurityBenchmarks(unittest.TestCase):
self.skipTest("psutil not available")
return
import os
process = psutil.Process(os.getpid())
initial_memory = process.memory_info().rss
@@ -350,7 +347,7 @@ class TestSecurityBenchmarks(unittest.TestCase):
final_memory = process.memory_info().rss
memory_increase = final_memory - initial_memory
# Memory increase should be reasonable (less than 10MB)
self.assertLess(memory_increase, 10 * 1024 * 1024, "Memory usage should be reasonable")
@@ -376,4 +373,4 @@ class TestSecurityBenchmarks(unittest.TestCase):
if __name__ == '__main__':
# Run the test suite
unittest.main(verbosity=2)
unittest.main(verbosity=2)

Some files were not shown because too many files have changed in this diff.