BE: linting fixes

Signed-off-by: jokob-sk <jokob.sk@gmail.com>
jokob-sk
2025-11-22 13:14:06 +11:00
parent f0abd500d9
commit 5c14b34a8b
104 changed files with 2163 additions and 2199 deletions

View File

@@ -1,6 +1,6 @@
import json
import os
import sys
def merge_translations(main_file, other_files):
# Load main file
@@ -30,10 +30,14 @@ def merge_translations(main_file, other_files):
json.dump(data, f, indent=4, ensure_ascii=False)
f.truncate()
if __name__ == "__main__":
current_path = os.path.dirname(os.path.abspath(__file__))
# language codes can be found here: http://www.lingoes.net/en/translator/langcode.htm
# "en_us.json" has to be first!
json_files = [ "en_us.json", "ar_ar.json", "ca_ca.json", "cs_cz.json", "de_de.json", "es_es.json", "fa_fa.json", "fr_fr.json", "it_it.json", "ja_jp.json", "nb_no.json", "pl_pl.json", "pt_br.json", "pt_pt.json", "ru_ru.json", "sv_sv.json", "tr_tr.json", "uk_ua.json", "zh_cn.json"]
# "en_us.json" has to be first!
json_files = ["en_us.json", "ar_ar.json", "ca_ca.json", "cs_cz.json", "de_de.json",
"es_es.json", "fa_fa.json", "fr_fr.json", "it_it.json", "ja_jp.json",
"nb_no.json", "pl_pl.json", "pt_br.json", "pt_pt.json", "ru_ru.json",
"sv_sv.json", "tr_tr.json", "uk_ua.json", "zh_cn.json"]
file_paths = [os.path.join(current_path, file) for file in json_files]
merge_translations(file_paths[0], file_paths[1:])
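For context, the hunk above only re-wraps the json_files list; the surrounding merge_translations() loads the main file and rewrites each locale file with json.dump(..., indent=4, ensure_ascii=False) followed by f.truncate(). The sketch below is an assumption about what such a merge step typically does (copy keys missing from each locale file from en_us.json); it is not the repository's exact implementation, and the merge rule is illustrative.

import json

def merge_translations_sketch(main_file, other_files):
    # Load the reference language (en_us.json in the list above)
    with open(main_file, encoding="utf-8") as f:
        main_data = json.load(f)

    for path in other_files:
        with open(path, "r+", encoding="utf-8") as f:
            data = json.load(f)
            # Assumed merge rule: fill in keys the locale file is missing
            for key, value in main_data.items():
                data.setdefault(key, value)
            f.seek(0)
            json.dump(data, f, indent=4, ensure_ascii=False)
            f.truncate()  # drop leftover bytes if the new JSON is shorter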

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python
# !/usr/bin/env python
import os
import sys
@@ -8,12 +8,12 @@ from pytz import timezone
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from const import logPath
from plugin_helper import Plugin_Objects
from logger import mylog, Logger
from helper import get_setting_value
from const import logPath # noqa: E402, E261 [flake8 lint suppression]
from plugin_helper import Plugin_Objects # noqa: E402, E261 [flake8 lint suppression]
from logger import mylog, Logger # noqa: E402, E261 [flake8 lint suppression]
from helper import get_setting_value # noqa: E402, E261 [flake8 lint suppression]
import conf
import conf # noqa: E402, E261 [flake8 lint suppression]
# Make sure the TIMEZONE for logging is correct
conf.tz = timezone(get_setting_value('TIMEZONE'))
@@ -32,9 +32,8 @@ RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
plugin_objects = Plugin_Objects(RESULT_FILE)
def main():
mylog('verbose', [f'[{pluginName}] In script'])
mylog('verbose', [f'[{pluginName}] In script'])
# Retrieve configuration settings
some_setting = get_setting_value('SYNC_plugins')
@@ -47,14 +46,14 @@ def main():
# Process the data into native application tables
if len(device_data) > 0:
# insert devices into the last_result.log
# make sure the below mapping is mapped in config.json, for example:
# insert devices into the last_result.log
# make sure the below mapping is mapped in config.json, for example:
# "database_column_definitions": [
# {
# "column": "Object_PrimaryID", <--------- the value I save into primaryId
# "mapped_to_column": "cur_MAC", <--------- gets inserted into the CurrentScan DB
# table column cur_MAC
#
#
for device in device_data:
plugin_objects.add_object(
primaryId = device['mac_address'],
@@ -65,11 +64,11 @@ def main():
watched4 = device['last_seen'],
extra = '',
foreignKey = device['mac_address']
# helpVal1 = "Something1", # Optional Helper values to be passed for mapping into the app
# helpVal2 = "Something1", # If you need to use even only 1, add the remaining ones too
# helpVal1 = "Something1", # Optional Helper values to be passed for mapping into the app
# helpVal2 = "Something1", # If you need to use even only 1, add the remaining ones too
# helpVal3 = "Something1", # and set them to 'null'. Check the docs for details:
# helpVal4 = "Something1", # https://github.com/jokob-sk/NetAlertX/blob/main/docs/PLUGINS_DEV.md
)
)
mylog('verbose', [f'[{pluginName}] New entries: "{len(device_data)}"'])
@@ -78,14 +77,15 @@ def main():
return 0
# retrieve data
def get_device_data(some_setting):
device_data = []
# do some processing, call external APIs, and return a device_data list
# ...
#
#
# Sample data for testing purposes, you can adjust the processing in main() as needed
# ... before adding it to the plugin_objects.add_object(...)
device_data = [
@@ -113,8 +113,9 @@ def get_device_data(some_setting):
}
]
# Return the data to be detected by the main application
# Return the data to be detected by the main application
return device_data
if __name__ == '__main__':
main()
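Most of the remaining hunks in this commit add "# noqa: E402" markers. flake8's E402 ("module level import not at top of file") fires because these plugins must extend sys.path before the NetAlertX modules (plugin_helper, logger, helper, conf) become importable, so those imports necessarily sit below executable code. A minimal, runnable illustration of the same pattern, using a throwaway demo module as a stand-in for the NetAlertX imports:

import os
import sys
import tempfile

# Write a tiny module to a directory that is not yet on sys.path
plugin_dir = tempfile.mkdtemp()
with open(os.path.join(plugin_dir, "plugin_helper_demo.py"), "w") as f:
    f.write("def hello():\n    return 'loaded after sys.path.extend'\n")

# The path tweak is executable code, so any import placed below it triggers E402
sys.path.extend([plugin_dir])

import plugin_helper_demo  # noqa: E402  - import must come after the path change

print(plugin_helper_demo.hello())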

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python
# !/usr/bin/env python
# Just a testing library plugin for development purposes
import os
import sys
@@ -11,10 +11,10 @@ INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
# NetAlertX modules
from const import logPath
from plugin_helper import Plugin_Objects
from logger import mylog
from helper import get_setting_value
from const import logPath # noqa: E402 [flake8 lint suppression]
from plugin_helper import Plugin_Objects # noqa: E402 [flake8 lint suppression]
from logger import mylog # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
pluginName = 'TESTONLY'
@@ -28,14 +28,11 @@ plugin_objects = Plugin_Objects(RESULT_FILE)
md5_hash = hashlib.md5()
# globals
def main():
# START
mylog('verbose', [f'[{pluginName}] In script'])
mylog('verbose', [f'[{pluginName}] In script'])
# SPACE FOR TESTING 🔽
str = "ABC-MBP._another.localdomain."
@@ -43,28 +40,23 @@ def main():
# result = cleanDeviceName(str, True)
regexes = get_setting_value('NEWDEV_NAME_CLEANUP_REGEX')
print(regexes)
subnets = get_setting_value('SCAN_SUBNETS')
print(subnets)
for rgx in regexes:
for rgx in regexes:
mylog('trace', ["[cleanDeviceName] applying regex : " + rgx])
mylog('trace', ["[cleanDeviceName] name before regex : " + str])
str = re.sub(rgx, "", str)
mylog('trace', ["[cleanDeviceName] name after regex : " + str])
mylog('debug', ["[cleanDeviceName] output: " + str])
# SPACE FOR TESTING 🔼
# END
mylog('verbose', [f'[{pluginName}] result "{str}"'])
mylog('verbose', [f'[{pluginName}] result "{str}"'])
# -------------INIT---------------------
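The TESTONLY snippet above exercises the device-name cleanup: each regex from NEWDEV_NAME_CLEANUP_REGEX is applied with re.sub and the intermediate results are logged. Stripped of the NetAlertX logging, the core loop reduces to the sketch below; the sample name and the two patterns are illustrative only.

import re

def clean_device_name(name, regexes):
    # Apply each cleanup pattern in turn; replacing with "" removes the match
    for rgx in regexes:
        name = re.sub(rgx, "", name)
    return name

print(clean_device_name("ABC-MBP._another.localdomain.", [r"\.localdomain\.$", r"\._another$"]))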

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python
# !/usr/bin/env python
import json
import subprocess
@@ -9,15 +9,15 @@ import sys
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
import conf
from const import confFileName, logPath
from utils.datetime_utils import timeNowDB
from plugin_helper import Plugin_Objects
from logger import mylog, Logger
from helper import get_setting_value
from models.notification_instance import NotificationInstance
from database import DB
from pytz import timezone
import conf # noqa: E402 [flake8 lint suppression]
from const import confFileName, logPath # noqa: E402 [flake8 lint suppression]
from utils.datetime_utils import timeNowDB # noqa: E402 [flake8 lint suppression]
from plugin_helper import Plugin_Objects # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
from models.notification_instance import NotificationInstance # noqa: E402 [flake8 lint suppression]
from database import DB # noqa: E402 [flake8 lint suppression]
from pytz import timezone # noqa: E402 [flake8 lint suppression]
# Make sure the TIMEZONE for logging is correct
conf.tz = timezone(get_setting_value("TIMEZONE"))
@@ -35,7 +35,7 @@ def main():
mylog("verbose", [f"[{pluginName}](publisher) In script"])
# Check if basic config settings supplied
if check_config() == False:
if check_config() is False:
mylog(
"none",
[
@@ -65,9 +65,9 @@ def main():
# Log result
plugin_objects.add_object(
primaryId = pluginName,
secondaryId = timeNowDB(),
secondaryId = timeNowDB(),
watched1 = notification["GUID"],
watched2 = result,
watched2 = result,
watched3 = 'null',
watched4 = 'null',
extra = 'null',
@@ -80,8 +80,7 @@ def main():
# -------------------------------------------------------------------------------
def check_config():
if get_setting_value("APPRISE_HOST") == "" or (
get_setting_value("APPRISE_URL") == ""
and get_setting_value("APPRISE_TAG") == ""
get_setting_value("APPRISE_URL") == "" and get_setting_value("APPRISE_TAG") == ""
):
return False
else:
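Several hunks in this commit replace "check_config() == False" with "check_config() is False", which silences flake8's E712 (comparison to False). The two are not always interchangeable: "is False" matches only the literal False singleton, while "== False" also matches 0 and other values that compare equal to False. For a helper that returns a plain boolean, "not check_config()" is the usual idiom; a quick illustration of the difference:

def check_config():
    return False

if check_config() is False:      # true only when the literal False is returned
    print("not configured (is False)")

if not check_config():           # also covers None, '', 0 - usually what is meant
    print("not configured (not ...)")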

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python
# !/usr/bin/env python
import os
import sys
import re
@@ -16,15 +16,15 @@ INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
# NetAlertX modules
import conf
from const import confFileName, logPath
from plugin_helper import Plugin_Objects
from utils.datetime_utils import timeNowDB
from logger import mylog, Logger
from helper import get_setting_value, hide_email
from models.notification_instance import NotificationInstance
from database import DB
from pytz import timezone
import conf # noqa: E402 [flake8 lint suppression]
from const import confFileName, logPath # noqa: E402 [flake8 lint suppression]
from plugin_helper import Plugin_Objects # noqa: E402 [flake8 lint suppression]
from utils.datetime_utils import timeNowDB # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value, hide_email # noqa: E402 [flake8 lint suppression]
from models.notification_instance import NotificationInstance # noqa: E402 [flake8 lint suppression]
from database import DB # noqa: E402 [flake8 lint suppression]
from pytz import timezone # noqa: E402 [flake8 lint suppression]
# Make sure the TIMEZONE for logging is correct
conf.tz = timezone(get_setting_value('TIMEZONE'))
@@ -38,13 +38,12 @@ LOG_PATH = logPath + '/plugins'
RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
def main():
mylog('verbose', [f'[{pluginName}](publisher) In script'])
mylog('verbose', [f'[{pluginName}](publisher) In script'])
# Check if basic config settings supplied
if check_config() == False:
if check_config() is False:
mylog('none', [f'[{pluginName}] ⚠ ERROR: Publisher notification gateway not set up correctly. Check your {confFileName} {pluginName}_* variables.'])
return
@@ -61,7 +60,7 @@ def main():
# Retrieve new notifications
new_notifications = notifications.getNew()
# mylog('verbose', [f'[{pluginName}] new_notifications: ', new_notifications])
# mylog('verbose', [f'[{pluginName}] new_notifications: ', new_notifications])
mylog('verbose', [f'[{pluginName}] SMTP_SERVER: ', get_setting_value("SMTP_SERVER")])
mylog('verbose', [f'[{pluginName}] SMTP_PORT: ', get_setting_value("SMTP_PORT")])
mylog('verbose', [f'[{pluginName}] SMTP_SKIP_LOGIN: ', get_setting_value("SMTP_SKIP_LOGIN")])
@@ -72,19 +71,18 @@ def main():
# mylog('verbose', [f'[{pluginName}] SMTP_REPORT_TO: ', get_setting_value("SMTP_REPORT_TO")])
# mylog('verbose', [f'[{pluginName}] SMTP_REPORT_FROM: ', get_setting_value("SMTP_REPORT_FROM")])
# Process the new notifications (see the Notifications DB table for structure or check the /php/server/query_json.php?file=table_notifications.json endpoint)
for notification in new_notifications:
# Send notification
result = send(notification["HTML"], notification["Text"])
result = send(notification["HTML"], notification["Text"])
# Log result
plugin_objects.add_object(
primaryId = pluginName,
secondaryId = timeNowDB(),
secondaryId = timeNowDB(),
watched1 = notification["GUID"],
watched2 = result,
watched2 = result,
watched3 = 'null',
watched4 = 'null',
extra = 'null',
@@ -93,25 +91,33 @@ def main():
plugin_objects.write_result_file()
#-------------------------------------------------------------------------------
def check_config ():
# -------------------------------------------------------------------------------
def check_config():
server = get_setting_value('SMTP_SERVER')
report_to = get_setting_value("SMTP_REPORT_TO")
report_from = get_setting_value("SMTP_REPORT_FROM")
if server == '' or report_from == '' or report_to == '':
mylog('none', [f'[Email Check Config] ⚠ ERROR: Email service not set up correctly. Check your {confFileName} SMTP_*, SMTP_REPORT_FROM and SMTP_REPORT_TO variables.'])
return False
else:
return True
#-------------------------------------------------------------------------------
# -------------------------------------------------------------------------------
def send(pHTML, pText):
mylog('debug', [f'[{pluginName}] SMTP_REPORT_TO: {hide_email(str(get_setting_value("SMTP_REPORT_TO")))} SMTP_USER: {hide_email(str(get_setting_value("SMTP_USER")))}'])
subject, from_email, to_email, message_html, message_text = sanitize_email_content(str(get_setting_value("SMTP_SUBJECT")), get_setting_value("SMTP_REPORT_FROM"), get_setting_value("SMTP_REPORT_TO"), pHTML, pText)
subject, from_email, to_email, message_html, message_text = sanitize_email_content(
str(get_setting_value("SMTP_SUBJECT")),
get_setting_value("SMTP_REPORT_FROM"),
get_setting_value("SMTP_REPORT_TO"),
pHTML,
pText
)
emails = []
@@ -132,10 +138,10 @@ def send(pHTML, pText):
msg['Subject'] = subject
msg['From'] = from_email
msg['To'] = mail_addr
msg['Date'] = formatdate(localtime=True)
msg['Date'] = formatdate(localtime=True)
msg.attach (MIMEText (message_text, 'plain'))
msg.attach (MIMEText (message_html, 'html'))
msg.attach(MIMEText(message_text, 'plain'))
msg.attach(MIMEText(message_html, 'html'))
# Set a timeout for the SMTP connection (in seconds)
smtp_timeout = 30
@@ -144,30 +150,31 @@ def send(pHTML, pText):
if get_setting_value("LOG_LEVEL") == 'debug':
send_email(msg,smtp_timeout)
send_email(msg, smtp_timeout)
else:
try:
send_email(msg,smtp_timeout)
except smtplib.SMTPAuthenticationError as e:
send_email(msg, smtp_timeout)
except smtplib.SMTPAuthenticationError as e:
mylog('none', [' ERROR: Couldn\'t connect to the SMTP server (SMTPAuthenticationError)'])
mylog('none', [' ERROR: Double-check your SMTP_USER and SMTP_PASS settings.)'])
mylog('none', [' ERROR: ', str(e)])
except smtplib.SMTPServerDisconnected as e:
except smtplib.SMTPServerDisconnected as e:
mylog('none', [' ERROR: Couldn\'t connect to the SMTP server (SMTPServerDisconnected)'])
mylog('none', [' ERROR: ', str(e)])
except socket.gaierror as e:
except socket.gaierror as e:
mylog('none', [' ERROR: Could not resolve hostname (socket.gaierror)'])
mylog('none', [' ERROR: ', str(e)])
except ssl.SSLError as e:
mylog('none', [' ERROR: ', str(e)])
except ssl.SSLError as e:
mylog('none', [' ERROR: Could not establish SSL connection (ssl.SSLError)'])
mylog('none', [' ERROR: Are you sure you need SMTP_FORCE_SSL enabled? Check your SMTP provider docs.'])
mylog('none', [' ERROR: ', str(e)])
mylog('none', [' ERROR: ', str(e)])
# ----------------------------------------------------------------------------------
def send_email(msg,smtp_timeout):
def send_email(msg, smtp_timeout):
# Send mail
if get_setting_value('SMTP_FORCE_SSL'):
mylog('debug', ['SMTP_FORCE_SSL == True so using .SMTP_SSL()'])
@@ -182,10 +189,10 @@ def send_email(msg,smtp_timeout):
mylog('debug', ['SMTP_FORCE_SSL == False so using .SMTP()'])
if get_setting_value("SMTP_PORT") == 0:
mylog('debug', ['SMTP_PORT == 0 so sending .SMTP(SMTP_SERVER)'])
smtp_connection = smtplib.SMTP (get_setting_value('SMTP_SERVER'))
smtp_connection = smtplib.SMTP(get_setting_value('SMTP_SERVER'))
else:
mylog('debug', ['SMTP_PORT == 0 so sending .SMTP(SMTP_SERVER, SMTP_PORT)'])
smtp_connection = smtplib.SMTP (get_setting_value('SMTP_SERVER'), get_setting_value('SMTP_PORT'))
smtp_connection = smtplib.SMTP(get_setting_value('SMTP_SERVER'), get_setting_value('SMTP_PORT'))
mylog('debug', ['Setting SMTP debug level'])
@@ -193,7 +200,7 @@ def send_email(msg,smtp_timeout):
if get_setting_value('LOG_LEVEL') == 'debug':
smtp_connection.set_debuglevel(1)
mylog('debug', [ 'Sending .ehlo()'])
mylog('debug', ['Sending .ehlo()'])
smtp_connection.ehlo()
if not get_setting_value('SMTP_SKIP_TLS'):
@@ -203,12 +210,13 @@ def send_email(msg,smtp_timeout):
smtp_connection.ehlo()
if not get_setting_value('SMTP_SKIP_LOGIN'):
mylog('debug', ['SMTP_SKIP_LOGIN == False so sending .login()'])
smtp_connection.login (get_setting_value('SMTP_USER'), get_setting_value('SMTP_PASS'))
smtp_connection.login(get_setting_value('SMTP_USER'), get_setting_value('SMTP_PASS'))
mylog('debug', ['Sending .sendmail()'])
smtp_connection.sendmail (get_setting_value("SMTP_REPORT_FROM"), get_setting_value("SMTP_REPORT_TO"), msg.as_string())
smtp_connection.sendmail(get_setting_value("SMTP_REPORT_FROM"), get_setting_value("SMTP_REPORT_TO"), msg.as_string())
smtp_connection.quit()
# ----------------------------------------------------------------------------------
def sanitize_email_content(subject, from_email, to_email, message_html, message_text):
# Validate and sanitize subject
@@ -229,6 +237,7 @@ def sanitize_email_content(subject, from_email, to_email, message_html, message_
return subject, from_email, to_email, message_html, message_text
# ----------------------------------------------------------------------------------
if __name__ == '__main__':
sys.exit(main())
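For reference, the send_email() branching reformatted above follows the standard smtplib flow: SMTP_SSL when SMTP_FORCE_SSL is set, otherwise plain SMTP with an optional STARTTLS upgrade, an optional login, then sendmail. The stripped-down sketch below uses placeholder server, port and credential values rather than the plugin's settings.

import smtplib
from email.mime.text import MIMEText

def send_email_sketch(msg, force_ssl=False, skip_tls=False, skip_login=False,
                      server="smtp.example.com", port=587,
                      user="user", password="pass", timeout=30):
    # Choose the transport the same way the plugin does
    if force_ssl:
        conn = smtplib.SMTP_SSL(server, port, timeout=timeout)
    else:
        conn = smtplib.SMTP(server, port, timeout=timeout)

    conn.ehlo()
    if not force_ssl and not skip_tls:
        conn.starttls()      # upgrade the plain connection to TLS
        conn.ehlo()
    if not skip_login:
        conn.login(user, password)

    conn.sendmail(msg["From"], [msg["To"]], msg.as_string())
    conn.quit()

msg = MIMEText("hello from the sketch")
msg["From"], msg["To"], msg["Subject"] = "a@example.com", "b@example.com", "test"
# send_email_sketch(msg)   # needs a reachable SMTP server, so left commented out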

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python
# !/usr/bin/env python
import json
import os
@@ -18,15 +18,14 @@ INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
# NetAlertX modules
import conf
from const import confFileName, logPath
from utils.plugin_utils import getPluginObject
from plugin_helper import Plugin_Objects
from logger import mylog, Logger
import conf # noqa: E402 [flake8 lint suppression]
from const import confFileName, logPath # noqa: E402 [flake8 lint suppression]
from utils.plugin_utils import getPluginObject # noqa: E402 [flake8 lint suppression]
from plugin_helper import Plugin_Objects # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value, bytes_to_string, \
sanitize_string, normalize_string
from utils.datetime_utils import timeNowDB
from database import DB, get_device_stats
sanitize_string, normalize_string # noqa: E402 [flake8 lint suppression]
from database import DB, get_device_stats # noqa: E402 [flake8 lint suppression]
# Make sure the TIMEZONE for logging is correct
@@ -287,11 +286,11 @@ def publish_mqtt(mqtt_client, topic, message):
# mylog('verbose', [f"[{pluginName}] mqtt_client.is_connected(): {mqtt_client.is_connected()} "])
result = mqtt_client.publish(
topic=topic,
payload=message,
qos=qos,
retain=True,
)
topic=topic,
payload=message,
qos=qos,
retain=True,
)
status = result[0]
@@ -303,6 +302,7 @@ def publish_mqtt(mqtt_client, topic, message):
time.sleep(0.1)
return True
# ------------------------------------------------------------------------------
# Create a generic device for overall stats
def create_generic_device(mqtt_client, deviceId, deviceName):
@@ -434,7 +434,6 @@ def mqtt_start(db):
if not mqtt_connected_to_broker:
mqtt_client = mqtt_create_client()
deviceName = get_setting_value('MQTT_DEVICE_NAME')
deviceId = get_setting_value('MQTT_DEVICE_ID')
@@ -449,16 +448,18 @@ def mqtt_start(db):
row = get_device_stats(db)
# Publish (wrap into {} and remove last ',' from above)
publish_mqtt(mqtt_client, f"{topic_root}/sensor/{deviceId}/state",
{
"online": row[0],
"down": row[1],
"all": row[2],
"archived": row[3],
"new": row[4],
"unknown": row[5]
}
)
publish_mqtt(
mqtt_client,
f"{topic_root}/sensor/{deviceId}/state",
{
"online": row[0],
"down": row[1],
"all": row[2],
"archived": row[3],
"new": row[4],
"unknown": row[5]
}
)
# Generate device-specific MQTT messages if enabled
if get_setting_value('MQTT_SEND_DEVICES'):
@@ -466,11 +467,11 @@ def mqtt_start(db):
# Specific devices processing
# Get all devices
devices = db.read(get_setting_value('MQTT_DEVICES_SQL').replace('{s-quote}',"'"))
devices = db.read(get_setting_value('MQTT_DEVICES_SQL').replace('{s-quote}', "'"))
sec_delay = len(devices) * int(get_setting_value('MQTT_DELAY_SEC'))*5
sec_delay = len(devices) * int(get_setting_value('MQTT_DELAY_SEC')) * 5
mylog('verbose', [f"[{pluginName}] Estimated delay: ", (sec_delay), 's ', '(', round(sec_delay/60, 1), 'min)'])
mylog('verbose', [f"[{pluginName}] Estimated delay: ", (sec_delay), 's ', '(', round(sec_delay / 60, 1), 'min)'])
for device in devices:
@@ -495,27 +496,29 @@ def mqtt_start(db):
# handle device_tracker
# IMPORTANT: shared payload - device_tracker attributes and individual sensors
devJson = {
"last_ip": device["devLastIP"],
"is_new": str(device["devIsNew"]),
"alert_down": str(device["devAlertDown"]),
"vendor": sanitize_string(device["devVendor"]),
"mac_address": str(device["devMac"]),
"model": devDisplayName,
"last_connection": prepTimeStamp(str(device["devLastConnection"])),
"first_connection": prepTimeStamp(str(device["devFirstConnection"])),
"sync_node": device["devSyncHubNode"],
"group": device["devGroup"],
"location": device["devLocation"],
"network_parent_mac": device["devParentMAC"],
"network_parent_name": next((dev["devName"] for dev in devices if dev["devMAC"] == device["devParentMAC"]), "")
}
"last_ip": device["devLastIP"],
"is_new": str(device["devIsNew"]),
"alert_down": str(device["devAlertDown"]),
"vendor": sanitize_string(device["devVendor"]),
"mac_address": str(device["devMac"]),
"model": devDisplayName,
"last_connection": prepTimeStamp(str(device["devLastConnection"])),
"first_connection": prepTimeStamp(str(device["devFirstConnection"])),
"sync_node": device["devSyncHubNode"],
"group": device["devGroup"],
"location": device["devLocation"],
"network_parent_mac": device["devParentMAC"],
"network_parent_name": next((dev["devName"] for dev in devices if dev["devMAC"] == device["devParentMAC"]), "")
}
# bulk update device sensors in home assistant
publish_mqtt(mqtt_client, sensorConfig.state_topic, devJson) # REQUIRED, DON'T DELETE
# create and update is_present sensor
sensorConfig = create_sensor(mqtt_client, deviceId, devDisplayName, 'binary_sensor', 'is_present', 'wifi', device["devMac"])
publish_mqtt(mqtt_client, sensorConfig.state_topic,
publish_mqtt(
mqtt_client,
sensorConfig.state_topic,
{
"is_present": to_binary_sensor(str(device["devPresentLastScan"]))
}
@@ -547,7 +550,7 @@ def to_binary_sensor(input):
elif isinstance(input, bool) and input:
return "ON"
elif isinstance(input, str) and input == "1":
return "ON"
return "ON"
elif isinstance(input, bytes) and bytes_to_string(input) == "1":
return "ON"
return "OFF"

View File

@@ -1,5 +1,5 @@
#!/usr/bin/env python
# !/usr/bin/env python
import json
import os
@@ -11,15 +11,15 @@ from base64 import b64encode
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
import conf
from const import confFileName, logPath
from plugin_helper import Plugin_Objects, handleEmpty
from utils.datetime_utils import timeNowDB
from logger import mylog, Logger
from helper import get_setting_value
from models.notification_instance import NotificationInstance
from database import DB
from pytz import timezone
import conf # noqa: E402 [flake8 lint suppression]
from const import confFileName, logPath # noqa: E402 [flake8 lint suppression]
from plugin_helper import Plugin_Objects, handleEmpty # noqa: E402 [flake8 lint suppression]
from utils.datetime_utils import timeNowDB # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
from models.notification_instance import NotificationInstance # noqa: E402 [flake8 lint suppression]
from database import DB # noqa: E402 [flake8 lint suppression]
from pytz import timezone # noqa: E402 [flake8 lint suppression]
# Make sure the TIMEZONE for logging is correct
conf.tz = timezone(get_setting_value('TIMEZONE'))
@@ -33,13 +33,12 @@ LOG_PATH = logPath + '/plugins'
RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
def main():
mylog('verbose', [f'[{pluginName}](publisher) In script'])
mylog('verbose', [f'[{pluginName}](publisher) In script'])
# Check if basic config settings supplied
if check_config() == False:
if check_config() is False:
mylog('none', [f'[{pluginName}] ⚠ ERROR: Publisher notification gateway not set up correctly. Check your {confFileName} {pluginName}_* variables.'])
return
@@ -65,9 +64,9 @@ def main():
# Log result
plugin_objects.add_object(
primaryId = pluginName,
secondaryId = timeNowDB(),
secondaryId = timeNowDB(),
watched1 = notification["GUID"],
watched2 = handleEmpty(response_text),
watched2 = handleEmpty(response_text),
watched3 = response_status_code,
watched4 = 'null',
extra = 'null',
@@ -77,15 +76,15 @@ def main():
plugin_objects.write_result_file()
#-------------------------------------------------------------------------------
# -------------------------------------------------------------------------------
def check_config():
if get_setting_value('NTFY_HOST') == '' or get_setting_value('NTFY_TOPIC') == '':
if get_setting_value('NTFY_HOST') == '' or get_setting_value('NTFY_TOPIC') == '':
return False
else:
return True
#-------------------------------------------------------------------------------
# -------------------------------------------------------------------------------
def send(html, text):
response_text = ''
@@ -100,7 +99,7 @@ def send(html, text):
# prepare request headers
headers = {
"Title": "NetAlertX Notification",
"Actions": "view, Open Dashboard, "+ get_setting_value('REPORT_DASHBOARD_URL'),
"Actions": "view, Open Dashboard, " + get_setting_value('REPORT_DASHBOARD_URL'),
"Priority": get_setting_value('NTFY_PRIORITY'),
"Tags": "warning"
}
@@ -109,37 +108,38 @@ def send(html, text):
if token != '':
headers["Authorization"] = "Bearer {}".format(token)
elif user != "" and pwd != "":
# Generate hash for basic auth
# Generate hash for basic auth
basichash = b64encode(bytes(user + ':' + pwd, "utf-8")).decode("ascii")
# add authorization header with hash
# add authorization header with hash
headers["Authorization"] = "Basic {}".format(basichash)
# call NTFY service
try:
response = requests.post("{}/{}".format( get_setting_value('NTFY_HOST'),
get_setting_value('NTFY_TOPIC')),
data = text,
headers = headers,
verify = verify_ssl)
response = requests.post("{}/{}".format(
get_setting_value('NTFY_HOST'),
get_setting_value('NTFY_TOPIC')),
data = text,
headers = headers,
verify = verify_ssl
)
response_status_code = response.status_code
# Check if the request was successful (status code 200)
if response_status_code == 200:
response_text = response.text # This captures the response body/message
response_text = response.text # This captures the response body/message
else:
response_text = json.dumps(response.text)
response_text = json.dumps(response.text)
except requests.exceptions.RequestException as e:
except requests.exceptions.RequestException as e:
mylog('none', [f'[{pluginName}] ⚠ ERROR: ', e])
response_text = e
return response_text, response_status_code
return response_text, response_status_code
return response_text, response_status_code
if __name__ == '__main__':
sys.exit(main())
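The NTFY hunk above builds the Authorization header in one of two ways: a bearer token when NTFY_TOKEN is set, otherwise HTTP basic auth assembled by hand with b64encode. Isolated from the plugin, the header construction looks like the sketch below; the token, user and password values are placeholders.

from base64 import b64encode

def ntfy_auth_header(token="", user="", pwd=""):
    # Prefer a bearer token; fall back to basic auth when user and password are given
    if token:
        return {"Authorization": f"Bearer {token}"}
    if user and pwd:
        basichash = b64encode(f"{user}:{pwd}".encode("utf-8")).decode("ascii")
        return {"Authorization": f"Basic {basichash}"}
    return {}

print(ntfy_auth_header(user="alice", pwd="secret"))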

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python3
# !/usr/bin/env python3
import conf
from const import confFileName, logPath
from pytz import timezone
@@ -12,12 +12,12 @@ import requests
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from plugin_helper import Plugin_Objects, handleEmpty # noqa: E402
from logger import mylog, Logger # noqa: E402
from helper import get_setting_value, hide_string # noqa: E402
from utils.datetime_utils import timeNowDB
from models.notification_instance import NotificationInstance # noqa: E402
from database import DB # noqa: E402
from plugin_helper import Plugin_Objects, handleEmpty # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value, hide_string # noqa: E402 [flake8 lint suppression]
from utils.datetime_utils import timeNowDB # noqa: E402 [flake8 lint suppression]
from models.notification_instance import NotificationInstance # noqa: E402 [flake8 lint suppression]
from database import DB # noqa: E402 [flake8 lint suppression]
# Make sure the TIMEZONE for logging is correct
conf.tz = timezone(get_setting_value("TIMEZONE"))

View File

@@ -1,5 +1,5 @@
#!/usr/bin/env python
# !/usr/bin/env python
import json
import os
@@ -10,15 +10,15 @@ import requests
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
import conf
from const import confFileName, logPath
from plugin_helper import Plugin_Objects, handleEmpty
from logger import mylog, Logger
from helper import get_setting_value, hide_string
from utils.datetime_utils import timeNowDB
from models.notification_instance import NotificationInstance
from database import DB
from pytz import timezone
import conf # noqa: E402 [flake8 lint suppression]
from const import confFileName, logPath # noqa: E402 [flake8 lint suppression]
from plugin_helper import Plugin_Objects, handleEmpty # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value, hide_string # noqa: E402 [flake8 lint suppression]
from utils.datetime_utils import timeNowDB # noqa: E402 [flake8 lint suppression]
from models.notification_instance import NotificationInstance # noqa: E402 [flake8 lint suppression]
from database import DB # noqa: E402 [flake8 lint suppression]
from pytz import timezone # noqa: E402 [flake8 lint suppression]
# Make sure the TIMEZONE for logging is correct
conf.tz = timezone(get_setting_value('TIMEZONE'))
@@ -32,13 +32,12 @@ LOG_PATH = logPath + '/plugins'
RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
def main():
mylog('verbose', [f'[{pluginName}](publisher) In script'])
mylog('verbose', [f'[{pluginName}](publisher) In script'])
# Check if basic config settings supplied
if check_config() == False:
if check_config() is False:
mylog('none', [f'[{pluginName}] ⚠ ERROR: Publisher notification gateway not set up correctly. Check your {confFileName} {pluginName}_* variables.'])
return
@@ -59,14 +58,14 @@ def main():
for notification in new_notifications:
# Send notification
response_text, response_status_code = send(notification["Text"])
response_text, response_status_code = send(notification["Text"])
# Log result
plugin_objects.add_object(
primaryId = pluginName,
secondaryId = timeNowDB(),
secondaryId = timeNowDB(),
watched1 = notification["GUID"],
watched2 = handleEmpty(response_text),
watched2 = handleEmpty(response_text),
watched3 = response_status_code,
watched4 = 'null',
extra = 'null',
@@ -76,8 +75,7 @@ def main():
plugin_objects.write_result_file()
#-------------------------------------------------------------------------------
# -------------------------------------------------------------------------------
def send(text):
response_text = ''
@@ -85,8 +83,7 @@ def send(text):
token = get_setting_value('PUSHSAFER_TOKEN')
mylog('verbose', [f'[{pluginName}] PUSHSAFER_TOKEN: "{hide_string(token)}"'])
mylog('verbose', [f'[{pluginName}] PUSHSAFER_TOKEN: "{hide_string(token)}"'])
try:
url = 'https://www.pushsafer.com/api'
@@ -101,40 +98,34 @@ def send(text):
"u" : get_setting_value('REPORT_DASHBOARD_URL'),
"ut" : 'Open NetAlertX',
"k" : token,
}
}
response = requests.post(url, data=post_fields)
response_status_code = response.status_code
# Check if the request was successful (status code 200)
if response_status_code == 200:
response_text = response.text # This captures the response body/message
response_text = response.text # This captures the response body/message
else:
response_text = json.dumps(response.text)
response_text = json.dumps(response.text)
except requests.exceptions.RequestException as e:
except requests.exceptions.RequestException as e:
mylog('none', [f'[{pluginName}] ⚠ ERROR: ', e])
response_text = e
return response_text, response_status_code
return response_text, response_status_code
return response_text, response_status_code
#-------------------------------------------------------------------------------
# -------------------------------------------------------------------------------
def check_config():
if get_setting_value('PUSHSAFER_TOKEN') == 'ApiKey':
return False
else:
return True
if get_setting_value('PUSHSAFER_TOKEN') == 'ApiKey':
return False
else:
return True
# -------------------------------------------------------
if __name__ == '__main__':
sys.exit(main())

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python
# !/usr/bin/env python
import subprocess
import os
@@ -8,15 +8,15 @@ import sys
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
import conf
from const import confFileName, logPath
from plugin_helper import Plugin_Objects
from utils.datetime_utils import timeNowDB
from logger import mylog, Logger
from helper import get_setting_value
from models.notification_instance import NotificationInstance
from database import DB
from pytz import timezone
import conf # noqa: E402 [flake8 lint suppression]
from const import confFileName, logPath # noqa: E402 [flake8 lint suppression]
from plugin_helper import Plugin_Objects # noqa: E402 [flake8 lint suppression]
from utils.datetime_utils import timeNowDB # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
from models.notification_instance import NotificationInstance # noqa: E402 [flake8 lint suppression]
from database import DB # noqa: E402 [flake8 lint suppression]
from pytz import timezone # noqa: E402 [flake8 lint suppression]
# Make sure the TIMEZONE for logging is correct
conf.tz = timezone(get_setting_value('TIMEZONE'))
@@ -30,13 +30,11 @@ LOG_PATH = logPath + '/plugins'
RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
def main():
mylog('verbose', [f'[{pluginName}](publisher) In script'])
# Check if basic config settings supplied
if check_config() == False:
if check_config() is False:
mylog('none', [
f'[{pluginName}] ⚠ ERROR: Publisher notification gateway not set up correctly. Check your {confFileName} {pluginName}_* variables.'])
return

View File

@@ -1,5 +1,5 @@
#!/usr/bin/env python
# !/usr/bin/env python
import json
import subprocess
@@ -13,15 +13,15 @@ INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
import conf
from const import logPath, confFileName
from plugin_helper import Plugin_Objects, handleEmpty
from utils.datetime_utils import timeNowDB
from logger import mylog, Logger
from helper import get_setting_value, write_file
from models.notification_instance import NotificationInstance
from database import DB
from pytz import timezone
import conf # noqa: E402 [flake8 lint suppression]
from const import logPath, confFileName # noqa: E402 [flake8 lint suppression]
from plugin_helper import Plugin_Objects, handleEmpty # noqa: E402 [flake8 lint suppression]
from utils.datetime_utils import timeNowDB # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value, write_file # noqa: E402 [flake8 lint suppression]
from models.notification_instance import NotificationInstance # noqa: E402 [flake8 lint suppression]
from database import DB # noqa: E402 [flake8 lint suppression]
from pytz import timezone # noqa: E402 [flake8 lint suppression]
# Make sure the TIMEZONE for logging is correct
conf.tz = timezone(get_setting_value('TIMEZONE'))
@@ -35,13 +35,12 @@ LOG_PATH = logPath + '/plugins'
RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
def main():
mylog('verbose', [f'[{pluginName}](publisher) In script'])
mylog('verbose', [f'[{pluginName}](publisher) In script'])
# Check if basic config settings supplied
if check_config() == False:
if check_config() is False:
mylog('none', [f'[{pluginName}] ⚠ ERROR: Publisher notification gateway not set up correctly. Check your {confFileName} {pluginName}_* variables.'])
return
@@ -62,15 +61,19 @@ def main():
for notification in new_notifications:
# Send notification
response_stdout, response_stderr = send(notification["Text"], notification["HTML"], notification["JSON"])
response_stdout, response_stderr = send(
notification["Text"],
notification["HTML"],
notification["JSON"]
)
# Log result
plugin_objects.add_object(
primaryId = pluginName,
secondaryId = timeNowDB(),
secondaryId = timeNowDB(),
watched1 = notification["GUID"],
watched2 = handleEmpty(response_stdout),
watched3 = handleEmpty(response_stderr),
watched2 = handleEmpty(response_stdout),
watched3 = handleEmpty(response_stderr),
watched4 = 'null',
extra = 'null',
foreignKey = notification["GUID"]
@@ -79,16 +82,16 @@ def main():
plugin_objects.write_result_file()
#-------------------------------------------------------------------------------
# -------------------------------------------------------------------------------
def check_config():
if get_setting_value('WEBHOOK_URL') == '':
return False
else:
return True
#-------------------------------------------------------------------------------
if get_setting_value('WEBHOOK_URL') == '':
return False
else:
return True
def send (text_data, html_data, json_data):
# -------------------------------------------------------------------------------
def send(text_data, html_data, json_data):
response_stderr = ''
response_stdout = ''
@@ -102,9 +105,9 @@ def send (text_data, html_data, json_data):
# use data type based on specified payload type
if payloadType == 'json':
# In this code, the truncate_json function is used to recursively traverse the JSON object
# and remove nodes that exceed the size limit. It checks the size of each node's JSON representation
# using json.dumps and includes only the nodes that are within the limit.
# In this code, the truncate_json function is used to recursively traverse the JSON object
# and remove nodes that exceed the size limit. It checks the size of each node's JSON representation
# using json.dumps and includes only the nodes that are within the limit.
json_str = json.dumps(json_data)
if len(json_str) <= limit:
@@ -127,45 +130,48 @@ def send (text_data, html_data, json_data):
return obj
payloadData = truncate_json(json_data)
if payloadType == 'html':
if payloadType == 'html':
if len(html_data) > limit:
payloadData = html_data[:limit] + " <h1>(text was truncated)</h1>"
else:
payloadData = html_data
if payloadType == 'text':
if payloadType == 'text':
if len(text_data) > limit:
payloadData = text_data[:limit] + " (text was truncated)"
else:
payloadData = text_data
# Define slack-compatible payload
_json_payload = { "text": payloadData } if payloadType == 'text' else {
"username": "NetAlertX",
"text": "There are new notifications",
"attachments": [{
"title": "NetAlertX Notifications",
"title_link": get_setting_value('REPORT_DASHBOARD_URL'),
"text": payloadData
}]
}
if payloadType == 'text':
_json_payload = {"text": payloadData}
else:
_json_payload = {
"username": "NetAlertX",
"text": "There are new notifications",
"attachments": [{
"title": "NetAlertX Notifications",
"title_link": get_setting_value('REPORT_DASHBOARD_URL'),
"text": payloadData
}]
}
# DEBUG - Write the json payload into a log file for debugging
write_file (logPath + '/webhook_payload.json', json.dumps(_json_payload))
write_file(logPath + '/webhook_payload.json', json.dumps(_json_payload))
# Using the Slack-Compatible Webhook endpoint for Discord so that the same payload can be used for both
# Consider: curl has the ability to load in data to POST from a file + piping
if(endpointUrl.startswith('https://discord.com/api/webhooks/') and not endpointUrl.endswith("/slack")):
if (endpointUrl.startswith('https://discord.com/api/webhooks/') and not endpointUrl.endswith("/slack")):
_WEBHOOK_URL = f"{endpointUrl}/slack"
curlParams = ["curl","-i","-H", "Content-Type:application/json" ,"-d", json.dumps(_json_payload), _WEBHOOK_URL]
curlParams = ["curl", "-i", "-H", "Content-Type:application/json", "-d", json.dumps(_json_payload), _WEBHOOK_URL]
else:
_WEBHOOK_URL = endpointUrl
curlParams = ["curl","-i","-X", requestMethod , "-H", "Content-Type:application/json", "-d", json.dumps(_json_payload), _WEBHOOK_URL]
curlParams = ["curl", "-i", "-X", requestMethod , "-H", "Content-Type:application/json", "-d", json.dumps(_json_payload), _WEBHOOK_URL]
# Add HMAC signature if configured
if(secret != ''):
if (secret != ''):
h = hmac.new(secret.encode("UTF-8"), json.dumps(_json_payload, separators=(',', ':')).encode(), hashlib.sha256).hexdigest()
curlParams.insert(4,"-H")
curlParams.insert(5,f"X-Webhook-Signature: sha256={h}")
curlParams.insert(4, "-H")
curlParams.insert(5, f"X-Webhook-Signature: sha256={h}")
try:
# Execute CURL call
@@ -173,13 +179,11 @@ def send (text_data, html_data, json_data):
result = subprocess.run(curlParams, capture_output=True, text=True)
response_stderr = result.stderr
response_stdout = result.stdout
response_stdout = result.stdout
# Write stdout and stderr into .log files for debugging if needed
mylog('debug', [f'[{pluginName}] stdout: ', response_stdout])
mylog('debug', [f'[{pluginName}] stderr: ', response_stderr])
mylog('debug', [f'[{pluginName}] stderr: ', response_stderr])
except subprocess.CalledProcessError as e:
# An error occurred, handle it
@@ -187,10 +191,9 @@ def send (text_data, html_data, json_data):
response_stderr = e.output
return response_stdout, response_stderr
return response_stdout, response_stderr
# -------------------------------------------------------
# -------------------------------------------------------
if __name__ == '__main__':
sys.exit(main())
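The webhook plugin's comment above describes truncate_json as a recursive walk that drops any node whose serialized form exceeds the size limit. The exact implementation is not shown in this hunk, so the version below is a sketch of that described behaviour rather than a copy of the plugin's code.

import json

def truncate_json(obj, limit=1024):
    # Keep a node only if its own JSON representation fits within the limit
    if isinstance(obj, dict):
        return {k: truncate_json(v, limit) for k, v in obj.items()
                if len(json.dumps(v)) <= limit}
    if isinstance(obj, list):
        return [truncate_json(v, limit) for v in obj
                if len(json.dumps(v)) <= limit]
    return obj

payload = {"small": "ok", "big": "x" * 5000, "nested": {"keep": 1}}
print(truncate_json(payload))   # "big" is dropped, the rest survives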

View File

@@ -1,7 +1,6 @@
#!/usr/bin/env python
# !/usr/bin/env python
import os
import time
import pathlib
import argparse
import sys
import re
@@ -9,16 +8,16 @@ import base64
import subprocess
# Register NetAlertX directories
INSTALL_PATH="/app"
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from database import DB
from plugin_helper import Plugin_Objects, handleEmpty
from logger import mylog, Logger, append_line_to_file
from helper import get_setting_value
from const import logPath, applicationPath
import conf
from pytz import timezone
from database import DB # noqa: E402 [flake8 lint suppression]
from plugin_helper import Plugin_Objects, handleEmpty # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
from const import logPath # noqa: E402 [flake8 lint suppression]
import conf # noqa: E402 [flake8 lint suppression]
from pytz import timezone # noqa: E402 [flake8 lint suppression]
# Make sure the TIMEZONE for logging is correct
conf.tz = timezone(get_setting_value("TIMEZONE"))

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python
# !/usr/bin/env python
import os
import sys
@@ -6,17 +6,16 @@ INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
pluginName = "ASUSWRT"
import asyncio
import aiohttp
import conf
from asusrouter import AsusData, AsusRouter
from asusrouter.modules.connection import ConnectionState
from const import logPath
from helper import get_setting_value
from logger import Logger, mylog
from plugin_helper import (Plugin_Objects, handleEmpty)
from pytz import timezone
import asyncio # noqa: E402 [flake8 lint suppression]
import aiohttp # noqa: E402 [flake8 lint suppression]
import conf # noqa: E402 [flake8 lint suppression]
from asusrouter import AsusData, AsusRouter # noqa: E402 [flake8 lint suppression]
from asusrouter.modules.connection import ConnectionState # noqa: E402 [flake8 lint suppression]
from const import logPath # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
from logger import Logger, mylog # noqa: E402 [flake8 lint suppression]
from plugin_helper import (Plugin_Objects, handleEmpty) # noqa: E402 [flake8 lint suppression]
from pytz import timezone # noqa: E402 [flake8 lint suppression]
conf.tz = timezone(get_setting_value("TIMEZONE"))

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python3
# !/usr/bin/env python3
import os
import sys
import socket
@@ -8,14 +8,14 @@ from zeroconf import Zeroconf
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from plugin_helper import Plugin_Objects
from logger import mylog, Logger
from const import logPath
from helper import get_setting_value
from database import DB
from models.device_instance import DeviceInstance
import conf
from pytz import timezone
from plugin_helper import Plugin_Objects # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from const import logPath # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
from database import DB # noqa: E402 [flake8 lint suppression]
from models.device_instance import DeviceInstance # noqa: E402 [flake8 lint suppression]
import conf # noqa: E402 [flake8 lint suppression]
from pytz import timezone # noqa: E402 [flake8 lint suppression]
# Configure timezone and logging
conf.tz = timezone(get_setting_value("TIMEZONE"))
@@ -67,7 +67,7 @@ def resolve_mdns_name(ip: str, timeout: int = 5) -> str:
hostname = socket.getnameinfo((ip, 0), socket.NI_NAMEREQD)[0]
zeroconf.close()
if hostname and hostname != ip:
mylog("debug", [f"[{pluginName}] Found mDNS name: {hostname}"])
mylog("debug", [f"[{pluginName}] Found mDNS name (rev_name): {hostname} ({rev_name})"])
return hostname
except Exception as e:
mylog("debug", [f"[{pluginName}] Zeroconf lookup failed for {ip}: {e}"])
@@ -89,7 +89,7 @@ def main():
timeout = get_setting_value("AVAHISCAN_RUN_TIMEOUT")
use_mock = "--mockdata" in sys.argv
if use_mock:
mylog("verbose", [f"[{pluginName}] Running in MOCK mode"])
devices = [
@@ -137,4 +137,4 @@ def main():
# Entrypoint
# =============================================================================
if __name__ == "__main__":
main()
main()
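The AVAHISCAN change also touches resolve_mdns_name(), which asks the system resolver for a hostname via socket.getnameinfo with NI_NAMEREQD so the call fails instead of echoing the IP back. The essential lookup, outside the plugin's Zeroconf setup and logging, is shown below; the sample address is illustrative.

import socket

def reverse_lookup(ip):
    # NI_NAMEREQD makes getnameinfo raise if no name exists for the address
    try:
        hostname, _ = socket.getnameinfo((ip, 0), socket.NI_NAMEREQD)
        return hostname
    except socket.gaierror:
        return ""

print(reverse_lookup("192.168.1.1") or "no name found")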

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python
# !/usr/bin/env python
import os
import argparse
@@ -11,11 +11,11 @@ from datetime import datetime
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from logger import mylog, Logger
from helper import get_setting_value
from const import logPath, fullDbPath
import conf
from pytz import timezone
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
from const import logPath, fullDbPath # noqa: E402 [flake8 lint suppression]
import conf # noqa: E402 [flake8 lint suppression]
from pytz import timezone # noqa: E402 [flake8 lint suppression]
# Make sure the TIMEZONE for logging is correct
conf.tz = timezone(get_setting_value('TIMEZONE'))
@@ -29,6 +29,7 @@ LOG_PATH = logPath + '/plugins'
LOG_FILE = os.path.join(LOG_PATH, f'script.{pluginName}.log')
RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
def main():
# the script expects a parameter in the format of devices=device1,device2,...
@@ -44,7 +45,7 @@ def main():
else:
overwrite = False
mylog('verbose', ['[CSVBCKP] In script'])
mylog('verbose', ['[CSVBCKP] In script'])
# Connect to the App database
conn = sqlite3.connect(fullDbPath)
@@ -64,7 +65,7 @@ def main():
fullPath = os.path.join(values.location.split('=')[1], filename)
mylog('verbose', ['[CSVBCKP] Writing file ', fullPath])
mylog('verbose', ['[CSVBCKP] Writing file ', fullPath])
# Create a CSV file in the specified location
with open(fullPath, 'w', newline='') as csvfile:
@@ -72,7 +73,7 @@ def main():
csv_writer = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_MINIMAL)
# Wrap the header values in double quotes and write the header row
csv_writer.writerow([ '"' + col + '"' for col in columns])
csv_writer.writerow(['"' + col + '"' for col in columns])
# Fetch and write data rows
for row in cursor.fetchall():
@@ -96,8 +97,8 @@ def main():
return 0
#===============================================================================
# ===============================================================================
# BEGIN
#===============================================================================
# ===============================================================================
if __name__ == '__main__':
main()
main()

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python
# !/usr/bin/env python
import os
import sys
@@ -8,11 +8,11 @@ import sqlite3
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from logger import mylog, Logger
from helper import get_setting_value
from const import logPath, fullDbPath
import conf
from pytz import timezone
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
from const import logPath, fullDbPath # noqa: E402 [flake8 lint suppression]
import conf # noqa: E402 [flake8 lint suppression]
from pytz import timezone # noqa: E402 [flake8 lint suppression]
# Make sure the TIMEZONE for logging is correct
conf.tz = timezone(get_setting_value("TIMEZONE"))
@@ -81,7 +81,7 @@ def cleanup_database(
)
cursor.execute(
"""DELETE from Online_History where "Index" not in (
SELECT "Index" from Online_History
SELECT "Index" from Online_History
order by Scan_Date desc limit 150)"""
)
@@ -94,7 +94,7 @@ def cleanup_database(
],
)
cursor.execute(
f"""DELETE FROM Events
f"""DELETE FROM Events
WHERE eve_DateTime <= date('now', '-{str(DAYS_TO_KEEP_EVENTS)} day')"""
)
# -----------------------------------------------------
@@ -107,11 +107,11 @@ def cleanup_database(
)
# Build the SQL query to delete entries that exceed the limit per unique "Plugin" column entry
delete_query = f"""DELETE FROM Plugins_History
delete_query = f"""DELETE FROM Plugins_History
WHERE "Index" NOT IN (
SELECT "Index"
FROM (
SELECT "Index",
SELECT "Index",
ROW_NUMBER() OVER(PARTITION BY "Plugin" ORDER BY DateTimeChanged DESC) AS row_num
FROM Plugins_History
) AS ranked_objects
@@ -133,11 +133,11 @@ def cleanup_database(
)
# Build the SQL query to delete entries
delete_query = f"""DELETE FROM Notifications
delete_query = f"""DELETE FROM Notifications
WHERE "Index" NOT IN (
SELECT "Index"
FROM (
SELECT "Index",
SELECT "Index",
ROW_NUMBER() OVER(PARTITION BY "Notifications" ORDER BY DateTimeCreated DESC) AS row_num
FROM Notifications
) AS ranked_objects
@@ -153,11 +153,11 @@ def cleanup_database(
mylog("verbose", [f"[{pluginName}] Trim AppEvents to less than {histCount}"])
# Build the SQL query to delete entries
delete_query = f"""DELETE FROM AppEvents
delete_query = f"""DELETE FROM AppEvents
WHERE "Index" NOT IN (
SELECT "Index"
FROM (
SELECT "Index",
SELECT "Index",
ROW_NUMBER() OVER(PARTITION BY "AppEvents" ORDER BY DateTimeCreated DESC) AS row_num
FROM AppEvents
) AS ranked_objects
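The DBCLNP hunks reindent several "keep only the newest N rows per group" deletes. The pattern relies on SQLite's ROW_NUMBER() window function (available since SQLite 3.25): rank rows within each group by date, then delete everything ranked past the cut-off. A self-contained illustration on a throwaway in-memory table:

import sqlite3

conn = sqlite3.connect(":memory:")
conn.executescript("""
    CREATE TABLE Plugins_History ("Index" INTEGER PRIMARY KEY, Plugin TEXT, DateTimeChanged TEXT);
    INSERT INTO Plugins_History (Plugin, DateTimeChanged) VALUES
        ('A', '2025-01-01'), ('A', '2025-01-02'), ('A', '2025-01-03'),
        ('B', '2025-01-01'), ('B', '2025-01-02');
""")

keep_per_plugin = 2
conn.execute(f"""
    DELETE FROM Plugins_History
    WHERE "Index" NOT IN (
        SELECT "Index" FROM (
            SELECT "Index",
                   ROW_NUMBER() OVER (PARTITION BY Plugin ORDER BY DateTimeChanged DESC) AS row_num
            FROM Plugins_History
        ) AS ranked_objects
        WHERE row_num <= {keep_per_plugin}
    )
""")
# The newest two rows per plugin remain
print(conn.execute("SELECT Plugin, DateTimeChanged FROM Plugins_History ORDER BY Plugin").fetchall())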

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python
# !/usr/bin/env python
import os
import argparse
@@ -9,11 +9,11 @@ import subprocess
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from logger import mylog, Logger
from helper import get_setting_value, check_IP_format
from const import logPath
import conf
from pytz import timezone
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value, check_IP_format # noqa: E402 [flake8 lint suppression]
from const import logPath # noqa: E402 [flake8 lint suppression]
import conf # noqa: E402 [flake8 lint suppression]
from pytz import timezone # noqa: E402 [flake8 lint suppression]
# Make sure the TIMEZONE for logging is correct
conf.tz = timezone(get_setting_value('TIMEZONE'))
@@ -28,91 +28,88 @@ LOG_FILE = os.path.join(LOG_PATH, f'script.{pluginName}.log')
RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
def main():
mylog('verbose', [f'[{pluginName}] In script'])
mylog('verbose', [f'[{pluginName}] In script'])
parser = argparse.ArgumentParser(description='Check internet connectivity and IP')
parser.add_argument('prev_ip', action="store", help="Previous IP address to compare against the current IP")
parser.add_argument('prev_ip', action="store", help="Previous IP address to compare against the current IP")
parser.add_argument('DDNS_UPDATE_URL', action="store", help="URL for updating Dynamic DNS (DDNS)")
parser.add_argument('DDNS_USER', action="store", help="Username for Dynamic DNS (DDNS) authentication")
parser.add_argument('DDNS_PASSWORD', action="store", help="Password for Dynamic DNS (DDNS) authentication")
parser.add_argument('DDNS_DOMAIN', action="store", help="Dynamic DNS (DDNS) domain name")
values = parser.parse_args()
PREV_IP = values.prev_ip.split('=')[1]
PREV_IP = values.prev_ip.split('=')[1]
DDNS_UPDATE_URL = values.DDNS_UPDATE_URL.split('=')[1]
DDNS_USER = values.DDNS_USER.split('=')[1]
DDNS_PASSWORD = values.DDNS_PASSWORD.split('=')[1]
DDNS_DOMAIN = values.DDNS_DOMAIN.split('=')[1]
DDNS_DOMAIN = values.DDNS_DOMAIN.split('=')[1]
# perform the new IP lookup and DDNS tasks if enabled
ddns_update( DDNS_UPDATE_URL, DDNS_USER, DDNS_PASSWORD, DDNS_DOMAIN, PREV_IP)
ddns_update(DDNS_UPDATE_URL, DDNS_USER, DDNS_PASSWORD, DDNS_DOMAIN, PREV_IP)
mylog('verbose', [f'[{pluginName}] Finished '])
mylog('verbose', [f'[{pluginName}] Finished '])
return 0
#===============================================================================
# ===============================================================================
# INTERNET IP CHANGE
#===============================================================================
def ddns_update ( DDNS_UPDATE_URL, DDNS_USER, DDNS_PASSWORD, DDNS_DOMAIN, PREV_IP ):
# ===============================================================================
def ddns_update(DDNS_UPDATE_URL, DDNS_USER, DDNS_PASSWORD, DDNS_DOMAIN, PREV_IP):
# Update DDNS record if enabled and IP is different
# Get Dynamic DNS IP
mylog('verbose', [f'[{pluginName}] Retrieving Dynamic DNS IP'])
dns_IP = get_dynamic_DNS_IP(DDNS_DOMAIN)
# Check Dynamic DNS IP
if dns_IP == "" or dns_IP == "0.0.0.0" :
mylog('none', [f'[{pluginName}] Error retrieving Dynamic DNS IP'])
mylog('none', [f'[{pluginName}] Error retrieving Dynamic DNS IP'])
mylog('none', [f'[{pluginName}] ', dns_IP])
# Check DNS Change
if dns_IP != PREV_IP :
mylog('none', [f'[{pluginName}] Updating Dynamic DNS IP'])
message = set_dynamic_DNS_IP (DDNS_UPDATE_URL, DDNS_USER, DDNS_PASSWORD, DDNS_DOMAIN)
mylog('none', [f'[{pluginName}] ', message])
message = set_dynamic_DNS_IP(DDNS_UPDATE_URL, DDNS_USER, DDNS_PASSWORD, DDNS_DOMAIN)
mylog('none', [f'[{pluginName}] ', message])
# plugin_objects = Plugin_Objects(RESULT_FILE)
# plugin_objects = Plugin_Objects(RESULT_FILE)
# plugin_objects.add_object(
# primaryId = 'Internet', # MAC (Device Name)
# secondaryId = new_internet_IP, # IP Address
# secondaryId = new_internet_IP, # IP Address
# watched1 = f'Previous IP: {PREV_IP}',
# watched2 = '',
# watched3 = '',
# watched3 = '',
# watched4 = '',
# extra = f'Previous IP: {PREV_IP}',
# extra = f'Previous IP: {PREV_IP}',
# foreignKey = 'Internet')
# plugin_objects.write_result_file()
# plugin_objects.write_result_file()
#-------------------------------------------------------------------------------
def get_dynamic_DNS_IP (DDNS_DOMAIN):
# -------------------------------------------------------------------------------
def get_dynamic_DNS_IP(DDNS_DOMAIN):
# Using supplied DNS server
dig_args = ['dig', '+short', DDNS_DOMAIN]
try:
# try running a subprocess
dig_output = subprocess.check_output (dig_args, universal_newlines=True)
dig_output = subprocess.check_output(dig_args, universal_newlines=True)
mylog('none', [f'[{pluginName}] DIG output :', dig_output])
except subprocess.CalledProcessError as e:
# An error occurred, handle it
mylog('none', [f'[{pluginName}] ⚠ ERROR - ', e.output])
dig_output = '' # probably no internet
dig_output = '' # probably no internet
# Check result is an IP
IP = check_IP_format (dig_output)
IP = check_IP_format(dig_output)
# Handle invalid response
if IP == '':
@@ -120,28 +117,27 @@ def get_dynamic_DNS_IP (DDNS_DOMAIN):
return IP
#-------------------------------------------------------------------------------
def set_dynamic_DNS_IP (DDNS_UPDATE_URL, DDNS_USER, DDNS_PASSWORD, DDNS_DOMAIN):
# -------------------------------------------------------------------------------
def set_dynamic_DNS_IP(DDNS_UPDATE_URL, DDNS_USER, DDNS_PASSWORD, DDNS_DOMAIN):
try:
# try running a subprocess
# Update Dynamic IP
curl_output = subprocess.check_output (['curl',
'-s',
DDNS_UPDATE_URL +
'username=' + DDNS_USER +
'&password=' + DDNS_PASSWORD +
'&hostname=' + DDNS_DOMAIN],
universal_newlines=True)
curl_output = subprocess.check_output([
'curl',
'-s',
DDNS_UPDATE_URL + 'username=' + DDNS_USER + '&password=' + DDNS_PASSWORD + '&hostname=' + DDNS_DOMAIN],
universal_newlines=True)
except subprocess.CalledProcessError as e:
# An error occurred, handle it
mylog('none', [f'[{pluginName}] ⚠ ERROR - ',e.output])
curl_output = ""
mylog('none', [f'[{pluginName}] ⚠ ERROR - ', e.output])
curl_output = ""
return curl_output
#===============================================================================
# ===============================================================================
# BEGIN
#===============================================================================
# ===============================================================================
if __name__ == '__main__':
main()
main()
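The update call above pushes the new address by shelling out to curl with username, password and hostname appended to DDNS_UPDATE_URL as a query string. A minimal sketch of the same lookup-compare-update flow using only the standard library; the URL, credentials and domain below are placeholders, not values taken from this plugin.

# Hedged sketch of the DDNS flow above, without curl. All values are placeholders.
import subprocess
import urllib.parse
import urllib.request


def current_ddns_ip(domain):
    # Resolve the DDNS hostname with dig, as the plugin does.
    try:
        return subprocess.check_output(['dig', '+short', domain], universal_newlines=True).strip()
    except subprocess.CalledProcessError:
        return ''


def push_update(update_url, user, password, domain):
    # Equivalent of the curl call: GET <update_url>username=..&password=..&hostname=..
    query = urllib.parse.urlencode({'username': user, 'password': password, 'hostname': domain})
    with urllib.request.urlopen(update_url + query, timeout=10) as resp:
        return resp.read().decode('utf-8', errors='replace')


if __name__ == '__main__':
    if current_ddns_ip('host.example.net') != '203.0.113.10':
        print(push_update('https://ddns.example.net/update?', 'user', 'secret', 'host.example.net'))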

View File

@@ -1,22 +1,22 @@
#!/usr/bin/env python
# !/usr/bin/env python
from __future__ import unicode_literals
import argparse
import os
import sys
import chardet
import chardet
# Register NetAlertX directories
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from plugin_helper import Plugin_Objects, handleEmpty, is_mac
from logger import mylog, Logger
from dhcp_leases import DhcpLeases
from helper import get_setting_value
import conf
from const import logPath
from pytz import timezone
from plugin_helper import Plugin_Objects, handleEmpty, is_mac # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from dhcp_leases import DhcpLeases # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
import conf # noqa: E402 [flake8 lint suppression]
from const import logPath # noqa: E402 [flake8 lint suppression]
from pytz import timezone # noqa: E402 [flake8 lint suppression]
# Make sure the TIMEZONE for logging is correct
conf.tz = timezone(get_setting_value('TIMEZONE'))
@@ -24,34 +24,38 @@ conf.tz = timezone(get_setting_value('TIMEZONE'))
# Make sure log level is initialized correctly
Logger(get_setting_value('LOG_LEVEL'))
pluginName= 'DHCPLSS'
pluginName = 'DHCPLSS'
LOG_PATH = logPath + '/plugins'
LOG_FILE = os.path.join(LOG_PATH, f'script.{pluginName}.log')
RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
# -------------------------------------------------------------
def main():
def main():
mylog('verbose', [f'[{pluginName}] In script'])
last_run_logfile = open(RESULT_FILE, 'a')
last_run_logfile = open(RESULT_FILE, 'a')
last_run_logfile.write("")
parser = argparse.ArgumentParser(description='Import devices from dhcp.leases files')
parser.add_argument('paths', action="store", help="absolute dhcp.leases file paths to check separated by ','")
parser.add_argument(
'paths',
action="store",
help="absolute dhcp.leases file paths to check separated by ','"
)
values = parser.parse_args()
plugin_objects = Plugin_Objects(RESULT_FILE)
if values.paths:
for path in values.paths.split('=')[1].split(','):
for path in values.paths.split('=')[1].split(','):
plugin_objects = get_entries(path, plugin_objects)
mylog('verbose', [f'[{pluginName}] {len(plugin_objects)} Entries found in "{path}"'])
mylog('verbose', [f'[{pluginName}] {len(plugin_objects)} Entries found in "{path}"'])
plugin_objects.write_result_file()
# -------------------------------------------------------------
def get_entries(path, plugin_objects):
@@ -66,7 +70,7 @@ def get_entries(path, plugin_objects):
# Use the detected encoding
encoding = result['encoding']
# Order: MAC, IP, IsActive, NAME, Hardware
# Order: MAC, IP, IsActive, NAME, Hardware
# Handle pihole-specific dhcp.leases files
if 'pihole' in path:
with open(path, 'r', encoding=encoding, errors='replace') as f:
@@ -111,9 +115,9 @@ def get_entries(path, plugin_objects):
if is_mac(lease.ethernet):
plugin_objects.add_object(
primaryId = handleEmpty(lease.ethernet),
secondaryId = handleEmpty(lease.ip),
watched1 = handleEmpty(lease.active),
primaryId = handleEmpty(lease.ethernet),
secondaryId = handleEmpty(lease.ip),
watched1 = handleEmpty(lease.active),
watched2 = handleEmpty(lease.hostname),
watched3 = handleEmpty(lease.hardware),
watched4 = handleEmpty(lease.binding_state),
@@ -122,5 +126,6 @@ def get_entries(path, plugin_objects):
)
return plugin_objects
if __name__ == '__main__':
main()
if __name__ == '__main__':
main()
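The lease parser above first guesses the file encoding with chardet and then reads the file using that encoding. A minimal sketch of that detect-then-decode step on its own; the path is a hypothetical example.

# Hedged sketch: detect a leases file's encoding with chardet before decoding it.
import chardet


def read_with_detected_encoding(path):
    with open(path, 'rb') as f:
        raw = f.read()
    detected = chardet.detect(raw)              # e.g. {'encoding': 'utf-8', 'confidence': 0.99, ...}
    encoding = detected['encoding'] or 'utf-8'  # fall back if detection returns None
    return raw.decode(encoding, errors='replace')


if __name__ == '__main__':
    print(read_with_detected_encoding('/tmp/dhcp.leases')[:200])  # hypothetical path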

View File

@@ -1,9 +1,8 @@
#!/usr/bin/env python
# !/usr/bin/env python
# Based on the work of https://github.com/leiweibau/Pi.Alert
import subprocess
import os
from datetime import datetime
import sys
@@ -11,12 +10,12 @@ import sys
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from plugin_helper import Plugin_Objects, Plugin_Object
from logger import mylog, Logger
from helper import get_setting_value
import conf
from pytz import timezone
from const import logPath
from plugin_helper import Plugin_Objects, Plugin_Object # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
import conf # noqa: E402 [flake8 lint suppression]
from pytz import timezone # noqa: E402 [flake8 lint suppression]
from const import logPath # noqa: E402 [flake8 lint suppression]
# Make sure the TIMEZONE for logging is correct
@@ -31,13 +30,14 @@ LOG_PATH = logPath + '/plugins'
LOG_FILE = os.path.join(LOG_PATH, f'script.{pluginName}.log')
RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
def main():
mylog('verbose', ['[DHCPSRVS] In script'])
last_run_logfile = open(RESULT_FILE, 'a')
last_run_logfile = open(RESULT_FILE, 'a')
last_run_logfile.write("")
plugin_objects = Plugin_Objects(RESULT_FILE)
timeoutSec = get_setting_value('DHCPSRVS_RUN_TIMEOUT')
@@ -46,10 +46,10 @@ def main():
try:
# Number of DHCP discovery probes to send
dhcp_probes = 1
# Initialize a list to store output lines from the scan
newLines = []
for _ in range(dhcp_probes):
output = subprocess.check_output(nmapArgs, universal_newlines=True, stderr=subprocess.STDOUT, timeout=timeoutSec)
newLines += output.split("\n")
@@ -57,9 +57,9 @@ def main():
newEntries = []
for line in newLines:
mylog('verbose', [f'[DHCPSRVS] Processing line: {line} '])
if 'Response ' in line and ' of ' in line:
newEntries.append(Plugin_Object())
elif 'Server Identifier' in line:
@@ -85,7 +85,7 @@ def main():
newEntries[-1].extra += ',' + newVal
for e in newEntries:
plugin_objects.add_object(
primaryId=e.primaryId,
secondaryId=e.secondaryId,
@@ -101,5 +101,6 @@ def main():
except Exception as e:
mylog('verbose', ['[DHCPSRVS] Error in main:', str(e)])
if __name__ == '__main__':
main()
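The loop above walks the nmap broadcast-dhcp-discover output line by line, starting a new entry on 'Response N of M' and picking the DHCP server address from 'Server Identifier' lines. A small parsing sketch against made-up output; real nmap formatting may differ slightly.

# Hedged sketch: extract DHCP server addresses from nmap broadcast-dhcp-discover output.
SAMPLE = """| broadcast-dhcp-discover:
|   Response 1 of 1:
|     Server Identifier: 192.168.1.1
|     IP Offered: 192.168.1.50"""


def dhcp_server_identifiers(text):
    servers = []
    for line in text.splitlines():
        if 'Server Identifier' in line:
            servers.append(line.split(':', 1)[1].strip())  # value after the first colon
    return servers


if __name__ == '__main__':
    print(dhcp_server_identifiers(SAMPLE))  # ['192.168.1.1']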

View File

@@ -1,5 +1,4 @@
#!/usr/bin/env python
# !/usr/bin/env python
import os
import sys
import subprocess
@@ -8,14 +7,14 @@ import subprocess
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from plugin_helper import Plugin_Objects
from logger import mylog, Logger
from const import logPath
from helper import get_setting_value
from database import DB
from models.device_instance import DeviceInstance
import conf
from pytz import timezone
from plugin_helper import Plugin_Objects # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from const import logPath # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
from database import DB # noqa: E402 [flake8 lint suppression]
from models.device_instance import DeviceInstance # noqa: E402 [flake8 lint suppression]
import conf # noqa: E402 [flake8 lint suppression]
from pytz import timezone # noqa: E402 [flake8 lint suppression]
# Make sure the TIMEZONE for logging is correct
conf.tz = timezone(get_setting_value('TIMEZONE'))
@@ -35,7 +34,7 @@ plugin_objects = Plugin_Objects(RESULT_FILE)
def main():
mylog('verbose', [f'[{pluginName}] In script'])
mylog('verbose', [f'[{pluginName}] In script'])
timeout = get_setting_value('DIGSCAN_RUN_TIMEOUT')
@@ -50,13 +49,13 @@ def main():
device_handler = DeviceInstance(db)
# Retrieve devices
if get_setting_value("REFRESH_FQDN"):
if get_setting_value("REFRESH_FQDN"):
devices = device_handler.getAll()
else:
else:
devices = device_handler.getUnknown()
mylog('verbose', [f'[{pluginName}] Devices count: {len(devices)}'])
mylog('verbose', [f'[{pluginName}] Devices count: {len(devices)}'])
# TEST - below is a WINDOWS host IP
# execute_name_lookup('192.168.1.121', timeout)
@@ -65,27 +64,27 @@ def main():
if domain_name != '':
plugin_objects.add_object(
# "MAC", "IP", "Server", "Name"
primaryId = device['devMac'],
secondaryId = device['devLastIP'],
watched1 = dns_server,
watched2 = domain_name,
watched3 = '',
watched4 = '',
extra = '',
foreignKey = device['devMac'])
primaryId = device['devMac'],
secondaryId = device['devLastIP'],
watched1 = dns_server,
watched2 = domain_name,
watched3 = '',
watched4 = '',
extra = '',
foreignKey = device['devMac']
)
plugin_objects.write_result_file()
mylog('verbose', [f'[{pluginName}] Script finished'])
mylog('verbose', [f'[{pluginName}] Script finished'])
return 0
#===============================================================================
# ===============================================================================
# Execute scan
#===============================================================================
def execute_name_lookup (ip, timeout):
# ===============================================================================
def execute_name_lookup(ip, timeout):
"""
Execute the DIG command on IP.
"""
@@ -97,32 +96,38 @@ def execute_name_lookup (ip, timeout):
try:
mylog('verbose', [f'[{pluginName}] DEBUG CMD :', args])
# try running a subprocess with a forced (timeout) in case the subprocess hangs
output = subprocess.check_output (args, universal_newlines=True, stderr=subprocess.STDOUT, timeout=(timeout), text=True).strip()
output = subprocess.check_output(
args,
universal_newlines=True,
stderr=subprocess.STDOUT,
timeout=(timeout),
text=True
).strip()
mylog('verbose', [f'[{pluginName}] DEBUG OUTPUT : {output}'])
domain_name = output
dns_server = ''
dns_server = ''
mylog('verbose', [f'[{pluginName}] Domain Name: {domain_name}'])
return domain_name, dns_server
except subprocess.CalledProcessError as e:
mylog('verbose', [f'[{pluginName}] ⚠ ERROR - {e.output}'])
except subprocess.TimeoutExpired:
mylog('verbose', [f'[{pluginName}] TIMEOUT - the process forcefully terminated as timeout reached'])
mylog('verbose', [f'[{pluginName}] ⚠ ERROR - {e.output}'])
if output == "": # check if the subprocess failed
mylog('verbose', [f'[{pluginName}] Scan: FAIL - check logs'])
else:
except subprocess.TimeoutExpired:
mylog('verbose', [f'[{pluginName}] TIMEOUT - the process forcefully terminated as timeout reached'])
if output == "": # check if the subprocess failed
mylog('verbose', [f'[{pluginName}] Scan: FAIL - check logs'])
else:
mylog('verbose', [f'[{pluginName}] Scan: SUCCESS'])
return '', ''
return '', ''
if __name__ == '__main__':
main()
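execute_name_lookup above runs dig against each device IP and treats the trimmed output as the domain name. A minimal sketch assuming the arguments amount to a reverse (-x) lookup; the IP is a documentation address.

# Hedged sketch: reverse-resolve one IP with dig, assuming a '-x' style lookup.
import subprocess


def reverse_lookup(ip, timeout=5):
    try:
        out = subprocess.check_output(['dig', '+short', '-x', ip],
                                      universal_newlines=True,
                                      stderr=subprocess.STDOUT,
                                      timeout=timeout).strip()
    except (subprocess.CalledProcessError, subprocess.TimeoutExpired):
        return ''
    return out.rstrip('.')  # dig returns FQDNs with a trailing dot


if __name__ == '__main__':
    print(reverse_lookup('192.0.2.1') or '(no PTR record)')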

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python
# !/usr/bin/env python
import os
import sys
@@ -17,11 +17,11 @@ from aiofreepybox.exceptions import NotOpenError, AuthorizationError
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from plugin_helper import Plugin_Objects
from logger import mylog, Logger
from const import logPath
from helper import get_setting_value
import conf
from plugin_helper import Plugin_Objects # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from const import logPath # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
import conf # noqa: E402 [flake8 lint suppression]
# Make sure the TIMEZONE for logging is correct
conf.tz = timezone(get_setting_value("TIMEZONE"))
@@ -79,6 +79,7 @@ def map_device_type(type: str):
mylog("minimal", [f"[{pluginName}] Unknown device type: {type}"])
return device_type_map["other"]
async def get_device_data(api_version: int, api_address: str, api_port: int):
# ensure existence of db path
config_base = Path(os.getenv("NETALERTX_CONFIG", "/data/config"))

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python
# !/usr/bin/env python
# test script by running:
# tbc
@@ -11,14 +11,14 @@ import re
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from plugin_helper import Plugin_Objects
from logger import mylog, Logger
from helper import get_setting_value
from const import logPath
from database import DB
from models.device_instance import DeviceInstance
import conf
from pytz import timezone
from plugin_helper import Plugin_Objects # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
from const import logPath # noqa: E402 [flake8 lint suppression]
from database import DB # noqa: E402 [flake8 lint suppression]
from models.device_instance import DeviceInstance # noqa: E402 [flake8 lint suppression]
import conf # noqa: E402 [flake8 lint suppression]
from pytz import timezone # noqa: E402 [flake8 lint suppression]
# Make sure the TIMEZONE for logging is correct
conf.tz = timezone(get_setting_value('TIMEZONE'))
@@ -33,16 +33,14 @@ LOG_FILE = os.path.join(LOG_PATH, f'script.{pluginName}.log')
RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
def main():
mylog('verbose', [f'[{pluginName}] In script'])
mylog('verbose', [f'[{pluginName}] In script'])
timeout = get_setting_value('ICMP_RUN_TIMEOUT')
args = get_setting_value('ICMP_ARGS')
in_regex = get_setting_value('ICMP_IN_REGEX')
# Create a database connection
db = DB() # instance of class DB
db.open()
@@ -61,46 +59,45 @@ def main():
# Filter devices based on the regex match
filtered_devices = [
device for device in all_devices
device for device in all_devices
if regex_pattern.match(device['devLastIP'])
]
mylog('verbose', [f'[{pluginName}] Devices to PING: {len(filtered_devices)}'])
mylog('verbose', [f'[{pluginName}] Devices to PING: {len(filtered_devices)}'])
for device in filtered_devices:
is_online, output = execute_scan(device['devLastIP'], timeout, args)
mylog('verbose', [f"[{pluginName}] ip: {device['devLastIP']} is_online: {is_online}"])
if is_online:
plugin_objects.add_object(
# "MAC", "IP", "Name", "Output"
primaryId = device['devMac'],
secondaryId = device['devLastIP'],
watched1 = device['devName'],
watched2 = output.replace('\n',''),
watched3 = '',
watched4 = '',
extra = '',
foreignKey = device['devMac'])
# "MAC", "IP", "Name", "Output"
primaryId = device['devMac'],
secondaryId = device['devLastIP'],
watched1 = device['devName'],
watched2 = output.replace('\n', ''),
watched3 = '',
watched4 = '',
extra = '',
foreignKey = device['devMac']
)
plugin_objects.write_result_file()
mylog('verbose', [f'[{pluginName}] Script finished'])
mylog('verbose', [f'[{pluginName}] Script finished'])
return 0
#===============================================================================
# ===============================================================================
# Execute scan
#===============================================================================
def execute_scan (ip, timeout, args):
# ===============================================================================
def execute_scan(ip, timeout, args):
"""
Execute the ICMP command on IP.
"""
icmp_args = ['ping'] + args.split() + [ip]
# Execute command
@@ -108,12 +105,18 @@ def execute_scan (ip, timeout, args):
try:
# try running a subprocess with a forced (timeout) in case the subprocess hangs
output = subprocess.check_output (icmp_args, universal_newlines=True, stderr=subprocess.STDOUT, timeout=(timeout), text=True)
output = subprocess.check_output(
icmp_args,
universal_newlines=True,
stderr=subprocess.STDOUT,
timeout=(timeout),
text=True
)
mylog('verbose', [f'[{pluginName}] DEBUG OUTPUT : {output}'])
# Parse output using case-insensitive regular expressions
#Synology-NAS:/# ping -i 0.5 -c 3 -W 8 -w 9 192.168.1.82
# Synology-NAS:/# ping -i 0.5 -c 3 -W 8 -w 9 192.168.1.82
# PING 192.168.1.82 (192.168.1.82): 56 data bytes
# 64 bytes from 192.168.1.82: seq=0 ttl=64 time=0.080 ms
# 64 bytes from 192.168.1.82: seq=1 ttl=64 time=0.081 ms
@@ -130,7 +133,7 @@ def execute_scan (ip, timeout, args):
# --- 192.168.1.92 ping statistics ---
# 3 packets transmitted, 0 packets received, 100% packet loss
# TODO: parse output and return True if online, False if Offline (100% packet loss, bad address)
# TODO: parse output and return True if online, False if Offline (100% packet loss, bad address)
is_online = True
# Check for 0% packet loss in the output
@@ -145,22 +148,20 @@ def execute_scan (ip, timeout, args):
except subprocess.CalledProcessError as e:
# An error occurred, handle it
mylog('verbose', [f'[{pluginName}] ⚠ ERROR - check logs'])
mylog('verbose', [f'[{pluginName}] ⚠ ERROR - check logs'])
mylog('verbose', [f'[{pluginName}]', e.output])
return False, output
return False, output
except subprocess.TimeoutExpired:
mylog('verbose', [f'[{pluginName}] TIMEOUT - the process forcefully terminated as timeout reached'])
return False, output
mylog('verbose', [f'[{pluginName}] TIMEOUT - the process forcefully terminated as timeout reached'])
return False, output
return False, output
return False, output
#===============================================================================
# ===============================================================================
# BEGIN
#===============================================================================
# ===============================================================================
if __name__ == '__main__':
main()
main()
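The comment above says the scan counts as successful when the ping output reports 0% packet loss. A small sketch of that check; note that a plain substring test for '0% packet loss' would also match '100% packet loss', so the example reads the number out first.

# Hedged sketch: decide online/offline from ping output via the packet-loss figure.
import re

SAMPLE = """PING 192.168.1.82 (192.168.1.82): 56 data bytes
64 bytes from 192.168.1.82: seq=0 ttl=64 time=0.080 ms
--- 192.168.1.82 ping statistics ---
3 packets transmitted, 3 packets received, 0% packet loss"""


def is_online(ping_output):
    match = re.search(r'(\d+(?:\.\d+)?)% packet loss', ping_output)
    return bool(match) and float(match.group(1)) == 0.0  # only 0% loss counts as online


if __name__ == '__main__':
    print(is_online(SAMPLE))  # True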

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python
# !/usr/bin/env python
import os
import time
@@ -11,13 +11,13 @@ import re
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from plugin_helper import Plugin_Objects
from utils.datetime_utils import timeNowDB
from logger import mylog, Logger, append_line_to_file
from helper import check_IP_format, get_setting_value
from const import logPath
import conf
from pytz import timezone
from plugin_helper import Plugin_Objects # noqa: E402 [flake8 lint suppression]
from utils.datetime_utils import timeNowDB # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger, append_line_to_file # noqa: E402 [flake8 lint suppression]
from helper import check_IP_format, get_setting_value # noqa: E402 [flake8 lint suppression]
from const import logPath # noqa: E402 [flake8 lint suppression]
import conf # noqa: E402 [flake8 lint suppression]
from pytz import timezone # noqa: E402 [flake8 lint suppression]
# Make sure the TIMEZONE for logging is correct
conf.tz = timezone(get_setting_value('TIMEZONE'))
@@ -31,39 +31,39 @@ LOG_PATH = logPath + '/plugins'
LOG_FILE = os.path.join(LOG_PATH, f'script.{pluginName}.log')
RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
no_internet_ip = '0.0.0.0'
def main():
mylog('verbose', [f'[{pluginName}] In script'])
mylog('verbose', [f'[{pluginName}] In script'])
parser = argparse.ArgumentParser(description='Check internet connectivity and IP')
parser.add_argument('prev_ip', action="store", help="Previous IP address to compare against the current IP")
parser.add_argument('DIG_GET_IP_ARG', action="store", help="Arguments for the 'dig' command to retrieve the IP address") # unused
parser.add_argument('DIG_GET_IP_ARG', action="store", help="Arguments for the 'dig' command to retrieve the IP address") # unused
values = parser.parse_args()
PREV_IP = values.prev_ip.split('=')[1]
PREV_IP = values.prev_ip.split('=')[1]
DIG_GET_IP_ARG = get_setting_value("INTRNT_DIG_GET_IP_ARG")
new_internet_IP = no_internet_ip
mylog('verbose', [f'[{pluginName}] INTRNT_DIG_GET_IP_ARG: ', DIG_GET_IP_ARG])
mylog('verbose', [f'[{pluginName}] INTRNT_DIG_GET_IP_ARG: ', DIG_GET_IP_ARG])
# METHOD 1: dig
# perform the new IP lookup N times specified by the INTRNT_TRIES setting
INTRNT_RETRIES = get_setting_value("INTRNT_RETRIES")
retries_needed = 0
for i in range(INTRNT_RETRIES + 1):
new_internet_IP, cmd_output = check_internet_IP( PREV_IP, DIG_GET_IP_ARG)
new_internet_IP, cmd_output = check_internet_IP(PREV_IP, DIG_GET_IP_ARG)
if new_internet_IP == no_internet_ip:
time.sleep(1*i) # pause i seconds before the next retry
time.sleep(1 * i)  # pause i seconds before the next retry
else:
retries_needed = i
break
@@ -71,68 +71,69 @@ def main():
# METHOD 2: curl
if new_internet_IP == no_internet_ip:
new_internet_IP, cmd_output = fallback_check_ip()
mylog('verbose', [f'[{pluginName}] Curl Fallback (new_internet_IP|cmd_output): {new_internet_IP} | {cmd_output}'])
mylog('verbose', [f'[{pluginName}] Curl Fallback (new_internet_IP|cmd_output): {new_internet_IP} | {cmd_output}'])
# logging
append_line_to_file (logPath + '/IP_changes.log', '['+str(timeNowDB()) +']\t'+ new_internet_IP +'\n')
append_line_to_file(logPath + '/IP_changes.log', '[' + str(timeNowDB()) + ']\t' + new_internet_IP + '\n')
plugin_objects = Plugin_Objects(RESULT_FILE)
plugin_objects = Plugin_Objects(RESULT_FILE)
plugin_objects.add_object(
primaryId = 'Internet', # MAC (Device Name)
secondaryId = new_internet_IP, # IP Address
secondaryId = new_internet_IP, # IP Address
watched1 = f'Previous IP: {PREV_IP}',
watched2 = cmd_output.replace('\n',''),
watched3 = retries_needed,
watched2 = cmd_output.replace('\n', ''),
watched3 = retries_needed,
watched4 = 'Gateway',
extra = f'Previous IP: {PREV_IP}',
foreignKey = 'Internet')
extra = f'Previous IP: {PREV_IP}',
foreignKey = 'Internet'
)
plugin_objects.write_result_file()
plugin_objects.write_result_file()
mylog('verbose', [f'[{pluginName}] Finished '])
mylog('verbose', [f'[{pluginName}] Finished '])
return 0
#===============================================================================
# ===============================================================================
# INTERNET IP CHANGE
#===============================================================================
def check_internet_IP ( PREV_IP, DIG_GET_IP_ARG ):
# ===============================================================================
def check_internet_IP(PREV_IP, DIG_GET_IP_ARG):
# Get Internet IP
mylog('verbose', [f'[{pluginName}] - Retrieving Internet IP'])
internet_IP, cmd_output = get_internet_IP(DIG_GET_IP_ARG)
mylog('verbose', [f'[{pluginName}] Current internet_IP : {internet_IP}'])
# Check previously stored IP
mylog('verbose', [f'[{pluginName}] Current internet_IP : {internet_IP}'])
# Check previously stored IP
previous_IP = no_internet_ip
if PREV_IP is not None and len(PREV_IP) > 0 :
if PREV_IP is not None and len(PREV_IP) > 0 :
previous_IP = PREV_IP
mylog('verbose', [f'[{pluginName}] previous_IP : {previous_IP}'])
mylog('verbose', [f'[{pluginName}] previous_IP : {previous_IP}'])
return internet_IP, cmd_output
#-------------------------------------------------------------------------------
def get_internet_IP (DIG_GET_IP_ARG):
# -------------------------------------------------------------------------------
def get_internet_IP(DIG_GET_IP_ARG):
cmd_output = ''
# Using 'dig'
dig_args = ['dig', '+short'] + DIG_GET_IP_ARG.strip().split()
try:
cmd_output = subprocess.check_output (dig_args, universal_newlines=True)
mylog('verbose', [f'[{pluginName}] DIG result : {cmd_output}'])
cmd_output = subprocess.check_output(dig_args, universal_newlines=True)
mylog('verbose', [f'[{pluginName}] DIG result : {cmd_output}'])
except subprocess.CalledProcessError as e:
mylog('verbose', [e.output])
cmd_output = '' # no internet
cmd_output = '' # no internet
# Check result is an IP
IP = check_IP_format (cmd_output)
IP = check_IP_format(cmd_output)
# Handle invalid response
if IP == '':
@@ -140,7 +141,8 @@ def get_internet_IP (DIG_GET_IP_ARG):
return IP, cmd_output
#-------------------------------------------------------------------------------
# -------------------------------------------------------------------------------
def fallback_check_ip():
"""Fallback mechanism using `curl ifconfig.me/ip`."""
try:
@@ -155,8 +157,9 @@ def fallback_check_ip():
mylog('none', [f'[{pluginName}] Fallback curl exception: {e}'])
return no_internet_ip, f'Fallback via curl exception: "{e}"'
#===============================================================================
# ===============================================================================
# BEGIN
#===============================================================================
# ===============================================================================
if __name__ == '__main__':
main()
main()
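The dig lookup above is retried up to INTRNT_RETRIES times, sleeping i seconds between attempts and stopping at the first usable address before falling back to curl. A stand-alone sketch of that retry loop; lookup() is a stand-in callable, not the plugin's function.

# Hedged sketch of the retry loop: call a lookup, back off, stop on the first real IP.
import time

NO_IP = '0.0.0.0'


def lookup_with_retries(lookup, retries):
    ip = NO_IP
    for i in range(retries + 1):
        ip = lookup()
        if ip != NO_IP:
            break
        time.sleep(1 * i)  # 0s, 1s, 2s, ... between attempts
    return ip


if __name__ == '__main__':
    attempts = iter([NO_IP, NO_IP, '203.0.113.7'])
    print(lookup_with_retries(lambda: next(attempts), retries=3))  # 203.0.113.7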

View File

@@ -1,6 +1,5 @@
#!/usr/bin/env python
# !/usr/bin/env python
import argparse
import os
import sys
import speedtest
@@ -9,13 +8,13 @@ import speedtest
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from plugin_helper import Plugin_Objects
from utils.datetime_utils import timeNowDB
from logger import mylog, Logger
from helper import get_setting_value
import conf
from pytz import timezone
from const import logPath
from plugin_helper import Plugin_Objects # noqa: E402 [flake8 lint suppression]
from utils.datetime_utils import timeNowDB # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
import conf # noqa: E402 [flake8 lint suppression]
from pytz import timezone # noqa: E402 [flake8 lint suppression]
from const import logPath # noqa: E402 [flake8 lint suppression]
# Make sure the TIMEZONE for logging is correct
conf.tz = timezone(get_setting_value('TIMEZONE'))
@@ -28,18 +27,16 @@ pluginName = 'INTRSPD'
LOG_PATH = logPath + '/plugins'
RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
def main():
mylog('verbose', ['[INTRSPD] In script'])
parser = argparse.ArgumentParser(description='Speedtest Plugin for NetAlertX')
values = parser.parse_args()
def main():
mylog('verbose', ['[INTRSPD] In script'])
plugin_objects = Plugin_Objects(RESULT_FILE)
speedtest_result = run_speedtest()
plugin_objects.add_object(
primaryId = 'Speedtest',
secondaryId = timeNowDB(),
secondaryId = timeNowDB(),
watched1 = speedtest_result['download_speed'],
watched2 = speedtest_result['upload_speed'],
watched3 = 'null',
@@ -49,25 +46,27 @@ def main():
)
plugin_objects.write_result_file()
def run_speedtest():
try:
st = speedtest.Speedtest(secure=True)
st.get_best_server()
download_speed = round(st.download() / 10**6, 2) # Convert to Mbps
upload_speed = round(st.upload() / 10**6, 2) # Convert to Mbps
mylog('verbose', [f"[INTRSPD] Result (down|up): {str(download_speed)} Mbps|{upload_speed} Mbps"])
mylog('verbose', [f"[INTRSPD] Result (down|up): {str(download_speed)} Mbps|{upload_speed} Mbps"])
return {
'download_speed': download_speed,
'upload_speed': upload_speed,
}
except Exception as e:
mylog('verbose', [f"[INTRSPD] Error running speedtest: {str(e)}"])
mylog('verbose', [f"[INTRSPD] Error running speedtest: {str(e)}"])
return {
'download_speed': -1,
'upload_speed': -1,
}
if __name__ == '__main__':
sys.exit(main())
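run_speedtest above divides the speedtest results by 10**6 before rounding; the speedtest library is generally documented as returning download and upload throughput in bits per second, so that division yields megabits per second. A one-line conversion for reference:

# Hedged sketch: bits-per-second to Mbps, matching the division used above.
def to_mbps(bits_per_second):
    return round(bits_per_second / 10**6, 2)


if __name__ == '__main__':
    print(to_mbps(94_370_000))  # 94.37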

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python
# !/usr/bin/env python
import os
import sys
@@ -11,11 +11,11 @@ from functools import reduce
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from plugin_helper import Plugin_Objects
from logger import mylog, Logger
from const import logPath
from helper import get_setting_value
import conf
from plugin_helper import Plugin_Objects # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from const import logPath # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
import conf # noqa: E402 [flake8 lint suppression]
# Make sure the TIMEZONE for logging is correct
conf.tz = timezone(get_setting_value('TIMEZONE'))
@@ -34,9 +34,8 @@ RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
plugin_objects = Plugin_Objects(RESULT_FILE)
def main():
mylog('verbose', [f'[{pluginName}] In script'])
mylog('verbose', [f'[{pluginName}] In script'])
# Retrieve configuration settings
SCAN_SUBNETS = get_setting_value('SCAN_SUBNETS')
@@ -48,33 +47,33 @@ def main():
entry.split('--interface=')[-1].strip() for entry in SCAN_SUBNETS if '--interface=' in entry
)
mylog('verbose', [f'[{pluginName}] Interfaces value: "{interfaces}"'])
mylog('verbose', [f'[{pluginName}] Interfaces value: "{interfaces}"'])
# retrieve data
raw_neighbors = get_neighbors(interfaces)
neighbors = parse_neighbors(raw_neighbors)
# Process the data into native application tables
if len(neighbors) > 0:
for device in neighbors:
plugin_objects.add_object(
primaryId = device['mac'],
secondaryId = device['ip'],
watched4 = device['last_seen'],
plugin_objects.add_object(
primaryId = device['mac'],
secondaryId = device['ip'],
watched4 = device['last_seen'],
# The following are always unknown
watched1 = device['hostname'], # don't use these --> handleEmpty(device['hostname']),
watched2 = device['vendor'], # handleEmpty(device['vendor']),
watched3 = device['device_type'], # handleEmpty(device['device_type']),
extra = '',
foreignKey = "" #device['mac']
# helpVal1 = "Something1", # Optional Helper values to be passed for mapping into the app
# helpVal2 = "Something1", # If you need to use even only 1, add the remaining ones too
# helpVal3 = "Something1", # and set them to 'null'. Check the the docs for details:
# helpVal4 = "Something1", # https://github.com/jokob-sk/NetAlertX/blob/main/docs/PLUGINS_DEV.md
)
# The following are always unknown
watched1 = device['hostname'], # don't use these --> handleEmpty(device['hostname']),
watched2 = device['vendor'], # don't use these --> handleEmpty(device['vendor']),
watched3 = device['device_type'], # don't use these --> handleEmpty(device['device_type']),
extra = '',
foreignKey = "" # device['mac']
# helpVal1 = "Something1", # Optional Helper values to be passed for mapping into the app
# helpVal2 = "Something1", # If you need to use even only 1, add the remaining ones too
# helpVal3 = "Something1", # and set them to 'null'. Check the the docs for details:
# helpVal4 = "Something1", # https://github.com/jokob-sk/NetAlertX/blob/main/docs/PLUGINS_DEV.md
)
mylog('verbose', [f'[{pluginName}] New entries: "{len(neighbors)}"'])
@@ -83,13 +82,14 @@ def main():
return 0
def parse_neighbors(raw_neighbors: list[str]):
neighbors = []
for line in raw_neighbors:
if "lladdr" in line and "REACHABLE" in line:
# Known data
fields = line.split()
if not is_multicast(fields[0]):
# mylog('verbose', [f'[{pluginName}] adding ip {fields[0]}"'])
neighbor = {}
@@ -101,9 +101,9 @@ def parse_neighbors(raw_neighbors: list[str]):
neighbor['hostname'] = '(unknown)'
neighbor['vendor'] = '(unknown)'
neighbor['device_type'] = '(unknown)'
neighbors.append(neighbor)
return neighbors
@@ -111,6 +111,7 @@ def is_multicast(ip):
prefixes = ['ff', '224', '231', '232', '233', '234', '238', '239']
return reduce(lambda acc, prefix: acc or ip.startswith(prefix), prefixes, False)
# retrieve data
def get_neighbors(interfaces):
@@ -119,7 +120,7 @@ def get_neighbors(interfaces):
for interface in interfaces.split(","):
try:
# Ping all IPv6 devices in multicast to trigger NDP
# Ping all IPv6 devices in multicast to trigger NDP
mylog('verbose', [f'[{pluginName}] Pinging on interface: "{interface}"'])
command = f"ping ff02::1%{interface} -c 2".split()
@@ -136,11 +137,11 @@ def get_neighbors(interfaces):
mylog('verbose', [f'[{pluginName}] Scanning interface succeeded: "{interface}"'])
except subprocess.CalledProcessError as e:
# An error occurred, handle it
mylog('verbose', [f'[{pluginName}] Scanning interface failed: "{interface}"'])
error_type = type(e).__name__ # Capture the error type
mylog('verbose', [f'[{pluginName}] Scanning interface failed: "{interface}" ({error_type})'])
return results
if __name__ == '__main__':
main()
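is_multicast above filters multicast neighbours by comparing address prefixes. The standard library can answer the same question for both IPv4 and IPv6; a sketch of that alternative check, shown for comparison only.

# Hedged sketch: multicast test via the stdlib ipaddress module.
import ipaddress


def is_multicast(ip):
    try:
        return ipaddress.ip_address(ip).is_multicast
    except ValueError:
        return False  # not a parseable address


if __name__ == '__main__':
    print(is_multicast('ff02::1'))      # True  (IPv6 all-nodes)
    print(is_multicast('239.255.0.1'))  # True  (IPv4 multicast range)
    print(is_multicast('192.0.2.1'))    # False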

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python
# !/usr/bin/env python
import os
import sys
@@ -7,18 +7,18 @@ INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
pluginName = 'LUCIRPC'
from plugin_helper import Plugin_Objects
from logger import mylog, Logger
from helper import get_setting_value
from const import logPath
import conf
from pytz import timezone
from plugin_helper import Plugin_Objects # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
from const import logPath # noqa: E402 [flake8 lint suppression]
import conf # noqa: E402 [flake8 lint suppression]
from pytz import timezone # noqa: E402 [flake8 lint suppression]
try:
from openwrt_luci_rpc import OpenWrtRpc
except:
mylog('error', [f'[{pluginName}] Failed import openwrt_luci_rpc'])
exit()
except ImportError as e:
mylog('error', [f'[{pluginName}] Failed import openwrt_luci_rpc: {e}'])
exit(1)
conf.tz = timezone(get_setting_value('TIMEZONE'))
@@ -30,13 +30,14 @@ RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
plugin_objects = Plugin_Objects(RESULT_FILE)
def main():
mylog('verbose', [f'[{pluginName}] start script.'])
def main():
mylog('verbose', [f'[{pluginName}] start script.'])
device_data = get_device_data()
for entry in device_data:
mylog('verbose', [f'[{pluginName}] found: ', str(entry.mac).lower()])
mylog('verbose', [f'[{pluginName}] found: ', str(entry.mac).lower()])
name = str(entry.hostname)
@@ -45,36 +46,38 @@ def main():
plugin_objects.add_object(
primaryId = str(entry.mac).lower(),
secondaryId = entry.ip,
secondaryId = entry.ip,
watched1 = entry.host,
watched2 = name,
watched3 = "",
watched3 = "",
watched4 = "",
extra = pluginName,
extra = pluginName,
foreignKey = str(entry.mac).lower())
plugin_objects.write_result_file()
mylog('verbose', [f'[{pluginName}] Script finished'])
mylog('verbose', [f'[{pluginName}] Script finished'])
return 0
def get_device_data():
router = OpenWrtRpc(
get_setting_value("LUCIRPC_host"),
get_setting_value("LUCIRPC_user"),
get_setting_value("LUCIRPC_password"),
get_setting_value("LUCIRPC_ssl"),
get_setting_value("LUCIRPC_user"),
get_setting_value("LUCIRPC_password"),
get_setting_value("LUCIRPC_ssl"),
get_setting_value("LUCIRPC_verify_ssl")
)
)
if router.is_logged_in():
mylog('verbose', [f'[{pluginName}] logged in successfully.'])
mylog('verbose', [f'[{pluginName}] logged in successfully.'])
else:
mylog('error', [f'[{pluginName}] login failed.'])
mylog('error', [f'[{pluginName}] login failed.'])
device_data = router.get_all_connected_devices(only_reachable=get_setting_value("LUCIRPC_only_reachable"))
return device_data
if __name__ == '__main__':
main()
main()

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python
# !/usr/bin/env python
import os
import sys
@@ -8,12 +8,12 @@ from collections import deque
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from logger import mylog, Logger
from helper import get_setting_value
from const import logPath
from messaging.in_app import remove_old
import conf
from pytz import timezone
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
from const import logPath # noqa: E402 [flake8 lint suppression]
from messaging.in_app import remove_old # noqa: E402 [flake8 lint suppression]
import conf # noqa: E402 [flake8 lint suppression]
from pytz import timezone # noqa: E402 [flake8 lint suppression]
# Make sure the TIMEZONE for logging is correct
conf.tz = timezone(get_setting_value('TIMEZONE'))
@@ -28,10 +28,9 @@ LOG_FILE = os.path.join(LOG_PATH, f'script.{pluginName}.log')
RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
def main():
mylog('verbose', [f'[{pluginName}] In script'])
mylog('verbose', [f'[{pluginName}] In script'])
MAINT_LOG_LENGTH = int(get_setting_value('MAINT_LOG_LENGTH'))
MAINT_NOTI_LENGTH = int(get_setting_value('MAINT_NOTI_LENGTH'))
@@ -39,7 +38,7 @@ def main():
# Check if set
if MAINT_LOG_LENGTH != 0:
mylog('verbose', [f'[{pluginName}] Cleaning file'])
mylog('verbose', [f'[{pluginName}] Cleaning file'])
logFile = logPath + "/app.log"
@@ -54,19 +53,19 @@ def main():
with open(logFile, 'w') as file:
# Write the last N lines back to the file
file.writelines(lines_to_keep)
mylog('verbose', [f'[{pluginName}] Cleanup finished'])
mylog('verbose', [f'[{pluginName}] Cleanup finished'])
# Check if set
if MAINT_NOTI_LENGTH != 0:
mylog('verbose', [f'[{pluginName}] Cleaning in-app notification history'])
mylog('verbose', [f'[{pluginName}] Cleaning in-app notification history'])
remove_old(MAINT_NOTI_LENGTH)
return 0
#===============================================================================
# ===============================================================================
# BEGIN
#===============================================================================
# ===============================================================================
if __name__ == '__main__':
main()
main()
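The cleanup above keeps only the last MAINT_LOG_LENGTH lines of app.log. A minimal sketch of the same keep-the-tail idea using collections.deque; the path and line count are illustrative.

# Hedged sketch: trim a log file down to its last N lines.
from collections import deque


def trim_log(path, keep_last):
    with open(path, 'r', errors='replace') as f:
        tail = deque(f, maxlen=keep_last)  # maxlen discards older lines as newer ones arrive
    with open(path, 'w') as f:
        f.writelines(tail)


if __name__ == '__main__':
    trim_log('/tmp/app.log', keep_last=500)  # hypothetical path and length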

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python
# !/usr/bin/env python
import os
import sys
@@ -7,14 +7,14 @@ import sys
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from plugin_helper import Plugin_Objects
from logger import mylog, Logger
from helper import get_setting_value
from const import logPath
import conf
from pytz import timezone
from librouteros import connect
from librouteros.exceptions import TrapError
from plugin_helper import Plugin_Objects # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
from const import logPath # noqa: E402 [flake8 lint suppression]
import conf # noqa: E402 [flake8 lint suppression]
from pytz import timezone # noqa: E402 [flake8 lint suppression]
from librouteros import connect # noqa: E402 [flake8 lint suppression]
from librouteros.exceptions import TrapError # noqa: E402 [flake8 lint suppression]
# Make sure the TIMEZONE for logging is correct
conf.tz = timezone(get_setting_value('TIMEZONE'))
@@ -29,7 +29,6 @@ LOG_FILE = os.path.join(LOG_PATH, f'script.{pluginName}.log')
RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
def main():
mylog('verbose', [f'[{pluginName}] In script'])
@@ -49,7 +48,7 @@ def main():
plugin_objects = get_entries(plugin_objects)
plugin_objects.write_result_file()
mylog('verbose', [f'[{pluginName}] Scan finished, found {len(plugin_objects)} devices'])
@@ -58,10 +57,10 @@ def get_entries(plugin_objects: Plugin_Objects) -> Plugin_Objects:
try:
# connect router
api = connect(username=MT_USER, password=MT_PASS, host=MT_HOST, port=MT_PORT)
# get dhcp leases
leases = api('/ip/dhcp-server/lease/print')
for lease in leases:
lease_id = lease.get('.id')
address = lease.get('address')
@@ -71,8 +70,11 @@ def get_entries(plugin_objects: Plugin_Objects) -> Plugin_Objects:
last_seen = lease.get('last-seen')
status = lease.get('status')
device_name = comment or host_name or "(unknown)"
mylog('verbose', [f"ID: {lease_id}, Address: {address}, MAC Address: {mac_address}, Host Name: {host_name}, Comment: {comment}, Last Seen: {last_seen}, Status: {status}"])
mylog(
'verbose',
[f"ID: {lease_id}, Address: {address}, MAC Address: {mac_address}, Host Name: {host_name}, Comment: {comment}, Last Seen: {last_seen}, Status: {status}"]
)
if (status == "bound"):
plugin_objects.add_object(
@@ -83,7 +85,7 @@ def get_entries(plugin_objects: Plugin_Objects) -> Plugin_Objects:
watched3 = host_name,
watched4 = last_seen,
extra = '',
helpVal1 = comment,
helpVal1 = comment,
foreignKey = mac_address)
except TrapError as e:
@@ -91,13 +93,13 @@ def get_entries(plugin_objects: Plugin_Objects) -> Plugin_Objects:
except Exception as e:
mylog('error', [f"Failed to connect to MikroTik API: {e}"])
mylog('verbose', [f'[{pluginName}] Script finished'])
mylog('verbose', [f'[{pluginName}] Script finished'])
return plugin_objects
#===============================================================================
# ===============================================================================
# BEGIN
#===============================================================================
# ===============================================================================
if __name__ == '__main__':
main()
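get_entries above connects to the MikroTik API with librouteros and keeps DHCP leases whose status is 'bound'. A condensed sketch of that call sequence; the host, credentials and exact lease field names are assumptions based on the usual RouterOS keys.

# Hedged sketch: list bound DHCP leases from a MikroTik router via librouteros.
from librouteros import connect


def bound_leases(host, user, password, port=8728):
    api = connect(username=user, password=password, host=host, port=port)
    for lease in api('/ip/dhcp-server/lease/print'):
        if lease.get('status') == 'bound':
            # 'mac-address' / 'host-name' are the usual RouterOS field names (assumed here)
            yield lease.get('mac-address'), lease.get('address'), lease.get('host-name')


if __name__ == '__main__':
    for mac, ip, name in bound_leases('192.168.88.1', 'admin', 'secret'):
        print(mac, ip, name or '(unknown)')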

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python
# !/usr/bin/env python
import os
import sys
@@ -8,14 +8,14 @@ import subprocess
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from plugin_helper import Plugin_Objects
from logger import mylog, Logger
from const import logPath
from helper import get_setting_value
from database import DB
from models.device_instance import DeviceInstance
import conf
from pytz import timezone
from plugin_helper import Plugin_Objects # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from const import logPath # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
from database import DB # noqa: E402 [flake8 lint suppression]
from models.device_instance import DeviceInstance # noqa: E402 [flake8 lint suppression]
import conf # noqa: E402 [flake8 lint suppression]
from pytz import timezone # noqa: E402 [flake8 lint suppression]
# Make sure the TIMEZONE for logging is correct
conf.tz = timezone(get_setting_value('TIMEZONE'))
@@ -34,9 +34,8 @@ RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
plugin_objects = Plugin_Objects(RESULT_FILE)
def main():
mylog('verbose', [f'[{pluginName}] In script'])
mylog('verbose', [f'[{pluginName}] In script'])
# timeout = get_setting_value('NBLOOKUP_RUN_TIMEOUT')
timeout = 20
@@ -52,13 +51,13 @@ def main():
device_handler = DeviceInstance(db)
# Retrieve devices
if get_setting_value("REFRESH_FQDN"):
if get_setting_value("REFRESH_FQDN"):
devices = device_handler.getAll()
else:
else:
devices = device_handler.getUnknown()
mylog('verbose', [f'[{pluginName}] Devices count: {len(devices)}'])
mylog('verbose', [f'[{pluginName}] Devices count: {len(devices)}'])
# TEST - below is a WINDOWS host IP
# execute_name_lookup('192.168.1.121', timeout)
@@ -67,31 +66,32 @@ def main():
if domain_name != '':
plugin_objects.add_object(
# "MAC", "IP", "Server", "Name"
primaryId = device['devMac'],
secondaryId = device['devLastIP'],
watched1 = dns_server,
watched2 = domain_name,
watched3 = '',
watched4 = '',
extra = '',
foreignKey = device['devMac'])
# "MAC", "IP", "Server", "Name"
primaryId = device['devMac'],
secondaryId = device['devLastIP'],
watched1 = dns_server,
watched2 = domain_name,
watched3 = '',
watched4 = '',
extra = '',
foreignKey = device['devMac']
)
plugin_objects.write_result_file()
mylog('verbose', [f'[{pluginName}] Script finished'])
mylog('verbose', [f'[{pluginName}] Script finished'])
return 0
#===============================================================================
# ===============================================================================
# Execute scan
#===============================================================================
def execute_name_lookup (ip, timeout):
# ===============================================================================
def execute_name_lookup(ip, timeout):
"""
Execute the NBTSCAN command on IP.
"""
args = ['nbtscan', ip]
# Execute command
@@ -99,20 +99,25 @@ def execute_name_lookup (ip, timeout):
try:
mylog('verbose', [f'[{pluginName}] DEBUG CMD :', args])
# try running a subprocess with a forced (timeout) in case the subprocess hangs
output = subprocess.check_output (args, universal_newlines=True, stderr=subprocess.STDOUT, timeout=(timeout), text=True)
output = subprocess.check_output(
args,
universal_newlines=True,
stderr=subprocess.STDOUT,
timeout=(timeout),
text=True
)
mylog('verbose', [f'[{pluginName}] DEBUG OUTPUT : {output}'])
domain_name = ''
dns_server = ''
# Split the output into lines
lines = output.splitlines()
# Look for the first line containing a valid NetBIOS name entry
index = 0
for line in lines:
if 'Doing NBT name scan' not in line and ip in line:
# Split the line and extract the primary NetBIOS name
@@ -121,7 +126,6 @@ def execute_name_lookup (ip, timeout):
domain_name = parts[1]
else:
mylog('verbose', [f'[{pluginName}] ⚠ ERROR - Unexpected output format: {line}'])
mylog('verbose', [f'[{pluginName}] Domain Name: {domain_name}'])
@@ -132,18 +136,21 @@ def execute_name_lookup (ip, timeout):
# if "NXDOMAIN" in e.output:
# mylog('verbose', [f'[{pluginName}]', f"No PTR record found for IP: {ip}"])
# else:
mylog('verbose', [f'[{pluginName}] ⚠ ERROR - {e.output}'])
except subprocess.TimeoutExpired:
mylog('verbose', [f'[{pluginName}] TIMEOUT - the process forcefully terminated as timeout reached'])
mylog('verbose', [f'[{pluginName}] ⚠ ERROR - {e.output}'])
if output == "": # check if the subprocess failed
mylog('verbose', [f'[{pluginName}] Scan: FAIL - check logs'])
else:
except subprocess.TimeoutExpired:
mylog('verbose', [f'[{pluginName}] TIMEOUT - the process forcefully terminated as timeout reached'])
if output == "": # check if the subprocess failed
mylog('verbose', [f'[{pluginName}] Scan: FAIL - check logs'])
else:
mylog('verbose', [f'[{pluginName}] Scan: SUCCESS'])
return '', ''
return '', ''
# ===============================================================================
# BEGIN
# ===============================================================================
if __name__ == '__main__':
main()
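execute_name_lookup above skips the 'Doing NBT name scan' banner and takes the second column of the line containing the target IP as the NetBIOS name. The same parsing applied to made-up nbtscan output; the real column layout can vary.

# Hedged sketch: pull the NetBIOS name for one IP out of nbtscan-style output.
SAMPLE = """Doing NBT name scan for addresses from 192.168.1.121

IP address       NetBIOS Name     Server    User             MAC address
------------------------------------------------------------------------------
192.168.1.121    WORKSTATION-01   <server>  <unknown>        00:11:22:33:44:55"""


def netbios_name(output, ip):
    for line in output.splitlines():
        if 'Doing NBT name scan' not in line and ip in line:
            parts = line.split()
            if len(parts) > 1:
                return parts[1]  # second column holds the NetBIOS name
    return ''


if __name__ == '__main__':
    print(netbios_name(SAMPLE, '192.168.1.121'))  # WORKSTATION-01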

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python
# !/usr/bin/env python
# test script by running:
# tbc
@@ -7,19 +7,18 @@ import subprocess
import sys
import hashlib
import re
import nmap
import nmap
# Register NetAlertX directories
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from plugin_helper import Plugin_Objects
from logger import mylog, Logger
from helper import get_setting_value
from const import logPath
from database import DB
import conf
from pytz import timezone
from plugin_helper import Plugin_Objects # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
from const import logPath # noqa: E402 [flake8 lint suppression]
import conf # noqa: E402 [flake8 lint suppression]
from pytz import timezone # noqa: E402 [flake8 lint suppression]
# Make sure the TIMEZONE for logging is correct
conf.tz = timezone(get_setting_value('TIMEZONE'))
@@ -37,46 +36,46 @@ RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
def main():
mylog('verbose', [f'[{pluginName}] In script'])
mylog('verbose', [f'[{pluginName}] In script'])
timeout = get_setting_value('NMAPDEV_RUN_TIMEOUT')
fakeMac = get_setting_value('NMAPDEV_FAKE_MAC')
subnets = get_setting_value('SCAN_SUBNETS')
args = get_setting_value('NMAPDEV_ARGS')
mylog('verbose', [f'[{pluginName}] subnets: ', subnets])
mylog('verbose', [f'[{pluginName}] subnets: ', subnets])
# Initialize the Plugin obj output file
plugin_objects = Plugin_Objects(RESULT_FILE)
unique_devices = execute_scan(subnets, timeout, fakeMac, args)
mylog('verbose', [f'[{pluginName}] Devices found: {len(unique_devices)}'])
mylog('verbose', [f'[{pluginName}] Devices found: {len(unique_devices)}'])
for device in unique_devices:
plugin_objects.add_object(
# "MAC", "IP", "Name", "Vendor", "Interface"
primaryId = device['mac'].lower(),
secondaryId = device['ip'],
watched1 = device['name'],
watched2 = device['vendor'],
watched3 = device['interface'],
watched4 = '',
extra = '',
foreignKey = device['mac'])
# "MAC", "IP", "Name", "Vendor", "Interface"
primaryId = device['mac'].lower(),
secondaryId = device['ip'],
watched1 = device['name'],
watched2 = device['vendor'],
watched3 = device['interface'],
watched4 = '',
extra = '',
foreignKey = device['mac']
)
plugin_objects.write_result_file()
mylog('verbose', [f'[{pluginName}] Script finished'])
mylog('verbose', [f'[{pluginName}] Script finished'])
return 0
#===============================================================================
# ===============================================================================
# Execute scan
#===============================================================================
# ===============================================================================
def execute_scan(subnets_list, timeout, fakeMac, args):
devices_list = []
@@ -103,22 +102,21 @@ def execute_scan(subnets_list, timeout, fakeMac, args):
return devices_list
def execute_scan_on_interface (interface, timeout, args):
# Remove unsupported VLAN flags
def execute_scan_on_interface(interface, timeout, args):
# Remove unsupported VLAN flags
interface = re.sub(r'--vlan=\S+', '', interface).strip()
# Prepare command arguments
scan_args = args.split() + interface.replace('--interface=','-e ').split()
scan_args = args.split() + interface.replace('--interface=', '-e ').split()
mylog('verbose', [f'[{pluginName}] scan_args: ', scan_args])
mylog('verbose', [f'[{pluginName}] scan_args: ', scan_args])
try:
result = subprocess.check_output(scan_args, universal_newlines=True)
except subprocess.CalledProcessError as e:
error_type = type(e).__name__
result = ""
mylog('verbose', [f'[{pluginName}] ERROR: ', error_type])
mylog('verbose', [f'[{pluginName}] ERROR: ', error_type])
return result
@@ -130,28 +128,25 @@ def parse_nmap_xml(xml_output, interface, fakeMac):
nm = nmap.PortScanner()
nm.analyse_nmap_xml_scan(xml_output)
mylog('verbose', [f'[{pluginName}] Number of hosts: ', len(nm.all_hosts())])
mylog('verbose', [f'[{pluginName}] Number of hosts: ', len(nm.all_hosts())])
for host in nm.all_hosts():
hostname = nm[host].hostname() or '(unknown)'
hostname = nm[host].hostname() or '(unknown)'
ip = nm[host]['addresses']['ipv4'] if 'ipv4' in nm[host]['addresses'] else ''
mac = nm[host]['addresses']['mac'] if 'mac' in nm[host]['addresses'] else ''
mylog('verbose', [f'[{pluginName}] nm[host]: ', nm[host]])
mylog('verbose', [f'[{pluginName}] nm[host]: ', nm[host]])
vendor = ''
if nm[host]['vendor']:
mylog('verbose', [f'[{pluginName}] entry: ', nm[host]['vendor']])
mylog('verbose', [f'[{pluginName}] entry: ', nm[host]['vendor']])
for key, value in nm[host]['vendor'].items():
vendor = value
break
# Log debug information
mylog('verbose', [f"[{pluginName}] Hostname: {hostname}, IP: {ip}, MAC: {mac}, Vendor: {vendor}"])
@@ -172,24 +167,24 @@ def parse_nmap_xml(xml_output, interface, fakeMac):
# MAC or IP missing
mylog('verbose', [f"[{pluginName}] Skipping: {hostname}, IP or MAC missing, or NMAPDEV_GENERATE_MAC setting not enabled"])
except Exception as e:
mylog('verbose', [f"[{pluginName}] Error parsing nmap XML: ", str(e)])
return devices_list
def string_to_mac_hash(input_string):
# Calculate a hash using SHA-256
sha256_hash = hashlib.sha256(input_string.encode()).hexdigest()
# Take the first 12 characters of the hash and format as a MAC address
mac_hash = ':'.join(sha256_hash[i:i+2] for i in range(0, 12, 2))
mac_hash = ':'.join(sha256_hash[i:i + 2] for i in range(0, 12, 2))
return mac_hash
#===============================================================================
# ===============================================================================
# BEGIN
#===============================================================================
# ===============================================================================
if __name__ == '__main__':
main()
main()
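string_to_mac_hash above derives a stable placeholder MAC from a SHA-256 digest when nmap cannot see the real one. A tiny usage sketch showing that the same input always maps to the same fake MAC.

# Hedged sketch: deterministic fake MAC from a hash, mirroring string_to_mac_hash.
import hashlib


def fake_mac(seed):
    digest = hashlib.sha256(seed.encode()).hexdigest()
    return ':'.join(digest[i:i + 2] for i in range(0, 12, 2))  # first 6 bytes formatted as a MAC


if __name__ == '__main__':
    print(fake_mac('192.0.2.10'))  # identical every run for the same input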

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python
# !/usr/bin/env python
import os
import argparse
@@ -9,13 +9,13 @@ import subprocess
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from plugin_helper import Plugin_Objects
from logger import mylog, Logger, append_line_to_file
from utils.datetime_utils import timeNowDB
from helper import get_setting_value
from const import logPath
import conf
from pytz import timezone
from plugin_helper import Plugin_Objects # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger, append_line_to_file # noqa: E402 [flake8 lint suppression]
from utils.datetime_utils import timeNowDB # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
from const import logPath # noqa: E402 [flake8 lint suppression]
import conf # noqa: E402 [flake8 lint suppression]
from pytz import timezone # noqa: E402 [flake8 lint suppression]
# Make sure the TIMEZONE for logging is correct
conf.tz = timezone(get_setting_value('TIMEZONE'))
@@ -32,7 +32,8 @@ RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
# Initialize the Plugin obj output file
plugin_objects = Plugin_Objects(RESULT_FILE)
#-------------------------------------------------------------------------------
# -------------------------------------------------------------------------------
def main():
parser = argparse.ArgumentParser(
description='Scan ports of devices specified by IP addresses'
@@ -85,7 +86,7 @@ def main():
mylog('verbose', [f'[{pluginName}] Total number of ports found by NMAP: ', len(entries)])
for entry in entries:
for entry in entries:
plugin_objects.add_object(
primaryId = entry.mac, # MAC (Device Name)
@@ -94,14 +95,14 @@ def main():
watched2 = entry.service,
watched3 = entry.ip + ":" + entry.port,
watched4 = "",
extra = entry.extra,
foreignKey = entry.mac
extra = entry.extra,
foreignKey = entry.mac
)
plugin_objects.write_result_file()
#-------------------------------------------------------------------------------
# -------------------------------------------------------------------------------
class nmap_entry:
def __init__(self, ip, mac, time, port, state, service, name = '', extra = '', index = 0):
self.ip = ip
@@ -109,13 +110,13 @@ class nmap_entry:
self.time = time
self.port = port
self.state = state
self.service = service
self.service = service
self.extra = extra
self.index = index
self.hash = str(mac) + str(port)+ str(state)+ str(service)
self.hash = str(mac) + str(port) + str(state) + str(service)
#-------------------------------------------------------------------------------
# -------------------------------------------------------------------------------
def parse_kv_args(raw_args):
"""
Converts ['ips=a,b,c', 'macs=x,y,z', 'timeout=5'] to a dict.
@@ -125,26 +126,28 @@ def parse_kv_args(raw_args):
for item in raw_args:
if '=' not in item:
mylog('none', [f"[{pluginName}] Scan: Invalid parameter (missing '='): {item}"])
mylog('none', [f"[{pluginName}] Scan: Invalid parameter (missing '='): {item}"])
key, value = item.split('=', 1)
if key in parsed:
mylog('none', [f"[{pluginName}] Scan: Duplicate parameter supplied: {key}"])
mylog('none', [f"[{pluginName}] Scan: Duplicate parameter supplied: {key}"])
parsed[key] = value
return parsed
#-------------------------------------------------------------------------------
# -------------------------------------------------------------------------------
def safe_split_list(value, keyname):
"""Split comma list safely and ensure no empty items."""
items = [x.strip() for x in value.split(',') if x.strip()]
if not items:
mylog('none', [f"[{pluginName}] Scan: {keyname} list is empty or invalid"])
mylog('none', [f"[{pluginName}] Scan: {keyname} list is empty or invalid"])
return items
#-------------------------------------------------------------------------------
# -------------------------------------------------------------------------------
def performNmapScan(deviceIPs, deviceMACs, timeoutSec, args):
"""
run nmap scan on a list of devices
@@ -154,15 +157,12 @@ def performNmapScan(deviceIPs, deviceMACs, timeoutSec, args):
# collect ports / new Nmap Entries
newEntriesTmp = []
if len(deviceIPs) > 0:
if len(deviceIPs) > 0:
devTotal = len(deviceIPs)
mylog('verbose', [f'[{pluginName}] Scan: Nmap for max ', str(timeoutSec), 's ('+ str(round(int(timeoutSec) / 60, 1)) +'min) per device'])
mylog('verbose', ["[NMAP Scan] Estimated max delay: ", (devTotal * int(timeoutSec)), 's ', '(', round((devTotal * int(timeoutSec))/60,1) , 'min)' ])
mylog('verbose', [f'[{pluginName}] Scan: Nmap for max ', str(timeoutSec), 's (' + str(round(int(timeoutSec) / 60, 1)) + 'min) per device'])
mylog('verbose', ["[NMAP Scan] Estimated max delay: ", (devTotal * int(timeoutSec)), 's ', '(', round((devTotal * int(timeoutSec)) / 60, 1) , 'min)'])
devIndex = 0
for ip in deviceIPs:
@@ -171,67 +171,63 @@ def performNmapScan(deviceIPs, deviceMACs, timeoutSec, args):
# prepare arguments from user supplied ones
nmapArgs = ['nmap'] + args.split() + [ip]
progress = ' (' + str(devIndex+1) + '/' + str(devTotal) + ')'
progress = ' (' + str(devIndex + 1) + '/' + str(devTotal) + ')'
try:
# try running a subprocess with a forced timeout in case the subprocess hangs
output = subprocess.check_output (nmapArgs, universal_newlines=True, stderr=subprocess.STDOUT, timeout=(float(timeoutSec)))
output = subprocess.check_output(
nmapArgs,
universal_newlines=True,
stderr=subprocess.STDOUT,
timeout=(float(timeoutSec))
)
except subprocess.CalledProcessError as e:
# An error occurred, handle it
mylog('none', ["[NMAP Scan] " ,e.output])
mylog('none', ["[NMAP Scan] ⚠ ERROR - Nmap Scan - check logs", progress])
mylog('none', ["[NMAP Scan] ", e.output])
mylog('none', ["[NMAP Scan] ⚠ ERROR - Nmap Scan - check logs", progress])
except subprocess.TimeoutExpired:
mylog('verbose', [f'[{pluginName}] Nmap TIMEOUT - the process forcefully terminated as timeout reached for ', ip, progress])
mylog('verbose', [f'[{pluginName}] Nmap TIMEOUT - the process forcefully terminated as timeout reached for ', ip, progress])
if output == "": # check if the subprocess failed
mylog('minimal', [f'[{pluginName}] Nmap FAIL for ', ip, progress ,' check logs for details'])
else:
if output == "": # check if the subprocess failed
mylog('minimal', [f'[{pluginName}] Nmap FAIL for ', ip, progress, ' check logs for details'])
else:
mylog('verbose', [f'[{pluginName}] Nmap SUCCESS for ', ip, progress])
# check the last run output
# check the last run output
newLines = output.split('\n')
# regular logging
for line in newLines:
append_line_to_file (logPath + '/app_nmap.log', line +'\n')
append_line_to_file(logPath + '/app_nmap.log', line + '\n')
index = 0
startCollecting = False
duration = ""
duration = ""
newPortsPerDevice = 0
for line in newLines:
for line in newLines:
if 'Starting Nmap' in line:
if len(newLines) > index+1 and 'Note: Host seems down' in newLines[index+1]:
break # this entry is empty
if len(newLines) > index + 1 and 'Note: Host seems down' in newLines[index + 1]:
break # this entry is empty
elif 'PORT' in line and 'STATE' in line and 'SERVICE' in line:
startCollecting = True
elif 'PORT' in line and 'STATE' in line and 'SERVICE' in line:
startCollecting = False # end reached
elif startCollecting and len(line.split()) == 3:
elif 'PORT' in line and 'STATE' in line and 'SERVICE' in line:
startCollecting = False # end reached
elif startCollecting and len(line.split()) == 3:
newEntriesTmp.append(nmap_entry(ip, deviceMACs[devIndex], timeNowDB(), line.split()[0], line.split()[1], line.split()[2]))
newPortsPerDevice += 1
elif 'Nmap done' in line:
duration = line.split('scanned in ')[1]
mylog('verbose', [f'[{pluginName}] {newPortsPerDevice} ports found on {deviceMACs[devIndex]}'])
duration = line.split('scanned in ')[1]
mylog('verbose', [f'[{pluginName}] {newPortsPerDevice} ports found on {deviceMACs[devIndex]} after {duration}'])
index += 1
devIndex += 1
#end for loop
return newEntriesTmp
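# A small self-contained sketch of the port-table parsing done above, run against an
# invented nmap report; only lines between the "PORT STATE SERVICE" header and the
# "Nmap done" footer are collected.
sample_report = """Starting Nmap 7.94 ( https://nmap.org )
Nmap scan report for 192.168.1.10
PORT     STATE SERVICE
22/tcp   open  ssh
80/tcp   open  http
Nmap done: 1 IP address (1 host up) scanned in 2.31 seconds"""

collecting = False
for report_line in sample_report.split('\n'):
    if 'PORT' in report_line and 'STATE' in report_line and 'SERVICE' in report_line:
        collecting = True
    elif 'Nmap done' in report_line:
        collecting = False
    elif collecting and len(report_line.split()) == 3:
        port, state, service = report_line.split()
        print(port, state, service)   # e.g. 22/tcp open ssh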
#===============================================================================
# ===============================================================================
# BEGIN
#===============================================================================
# ===============================================================================
if __name__ == '__main__':
main()

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python
# !/usr/bin/env python
# test script by running:
# tbc
@@ -11,14 +11,14 @@ import re
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from plugin_helper import Plugin_Objects
from logger import mylog, Logger
from helper import get_setting_value
from const import logPath
from database import DB
from models.device_instance import DeviceInstance
import conf
from pytz import timezone
from plugin_helper import Plugin_Objects # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
from const import logPath # noqa: E402 [flake8 lint suppression]
from database import DB # noqa: E402 [flake8 lint suppression]
from models.device_instance import DeviceInstance # noqa: E402 [flake8 lint suppression]
import conf # noqa: E402 [flake8 lint suppression]
from pytz import timezone # noqa: E402 [flake8 lint suppression]
# Make sure the TIMEZONE for logging is correct
conf.tz = timezone(get_setting_value('TIMEZONE'))
@@ -33,11 +33,9 @@ LOG_FILE = os.path.join(LOG_PATH, f'script.{pluginName}.log')
RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
def main():
mylog('verbose', [f'[{pluginName}] In script'])
mylog('verbose', [f'[{pluginName}] In script'])
timeout = get_setting_value('NSLOOKUP_RUN_TIMEOUT')
@@ -52,13 +50,13 @@ def main():
device_handler = DeviceInstance(db)
# Retrieve devices
if get_setting_value("REFRESH_FQDN"):
if get_setting_value("REFRESH_FQDN"):
devices = device_handler.getAll()
else:
else:
devices = device_handler.getUnknown()
mylog('verbose', [f'[{pluginName}] Devices count: {len(devices)}'])
mylog('verbose', [f'[{pluginName}] Devices count: {len(devices)}'])
# TEST - below is a WINDOWS host IP
# execute_name_lookup('192.168.1.121', timeout)
@@ -67,31 +65,32 @@ def main():
if domain_name != '':
plugin_objects.add_object(
# "MAC", "IP", "Server", "Name"
primaryId = device['devMac'],
secondaryId = device['devLastIP'],
watched1 = dns_server,
watched2 = domain_name,
watched3 = '',
watched4 = '',
extra = '',
foreignKey = device['devMac'])
# "MAC", "IP", "Server", "Name"
primaryId = device['devMac'],
secondaryId = device['devLastIP'],
watched1 = dns_server,
watched2 = domain_name,
watched3 = '',
watched4 = '',
extra = '',
foreignKey = device['devMac']
)
plugin_objects.write_result_file()
mylog('verbose', [f'[{pluginName}] Script finished'])
mylog('verbose', [f'[{pluginName}] Script finished'])
return 0
#===============================================================================
# ===============================================================================
# Execute scan
#===============================================================================
def execute_nslookup (ip, timeout):
# ===============================================================================
def execute_nslookup(ip, timeout):
"""
Execute the NSLOOKUP command on IP.
"""
nslookup_args = ['nslookup', ip]
# Execute command
@@ -99,7 +98,13 @@ def execute_nslookup (ip, timeout):
try:
# try running a subprocess with a forced timeout in case the subprocess hangs
output = subprocess.check_output (nslookup_args, universal_newlines=True, stderr=subprocess.STDOUT, timeout=(timeout), text=True)
output = subprocess.check_output(
nslookup_args,
universal_newlines=True,
stderr=subprocess.STDOUT,
timeout=(timeout),
text=True
)
domain_name = ''
dns_server = ''
@@ -110,8 +115,7 @@ def execute_nslookup (ip, timeout):
domain_pattern = re.compile(r'name\s*=\s*([^\s]+)', re.IGNORECASE)
server_pattern = re.compile(r'Server:\s+(.+)', re.IGNORECASE)
domain_match = domain_pattern.search(output)
domain_match = domain_pattern.search(output)
server_match = server_pattern.search(output)
if domain_match:
@@ -131,24 +135,20 @@ def execute_nslookup (ip, timeout):
else:
mylog('verbose', [f'[{pluginName}]', e.output])
# Handle other errors here
# mylog('verbose', [f'[{pluginName}] ⚠ ERROR - check logs'])
except subprocess.TimeoutExpired:
mylog('verbose', [f'[{pluginName}] TIMEOUT - the process forcefully terminated as timeout reached'])
# mylog('verbose', [f'[{pluginName}] ⚠ ERROR - check logs'])
except subprocess.TimeoutExpired:
mylog('verbose', [f'[{pluginName}] TIMEOUT - the process forcefully terminated as timeout reached'])
if output != "": # check if the subprocess failed
if output == "": # check if the subprocess failed
tmp = 1 # can't have empty
# mylog('verbose', [f'[{pluginName}] Scan: FAIL - check logs'])
else:
mylog('verbose', [f'[{pluginName}] Scan: SUCCESS'])
return '', ''
return '', ''
#===============================================================================
# ===============================================================================
# BEGIN
#===============================================================================
# ===============================================================================
if __name__ == '__main__':
main()
main()
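# A standalone sketch of the regex extraction used on the nslookup output above; the
# sample text is invented, only the two patterns from execute_nslookup are exercised.
import re

sample_output = ("Server:  192.168.1.1\n"
                 "Address: 192.168.1.1#53\n\n"
                 "121.1.168.192.in-addr.arpa  name = host.example.lan.\n")
domain_match = re.compile(r'name\s*=\s*([^\s]+)', re.IGNORECASE).search(sample_output)
server_match = re.compile(r'Server:\s+(.+)', re.IGNORECASE).search(sample_output)
print(domain_match.group(1) if domain_match else '',
      server_match.group(1).strip() if server_match else '')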

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python
# !/usr/bin/env python
__author__ = "ffsb"
__version__ = "0.1" # initial
__version__ = "0.2" # added logic to retry omada api call once as it seems to sometimes fail for some reasons, and error handling logic...
@@ -15,10 +15,9 @@ __version__ = "1.3" # fix detection of the default gateway IP address that woul
# try to identify and populate their connections by switch/accesspoints and ports/SSID
# try to differentiate root bridges from accessory
#
# sample code to update unbound on opnsense - for reference...
# curl -X POST -d '{"host":{"enabled":"1","hostname":"test","domain":"testdomain.com","rr":"A","mxprio":"","mx":"","server":"10.0.1.1","description":""}}' -H "Content-Type: application/json" -k -u $OPNS_KEY:$OPNS_SECRET https://$IPFW/api/unbound/settings/AddHostOverride
# curl -X POST -d '{"host":{"enabled":"1","hostname":"test","domain":"testdomain.com","rr":"A","mxprio":"","mx":"","server":"10.0.1.1","description":""}}'\
# -H "Content-Type: application/json" -k -u $OPNS_KEY:$OPNS_SECRET https://$IPFW/api/unbound/settings/AddHostOverride
#
import os
import sys
@@ -35,12 +34,12 @@ import multiprocessing
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from plugin_helper import Plugin_Objects
from logger import mylog, Logger
from const import logPath
from helper import get_setting_value
from pytz import timezone
import conf
from plugin_helper import Plugin_Objects # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from const import logPath # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
from pytz import timezone # noqa: E402 [flake8 lint suppression]
import conf # noqa: E402 [flake8 lint suppression]
# Make sure the TIMEZONE for logging is correct
conf.tz = timezone(get_setting_value('TIMEZONE'))
@@ -87,8 +86,6 @@ cMAC, cIP, cNAME, cSWITCH_AP, cPORT_SSID = range(5)
OMDLOGLEVEL = "debug"
#
# translate MAC address from standard ieee model to ietf draft
# AA-BB-CC-DD-EE-FF to aa:bb:cc:dd:ee:ff
# tplink adheres to ieee, Nax adheres to ietf
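# A minimal sketch of that translation with an illustrative helper name (the plugin's
# own ieee2ietf_mac_formater is assumed to do the equivalent):
def ieee_to_ietf(mac: str) -> str:
    """Convert 'AA-BB-CC-DD-EE-FF' (ieee style) to 'aa:bb:cc:dd:ee:ff' (ietf style)."""
    return mac.strip().lower().replace('-', ':')

assert ieee_to_ietf('AA-BB-CC-DD-EE-FF') == 'aa:bb:cc:dd:ee:ff'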
@@ -142,7 +139,7 @@ def callomada(myargs):
try:
mf = io.StringIO()
with redirect_stdout(mf):
bar = omada(myargs)
omada(myargs)
omada_output = mf.getvalue()
except Exception:
mylog(
@@ -190,12 +187,12 @@ def add_uplink(
if switch_mac not in device_data_bymac:
mylog("none", [f"[{pluginName}] switch_mac '{switch_mac}' not found in device_data_bymac"])
return
# Ensure SWITCH_AP key exists in the dictionary
if SWITCH_AP not in device_data_bymac[switch_mac]:
mylog("none", [f"[{pluginName}] Missing key '{SWITCH_AP}' in device_data_bymac[{switch_mac}]"])
return
# Check if uplink should be added
if device_data_bymac[switch_mac][SWITCH_AP] in [None, "null"]:
device_data_bymac[switch_mac][SWITCH_AP] = uplink_mac
@@ -204,11 +201,10 @@ def add_uplink(
if uplink_mac not in device_data_bymac:
mylog("none", [f"[{pluginName}] uplink_mac '{uplink_mac}' not found in device_data_bymac"])
return
# Determine port to uplink
if (
device_data_bymac[switch_mac].get(TYPE) == "Switch"
and device_data_bymac[uplink_mac].get(TYPE) == "Switch"
device_data_bymac[switch_mac].get(TYPE) == "Switch" and device_data_bymac[uplink_mac].get(TYPE) == "Switch"
):
port_to_uplink = port_byswitchmac_byclientmac.get(switch_mac, {}).get(uplink_mac)
if port_to_uplink is None:
@@ -216,16 +212,14 @@ def add_uplink(
return
else:
port_to_uplink = device_data_bymac[uplink_mac].get(PORT_SSID)
# Assign port to switch_mac
device_data_bymac[switch_mac][PORT_SSID] = port_to_uplink
# Recursively add uplinks for linked devices
for link in sadevices_linksbymac.get(switch_mac, []):
if (
link in device_data_bymac
and device_data_bymac[link].get(SWITCH_AP) in [None, "null"]
and device_data_bymac[switch_mac].get(TYPE) == "Switch"
link in device_data_bymac and device_data_bymac[link].get(SWITCH_AP) in [None, "null"] and device_data_bymac[switch_mac].get(TYPE) == "Switch"
):
add_uplink(
switch_mac,
@@ -236,7 +230,6 @@ def add_uplink(
)
# ----------------------------------------------
# Main initialization
def main():
@@ -324,16 +317,16 @@ def main():
)
mymac = ieee2ietf_mac_formater(device[MAC])
plugin_objects.add_object(
primaryId=mymac, # MAC
secondaryId=device[IP], # IP
watched1=device[NAME], # NAME/HOSTNAME
watched2=ParentNetworkNode, # PARENT NETWORK NODE MAC
watched3=myport, # PORT
watched4=myssid, # SSID
primaryId=mymac, # MAC
secondaryId=device[IP], # IP
watched1=device[NAME], # NAME/HOSTNAME
watched2=ParentNetworkNode, # PARENT NETWORK NODE MAC
watched3=myport, # PORT
watched4=myssid, # SSID
extra=device[TYPE],
# omada_site, # SITENAME (cur_NetworkSite) or VENDOR (cur_Vendor) (PICK one and adjust config.json -> "column": "Extra")
foreignKey=device[MAC].lower().replace("-", ":"),
) # usually MAC
) # usually MAC
mylog(
"verbose",
@@ -369,7 +362,6 @@ def get_omada_devices_details(msadevice_data):
mswitch_dump = callomada(["-t", "myomada", "switch", "-d", mthisswitch])
else:
mswitch_detail = ""
nswitch_dump = ""
return mswitch_detail, mswitch_dump
@@ -414,7 +406,6 @@ def get_device_data(omada_clients_output, switches_and_aps, device_handler):
# 17:27:10 [<unique_prefix>] token: "['1A-2B-3C-4D-5E-6F', '192.168.0.217', '1A-2B-3C-4D-5E-6F', '17', '40-AE-30-A5-A7-50, 'Switch']"
# constants
sadevices_macbyname = {}
sadevices_macbymac = {}
sadevices_linksbymac = {}
port_byswitchmac_byclientmac = {}
device_data_bymac = {}
@@ -427,7 +418,7 @@ def get_device_data(omada_clients_output, switches_and_aps, device_handler):
def run_command(command, index):
result = subprocess.run(command, capture_output=True, text=True, shell=True)
return str(index), result.stdout.strip()
myindex, command_output= run_command(command, 2)
mylog('verbose', [f'[{pluginName}] command={command} index={myindex} results={command_output}'])
"""
@@ -556,11 +547,11 @@ def get_device_data(omada_clients_output, switches_and_aps, device_handler):
#
naxname = real_naxname
if real_naxname != None:
if real_naxname is not None:
if "(" in real_naxname:
# removing parenthesis and domains from the name
naxname = real_naxname.split("(")[0]
if naxname != None and "." in naxname:
if naxname is not None and "." in naxname:
naxname = naxname.split(".")[0]
if naxname in (None, "null", ""):
naxname = (

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python
# !/usr/bin/env python
"""
This plugin imports devices and clients from Omada Controller using their OpenAPI.
@@ -25,7 +25,6 @@ import sys
import urllib3
import requests
import time
import datetime
import pytz
from datetime import datetime
@@ -35,11 +34,11 @@ from typing import Literal, Any, Dict
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from plugin_helper import Plugin_Objects, is_typical_router_ip, is_mac
from logger import mylog, Logger
from const import logPath
from helper import get_setting_value
import conf
from plugin_helper import Plugin_Objects, is_typical_router_ip, is_mac # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from const import logPath # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
import conf # noqa: E402 [flake8 lint suppression]
# Make sure the TIMEZONE for logging is correct
conf.tz = pytz.timezone(get_setting_value('TIMEZONE'))
@@ -176,7 +175,10 @@ class OmadaHelper:
# If it's not a gateway try to assign parent node MAC
if data.get("type", "") != "gateway":
parent_mac = OmadaHelper.normalize_mac(data.get("uplinkDeviceMac"))
entry["parent_node_mac_address"] = parent_mac.get("response_result") if isinstance(parent_mac, dict) and parent_mac.get("response_type") == "success" else ""
resp_type = parent_mac.get("response_type")
entry["parent_node_mac_address"] = parent_mac.get("response_result") if isinstance(parent_mac, dict) and resp_type == "success" else ""
# Applicable only for CLIENT
if input_type == "client":
@@ -185,15 +187,26 @@ class OmadaHelper:
# Try to assign parent node MAC and PORT/SSID to the CLIENT
if data.get("connectDevType", "") == "gateway":
parent_mac = OmadaHelper.normalize_mac(data.get("gatewayMac"))
entry["parent_node_mac_address"] = parent_mac.get("response_result") if isinstance(parent_mac, dict) and parent_mac.get("response_type") == "success" else ""
resp_type = parent_mac.get("response_type")
entry["parent_node_mac_address"] = parent_mac.get("response_result") if isinstance(parent_mac, dict) and resp_type == "success" else ""
entry["parent_node_port"] = data.get("port", "")
elif data.get("connectDevType", "") == "switch":
parent_mac = OmadaHelper.normalize_mac(data.get("switchMac"))
entry["parent_node_mac_address"] = parent_mac.get("response_result") if isinstance(parent_mac, dict) and parent_mac.get("response_type") == "success" else ""
resp_type = parent_mac.get("response_type")
entry["parent_node_mac_address"] = parent_mac.get("response_result") if isinstance(parent_mac, dict) and resp_type == "success" else ""
entry["parent_node_port"] = data.get("port", "")
elif data.get("connectDevType", "") == "ap":
parent_mac = OmadaHelper.normalize_mac(data.get("apMac"))
entry["parent_node_mac_address"] = parent_mac.get("response_result") if isinstance(parent_mac, dict) and parent_mac.get("response_type") == "success" else ""
resp_type = parent_mac.get("response_type")
entry["parent_node_mac_address"] = parent_mac.get("response_result") if isinstance(parent_mac, dict) and resp_type == "success" else ""
entry["parent_node_ssid"] = data.get("ssid", "")
# Add the entry to the result
@@ -253,7 +266,7 @@ class OmadaAPI:
"""Return request headers."""
headers = {"Content-type": "application/json"}
# Add access token to header if requested and available
if include_auth == True:
if include_auth is True:
if not self.access_token:
OmadaHelper.debug("No access token available for headers")
else:
@@ -368,7 +381,7 @@ class OmadaAPI:
# Failed site population
OmadaHelper.debug(f"Site population response: {response}")
return OmadaHelper.response("error", f"Site population failed - error: {response.get('response_message', 'Not provided')}")
return OmadaHelper.response("error", f"Site population failed - error: {response.get('response_message', 'Not provided')}")
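# A hedged sketch of the response-dict convention used above: helpers such as
# normalize_mac return {"response_type": ..., "response_result": ...} and callers check
# the type before using the result. The helper body below is an assumption; only the
# two key names come from the calls above.
def normalize_mac_sketch(raw_mac):
    if not raw_mac:
        return {"response_type": "error", "response_result": ""}
    return {"response_type": "success",
            "response_result": raw_mac.strip().upper().replace('-', ':')}

parent_mac = normalize_mac_sketch("1a-2b-3c-4d-5e-6f")
resp_type = parent_mac.get("response_type")
parent = parent_mac.get("response_result") if isinstance(parent_mac, dict) and resp_type == "success" else ""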
def requested_sites(self) -> list:
"""Returns sites requested by user."""

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python
# !/usr/bin/env python
"""
NetAlertX plugin: PIHOLEAPI
Imports devices from Pi-hole v6 API (Network endpoints) into NetAlertX plugin results.
@@ -17,12 +17,12 @@ sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
pluginName = 'PIHOLEAPI'
from plugin_helper import Plugin_Objects, is_mac
from logger import mylog, Logger
from helper import get_setting_value
from const import logPath
import conf
from pytz import timezone
from plugin_helper import Plugin_Objects, is_mac # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
from const import logPath # noqa: E402 [flake8 lint suppression]
import conf # noqa: E402 [flake8 lint suppression]
from pytz import timezone # noqa: E402 [flake8 lint suppression]
# Setup timezone & logger using standard NAX helpers
conf.tz = timezone(get_setting_value('TIMEZONE'))
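# A standalone sketch of the api/network/devices request issued a few lines below;
# only the URL path comes from the code, while the base URL, headers, authentication
# and response shape are placeholders/assumptions.
import requests

base_url = 'http://pi.hole/'                                   # placeholder
resp = requests.get(base_url + 'api/network/devices',
                    headers={'accept': 'application/json'},    # auth details omitted here
                    verify=False, timeout=10)
resp.raise_for_status()
devices_payload = resp.json()                                  # parsed into device entries by the plugin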
@@ -179,7 +179,7 @@ def get_pihole_network_devices():
resp = requests.get(PIHOLEAPI_URL + 'api/network/devices', headers=headers, params=params, verify=PIHOLEAPI_VERIFY_SSL, timeout=PIHOLEAPI_RUN_TIMEOUT)
resp.raise_for_status()
data = resp.json()
mylog('debug', [f'[{pluginName}] Pi-hole API returned data: {json.dumps(data)}'])
except Exception as e:
@@ -267,7 +267,7 @@ def main():
for entry in device_entries:
if is_mac(entry['mac']):
# Map to Plugin_Objects fields
# Map to Plugin_Objects fields
mylog('verbose', [f'[{pluginName}] found: {entry['name']}|{entry['mac']}|{entry['ip']}'])
plugin_objects.add_object(

View File

@@ -5,18 +5,18 @@ import os
import re
import base64
import json
from datetime import datetime
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.append(f"{INSTALL_PATH}/front/plugins")
sys.path.append(f'{INSTALL_PATH}/server')
sys.path.append(f'{INSTALL_PATH}/server')
from logger import mylog, Logger
from utils.datetime_utils import timeNowDB
from const import default_tz, fullConfPath
from logger import mylog # noqa: E402 [flake8 lint suppression]
from utils.datetime_utils import timeNowDB # noqa: E402 [flake8 lint suppression]
from const import default_tz, fullConfPath # noqa: E402 [flake8 lint suppression]
#-------------------------------------------------------------------------------
# -------------------------------------------------------------------------------
def read_config_file():
"""
returns a dict of the config file key:value pairs
@@ -25,15 +25,15 @@ def read_config_file():
filename = fullConfPath
print('[plugin_helper] reading config file')
# load the variables from .conf
with open(filename, "r") as file:
code = compile(file.read(), filename, "exec")
confDict = {} # config dictionary
confDict = {} # config dictionary
exec(code, {"__builtins__": {}}, confDict)
return confDict
return confDict
configFile = read_config_file()
@@ -42,6 +42,7 @@ if timeZoneSetting not in all_timezones:
timeZoneSetting = default_tz
timeZone = pytz.timezone(timeZoneSetting)
# -------------------------------------------------------------------
# Sanitizes plugin output
def handleEmpty(input):
@@ -54,70 +55,72 @@ def handleEmpty(input):
input = re.sub(r'[^\x00-\x7F]+', ' ', input)
input = input.replace('\n', '') # Removing new lines
return input
# -------------------------------------------------------------------
# Sanitizes string
def rmBadChars(input):
input = handleEmpty(input)
input = input.replace("'", '_') # Removing ' (single quotes)
return input
# -------------------------------------------------------------------
# check if this is a router IP
def is_typical_router_ip(ip_address):
# List of common default gateway IP addresses
common_router_ips = [
"192.168.0.1", "192.168.1.1", "192.168.1.254", "192.168.0.254",
"10.0.0.1", "10.1.1.1", "192.168.2.1", "192.168.10.1", "192.168.11.1",
"192.168.100.1", "192.168.101.1", "192.168.123.254", "192.168.223.1",
"192.168.31.1", "192.168.8.1", "192.168.254.254", "192.168.50.1",
"192.168.3.1", "192.168.4.1", "192.168.5.1", "192.168.9.1",
"192.168.15.1", "192.168.16.1", "192.168.20.1", "192.168.30.1",
"192.168.42.1", "192.168.62.1", "192.168.178.1", "192.168.1.1",
"192.168.1.254", "192.168.0.1", "192.168.0.10", "192.168.0.100",
"192.168.0.254"
]
return ip_address in common_router_ips
# List of common default gateway IP addresses
common_router_ips = [
"192.168.0.1", "192.168.1.1", "192.168.1.254", "192.168.0.254",
"10.0.0.1", "10.1.1.1", "192.168.2.1", "192.168.10.1", "192.168.11.1",
"192.168.100.1", "192.168.101.1", "192.168.123.254", "192.168.223.1",
"192.168.31.1", "192.168.8.1", "192.168.254.254", "192.168.50.1",
"192.168.3.1", "192.168.4.1", "192.168.5.1", "192.168.9.1",
"192.168.15.1", "192.168.16.1", "192.168.20.1", "192.168.30.1",
"192.168.42.1", "192.168.62.1", "192.168.178.1", "192.168.1.1",
"192.168.1.254", "192.168.0.1", "192.168.0.10", "192.168.0.100",
"192.168.0.254"
]
return ip_address in common_router_ips
# -------------------------------------------------------------------
# Check if a valid MAC address
def is_mac(input):
input_str = str(input).lower() # Convert to string and lowercase so non-string values won't raise errors
isMac = bool(re.match("[0-9a-f]{2}([-:]?)[0-9a-f]{2}(\\1[0-9a-f]{2}){4}$", input_str))
if not isMac: # If it's not a MAC address, log the input
mylog('verbose', [f'[is_mac] not a MAC: {input_str}'])
return isMac
# -------------------------------------------------------------------
def decodeBase64(inputParamBase64):
# Printing the input list to check its content.
mylog('debug', ['[Plugins] Helper base64 input: ', input])
print('[Plugins] Helper base64 input: ')
print(input)
# Extract the base64-encoded subnet information from the first element
# The format of the element is assumed to be like 'param=b<base64-encoded-data>'.
mylog('debug', ['[Plugins] Helper base64 input: ', input])
print('[Plugins] Helper base64 input: ')
print(input)
# Extract the base64-encoded subnet information from the first element
# The format of the element is assumed to be like 'param=b<base64-encoded-data>'.
# Printing the extracted base64-encoded information.
mylog('debug', ['[Plugins] Helper base64 inputParamBase64: ', inputParamBase64])
mylog('debug', ['[Plugins] Helper base64 inputParamBase64: ', inputParamBase64])
# Decode the base64-encoded subnet information to get the actual subnet information in ASCII format.
result = base64.b64decode(inputParamBase64).decode('ascii')
# Print the decoded subnet information.
mylog('debug', ['[Plugins] Helper base64 result: ', result])
mylog('debug', ['[Plugins] Helper base64 result: ', result])
return result
# -------------------------------------------------------------------
def decode_settings_base64(encoded_str, convert_types=True):
"""
@@ -167,7 +170,7 @@ def decode_settings_base64(encoded_str, convert_types=True):
def normalize_mac(mac):
# Split the MAC address by colon (:) or hyphen (-) and convert each part to uppercase
parts = mac.upper().split(':')
# If the MAC address is split by hyphen instead of colon
if len(parts) == 1:
parts = mac.upper().split('-')
@@ -177,14 +180,15 @@ def normalize_mac(mac):
# Join the parts with colon (:)
normalized_mac = ':'.join(normalized_parts)
return normalized_mac
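# A quick usage example, assuming the elided per-part normalization only upper-cases
# and zero-pads each octet before the join above:
print(normalize_mac('aa-bb-cc-dd-ee-ff'))   # expected: AA:BB:CC:DD:EE:FF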
# -------------------------------------------------------------------
class Plugin_Object:
"""
"""
Plugin_Object class to manage one object introduced by the plugin.
An object typically is a device but could also be a website or something
An object typically is a device but could also be a website or something
else that is monitored by the plugin.
"""
@@ -222,8 +226,8 @@ class Plugin_Object:
self.helpVal4 = helpVal4 or ""
def write(self):
"""
Write the object details as a string in the
"""
Write the object details as a string in the
format required to write the result file.
"""
line = "{}|{}|{}|{}|{}|{}|{}|{}|{}|{}|{}|{}|{}\n".format(
@@ -243,6 +247,7 @@ class Plugin_Object:
)
return line
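# A hedged example of the line write() emits: thirteen pipe-separated fields, one
# monitored object per line. The concrete values below are invented and the field
# order is only an assumption based on the constructor arguments.
example_line = '|'.join(['AA:BB:CC:DD:EE:FF', '192.168.1.10', '2025-11-22 13:14:06',
                         'name', 'watched2', 'watched3', 'watched4', 'extra',
                         'AA:BB:CC:DD:EE:FF', '', '', '', '']) + '\n'
assert example_line.count('|') == 12   # 13 fields -> 12 separators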
class Plugin_Objects:
"""
Plugin_Objects is the class that manages and holds all the objects created by the plugin.
@@ -303,7 +308,3 @@ class Plugin_Objects:
def __len__(self):
return len(self.objects)

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python
# !/usr/bin/env python
from __future__ import unicode_literals
import subprocess
@@ -10,12 +10,12 @@ import sys
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from plugin_helper import Plugin_Objects, handleEmpty, normalize_mac
from logger import mylog, Logger
from helper import get_setting_value
from const import logPath
import conf
from pytz import timezone
from plugin_helper import Plugin_Objects, handleEmpty, normalize_mac # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
from const import logPath # noqa: E402 [flake8 lint suppression]
import conf # noqa: E402 [flake8 lint suppression]
from pytz import timezone # noqa: E402 [flake8 lint suppression]
# Make sure the TIMEZONE for logging is correct
conf.tz = timezone(get_setting_value('TIMEZONE'))
@@ -28,53 +28,60 @@ pluginName = "SNMPDSC"
LOG_PATH = logPath + '/plugins'
RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
# Workflow
def main():
mylog('verbose', ['[SNMPDSC] In script '])
def main():
mylog('verbose', ['[SNMPDSC] In script '])
# init global variables
global snmpWalkCmds
parser = argparse.ArgumentParser(description='This plugin is used to discover devices via the arp table(s) of a RFC1213 compliant router or switch.')
parser.add_argument('routers', action="store", help="IP(s) of routers, separated by comma (,) if passing multiple")
parser = argparse.ArgumentParser(description='This plugin is used to discover devices via the arp table(s) of a RFC1213 compliant router or switch.')
parser.add_argument(
'routers',
action="store",
help="IP(s) of routers, separated by comma (,) if passing multiple"
)
values = parser.parse_args()
timeoutSetting = get_setting_value("SNMPDSC_RUN_TIMEOUT")
plugin_objects = Plugin_Objects(RESULT_FILE)
if values.routers:
snmpWalkCmds = values.routers.split('=')[1].replace('\'','')
if values.routers:
snmpWalkCmds = values.routers.split('=')[1].replace('\'', '')
if ',' in snmpWalkCmds:
commands = snmpWalkCmds.split(',')
else:
commands = [snmpWalkCmds]
for cmd in commands:
mylog('verbose', ['[SNMPDSC] Router snmpwalk command: ', cmd])
mylog('verbose', ['[SNMPDSC] Router snmpwalk command: ', cmd])
# split the string, remove white spaces around each item, and exclude any empty strings
snmpwalkArgs = [arg.strip() for arg in cmd.split(' ') if arg.strip()]
# Execute N probes and insert in list
probes = 1 # N probes
for _ in range(probes):
output = subprocess.check_output (snmpwalkArgs, universal_newlines=True, stderr=subprocess.STDOUT, timeout=(timeoutSetting))
mylog('verbose', ['[SNMPDSC] output: ', output])
for _ in range(probes):
output = subprocess.check_output(
snmpwalkArgs,
universal_newlines=True,
stderr=subprocess.STDOUT,
timeout=(timeoutSetting)
)
mylog('verbose', ['[SNMPDSC] output: ', output])
lines = output.split('\n')
for line in lines:
for line in lines:
tmpSplt = line.split('"')
tmpSplt = line.split('"')
if len(tmpSplt) == 3:
ipStr = tmpSplt[0].split('.')[-4:] # Get the last 4 elements to extract the IP
macStr = tmpSplt[1].strip().split(' ') # Remove leading/trailing spaces from MAC
@@ -82,19 +89,18 @@ def main():
macAddress = ':'.join(macStr)
ipAddress = '.'.join(ipStr)
mylog('verbose', [f'[SNMPDSC] IP: {ipAddress} MAC: {macAddress}'])
mylog('verbose', [f'[SNMPDSC] IP: {ipAddress} MAC: {macAddress}'])
plugin_objects.add_object(
primaryId = handleEmpty(macAddress),
secondaryId = handleEmpty(ipAddress.strip()), # Remove leading/trailing spaces from IP
secondaryId = handleEmpty(ipAddress.strip()), # Remove leading/trailing spaces from IP
watched1 = '(unknown)',
watched2 = handleEmpty(snmpwalkArgs[6]), # router IP
extra = handleEmpty(line),
foreignKey = handleEmpty(macAddress) # Use the primary ID as the foreign key
)
else:
mylog('verbose', ['[SNMPDSC] ipStr does not seem to contain a valid IP:', ipStr])
mylog('verbose', ['[SNMPDSC] ipStr does not seem to contain a valid IP:', ipStr])
elif line.startswith('ipNetToMediaPhysAddress'):
# Format: snmpwalk -OXsq output
@@ -115,12 +121,11 @@ def main():
foreignKey = handleEmpty(macAddress)
)
mylog('verbose', ['[SNMPDSC] Entries found: ', len(plugin_objects)])
mylog('verbose', ['[SNMPDSC] Entries found: ', len(plugin_objects)])
plugin_objects.write_result_file()
# BEGIN
if __name__ == '__main__':
if __name__ == '__main__':
main()
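# A standalone sketch of the quoted-MAC branch above; the sample line is invented and
# assumes a quick-print style snmpwalk output (the exact formatting depends on the
# snmpwalk options the user configures).
walk_line = 'atPhysAddress.2.1.192.168.1.10 "AA BB CC DD EE FF "'
parts = walk_line.split('"')
if len(parts) == 3:
    ip_address = '.'.join(parts[0].split('.')[-4:]).strip()
    mac_address = ':'.join(parts[1].strip().split(' '))
    print(ip_address, mac_address)   # 192.168.1.10 AA:BB:CC:DD:EE:FF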

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python
# !/usr/bin/env python
import os
import sys
@@ -12,16 +12,16 @@ import base64
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from plugin_helper import Plugin_Objects
from utils.plugin_utils import get_plugins_configs, decode_and_rename_files
from logger import mylog, Logger
from const import fullDbPath, logPath
from helper import get_setting_value
from utils.datetime_utils import timeNowDB
from utils.crypto_utils import encrypt_data
from messaging.in_app import write_notification
import conf
from pytz import timezone
from plugin_helper import Plugin_Objects # noqa: E402 [flake8 lint suppression]
from utils.plugin_utils import get_plugins_configs, decode_and_rename_files # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from const import fullDbPath, logPath # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
from utils.datetime_utils import timeNowDB # noqa: E402 [flake8 lint suppression]
from utils.crypto_utils import encrypt_data # noqa: E402 [flake8 lint suppression]
from messaging.in_app import write_notification # noqa: E402 [flake8 lint suppression]
import conf # noqa: E402 [flake8 lint suppression]
from pytz import timezone # noqa: E402 [flake8 lint suppression]
# Make sure the TIMEZONE for logging is correct
conf.tz = timezone(get_setting_value('TIMEZONE'))
@@ -41,21 +41,21 @@ plugin_objects = Plugin_Objects(RESULT_FILE)
def main():
mylog('verbose', [f'[{pluginName}] In script'])
mylog('verbose', [f'[{pluginName}] In script'])
# Retrieve configuration settings
plugins_to_sync = get_setting_value('SYNC_plugins')
api_token = get_setting_value('API_TOKEN')
api_token = get_setting_value('API_TOKEN')
encryption_key = get_setting_value('SYNC_encryption_key')
hub_url = get_setting_value('SYNC_hub_url')
node_name = get_setting_value('SYNC_node_name')
send_devices = get_setting_value('SYNC_devices')
pull_nodes = get_setting_value('SYNC_nodes')
# variables to determine operation mode
is_hub = False
is_node = False
# Check if api_token set
if not api_token:
mylog('verbose', [f'[{pluginName}] ⚠ ERROR api_token not defined - quitting.'])
@@ -63,23 +63,23 @@ def main():
# check if this is a hub or a node
if len(hub_url) > 0 and (send_devices or plugins_to_sync):
is_node = True
mylog('verbose', [f'[{pluginName}] Mode 1: PUSH (NODE) - This is a NODE as SYNC_hub_url, SYNC_devices or SYNC_plugins are set'])
if len(pull_nodes) > 0:
is_node = True
mylog('verbose', [f'[{pluginName}] Mode 1: PUSH (NODE) - This is a NODE as SYNC_hub_url, SYNC_devices or SYNC_plugins are set'])
if len(pull_nodes) > 0:
is_hub = True
mylog('verbose', [f'[{pluginName}] Mode 2: PULL (HUB) - This is a HUB as SYNC_nodes is set'])
mylog('verbose', [f'[{pluginName}] Mode 2: PULL (HUB) - This is a HUB as SYNC_nodes is set'])
# Mode 1: PUSH/SEND (NODE)
# Mode 1: PUSH/SEND (NODE)
if is_node:
# PUSHING/SENDING Plugins
# PUSHING/SENDING Plugins
# Get all plugin configurations
all_plugins = get_plugins_configs(False)
mylog('verbose', [f'[{pluginName}] plugins_to_sync {plugins_to_sync}'])
for plugin in all_plugins:
pref = plugin["unique_prefix"]
pref = plugin["unique_prefix"]
index = 0
if pref in plugins_to_sync:
@@ -100,9 +100,8 @@ def main():
send_data(api_token, file_content, encryption_key, file_path, node_name, pref, hub_url)
else:
mylog('verbose', [f'[{pluginName}] {file_path} not found'])
mylog('verbose', [f'[{pluginName}] {file_path} not found'])
# PUSHING/SENDING devices
if send_devices:
@@ -117,27 +116,27 @@ def main():
mylog('verbose', [f'[{pluginName}] Sending file_content: "{file_content}"'])
send_data(api_token, file_content, encryption_key, file_path, node_name, pref, hub_url)
else:
mylog('verbose', [f'[{pluginName}] SYNC_hub_url not defined, skipping posting "Devices" data'])
mylog('verbose', [f'[{pluginName}] SYNC_hub_url not defined, skipping posting "Devices" data'])
else:
mylog('verbose', [f'[{pluginName}] SYNC_hub_url not defined, skipping posting "Plugins" and "Devices" data'])
mylog('verbose', [f'[{pluginName}] SYNC_hub_url not defined, skipping posting "Plugins" and "Devices" data'])
# Mode 2: PULL/GET (HUB)
# PULLING DEVICES
# PULLING DEVICES
file_prefix = 'last_result'
# pull data from nodes if specified
if is_hub:
for node_url in pull_nodes:
response_json = get_data(api_token, node_url)
# Extract node_name and base64 data
node_name = response_json.get('node_name', 'unknown_node')
data_base64 = response_json.get('data_base64', '')
# Decode base64 data
decoded_data = base64.b64decode(data_base64)
# Create log file name using node name
log_file_name = f'{file_prefix}.{node_name}.log'
@@ -148,18 +147,17 @@ def main():
message = f'[{pluginName}] Device data from node "{node_name}" written to {log_file_name}'
mylog('verbose', [message])
if lggr.isAbove('verbose'):
write_notification(message, 'info', timeNowDB())
write_notification(message, 'info', timeNowDB())
# Process any received data for the Device DB table (ONLY JSON)
# Create the file path
# Get all "last_result" files from the sync folder, decode, rename them, and get the list of files
files_to_process = decode_and_rename_files(LOG_PATH, file_prefix)
if len(files_to_process) > 0:
mylog('verbose', [f'[{pluginName}] Mode 3: RECEIVE (HUB) - This is a HUB as received data found'])
mylog('verbose', [f'[{pluginName}] Mode 3: RECEIVE (HUB) - This is a HUB as received data found'])
# Connect to the App database
conn = sqlite3.connect(fullDbPath)
@@ -176,24 +174,24 @@ def main():
# only process received .log files, skipping the one logging the progress of this plugin
if file_name != 'last_result.log':
mylog('verbose', [f'[{pluginName}] Processing: "{file_name}"'])
# make sure the file has the correct name (e.g. last_result.encoded.Node_1.1.log) to skip any other plugin files
if len(file_name.split('.')) > 2:
# Extract node name from either last_result.decoded.Node_1.1.log or last_result.Node_1.log
parts = file_name.split('.')
# If decoded/encoded file, node name is at index 2; otherwise at index 1
syncHubNodeName = parts[2] if 'decoded' in file_name or 'encoded' in file_name else parts[1]
syncHubNodeName = parts[2] if 'decoded' in file_name or 'encoded' in file_name else parts[1]
file_path = f"{LOG_PATH}/{file_name}"
with open(file_path, 'r') as f:
data = json.load(f)
for device in data['data']:
if device['devMac'] not in unique_mac_addresses:
device['devSyncHubNode'] = syncHubNodeName
unique_mac_addresses.add(device['devMac'])
device_data.append(device)
device_data.append(device)
# Rename the file to "processed_" + current name
new_file_name = f"processed_{file_name}"
new_file_path = os.path.join(LOG_PATH, new_file_name)
@@ -209,7 +207,6 @@ def main():
placeholders = ', '.join('?' for _ in unique_mac_addresses)
cursor.execute(f'SELECT devMac FROM Devices WHERE devMac IN ({placeholders})', tuple(unique_mac_addresses))
existing_mac_addresses = set(row[0] for row in cursor.fetchall())
# insert devices into the last_result.log and thus CurrentScan table to manage state
for device in device_data:
@@ -228,7 +225,7 @@ def main():
# Filter out existing devices
new_devices = [device for device in device_data if device['devMac'] not in existing_mac_addresses]
# Remove 'rowid' key if it exists
# Remove 'rowid' key if it exists
for device in new_devices:
device.pop('rowid', None)
device.pop('devStatus', None)
@@ -257,7 +254,6 @@ def main():
mylog('verbose', [message])
write_notification(message, 'info', timeNowDB())
# Commit and close the connection
conn.commit()
@@ -268,6 +264,7 @@ def main():
return 0
# ------------------------------------------------------------------
# Data retrieval methods
api_endpoints = [
@@ -275,6 +272,7 @@ api_endpoints = [
"/plugins/sync/hub.php" # Legacy PHP endpoint
]
# send data to the HUB
def send_data(api_token, file_content, encryption_key, file_path, node_name, pref, hub_url):
"""Send encrypted data to HUB, preferring /sync endpoint and falling back to PHP version."""
@@ -345,6 +343,5 @@ def get_data(api_token, node_url):
return ""
if __name__ == '__main__':
main()
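# A hedged sketch of how a pulled node payload is unpacked on the hub side, using the
# two response fields referenced above; the payload content itself is invented.
import base64
import json

response_json = {'node_name': 'Node_1',
                 'data_base64': base64.b64encode(json.dumps({'data': []}).encode()).decode()}
node_name = response_json.get('node_name', 'unknown_node')
decoded_data = base64.b64decode(response_json.get('data_base64', ''))
log_file_name = f'last_result.{node_name}.log'
print(log_file_name, decoded_data)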

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python
# !/usr/bin/env python
import os
import sys
@@ -10,12 +10,11 @@ from unifi_sm_api.api import SiteManagerAPI
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from plugin_helper import Plugin_Objects, decode_settings_base64
from logger import mylog, Logger
from const import logPath
from helper import get_setting_value
import conf
from plugin_helper import Plugin_Objects, decode_settings_base64 # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from const import logPath # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
import conf # noqa: E402 [flake8 lint suppression]
# Make sure the TIMEZONE for logging is correct
conf.tz = timezone(get_setting_value('TIMEZONE'))
@@ -35,13 +34,13 @@ plugin_objects = Plugin_Objects(RESULT_FILE)
def main():
mylog('verbose', [f'[{pluginName}] In script'])
mylog('verbose', [f'[{pluginName}] In script'])
# Retrieve configuration settings
unifi_sites_configs = get_setting_value('UNIFIAPI_sites')
mylog('verbose', [f'[{pluginName}] number of unifi_sites_configs: {len(unifi_sites_configs)}'])
for site_config in unifi_sites_configs:
siteDict = decode_settings_base64(site_config)
@@ -50,11 +49,11 @@ def main():
mylog('none', [f'[{pluginName}] Connecting to: {siteDict["UNIFIAPI_site_name"]}'])
api = SiteManagerAPI(
api_key=siteDict["UNIFIAPI_api_key"],
version=siteDict["UNIFIAPI_api_version"],
base_url=siteDict["UNIFIAPI_base_url"],
verify_ssl=siteDict["UNIFIAPI_verify_ssl"]
)
api_key=siteDict["UNIFIAPI_api_key"],
version=siteDict["UNIFIAPI_api_version"],
base_url=siteDict["UNIFIAPI_base_url"],
verify_ssl=siteDict["UNIFIAPI_verify_ssl"]
)
sites_resp = api.get_sites()
sites = sites_resp.get("data", [])
@@ -67,18 +66,18 @@ def main():
# Process the data into native application tables
if len(device_data) > 0:
# insert devices into the last_result.log
# insert devices into the last_result.log
for device in device_data:
plugin_objects.add_object(
primaryId = device['dev_mac'], # mac
secondaryId = device['dev_ip'], # IP
watched1 = device['dev_name'], # name
watched2 = device['dev_type'], # device_type (AP/Switch etc)
watched3 = device['dev_connected'], # connectedAt or empty
watched4 = device['dev_parent_mac'],# parent_mac or "Internet"
extra = '',
foreignKey = device['dev_mac']
)
plugin_objects.add_object(
primaryId = device['dev_mac'], # mac
secondaryId = device['dev_ip'], # IP
watched1 = device['dev_name'], # name
watched2 = device['dev_type'], # device_type (AP/Switch etc)
watched3 = device['dev_connected'], # connectedAt or empty
watched4 = device['dev_parent_mac'], # parent_mac or "Internet"
extra = '',
foreignKey = device['dev_mac']
)
mylog('verbose', [f'[{pluginName}] New entries: "{len(device_data)}"'])
@@ -87,6 +86,7 @@ def main():
return 0
# retrieve data
def get_device_data(site, api):
device_data = []
@@ -146,8 +146,8 @@ def get_device_data(site, api):
dev_parent_mac = resolve_parent_mac(uplinkDeviceId)
device_data.append({
"dev_mac": dev_mac,
"dev_ip": dev_ip,
"dev_mac": dev_mac,
"dev_ip": dev_ip,
"dev_name": dev_name,
"dev_type": dev_type,
"dev_connected": dev_connected,

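# A hedged sketch of the per-site configuration flow above: each UNIFIAPI_sites entry
# is a base64-encoded settings blob that decode_settings_base64 turns into a dict with
# the keys used in the SiteManagerAPI call. The values below are placeholders, not
# real credentials or a confirmed schema.
siteDict_example = {
    "UNIFIAPI_site_name": "default",
    "UNIFIAPI_api_key": "<api-key>",
    "UNIFIAPI_api_version": "v1",
    "UNIFIAPI_base_url": "https://unifi.example.lan",
    "UNIFIAPI_verify_ssl": False,
}
# api = SiteManagerAPI(api_key=siteDict_example["UNIFIAPI_api_key"], ...)  # as in main() above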
View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python
# !/usr/bin/env python
# Inspired by https://github.com/stevehoek/Pi.Alert
from __future__ import unicode_literals
@@ -14,12 +14,12 @@ from pyunifi.controller import Controller
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from plugin_helper import Plugin_Objects, rmBadChars, is_typical_router_ip, is_mac
from logger import mylog, Logger
from helper import get_setting_value, normalize_string
import conf
from pytz import timezone
from const import logPath
from plugin_helper import Plugin_Objects, rmBadChars, is_typical_router_ip, is_mac # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value, normalize_string # noqa: E402 [flake8 lint suppression]
import conf # noqa: E402 [flake8 lint suppression]
from pytz import timezone # noqa: E402 [flake8 lint suppression]
from const import logPath # noqa: E402 [flake8 lint suppression]
# Make sure the TIMEZONE for logging is correct
conf.tz = timezone(get_setting_value('TIMEZONE'))
@@ -37,21 +37,16 @@ LOCK_FILE = os.path.join(LOG_PATH, f'full_run.{pluginName}.lock')
urllib3.disable_warnings(InsecureRequestWarning)
# Workflow
def main():
mylog('verbose', [f'[{pluginName}] In script'])
# init global variables
global UNIFI_USERNAME, UNIFI_PASSWORD, UNIFI_HOST, UNIFI_SITES, PORT, VERIFYSSL, VERSION, FULL_IMPORT
# parse output
plugin_objects = Plugin_Objects(RESULT_FILE)
plugin_objects = Plugin_Objects(RESULT_FILE)
UNIFI_USERNAME = get_setting_value("UNFIMP_username")
UNIFI_PASSWORD = get_setting_value("UNFIMP_password")
UNIFI_HOST = get_setting_value("UNFIMP_host")
@@ -64,12 +59,11 @@ def main():
plugin_objects = get_entries(plugin_objects)
plugin_objects.write_result_file()
mylog('verbose', [f'[{pluginName}] Scan finished, found {len(plugin_objects)} devices'])
# .............................................
# .............................................
def get_entries(plugin_objects: Plugin_Objects) -> Plugin_Objects:
global VERIFYSSL
@@ -79,27 +73,26 @@ def get_entries(plugin_objects: Plugin_Objects) -> Plugin_Objects:
mylog('verbose', [f'[{pluginName}] sites: {UNIFI_SITES}'])
if (VERIFYSSL.upper() == "TRUE"):
VERIFYSSL = True
else:
VERIFYSSL = False
# mylog('verbose', [f'[{pluginName}] sites: {sites}'])
for site in UNIFI_SITES:
mylog('verbose', [f'[{pluginName}] site: {site}'])
c = Controller(
UNIFI_HOST,
UNIFI_USERNAME,
UNIFI_PASSWORD,
port=PORT,
version=VERSION,
ssl_verify=VERIFYSSL,
UNIFI_HOST,
UNIFI_USERNAME,
UNIFI_PASSWORD,
port=PORT,
version=VERSION,
ssl_verify=VERIFYSSL,
site_id=site)
online_macs = set()
processed_macs = []
@@ -114,7 +107,7 @@ def get_entries(plugin_objects: Plugin_Objects) -> Plugin_Objects:
plugin_objects=plugin_objects,
device_label='client',
device_vendor="",
force_import=True # These are online clients, force import
force_import=True # These are online clients, force import
)
mylog('verbose', [f'[{pluginName}] Found {len(plugin_objects)} Online Devices'])
@@ -154,11 +147,9 @@ def get_entries(plugin_objects: Plugin_Objects) -> Plugin_Objects:
mylog('verbose', [f'[{pluginName}] Found {len(plugin_objects)} Users'])
mylog('verbose', [f'[{pluginName}] check if Lock file needs to be modified'])
set_lock_file_value(FULL_IMPORT, lock_file_value)
mylog('verbose', [f'[{pluginName}] Found {len(plugin_objects)} Clients overall'])
return plugin_objects
@@ -173,19 +164,19 @@ def collect_details(device_type, devices, online_macs, processed_macs, plugin_ob
name = get_name(get_unifi_val(device, 'name'), get_unifi_val(device, 'hostname'))
ipTmp = get_ip(get_unifi_val(device, 'lan_ip'), get_unifi_val(device, 'last_ip'), get_unifi_val(device, 'fixed_ip'), get_unifi_val(device, 'ip'))
macTmp = device['mac']
# continue only if valid MAC address
if is_mac(macTmp):
status = 1 if macTmp in online_macs else device.get('state', 0)
deviceType = device_type.get(device.get('type'), '')
parentMac = get_parent_mac(get_unifi_val(device, 'uplink_mac'), get_unifi_val(device, 'ap_mac'), get_unifi_val(device, 'sw_mac'))
# override parent MAC if this is a router
if parentMac == 'null' and is_typical_router_ip(ipTmp):
parentMac = 'Internet'
parentMac = 'Internet'
# Add object only if not processed
if macTmp not in processed_macs and ( status == 1 or force_import is True ):
if macTmp not in processed_macs and (status == 1 or force_import is True):
plugin_objects.add_object(
primaryId=macTmp,
secondaryId=ipTmp,
@@ -203,7 +194,8 @@ def collect_details(device_type, devices, online_macs, processed_macs, plugin_ob
processed_macs.append(macTmp)
else:
mylog('verbose', [f'[{pluginName}] Skipping, not a valid MAC address: {macTmp}'])
# -----------------------------------------------------------------------------
def get_unifi_val(obj, key, default='null'):
if isinstance(obj, dict):
@@ -212,9 +204,9 @@ def get_unifi_val(obj, key, default='null'):
for k, v in obj.items():
if isinstance(v, dict):
result = get_unifi_val(v, key, default)
if result not in ['','None', None, 'null']:
if result not in ['', 'None', None, 'null']:
return result
mylog('trace', [f'[{pluginName}] Value not found for key "{key}" in obj "{json.dumps(obj)}"'])
return default
@@ -226,6 +218,7 @@ def get_name(*names: str) -> str:
return rmBadChars(name)
return 'null'
# -----------------------------------------------------------------------------
def get_parent_mac(*macs: str) -> str:
for mac in macs:
@@ -233,6 +226,7 @@ def get_parent_mac(*macs: str) -> str:
return mac
return 'null'
# -----------------------------------------------------------------------------
def get_port(*ports: str) -> str:
for port in ports:
@@ -240,12 +234,6 @@ def get_port(*ports: str) -> str:
return port
return 'null'
# -----------------------------------------------------------------------------
def get_port(*macs: str) -> str:
for mac in macs:
if mac and mac != 'null':
return mac
return 'null'
# -----------------------------------------------------------------------------
def get_ip(*ips: str) -> str:
@@ -271,7 +259,7 @@ def set_lock_file_value(config_value: str, lock_file_value: bool) -> None:
mylog('verbose', [f'[{pluginName}] Setting lock value for "full import" to {out}'])
with open(LOCK_FILE, 'w') as lock_file:
lock_file.write(str(out))
lock_file.write(str(out))
# -----------------------------------------------------------------------------
@@ -286,15 +274,16 @@ def read_lock_file() -> bool:
# -----------------------------------------------------------------------------
def check_full_run_state(config_value: str, lock_file_value: bool) -> bool:
if config_value == 'always' or (config_value == 'once' and lock_file_value == False):
if config_value == 'always' or (config_value == 'once' and lock_file_value is False):
mylog('verbose', [f'[{pluginName}] Full import needs to be done: config_value: {config_value} and lock_file_value: {lock_file_value}'])
return True
else:
mylog('verbose', [f'[{pluginName}] Full import NOT needed: config_value: {config_value} and lock_file_value: {lock_file_value}'])
return False
#===============================================================================
# ===============================================================================
# BEGIN
#===============================================================================
# ===============================================================================
if __name__ == '__main__':
main()
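# A minimal sketch of the "full import once" gate used above: a lock file stores a
# flag, and a full import runs when the setting is 'always' or when it is 'once' and
# the flag has not been written yet. The file path and flag encoding are assumptions.
import os

LOCK_SKETCH = '/tmp/full_run.UNFIMP.lock'   # assumed path, for illustration only

def read_flag() -> bool:
    if not os.path.exists(LOCK_SKETCH):
        return False
    with open(LOCK_SKETCH) as fh:
        return fh.read().strip() == 'True'

def needs_full_import(config_value: str) -> bool:
    return config_value == 'always' or (config_value == 'once' and read_flag() is False)

print(needs_full_import('once'))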

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python
# !/usr/bin/env python
import os
import sys
@@ -9,13 +9,13 @@ import sqlite3
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from plugin_helper import Plugin_Objects, handleEmpty
from logger import mylog, Logger
from helper import get_setting_value
from const import logPath, applicationPath, fullDbPath
from scan.device_handling import query_MAC_vendor
import conf
from pytz import timezone
from plugin_helper import Plugin_Objects, handleEmpty # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
from const import logPath, applicationPath, fullDbPath # noqa: E402 [flake8 lint suppression]
from scan.device_handling import query_MAC_vendor # noqa: E402 [flake8 lint suppression]
import conf # noqa: E402 [flake8 lint suppression]
from pytz import timezone # noqa: E402 [flake8 lint suppression]
# Make sure the TIMEZONE for logging is correct
conf.tz = timezone(get_setting_value('TIMEZONE'))
@@ -25,17 +25,17 @@ Logger(get_setting_value('LOG_LEVEL'))
pluginName = 'VNDRPDT'
LOG_PATH = logPath + '/plugins'
LOG_FILE = os.path.join(LOG_PATH, f'script.{pluginName}.log')
RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
def main():
mylog('verbose', ['[VNDRPDT] In script'])
def main():
# Get newest DB
update_vendor_database()
mylog('verbose', ['[VNDRPDT] In script'])
# Get newest DB
update_vendor_database()
# Resolve missing vendors
plugin_objects = Plugin_Objects(RESULT_FILE)
@@ -43,67 +43,67 @@ def main():
plugin_objects = update_vendors(fullDbPath, plugin_objects)
plugin_objects.write_result_file()
mylog('verbose', ['[VNDRPDT] Update complete'])
mylog('verbose', ['[VNDRPDT] Update complete'])
return 0
#===============================================================================
# ===============================================================================
# Update device vendors database
#===============================================================================
# ===============================================================================
def update_vendor_database():
# Update vendors DB (iab oui)
mylog('verbose', [' Updating vendors DB (iab & oui)'])
mylog('verbose', [' Updating vendors DB (iab & oui)'])
update_args = ['sh', applicationPath + '/services/update_vendors.sh']
# Execute command
# Execute command
try:
# try running a subprocess safely
update_output = subprocess.check_output (update_args)
subprocess.check_output(update_args)
except subprocess.CalledProcessError as e:
# An error occurred, handle it
mylog('verbose', [' FAILED: Updating vendors DB, set LOG_LEVEL=debug for more info'])
mylog('verbose', [e.output])
mylog('verbose', [' FAILED: Updating vendors DB, set LOG_LEVEL=debug for more info'])
mylog('verbose', [e.output])
# ------------------------------------------------------------------------------
# resolve missing vendors
def update_vendors (dbPath, plugin_objects):
def update_vendors(dbPath, plugin_objects):
# Connect to the App SQLite database
conn = sqlite3.connect(dbPath)
sql = conn.cursor()
# Initialize variables
recordsToUpdate = []
ignored = 0
notFound = 0
mylog('verbose', [' Searching devices vendor'])
mylog('verbose', [' Searching devices vendor'])
# Get devices without a vendor
sql.execute ("""SELECT
devMac,
devLastIP,
devName,
devVendor
sql.execute("""SELECT
devMac,
devLastIP,
devName,
devVendor
FROM Devices
WHERE devVendor = '(unknown)'
OR devVendor = '(Unknown)'
WHERE devVendor = '(unknown)'
OR devVendor = '(Unknown)'
OR devVendor = ''
OR devVendor IS NULL
""")
devices = sql.fetchall()
conn.commit()
devices = sql.fetchall()
conn.commit()
# Close the database connection
conn.close()
conn.close()
# All devices loop
for device in devices:
# Search vendor in HW Vendors DB
vendor = query_MAC_vendor (device[0])
vendor = query_MAC_vendor(device[0])
if vendor == -1 :
notFound += 1
elif vendor == -2 :
@@ -112,27 +112,25 @@ def update_vendors (dbPath, plugin_objects):
plugin_objects.add_object(
primaryId = handleEmpty(device[0]), # MAC (Device Name)
secondaryId = handleEmpty(device[1]), # IP Address (always 0.0.0.0)
watched1 = handleEmpty(vendor),
watched1 = handleEmpty(vendor),
watched2 = handleEmpty(device[2]), # Device name
watched3 = "",
watched4 = "",
extra = "",
foreignKey = handleEmpty(device[0])
)
# Print log
extra = "",
foreignKey = handleEmpty(device[0])
)
# Print log
mylog('verbose', [" Devices Ignored : ", ignored])
mylog('verbose', [" Devices with missing vendor : ", len(devices)])
mylog('verbose', [" Vendors Not Found : ", notFound])
mylog('verbose', [" Vendors updated : ", len(plugin_objects) ])
mylog('verbose', [" Vendors updated : ", len(plugin_objects)])
return plugin_objects
#===============================================================================
# ===============================================================================
# BEGIN
#===============================================================================
# ===============================================================================
if __name__ == '__main__':
main()
main()
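# A hedged sketch of an OUI-prefix lookup in the spirit of query_MAC_vendor above; the
# tiny in-memory table and the dummy-MAC rule are assumptions, only the sentinel return
# values (-1 not found, -2 ignored) mirror how the result codes are handled in
# update_vendors.
OUI_TABLE = {'B8:27:EB': 'Raspberry Pi Foundation'}   # illustrative only

def lookup_vendor_sketch(mac: str):
    mac = mac.upper().replace('-', ':')
    if mac.startswith('00:00:00'):         # treat obviously dummy MACs as ignored
        return -2
    return OUI_TABLE.get(mac[:8], -1)

print(lookup_vendor_sketch('b8-27-eb-12-34-56'))   # Raspberry Pi Foundation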

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python
# !/usr/bin/env python
import os
import sys
@@ -9,13 +9,13 @@ from wakeonlan import send_magic_packet
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from plugin_helper import Plugin_Objects
from logger import mylog, Logger
from const import logPath
from helper import get_setting_value
from database import DB
from models.device_instance import DeviceInstance
import conf
from plugin_helper import Plugin_Objects # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
from const import logPath # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
from database import DB # noqa: E402 [flake8 lint suppression]
from models.device_instance import DeviceInstance # noqa: E402 [flake8 lint suppression]
import conf # noqa: E402 [flake8 lint suppression]
# Make sure the TIMEZONE for logging is correct
conf.tz = timezone(get_setting_value('TIMEZONE'))
@@ -34,9 +34,8 @@ RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
plugin_objects = Plugin_Objects(RESULT_FILE)
def main():
    mylog('none', [f'[{pluginName}] In script'])
# Retrieve configuration settings
broadcast_ips = get_setting_value('WOL_broadcast_ips')
@@ -58,7 +57,7 @@ def main():
devices_to_wake = device_handler.getOffline()
elif 'down' in devices_to_wake:
devices_to_wake = device_handler.getDown()
else:
@@ -89,15 +88,16 @@ def main():
# log result
plugin_objects.write_result_file()
else:
        mylog('none', [f'[{pluginName}] No devices to wake'])
    mylog('none', [f'[{pluginName}] Script finished'])
return 0
# wake: send a WOL magic packet to the target device
def execute(port, ip, mac, name):
result = 'null'
try:
# Send the magic packet to wake up the device
@@ -105,7 +105,7 @@ def execute(port, ip, mac, name):
mylog('verbose', [f'[{pluginName}] Magic packet sent to {mac} ({name})'])
result = 'success'
except Exception as e:
result = str(e)
mylog('verbose', [f'[{pluginName}] Failed to send magic packet to {mac} ({name}): {e}'])
@@ -113,5 +113,6 @@ def execute(port, ip, mac, name):
# Return the data result
return result
if __name__ == '__main__':
main()
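The execute() helper above wraps the wakeonlan package. A minimal stand-alone sketch of the same call, with a made-up MAC address, broadcast address and port purely for illustration:

from wakeonlan import send_magic_packet

# Broadcast a WOL magic packet; ip_address/port mirror the plugin's broadcast IP and port settings
send_magic_packet('aa:bb:cc:dd:ee:ff',          # hypothetical target MAC
                  ip_address='192.168.1.255',   # hypothetical broadcast address
                  port=9)                       # standard WOL port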

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python
# !/usr/bin/env python
# Based on the work of https://github.com/leiweibau/Pi.Alert
import requests
@@ -12,12 +12,12 @@ from urllib3.exceptions import InsecureRequestWarning
INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from plugin_helper import Plugin_Objects
from const import logPath
from helper import get_setting_value
import conf
from pytz import timezone
from logger import mylog, Logger
from plugin_helper import Plugin_Objects # noqa: E402 [flake8 lint suppression]
from const import logPath # noqa: E402 [flake8 lint suppression]
from helper import get_setting_value # noqa: E402 [flake8 lint suppression]
import conf # noqa: E402 [flake8 lint suppression]
from pytz import timezone # noqa: E402 [flake8 lint suppression]
from logger import mylog, Logger # noqa: E402 [flake8 lint suppression]
# Make sure the TIMEZONE for logging is correct
conf.tz = timezone(get_setting_value('TIMEZONE'))
@@ -30,15 +30,14 @@ pluginName = 'WEBMON'
LOG_PATH = logPath + '/plugins'
RESULT_FILE = os.path.join(LOG_PATH, f'last_result.{pluginName}.log')
mylog('verbose', [f'[{pluginName}] In script'])
def main():
values = get_setting_value('WEBMON_urls_to_check')
mylog('verbose', [f'[{pluginName}] Checking URLs: {values}'])
if len(values) > 0:
plugin_objects = Plugin_Objects(RESULT_FILE)
@@ -48,12 +47,13 @@ def main():
else:
return
def check_services_health(site):
mylog('verbose', [f'[{pluginName}] Checking {site}'])
urllib3.disable_warnings(InsecureRequestWarning)
try:
resp = requests.get(site, verify=False, timeout=get_setting_value('WEBMON_RUN_TIMEOUT'), headers={"User-Agent": "NetAlertX"})
latency = resp.elapsed.total_seconds()
@@ -79,12 +79,13 @@ def check_services_health(site):
return status, latency
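The hunk above elides most of the response handling, so only the (status, latency) return is visible. A minimal stand-alone sketch of one way such a pair can be computed with requests; the status values and the 10-second timeout are illustrative, not the plugin's actual logic:

import requests

def probe(site):
    # Returns (status, latency_seconds); latency is 0 when the request never completed
    try:
        resp = requests.get(site, verify=False, timeout=10, headers={"User-Agent": "NetAlertX"})
        latency = resp.elapsed.total_seconds()
        return str(resp.status_code), latency
    except requests.exceptions.RequestException as e:
        return type(e).__name__, 0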
def service_monitoring(urls, plugin_objects):
for site in urls:
status, latency = check_services_health(site)
plugin_objects.add_object(
primaryId=site,
            secondaryId='null',
watched1=status,
watched2=latency,
watched3='null',
@@ -94,7 +95,6 @@ def service_monitoring(urls, plugin_objects):
)
return plugin_objects
if __name__ == '__main__':
sys.exit(main())