Mirror of https://github.com/jokob-sk/NetAlertX.git, synced 2025-12-07 09:36:05 -08:00

Commit: /data and /tmp standardization
@@ -1 +1 @@
-""" __init__ for NetAlertX """
+"""__init__ for NetAlertX"""
@@ -1,32 +1,30 @@
 #!/usr/bin/env python
 #
-#-------------------------------------------------------------------------------
+# -------------------------------------------------------------------------------
 # NetAlertX v2.70 / 2021-02-01
 # Open Source Network Guard / WIFI & LAN intrusion detector
 #
 # Back module. Network scanner
-#-------------------------------------------------------------------------------
+# -------------------------------------------------------------------------------
 # Puche 2021 / 2022+ jokob jokob@duck.com GNU GPLv3
-#-------------------------------------------------------------------------------
+# -------------------------------------------------------------------------------


-#===============================================================================
+# ===============================================================================
 # IMPORTS
-#===============================================================================
-#from __future__ import print_function
+# ===============================================================================
+# from __future__ import print_function

 import sys
 import time
 import datetime
 import multiprocessing
 import subprocess
 from pathlib import Path

 # Register NetAlertX modules
 import conf
 from const import *
 from logger import mylog
-from helper import filePermissions, timeNowTZ, get_setting_value
+from helper import filePermissions, timeNowTZ
 from app_state import updateState
 from api import update_api
 from scan.session_events import process_scan
@@ -36,13 +34,13 @@ from messaging.reporting import get_notifications
 from models.notification_instance import NotificationInstance
 from models.user_events_queue_instance import UserEventsQueueInstance
 from scan.device_handling import update_devices_names
 from workflows.manager import WorkflowManager

-#===============================================================================
-#===============================================================================
+# ===============================================================================
+# ===============================================================================
 # MAIN
-#===============================================================================
-#===============================================================================
+# ===============================================================================
+# ===============================================================================
 """
 main structure of NetAlertX
@@ -62,16 +60,19 @@ main structure of NetAlertX
     end loop
 """

-def main ():
-    mylog('none', ['[MAIN] Setting up ...'])  # has to be level 'none' as user config not loaded yet
-
-    mylog('none', [f'[conf.tz] Setting up ...{conf.tz}'])
+def main():
+    mylog(
+        "none", ["[MAIN] Setting up ..."]
+    )  # has to be level 'none' as user config not loaded yet
+
+    mylog("none", [f"[conf.tz] Setting up ...{conf.tz}"])

     # check file permissions and fix if required
     filePermissions()

     # Header + init app state
     updateState("Initializing", None, None, None, 0)

     # Open DB once and keep open
     # Opening/closing the DB frequently actually causes more issues
@@ -79,17 +80,17 @@ def main ():
     db.open()
     sql = db.sql  # To-Do replace with the db class

     # Init DB
     db.initDB()

     # Initialize the WorkflowManager
     workflow_manager = WorkflowManager(db)

-    #===============================================================================
+    # ===============================================================================
     # This is the main loop of NetAlertX
-    #===============================================================================
+    # ===============================================================================

-    mylog('debug', '[MAIN] Starting loop')
+    mylog("debug", "[MAIN] Starting loop")

     all_plugins = None
     pm = None
@@ -100,80 +101,77 @@ def main ():
     # -- SETTINGS BACKWARD COMPATIBILITY END --

     while True:

         # re-load user configuration and plugins
         pm, all_plugins, imported = importConfigs(pm, db, all_plugins)

         # update time started
         conf.loop_start_time = timeNowTZ()

-        loop_start_time = conf.loop_start_time # TODO fix
+        loop_start_time = conf.loop_start_time  # TODO fix

         # Handle plugins executed ONCE
         if conf.plugins_once_run == False:
-            pm.run_plugin_scripts('once')
+            pm.run_plugin_scripts("once")
             conf.plugins_once_run = True

         # check if user is waiting for api_update
         pm.check_and_run_user_event()

         # Update API endpoints
         update_api(db, all_plugins, False)

         # proceed if 1 minute passed
-        if conf.last_scan_run + datetime.timedelta(minutes=1) < conf.loop_start_time :
-
+        if conf.last_scan_run + datetime.timedelta(minutes=1) < conf.loop_start_time:
             # last time any scan or maintenance/upkeep was run
             conf.last_scan_run = loop_start_time

             # Header
             updateState("Process: Start")

             # Timestamp
             startTime = loop_start_time
-            startTime = startTime.replace (microsecond=0)
+            startTime = startTime.replace(microsecond=0)

             # Check if any plugins need to run on schedule
-            pm.run_plugin_scripts('schedule')
+            pm.run_plugin_scripts("schedule")

             # determine run/scan type based on passed time
             # --------------------------------------------

             # Runs plugin scripts which are set to run every time after a scans finished
-            pm.run_plugin_scripts('always_after_scan')
+            pm.run_plugin_scripts("always_after_scan")

             # process all the scanned data into new devices
             processScan = updateState("Check scan").processScan
-            mylog('debug', [f'[MAIN] processScan: {processScan}'])
+            mylog("debug", [f"[MAIN] processScan: {processScan}"])

             if processScan == True:
-                mylog('debug', "[MAIN] start processing scan results")
+                mylog("debug", "[MAIN] start processing scan results")
                 process_scan(db)
                 updateState("Scan processed", None, None, None, None, False)

             # Name resolution
             # --------------------------------------------

             # run plugins before notification processing (e.g. Plugins to discover device names)
-            pm.run_plugin_scripts('before_name_updates')
+            pm.run_plugin_scripts("before_name_updates")

             # Resolve devices names
-            mylog('debug','[Main] Resolve devices names')
+            mylog("debug", "[Main] Resolve devices names")
             update_devices_names(pm)

             # --------
             # Reporting

             # Check if new devices found
-            sql.execute (sql_new_devices)
+            sql.execute(sql_new_devices)
             newDevices = sql.fetchall()
             db.commitDB()

             # new devices were found
             if len(newDevices) > 0:
                 # run all plugins registered to be run when new devices are found
-                pm.run_plugin_scripts('on_new_device')
+                pm.run_plugin_scripts("on_new_device")

             # Notification handling
             # ----------------------------------------
@@ -182,7 +180,7 @@ def main ():
             final_json = get_notifications(db)

             # Write the notifications into the DB
             notification = NotificationInstance(db)
             notificationObj = notification.create(final_json, "")

             # ------------------------------------------------------------------------------
@@ -195,75 +193,84 @@ def main ():
             # (e.g. down-event reporting, delay timers, plugin conditions).
             # - A pending flag means “still under evaluation,” not “missed.”
             #   It will clear automatically once its event is included in a sent alert.
             # ------------------------------------------------------------------------------
             if notificationObj.HasNotifications:
-
-                pm.run_plugin_scripts('on_notification')
+                pm.run_plugin_scripts("on_notification")
                 notification.setAllProcessed()

                 # Only clear pending email flags and plugins_events once notifications are sent.
                 notification.clearPendingEmailFlag()

             else:
                 # If there are no notifications to process,
                 # we still need to clear all plugin events to prevent database growth if
                 # no notification gateways are configured
                 notification.clearPluginEvents()
-                mylog('verbose', ['[Notification] No changes to report'])
+                mylog("verbose", ["[Notification] No changes to report"])

             # Commit SQL
             db.commitDB()

-            mylog('verbose', ['[MAIN] Process: Idle'])
+            mylog("verbose", ["[MAIN] Process: Idle"])
         else:
             # do something
             # mylog('verbose', ['[MAIN] Waiting to start next loop'])
             updateState("Process: Idle")

         # WORKFLOWS handling
         # ----------------------------------------
         # Fetch new unprocessed events
         new_events = workflow_manager.get_new_app_events()

-        mylog('debug', [f'[MAIN] Processing WORKFLOW new_events from get_new_app_events: {len(new_events)}'])
+        mylog(
+            "debug",
+            [
+                f"[MAIN] Processing WORKFLOW new_events from get_new_app_events: {len(new_events)}"
+            ],
+        )

         # Process each new event and check triggers
         if len(new_events) > 0:
             updateState("Workflows: Start")
             update_api_flag = False
             for event in new_events:
-                mylog('debug', [f'[MAIN] Processing WORKFLOW app event with GUID {event["GUID"]}'])
+                mylog(
+                    "debug",
+                    [f"[MAIN] Processing WORKFLOW app event with GUID {event['GUID']}"],
+                )

                 # proceed to process events
                 workflow_manager.process_event(event)

                 if workflow_manager.update_api:
                     # Update API endpoints if needed
                     update_api_flag = True

             if update_api_flag:
                 update_api(db, all_plugins, True)

             updateState("Workflows: End")

         # check if devices list needs updating
         userUpdatedDevices = UserEventsQueueInstance().has_update_devices()

-        mylog('debug', [f'[Plugins] Should I update API (userUpdatedDevices): {userUpdatedDevices}'])
+        mylog(
+            "debug",
+            [
+                f"[Plugins] Should I update API (userUpdatedDevices): {userUpdatedDevices}"
+            ],
+        )

         if userUpdatedDevices:
             update_api(db, all_plugins, True, ["devices"], userUpdatedDevices)

-        #loop
-        time.sleep(5) # wait for N seconds
+        # loop
+        time.sleep(5)  # wait for N seconds


-#===============================================================================
+# ===============================================================================
 # BEGIN
-#===============================================================================
-if __name__ == '__main__':
-    mylog('debug', ['[__main__] Welcome to NetAlertX'])
+# ===============================================================================
+if __name__ == "__main__":
+    mylog("debug", ["[__main__] Welcome to NetAlertX"])
     sys.exit(main())
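
For context, the main loop above ticks every 5 seconds but gates scans and upkeep behind a one-minute check against conf.last_scan_run. A minimal sketch of that cadence, with hypothetical cheap_work/heavy_work stand-ins rather than the real NetAlertX calls:

import datetime
import time

last_run = datetime.datetime.min  # plays the role of conf.last_scan_run

def cheap_work():
    pass  # stand-in for per-tick work such as refreshing API endpoint files

def heavy_work():
    pass  # stand-in for scans, name resolution, and reporting

for _ in range(3):  # the real loop runs forever
    now = datetime.datetime.now()  # the app uses timeNowTZ() instead
    cheap_work()  # runs on every tick
    if last_run + datetime.timedelta(minutes=1) < now:
        last_run = now  # record when any scan/upkeep last ran
        heavy_work()  # runs at most once per minute
    time.sleep(0.05)  # tick interval (5 seconds in the real loop)
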
server/api.py: 157 changed lines
@@ -3,17 +3,30 @@ import time
 import threading
 import datetime

 # Register NetAlertX modules
 import conf
-from const import (apiPath, sql_appevents, sql_devices_all, sql_events_pending_alert, sql_settings, sql_plugins_events, sql_plugins_history, sql_plugins_objects,sql_language_strings, sql_notifications_all, sql_online_history, sql_devices_tiles, sql_devices_filters)
+from const import (
+    apiPath,
+    sql_appevents,
+    sql_devices_all,
+    sql_events_pending_alert,
+    sql_settings,
+    sql_plugins_events,
+    sql_plugins_history,
+    sql_plugins_objects,
+    sql_language_strings,
+    sql_notifications_all,
+    sql_online_history,
+    sql_devices_tiles,
+    sql_devices_filters,
+)
 from logger import mylog
 from helper import write_file, get_setting_value, timeNowTZ
 from app_state import updateState
 from models.user_events_queue_instance import UserEventsQueueInstance
 from messaging.in_app import write_notification

 # Import the start_server function
 from api_server.api_server_start import start_server

 apiEndpoints = []
@@ -22,25 +35,28 @@ api_lock = threading.Lock()
 periodic_write_lock = threading.Lock()
 stop_event = threading.Event()  # Event to signal thread termination

-#===============================================================================
+
+# ===============================================================================
 # API
-#===============================================================================
-def update_api(db, all_plugins, forceUpdate, updateOnlyDataSources=[], is_ad_hoc_user_event=False):
-    mylog('debug', ['[API] Update API starting'])
+# ===============================================================================
+def update_api(
+    db, all_plugins, forceUpdate, updateOnlyDataSources=[], is_ad_hoc_user_event=False
+):
+    mylog("debug", ["[API] Update API starting"])

     # Start periodic write if not running
     start_periodic_write(interval=1)

     # Update app_state.json and retrieve app_state to check if GraphQL server is running
     app_state = updateState()

     # Save plugins
-    write_file(apiPath + 'plugins.json', json.dumps({"data": all_plugins}))
+    write_file(apiPath + "plugins.json", json.dumps({"data": all_plugins}))

     # Prepare database tables we want to expose
     dataSourcesSQLs = [
         ["appevents", sql_appevents],
         ["devices", sql_devices_all],
         ["events_pending_alert", sql_events_pending_alert],
         ["settings", sql_settings],
         ["plugins_events", sql_plugins_events],
@@ -57,8 +73,14 @@ def update_api(db, all_plugins, forceUpdate, updateOnlyDataSources=[], is_ad_hoc
     # Save selected database tables
     for dsSQL in dataSourcesSQLs:
         if not updateOnlyDataSources or dsSQL[0] in updateOnlyDataSources:
-            api_endpoint_class(db, forceUpdate, dsSQL[1], apiPath + 'table_' + dsSQL[0] + '.json', is_ad_hoc_user_event)
+            api_endpoint_class(
+                db,
+                forceUpdate,
+                dsSQL[1],
+                apiPath + "table_" + dsSQL[0] + ".json",
+                is_ad_hoc_user_event,
+            )

     # Start the GraphQL server
     graphql_port_value = get_setting_value("GRAPHQL_PORT")
     api_token_value = get_setting_value("API_TOKEN")
@@ -67,16 +89,26 @@ def update_api(db, all_plugins, forceUpdate, updateOnlyDataSources=[], is_ad_hoc
     if app_state.graphQLServerStarted == 0:
         if graphql_port_value is not None and len(api_token_value) > 1:
             try:
-                graphql_port_value = int(graphql_port_value) # Ensure port is an integer
+                graphql_port_value = int(
+                    graphql_port_value
+                )  # Ensure port is an integer
                 start_server(graphql_port_value, app_state)  # Start the server
             except ValueError:
-                mylog('none', [f"[API] Invalid GRAPHQL_PORT value, must be an integer: {graphql_port_value}"])
+                mylog(
+                    "none",
+                    [
+                        f"[API] Invalid GRAPHQL_PORT value, must be an integer: {graphql_port_value}"
+                    ],
+                )
         else:
-            mylog('none', [f"[API] GRAPHQL_PORT or API_TOKEN is not set, will try later."])
+            mylog(
+                "none", ["[API] GRAPHQL_PORT or API_TOKEN is not set, will try later."]
+            )

-#-------------------------------------------------------------------------------
+
+# -------------------------------------------------------------------------------
 class api_endpoint_class:
     def __init__(self, db, forceUpdate, query, path, is_ad_hoc_user_event=False):
         global apiEndpoints

         current_time = timeNowTZ()
@@ -85,29 +117,39 @@ class api_endpoint_class:
         self.query = query
         self.jsonData = db.get_table_as_json(self.query).json
         self.path = path
-        self.fileName = path.split('/')[-1]
+        self.fileName = path.split("/")[-1]
         self.hash = hash(json.dumps(self.jsonData))
         self.debounce_interval = 3  # Time in seconds to wait before writing
         self.changeDetectedWhen = None
         # self.last_update_time = current_time - datetime.timedelta(minutes=1) # Last time data was updated
         self.is_ad_hoc_user_event = is_ad_hoc_user_event
         self.needsUpdate = False

         # Check if the endpoint needs to be updated
         found = False
         index = 0

         # Search previous endpoint states to check if API needs updating
         for endpoint in apiEndpoints:
             # Match SQL and API endpoint path
             if endpoint.query == self.query and endpoint.path == self.path:
                 found = True
-                mylog('trace', [f'[API] api_endpoint_class: Hashes (file|old|new): ({self.fileName}|{endpoint.hash}|{self.hash})'])
+                mylog(
+                    "trace",
+                    [
+                        f"[API] api_endpoint_class: Hashes (file|old|new): ({self.fileName}|{endpoint.hash}|{self.hash})"
+                    ],
+                )
                 if endpoint.hash != self.hash:
                     self.needsUpdate = True
                     # Only update changeDetectedWhen if it hasn't been set recently
-                    if not self.changeDetectedWhen or current_time > (self.changeDetectedWhen + datetime.timedelta(seconds=self.debounce_interval)):
-                        self.changeDetectedWhen = current_time # Set timestamp for change detection
+                    if not self.changeDetectedWhen or current_time > (
+                        self.changeDetectedWhen
+                        + datetime.timedelta(seconds=self.debounce_interval)
+                    ):
+                        self.changeDetectedWhen = (
+                            current_time  # Set timestamp for change detection
+                        )
                 if index < len(apiEndpoints):
                     apiEndpoints[index] = self
                 # check end of bounds and replace
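
The class above detects endpoint changes by hashing the serialized JSON payload and comparing it to the hash cached for that endpoint. A minimal standalone sketch of the same idea (hypothetical names; Python's built-in hash() is randomized per process, which is fine here because hashes are only ever compared within one run):

import json

_cache = {}  # path -> hash of the last payload seen

def payload_changed(path, payload):
    """Return True when the JSON-serialized payload differs from the cached one."""
    new_hash = hash(json.dumps(payload, sort_keys=True))
    changed = _cache.get(path) != new_hash
    _cache[path] = new_hash
    return changed

# First sighting counts as a change; identical payloads do not.
assert payload_changed("table_devices.json", {"data": [1, 2]})
assert not payload_changed("table_devices.json", {"data": [1, 2]})
assert payload_changed("table_devices.json", {"data": [1, 2, 3]})
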
@@ -120,28 +162,45 @@ class api_endpoint_class:
         if not found:
             self.needsUpdate = True
             # Only update changeDetectedWhen if it hasn't been set recently
-            if not self.changeDetectedWhen or current_time > (self.changeDetectedWhen + datetime.timedelta(seconds=self.debounce_interval)):
-                self.changeDetectedWhen = current_time # Initialize timestamp for new endpoint
+            if not self.changeDetectedWhen or current_time > (
+                self.changeDetectedWhen
+                + datetime.timedelta(seconds=self.debounce_interval)
+            ):
+                self.changeDetectedWhen = (
+                    current_time  # Initialize timestamp for new endpoint
+                )
             apiEndpoints.append(self)

         # Needs to be called for initial updates
         self.try_write(forceUpdate)

-    #----------------------------------------
+    # ----------------------------------------
     def try_write(self, forceUpdate):
         current_time = timeNowTZ()

         # Debugging info to understand the issue
         # mylog('debug', [f'[API] api_endpoint_class: {self.fileName} is_ad_hoc_user_event {self.is_ad_hoc_user_event} last_update_time={self.last_update_time}, debounce time={self.last_update_time + datetime.timedelta(seconds=self.debounce_interval)}.'])

         # Only attempt to write if the debounce time has passed
-        if forceUpdate == True or (self.needsUpdate and (self.changeDetectedWhen is None or current_time > (self.changeDetectedWhen + datetime.timedelta(seconds=self.debounce_interval)))):
-
-            mylog('debug', [f'[API] api_endpoint_class: Writing {self.fileName} after debounce.'])
+        if forceUpdate == True or (
+            self.needsUpdate
+            and (
+                self.changeDetectedWhen is None
+                or current_time
+                > (
+                    self.changeDetectedWhen
+                    + datetime.timedelta(seconds=self.debounce_interval)
+                )
+            )
+        ):
+            mylog(
+                "debug",
+                [f"[API] api_endpoint_class: Writing {self.fileName} after debounce."],
+            )

             write_file(self.path, json.dumps(self.jsonData))

             self.needsUpdate = False
             self.last_update_time = timeNowTZ()  # Reset last_update_time after writing

             # Update user event execution log
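
try_write implements a small write debounce: a detected change is only flushed to disk once debounce_interval seconds have passed since it was first seen, so bursts of updates collapse into a single file write. A standalone sketch of the same pattern, with hypothetical names:

import time

class DebouncedWriter:
    """Collapse rapid successive changes into one write after a quiet period."""

    def __init__(self, interval=3.0):
        self.interval = interval            # seconds to wait, like debounce_interval
        self.change_detected_at = None      # when the pending change was first seen

    def mark_changed(self, now):
        if self.change_detected_at is None:
            self.change_detected_at = now   # start the debounce window once

    def try_write(self, now, write):
        """Flush only when the debounce window has elapsed."""
        if self.change_detected_at is not None and now > self.change_detected_at + self.interval:
            write()
            self.change_detected_at = None  # flushed; wait for the next change

w = DebouncedWriter(interval=0.1)
w.mark_changed(time.monotonic())
w.try_write(time.monotonic(), lambda: print("flushed"))  # too early: nothing happens
time.sleep(0.15)
w.try_write(time.monotonic(), lambda: print("flushed"))  # prints "flushed"
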
@@ -156,13 +215,13 @@ class api_endpoint_class:
         # mylog('trace', [f'[API] api_endpoint_class: Skipping write for {self.fileName}, debounce time not passed.'])


-#===============================================================================
+# ===============================================================================
 # Periodic Write Functions
-#===============================================================================
+# ===============================================================================
 periodic_write_running = False
 periodic_write_thread = None


 def periodic_write(interval=1):
     """Periodically checks all endpoints for pending writes."""
     global apiEndpoints
@@ -179,12 +238,15 @@ def start_periodic_write(interval=1):

     with periodic_write_lock:
         if not periodic_write_running:
-            mylog('trace', ["[API] Starting periodic_write thread."])
+            mylog("trace", ["[API] Starting periodic_write thread."])
             periodic_write_running = True
-            periodic_write_thread = threading.Thread(target=periodic_write, args=(interval,), daemon=True)
+            periodic_write_thread = threading.Thread(
+                target=periodic_write, args=(interval,), daemon=True
+            )
             periodic_write_thread.start()
         else:
-            mylog('trace', ["[API] periodic_write is already running."])
+            mylog("trace", ["[API] periodic_write is already running."])


 def stop_periodic_write():
     """Stop the periodic_write thread."""
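
start_periodic_write guards thread creation with a lock and a module-level flag so concurrent callers cannot spawn duplicate writer threads. A compact sketch of that start-once pattern (hypothetical worker; the real code also supports stopping via a threading.Event):

import threading
import time

_lock = threading.Lock()
_running = False

def _worker(interval):
    # stand-in for periodic_write: poll and flush pending endpoint writes
    for _ in range(3):
        time.sleep(interval)

def start_once(interval=0.01):
    """Start the background worker exactly once, even under concurrent calls."""
    global _running
    with _lock:
        if _running:
            return False  # already running; nothing to do
        _running = True
        threading.Thread(target=_worker, args=(interval,), daemon=True).start()
        return True

assert start_once() is True
assert start_once() is False  # a second caller sees the flag and backs off
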
@@ -195,5 +257,4 @@ def stop_periodic_write():
         stop_event.set()
         periodic_write_thread.join()
         periodic_write_running = False
-
-        mylog('trace', ["[API] periodic_write thread stopped."])
+        mylog("trace", ["[API] periodic_write thread stopped."])
@@ -1,30 +1,74 @@
 import threading
 import sys
 import os

 from flask import Flask, request, jsonify, Response
 from flask_cors import CORS

 # Register NetAlertX directories
-INSTALL_PATH = "/app"
+INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
 sys.path.extend([f"{INSTALL_PATH}/server"])

 from logger import mylog
-from helper import get_setting_value, timeNowTZ
+from helper import get_setting_value
 from db.db_helper import get_date_from_period
 from app_state import updateState

-from .graphql_endpoint import devicesSchema
-from .device_endpoint import get_device_data, set_device_data, delete_device, delete_device_events, reset_device_props, copy_device, update_device_column
-from .devices_endpoint import get_all_devices, delete_unknown_devices, delete_all_with_empty_macs, delete_devices, export_devices, import_csv, devices_totals, devices_by_status
-from .events_endpoint import delete_events, delete_events_older_than, get_events, create_event, get_events_totals
-from .history_endpoint import delete_online_history
-from .prometheus_endpoint import get_metric_stats
-from .sessions_endpoint import get_sessions, delete_session, create_session, get_sessions_calendar, get_device_sessions, get_session_events
-from .nettools_endpoint import wakeonlan, traceroute, speedtest, nslookup, nmap_scan, internet_info
-from .dbquery_endpoint import read_query, write_query, update_query, delete_query
-from .sync_endpoint import handle_sync_post, handle_sync_get
-from messaging.in_app import write_notification, mark_all_notifications_read, delete_notifications, get_unread_notifications, delete_notification, mark_notification_as_read
+from api_server.graphql_endpoint import devicesSchema
+from api_server.device_endpoint import (
+    get_device_data,
+    set_device_data,
+    delete_device,
+    delete_device_events,
+    reset_device_props,
+    copy_device,
+    update_device_column,
+)
+from api_server.devices_endpoint import (
+    get_all_devices,
+    delete_unknown_devices,
+    delete_all_with_empty_macs,
+    delete_devices,
+    export_devices,
+    import_csv,
+    devices_totals,
+    devices_by_status,
+)
+from api_server.events_endpoint import (
+    delete_events,
+    delete_events_older_than,
+    get_events,
+    create_event,
+    get_events_totals,
+)
+from api_server.history_endpoint import delete_online_history
+from api_server.prometheus_endpoint import get_metric_stats
+from api_server.sessions_endpoint import (
+    get_sessions,
+    delete_session,
+    create_session,
+    get_sessions_calendar,
+    get_device_sessions,
+    get_session_events,
+)
+from api_server.nettools_endpoint import (
+    wakeonlan,
+    traceroute,
+    speedtest,
+    nslookup,
+    nmap_scan,
+    internet_info,
+)
+from api_server.dbquery_endpoint import read_query, write_query, update_query, delete_query
+from api_server.sync_endpoint import handle_sync_post, handle_sync_get
+from messaging.in_app import (
+    write_notification,
+    mark_all_notifications_read,
+    delete_notifications,
+    get_unread_notifications,
+    delete_notification,
+    mark_notification_as_read,
+)

 # Flask application
 app = Flask(__name__)
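
This hunk carries the theme of the commit: the hardcoded install path becomes environment-driven with a fallback default, so the tree can be relocated without code changes. A small illustration of the pattern (NETALERTX_APP is the variable used above; the override value below is hypothetical):

import os

# Resolve the install root from the environment, falling back to /app.
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
print(INSTALL_PATH)  # "/app" unless NETALERTX_APP is set

# Containers or tests can relocate the tree via the environment:
os.environ["NETALERTX_APP"] = "/opt/netalertx"  # hypothetical override
print(os.getenv("NETALERTX_APP", "/app"))  # "/opt/netalertx"
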
@@ -40,34 +84,36 @@ CORS(
         r"/settings/*": {"origins": "*"},
         r"/dbquery/*": {"origins": "*"},
         r"/messaging/*": {"origins": "*"},
-        r"/events/*": {"origins": "*"}
+        r"/events/*": {"origins": "*"},
     },
     supports_credentials=True,
-    allow_headers=["Authorization", "Content-Type"]
+    allow_headers=["Authorization", "Content-Type"],
 )

 # --------------------------
 # GraphQL Endpoints
 # --------------------------


 # Endpoint used when accessed via browser
 @app.route("/graphql", methods=["GET"])
 def graphql_debug():
     # Handles GET requests
     return "NetAlertX GraphQL server running."


 # Endpoint for GraphQL queries
 @app.route("/graphql", methods=["POST"])
 def graphql_endpoint():
     # Check for API token in headers
     if not is_authorized():
-        msg = '[graphql_server] Unauthorized access attempt - make sure your GRAPHQL_PORT and API_TOKEN settings are correct.'
-        mylog('verbose', [msg])
+        msg = "[graphql_server] Unauthorized access attempt - make sure your GRAPHQL_PORT and API_TOKEN settings are correct."
+        mylog("verbose", [msg])
         return jsonify({"error": msg}), 401

     # Retrieve and log request data
     data = request.get_json()
-    mylog('verbose', [f'[graphql_server] data: {data}'])
+    mylog("verbose", [f"[graphql_server] data: {data}"])

     # Execute the GraphQL query
     result = devicesSchema.execute(data.get("query"), variables=data.get("variables"))
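
For reference, a client calls this endpoint with a Bearer token and a JSON body holding the query and optional variables. A hedged sketch using the requests library; the port, token, and the query shape are assumptions for illustration, not confirmed by this diff:

import requests

# Hypothetical values: match your GRAPHQL_PORT and API_TOKEN settings.
url = "http://localhost:20212/graphql"
headers = {"Authorization": "Bearer my-api-token", "Content-Type": "application/json"}
body = {
    "query": "query ($n: Int) { devices(options: {limit: $n}) { count } }",  # assumed shape
    "variables": {"n": 5},
}

resp = requests.post(url, json=body, headers=headers, timeout=10)
print(resp.status_code, resp.json())  # 401 with an error message if the token is wrong
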
@@ -82,10 +128,12 @@ def graphql_endpoint():

     return jsonify(response)

+
 # --------------------------
 # Settings Endpoints
 # --------------------------

+
 @app.route("/settings/<setKey>", methods=["GET"])
 def api_get_setting(setKey):
     if not is_authorized():
@@ -93,40 +141,47 @@ def api_get_setting(setKey):
     value = get_setting_value(setKey)
     return jsonify({"success": True, "value": value})


 # --------------------------
 # Device Endpoints
 # --------------------------


 @app.route("/device/<mac>", methods=["GET"])
 def api_get_device(mac):
     if not is_authorized():
         return jsonify({"error": "Forbidden"}), 403
     return get_device_data(mac)


 @app.route("/device/<mac>", methods=["POST"])
 def api_set_device(mac):
     if not is_authorized():
         return jsonify({"error": "Forbidden"}), 403
     return set_device_data(mac, request.json)


 @app.route("/device/<mac>/delete", methods=["DELETE"])
 def api_delete_device(mac):
     if not is_authorized():
         return jsonify({"error": "Forbidden"}), 403
     return delete_device(mac)


 @app.route("/device/<mac>/events/delete", methods=["DELETE"])
 def api_delete_device_events(mac):
     if not is_authorized():
         return jsonify({"error": "Forbidden"}), 403
     return delete_device_events(mac)


 @app.route("/device/<mac>/reset-props", methods=["POST"])
 def api_reset_device_props(mac):
     if not is_authorized():
         return jsonify({"error": "Forbidden"}), 403
     return reset_device_props(mac, request.json)


 @app.route("/device/copy", methods=["POST"])
 def api_copy_device():
     if not is_authorized():
@@ -137,10 +192,13 @@ def api_copy_device():
     mac_to = data.get("macTo")

     if not mac_from or not mac_to:
-        return jsonify({"success": False, "error": "macFrom and macTo are required"}), 400
+        return jsonify(
+            {"success": False, "error": "macFrom and macTo are required"}
+        ), 400

     return copy_device(mac_from, mac_to)


 @app.route("/device/<mac>/update-column", methods=["POST"])
 def api_update_device_column(mac):
     if not is_authorized():
@@ -151,35 +209,42 @@ def api_update_device_column(mac):
     column_value = data.get("columnValue")

     if not column_name or not column_value:
-        return jsonify({"success": False, "error": "columnName and columnValue are required"}), 400
+        return jsonify(
+            {"success": False, "error": "columnName and columnValue are required"}
+        ), 400

     return update_device_column(mac, column_name, column_value)


 # --------------------------
 # Devices Collections
 # --------------------------


 @app.route("/devices", methods=["GET"])
 def api_get_devices():
     if not is_authorized():
         return jsonify({"error": "Forbidden"}), 403
     return get_all_devices()


 @app.route("/devices", methods=["DELETE"])
 def api_delete_devices():
     if not is_authorized():
         return jsonify({"error": "Forbidden"}), 403

     macs = request.json.get("macs") if request.is_json else None

     return delete_devices(macs)


 @app.route("/devices/empty-macs", methods=["DELETE"])
 def api_delete_all_empty_macs():
     if not is_authorized():
         return jsonify({"error": "Forbidden"}), 403
     return delete_all_with_empty_macs()


 @app.route("/devices/unknown", methods=["DELETE"])
 def api_delete_unknown_devices():
     if not is_authorized():
@@ -196,18 +261,21 @@ def api_export_devices(format=None):
     export_format = (format or request.args.get("format", "csv")).lower()
     return export_devices(export_format)


 @app.route("/devices/import", methods=["POST"])
 def api_import_csv():
     if not is_authorized():
         return jsonify({"error": "Forbidden"}), 403
     return import_csv(request.files.get("file"))


 @app.route("/devices/totals", methods=["GET"])
 def api_devices_totals():
     if not is_authorized():
         return jsonify({"error": "Forbidden"}), 403
     return devices_totals()


 @app.route("/devices/by-status", methods=["GET"])
 def api_devices_by_status():
     if not is_authorized():
@@ -217,6 +285,7 @@ def api_devices_by_status():

     return devices_by_status(status)

+
 # --------------------------
 # Net tools
 # --------------------------
@@ -228,6 +297,7 @@ def api_wakeonlan():
     mac = request.json.get("devMac")
     return wakeonlan(mac)

+
 @app.route("/nettools/traceroute", methods=["POST"])
 def api_traceroute():
     if not is_authorized():
@@ -235,12 +305,14 @@ def api_traceroute():
     ip = request.json.get("devLastIP")
     return traceroute(ip)

+
 @app.route("/nettools/speedtest", methods=["GET"])
 def api_speedtest():
     if not is_authorized():
         return jsonify({"error": "Forbidden"}), 403
     return speedtest()

+
 @app.route("/nettools/nslookup", methods=["POST"])
 def api_nslookup():
     """
@@ -257,6 +329,7 @@ def api_nslookup():
     ip = data["devLastIP"]
     return nslookup(ip)

+
 @app.route("/nettools/nmap", methods=["POST"])
 def api_nmap():
     """
@@ -273,7 +346,7 @@ def api_nmap():
     ip = data["scan"]
     mode = data["mode"]
     return nmap_scan(ip, mode)


 @app.route("/nettools/internetinfo", methods=["GET"])
 def api_internet_info():
@@ -286,6 +359,7 @@ def api_internet_info():
 # DB query
 # --------------------------

+
 @app.route("/dbquery/read", methods=["POST"])
 def dbquery_read():
     if not is_authorized():
@@ -296,9 +370,9 @@ def dbquery_read():

     if not raw_sql_b64:
         return jsonify({"error": "rawSql is required"}), 400

     return read_query(raw_sql_b64)


 @app.route("/dbquery/write", methods=["POST"])
 def dbquery_write():
@@ -324,12 +398,12 @@ def dbquery_update():
         return jsonify({"error": "Missing required parameters"}), 400

     return update_query(
         column_name=data["columnName"],
         ids=data["id"],
         dbtable=data["dbtable"],
         columns=data["columns"],
         values=data["values"],
     )


 @app.route("/dbquery/delete", methods=["POST"])
@@ -342,26 +416,30 @@ def dbquery_delete():
     if not all(data.get(k) for k in required):
         return jsonify({"error": "Missing required parameters"}), 400

     return delete_query(
         column_name=data["columnName"],
         ids=data["id"],
         dbtable=data["dbtable"],
     )


 # --------------------------
 # Online history
 # --------------------------


 @app.route("/history", methods=["DELETE"])
 def api_delete_online_history():
     if not is_authorized():
         return jsonify({"error": "Forbidden"}), 403
     return delete_online_history()


 # --------------------------
 # Device Events
 # --------------------------


 @app.route("/events/create/<mac>", methods=["POST"])
 def api_create_event(mac):
     if not is_authorized():
@@ -387,12 +465,14 @@ def api_events_by_mac(mac):
         return jsonify({"error": "Forbidden"}), 403
     return delete_device_events(mac)


 @app.route("/events", methods=["DELETE"])
 def api_delete_all_events():
     if not is_authorized():
         return jsonify({"error": "Forbidden"}), 403
     return delete_events()


 @app.route("/events", methods=["GET"])
 def api_get_events():
     if not is_authorized():
@@ -401,6 +481,7 @@ def api_get_events():
     mac = request.args.get("mac")
     return get_events(mac)

+
 @app.route("/events/<int:days>", methods=["DELETE"])
 def api_delete_old_events(days: int):
     """
@@ -409,9 +490,10 @@ def api_delete_old_events(days: int):
     """
     if not is_authorized():
         return jsonify({"error": "Forbidden"}), 403

     return delete_events_older_than(days)


 @app.route("/sessions/totals", methods=["GET"])
 def api_get_events_totals():
     if not is_authorized():
@@ -420,10 +502,12 @@ def api_get_events_totals():
     period = get_date_from_period(request.args.get("period", "7 days"))
     return get_events_totals(period)


 # --------------------------
 # Sessions
 # --------------------------


 @app.route("/sessions/create", methods=["POST"])
 def api_create_session():
     if not is_authorized():
@@ -440,7 +524,9 @@ def api_create_session():
     if not mac or not ip or not start_time:
         return jsonify({"success": False, "error": "Missing required parameters"}), 400

-    return create_session(mac, ip, start_time, end_time, event_type_conn, event_type_disc)
+    return create_session(
+        mac, ip, start_time, end_time, event_type_conn, event_type_disc
+    )


 @app.route("/sessions/delete", methods=["DELETE"])
@@ -466,6 +552,7 @@ def api_get_sessions():

     return get_sessions(mac, start_date, end_date)

+
 @app.route("/sessions/calendar", methods=["GET"])
 def api_get_sessions_calendar():
     if not is_authorized():
@@ -477,6 +564,7 @@ def api_get_sessions_calendar():

     return get_sessions_calendar(start_date, end_date)

+
 @app.route("/sessions/<mac>", methods=["GET"])
 def api_device_sessions(mac):
     if not is_authorized():
@@ -485,6 +573,7 @@ def api_device_sessions(mac):
     period = request.args.get("period", "1 day")
     return get_device_sessions(mac, period)

+
 @app.route("/sessions/session-events", methods=["GET"])
 def api_get_session_events():
     if not is_authorized():
@@ -494,6 +583,7 @@ def api_get_session_events():
     period = get_date_from_period(request.args.get("period", "7 days"))
     return get_session_events(session_event_type, period)

+
 # --------------------------
 # Prometheus metrics endpoint
 # --------------------------
@@ -503,7 +593,8 @@ def metrics():
         return jsonify({"error": "Forbidden"}), 403

     # Return Prometheus metrics as plain text
     return Response(get_metric_stats(), mimetype="text/plain")


 # --------------------------
 # In-app notifications
@@ -519,10 +610,11 @@ def api_write_notification():

     if not content:
         return jsonify({"success": False, "error": "Missing content"}), 400

     write_notification(content, level)
     return jsonify({"success": True})


 @app.route("/messaging/in-app/unread", methods=["GET"])
 def api_get_unread_notifications():
     if not is_authorized():
@@ -530,6 +622,7 @@ def api_get_unread_notifications():

     return get_unread_notifications()

+
 @app.route("/messaging/in-app/read/all", methods=["POST"])
 def api_mark_all_notifications_read():
     if not is_authorized():
@@ -537,6 +630,7 @@ def api_mark_all_notifications_read():

     return jsonify(mark_all_notifications_read())

+
 @app.route("/messaging/in-app/delete", methods=["DELETE"])
 def api_delete_all_notifications():
     if not is_authorized():
@@ -544,6 +638,7 @@ def api_delete_all_notifications():

     return delete_notifications()

+
 @app.route("/messaging/in-app/delete/<guid>", methods=["DELETE"])
 def api_delete_notification(guid):
     """Delete a single notification by GUID."""
@@ -556,6 +651,7 @@ def api_delete_notification(guid):
     else:
         return jsonify({"success": False, "error": result.get("error")}), 500

+
 @app.route("/messaging/in-app/read/<guid>", methods=["POST"])
 def api_mark_notification_read(guid):
     """Mark a single notification as read by GUID."""
@@ -567,7 +663,8 @@ def api_mark_notification_read(guid):
         return jsonify({"success": True})
     else:
         return jsonify({"success": False, "error": result.get("error")}), 500


 # --------------------------
 # SYNC endpoint
 # --------------------------
|
||||
mylog("verbose", [msg])
|
||||
return jsonify({"error": "Method Not Allowed"}), 405
|
||||
|
||||
|
||||
# --------------------------
|
||||
# Background Server Start
|
||||
# --------------------------
|
||||
@@ -594,7 +692,7 @@ def is_authorized():
     is_authorized = token == f"Bearer {get_setting_value('API_TOKEN')}"

     if not is_authorized:
-        msg = f"[api] Unauthorized access attempt - make sure your GRAPHQL_PORT and API_TOKEN settings are correct."
+        msg = "[api] Unauthorized access attempt - make sure your GRAPHQL_PORT and API_TOKEN settings are correct."
        write_notification(msg, "alert")
        mylog("verbose", [msg])
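
The fix above drops an f-string prefix from a literal with no placeholders. For context, the authorization check compares the Authorization header against a bearer string built from the API_TOKEN setting; a minimal standalone sketch of the same comparison (hypothetical names and token):

def is_authorized(headers, api_token):
    """Accept the request only when the Authorization header carries the exact bearer token."""
    return headers.get("Authorization") == f"Bearer {api_token}"

assert is_authorized({"Authorization": "Bearer s3cret"}, "s3cret")
assert not is_authorized({"Authorization": "Bearer wrong"}, "s3cret")
assert not is_authorized({}, "s3cret")  # a missing header is rejected too
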
@@ -605,19 +703,15 @@ def start_server(graphql_port, app_state):
     """Start the GraphQL server in a background thread."""

     if app_state.graphQLServerStarted == 0:
-
-        mylog('verbose', [f'[graphql endpoint] Starting on port: {graphql_port}'])
+        mylog("verbose", [f"[graphql endpoint] Starting on port: {graphql_port}"])

         # Start Flask app in a separate thread
         thread = threading.Thread(
             target=lambda: app.run(
-                host="0.0.0.0",
-                port=graphql_port,
-                debug=True,
-                use_reloader=False
+                host="0.0.0.0", port=graphql_port, debug=True, use_reloader=False
             )
         )
         thread.start()

         # Update the state to indicate the server has started
         app_state = updateState("Process: Idle", None, None, None, 1)
@@ -1,20 +1,12 @@
 #!/usr/bin/env python

 import json
-import argparse
 import os
-import pathlib
-import base64
-import re
 import sys
-from datetime import datetime
-from flask import jsonify, request, Response
-import csv
-import io
-from io import StringIO
+from flask import jsonify

 # Register NetAlertX directories
-INSTALL_PATH="/app"
+INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
 sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])

 from database import get_temp_db_connection
@@ -100,4 +92,4 @@ def delete_query(column_name, ids, dbtable):
         conn.close()
         return jsonify({"success": True, "deleted_count": deleted_count})
     except Exception as e:
         return jsonify({"success": False, "error": str(e)}), 400
@@ -1,16 +1,12 @@
 #!/usr/bin/env python

 import json
-import subprocess
-import argparse
 import os
-import pathlib
 import sys
 from datetime import datetime
 from flask import jsonify, request

 # Register NetAlertX directories
-INSTALL_PATH="/app"
+INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
 sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])

 from database import get_temp_db_connection
@@ -21,13 +17,14 @@ from db.db_helper import row_to_json, get_date_from_period
 # Device Endpoints Functions
 # --------------------------

+
 def get_device_data(mac):
     """Fetch device info with children, event stats, and presence calculation."""

     # Open temporary connection for this request
     conn = get_temp_db_connection()
     cur = conn.cursor()

     # Special case for new device
     if mac.lower() == "new":
         now = datetime.now().strftime("%Y-%m-%d %H:%M")
@@ -71,12 +68,12 @@ def get_device_data(mac):
             "devEvents": 0,
             "devDownAlerts": 0,
             "devPresenceHours": 0,
-            "devFQDN": ""
+            "devFQDN": "",
         }
         return jsonify(device_data)

     # Compute period date for sessions/events
-    period = request.args.get('period', '')  # e.g., '7 days', '1 month', etc.
+    period = request.args.get("period", "")  # e.g., '7 days', '1 month', etc.
     period_date_sql = get_date_from_period(period)
     current_date = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
@@ -128,18 +125,21 @@ def get_device_data(mac):
         return jsonify({"error": "Device not found"}), 404

     device_data = row_to_json(list(row.keys()), row)
-    device_data['devFirstConnection'] = format_date(device_data['devFirstConnection'])
-    device_data['devLastConnection'] = format_date(device_data['devLastConnection'])
-    device_data['devIsRandomMAC'] = is_random_mac(device_data['devMac'])
+    device_data["devFirstConnection"] = format_date(device_data["devFirstConnection"])
+    device_data["devLastConnection"] = format_date(device_data["devLastConnection"])
+    device_data["devIsRandomMAC"] = is_random_mac(device_data["devMac"])

     # Fetch children
-    cur.execute("SELECT * FROM Devices WHERE devParentMAC = ? ORDER BY devPresentLastScan DESC", ( device_data['devMac'],))
+    cur.execute(
+        "SELECT * FROM Devices WHERE devParentMAC = ? ORDER BY devPresentLastScan DESC",
+        (device_data["devMac"],),
+    )
     children_rows = cur.fetchall()
     children = [row_to_json(list(r.keys()), r) for r in children_rows]
     children_nics = [c for c in children if c.get("devParentRelType") == "nic"]

-    device_data['devChildrenDynamic'] = children
-    device_data['devChildrenNicsDynamic'] = children_nics
+    device_data["devChildrenDynamic"] = children
+    device_data["devChildrenNicsDynamic"] = children_nics

     conn.close()
@@ -187,7 +187,9 @@ def set_device_data(mac, data):
             data.get("devIsNew", 0),
             data.get("devIsArchived", 0),
             data.get("devLastConnection", datetime.now().strftime("%Y-%m-%d %H:%M:%S")),
-            data.get("devFirstConnection", datetime.now().strftime("%Y-%m-%d %H:%M:%S")),
+            data.get(
+                "devFirstConnection", datetime.now().strftime("%Y-%m-%d %H:%M:%S")
+            ),
             data.get("devLastIP", ""),
             data.get("devGUID", ""),
             data.get("devCustomProps", ""),
@@ -206,31 +208,31 @@ def set_device_data(mac, data):
         WHERE devMac=?
     """
     values = (
         data.get("devName", ""),
         data.get("devOwner", ""),
         data.get("devType", ""),
         data.get("devVendor", ""),
         data.get("devIcon", ""),
         data.get("devFavorite", 0),
         data.get("devGroup", ""),
         data.get("devLocation", ""),
         data.get("devComments", ""),
         data.get("devParentMAC", ""),
         data.get("devParentPort", ""),
         data.get("devSSID", ""),
         data.get("devSite", ""),
         data.get("devStaticIP", 0),
         data.get("devScan", 0),
         data.get("devAlertEvents", 0),
         data.get("devAlertDown", 0),
         data.get("devParentRelType", "default"),
         data.get("devReqNicsOnline", 0),
         data.get("devSkipRepeated", 0),
         data.get("devIsNew", 0),
         data.get("devIsArchived", 0),
         data.get("devCustomProps", ""),
-        mac
+        mac,
     )

     conn = get_temp_db_connection()
     cur = conn.cursor()
@@ -240,7 +242,6 @@ def set_device_data(mac, data):
     return jsonify({"success": True})


-
 def delete_device(mac):
     """Delete a device by MAC."""
     conn = get_temp_db_connection()
@@ -274,12 +275,13 @@ def reset_device_props(mac, data=None):
     conn.close()
     return jsonify({"success": True})


 def update_device_column(mac, column_name, column_value):
     """
     Update a specific column for a given device.
     Example: update_device_column("AA:BB:CC:DD:EE:FF", "devParentMAC", "Internet")
     """

     conn = get_temp_db_connection()
     cur = conn.cursor()
@@ -292,11 +294,12 @@ def update_device_column(mac, column_name, column_value):
         return jsonify({"success": True})
     else:
         return jsonify({"success": False, "error": "Device not found"}), 404

     conn.close()

     return jsonify({"success": True})


 def copy_device(mac_from, mac_to):
     """
     Copy a device entry from one MAC to another.
@@ -310,7 +313,10 @@ def copy_device(mac_from, mac_to):
         cur.execute("DROP TABLE IF EXISTS temp_devices")

         # Create temporary table with source device
-        cur.execute("CREATE TABLE temp_devices AS SELECT * FROM Devices WHERE devMac = ?", (mac_from,))
+        cur.execute(
+            "CREATE TABLE temp_devices AS SELECT * FROM Devices WHERE devMac = ?",
+            (mac_from,),
+        )

         # Update temporary table to target MAC
         cur.execute("UPDATE temp_devices SET devMac = ?", (mac_to,))
@@ -319,18 +325,21 @@ def copy_device(mac_from, mac_to):
         cur.execute("DELETE FROM Devices WHERE devMac = ?", (mac_to,))

         # Insert new entry from temporary table
-        cur.execute("INSERT INTO Devices SELECT * FROM temp_devices WHERE devMac = ?", (mac_to,))
+        cur.execute(
+            "INSERT INTO Devices SELECT * FROM temp_devices WHERE devMac = ?", (mac_to,)
+        )

         # Drop temporary table
         cur.execute("DROP TABLE temp_devices")

         conn.commit()
-        return jsonify({"success": True, "message": f"Device copied from {mac_from} to {mac_to}"})
-
+        return jsonify(
+            {"success": True, "message": f"Device copied from {mac_from} to {mac_to}"}
+        )

     except Exception as e:
         conn.rollback()
         return jsonify({"success": False, "error": str(e)})

     finally:
         conn.close()
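
copy_device clones a row by staging it in a temp table, rewriting the key there, and inserting it back, which sidesteps listing every column by hand. A self-contained sqlite3 sketch of the same trick (toy schema and hypothetical MACs):

import sqlite3

conn = sqlite3.connect(":memory:")
cur = conn.cursor()
cur.execute("CREATE TABLE Devices (devMac TEXT PRIMARY KEY, devName TEXT)")
cur.execute("INSERT INTO Devices VALUES ('AA:AA:AA:AA:AA:AA', 'printer')")

mac_from, mac_to = "AA:AA:AA:AA:AA:AA", "BB:BB:BB:BB:BB:BB"

# Stage the source row, rewrite its key, then copy it back under the new MAC.
cur.execute("DROP TABLE IF EXISTS temp_devices")
cur.execute("CREATE TABLE temp_devices AS SELECT * FROM Devices WHERE devMac = ?", (mac_from,))
cur.execute("UPDATE temp_devices SET devMac = ?", (mac_to,))
cur.execute("DELETE FROM Devices WHERE devMac = ?", (mac_to,))  # replace any stale target
cur.execute("INSERT INTO Devices SELECT * FROM temp_devices WHERE devMac = ?", (mac_to,))
cur.execute("DROP TABLE temp_devices")
conn.commit()

print(cur.execute("SELECT * FROM Devices ORDER BY devMac").fetchall())
# [('AA:AA:AA:AA:AA:AA', 'printer'), ('BB:BB:BB:BB:BB:BB', 'printer')]
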
@@ -1,25 +1,20 @@
 #!/usr/bin/env python

 import json
-import subprocess
-import argparse
 import os
-import pathlib
 import base64
 import re
 import sys
 from datetime import datetime
-import sqlite3
 from flask import jsonify, request, Response
 import csv
-import io
 from io import StringIO
 from logger import mylog

 # Register NetAlertX directories
-INSTALL_PATH="/app"
+INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
 sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])

 from database import get_temp_db_connection
 from helper import is_random_mac, format_date, get_setting_value
 from db.db_helper import get_table_json, get_device_condition_by_status
@@ -27,6 +22,7 @@ from db.db_helper import get_table_json, get_device_condition_by_status
 # Device Endpoints Functions
 # --------------------------

+
 def get_all_devices():
     """Retrieve all devices from the database."""
     conn = get_temp_db_connection()
@@ -41,6 +37,7 @@ def get_all_devices():
     conn.close()
     return jsonify({"success": True, "devices": devices})

+
 def delete_devices(macs):
     """
     Delete devices from the Devices table.
@@ -75,6 +72,7 @@ def delete_devices(macs):

     return jsonify({"success": True, "deleted_count": deleted_count})

+
 def delete_all_with_empty_macs():
     """Delete devices with empty MAC addresses."""
     conn = get_temp_db_connection()
@@ -85,15 +83,19 @@ def delete_all_with_empty_macs():
|
||||
conn.close()
|
||||
return jsonify({"success": True, "deleted": deleted})
|
||||
|
||||
|
||||
def delete_unknown_devices():
|
||||
"""Delete devices marked as unknown."""
|
||||
conn = get_temp_db_connection()
|
||||
cur = conn.cursor()
|
||||
cur.execute("""DELETE FROM Devices WHERE devName='(unknown)' OR devName='(name not found)'""")
|
||||
cur.execute(
|
||||
"""DELETE FROM Devices WHERE devName='(unknown)' OR devName='(name not found)'"""
|
||||
)
|
||||
conn.commit()
|
||||
conn.close()
|
||||
return jsonify({"success": True, "deleted": cur.rowcount})
|
||||
|
||||
|
||||
def export_devices(export_format):
|
||||
"""
|
||||
Export devices from the Devices table in the desired format.
@@ -112,15 +114,12 @@ def export_devices(export_format):
list(devices_json["data"][0].keys()) if devices_json["data"] else []
)


if export_format == "json":
# Convert to standard dict for Flask JSON
return jsonify({
"data": [row for row in devices_json["data"]],
"columns": list(columns)
})
return jsonify(
{"data": [row for row in devices_json["data"]], "columns": list(columns)}
)
elif export_format == "csv":

si = StringIO()
writer = csv.DictWriter(si, fieldnames=columns, quoting=csv.QUOTE_ALL)
writer.writeheader()
@@ -135,6 +134,7 @@ def export_devices(export_format):
else:
return jsonify({"error": f"Unsupported format '{export_format}'"}), 400


def import_csv(file_storage=None):
data = ""
skipped = []
@@ -143,7 +143,9 @@ def import_csv(file_storage=None):
# 1. Try JSON `content` (base64-encoded CSV)
if request.is_json and request.json.get("content"):
try:
data = base64.b64decode(request.json["content"], validate=True).decode("utf-8")
data = base64.b64decode(request.json["content"], validate=True).decode(
"utf-8"
)
except Exception as e:
return jsonify({"error": f"Base64 decode failed: {e}"}), 400

@@ -153,7 +155,8 @@ def import_csv(file_storage=None):

# 3. Fallback: try local file (same as PHP `$file = '../../../config/devices.csv';`)
else:
local_file = "/app/config/devices.csv"
config_root = os.environ.get("NETALERTX_CONFIG", "/data/config")
local_file = os.path.join(config_root, "devices.csv")
try:
with open(local_file, "r", encoding="utf-8") as f:
data = f.read()
@@ -164,11 +167,7 @@ def import_csv(file_storage=None):
return jsonify({"error": "No CSV data found"}), 400

# --- Clean up newlines inside quoted fields ---
data = re.sub(
r'"([^"]*)"',
lambda m: m.group(0).replace("\n", " "),
data
)
data = re.sub(r'"([^"]*)"', lambda m: m.group(0).replace("\n", " "), data)

# --- Parse CSV ---
lines = data.splitlines()
@@ -202,11 +201,8 @@ def import_csv(file_storage=None):
conn.commit()
conn.close()

return jsonify({
"success": True,
"inserted": row_count,
"skipped_lines": skipped
})
return jsonify({"success": True, "inserted": row_count, "skipped_lines": skipped})


def devices_totals():
conn = get_temp_db_connection()
@@ -215,15 +211,17 @@ def devices_totals():
# Build a combined query with sub-selects for each status
query = f"""
SELECT
(SELECT COUNT(*) FROM Devices {get_device_condition_by_status('my')}) AS devices,
(SELECT COUNT(*) FROM Devices {get_device_condition_by_status('connected')}) AS connected,
(SELECT COUNT(*) FROM Devices {get_device_condition_by_status('favorites')}) AS favorites,
(SELECT COUNT(*) FROM Devices {get_device_condition_by_status('new')}) AS new,
(SELECT COUNT(*) FROM Devices {get_device_condition_by_status('down')}) AS down,
(SELECT COUNT(*) FROM Devices {get_device_condition_by_status('archived')}) AS archived
(SELECT COUNT(*) FROM Devices {get_device_condition_by_status("my")}) AS devices,
(SELECT COUNT(*) FROM Devices {get_device_condition_by_status("connected")}) AS connected,
(SELECT COUNT(*) FROM Devices {get_device_condition_by_status("favorites")}) AS favorites,
(SELECT COUNT(*) FROM Devices {get_device_condition_by_status("new")}) AS new,
(SELECT COUNT(*) FROM Devices {get_device_condition_by_status("down")}) AS down,
(SELECT COUNT(*) FROM Devices {get_device_condition_by_status("archived")}) AS archived
"""
sql.execute(query)
row = sql.fetchone()  # returns a tuple like (devices, connected, favorites, new, down, archived)
row = (
sql.fetchone()
)  # returns a tuple like (devices, connected, favorites, new, down, archived)

conn.close()

@@ -252,12 +250,13 @@ def devices_by_status(status=None):
if r.get("devFavorite") == 1:
dev_name = f'<span class="text-yellow">★</span> {dev_name}'

table_data.append({
"id": r.get("devMac", ""),
"title": dev_name,
"favorite": r.get("devFavorite", 0)
})
table_data.append(
{
"id": r.get("devMac", ""),
"title": dev_name,
"favorite": r.get("devFavorite", 0),
}
)

conn.close()
return jsonify(table_data)

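As context for the base64 branch in import_csv() above, a client and the server would round-trip the payload like this (the sample CSV content is made up):

import base64

csv_text = "devMac,devName\naa:bb:cc:dd:ee:ff,printer\n"
# Client side: wrap the CSV in a JSON body as {"content": <base64>}
payload = {"content": base64.b64encode(csv_text.encode("utf-8")).decode("ascii")}
# Server side, as in import_csv() above: strict decode, errors map to HTTP 400
decoded = base64.b64decode(payload["content"], validate=True).decode("utf-8")
assert decoded == csv_text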
@@ -1,20 +1,19 @@
#!/usr/bin/env python

import json
import subprocess
import argparse
import os
import pathlib
import sys
from datetime import datetime
from flask import jsonify, request
from flask import jsonify

# Register NetAlertX directories
INSTALL_PATH="/app"
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])

from database import get_temp_db_connection
from helper import is_random_mac, format_date, get_setting_value, format_date_iso, format_event_date, timeNowTZ, mylog, ensure_datetime
from helper import (
mylog,
ensure_datetime,
)
from db.db_helper import row_to_json, get_date_from_period


@@ -24,12 +23,12 @@ from db.db_helper import row_to_json, get_date_from_period


def create_event(
mac: str,
ip: str,
event_type: str = "Device Down",
additional_info: str = "",
mac: str,
ip: str,
event_type: str = "Device Down",
additional_info: str = "",
pending_alert: int = 1,
event_time: datetime | None = None
event_time: datetime | None = None,
):
"""
Insert a single event into the Events table and return a standardized JSON response.
@@ -42,10 +41,13 @@ def create_event(

start_time = ensure_datetime(event_time)

cur.execute("""
cur.execute(
"""
INSERT INTO Events (eve_MAC, eve_IP, eve_DateTime, eve_EventType, eve_AdditionalInfo, eve_PendingAlertEmail)
VALUES (?, ?, ?, ?, ?, ?)
""", (mac, ip, start_time, event_type, additional_info, pending_alert))
""",
(mac, ip, start_time, event_type, additional_info, pending_alert),
)

conn.commit()
conn.close()
@@ -75,6 +77,7 @@ def get_events(mac=None):
conn.close()
return jsonify({"success": True, "events": events})


def delete_events_older_than(days):
"""Delete all events older than a specified number of days"""

@@ -83,15 +86,15 @@ def delete_events_older_than(days):

# Use a parameterized query with sqlite date function
sql = "DELETE FROM Events WHERE eve_DateTime <= date('now', ?)"
cur.execute(sql, [f'-{days} days'])

cur.execute(sql, [f"-{days} days"])

conn.commit()
conn.close()

return jsonify({
"success": True,
"message": f"Deleted events older than {days} days"
})
return jsonify(
{"success": True, "message": f"Deleted events older than {days} days"}
)


def delete_events():
"""Delete all events"""
@@ -107,7 +110,6 @@ def delete_events():
return jsonify({"success": True, "message": "Deleted all events"})


def get_events_totals(period: str = "7 days"):
"""
Return counts for events and sessions totals over a given period.
@@ -143,4 +145,3 @@ def get_events_totals(period: str = "7 days"):
# Return as JSON array
result_json = [row[0], row[1], row[2], row[3], row[4], row[5]]
return jsonify(result_json)

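The date('now', ?) deletion above binds the retention offset as a parameter rather than splicing it into the SQL text. A minimal demonstration against a throwaway in-memory table:

import sqlite3

conn = sqlite3.connect(":memory:")
cur = conn.cursor()
cur.execute("CREATE TABLE Events (eve_DateTime TEXT)")
cur.execute("INSERT INTO Events VALUES (date('now', '-100 days'))")

days = 90
# The offset string is a bound parameter, so `days` never touches the SQL itself
cur.execute("DELETE FROM Events WHERE eve_DateTime <= date('now', ?)", [f"-{days} days"])
print(cur.rowcount)  # 1 -- the 100-day-old event falls outside the retention window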
@@ -2,24 +2,32 @@ import graphene
from graphene import ObjectType, String, Int, Boolean, List, Field, InputObjectType
import json
import sys
import os

# Register NetAlertX directories
INSTALL_PATH="/app"
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
sys.path.extend([f"{INSTALL_PATH}/server"])

from logger import mylog
from const import apiPath
from helper import is_random_mac, get_number_of_children, format_ip_long, get_setting_value
from helper import (
is_random_mac,
get_number_of_children,
format_ip_long,
get_setting_value,
)

# Define a base URL with the user's home directory
folder = apiPath
folder = apiPath

# --- DEVICES ---

# --- DEVICES ---
# Pagination and Sorting Input Types
class SortOptionsInput(InputObjectType):
field = String()
order = String()


class FilterOptionsInput(InputObjectType):
filterColumn = String()
filterValue = String()
@@ -37,45 +45,45 @@ class PageQueryOptionsInput(InputObjectType):
# Device ObjectType
class Device(ObjectType):
rowid = Int()
devMac = String()
devName = String()
devOwner = String()
devType = String()
devVendor = String()
devFavorite = Int()
devGroup = String()
devComments = String()
devFirstConnection = String()
devLastConnection = String()
devLastIP = String()
devStaticIP = Int()
devScan = Int()
devLogEvents = Int()
devAlertEvents = Int()
devAlertDown = Int()
devSkipRepeated = Int()
devLastNotification = String()
devPresentLastScan = Int()
devIsNew = Int()
devLocation = String()
devIsArchived = Int()
devParentMAC = String()
devParentPort = String()
devIcon = String()
devGUID = String()
devSite = String()
devSSID = String()
devSyncHubNode = String()
devMac = String()
devName = String()
devOwner = String()
devType = String()
devVendor = String()
devFavorite = Int()
devGroup = String()
devComments = String()
devFirstConnection = String()
devLastConnection = String()
devLastIP = String()
devStaticIP = Int()
devScan = Int()
devLogEvents = Int()
devAlertEvents = Int()
devAlertDown = Int()
devSkipRepeated = Int()
devLastNotification = String()
devPresentLastScan = Int()
devIsNew = Int()
devLocation = String()
devIsArchived = Int()
devParentMAC = String()
devParentPort = String()
devIcon = String()
devGUID = String()
devSite = String()
devSSID = String()
devSyncHubNode = String()
devSourcePlugin = String()
devCustomProps = String()
devStatus = String()
devIsRandomMac = Int()
devParentChildrenCount = Int()
devIpLong = Int()
devFilterStatus = String()
devFQDN = String()
devParentRelType = String()
devReqNicsOnline = Int()
devIsRandomMac = Int()
devParentChildrenCount = Int()
devIpLong = Int()
devFilterStatus = String()
devFQDN = String()
devParentRelType = String()
devReqNicsOnline = Int()


class DeviceResult(ObjectType):
@@ -83,7 +91,8 @@ class DeviceResult(ObjectType):
count = Int()


# --- SETTINGS ---
# --- SETTINGS ---


# Setting ObjectType
class Setting(ObjectType):
@@ -102,110 +111,168 @@ class SettingResult(ObjectType):
settings = List(Setting)
count = Int()


# Define Query Type with Pagination Support
class Query(ObjectType):

# --- DEVICES ---
devices = Field(DeviceResult, options=PageQueryOptionsInput())

def resolve_devices(self, info, options=None):
# mylog('none', f'[graphql_schema] resolve_devices: {self}')
try:
with open(folder + 'table_devices.json', 'r') as f:
with open(folder + "table_devices.json", "r") as f:
devices_data = json.load(f)["data"]
except (FileNotFoundError, json.JSONDecodeError) as e:
mylog('none', f'[graphql_schema] Error loading devices data: {e}')
mylog("none", f"[graphql_schema] Error loading devices data: {e}")
return DeviceResult(devices=[], count=0)


# Add dynamic fields to each device
for device in devices_data:
device["devIsRandomMac"] = 1 if is_random_mac(device["devMac"]) else 0
device["devParentChildrenCount"] = get_number_of_children(device["devMac"], devices_data)
device["devParentChildrenCount"] = get_number_of_children(
device["devMac"], devices_data
)
device["devIpLong"] = format_ip_long(device.get("devLastIP", ""))

mylog('trace', f'[graphql_schema] devices_data: {devices_data}')

mylog("trace", f"[graphql_schema] devices_data: {devices_data}")

# initialize total_count
total_count = len(devices_data)

# Apply sorting if options are provided
if options:

# Define status-specific filtering
if options.status:
status = options.status
mylog('trace', f'[graphql_schema] Applying status filter: {status}')
mylog("trace", f"[graphql_schema] Applying status filter: {status}")

# Include devices matching criteria in UI_MY_DEVICES
allowed_statuses = get_setting_value("UI_MY_DEVICES")
hidden_relationships = get_setting_value("UI_hide_rel_types")
network_dev_types = get_setting_value("NETWORK_DEVICE_TYPES")
allowed_statuses = get_setting_value("UI_MY_DEVICES")
hidden_relationships = get_setting_value("UI_hide_rel_types")
network_dev_types = get_setting_value("NETWORK_DEVICE_TYPES")

mylog('trace', f'[graphql_schema] allowed_statuses: {allowed_statuses}')
mylog('trace', f'[graphql_schema] hidden_relationships: {hidden_relationships}')
mylog('trace', f'[graphql_schema] network_dev_types: {network_dev_types}')
mylog("trace", f"[graphql_schema] allowed_statuses: {allowed_statuses}")
mylog(
"trace",
f"[graphql_schema] hidden_relationships: {hidden_relationships}",
)
mylog(
"trace", f"[graphql_schema] network_dev_types: {network_dev_types}"
)

# Filtering based on the "status"
if status == "my_devices":

devices_data = [
device for device in devices_data
if ( device.get("devParentRelType") not in hidden_relationships)
device
for device in devices_data
if (device.get("devParentRelType") not in hidden_relationships)
]

devices_data = [
device for device in devices_data
device
for device in devices_data
if (
(device["devPresentLastScan"] == 1 and 'online' in allowed_statuses) or
(device["devIsNew"] == 1 and 'new' in allowed_statuses) or
(device["devPresentLastScan"] == 0 and device["devAlertDown"] and 'down' in allowed_statuses) or
(device["devPresentLastScan"] == 0 and 'offline' in allowed_statuses) and device["devIsArchived"] == 0 or
(device["devIsArchived"] == 1 and 'archived' in allowed_statuses)
(
device["devPresentLastScan"] == 1
and "online" in allowed_statuses
)
or (device["devIsNew"] == 1 and "new" in allowed_statuses)
or (
device["devPresentLastScan"] == 0
and device["devAlertDown"]
and "down" in allowed_statuses
)
or (
device["devPresentLastScan"] == 0
and "offline" in allowed_statuses
)
and device["devIsArchived"] == 0
or (
device["devIsArchived"] == 1
and "archived" in allowed_statuses
)
)
]
elif status == "connected":
devices_data = [device for device in devices_data if device["devPresentLastScan"] == 1]
devices_data = [
device
for device in devices_data
if device["devPresentLastScan"] == 1
]
elif status == "favorites":
devices_data = [device for device in devices_data if device["devFavorite"] == 1]
devices_data = [
device for device in devices_data if device["devFavorite"] == 1
]
elif status == "new":
devices_data = [device for device in devices_data if device["devIsNew"] == 1]
devices_data = [
device for device in devices_data if device["devIsNew"] == 1
]
elif status == "down":
devices_data = [
device for device in devices_data
device
for device in devices_data
if device["devPresentLastScan"] == 0 and device["devAlertDown"]
]
elif status == "archived":
devices_data = [device for device in devices_data if device["devIsArchived"] == 1]
devices_data = [
device
for device in devices_data
if device["devIsArchived"] == 1
]
elif status == "offline":
devices_data = [device for device in devices_data if device["devPresentLastScan"] == 0]
devices_data = [
device
for device in devices_data
if device["devPresentLastScan"] == 0
]
elif status == "network_devices":
devices_data = [device for device in devices_data if device["devType"] in network_dev_types]
devices_data = [
device
for device in devices_data
if device["devType"] in network_dev_types
]
elif status == "all_devices":
devices_data = devices_data  # keep all
devices_data = devices_data  # keep all

# additional filters
if options.filters:
for filter in options.filters:
if filter.filterColumn and filter.filterValue:
devices_data = [
device for device in devices_data
if str(device.get(filter.filterColumn, "")).lower() == str(filter.filterValue).lower()
device
for device in devices_data
if str(device.get(filter.filterColumn, "")).lower()
== str(filter.filterValue).lower()
]

# Search data if a search term is provided
if options.search:
# Define static list of searchable fields
searchable_fields = [
"devName", "devMac", "devOwner", "devType", "devVendor", "devLastIP",
"devGroup", "devComments", "devLocation", "devStatus", "devSSID",
"devSite", "devSourcePlugin", "devSyncHubNode", "devFQDN", "devParentRelType", "devParentMAC"
"devName",
"devMac",
"devOwner",
"devType",
"devVendor",
"devLastIP",
"devGroup",
"devComments",
"devLocation",
"devStatus",
"devSSID",
"devSite",
"devSourcePlugin",
"devSyncHubNode",
"devFQDN",
"devParentRelType",
"devParentMAC",
]

search_term = options.search.lower()

devices_data = [
device for device in devices_data
device
for device in devices_data
if any(
search_term in str(device.get(field, "")).lower()
for field in searchable_fields  # Search only predefined fields
@@ -218,12 +285,14 @@ class Query(ObjectType):
devices_data = sorted(
devices_data,
key=lambda x: mixed_type_sort_key(
x.get(sort_option.field).lower() if isinstance(x.get(sort_option.field), str) else x.get(sort_option.field)
x.get(sort_option.field).lower()
if isinstance(x.get(sort_option.field), str)
else x.get(sort_option.field)
),
reverse=(sort_option.order.lower() == "desc")
reverse=(sort_option.order.lower() == "desc"),
)

# capture total count after all the filtering and searching, BEFORE pagination
# capture total count after all the filtering and searching, BEFORE pagination
total_count = len(devices_data)

# Then apply pagination
@@ -234,24 +303,21 @@ class Query(ObjectType):

# Convert dict objects to Device instances to enable field resolution
devices = [Device(**device) for device in devices_data]

return DeviceResult(devices=devices, count=total_count)

# --- SETTINGS ---
settings = Field(SettingResult)
# --- SETTINGS ---
settings = Field(SettingResult)

def resolve_settings(root, info):

try:
with open(folder + 'table_settings.json', 'r') as f:
with open(folder + "table_settings.json", "r") as f:
settings_data = json.load(f)["data"]
except (FileNotFoundError, json.JSONDecodeError) as e:
mylog('none', f'[graphql_schema] Error loading settings data: {e}')
mylog("none", f"[graphql_schema] Error loading settings data: {e}")
return SettingResult(settings=[], count=0)

mylog('trace', f'[graphql_schema] settings_data: {settings_data}')
mylog("trace", f"[graphql_schema] settings_data: {settings_data}")

# Convert to Setting objects
settings = [Setting(**setting) for setting in settings_data]
@@ -259,15 +325,15 @@ class Query(ObjectType):
return SettingResult(settings=settings, count=len(settings))

# helps sort inconsistent datasets that mix integers and strings
def mixed_type_sort_key(value):
if value is None or value == "":
return (2, '')  # Place None or empty strings last
return (2, "")  # Place None or empty strings last
try:
return (0, int(value))  # Integers get priority
except (ValueError, TypeError):
return (1, str(value))  # Strings come next


# Schema Definition
devicesSchema = graphene.Schema(query=Query)

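To illustrate the ordering mixed_type_sort_key produces (integers numerically first, strings next, empties last); the sample values are made up:

values = [3, "10", "beta", None, 7, "", "alpha"]
print(sorted(values, key=mixed_type_sort_key))
# [3, 7, '10', 'alpha', 'beta', None, '']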
@@ -1,26 +1,21 @@
#!/usr/bin/env python

import json
import subprocess
import argparse
import os
import pathlib
import sys
from datetime import datetime
from flask import jsonify, request
from flask import jsonify

# Register NetAlertX directories
INSTALL_PATH="/app"
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])

from database import get_temp_db_connection
from helper import is_random_mac, format_date, get_setting_value


# --------------------------------------------------
# Online History Activity Endpoints Functions
# --------------------------------------------------


def delete_online_history():
"""Delete all online history activity"""

@@ -32,4 +27,4 @@ def delete_online_history():
conn.commit()
conn.close()

return jsonify({"success": True, "message": "Deleted online history"})
return jsonify({"success": True, "message": "Deleted online history"})

@@ -6,26 +6,27 @@ import shutil
import os
from flask import jsonify

# Register NetAlertX directories
INSTALL_PATH = "/app"
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])

# Resolve speedtest-cli path once at module load and validate it.
# We do this once to avoid repeated PATH lookups and to fail fast when
# the binary isn't available or executable.
SPEEDTEST_CLI_PATH = None


def _get_speedtest_cli_path():
"""Resolve and validate the speedtest-cli executable path."""
path = shutil.which("speedtest-cli")
if path is None:
raise RuntimeError(
"speedtest-cli not found in PATH. Please install it: pip install speedtest-cli"
"speedtest-cli not found in PATH. Please install it: "
"pip install speedtest-cli"
)
if not os.access(path, os.X_OK):
raise RuntimeError(f"speedtest-cli found at {path} but is not executable")
raise RuntimeError(
f"speedtest-cli found at {path} but is not executable"
)
return path


try:
SPEEDTEST_CLI_PATH = _get_speedtest_cli_path()
except Exception as e:
@@ -33,22 +34,32 @@ except Exception as e:
print(f"Warning: {e}", file=sys.stderr)
SPEEDTEST_CLI_PATH = None

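The resolve-once-at-import pattern above generalizes to any external binary the endpoints shell out to. A hedged sketch of the same idea (the tool name is arbitrary):

import os
import shutil

def resolve_tool(name):
    # One PATH lookup at import time; None signals "endpoint unavailable"
    path = shutil.which(name)
    if path is None or not os.access(path, os.X_OK):
        return None
    return path

TRACEROUTE_PATH = resolve_tool("traceroute")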
def wakeonlan(mac):

def wakeonlan(mac):
# Validate MAC
if not re.match(r'^([0-9A-Fa-f]{2}[:-]){5}([0-9A-Fa-f]{2})$', mac):
if not re.match(r"^([0-9A-Fa-f]{2}[:-]){5}([0-9A-Fa-f]{2})$", mac):
return jsonify({"success": False, "error": f"Invalid MAC: {mac}"}), 400

try:
result = subprocess.run(
["wakeonlan", mac],
capture_output=True,
text=True,
check=True
["wakeonlan", mac], capture_output=True, text=True, check=True
)
return jsonify(
{
"success": True,
"message": "WOL packet sent",
"output": result.stdout.strip(),
}
)
return jsonify({"success": True, "message": "WOL packet sent", "output": result.stdout.strip()})
except subprocess.CalledProcessError as e:
return jsonify({"success": False, "error": "Failed to send WOL packet", "details": e.stderr.strip()}), 500
return jsonify(
{
"success": False,
"error": "Failed to send WOL packet",
"details": e.stderr.strip(),
}
), 500


def traceroute(ip):
"""
@@ -77,10 +88,10 @@ def traceroute(ip):
# --------------------------
try:
result = subprocess.run(
["traceroute", ip],  # Command and argument
capture_output=True,  # Capture stdout/stderr
text=True,  # Return output as string
check=True  # Raise CalledProcessError on non-zero exit
["traceroute", ip],  # Command and argument
capture_output=True,  # Capture stdout/stderr
text=True,  # Return output as string
check=True,  # Raise CalledProcessError on non-zero exit
)
# Return success response with traceroute output
return jsonify({"success": True, "output": result.stdout.strip()})
@@ -90,11 +101,13 @@ def traceroute(ip):
# --------------------------
except subprocess.CalledProcessError as e:
# Return 500 if traceroute fails
return jsonify({
"success": False,
"error": "Traceroute failed",
"details": e.stderr.strip()
}), 500
return jsonify(
{
"success": False,
"error": "Traceroute failed",
"details": e.stderr.strip(),
}
), 500


def speedtest():
@@ -105,10 +118,12 @@ def speedtest():
# If the CLI wasn't found at module load, return a 503 so the caller
# knows the service is unavailable rather than failing unpredictably.
if SPEEDTEST_CLI_PATH is None:
return jsonify({
"success": False,
"error": "speedtest-cli is not installed or not found in PATH"
}), 503
return jsonify(
{
"success": False,
"error": "speedtest-cli is not installed or not found in PATH",
}
), 503

try:
# Run speedtest-cli command using the resolved absolute path
@@ -116,7 +131,7 @@ def speedtest():
[SPEEDTEST_CLI_PATH, "--secure", "--simple"],
capture_output=True,
text=True,
check=True
check=True,
)

# Return each line as a list
@@ -124,18 +139,22 @@ def speedtest():
return jsonify({"success": True, "output": output_lines})

except subprocess.CalledProcessError as e:
return jsonify({
"success": False,
"error": "Speedtest failed",
"details": e.stderr.strip()
}), 500
return jsonify(
{
"success": False,
"error": "Speedtest failed",
"details": e.stderr.strip(),
}
), 500

except Exception as e:
return jsonify({
"success": False,
"error": "Failed to run speedtest",
"details": str(e)
}), 500
return jsonify(
{
"success": False,
"error": "Failed to run speedtest",
"details": str(e),
}
), 500


def nslookup(ip):
@@ -147,29 +166,25 @@ def nslookup(ip):
try:
ipaddress.ip_address(ip)
except ValueError:
return jsonify({
"success": False,
"error": "Invalid IP address"
}), 400
return jsonify({"success": False, "error": "Invalid IP address"}), 400

try:
# Run nslookup command
result = subprocess.run(
["nslookup", ip],
capture_output=True,
text=True,
check=True
["nslookup", ip], capture_output=True, text=True, check=True
)

output_lines = result.stdout.strip().split("\n")
return jsonify({"success": True, "output": output_lines})

except subprocess.CalledProcessError as e:
return jsonify({
"success": False,
"error": "nslookup failed",
"details": e.stderr.strip()
}), 500
return jsonify(
{
"success": False,
"error": "nslookup failed",
"details": e.stderr.strip(),
}
), 500


def nmap_scan(ip, mode):
@@ -186,24 +201,20 @@ def nmap_scan(ip, mode):
try:
ipaddress.ip_address(ip)
except ValueError:
return jsonify({
"success": False,
"error": "Invalid IP address"
}), 400
return jsonify({"success": False, "error": "Invalid IP address"}), 400

# Map scan modes to nmap arguments
mode_args = {
"fast": ["-F"],
"normal": [],
"detail": ["-A"],
"skipdiscovery": ["-Pn"]
"skipdiscovery": ["-Pn"],
}

if mode not in mode_args:
return jsonify({
"success": False,
"error": f"Invalid scan mode '{mode}'"
}), 400
return jsonify(
{"success": False, "error": f"Invalid scan mode '{mode}'"}
), 400

try:
# Build and run nmap command
@@ -212,23 +223,22 @@ def nmap_scan(ip, mode):
cmd,
capture_output=True,
text=True,
check=True
check=True,
)

output_lines = result.stdout.strip().split("\n")
return jsonify({
"success": True,
"mode": mode,
"ip": ip,
"output": output_lines
})
return jsonify(
{"success": True, "mode": mode, "ip": ip, "output": output_lines}
)

except subprocess.CalledProcessError as e:
return jsonify({
"success": False,
"error": "nmap scan failed",
"details": e.stderr.strip()
}), 500
return jsonify(
{
"success": False,
"error": "nmap scan failed",
"details": e.stderr.strip(),
}
), 500


def internet_info():
@@ -242,7 +252,7 @@ def internet_info():
["curl", "-s", "https://ipinfo.io"],
capture_output=True,
text=True,
check=True
check=True,
)

output = result.stdout.strip()
@@ -250,13 +260,20 @@ def internet_info():
raise ValueError("Empty response from ipinfo.io")

# Clean up the JSON-like string by removing { } , and "
cleaned_output = output.replace("{", "").replace("}", "").replace(",", "").replace('"', "")
cleaned_output = (
output.replace("{", "")
.replace("}", "")
.replace(",", "")
.replace('"', "")
)

return jsonify({"success": True, "output": cleaned_output})

except (subprocess.CalledProcessError, ValueError) as e:
return jsonify({
"success": False,
"error": "Failed to fetch internet info",
"details": str(e)
}), 500
return jsonify(
{
"success": False,
"error": "Failed to fetch internet info",
"details": str(e),
}
), 500

@@ -1,51 +1,54 @@
import json
import sys
import os

# Register NetAlertX directories
INSTALL_PATH = "/app"
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
sys.path.extend([f"{INSTALL_PATH}/server"])

from logger import mylog
from const import apiPath
from helper import is_random_mac, get_number_of_children, format_ip_long, get_setting_value


def escape_label_value(val):
"""
Escape special characters for Prometheus labels.
"""
return str(val).replace('\\', '\\\\').replace('\n', '\\n').replace('"', '\\"')
return str(val).replace("\\", "\\\\").replace("\n", "\\n").replace('"', '\\"')

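Escaping matters whenever device names flow into Prometheus label values. An illustrative exposition line (the metric and label names here are made up):

name = 'Bob\'s "NAS"'
print(f'netalertx_device_info{{devName="{escape_label_value(name)}"}} 1')
# netalertx_device_info{devName="Bob's \"NAS\""} 1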
# Define a base URL with the user's home directory
folder = apiPath


def get_metric_stats():
output = []

# 1. Dashboard totals
try:
with open(folder + 'table_devices_tiles.json', 'r') as f:
with open(folder + "table_devices_tiles.json", "r") as f:
tiles_data = json.load(f)["data"]

if isinstance(tiles_data, list) and tiles_data:
totals = tiles_data[0]
output.append(f'netalertx_connected_devices {totals.get("connected", 0)}')
output.append(f'netalertx_offline_devices {totals.get("offline", 0)}')
output.append(f'netalertx_down_devices {totals.get("down", 0)}')
output.append(f'netalertx_new_devices {totals.get("new", 0)}')
output.append(f'netalertx_archived_devices {totals.get("archived", 0)}')
output.append(f'netalertx_favorite_devices {totals.get("favorites", 0)}')
output.append(f'netalertx_my_devices {totals.get("my_devices", 0)}')
output.append(f"netalertx_connected_devices {totals.get('connected', 0)}")
output.append(f"netalertx_offline_devices {totals.get('offline', 0)}")
output.append(f"netalertx_down_devices {totals.get('down', 0)}")
output.append(f"netalertx_new_devices {totals.get('new', 0)}")
output.append(f"netalertx_archived_devices {totals.get('archived', 0)}")
output.append(f"netalertx_favorite_devices {totals.get('favorites', 0)}")
output.append(f"netalertx_my_devices {totals.get('my_devices', 0)}")
else:
output.append("# Unexpected format in table_devices_tiles.json")
except (FileNotFoundError, json.JSONDecodeError) as e:
mylog('none', f'[metrics] Error loading tiles data: {e}')
mylog("none", f"[metrics] Error loading tiles data: {e}")
output.append(f"# Error loading tiles data: {e}")
except Exception as e:
output.append(f"# General error loading dashboard totals: {e}")

# 2. Device-level metrics
try:
with open(folder + 'table_devices.json', 'r') as f:
with open(folder + "table_devices.json", "r") as f:
data = json.load(f)

devices = data.get("data", [])
@@ -68,7 +71,7 @@ def get_metric_stats():
)

except (FileNotFoundError, json.JSONDecodeError) as e:
mylog('none', f'[metrics] Error loading devices data: {e}')
mylog("none", f"[metrics] Error loading devices data: {e}")
output.append(f"# Error loading devices data: {e}")
except Exception as e:
output.append(f"# General error processing device metrics: {e}")

@@ -1,39 +1,49 @@
#!/usr/bin/env python

import json
import subprocess
import argparse
import os
import pathlib
import sqlite3
import time
import sys
from datetime import datetime
from flask import jsonify, request
from flask import jsonify

# Register NetAlertX directories
INSTALL_PATH="/app"
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])

from database import get_temp_db_connection
from helper import is_random_mac, format_date, get_setting_value, format_date_iso, format_event_date, mylog, timeNowTZ, format_date_diff, format_ip_long, parse_datetime
from db.db_helper import row_to_json, get_date_from_period
from helper import (
format_date,
format_date_iso,
format_event_date,
format_date_diff,
format_ip_long,
)
from db.db_helper import get_date_from_period


# --------------------------
# Sessions Endpoints Functions
# --------------------------
# -------------------------------------------------------------------------------------------
def create_session(mac, ip, start_time, end_time=None, event_type_conn="Connected", event_type_disc="Disconnected"):
def create_session(
mac,
ip,
start_time,
end_time=None,
event_type_conn="Connected",
event_type_disc="Disconnected",
):
"""Insert a new session into Sessions table"""
conn = get_temp_db_connection()
cur = conn.cursor()

cur.execute("""
cur.execute(
"""
INSERT INTO Sessions (ses_MAC, ses_IP, ses_DateTimeConnection, ses_DateTimeDisconnection,
ses_EventTypeConnection, ses_EventTypeDisconnection)
VALUES (?, ?, ?, ?, ?, ?)
""", (mac, ip, start_time, end_time, event_type_conn, event_type_disc))
""",
(mac, ip, start_time, end_time, event_type_conn, event_type_disc),
)

conn.commit()
conn.close()
@@ -83,7 +93,6 @@ def get_sessions(mac=None, start_date=None, end_date=None):
return jsonify({"success": True, "sessions": table_data})


def get_sessions_calendar(start_date, end_date):
"""
Fetch sessions between a start and end date for calendar display.
@@ -137,7 +146,19 @@ def get_sessions_calendar(start_date, end_date):
OR SES1.ses_StillConnected = 1
"""

cur.execute(sql, (start_date, end_date, start_date, end_date, start_date, end_date, start_date, end_date))
cur.execute(
sql,
(
start_date,
end_date,
start_date,
end_date,
start_date,
end_date,
start_date,
end_date,
),
)
rows = cur.fetchall()

table_data = []
@@ -145,7 +166,10 @@ def get_sessions_calendar(start_date, end_date):
row = dict(r)

# Determine color
if row["ses_EventTypeConnection"] == "<missing event>" or row["ses_EventTypeDisconnection"] == "<missing event>":
if (
row["ses_EventTypeConnection"] == "<missing event>"
or row["ses_EventTypeDisconnection"] == "<missing event>"
):
color = "#f39c12"
elif row["ses_StillConnected"] == 1:
color = "#00a659"
@@ -160,21 +184,22 @@ def get_sessions_calendar(start_date, end_date):
)

# Append calendar entry
table_data.append({
"resourceId": row["ses_MAC"],
"title": "",
"start": format_date_iso(row["ses_DateTimeConnectionCorrected"]),
"end": format_date_iso(row["ses_DateTimeDisconnectionCorrected"]),
"color": color,
"tooltip": tooltip,
"className": "no-border"
})
table_data.append(
{
"resourceId": row["ses_MAC"],
"title": "",
"start": format_date_iso(row["ses_DateTimeConnectionCorrected"]),
"end": format_date_iso(row["ses_DateTimeDisconnectionCorrected"]),
"color": color,
"tooltip": tooltip,
"className": "no-border",
}
)

conn.close()
return jsonify({"success": True, "sessions": table_data})


def get_device_sessions(mac, period):
"""
Fetch device sessions for a given MAC address and period.
@@ -203,7 +228,6 @@ def get_device_sessions(mac, period):
)
"""

cur.execute(sql, (mac,))
rows = cur.fetchall()
conn.close()
@@ -226,12 +250,16 @@ def get_device_sessions(mac, period):
end = format_date(row["ses_DateTimeDisconnection"])

# Duration
if row["ses_EventTypeConnection"] in ("<missing event>", None) or row["ses_EventTypeDisconnection"] in ("<missing event>", None):
if row["ses_EventTypeConnection"] in ("<missing event>", None) or row[
"ses_EventTypeDisconnection"
] in ("<missing event>", None):
dur = "..."
elif row["ses_StillConnected"]:
dur = format_date_diff(row["ses_DateTimeConnection"], None)["text"]
else:
dur = format_date_diff(row["ses_DateTimeConnection"], row["ses_DateTimeDisconnection"])["text"]
dur = format_date_diff(
row["ses_DateTimeConnection"], row["ses_DateTimeDisconnection"]
)["text"]

# Additional Info
info = row["ses_AdditionalInfo"]
@@ -239,15 +267,17 @@ def get_device_sessions(mac, period):
info = f"{row['ses_EventTypeConnection']}: {info}"

# Push row data
table_data["data"].append({
"ses_MAC": mac,
"ses_DateTimeOrder": row["ses_DateTimeOrder"],
"ses_Connection": ini,
"ses_Disconnection": end,
"ses_Duration": dur,
"ses_IP": row["ses_IP"],
"ses_Info": info,
})
table_data["data"].append(
{
"ses_MAC": mac,
"ses_DateTimeOrder": row["ses_DateTimeOrder"],
"ses_Connection": ini,
"ses_Disconnection": end,
"ses_Duration": dur,
"ses_IP": row["ses_IP"],
"ses_Info": info,
}
)

# Control no rows
if not table_data["data"]:
@@ -255,10 +285,7 @@ def get_device_sessions(mac, period):

sessions = table_data["data"]

return jsonify({
"success": True,
"sessions": sessions
})
return jsonify({"success": True, "sessions": sessions})


def get_session_events(event_type, period_date):
@@ -291,7 +318,7 @@ def get_session_events(event_type, period_date):
WHERE eve_DateTime >= {period_date}
"""

sql_sessions = f"""
sql_sessions = """
SELECT
IFNULL(ses_DateTimeConnection, ses_DateTimeDisconnection) AS ses_DateTimeOrder,
devName,
@@ -314,20 +341,26 @@ def get_session_events(event_type, period_date):
if event_type == "all":
sql = sql_events
elif event_type == "sessions":
sql = sql_sessions + f"""
sql = (
sql_sessions
+ f"""
WHERE (
ses_DateTimeConnection >= {period_date}
OR ses_DateTimeDisconnection >= {period_date}
OR ses_StillConnected = 1
)
"""
)
elif event_type == "missing":
sql = sql_sessions + f"""
sql = (
sql_sessions
+ f"""
WHERE (
(ses_DateTimeConnection IS NULL AND ses_DateTimeDisconnection >= {period_date})
OR (ses_DateTimeDisconnection IS NULL AND ses_StillConnected = 0 AND ses_DateTimeConnection >= {period_date})
)
"""
)
elif event_type == "voided":
sql = sql_events + ' AND eve_EventType LIKE "VOIDED%"'
elif event_type == "new":
@@ -335,7 +368,7 @@ def get_session_events(event_type, period_date):
elif event_type == "down":
sql = sql_events + ' AND eve_EventType = "Device Down"'
else:
sql = sql_events + ' AND 1=0'
sql = sql_events + " AND 1=0"

cur.execute(sql)
rows = cur.fetchall()

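The dispatch above composes one base SELECT with a per-type WHERE clause; period_date comes from get_date_from_period(), not raw user input. A condensed sketch of the same shape (names abbreviated, not the full query):

def build_query(event_type, sql_events, sql_sessions, period_date):
    # Unknown types fall through to a clause that matches nothing
    if event_type == "sessions":
        return sql_sessions + f" WHERE ses_DateTimeConnection >= {period_date} OR ses_StillConnected = 1"
    if event_type == "voided":
        return sql_events + ' AND eve_EventType LIKE "VOIDED%"'
    return sql_events + " AND 1=0"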
@@ -5,7 +5,8 @@ from logger import mylog
from helper import get_setting_value, timeNowTZ
from messaging.in_app import write_notification

INSTALL_PATH = "/app"
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")


def handle_sync_get():
"""Handle GET requests for SYNC (NODE → HUB)."""
@@ -23,13 +24,15 @@ def handle_sync_get():
response_data = base64.b64encode(raw_data).decode("utf-8")

write_notification("[Plugin: SYNC] Data sent", "info", timeNowTZ())
return jsonify({
"node_name": get_setting_value("SYNC_node_name"),
"status": 200,
"message": "OK",
"data_base64": response_data,
"timestamp": timeNowTZ()
}), 200
return jsonify(
{
"node_name": get_setting_value("SYNC_node_name"),
"status": 200,
"message": "OK",
"data_base64": response_data,
"timestamp": timeNowTZ(),
}
), 200


def handle_sync_post():
@@ -42,18 +45,19 @@ def handle_sync_post():
os.makedirs(storage_path, exist_ok=True)

encoded_files = [
f for f in os.listdir(storage_path)
f
for f in os.listdir(storage_path)
if f.startswith(f"last_result.{plugin}.encoded.{node_name}")
]
decoded_files = [
f for f in os.listdir(storage_path)
f
for f in os.listdir(storage_path)
if f.startswith(f"last_result.{plugin}.decoded.{node_name}")
]
file_count = len(encoded_files + decoded_files) + 1

file_path_new = os.path.join(
storage_path,
f"last_result.{plugin}.encoded.{node_name}.{file_count}.log"
storage_path, f"last_result.{plugin}.encoded.{node_name}.{file_count}.log"
)

try:

@@ -1,24 +1,23 @@
import os
import json

import conf
from const import *
from logger import mylog, logResult
from logger import mylog
from helper import timeNowTZ, timeNow, checkNewVersion

# Register NetAlertX directories
INSTALL_PATH="/app"
# Register NetAlertX directories using runtime configuration
INSTALL_PATH = applicationPath


#-------------------------------------------------------------------------------
# -------------------------------------------------------------------------------
# App state
#-------------------------------------------------------------------------------
# -------------------------------------------------------------------------------
# A class to manage the application state and to provide a frontend accessible API point
# To keep an existing value pass None
class app_state_class:
"""
Represents the current state of the application for frontend communication.


Attributes:
lastUpdated (str): Timestamp of the last update.
settingsSaved (int): Flag indicating if settings were saved.
@@ -32,13 +31,16 @@ class app_state_class:
isNewVersionChecked (int): Timestamp of last version check.
"""

def __init__(self, currentState=None,
settingsSaved=None,
settingsImported=None,
showSpinner=None,
graphQLServerStarted=0,
processScan=False,
pluginsStates=None):
def __init__(
self,
currentState=None,
settingsSaved=None,
settingsImported=None,
showSpinner=None,
graphQLServerStarted=0,
processScan=False,
pluginsStates=None,
):
"""
Initialize the application state, optionally overwriting previous values.

@@ -55,40 +57,42 @@ class app_state_class:
pluginsStates (dict, optional): Initial plugin states to merge with previous state.
"""
# json file containing the state to communicate with the frontend
stateFile = apiPath + 'app_state.json'
stateFile = apiPath + "app_state.json"
previousState = ""

# Update self
self.lastUpdated = str(timeNowTZ())

if os.path.exists(stateFile):
try:
with open(stateFile, 'r') as json_file:
try:
with open(stateFile, "r") as json_file:
previousState = json.load(json_file)
except json.decoder.JSONDecodeError as e:
mylog('none', [f'[app_state_class] Failed to handle app_state.json: {e}'])
mylog(
"none", [f"[app_state_class] Failed to handle app_state.json: {e}"]
)

# Check if the file exists and recover previous values
if previousState != "":
self.settingsSaved = previousState.get("settingsSaved", 0)
self.settingsImported = previousState.get("settingsImported", 0)
self.processScan = previousState.get("processScan", False)
self.showSpinner = previousState.get("showSpinner", False)
self.isNewVersion = previousState.get("isNewVersion", False)
self.isNewVersionChecked = previousState.get("isNewVersionChecked", 0)
self.graphQLServerStarted = previousState.get("graphQLServerStarted", 0)
self.currentState = previousState.get("currentState", "Init")
self.pluginsStates = previousState.get("pluginsStates", {})
else:  # init first time values
self.settingsSaved = 0
self.settingsImported = 0
self.showSpinner = False
self.processScan = False
self.isNewVersion = checkNewVersion()
self.isNewVersionChecked = int(timeNow().timestamp())
self.graphQLServerStarted = 0
self.currentState = "Init"
self.pluginsStates = {}
if previousState != "":
self.settingsSaved = previousState.get("settingsSaved", 0)
self.settingsImported = previousState.get("settingsImported", 0)
self.processScan = previousState.get("processScan", False)
self.showSpinner = previousState.get("showSpinner", False)
self.isNewVersion = previousState.get("isNewVersion", False)
self.isNewVersionChecked = previousState.get("isNewVersionChecked", 0)
self.graphQLServerStarted = previousState.get("graphQLServerStarted", 0)
self.currentState = previousState.get("currentState", "Init")
self.pluginsStates = previousState.get("pluginsStates", {})
else:  # init first time values
self.settingsSaved = 0
self.settingsImported = 0
self.showSpinner = False
self.processScan = False
self.isNewVersion = checkNewVersion()
self.isNewVersionChecked = int(timeNow().timestamp())
self.graphQLServerStarted = 0
self.currentState = "Init"
self.pluginsStates = {}

# Overwrite with provided parameters if supplied
if settingsSaved is not None:
@@ -107,8 +111,10 @@ class app_state_class:
if pluginsStates is not None:
for plugin, state in pluginsStates.items():
if plugin in self.pluginsStates:
# Only update existing keys if both are dicts
if isinstance(self.pluginsStates[plugin], dict) and isinstance(state, dict):
# Only update existing keys if both are dicts
if isinstance(self.pluginsStates[plugin], dict) and isinstance(
state, dict
):
self.pluginsStates[plugin].update(state)
else:
# Replace if types don't match
@@ -119,41 +125,47 @@ class app_state_class:
self.pluginsStates[plugin] = state

# check for new version every hour and if currently not running new version
if self.isNewVersion is False and self.isNewVersionChecked + 3600 < int(timeNow().timestamp()):
self.isNewVersion = checkNewVersion()
self.isNewVersionChecked = int(timeNow().timestamp())
if self.isNewVersion is False and self.isNewVersionChecked + 3600 < int(
timeNow().timestamp()
):
self.isNewVersion = checkNewVersion()
self.isNewVersionChecked = int(timeNow().timestamp())

# Update .json file
# with open(stateFile, 'w') as json_file:
#     json.dump(self, json_file, cls=AppStateEncoder, indent=4)


# Remove lastUpdated from the dictionary for comparison
currentStateDict = self.__dict__.copy()
currentStateDict.pop('lastUpdated', None)
currentStateDict.pop("lastUpdated", None)

# Compare current state with previous state before updating
if previousState != currentStateDict:
# Sanity check before saving the .json file
try:
json_data = json.dumps(self, cls=AppStateEncoder, indent=4)
with open(stateFile, 'w') as json_file:
with open(stateFile, "w") as json_file:
json_file.write(json_data)
except (TypeError, ValueError) as e:
mylog('none', [f'[app_state_class] Failed to serialize object to JSON: {e}'])
mylog(
"none",
[f"[app_state_class] Failed to serialize object to JSON: {e}"],
)

return
return


#-------------------------------------------------------------------------------
# -------------------------------------------------------------------------------
# method to update the state
def updateState(newState = None,
settingsSaved = None,
settingsImported = None,
showSpinner = None,
graphQLServerStarted = None,
processScan = None,
pluginsStates=None):
def updateState(
newState=None,
settingsSaved=None,
settingsImported=None,
showSpinner=None,
graphQLServerStarted=None,
processScan=None,
pluginsStates=None,
):
"""
Convenience method to create or update the app state.

@@ -169,25 +181,28 @@ def updateState(newState = None,
Returns:
app_state_class: Updated state object.
"""
return app_state_class( newState,
settingsSaved,
settingsImported,
showSpinner,
graphQLServerStarted,
processScan,
pluginsStates)
return app_state_class(
newState,
settingsSaved,
settingsImported,
showSpinner,
graphQLServerStarted,
processScan,
pluginsStates,
)


#-------------------------------------------------------------------------------
# Checks if the object has a __dict__ attribute. If it does, it assumes that it's an instance of a class and serializes its attributes dynamically.
# -------------------------------------------------------------------------------
# Checks if the object has a __dict__ attribute. If it does, it assumes that it's an instance of a class and serializes its attributes dynamically.
class AppStateEncoder(json.JSONEncoder):
"""
JSON encoder for application state objects.

Automatically serializes objects with a __dict__ attribute.
"""

def default(self, obj):
if hasattr(obj, '__dict__'):
if hasattr(obj, "__dict__"):
# If the object has a '__dict__', assume it's an instance of a class
return obj.__dict__
return super().default(obj)

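Because AppStateEncoder falls back to __dict__, the whole state object serializes in one call. A tiny sketch (the Demo class is hypothetical, standing in for app_state_class):

import json

class Demo:
    def __init__(self):
        self.currentState = "Init"
        self.showSpinner = False

# The encoder turns the instance into a plain JSON object of its attributes
print(json.dumps(Demo(), cls=AppStateEncoder, indent=4))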
@@ -1,26 +1,26 @@
|
||||
""" config related functions for NetAlertX """
|
||||
"""config related functions for NetAlertX"""
|
||||
|
||||
# TODO: Create and manage this as part of an app_state class object
|
||||
#===============================================================================
|
||||
# ===============================================================================
|
||||
|
||||
# These are global variables, not config items and should not exist !
|
||||
mySettings = []
|
||||
mySettingsSQLsafe = []
|
||||
cycle = 1
|
||||
userSubnets = []
|
||||
mySchedules = [] # bad solution for global - TO-DO
|
||||
tz = ''
|
||||
mySchedules = [] # bad solution for global - TO-DO
|
||||
tz = ""
|
||||
|
||||
# modified time of the most recently imported config file
|
||||
# set to a small value to force import at first run
|
||||
lastImportedConfFile = 1.1
|
||||
lastImportedConfFile = 1.1
|
||||
|
||||
plugins_once_run = False
|
||||
newVersionAvailable = False
|
||||
time_started = ''
|
||||
startTime = ''
|
||||
last_scan_run = ''
|
||||
last_version_check = ''
|
||||
time_started = ""
|
||||
startTime = ""
|
||||
last_scan_run = ""
|
||||
last_version_check = ""
|
||||
arpscan_devices = []
|
||||

# ACTUAL CONFIGURATION ITEMS set to defaults
@@ -28,19 +28,19 @@ arpscan_devices = []
# -------------------------------------------
# General
# -------------------------------------------
SCAN_SUBNETS = ['192.168.1.0/24 --interface=eth1', '192.168.1.0/24 --interface=eth0']
LOG_LEVEL = 'verbose'
TIMEZONE = 'Europe/Berlin'
UI_LANG = 'English (en_us)'
UI_PRESENCE = ['online', 'offline', 'archived']
UI_MY_DEVICES = ['online', 'offline', 'archived', 'new', 'down']
SCAN_SUBNETS = ["192.168.1.0/24 --interface=eth1", "192.168.1.0/24 --interface=eth0"]
LOG_LEVEL = "verbose"
TIMEZONE = "Europe/Berlin"
UI_LANG = "English (en_us)"
UI_PRESENCE = ["online", "offline", "archived"]
UI_MY_DEVICES = ["online", "offline", "archived", "new", "down"]
UI_NOT_RANDOM_MAC = []
DAYS_TO_KEEP_EVENTS = 90
REPORT_DASHBOARD_URL = 'http://netalertx/'
DAYS_TO_KEEP_EVENTS = 90
REPORT_DASHBOARD_URL = "http://netalertx/"

# -------------------------------------------
# Misc
# -------------------------------------------

# API
API_CUSTOM_SQL = 'SELECT * FROM Devices WHERE devPresentLastScan = 0'
# API
API_CUSTOM_SQL = "SELECT * FROM Devices WHERE devPresentLastScan = 0"

server/config_paths.py (new file, 109 lines)
@@ -0,0 +1,109 @@
"""Runtime path helpers for NetAlertX.

This module centralises path resolution so code can rely on the
Docker environment variables while still working during local
development and testing where those variables may not be set.
"""

from __future__ import annotations

import os
import sys
from pathlib import Path

__all__ = [
    "APP_PATH",
    "DATA_PATH",
    "CONFIG_PATH",
    "DB_PATH",
    "TMP_PATH",
    "API_PATH",
    "LOG_PATH",
    "FRONT_PATH",
    "SERVER_PATH",
    "BACK_PATH",
    "PLUGINS_PATH",
    "REPORT_TEMPLATES_PATH",
    "API_PATH_WITH_TRAILING_SEP",
    "LOG_PATH_WITH_TRAILING_SEP",
    "CONFIG_PATH_WITH_TRAILING_SEP",
    "DB_PATH_WITH_TRAILING_SEP",
    "PLUGINS_PATH_WITH_TRAILING_SEP",
    "REPORT_TEMPLATES_PATH_WITH_TRAILING_SEP",
    "ensure_trailing_sep",
    "APP_PATH_STR",
    "DATA_PATH_STR",
    "CONFIG_PATH_STR",
    "DB_PATH_STR",
    "TMP_PATH_STR",
    "API_PATH_STR",
    "LOG_PATH_STR",
    "FRONT_PATH_STR",
    "SERVER_PATH_STR",
    "BACK_PATH_STR",
    "PLUGINS_PATH_STR",
    "REPORT_TEMPLATES_PATH_STR",
    "ensure_in_syspath",
]

_DEFAULT_APP_PATH = Path("/app")
_DEFAULT_DATA_PATH = Path("/data")
_DEFAULT_TMP_PATH = Path("/tmp")


def _resolve_env_path(variable: str, default: Path) -> Path:
    """Return the path from the environment or fall back to *default*."""
    value = os.getenv(variable)
    if value:
        return Path(value)
    return default


def ensure_trailing_sep(path: Path) -> str:
    """Return *path* as a string that always ends with the OS separator."""
    path_str = str(path)
    return path_str if path_str.endswith(os.sep) else f"{path_str}{os.sep}"


APP_PATH = _resolve_env_path("NETALERTX_APP", _DEFAULT_APP_PATH)
DATA_PATH = _resolve_env_path("NETALERTX_DATA", _DEFAULT_DATA_PATH)
CONFIG_PATH = _resolve_env_path("NETALERTX_CONFIG", DATA_PATH / "config")
DB_PATH = _resolve_env_path("NETALERTX_DB", DATA_PATH / "db")

TMP_PATH = _resolve_env_path("NETALERTX_TMP", _DEFAULT_TMP_PATH)
API_PATH = _resolve_env_path("NETALERTX_API", TMP_PATH / "api")
LOG_PATH = _resolve_env_path("NETALERTX_LOG", TMP_PATH / "log")

FRONT_PATH = APP_PATH / "front"
SERVER_PATH = APP_PATH / "server"
BACK_PATH = APP_PATH / "back"
PLUGINS_PATH = FRONT_PATH / "plugins"
REPORT_TEMPLATES_PATH = FRONT_PATH / "report_templates"

API_PATH_WITH_TRAILING_SEP = ensure_trailing_sep(API_PATH)
LOG_PATH_WITH_TRAILING_SEP = ensure_trailing_sep(LOG_PATH)
CONFIG_PATH_WITH_TRAILING_SEP = ensure_trailing_sep(CONFIG_PATH)
DB_PATH_WITH_TRAILING_SEP = ensure_trailing_sep(DB_PATH)
PLUGINS_PATH_WITH_TRAILING_SEP = ensure_trailing_sep(PLUGINS_PATH)
REPORT_TEMPLATES_PATH_WITH_TRAILING_SEP = ensure_trailing_sep(REPORT_TEMPLATES_PATH)

APP_PATH_STR = str(APP_PATH)
DATA_PATH_STR = str(DATA_PATH)
CONFIG_PATH_STR = str(CONFIG_PATH)
DB_PATH_STR = str(DB_PATH)
TMP_PATH_STR = str(TMP_PATH)
API_PATH_STR = str(API_PATH)
LOG_PATH_STR = str(LOG_PATH)
FRONT_PATH_STR = str(FRONT_PATH)
SERVER_PATH_STR = str(SERVER_PATH)
BACK_PATH_STR = str(BACK_PATH)
PLUGINS_PATH_STR = str(PLUGINS_PATH)
REPORT_TEMPLATES_PATH_STR = str(REPORT_TEMPLATES_PATH)


def ensure_in_syspath(path: Path) -> str:
    """Add *path* to ``sys.path`` if missing and return the string value."""
    path_str = str(path)
    if path_str not in sys.path:
        sys.path.append(path_str)
    return path_str
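
A quick behavioural sketch (the override value is an example; note that the paths are resolved at import time, so the variable must be set before config_paths is imported):

import os
os.environ["NETALERTX_DATA"] = "/srv/netalertx-data"   # example override

import config_paths

print(config_paths.CONFIG_PATH)                # /srv/netalertx-data/config
print(config_paths.DB_PATH_WITH_TRAILING_SEP)  # /srv/netalertx-data/db/
config_paths.ensure_in_syspath(config_paths.SERVER_PATH)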

@@ -1,34 +1,56 @@
""" CONSTANTS for NetAlertX """
"""CONSTANTS for NetAlertX"""

import os

#===============================================================================
from config_paths import (
    API_PATH_STR,
    API_PATH_WITH_TRAILING_SEP,
    APP_PATH_STR,
    CONFIG_PATH_STR,
    CONFIG_PATH_WITH_TRAILING_SEP,
    DATA_PATH_STR,
    DB_PATH_STR,
    DB_PATH_WITH_TRAILING_SEP,
    LOG_PATH_STR,
    LOG_PATH_WITH_TRAILING_SEP,
    PLUGINS_PATH_WITH_TRAILING_SEP,
    REPORT_TEMPLATES_PATH_WITH_TRAILING_SEP,
)

# ===============================================================================
# PATHS
#===============================================================================
# ===============================================================================

applicationPath = APP_PATH_STR
dataPath = DATA_PATH_STR
configPath = CONFIG_PATH_STR
dbFolderPath = DB_PATH_STR
apiRoot = API_PATH_STR
logRoot = LOG_PATH_STR

dbFileName = "app.db"
confFileName = "app.conf"

confPath = CONFIG_PATH_WITH_TRAILING_SEP + confFileName
dbPath = DB_PATH_WITH_TRAILING_SEP + dbFileName
pluginsPath = PLUGINS_PATH_WITH_TRAILING_SEP.rstrip(os.sep)
logPath = LOG_PATH_WITH_TRAILING_SEP.rstrip(os.sep)
apiPath = API_PATH_WITH_TRAILING_SEP
reportTemplatesPath = REPORT_TEMPLATES_PATH_WITH_TRAILING_SEP
fullConfFolder = configPath
fullConfPath = confPath
fullDbPath = dbPath
vendorsPath = os.getenv("VENDORSPATH", "/usr/share/arp-scan/ieee-oui.txt")
vendorsPathNewest = os.getenv(
    "VENDORSPATH_NEWEST", "/usr/share/arp-scan/ieee-oui_all_filtered.txt"
)

default_tz = "Europe/Berlin"


applicationPath = '/app'
dbFileName = 'app.db'
confFileName = 'app.conf'
confPath = "/config/" + confFileName
dbPath = '/db/' + dbFileName


pluginsPath = applicationPath + '/front/plugins'
logPath = applicationPath + '/log'
apiPath = applicationPath + '/api/'
reportTemplatesPath = applicationPath + '/front/report_templates/'
fullConfFolder = applicationPath + '/config'
fullConfPath = applicationPath + confPath
fullDbPath = applicationPath + dbPath
vendorsPath = os.getenv('VENDORSPATH', '/usr/share/arp-scan/ieee-oui.txt')
vendorsPathNewest = os.getenv('VENDORSPATH_NEWEST', '/usr/share/arp-scan/ieee-oui_all_filtered.txt')

default_tz = 'Europe/Berlin'


#===============================================================================
# ===============================================================================
# SQL queries
#===============================================================================
# ===============================================================================
sql_devices_all = """
    SELECT
        rowid,
@@ -78,8 +100,8 @@ sql_devices_all = """
"""

sql_appevents = """select * from AppEvents order by DateTimeCreated desc"""
# The below query calculates counts of devices in various categories:
# (connected/online, offline, down, new, archived),
# The below query calculates counts of devices in various categories:
# (connected/online, offline, down, new, archived),
# as well as a combined count for devices that match any status listed in the UI_MY_DEVICES setting
sql_devices_tiles = """
    WITH Statuses AS (
@@ -142,7 +164,7 @@ sql_devices_filters = """
    FROM Devices WHERE devSSID NOT IN ('', 'null') AND devSSID IS NOT NULL
    ORDER BY columnName;
"""
sql_devices_stats = """SELECT Online_Devices as online, Down_Devices as down, All_Devices as 'all', Archived_Devices as archived,
sql_devices_stats = """SELECT Online_Devices as online, Down_Devices as down, All_Devices as 'all', Archived_Devices as archived,
                       (select count(*) from Devices a where devIsNew = 1 ) as new,
                       (select count(*) from Devices a where devName = '(unknown)' or devName = '(name not found)' ) as unknown
                       from Online_History order by Scan_Date desc limit 1"""
@@ -165,7 +187,7 @@ sql_new_devices = """SELECT * FROM (
                    ON t1.devMac = t2.devMac_t2"""


sql_generateGuid = '''
sql_generateGuid = """
    lower(
        hex(randomblob(4)) || '-' || hex(randomblob(2)) || '-' || '4' ||
        substr(hex( randomblob(2)), 2) || '-' ||
@@ -173,4 +195,4 @@ sql_generateGuid = '''
        substr(hex(randomblob(2)), 2) || '-' ||
        hex(randomblob(6))
    )
'''
"""

@@ -25,42 +25,48 @@ import uuid

# pycryptodome -------------------------------------------------------------------------


def prepare_key(encryption_key):
    key = hashlib.sha256(encryption_key.encode()).digest()
    return key


def encrypt_data(data, encryption_key):
    key = prepare_key(encryption_key)
    cipher = AES.new(key, AES.MODE_CBC)
    ct_bytes = cipher.encrypt(pad(data.encode('utf-8'), AES.block_size))
    iv = base64.b64encode(cipher.iv).decode('utf-8')
    ct = base64.b64encode(ct_bytes).decode('utf-8')
    ct_bytes = cipher.encrypt(pad(data.encode("utf-8"), AES.block_size))
    iv = base64.b64encode(cipher.iv).decode("utf-8")
    ct = base64.b64encode(ct_bytes).decode("utf-8")
    return iv + ct


def decrypt_data(data, encryption_key):
    key = prepare_key(encryption_key)
    iv = base64.b64decode(data[:24])
    ct = base64.b64decode(data[24:])
    cipher = AES.new(key, AES.MODE_CBC, iv)
    pt = unpad(cipher.decrypt(ct), AES.block_size)
    return pt.decode('utf-8')
    return pt.decode("utf-8")


#-------------------------------------------------------------------------------
# -------------------------------------------------------------------------------
def get_random_bytes(length):
    # Generate random bytes
    random_bytes = os.urandom(length)

    # Convert bytes to hexadecimal string
    hex_string = random_bytes.hex()

    # Format hexadecimal string with hyphens
    formatted_hex = '-'.join(hex_string[i:i+2] for i in range(0, len(hex_string), 2))
    formatted_hex = "-".join(
        hex_string[i : i + 2] for i in range(0, len(hex_string), 2)
    )

    return formatted_hex
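
The result is hex byte pairs joined by hyphens; the actual output is random, for example:

print(get_random_bytes(4))   # e.g. "9f-01-ce-7a"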

#-------------------------------------------------------------------------------
# -------------------------------------------------------------------------------
def generate_deterministic_guid(plugin, primary_id, secondary_id):
    """Generates a deterministic GUID based on plugin, primary ID, and secondary ID."""
    data = f"{plugin}-{primary_id}-{secondary_id}".encode("utf-8")
    return str(uuid.UUID(hashlib.md5(data).hexdigest()))
    return str(uuid.UUID(hashlib.md5(data).hexdigest()))
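
Because the GUID is derived from an MD5 of the three inputs, equal inputs always yield the same GUID (the IDs below are examples):

a = generate_deterministic_guid("ARPSCAN", "00:11:22:33:44:55", "192.168.1.10")
b = generate_deterministic_guid("ARPSCAN", "00:11:22:33:44:55", "192.168.1.10")
assert a == b   # stable across runs, so plugin objects keep their identity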

@@ -1,20 +1,25 @@
""" all things database to support NetAlertX """
"""all things database to support NetAlertX"""

import sqlite3

# Register NetAlertX modules
# Register NetAlertX modules
from const import fullDbPath, sql_devices_stats, sql_devices_all

from logger import mylog
from db.db_helper import get_table_json, json_obj
from workflows.app_events import AppEvent_obj
from db.db_upgrade import ensure_column, \
    ensure_views, ensure_CurrentScan, \
    ensure_plugins_tables, ensure_Parameters, \
    ensure_Settings, ensure_Indexes
from db.db_upgrade import (
    ensure_column,
    ensure_views,
    ensure_CurrentScan,
    ensure_plugins_tables,
    ensure_Parameters,
    ensure_Settings,
    ensure_Indexes,
)


class DB():
class DB:
    """
    DB Class to provide the basic database interactions.
    Open / Commit / Close / read / write
@@ -50,31 +55,30 @@ class DB():
        """
        # Check if DB is open
        if self.sql_connection is not None:
            mylog('debug', ['[Database] - open: DB already open'])
            mylog("debug", ["[Database] - open: DB already open"])
            return

        mylog('verbose', '[Database] Opening DB')
        mylog("verbose", "[Database] Opening DB")
        # Open DB and Cursor
        try:
            self.sql_connection = sqlite3.connect(fullDbPath,
                                                  isolation_level=None)
            self.sql_connection = sqlite3.connect(fullDbPath, isolation_level=None)

            # The WAL journaling mode uses a write-ahead log instead of a
            # rollback journal to implement transactions.
            self.sql_connection.execute('pragma journal_mode=WAL;')
            self.sql_connection.execute("pragma journal_mode=WAL;")
            # When synchronous is NORMAL (1), the SQLite database engine will
            # still sync at the most critical moments,
            # but less often than in FULL mode.
            self.sql_connection.execute('PRAGMA synchronous=NORMAL;')
            self.sql_connection.execute("PRAGMA synchronous=NORMAL;")
            # When temp_store is MEMORY (2) temporary tables and indices
            # are kept as if they were in pure in-memory databases.
            self.sql_connection.execute('PRAGMA temp_store=MEMORY;')
            self.sql_connection.execute("PRAGMA temp_store=MEMORY;")

            self.sql_connection.text_factory = str
            self.sql_connection.row_factory = sqlite3.Row
            self.sql = self.sql_connection.cursor()
        except sqlite3.Error as e:
            mylog('minimal', ['[Database] - Open DB Error: ', e])
            mylog("minimal", ["[Database] - Open DB Error: ", e])

    def commitDB(self):
        """
@@ -83,7 +87,7 @@ class DB():
            bool: True if the commit was successful, False if the database connection is not open.
        """
        if self.sql_connection is None:
            mylog('debug', 'commitDB: database is not open')
            mylog("debug", "commitDB: database is not open")
            return False

        # Commit changes to DB
@@ -109,7 +113,7 @@ class DB():
        Returns None if the database connection is not open.
        """
        if self.sql_connection is None:
            mylog('debug', 'getQueryArray: database is not open')
            mylog("debug", "getQueryArray: database is not open")
            return

        self.sql.execute(query)
@@ -138,7 +142,7 @@ class DB():

        try:
            # Start transactional upgrade
            self.sql_connection.execute('BEGIN IMMEDIATE;')
            self.sql_connection.execute("BEGIN IMMEDIATE;")

            # Add Devices fields if missing
            if not ensure_column(self.sql, "Devices", "devFQDN", "TEXT"):
@@ -169,14 +173,13 @@ class DB():
            # commit changes
            self.commitDB()
        except Exception as e:
            mylog('minimal', ['[Database] - initDB ERROR:', e])
            mylog("minimal", ["[Database] - initDB ERROR:", e])
            self.rollbackDB()  # rollback any changes on error
            raise  # re-raise the exception

        # Init the AppEvent database table
        AppEvent_obj(self)


    # #-------------------------------------------------------------------------------
    # def get_table_as_json(self, sqlQuery):

@@ -201,7 +204,7 @@ class DB():
    def get_table_as_json(self, sqlQuery, parameters=None):
        """
        Wrapper to use the central get_table_as_json helper.

        Args:
            sqlQuery (str): The SQL query to execute.
            parameters (dict, optional): Named parameters for the SQL query.
@@ -209,7 +212,7 @@ class DB():
        try:
            result = get_table_json(self.sql, sqlQuery, parameters)
        except Exception as e:
            mylog('minimal', ['[Database] - get_table_as_json ERROR:', e])
            mylog("minimal", ["[Database] - get_table_as_json ERROR:", e])
            return json_obj({}, [])  # return empty object on failure

        # mylog('debug',[ '[Database] - get_table_as_json - returning ', len(rows), " rows with columns: ", columnNames])
@@ -217,22 +220,30 @@ class DB():

        return result

    #-------------------------------------------------------------------------------
    # -------------------------------------------------------------------------------
    # reference from here: https://codereview.stackexchange.com/questions/241043/interface-class-for-sqlite-databases
    #-------------------------------------------------------------------------------
    # -------------------------------------------------------------------------------
    def read(self, query, *args):
        """check the query and arguments are aligned and are read only"""
        # mylog('debug',[ '[Database] - Read All: SELECT Query: ', query, " params: ", args])
        try:
            assert query.count('?') == len(args)
            assert query.upper().strip().startswith('SELECT')
            assert query.count("?") == len(args)
            assert query.upper().strip().startswith("SELECT")
            self.sql.execute(query, args)
            rows = self.sql.fetchall()
            return rows
        except AssertionError:
            mylog('minimal', [ '[Database] - ERROR: inconsistent query and/or arguments.', query, " params: ", args])
            mylog(
                "minimal",
                [
                    "[Database] - ERROR: inconsistent query and/or arguments.",
                    query,
                    " params: ",
                    args,
                ],
            )
        except sqlite3.Error as e:
            mylog('minimal', [ '[Database] - SQL ERROR: ', e])
            mylog("minimal", ["[Database] - SQL ERROR: ", e])
        return None

    def read_one(self, query, *args):
@@ -240,14 +251,22 @@ class DB():
        call read() with the same arguments but only returns the first row.
        should only be used when there is a single row result expected
        """
        mylog('debug', ['[Database] - Read One: ', query, " params: ", args])
        mylog("debug", ["[Database] - Read One: ", query, " params: ", args])
        rows = self.read(query, *args)
        if not rows:
            return None
        if len(rows) == 1:
            return rows[0]
        if len(rows) > 1:
            mylog('verbose', ['[Database] - Warning!: query returns multiple rows, only first row is passed on!', query, " params: ", args])
            mylog(
                "verbose",
                [
                    "[Database] - Warning!: query returns multiple rows, only first row is passed on!",
                    query,
                    " params: ",
                    args,
                ],
            )
            return rows[0]
        # empty result set
        return None
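
A usage sketch of these guarded read helpers (table and MAC are illustrative; the name of the method that opens the connection is not visible in this hunk, so it is an assumption here):

db = DB()
db.openDB()   # assumption: the opener shown above is exposed under this name
row = db.read_one("SELECT devName FROM Devices WHERE devMac = ?", "00:11:22:33:44:55")
if row:
    print(row["devName"])   # row_factory = sqlite3.Row allows access by column name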
@@ -298,7 +317,10 @@ def get_array_from_sql_rows(rows):
        list: A list of lists, where each inner list represents a row of data.
    """
    # Convert result into list of lists
    return [list(row) if isinstance(row, (sqlite3.Row, tuple, list)) else [row] for row in rows]
    return [
        list(row) if isinstance(row, (sqlite3.Row, tuple, list)) else [row]
        for row in rows
    ]


def get_temp_db_connection():

@@ -1,17 +1,19 @@
import sys
import sqlite3
import os

# Register NetAlertX directories
INSTALL_PATH="/app"
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
sys.path.extend([f"{INSTALL_PATH}/server"])

from helper import if_byte_then_to_str
from logger import mylog


#-------------------------------------------------------------------------------
# -------------------------------------------------------------------------------
# Return the SQL WHERE clause for filtering devices based on their status.


def get_device_condition_by_status(device_status):
    """
    Return the SQL WHERE clause for filtering devices based on their status.
@@ -31,18 +33,18 @@ def get_device_condition_by_status(device_status):
        Defaults to 'WHERE 1=0' for unrecognized statuses.
    """
    conditions = {
        'all': 'WHERE devIsArchived=0',
        'my': 'WHERE devIsArchived=0',
        'connected': 'WHERE devIsArchived=0 AND devPresentLastScan=1',
        'favorites': 'WHERE devIsArchived=0 AND devFavorite=1',
        'new': 'WHERE devIsArchived=0 AND devIsNew=1',
        'down': 'WHERE devIsArchived=0 AND devAlertDown != 0 AND devPresentLastScan=0',
        'archived': 'WHERE devIsArchived=1'
        "all": "WHERE devIsArchived=0",
        "my": "WHERE devIsArchived=0",
        "connected": "WHERE devIsArchived=0 AND devPresentLastScan=1",
        "favorites": "WHERE devIsArchived=0 AND devFavorite=1",
        "new": "WHERE devIsArchived=0 AND devIsNew=1",
        "down": "WHERE devIsArchived=0 AND devAlertDown != 0 AND devPresentLastScan=0",
        "archived": "WHERE devIsArchived=1",
    }
    return conditions.get(device_status, 'WHERE 1=0')
    return conditions.get(device_status, "WHERE 1=0")
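
For example, composing a device query from the returned clause:

clause = get_device_condition_by_status("down")
query = f"SELECT devMac, devName FROM Devices {clause}"
# -> SELECT devMac, devName FROM Devices WHERE devIsArchived=0 AND devAlertDown != 0 AND devPresentLastScan=0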


#-------------------------------------------------------------------------------
# -------------------------------------------------------------------------------
# Creates a JSON-like dictionary from a database row
def row_to_json(names, row):
    """
@@ -57,7 +59,7 @@ def row_to_json(names, row):
        dict: A dictionary where keys are column names and values are the corresponding
              row values. Byte values are automatically converted to strings using
              `if_byte_then_to_str`.

    Example:
        names = ['id', 'name', 'data']
        row = {0: 1, 1: b'Example', 2: b'\x01\x02'}
@@ -72,7 +74,7 @@ def row_to_json(names, row):
    return rowEntry


#-------------------------------------------------------------------------------
# -------------------------------------------------------------------------------
def sanitize_SQL_input(val):
    """
    Sanitize a value for use in SQL queries by replacing single quotes in strings.
@@ -81,19 +83,19 @@ def sanitize_SQL_input(val):
        val (any): The value to sanitize.

    Returns:
        str or any:
        str or any:
            - Returns an empty string if val is None.
            - Returns a string with single quotes replaced by underscores if val is a string.
            - Returns val unchanged if it is any other type.
    """
    if val is None:
        return ''
        return ""
    if isinstance(val, str):
        return val.replace("'", "_")
    return val  # Return non-string values as they are
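
Behaviour at a glance (inputs are examples):

sanitize_SQL_input("O'Brien's laptop")   # -> "O_Brien_s laptop"
sanitize_SQL_input(None)                 # -> ""
sanitize_SQL_input(42)                   # -> 42 (non-strings pass through)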


# -------------------------------------------------------------------------------------------
# -------------------------------------------------------------------------------------------
def get_date_from_period(period):
    """
    Convert a period string into an SQLite date expression.
@@ -105,10 +107,10 @@ def get_date_from_period(period):
        str: An SQLite date expression like "date('now', '-7 day')" corresponding to the period.
    """
    days_map = {
        '7 days': 7,
        '1 month': 30,
        '1 year': 365,
        '100 years': 3650,  # actually 10 years in original PHP
        "7 days": 7,
        "1 month": 30,
        "1 year": 365,
        "100 years": 3650,  # actually 10 years in original PHP
    }

    days = days_map.get(period, 1)  # default 1 day
@@ -117,7 +119,7 @@ def get_date_from_period(period):
    return period_sql
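
Based on the mapping above, the returned expression (format inferred from the docstring) drops straight into a WHERE clause:

get_date_from_period("7 days")   # -> "date('now', '-7 day')"
get_date_from_period("bogus")    # -> "date('now', '-1 day')"  (unknown periods fall back to 1 day)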


#-------------------------------------------------------------------------------
# -------------------------------------------------------------------------------
def print_table_schema(db, table):
    """
    Print the schema of a database table to the log.
@@ -134,20 +136,23 @@ def print_table_schema(db, table):
    result = sql.fetchall()

    if not result:
        mylog('none', f'[Schema] Table "{table}" not found or has no columns.')
        mylog("none", f'[Schema] Table "{table}" not found or has no columns.')
        return

    mylog('debug', f'[Schema] Structure for table: {table}')
    header = f"{'cid':<4} {'name':<20} {'type':<10} {'notnull':<8} {'default':<10} {'pk':<2}"
    mylog('debug', header)
    mylog('debug', '-' * len(header))
    mylog("debug", f"[Schema] Structure for table: {table}")
    header = (
        f"{'cid':<4} {'name':<20} {'type':<10} {'notnull':<8} {'default':<10} {'pk':<2}"
    )
    mylog("debug", header)
    mylog("debug", "-" * len(header))

    for row in result:
        # row = (cid, name, type, notnull, dflt_value, pk)
        line = f"{row[0]:<4} {row[1]:<20} {row[2]:<10} {row[3]:<8} {str(row[4]):<10} {row[5]:<2}"
        mylog('debug', line)
        mylog("debug", line)


#-------------------------------------------------------------------------------
# -------------------------------------------------------------------------------
# Generate a WHERE condition for SQLite based on a list of values.
def list_to_where(logical_operator, column_name, condition_operator, values_list):
    """
@@ -177,9 +182,10 @@ def list_to_where(logical_operator, column_name, condition_operator, values_list
    for value in values_list[1:]:
        condition += f" {logical_operator} {column_name} {condition_operator} '{value}'"

    return f'({condition})'
    return f"({condition})"


#-------------------------------------------------------------------------------
# -------------------------------------------------------------------------------
def get_table_json(sql, sql_query, parameters=None):
    """
    Execute a SQL query and return the results as JSON-like dict.
@@ -198,22 +204,23 @@ def get_table_json(sql, sql_query, parameters=None):
        else:
            sql.execute(sql_query)
        rows = sql.fetchall()
        if (rows):
        if rows:
            # We only return data if we actually got some out of SQLite
            column_names = [col[0] for col in sql.description]
            data = [row_to_json(column_names, row) for row in rows]
            return json_obj({"data": data}, column_names)
    except sqlite3.Error as e:
        # SQLite error, e.g. malformed query
        mylog('verbose', ['[Database] - SQL ERROR: ', e])
        mylog("verbose", ["[Database] - SQL ERROR: ", e])
    except Exception as e:
        # Catch-all for other exceptions, e.g. iteration error
        mylog('verbose', ['[Database] - Unexpected ERROR: ', e])

        mylog("verbose", ["[Database] - Unexpected ERROR: ", e])

    # In case of any error or no data, return empty object
    return json_obj({"data": []}, [])


#-------------------------------------------------------------------------------
# -------------------------------------------------------------------------------
class json_obj:
    """
    A wrapper class for JSON-style objects returned from database queries.

@@ -1,7 +1,8 @@
import sys
import os

# Register NetAlertX directories
INSTALL_PATH="/app"
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
sys.path.extend([f"{INSTALL_PATH}/server"])

from logger import mylog
@@ -12,7 +13,7 @@ def ensure_column(sql, table: str, column_name: str, column_type: str) -> bool:
    """
    Ensures a column exists in the specified table. If missing, attempts to add it.
    Returns True on success, False on failure.

    Parameters:
    - sql: database cursor or connection wrapper (must support execute() and fetchall()).
    - table: name of the table (e.g., "Devices").
@@ -31,14 +32,37 @@ def ensure_column(sql, table: str, column_name: str, column_type: str) -> bool:

        # Define the expected columns (hardcoded base schema) [v25.5.24] - available in the default app.db
        expected_columns = [
            'devMac', 'devName', 'devOwner', 'devType', 'devVendor',
            'devFavorite', 'devGroup', 'devComments', 'devFirstConnection',
            'devLastConnection', 'devLastIP', 'devStaticIP', 'devScan',
            'devLogEvents', 'devAlertEvents', 'devAlertDown', 'devSkipRepeated',
            'devLastNotification', 'devPresentLastScan', 'devIsNew',
            'devLocation', 'devIsArchived', 'devParentMAC', 'devParentPort',
            'devIcon', 'devGUID', 'devSite', 'devSSID', 'devSyncHubNode',
            'devSourcePlugin', 'devCustomProps'
            "devMac",
            "devName",
            "devOwner",
            "devType",
            "devVendor",
            "devFavorite",
            "devGroup",
            "devComments",
            "devFirstConnection",
            "devLastConnection",
            "devLastIP",
            "devStaticIP",
            "devScan",
            "devLogEvents",
            "devAlertEvents",
            "devAlertDown",
            "devSkipRepeated",
            "devLastNotification",
            "devPresentLastScan",
            "devIsNew",
            "devLocation",
            "devIsArchived",
            "devParentMAC",
            "devParentPort",
            "devIcon",
            "devGUID",
            "devSite",
            "devSSID",
            "devSyncHubNode",
            "devSourcePlugin",
            "devCustomProps",
        ]

        # Check for mismatches in base schema
@@ -46,46 +70,52 @@ def ensure_column(sql, table: str, column_name: str, column_type: str) -> bool:
        extra = set(actual_columns) - set(expected_columns)

        if missing:
            msg = (f"[db_upgrade] ⚠ ERROR: Unexpected DB structure "
                   f"(missing: {', '.join(missing) if missing else 'none'}, "
                   f"extra: {', '.join(extra) if extra else 'none'}) - "
                   "aborting schema change to prevent corruption. "
                   "Check https://github.com/jokob-sk/NetAlertX/blob/main/docs/UPDATES.md")
            mylog('none', [msg])
            msg = (
                f"[db_upgrade] ⚠ ERROR: Unexpected DB structure "
                f"(missing: {', '.join(missing) if missing else 'none'}, "
                f"extra: {', '.join(extra) if extra else 'none'}) - "
                "aborting schema change to prevent corruption. "
                "Check https://github.com/jokob-sk/NetAlertX/blob/main/docs/UPDATES.md"
            )
            mylog("none", [msg])
            write_notification(msg)
            return False

        if extra:
            msg = f"[db_upgrade] Extra DB columns detected in {table}: {', '.join(extra)}"
            mylog('none', [msg])
            msg = (
                f"[db_upgrade] Extra DB columns detected in {table}: {', '.join(extra)}"
            )
            mylog("none", [msg])

        # Add missing column
        mylog('verbose', [f"[db_upgrade] Adding '{column_name}' ({column_type}) to {table} table"])
        mylog(
            "verbose",
            [f"[db_upgrade] Adding '{column_name}' ({column_type}) to {table} table"],
        )
        sql.execute(f'ALTER TABLE "{table}" ADD "{column_name}" {column_type}')
        return True

    except Exception as e:
        mylog('none', [f"[db_upgrade] ERROR while adding '{column_name}': {e}"])
        mylog("none", [f"[db_upgrade] ERROR while adding '{column_name}': {e}"])
        return False
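
Typical call, mirroring the devFQDN upgrade invoked from DB.initDB earlier in this diff (the cursor is whatever that caller passes in; the failure handling shown is illustrative):

if not ensure_column(self.sql, "Devices", "devFQDN", "TEXT"):
    mylog("none", ["[db_upgrade] schema mismatch - skipping dependent upgrades"])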


def ensure_views(sql) -> bool:
    """
    Ensures required views exist.

    Parameters:
    - sql: database cursor or connection wrapper (must support execute() and fetchall()).
    """
    sql.execute(""" DROP VIEW IF EXISTS Events_Devices;""")
    sql.execute(""" CREATE VIEW Events_Devices AS
    """
    Ensures required views exist.

    Parameters:
    - sql: database cursor or connection wrapper (must support execute() and fetchall()).
    """
    sql.execute(""" DROP VIEW IF EXISTS Events_Devices;""")
    sql.execute(""" CREATE VIEW Events_Devices AS
                    SELECT *
                    FROM Events
                    LEFT JOIN Devices ON eve_MAC = devMac;
    """)

    sql.execute(""" DROP VIEW IF EXISTS LatestEventsPerMAC;""")
    sql.execute("""CREATE VIEW LatestEventsPerMAC AS

    sql.execute(""" DROP VIEW IF EXISTS LatestEventsPerMAC;""")
    sql.execute("""CREATE VIEW LatestEventsPerMAC AS
                    WITH RankedEvents AS (
                        SELECT
                            e.*,
@@ -100,11 +130,13 @@ def ensure_views(sql) -> bool:
                    LEFT JOIN Devices AS d ON e.eve_MAC = d.devMac
                    INNER JOIN CurrentScan AS c ON e.eve_MAC = c.cur_MAC
                    WHERE e.row_num = 1;""")

    sql.execute(""" DROP VIEW IF EXISTS Sessions_Devices;""")
    sql.execute("""CREATE VIEW Sessions_Devices AS SELECT * FROM Sessions LEFT JOIN "Devices" ON ses_MAC = devMac;""")

    sql.execute(""" CREATE VIEW IF NOT EXISTS LatestEventsPerMAC AS
    sql.execute(""" DROP VIEW IF EXISTS Sessions_Devices;""")
    sql.execute(
        """CREATE VIEW Sessions_Devices AS SELECT * FROM Sessions LEFT JOIN "Devices" ON ses_MAC = devMac;"""
    )

    sql.execute(""" CREATE VIEW IF NOT EXISTS LatestEventsPerMAC AS
                    WITH RankedEvents AS (
                        SELECT
                            e.*,
@@ -121,9 +153,9 @@ def ensure_views(sql) -> bool:
                    WHERE e.row_num = 1;
    """)

    # handling the Convert_Events_to_Sessions / Sessions screens
    sql.execute("""DROP VIEW IF EXISTS Convert_Events_to_Sessions;""")
    sql.execute("""CREATE VIEW Convert_Events_to_Sessions AS SELECT EVE1.eve_MAC,
    # handling the Convert_Events_to_Sessions / Sessions screens
    sql.execute("""DROP VIEW IF EXISTS Convert_Events_to_Sessions;""")
    sql.execute("""CREATE VIEW Convert_Events_to_Sessions AS SELECT EVE1.eve_MAC,
                    EVE1.eve_IP,
                    EVE1.eve_EventType AS eve_EventTypeConnection,
                    EVE1.eve_DateTime AS eve_DateTimeConnection,
@@ -151,7 +183,8 @@ def ensure_views(sql) -> bool:
                    EVE1.eve_PairEventRowID IS NULL;
    """)

    return True
    return True
def ensure_Indexes(sql) -> bool:
    """
@@ -162,30 +195,51 @@ def ensure_Indexes(sql) -> bool:
    """
    indexes = [
        # Sessions
        ("idx_ses_mac_date",
         "CREATE INDEX idx_ses_mac_date ON Sessions(ses_MAC, ses_DateTimeConnection, ses_DateTimeDisconnection, ses_StillConnected)"),

        (
            "idx_ses_mac_date",
            "CREATE INDEX idx_ses_mac_date ON Sessions(ses_MAC, ses_DateTimeConnection, ses_DateTimeDisconnection, ses_StillConnected)",
        ),
        # Events
        ("idx_eve_mac_date_type",
         "CREATE INDEX idx_eve_mac_date_type ON Events(eve_MAC, eve_DateTime, eve_EventType)"),
        ("idx_eve_alert_pending",
         "CREATE INDEX idx_eve_alert_pending ON Events(eve_PendingAlertEmail)"),
        ("idx_eve_mac_datetime_desc",
         "CREATE INDEX idx_eve_mac_datetime_desc ON Events(eve_MAC, eve_DateTime DESC)"),
        ("idx_eve_pairevent",
         "CREATE INDEX idx_eve_pairevent ON Events(eve_PairEventRowID)"),
        ("idx_eve_type_date",
         "CREATE INDEX idx_eve_type_date ON Events(eve_EventType, eve_DateTime)"),

        (
            "idx_eve_mac_date_type",
            "CREATE INDEX idx_eve_mac_date_type ON Events(eve_MAC, eve_DateTime, eve_EventType)",
        ),
        (
            "idx_eve_alert_pending",
            "CREATE INDEX idx_eve_alert_pending ON Events(eve_PendingAlertEmail)",
        ),
        (
            "idx_eve_mac_datetime_desc",
            "CREATE INDEX idx_eve_mac_datetime_desc ON Events(eve_MAC, eve_DateTime DESC)",
        ),
        (
            "idx_eve_pairevent",
            "CREATE INDEX idx_eve_pairevent ON Events(eve_PairEventRowID)",
        ),
        (
            "idx_eve_type_date",
            "CREATE INDEX idx_eve_type_date ON Events(eve_EventType, eve_DateTime)",
        ),
        # Devices
        ("idx_dev_mac", "CREATE INDEX idx_dev_mac ON Devices(devMac)"),
        ("idx_dev_present", "CREATE INDEX idx_dev_present ON Devices(devPresentLastScan)"),
        ("idx_dev_alertdown", "CREATE INDEX idx_dev_alertdown ON Devices(devAlertDown)"),
        (
            "idx_dev_present",
            "CREATE INDEX idx_dev_present ON Devices(devPresentLastScan)",
        ),
        (
            "idx_dev_alertdown",
            "CREATE INDEX idx_dev_alertdown ON Devices(devAlertDown)",
        ),
        ("idx_dev_isnew", "CREATE INDEX idx_dev_isnew ON Devices(devIsNew)"),
        ("idx_dev_isarchived", "CREATE INDEX idx_dev_isarchived ON Devices(devIsArchived)"),
        (
            "idx_dev_isarchived",
            "CREATE INDEX idx_dev_isarchived ON Devices(devIsArchived)",
        ),
        ("idx_dev_favorite", "CREATE INDEX idx_dev_favorite ON Devices(devFavorite)"),
        ("idx_dev_parentmac", "CREATE INDEX idx_dev_parentmac ON Devices(devParentMAC)"),

        (
            "idx_dev_parentmac",
            "CREATE INDEX idx_dev_parentmac ON Devices(devParentMAC)",
        ),
        # Optional filter indexes
        ("idx_dev_site", "CREATE INDEX idx_dev_site ON Devices(devSite)"),
        ("idx_dev_group", "CREATE INDEX idx_dev_group ON Devices(devGroup)"),
@@ -193,12 +247,13 @@ def ensure_Indexes(sql) -> bool:
        ("idx_dev_type", "CREATE INDEX idx_dev_type ON Devices(devType)"),
        ("idx_dev_vendor", "CREATE INDEX idx_dev_vendor ON Devices(devVendor)"),
        ("idx_dev_location", "CREATE INDEX idx_dev_location ON Devices(devLocation)"),

        # Settings
        ("idx_set_key", "CREATE INDEX idx_set_key ON Settings(setKey)"),

        # Plugins_Objects
        ("idx_plugins_plugin_mac_ip", "CREATE INDEX idx_plugins_plugin_mac_ip ON Plugins_Objects(Plugin, Object_PrimaryID, Object_SecondaryID)")  # Issue #1251: Optimize name resolution lookup
        (
            "idx_plugins_plugin_mac_ip",
            "CREATE INDEX idx_plugins_plugin_mac_ip ON Plugins_Objects(Plugin, Object_PrimaryID, Object_SecondaryID)",
        ),  # Issue #1251: Optimize name resolution lookup
    ]

    for name, create_sql in indexes:
@@ -208,19 +263,16 @@ def ensure_Indexes(sql) -> bool:
    return True


def ensure_CurrentScan(sql) -> bool:
    """
    Ensures the required CurrentScan table exists.

    Parameters:
    - sql: database cursor or connection wrapper (must support execute() and fetchall()).
    """
    # 🐛 CurrentScan DEBUG: comment out below when debugging to keep the CurrentScan table after restarts/scan finishes
    sql.execute("DROP TABLE IF EXISTS CurrentScan;")
    sql.execute(""" CREATE TABLE IF NOT EXISTS CurrentScan (
    """
    Ensures the required CurrentScan table exists.

    Parameters:
    - sql: database cursor or connection wrapper (must support execute() and fetchall()).
    """
    # 🐛 CurrentScan DEBUG: comment out below when debugging to keep the CurrentScan table after restarts/scan finishes
    sql.execute("DROP TABLE IF EXISTS CurrentScan;")
    sql.execute(""" CREATE TABLE IF NOT EXISTS CurrentScan (
                        cur_MAC STRING(50) NOT NULL COLLATE NOCASE,
                        cur_IP STRING(50) NOT NULL COLLATE NOCASE,
                        cur_Vendor STRING(250),
@@ -237,42 +289,44 @@ def ensure_CurrentScan(sql) -> bool:
                    );
    """)

    return True
    return True


def ensure_Parameters(sql) -> bool:
    """
    Ensures the required Parameters table exists.

    Parameters:
    - sql: database cursor or connection wrapper (must support execute() and fetchall()).
    """

    # Re-creating Parameters table
    mylog('verbose', ["[db_upgrade] Re-creating Parameters table"])
    sql.execute("DROP TABLE Parameters;")
    """
    Ensures the required Parameters table exists.

    sql.execute("""
    Parameters:
    - sql: database cursor or connection wrapper (must support execute() and fetchall()).
    """

    # Re-creating Parameters table
    mylog("verbose", ["[db_upgrade] Re-creating Parameters table"])
    sql.execute("DROP TABLE Parameters;")

    sql.execute("""
        CREATE TABLE "Parameters" (
            "par_ID" TEXT PRIMARY KEY,
            "par_Value" TEXT
        );
    """)

    return True
    """)

    return True


def ensure_Settings(sql) -> bool:
    """
    Ensures the required Settings table exists.

    Parameters:
    - sql: database cursor or connection wrapper (must support execute() and fetchall()).
    """

    # Re-creating Settings table
    mylog('verbose', ["[db_upgrade] Re-creating Settings table"])
    """
    Ensures the required Settings table exists.

    sql.execute(""" DROP TABLE IF EXISTS Settings;""")
    sql.execute("""
    Parameters:
    - sql: database cursor or connection wrapper (must support execute() and fetchall()).
    """

    # Re-creating Settings table
    mylog("verbose", ["[db_upgrade] Re-creating Settings table"])

    sql.execute(""" DROP TABLE IF EXISTS Settings;""")
    sql.execute("""
        CREATE TABLE "Settings" (
            "setKey" TEXT,
            "setName" TEXT,
@@ -284,21 +338,21 @@ def ensure_Settings(sql) -> bool:
            "setEvents" TEXT,
            "setOverriddenByEnv" INTEGER
        );
    """)
    """)

    return True
    return True


def ensure_plugins_tables(sql) -> bool:
    """
    Ensures required plugins tables exist.

    Parameters:
    - sql: database cursor or connection wrapper (must support execute() and fetchall()).
    """

    # Plugin state
    sql_Plugins_Objects = """ CREATE TABLE IF NOT EXISTS Plugins_Objects(
    """
    Ensures required plugins tables exist.

    Parameters:
    - sql: database cursor or connection wrapper (must support execute() and fetchall()).
    """

    # Plugin state
    sql_Plugins_Objects = """ CREATE TABLE IF NOT EXISTS Plugins_Objects(
                        "Index" INTEGER,
                        Plugin TEXT NOT NULL,
                        Object_PrimaryID TEXT NOT NULL,
@@ -321,10 +375,10 @@ def ensure_plugins_tables(sql) -> bool:
                        ObjectGUID TEXT,
                        PRIMARY KEY("Index" AUTOINCREMENT)
                    ); """
    sql.execute(sql_Plugins_Objects)
    sql.execute(sql_Plugins_Objects)

    # Plugin execution results
    sql_Plugins_Events = """ CREATE TABLE IF NOT EXISTS Plugins_Events(
    # Plugin execution results
    sql_Plugins_Events = """ CREATE TABLE IF NOT EXISTS Plugins_Events(
                        "Index" INTEGER,
                        Plugin TEXT NOT NULL,
                        Object_PrimaryID TEXT NOT NULL,
@@ -346,10 +400,10 @@ def ensure_plugins_tables(sql) -> bool:
                        "HelpVal4" TEXT,
                        PRIMARY KEY("Index" AUTOINCREMENT)
                    ); """
    sql.execute(sql_Plugins_Events)
    sql.execute(sql_Plugins_Events)

    # Plugin execution history
    sql_Plugins_History = """ CREATE TABLE IF NOT EXISTS Plugins_History(
    # Plugin execution history
    sql_Plugins_History = """ CREATE TABLE IF NOT EXISTS Plugins_History(
                        "Index" INTEGER,
                        Plugin TEXT NOT NULL,
                        Object_PrimaryID TEXT NOT NULL,
@@ -371,11 +425,11 @@ def ensure_plugins_tables(sql) -> bool:
                        "HelpVal4" TEXT,
                        PRIMARY KEY("Index" AUTOINCREMENT)
                    ); """
    sql.execute(sql_Plugins_History)
    sql.execute(sql_Plugins_History)

    # Dynamically generated language strings
    sql.execute("DROP TABLE IF EXISTS Plugins_Language_Strings;")
    sql.execute(""" CREATE TABLE IF NOT EXISTS Plugins_Language_Strings(
    # Dynamically generated language strings
    sql.execute("DROP TABLE IF EXISTS Plugins_Language_Strings;")
    sql.execute(""" CREATE TABLE IF NOT EXISTS Plugins_Language_Strings(
                        "Index" INTEGER,
                        Language_Code TEXT NOT NULL,
                        String_Key TEXT NOT NULL,
@@ -384,4 +438,4 @@ def ensure_plugins_tables(sql) -> bool:
                        PRIMARY KEY("Index" AUTOINCREMENT)
                    ); """)

    return True
    return True

@@ -11,10 +11,11 @@ License: GNU GPLv3

import re
import sys
import os
from typing import Dict, List, Tuple, Any, Optional

# Register NetAlertX directories
INSTALL_PATH = "/app"
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
sys.path.extend([f"{INSTALL_PATH}/server"])

from logger import mylog
@@ -28,27 +29,59 @@ class SafeConditionBuilder:

    # Whitelist of allowed column names for filtering
    ALLOWED_COLUMNS = {
        'eve_MAC', 'eve_DateTime', 'eve_IP', 'eve_EventType', 'devName',
        'devComments', 'devLastIP', 'devVendor', 'devAlertEvents',
        'devAlertDown', 'devIsArchived', 'devPresentLastScan', 'devFavorite',
        'devIsNew', 'Plugin', 'Object_PrimaryId', 'Object_SecondaryId',
        'DateTimeChanged', 'Watched_Value1', 'Watched_Value2', 'Watched_Value3',
        'Watched_Value4', 'Status'
        "eve_MAC",
        "eve_DateTime",
        "eve_IP",
        "eve_EventType",
        "devName",
        "devComments",
        "devLastIP",
        "devVendor",
        "devAlertEvents",
        "devAlertDown",
        "devIsArchived",
        "devPresentLastScan",
        "devFavorite",
        "devIsNew",
        "Plugin",
        "Object_PrimaryId",
        "Object_SecondaryId",
        "DateTimeChanged",
        "Watched_Value1",
        "Watched_Value2",
        "Watched_Value3",
        "Watched_Value4",
        "Status",
    }

    # Whitelist of allowed comparison operators
    ALLOWED_OPERATORS = {
        '=', '!=', '<>', '<', '>', '<=', '>=', 'LIKE', 'NOT LIKE',
        'IN', 'NOT IN', 'IS NULL', 'IS NOT NULL'
        "=",
        "!=",
        "<>",
        "<",
        ">",
        "<=",
        ">=",
        "LIKE",
        "NOT LIKE",
        "IN",
        "NOT IN",
        "IS NULL",
        "IS NOT NULL",
    }

    # Whitelist of allowed logical operators
    ALLOWED_LOGICAL_OPERATORS = {'AND', 'OR'}
    ALLOWED_LOGICAL_OPERATORS = {"AND", "OR"}

    # Whitelist of allowed event types
    ALLOWED_EVENT_TYPES = {
        'New Device', 'Connected', 'Disconnected', 'Device Down',
        'Down Reconnected', 'IP Changed'
        "New Device",
        "Connected",
        "Disconnected",
        "Device Down",
        "Down Reconnected",
        "IP Changed",
    }

    def __init__(self):
@@ -56,7 +89,7 @@ class SafeConditionBuilder:
        self.parameters = {}
        self.param_counter = 0

    def _generate_param_name(self, prefix: str = 'param') -> str:
    def _generate_param_name(self, prefix: str = "param") -> str:
        """Generate a unique parameter name for SQL binding."""
        self.param_counter += 1
        return f"{prefix}_{self.param_counter}"
@@ -64,32 +97,32 @@ class SafeConditionBuilder:
    def _sanitize_string(self, value: str) -> str:
        """
        Sanitize string input by removing potentially dangerous characters.

        Args:
            value: String to sanitize

        Returns:
            Sanitized string
        """
        if not isinstance(value, str):
            return str(value)

        # Replace {s-quote} placeholder with single quote (maintaining compatibility)
        value = value.replace('{s-quote}', "'")

        value = value.replace("{s-quote}", "'")

        # Remove any null bytes, control characters, and excessive whitespace
        value = re.sub(r'[\x00-\x08\x0b\x0c\x0e-\x1f\x7f-\x84\x86-\x9f]', '', value)
        value = re.sub(r'\s+', ' ', value.strip())

        value = re.sub(r"[\x00-\x08\x0b\x0c\x0e-\x1f\x7f-\x84\x86-\x9f]", "", value)
        value = re.sub(r"\s+", " ", value.strip())

        return value

    def _validate_column_name(self, column: str) -> bool:
        """
        Validate that a column name is in the whitelist.

        Args:
            column: Column name to validate

        Returns:
            True if valid, False otherwise
        """
@@ -98,10 +131,10 @@ class SafeConditionBuilder:
    def _validate_operator(self, operator: str) -> bool:
        """
        Validate that an operator is in the whitelist.

        Args:
            operator: Operator to validate

        Returns:
            True if valid, False otherwise
        """
@@ -110,10 +143,10 @@ class SafeConditionBuilder:
    def _validate_logical_operator(self, logical_op: str) -> bool:
        """
        Validate that a logical operator is in the whitelist.

        Args:
            logical_op: Logical operator to validate

        Returns:
            True if valid, False otherwise
        """
@@ -124,13 +157,13 @@ class SafeConditionBuilder:
        Parse and build a safe SQL condition from a user-provided string.
        This method attempts to parse common condition patterns and convert
        them to parameterized queries.

        Args:
            condition_string: User-provided condition string

        Returns:
            Tuple of (safe_sql_snippet, parameters_dict)

        Raises:
            ValueError: If the condition contains invalid or unsafe elements
        """
@@ -139,7 +172,7 @@ class SafeConditionBuilder:

        # Sanitize the input
        condition_string = self._sanitize_string(condition_string)

        # Reset parameters for this condition
        self.parameters = {}
        self.param_counter = 0
@@ -147,7 +180,7 @@ class SafeConditionBuilder:
        try:
            return self._parse_condition(condition_string)
        except Exception as e:
            mylog('verbose', f'[SafeConditionBuilder] Error parsing condition: {e}')
            mylog("verbose", f"[SafeConditionBuilder] Error parsing condition: {e}")
            raise ValueError(f"Invalid condition format: {condition_string}")

    def _parse_condition(self, condition: str) -> Tuple[str, Dict[str, Any]]:
@@ -180,12 +213,16 @@ class SafeConditionBuilder:
        clause_text = condition

        # Check for leading AND
        if condition.upper().startswith('AND ') or condition.upper().startswith('AND\t'):
            logical_op = 'AND'
        if condition.upper().startswith("AND ") or condition.upper().startswith(
            "AND\t"
        ):
            logical_op = "AND"
            clause_text = condition[3:].strip()
        # Check for leading OR
        elif condition.upper().startswith('OR ') or condition.upper().startswith('OR\t'):
            logical_op = 'OR'
        elif condition.upper().startswith("OR ") or condition.upper().startswith(
            "OR\t"
        ):
            logical_op = "OR"
            clause_text = condition[2:].strip()

        # Parse the single condition
@@ -224,13 +261,13 @@ class SafeConditionBuilder:
            remaining = condition[i:].upper()

            # Check for AND (must be word boundary)
            if remaining.startswith('AND ') or remaining.startswith('AND\t'):
            if remaining.startswith("AND ") or remaining.startswith("AND\t"):
                logical_op_count += 1
                i += 3
                continue

            # Check for OR (must be word boundary)
            if remaining.startswith('OR ') or remaining.startswith('OR\t'):
            if remaining.startswith("OR ") or remaining.startswith("OR\t"):
                logical_op_count += 1
                i += 2
                continue
@@ -277,7 +314,9 @@ class SafeConditionBuilder:

        return final_sql, all_params

    def _split_by_logical_operators(self, condition: str) -> List[Tuple[str, Optional[str]]]:
    def _split_by_logical_operators(
        self, condition: str
    ) -> List[Tuple[str, Optional[str]]]:
        """
        Split a compound condition into individual clauses.

@@ -311,41 +350,45 @@ class SafeConditionBuilder:
            remaining = condition[i:].upper()

            # Check if we're at a word boundary (start of string or after whitespace)
            at_word_boundary = (i == 0 or condition[i-1] in ' \t')
            at_word_boundary = i == 0 or condition[i - 1] in " \t"

            # Check for AND (must be at word boundary)
            if at_word_boundary and (remaining.startswith('AND ') or remaining.startswith('AND\t')):
            if at_word_boundary and (
                remaining.startswith("AND ") or remaining.startswith("AND\t")
            ):
                # Save current clause if we have one
                if current_clause:
                    clause_text = ''.join(current_clause).strip()
                    clause_text = "".join(current_clause).strip()
                    if clause_text:
                        clauses.append((clause_text, current_logical_op))
                    current_clause = []

                # Set the logical operator for the next clause
                current_logical_op = 'AND'
                current_logical_op = "AND"
                i += 3  # Skip 'AND'

                # Skip whitespace after AND
                while i < len(condition) and condition[i] in ' \t':
                while i < len(condition) and condition[i] in " \t":
                    i += 1
                continue

            # Check for OR (must be at word boundary)
            if at_word_boundary and (remaining.startswith('OR ') or remaining.startswith('OR\t')):
            if at_word_boundary and (
                remaining.startswith("OR ") or remaining.startswith("OR\t")
            ):
                # Save current clause if we have one
                if current_clause:
                    clause_text = ''.join(current_clause).strip()
                    clause_text = "".join(current_clause).strip()
                    if clause_text:
                        clauses.append((clause_text, current_logical_op))
                    current_clause = []

                # Set the logical operator for the next clause
                current_logical_op = 'OR'
                current_logical_op = "OR"
                i += 2  # Skip 'OR'

                # Skip whitespace after OR
                while i < len(condition) and condition[i] in ' \t':
                while i < len(condition) and condition[i] in " \t":
                    i += 1
                continue

@@ -355,13 +398,15 @@ class SafeConditionBuilder:

        # Don't forget the last clause
        if current_clause:
            clause_text = ''.join(current_clause).strip()
            clause_text = "".join(current_clause).strip()
            if clause_text:
                clauses.append((clause_text, current_logical_op))

        return clauses

    def _parse_single_condition(self, condition: str, logical_op: Optional[str] = None) -> Tuple[str, Dict[str, Any]]:
    def _parse_single_condition(
        self, condition: str, logical_op: Optional[str] = None
    ) -> Tuple[str, Dict[str, Any]]:
        """
        Parse a single condition clause into safe SQL with parameters.

@@ -385,7 +430,7 @@ class SafeConditionBuilder:

        # Simple pattern matching for common conditions
        # Pattern 1: [AND/OR] column operator value (supporting Unicode in quoted strings)
        pattern1 = r'^\s*(\w+)\s+(=|!=|<>|<|>|<=|>=|LIKE|NOT\s+LIKE)\s+\'([^\']*)\'\s*$'
        pattern1 = r"^\s*(\w+)\s+(=|!=|<>|<|>|<=|>=|LIKE|NOT\s+LIKE)\s+\'([^\']*)\'\s*$"
        match1 = re.match(pattern1, condition, re.IGNORECASE | re.UNICODE)

        if match1:
@@ -393,7 +438,7 @@ class SafeConditionBuilder:
            return self._build_simple_condition(logical_op, column, operator, value)

        # Pattern 2: [AND/OR] column IN ('val1', 'val2', ...)
        pattern2 = r'^\s*(\w+)\s+(IN|NOT\s+IN)\s+\(([^)]+)\)\s*$'
        pattern2 = r"^\s*(\w+)\s+(IN|NOT\s+IN)\s+\(([^)]+)\)\s*$"
        match2 = re.match(pattern2, condition, re.IGNORECASE)

        if match2:
@@ -401,7 +446,7 @@ class SafeConditionBuilder:
            return self._build_in_condition(logical_op, column, operator, values_str)

        # Pattern 3: [AND/OR] column IS NULL/IS NOT NULL
        pattern3 = r'^\s*(\w+)\s+(IS\s+NULL|IS\s+NOT\s+NULL)\s*$'
        pattern3 = r"^\s*(\w+)\s+(IS\s+NULL|IS\s+NOT\s+NULL)\s*$"
        match3 = re.match(pattern3, condition, re.IGNORECASE)

        if match3:
@@ -411,16 +456,17 @@ class SafeConditionBuilder:
        # If no patterns match, reject the condition for security
        raise ValueError(f"Unsupported condition pattern: {condition}")
|
||||
|
||||
def _build_simple_condition(self, logical_op: Optional[str], column: str,
|
||||
operator: str, value: str) -> Tuple[str, Dict[str, Any]]:
|
||||
def _build_simple_condition(
|
||||
self, logical_op: Optional[str], column: str, operator: str, value: str
|
||||
) -> Tuple[str, Dict[str, Any]]:
|
||||
"""Build a simple condition with parameter binding."""
|
||||
# Validate components
|
||||
if not self._validate_column_name(column):
|
||||
raise ValueError(f"Invalid column name: {column}")
|
||||
|
||||
|
||||
if not self._validate_operator(operator):
|
||||
raise ValueError(f"Invalid operator: {operator}")
|
||||
|
||||
|
||||
if logical_op and not self._validate_logical_operator(logical_op):
|
||||
raise ValueError(f"Invalid logical operator: {logical_op}")
|
||||
|
||||
@@ -432,18 +478,19 @@ class SafeConditionBuilder:
|
||||
sql_parts = []
|
||||
if logical_op:
|
||||
sql_parts.append(logical_op.upper())
|
||||
|
||||
|
||||
sql_parts.extend([column, operator.upper(), f":{param_name}"])
|
||||
|
||||
|
||||
return " ".join(sql_parts), self.parameters
|
||||
|
||||
def _build_in_condition(self, logical_op: Optional[str], column: str,
|
||||
operator: str, values_str: str) -> Tuple[str, Dict[str, Any]]:
|
||||
def _build_in_condition(
|
||||
self, logical_op: Optional[str], column: str, operator: str, values_str: str
|
||||
) -> Tuple[str, Dict[str, Any]]:
|
||||
"""Build an IN condition with parameter binding."""
|
||||
# Validate components
|
||||
if not self._validate_column_name(column):
|
||||
raise ValueError(f"Invalid column name: {column}")
|
||||
|
||||
|
||||
if logical_op and not self._validate_logical_operator(logical_op):
|
||||
raise ValueError(f"Invalid logical operator: {logical_op}")
|
||||
|
||||
@@ -452,7 +499,7 @@ class SafeConditionBuilder:
|
||||
# Simple regex to extract quoted values
|
||||
value_pattern = r"'([^']*)'"
|
||||
matches = re.findall(value_pattern, values_str)
|
||||
|
||||
|
||||
if not matches:
|
||||
raise ValueError("No valid values found in IN clause")
|
||||
|
||||
@@ -467,18 +514,19 @@ class SafeConditionBuilder:
|
||||
sql_parts = []
|
||||
if logical_op:
|
||||
sql_parts.append(logical_op.upper())
|
||||
|
||||
|
||||
sql_parts.extend([column, operator.upper(), f"({', '.join(param_names)})"])
|
||||
|
||||
|
||||
return " ".join(sql_parts), self.parameters
|
||||
|
||||
def _build_null_condition(self, logical_op: Optional[str], column: str,
|
||||
operator: str) -> Tuple[str, Dict[str, Any]]:
|
||||
def _build_null_condition(
|
||||
self, logical_op: Optional[str], column: str, operator: str
|
||||
) -> Tuple[str, Dict[str, Any]]:
|
||||
"""Build a NULL check condition."""
|
||||
# Validate components
|
||||
if not self._validate_column_name(column):
|
||||
raise ValueError(f"Invalid column name: {column}")
|
||||
|
||||
|
||||
if logical_op and not self._validate_logical_operator(logical_op):
|
||||
raise ValueError(f"Invalid logical operator: {logical_op}")
|
||||
|
||||
@@ -486,18 +534,18 @@ class SafeConditionBuilder:
|
||||
sql_parts = []
|
||||
if logical_op:
|
||||
sql_parts.append(logical_op.upper())
|
||||
|
||||
|
||||
sql_parts.extend([column, operator.upper()])
|
||||
|
||||
|
||||
return " ".join(sql_parts), {}
|
||||
|
||||
def build_device_name_filter(self, device_name: str) -> Tuple[str, Dict[str, Any]]:
|
||||
"""
|
||||
Build a safe device name filter condition.
|
||||
|
||||
|
||||
Args:
|
||||
device_name: Device name to filter for
|
||||
|
||||
|
||||
Returns:
|
||||
Tuple of (safe_sql_snippet, parameters_dict)
|
||||
"""
|
||||
@@ -505,74 +553,86 @@ class SafeConditionBuilder:
|
||||
return "", {}
|
||||
|
||||
device_name = self._sanitize_string(device_name)
|
||||
param_name = self._generate_param_name('device_name')
|
||||
param_name = self._generate_param_name("device_name")
|
||||
self.parameters[param_name] = device_name
|
||||
|
||||
return f"AND devName = :{param_name}", self.parameters
|
||||
|
||||
def build_condition(self, conditions: List[Dict[str, str]], logical_operator: str = "AND") -> Tuple[str, Dict[str, Any]]:
|
||||
def build_condition(
|
||||
self, conditions: List[Dict[str, str]], logical_operator: str = "AND"
|
||||
) -> Tuple[str, Dict[str, Any]]:
|
||||
"""
|
||||
Build a safe SQL condition from a list of condition dictionaries.
|
||||
|
||||
|
||||
Args:
|
||||
conditions: List of condition dicts with 'column', 'operator', 'value' keys
|
||||
logical_operator: Logical operator to join conditions (AND/OR)
|
||||
|
||||
|
||||
Returns:
|
||||
Tuple of (safe_sql_snippet, parameters_dict)
|
||||
"""
|
||||
if not conditions:
|
||||
return "", {}
|
||||
|
||||
|
||||
if not self._validate_logical_operator(logical_operator):
|
||||
return "", {}
|
||||
|
||||
|
||||
condition_parts = []
|
||||
all_params = {}
|
||||
|
||||
|
||||
for condition_dict in conditions:
|
||||
try:
|
||||
column = condition_dict.get('column', '')
|
||||
operator = condition_dict.get('operator', '')
|
||||
value = condition_dict.get('value', '')
|
||||
|
||||
column = condition_dict.get("column", "")
|
||||
operator = condition_dict.get("operator", "")
|
||||
value = condition_dict.get("value", "")
|
||||
|
||||
# Validate each component
|
||||
if not self._validate_column_name(column):
|
||||
mylog('verbose', [f'[SafeConditionBuilder] Invalid column: {column}'])
|
||||
mylog(
|
||||
"verbose", [f"[SafeConditionBuilder] Invalid column: {column}"]
|
||||
)
|
||||
return "", {}
|
||||
|
||||
|
||||
if not self._validate_operator(operator):
|
||||
mylog('verbose', [f'[SafeConditionBuilder] Invalid operator: {operator}'])
|
||||
mylog(
|
||||
"verbose",
|
||||
[f"[SafeConditionBuilder] Invalid operator: {operator}"],
|
||||
)
|
||||
return "", {}
|
||||
|
||||
|
||||
# Create parameter binding
|
||||
param_name = self._generate_param_name()
|
||||
all_params[param_name] = self._sanitize_string(str(value))
|
||||
|
||||
|
||||
# Build condition part
|
||||
condition_part = f"{column} {operator} :{param_name}"
|
||||
condition_parts.append(condition_part)
|
||||
|
||||
|
||||
except Exception as e:
|
||||
mylog('verbose', [f'[SafeConditionBuilder] Error processing condition: {e}'])
|
||||
mylog(
|
||||
"verbose",
|
||||
[f"[SafeConditionBuilder] Error processing condition: {e}"],
|
||||
)
|
||||
return "", {}
|
||||
|
||||
|
||||
if not condition_parts:
|
||||
return "", {}
|
||||
|
||||
|
||||
# Join all parts with the logical operator
|
||||
final_condition = f" {logical_operator} ".join(condition_parts)
|
||||
self.parameters.update(all_params)
|
||||
|
||||
|
||||
return final_condition, self.parameters
|
||||
|
||||
def build_event_type_filter(self, event_types: List[str]) -> Tuple[str, Dict[str, Any]]:
|
||||
def build_event_type_filter(
|
||||
self, event_types: List[str]
|
||||
) -> Tuple[str, Dict[str, Any]]:
|
||||
"""
|
||||
Build a safe event type filter condition.
|
||||
|
||||
|
||||
Args:
|
||||
event_types: List of event types to filter for
|
||||
|
||||
|
||||
Returns:
|
||||
Tuple of (safe_sql_snippet, parameters_dict)
|
||||
"""
|
||||
@@ -586,7 +646,10 @@ class SafeConditionBuilder:
|
||||
if event_type in self.ALLOWED_EVENT_TYPES:
|
||||
valid_types.append(event_type)
|
||||
else:
|
||||
mylog('verbose', f'[SafeConditionBuilder] Invalid event type filtered out: {event_type}')
|
||||
mylog(
|
||||
"verbose",
|
||||
f"[SafeConditionBuilder] Invalid event type filtered out: {event_type}",
|
||||
)
|
||||
|
||||
if not valid_types:
|
||||
return "", {}
|
||||
@@ -594,21 +657,23 @@ class SafeConditionBuilder:
|
||||
# Generate parameters for each valid event type
|
||||
param_names = []
|
||||
for event_type in valid_types:
|
||||
param_name = self._generate_param_name('event_type')
|
||||
param_name = self._generate_param_name("event_type")
|
||||
self.parameters[param_name] = event_type
|
||||
param_names.append(f":{param_name}")
|
||||
|
||||
sql_snippet = f"AND eve_EventType IN ({', '.join(param_names)})"
|
||||
return sql_snippet, self.parameters
|
||||
|
||||
def get_safe_condition_legacy(self, condition_setting: str) -> Tuple[str, Dict[str, Any]]:
|
||||
def get_safe_condition_legacy(
|
||||
self, condition_setting: str
|
||||
) -> Tuple[str, Dict[str, Any]]:
|
||||
"""
|
||||
Convert legacy condition settings to safe parameterized queries.
|
||||
This method provides backward compatibility for existing condition formats.
|
||||
|
||||
|
||||
Args:
|
||||
condition_setting: The condition string from settings
|
||||
|
||||
|
||||
Returns:
|
||||
Tuple of (safe_sql_snippet, parameters_dict)
|
||||
"""
|
||||
@@ -619,15 +684,18 @@ class SafeConditionBuilder:
|
||||
return self.build_safe_condition(condition_setting)
|
||||
except ValueError as e:
|
||||
# Log the error and return empty condition for safety
|
||||
mylog('verbose', f'[SafeConditionBuilder] Unsafe condition rejected: {condition_setting}, Error: {e}')
|
||||
mylog(
|
||||
"verbose",
|
||||
f"[SafeConditionBuilder] Unsafe condition rejected: {condition_setting}, Error: {e}",
|
||||
)
|
||||
return "", {}
|
||||
|
||||
|
||||
def create_safe_condition_builder() -> SafeConditionBuilder:
|
||||
"""
|
||||
Factory function to create a new SafeConditionBuilder instance.
|
||||
|
||||
|
||||
Returns:
|
||||
New SafeConditionBuilder instance
|
||||
"""
|
||||
return SafeConditionBuilder()
|
||||
return SafeConditionBuilder()
|
||||
|
||||
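
A minimal usage sketch of the refactored builder (illustrative only — the condition string, column name, and the exact generated parameter name are assumptions, not part of this commit):

# Hypothetical usage of SafeConditionBuilder after this refactor
builder = create_safe_condition_builder()
snippet, params = builder.get_safe_condition_legacy("devName = 'Router'")
# snippet -> "devName = :<generated_param>", params -> {"<generated_param>": "Router"}
# Unsupported or unsafe patterns are rejected and come back as ("", {}):
snippet, params = builder.get_safe_condition_legacy("devName = 'x'; DROP TABLE Devices")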
561 server/helper.py
File diff suppressed because it is too large
@@ -1,6 +1,4 @@

import os
import time
from pytz import timezone, all_timezones, UnknownTimeZoneError
from cron_converter import Cron
from pathlib import Path
@@ -10,9 +8,16 @@ import shutil
import re

# Register NetAlertX libraries
import conf
from const import fullConfPath, applicationPath, fullConfFolder, default_tz
from helper import getBuildTimeStamp, fixPermissions, collect_lang_strings, updateSubnets, isJsonObject, setting_value_to_python_type, timeNowTZ, get_setting_value, generate_random_string
import conf
from const import fullConfPath, fullConfFolder, default_tz
from helper import (
getBuildTimeStamp,
fixPermissions,
collect_lang_strings,
updateSubnets,
timeNowTZ,
generate_random_string,
)
from app_state import updateState
from logger import mylog
from api import update_api
@@ -20,19 +25,34 @@ from scheduler import schedule_class
from plugin import plugin_manager, print_plugin_info
from plugin_utils import get_plugins_configs, get_set_value_for_init
from messaging.in_app import write_notification
from crypto_utils import get_random_bytes

#===============================================================================
# ===============================================================================
# Initialise user defined values
#===============================================================================
# ===============================================================================

#-------------------------------------------------------------------------------
# -------------------------------------------------------------------------------
# Import user values
# Check config dictionary

#-------------------------------------------------------------------------------

# -------------------------------------------------------------------------------
# managing application settings, ensuring SQL safety for user input, and updating internal configuration lists
def ccd(key, default, config_dir, name, inputtype, options, group, events=None, desc="", setJsonMetadata=None, overrideTemplate=None, forceDefault=False, overriddenByEnv=0, all_plugins=[]):
def ccd(
key,
default,
config_dir,
name,
inputtype,
options,
group,
events=None,
desc="",
setJsonMetadata=None,
overrideTemplate=None,
forceDefault=False,
overriddenByEnv=0,
all_plugins=[],
):
if events is None:
events = []
if setJsonMetadata is None:
@@ -40,7 +60,7 @@ def ccd(key, default, config_dir, name, inputtype, options, group, events=None,
if overrideTemplate is None:
overrideTemplate = {}

# Use default initialization value
# Use default initialization value
result = default

# Use existing value if already supplied, otherwise default value is used
@@ -48,8 +68,8 @@ def ccd(key, default, config_dir, name, inputtype, options, group, events=None,
result = config_dir[key]

# Single quotes might break SQL queries, replacing them
if inputtype == 'text':
result = result.replace('\'', "{s-quote}")
if inputtype == "text":
result = result.replace("'", "{s-quote}")

# Add to config_dir and update plugin value if overridden by environment
if overriddenByEnv == 1:
@@ -59,30 +79,67 @@ def ccd(key, default, config_dir, name, inputtype, options, group, events=None,

for set in plugin["settings"]:
setFunction = set["function"]
# Setting code name / key
plugKey = pref + "_" + setFunction
# Setting code name / key
plugKey = pref + "_" + setFunction

if plugKey == key:
set["value"] = result
set["value"] = result

# prepare SQL for DB update
# prepare SQL for DB update
# Create the tuples
sql_safe_tuple = (key, name, desc, str(inputtype), options, str(result), group, str(events), overriddenByEnv)
settings_tuple = (key, name, desc, inputtype, options, result, group, str(events), overriddenByEnv)
sql_safe_tuple = (
key,
name,
desc,
str(inputtype),
options,
str(result),
group,
str(events),
overriddenByEnv,
)
settings_tuple = (
key,
name,
desc,
inputtype,
options,
result,
group,
str(events),
overriddenByEnv,
)

# Update or append the tuples in the lists
conf.mySettingsSQLsafe = update_or_append(conf.mySettingsSQLsafe, sql_safe_tuple, key)
conf.mySettingsSQLsafe = update_or_append(
conf.mySettingsSQLsafe, sql_safe_tuple, key
)
conf.mySettings = update_or_append(conf.mySettings, settings_tuple, key)

# Save metadata in dummy setting if not a metadata key
if '__metadata' not in key:
metadata_tuple = (f'{key}__metadata', "metadata name", "metadata desc", '{"dataType":"json", "elements": [{"elementType" : "textarea", "elementOptions" : [{"readonly": "true"}] ,"transformers": []}]}', '[]', json.dumps(setJsonMetadata), group, '[]', overriddenByEnv)
conf.mySettingsSQLsafe = update_or_append(conf.mySettingsSQLsafe, metadata_tuple, f'{key}__metadata')
conf.mySettings = update_or_append(conf.mySettings, metadata_tuple, f'{key}__metadata')
if "__metadata" not in key:
metadata_tuple = (
f"{key}__metadata",
"metadata name",
"metadata desc",
'{"dataType":"json", "elements": [{"elementType" : "textarea", "elementOptions" : [{"readonly": "true"}] ,"transformers": []}]}',
"[]",
json.dumps(setJsonMetadata),
group,
"[]",
overriddenByEnv,
)
conf.mySettingsSQLsafe = update_or_append(
conf.mySettingsSQLsafe, metadata_tuple, f"{key}__metadata"
)
conf.mySettings = update_or_append(
conf.mySettings, metadata_tuple, f"{key}__metadata"
)

return result
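
For reference, a small sketch of what ccd() does to a text setting (the key and value here are hypothetical, not from this diff):

# Hypothetical: a user-supplied text value containing a single quote
c_d = {"REPORT_DASHBOARD_URL": "Bob's dashboard"}
url = ccd("REPORT_DASHBOARD_URL", "http://localhost", c_d, "NetAlertX URL",
          "text", "[]", "General")
# Single quotes are masked for SQL safety: url == "Bob{s-quote}s dashboard"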
#-------------------------------------------------------------------------------

# -------------------------------------------------------------------------------
# Function to find and update the existing key in the list
def update_or_append(settings_list, item_tuple, key):
if settings_list is None:
@@ -90,30 +147,31 @@ def update_or_append(settings_list, item_tuple, key):

for index, item in enumerate(settings_list):
if item[0] == key:
mylog('trace', ['[Import Config] OLD TUPLE : ', item])
mylog("trace", ["[Import Config] OLD TUPLE : ", item])
# Keep values marked as "_KEEP_" in existing entries
updated_tuple = tuple(
new_val if new_val != "_KEEP_" else old_val
for old_val, new_val in zip(item, item_tuple)
)
mylog('trace', ['[Import Config] NEW TUPLE : ', updated_tuple])
mylog("trace", ["[Import Config] NEW TUPLE : ", updated_tuple])
settings_list[index] = updated_tuple
mylog('trace', ['[Import Config] FOUND key : ', key])
return settings_list
mylog("trace", ["[Import Config] FOUND key : ", key])
return settings_list

# Append the item only if no values are "_KEEP_"
if "_KEEP_" not in item_tuple:
settings_list.append(item_tuple)
mylog('trace', ['[Import Config] ADDED key : ', key])
mylog("trace", ["[Import Config] ADDED key : ", key])
else:
mylog('none', ['[Import Config] Skipped saving _KEEP_ for key : ', key])
mylog("none", ["[Import Config] Skipped saving _KEEP_ for key : ", key])

return settings_list
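
The "_KEEP_" merge rule is easiest to see on a concrete tuple (a hypothetical run, mirroring the Settings tuple layout):

existing = [("TIMEZONE", "Time zone", "desc", "string", "[]", "UTC", "General", "[]", 0)]
incoming = ("TIMEZONE", "_KEEP_", "_KEEP_", "_KEEP_", "_KEEP_", "Europe/Paris", "_KEEP_", "_KEEP_", 0)
update_or_append(existing, incoming, "TIMEZONE")
# Every "_KEEP_" position keeps the old value; only the changed column is replaced:
# -> [("TIMEZONE", "Time zone", "desc", "string", "[]", "Europe/Paris", "General", "[]", 0)]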
#-------------------------------------------------------------------------------

def importConfigs (pm, db, all_plugins):

# -------------------------------------------------------------------------------


def importConfigs(pm, db, all_plugins):
sql = db.sql

# get config file name
@@ -127,57 +185,216 @@ def importConfigs (pm, db, all_plugins):

fileModifiedTime = os.path.getmtime(config_file)

mylog('debug', ['[Import Config] checking config file '])
mylog('debug', ['[Import Config] lastImportedConfFile :', conf.lastImportedConfFile])
mylog('debug', ['[Import Config] fileModifiedTime :', fileModifiedTime])

mylog("debug", ["[Import Config] checking config file "])
mylog(
"debug",
["[Import Config] lastImportedConfFile :", conf.lastImportedConfFile],
)
mylog("debug", ["[Import Config] fileModifiedTime :", fileModifiedTime])

if (fileModifiedTime == conf.lastImportedConfFile) and all_plugins is not None:
mylog('debug', ['[Import Config] skipping config file import'])
mylog("debug", ["[Import Config] skipping config file import"])
return pm, all_plugins, False

# Header
updateState("Import config", showSpinner = True)
updateState("Import config", showSpinner=True)

# remove all plugin language strings
sql.execute("DELETE FROM Plugins_Language_Strings;")
db.commitDB()

mylog('debug', ['[Import Config] importing config file'])
conf.mySettings = [] # reset settings
conf.mySettingsSQLsafe = [] # same as above but safe to be passed into a SQL query

mylog("debug", ["[Import Config] importing config file"])
conf.mySettings = []  # reset settings
conf.mySettingsSQLsafe = []  # same as above but safe to be passed into a SQL query

# User values loaded from now
c_d = read_config_file(config_file)

# Import setting if found in the dictionary

# General

# General
# ----------------------------------------
# ccd(key, default, config_dir, name, inputtype, options, group, events=[], desc = "", regex = "", setJsonMetadata = {}, overrideTemplate = {})

conf.LOADED_PLUGINS = ccd('LOADED_PLUGINS', [] , c_d, 'Loaded plugins', '{"dataType":"array","elements":[{"elementType":"select","elementHasInputValue":1,"elementOptions":[{"multiple":"true","ordeable":"true"}],"transformers":[]},{"elementType":"button","elementOptions":[{"sourceSuffixes":[]},{"separator":""},{"cssClasses":"col-xs-12"},{"onClick":"selectChange(this)"},{"getStringKey":"Gen_Change"}],"transformers":[]}]}', '[]', 'General')
conf.DISCOVER_PLUGINS = ccd('DISCOVER_PLUGINS', True , c_d, 'Discover plugins', """{"dataType": "boolean","elements": [{"elementType": "input","elementOptions": [{ "type": "checkbox" }],"transformers": []}]}""", '[]', 'General')
conf.SCAN_SUBNETS = ccd('SCAN_SUBNETS', ['192.168.1.0/24 --interface=eth1', '192.168.1.0/24 --interface=eth0'] , c_d, 'Subnets to scan', '''{"dataType": "array","elements": [{"elementType": "input","elementOptions": [{"placeholder": "192.168.1.0/24 --interface=eth1"},{"suffix": "_in"},{"cssClasses": "col-sm-10"},{"prefillValue": "null"}],"transformers": []},{"elementType": "button","elementOptions": [{"sourceSuffixes": ["_in"]},{"separator": ""},{"cssClasses": "col-xs-12"},{"onClick": "addList(this, false)"},{"getStringKey": "Gen_Add"}],"transformers": []},{"elementType": "select","elementHasInputValue": 1,"elementOptions": [{"multiple": "true"},{"readonly": "true"},{"editable": "true"}],"transformers": []},{"elementType": "button","elementOptions": [{"sourceSuffixes": []},{"separator": ""},{"cssClasses": "col-xs-6"},{"onClick": "removeAllOptions(this)"},{"getStringKey": "Gen_Remove_All"}],"transformers": []},{"elementType": "button","elementOptions": [{"sourceSuffixes": []},{"separator": ""},{"cssClasses": "col-xs-6"},{"onClick": "removeFromList(this)"},{"getStringKey": "Gen_Remove_Last"}],"transformers": []}]}''', '[]', 'General')
conf.LOG_LEVEL = ccd('LOG_LEVEL', 'verbose' , c_d, 'Log verboseness', '{"dataType":"string", "elements": [{"elementType" : "select", "elementOptions" : [] ,"transformers": []}]}', "['none', 'minimal', 'verbose', 'debug', 'trace']", 'General')
conf.TIMEZONE = ccd('TIMEZONE', default_tz , c_d, 'Time zone', '{"dataType":"string", "elements": [{"elementType" : "input", "elementOptions" : [] ,"transformers": []}]}', '[]', 'General')
conf.PLUGINS_KEEP_HIST = ccd('PLUGINS_KEEP_HIST', 250 , c_d, 'Keep history entries', '{"dataType":"integer", "elements": [{"elementType" : "input", "elementOptions" : [{"type": "number"}] ,"transformers": []}]}', '[]', 'General')
conf.REPORT_DASHBOARD_URL = ccd('REPORT_DASHBOARD_URL', 'update_REPORT_DASHBOARD_URL_setting' , c_d, 'NetAlertX URL', '{"dataType":"string", "elements": [{"elementType" : "input", "elementOptions" : [] ,"transformers": []}]}', '[]', 'General')
conf.DAYS_TO_KEEP_EVENTS = ccd('DAYS_TO_KEEP_EVENTS', 90 , c_d, 'Delete events days', '{"dataType":"integer", "elements": [{"elementType" : "input", "elementOptions" : [{"type": "number"}] ,"transformers": []}]}', '[]', 'General')
conf.HRS_TO_KEEP_NEWDEV = ccd('HRS_TO_KEEP_NEWDEV', 0 , c_d, 'Keep new devices for', '{"dataType":"integer", "elements": [{"elementType" : "input", "elementOptions" : [{"type": "number"}] ,"transformers": []}]}', "[]", 'General')
conf.HRS_TO_KEEP_OFFDEV = ccd('HRS_TO_KEEP_OFFDEV', 0 , c_d, 'Keep offline devices for', '{"dataType":"integer", "elements": [{"elementType" : "input", "elementOptions" : [{"type": "number"}] ,"transformers": []}]}', "[]", 'General')
conf.CLEAR_NEW_FLAG = ccd('CLEAR_NEW_FLAG', 0 , c_d, 'Clear new flag', '{"dataType":"integer", "elements": [{"elementType" : "input", "elementOptions" : [{"type": "number"}] ,"transformers": []}]}', "[]", 'General')
conf.REFRESH_FQDN = ccd('REFRESH_FQDN', False , c_d, 'Refresh FQDN', """{"dataType": "boolean","elements": [{"elementType": "input","elementOptions": [{ "type": "checkbox" }],"transformers": []}]}""", '[]', 'General')
conf.API_CUSTOM_SQL = ccd('API_CUSTOM_SQL', 'SELECT * FROM Devices WHERE devPresentLastScan = 0' , c_d, 'Custom endpoint', '{"dataType":"string", "elements": [{"elementType" : "input", "elementOptions" : [] ,"transformers": []}]}', '[]', 'General')
conf.VERSION = ccd('VERSION', '' , c_d, 'Version', '{"dataType":"string", "elements": [{"elementType" : "input", "elementOptions" : [{ "readonly": "true" }] ,"transformers": []}]}', '', 'General')
conf.NETWORK_DEVICE_TYPES = ccd('NETWORK_DEVICE_TYPES', ['AP', 'Access Point', 'Gateway', 'Firewall', 'Hypervisor', 'Powerline', 'Switch', 'WLAN', 'PLC', 'Router','USB LAN Adapter', 'USB WIFI Adapter', 'Internet'] , c_d, 'Network device types', '{"dataType":"array","elements":[{"elementType":"input","elementOptions":[{"placeholder":"Enter value"},{"suffix":"_in"},{"cssClasses":"col-sm-10"},{"prefillValue":"null"}],"transformers":[]},{"elementType":"button","elementOptions":[{"sourceSuffixes":["_in"]},{"separator":""},{"cssClasses":"col-xs-12"},{"onClick":"addList(this,false)"},{"getStringKey":"Gen_Add"}],"transformers":[]},{"elementType":"select", "elementHasInputValue":1,"elementOptions":[{"multiple":"true"},{"readonly":"true"},{"editable":"true"}],"transformers":[]},{"elementType":"button","elementOptions":[{"sourceSuffixes":[]},{"separator":""},{"cssClasses":"col-xs-6"},{"onClick":"removeAllOptions(this)"},{"getStringKey":"Gen_Remove_All"}],"transformers":[]},{"elementType":"button","elementOptions":[{"sourceSuffixes":[]},{"separator":""},{"cssClasses":"col-xs-6"},{"onClick":"removeFromList(this)"},{"getStringKey":"Gen_Remove_Last"}],"transformers":[]}]}', '[]', 'General')
conf.GRAPHQL_PORT = ccd('GRAPHQL_PORT', 20212 , c_d, 'GraphQL port', '{"dataType":"integer", "elements": [{"elementType" : "input", "elementOptions" : [{"type": "number"}] ,"transformers": []}]}', '[]', 'General')
conf.API_TOKEN = ccd('API_TOKEN', 't_' + generate_random_string(20) , c_d, 'API token', '{"dataType": "string","elements": [{"elementType": "input","elementHasInputValue": 1,"elementOptions": [{ "cssClasses": "col-xs-12" }],"transformers": []},{"elementType": "button","elementOptions": [{ "getStringKey": "Gen_Generate" },{ "customParams": "API_TOKEN" },{ "onClick": "generateApiToken(this, 20)" },{ "cssClasses": "col-xs-12" }],"transformers": []}]}', '[]', 'General')

conf.LOADED_PLUGINS = ccd(
"LOADED_PLUGINS",
[],
c_d,
"Loaded plugins",
'{"dataType":"array","elements":[{"elementType":"select","elementHasInputValue":1,"elementOptions":[{"multiple":"true","ordeable":"true"}],"transformers":[]},{"elementType":"button","elementOptions":[{"sourceSuffixes":[]},{"separator":""},{"cssClasses":"col-xs-12"},{"onClick":"selectChange(this)"},{"getStringKey":"Gen_Change"}],"transformers":[]}]}',
"[]",
"General",
)
conf.DISCOVER_PLUGINS = ccd(
"DISCOVER_PLUGINS",
True,
c_d,
"Discover plugins",
"""{"dataType": "boolean","elements": [{"elementType": "input","elementOptions": [{ "type": "checkbox" }],"transformers": []}]}""",
"[]",
"General",
)
conf.SCAN_SUBNETS = ccd(
"SCAN_SUBNETS",
["192.168.1.0/24 --interface=eth1", "192.168.1.0/24 --interface=eth0"],
c_d,
"Subnets to scan",
"""{"dataType": "array","elements": [{"elementType": "input","elementOptions": [{"placeholder": "192.168.1.0/24 --interface=eth1"},{"suffix": "_in"},{"cssClasses": "col-sm-10"},{"prefillValue": "null"}],"transformers": []},{"elementType": "button","elementOptions": [{"sourceSuffixes": ["_in"]},{"separator": ""},{"cssClasses": "col-xs-12"},{"onClick": "addList(this, false)"},{"getStringKey": "Gen_Add"}],"transformers": []},{"elementType": "select","elementHasInputValue": 1,"elementOptions": [{"multiple": "true"},{"readonly": "true"},{"editable": "true"}],"transformers": []},{"elementType": "button","elementOptions": [{"sourceSuffixes": []},{"separator": ""},{"cssClasses": "col-xs-6"},{"onClick": "removeAllOptions(this)"},{"getStringKey": "Gen_Remove_All"}],"transformers": []},{"elementType": "button","elementOptions": [{"sourceSuffixes": []},{"separator": ""},{"cssClasses": "col-xs-6"},{"onClick": "removeFromList(this)"},{"getStringKey": "Gen_Remove_Last"}],"transformers": []}]}""",
"[]",
"General",
)
conf.LOG_LEVEL = ccd(
"LOG_LEVEL",
"verbose",
c_d,
"Log verboseness",
'{"dataType":"string", "elements": [{"elementType" : "select", "elementOptions" : [] ,"transformers": []}]}',
"['none', 'minimal', 'verbose', 'debug', 'trace']",
"General",
)
conf.TIMEZONE = ccd(
"TIMEZONE",
default_tz,
c_d,
"Time zone",
'{"dataType":"string", "elements": [{"elementType" : "input", "elementOptions" : [] ,"transformers": []}]}',
"[]",
"General",
)
conf.PLUGINS_KEEP_HIST = ccd(
"PLUGINS_KEEP_HIST",
250,
c_d,
"Keep history entries",
'{"dataType":"integer", "elements": [{"elementType" : "input", "elementOptions" : [{"type": "number"}] ,"transformers": []}]}',
"[]",
"General",
)
conf.REPORT_DASHBOARD_URL = ccd(
"REPORT_DASHBOARD_URL",
"update_REPORT_DASHBOARD_URL_setting",
c_d,
"NetAlertX URL",
'{"dataType":"string", "elements": [{"elementType" : "input", "elementOptions" : [] ,"transformers": []}]}',
"[]",
"General",
)
conf.DAYS_TO_KEEP_EVENTS = ccd(
"DAYS_TO_KEEP_EVENTS",
90,
c_d,
"Delete events days",
'{"dataType":"integer", "elements": [{"elementType" : "input", "elementOptions" : [{"type": "number"}] ,"transformers": []}]}',
"[]",
"General",
)
conf.HRS_TO_KEEP_NEWDEV = ccd(
"HRS_TO_KEEP_NEWDEV",
0,
c_d,
"Keep new devices for",
'{"dataType":"integer", "elements": [{"elementType" : "input", "elementOptions" : [{"type": "number"}] ,"transformers": []}]}',
"[]",
"General",
)
conf.HRS_TO_KEEP_OFFDEV = ccd(
"HRS_TO_KEEP_OFFDEV",
0,
c_d,
"Keep offline devices for",
'{"dataType":"integer", "elements": [{"elementType" : "input", "elementOptions" : [{"type": "number"}] ,"transformers": []}]}',
"[]",
"General",
)
conf.CLEAR_NEW_FLAG = ccd(
"CLEAR_NEW_FLAG",
0,
c_d,
"Clear new flag",
'{"dataType":"integer", "elements": [{"elementType" : "input", "elementOptions" : [{"type": "number"}] ,"transformers": []}]}',
"[]",
"General",
)
conf.REFRESH_FQDN = ccd(
"REFRESH_FQDN",
False,
c_d,
"Refresh FQDN",
"""{"dataType": "boolean","elements": [{"elementType": "input","elementOptions": [{ "type": "checkbox" }],"transformers": []}]}""",
"[]",
"General",
)
conf.API_CUSTOM_SQL = ccd(
"API_CUSTOM_SQL",
"SELECT * FROM Devices WHERE devPresentLastScan = 0",
c_d,
"Custom endpoint",
'{"dataType":"string", "elements": [{"elementType" : "input", "elementOptions" : [] ,"transformers": []}]}',
"[]",
"General",
)
conf.VERSION = ccd(
"VERSION",
"",
c_d,
"Version",
'{"dataType":"string", "elements": [{"elementType" : "input", "elementOptions" : [{ "readonly": "true" }] ,"transformers": []}]}',
"",
"General",
)
conf.NETWORK_DEVICE_TYPES = ccd(
"NETWORK_DEVICE_TYPES",
[
"AP",
"Access Point",
"Gateway",
"Firewall",
"Hypervisor",
"Powerline",
"Switch",
"WLAN",
"PLC",
"Router",
"USB LAN Adapter",
"USB WIFI Adapter",
"Internet",
],
c_d,
"Network device types",
'{"dataType":"array","elements":[{"elementType":"input","elementOptions":[{"placeholder":"Enter value"},{"suffix":"_in"},{"cssClasses":"col-sm-10"},{"prefillValue":"null"}],"transformers":[]},{"elementType":"button","elementOptions":[{"sourceSuffixes":["_in"]},{"separator":""},{"cssClasses":"col-xs-12"},{"onClick":"addList(this,false)"},{"getStringKey":"Gen_Add"}],"transformers":[]},{"elementType":"select", "elementHasInputValue":1,"elementOptions":[{"multiple":"true"},{"readonly":"true"},{"editable":"true"}],"transformers":[]},{"elementType":"button","elementOptions":[{"sourceSuffixes":[]},{"separator":""},{"cssClasses":"col-xs-6"},{"onClick":"removeAllOptions(this)"},{"getStringKey":"Gen_Remove_All"}],"transformers":[]},{"elementType":"button","elementOptions":[{"sourceSuffixes":[]},{"separator":""},{"cssClasses":"col-xs-6"},{"onClick":"removeFromList(this)"},{"getStringKey":"Gen_Remove_Last"}],"transformers":[]}]}',
"[]",
"General",
)
conf.GRAPHQL_PORT = ccd(
"GRAPHQL_PORT",
20212,
c_d,
"GraphQL port",
'{"dataType":"integer", "elements": [{"elementType" : "input", "elementOptions" : [{"type": "number"}] ,"transformers": []}]}',
"[]",
"General",
)
conf.API_TOKEN = ccd(
"API_TOKEN",
"t_" + generate_random_string(20),
c_d,
"API token",
'{"dataType": "string","elements": [{"elementType": "input","elementHasInputValue": 1,"elementOptions": [{ "cssClasses": "col-xs-12" }],"transformers": []},{"elementType": "button","elementOptions": [{ "getStringKey": "Gen_Generate" },{ "customParams": "API_TOKEN" },{ "onClick": "generateApiToken(this, 20)" },{ "cssClasses": "col-xs-12" }],"transformers": []}]}',
"[]",
"General",
)

# UI
conf.UI_LANG = ccd('UI_LANG', 'English (en_us)' , c_d, 'Language Interface', '{"dataType":"string", "elements": [{"elementType" : "select", "elementOptions" : [] ,"transformers": []}]}', "['English (en_us)', 'Arabic (ar_ar)', 'Catalan (ca_ca)', 'Czech (cs_cz)', 'German (de_de)', 'Spanish (es_es)', 'Farsi (fa_fa)', 'French (fr_fr)', 'Italian (it_it)', 'Norwegian (nb_no)', 'Polish (pl_pl)', 'Portuguese (pt_br)', 'Portuguese (pt_pt)', 'Russian (ru_ru)', 'Swedish (sv_sv)', 'Turkish (tr_tr)', 'Ukrainian (uk_ua)', 'Chinese (zh_cn)']", 'UI')

conf.UI_LANG = ccd(
"UI_LANG",
"English (en_us)",
c_d,
"Language Interface",
'{"dataType":"string", "elements": [{"elementType" : "select", "elementOptions" : [] ,"transformers": []}]}',
"['English (en_us)', 'Arabic (ar_ar)', 'Catalan (ca_ca)', 'Czech (cs_cz)', 'German (de_de)', 'Spanish (es_es)', 'Farsi (fa_fa)', 'French (fr_fr)', 'Italian (it_it)', 'Norwegian (nb_no)', 'Polish (pl_pl)', 'Portuguese (pt_br)', 'Portuguese (pt_pt)', 'Russian (ru_ru)', 'Swedish (sv_sv)', 'Turkish (tr_tr)', 'Ukrainian (uk_ua)', 'Chinese (zh_cn)']",
"UI",
)

# Init timezone in case it changed and handle invalid values
try:
if conf.TIMEZONE not in all_timezones:
@@ -185,26 +402,33 @@ def importConfigs (pm, db, all_plugins):
conf.tz = timezone(conf.TIMEZONE)
except UnknownTimeZoneError:
conf.tz = timezone(default_tz) # Init Default
conf.TIMEZONE = ccd('TIMEZONE', conf.tz , c_d, '_KEEP_', '_KEEP_', '[]', 'General')
mylog('none', [f"[Config] Invalid timezone '{conf.TIMEZONE}', defaulting to {default_tz}."])
conf.TIMEZONE = ccd(
"TIMEZONE", conf.tz, c_d, "_KEEP_", "_KEEP_", "[]", "General"
)
mylog(
"none",
[
f"[Config] Invalid timezone '{conf.TIMEZONE}', defaulting to {default_tz}."
],
)
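
The fallback above can be reproduced standalone with pytz (a sketch; the zone names are examples):

# Hypothetical illustration of the UnknownTimeZoneError fallback
from pytz import timezone, UnknownTimeZoneError
try:
    tz = timezone("Mars/Olympus_Mons")  # invalid zone -> raises UnknownTimeZoneError
except UnknownTimeZoneError:
    tz = timezone("UTC")  # default_tz plays this role in the code above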
# TODO cleanup later ----------------------------------------------------------------------------------
# init all time values as we have timezone - all this should be moved into plugin/plugin settings
conf.time_started = datetime.datetime.now(conf.tz)
conf.time_started = datetime.datetime.now(conf.tz)
conf.plugins_once_run = False

# timestamps of last execution times
conf.startTime = conf.time_started
now_minus_24h = conf.time_started - datetime.timedelta(hours = 24)
conf.startTime = conf.time_started
now_minus_24h = conf.time_started - datetime.timedelta(hours=24)

# set these times to the past to force the first run
conf.last_scan_run = now_minus_24h
conf.last_version_check = now_minus_24h
# set these times to the past to force the first run
conf.last_scan_run = now_minus_24h
conf.last_version_check = now_minus_24h

# TODO cleanup later ----------------------------------------------------------------------------------
# reset schedules
conf.mySchedules = []
conf.mySchedules = []

# Format and prepare the list of subnets
conf.userSubnets = updateSubnets(conf.SCAN_SUBNETS)
@@ -213,64 +437,87 @@ def importConfigs (pm, db, all_plugins):
# -----------------

# necessary_plugins = ['UI', 'CUSTPROP', 'CLOUD' ,'DBCLNP', 'INTRNT','MAINT','NEWDEV', 'SETPWD', 'SYNC', 'VNDRPDT', 'WORKFLOWS']
necessary_plugins = ['UI', 'CUSTPROP', 'DBCLNP', 'INTRNT','MAINT','NEWDEV', 'SETPWD', 'SYNC', 'VNDRPDT', 'WORKFLOWS']
necessary_plugins = [
"UI",
"CUSTPROP",
"DBCLNP",
"INTRNT",
"MAINT",
"NEWDEV",
"SETPWD",
"SYNC",
"VNDRPDT",
"WORKFLOWS",
]
# make sure necessary plugins are loaded
conf.LOADED_PLUGINS += [plugin for plugin in necessary_plugins if plugin not in conf.LOADED_PLUGINS]
conf.LOADED_PLUGINS += [
plugin for plugin in necessary_plugins if plugin not in conf.LOADED_PLUGINS
]

all_plugins = get_plugins_configs(conf.DISCOVER_PLUGINS)

mylog('none', ['[Config] Plugins: Number of all plugins (including not loaded): ', len(all_plugins)])
mylog(
"none",
[
"[Config] Plugins: Number of all plugins (including not loaded): ",
len(all_plugins),
],
)

plugin_indexes_to_remove = []
all_plugins_prefixes = [] # to init the LOADED_PLUGINS setting with correct options
loaded_plugins_prefixes = [] # to init the LOADED_PLUGINS setting with correct initially selected values
all_plugins_prefixes = []  # to init the LOADED_PLUGINS setting with correct options
loaded_plugins_prefixes = []  # to init the LOADED_PLUGINS setting with correct initially selected values

# handle plugins
index = 0
for plugin in all_plugins:

# Header on the frontend and the app_state.json
updateState(f"Check plugin ({index}/{len(all_plugins)})")
updateState(f"Check plugin ({index}/{len(all_plugins)})")

index +=1
index += 1

pref = plugin["unique_prefix"]
pref = plugin["unique_prefix"]

all_plugins_prefixes.append(pref)

# The below lines are used to determine if the plugin should be loaded, or skipped based on user settings (conf.LOADED_PLUGINS)
# ...or based on if is already enabled, or if the default configuration loads the plugin (RUN function != disabled )
# ...or based on if is already enabled, or if the default configuration loads the plugin (RUN function != disabled )

# get run value (computationally expensive)
plugin_run = get_set_value_for_init(plugin, c_d, "RUN")

# only include loaded plugins, and the ones that are enabled
if pref in conf.LOADED_PLUGINS or plugin_run != 'disabled' or plugin_run is None:

print_plugin_info(plugin, ['display_name','description'])
# only include loaded plugins, and the ones that are enabled
if (
pref in conf.LOADED_PLUGINS
or plugin_run != "disabled"
or plugin_run is None
):
print_plugin_info(plugin, ["display_name", "description"])

stringSqlParams = []

# collect plugin level language strings
stringSqlParams = collect_lang_strings(plugin, pref, stringSqlParams)

for set in plugin["settings"]:
setFunction = set["function"]
# Setting code name / key
key = pref + "_" + setFunction
# Setting code name / key
key = pref + "_" + setFunction

# set.get() - returns None if not found, set["options"] raises error
# ccd(key, default, config_dir, name, inputtype, options, group, events=[], desc = "", setJsonMetadata = {}):
v = ccd(key,
set["default_value"],
c_d,
set["name"][0]["string"],
set["type"] ,
str(set["options"]),
group = pref,
events = set.get("events"),
desc = set["description"][0]["string"],
setJsonMetadata = set)
v = ccd(
key,
set["default_value"],
c_d,
set["name"][0]["string"],
set["type"],
str(set["options"]),
group=pref,
events=set.get("events"),
desc=set["description"][0]["string"],
setJsonMetadata=set,
)

# Save the user defined value into the object
set["value"] = v
@@ -281,28 +528,41 @@ def importConfigs (pm, db, all_plugins):
for option in element.get("elementOptions", []):
if "popupForm" in option:
for popup_entry in option["popupForm"]:
popup_pref = key + "_popupform_" + popup_entry.get("function", "")
stringSqlParams = collect_lang_strings(popup_entry, popup_pref, stringSqlParams)
popup_pref = (
key
+ "_popupform_"
+ popup_entry.get("function", "")
)
stringSqlParams = collect_lang_strings(
popup_entry, popup_pref, stringSqlParams
)

# Collect settings related language strings
# Creates an entry with key, for example ARPSCAN_CMD_name
stringSqlParams = collect_lang_strings(set, pref + "_" + set["function"], stringSqlParams)
stringSqlParams = collect_lang_strings(
set, pref + "_" + set["function"], stringSqlParams
)

# Collect column related language strings
for clmn in plugin.get('database_column_definitions', []):
for clmn in plugin.get("database_column_definitions", []):
# Creates an entry with key, for example ARPSCAN_Object_PrimaryID_name
stringSqlParams = collect_lang_strings(clmn, pref + "_" + clmn.get("column", ""), stringSqlParams)
stringSqlParams = collect_lang_strings(
clmn, pref + "_" + clmn.get("column", ""), stringSqlParams
)

# bulk-import language strings
sql.executemany ("""INSERT INTO Plugins_Language_Strings ("Language_Code", "String_Key", "String_Value", "Extra") VALUES (?, ?, ?, ?)""", stringSqlParams )
sql.executemany(
"""INSERT INTO Plugins_Language_Strings ("Language_Code", "String_Key", "String_Value", "Extra") VALUES (?, ?, ?, ?)""",
stringSqlParams,
)

else:
# log which plugins to remove
# log which plugins to remove
index_to_remove = 0
for plugin in all_plugins:
if plugin["unique_prefix"] == pref:
break
index_to_remove +=1
index_to_remove += 1

plugin_indexes_to_remove.append(index_to_remove)

@@ -310,151 +570,211 @@ def importConfigs (pm, db, all_plugins):
# Sort the list of indexes in descending order to avoid index shifting issues
plugin_indexes_to_remove.sort(reverse=True)
for indx in plugin_indexes_to_remove:
pref = all_plugins[indx]["unique_prefix"]
mylog('none', [f'[Config] ⛔ Unloading {pref}'])
pref = all_plugins[indx]["unique_prefix"]
mylog("none", [f"[Config] ⛔ Unloading {pref}"])
all_plugins.pop(indx)

# all_plugins has now only initialized plugins, get all prefixes
for plugin in all_plugins:
pref = plugin["unique_prefix"]
pref = plugin["unique_prefix"]
loaded_plugins_prefixes.append(pref)

# save the newly discovered plugins as options and default values
conf.LOADED_PLUGINS = ccd('LOADED_PLUGINS', loaded_plugins_prefixes , c_d, '_KEEP_', '_KEEP_', str(sorted(all_plugins_prefixes)), 'General')

mylog('none', ['[Config] Number of Plugins to load: ', len(loaded_plugins_prefixes)])
mylog('none', ['[Config] Plugins to load: ', loaded_plugins_prefixes])
# save the newly discovered plugins as options and default values
conf.LOADED_PLUGINS = ccd(
"LOADED_PLUGINS",
loaded_plugins_prefixes,
c_d,
"_KEEP_",
"_KEEP_",
str(sorted(all_plugins_prefixes)),
"General",
)

mylog(
"none", ["[Config] Number of Plugins to load: ", len(loaded_plugins_prefixes)]
)
mylog("none", ["[Config] Plugins to load: ", loaded_plugins_prefixes])

conf.plugins_once_run = False

# -----------------
# HANDLE APP_CONF_OVERRIDE via app_conf_override.json

app_conf_override_path = fullConfFolder + '/app_conf_override.json'
app_conf_override_path = fullConfFolder + "/app_conf_override.json"

if os.path.exists(app_conf_override_path):
with open(app_conf_override_path, 'r') as f:
with open(app_conf_override_path, "r") as f:
try:
# Load settings_override from the JSON file
settings_override = json.load(f)

# Loop through settings_override dictionary
for setting_name, value in settings_override.items():

# Ensure the value is treated as a string and passed directly
if isinstance(value, str) == False:
value = str(value)

# Log the value being passed
# ccd(key, default, config_dir, name, inputtype, options, group, events=None, desc="", setJsonMetadata=None, overrideTemplate=None, forceDefault=False)
mylog('verbose', [f"[Config] Setting override {setting_name} with value: {value}"])
ccd(setting_name, value, c_d, '_KEEP_', '_KEEP_', '_KEEP_', '_KEEP_', None, "_KEEP_", None, None, True, 1, all_plugins)
mylog(
"verbose",
[
f"[Config] Setting override {setting_name} with value: {value}"
],
)
ccd(
setting_name,
value,
c_d,
"_KEEP_",
"_KEEP_",
"_KEEP_",
"_KEEP_",
None,
"_KEEP_",
None,
None,
True,
1,
all_plugins,
)

except json.JSONDecodeError:
mylog('none', [f"[Config] [ERROR] Setting override decoding JSON from {app_conf_override_path}"])
mylog(
"none",
[
f"[Config] [ERROR] Setting override decoding JSON from {app_conf_override_path}"
],
)
else:
mylog('debug', [f"[Config] File {app_conf_override_path} does not exist."])

mylog("debug", [f"[Config] File {app_conf_override_path} does not exist."])

# setup execution schedules AFTER OVERRIDE handling

# mylog('verbose', [f"[Config] c_d {c_d}"])

for plugin in all_plugins:
for plugin in all_plugins:
# Setup schedules
run_val = get_set_value_for_init(plugin, c_d, "RUN")
run_sch = get_set_value_for_init(plugin, c_d, "RUN_SCHD")

# mylog('verbose', [f"[Config] pref {plugin["unique_prefix"]} run_val {run_val} run_sch {run_sch} "])

if run_val == 'schedule':
newSchedule = Cron(run_sch).schedule(start_date=datetime.datetime.now(conf.tz))
conf.mySchedules.append(schedule_class(plugin["unique_prefix"], newSchedule, newSchedule.next(), False))
if run_val == "schedule":
newSchedule = Cron(run_sch).schedule(
start_date=datetime.datetime.now(conf.tz)
)
conf.mySchedules.append(
schedule_class(
plugin["unique_prefix"], newSchedule, newSchedule.next(), False
)
)

# mylog('verbose', [f"[Config] conf.mySchedules {conf.mySchedules}"])

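The schedule setup leans on cron_converter; a standalone sketch of the same calls (the cron expression is an example):

# Hypothetical: build a schedule the same way the loop above does
import datetime
from cron_converter import Cron

schedule = Cron("*/30 * * * *").schedule(start_date=datetime.datetime.now())
next_run = schedule.next()  # first run time after start_date
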
# -----------------
# HANDLE APP was upgraded message - clear cache

# Check if app was upgraded

buildTimestamp = getBuildTimeStamp()
cur_version = conf.VERSION

mylog('debug', [f"[Config] buildTimestamp: '{buildTimestamp}'"])
mylog('debug', [f"[Config] conf.VERSION : '{cur_version}'"])

if str(cur_version) != str(buildTimestamp):

mylog('none', ['[Config] App upgraded 🚀'])

# ccd(key, default, config_dir, name, inputtype, options, group, events=None, desc="", setJsonMetadata=None, overrideTemplate=None, forceDefault=False)
ccd('VERSION', buildTimestamp , c_d, '_KEEP_', '_KEEP_', '_KEEP_', '_KEEP_', None, "_KEEP_", None, None, True)

write_notification(f'[Upgrade] : App upgraded 🚀 Please clear the cache: <ol> <li>Click OK below</li> <li>Clear the browser cache (shift + browser refresh button)</li> <li> Clear app cache with the <i class="fa-solid fa-rotate"></i> (reload) button in the header</li><li>Go to Settings and click Save</li> </ol> Check out new features and what has changed in the <a href="https://github.com/jokob-sk/NetAlertX/releases" target="_blank">📓 release notes</a>.', 'interrupt', timeNowTZ())

# Check if app was upgraded

buildTimestamp = getBuildTimeStamp()
cur_version = conf.VERSION

mylog("debug", [f"[Config] buildTimestamp: '{buildTimestamp}'"])
mylog("debug", [f"[Config] conf.VERSION : '{cur_version}'"])

if str(cur_version) != str(buildTimestamp):
mylog("none", ["[Config] App upgraded 🚀"])

# ccd(key, default, config_dir, name, inputtype, options, group, events=None, desc="", setJsonMetadata=None, overrideTemplate=None, forceDefault=False)
ccd(
"VERSION",
buildTimestamp,
c_d,
"_KEEP_",
"_KEEP_",
"_KEEP_",
"_KEEP_",
None,
"_KEEP_",
None,
None,
True,
)

write_notification(
'[Upgrade] : App upgraded 🚀 Please clear the cache: <ol> <li>Click OK below</li> <li>Clear the browser cache (shift + browser refresh button)</li> <li> Clear app cache with the <i class="fa-solid fa-rotate"></i> (reload) button in the header</li><li>Go to Settings and click Save</li> </ol> Check out new features and what has changed in the <a href="https://github.com/jokob-sk/NetAlertX/releases" target="_blank">📓 release notes</a>.',
"interrupt",
timeNowTZ(),
)

# -----------------
# Initialization finished, update DB and API endpoints

# Insert settings into the DB
sql.execute ("DELETE FROM Settings")

# Insert settings into the DB
sql.execute("DELETE FROM Settings")
# mylog('debug', [f"[Config] conf.mySettingsSQLsafe : '{conf.mySettingsSQLsafe}'"])
sql.executemany ("""INSERT INTO Settings ("setKey", "setName", "setDescription", "setType", "setOptions",
"setValue", "setGroup", "setEvents", "setOverriddenByEnv" ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)""", conf.mySettingsSQLsafe)

sql.executemany(
"""INSERT INTO Settings ("setKey", "setName", "setDescription", "setType", "setOptions",
"setValue", "setGroup", "setEvents", "setOverriddenByEnv" ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)""",
conf.mySettingsSQLsafe,
)

db.commitDB()

# update only the settings datasource
update_api(db, all_plugins, True, ["settings"])

# run plugins that are modifying the config
update_api(db, all_plugins, True, ["settings"])

# run plugins that are modifying the config
pm = plugin_manager(db, all_plugins)
pm.clear_cache()
pm.run_plugin_scripts('before_config_save')
pm.run_plugin_scripts("before_config_save")

# Used to determine the next import
conf.lastImportedConfFile = os.path.getmtime(config_file)
conf.lastImportedConfFile = os.path.getmtime(config_file)

# updateState(newState (text),
# settingsSaved = None (timestamp),
# settingsImported = None (timestamp),
# showSpinner = False (1/0),
# updateState(newState (text),
# settingsSaved = None (timestamp),
# settingsImported = None (timestamp),
# showSpinner = False (1/0),
# graphQLServerStarted = 1 (1/0))
updateState("Config imported", conf.lastImportedConfFile, conf.lastImportedConfFile, False, 1)

msg = '[Config] Imported new settings config'
mylog('minimal', msg)

updateState(
"Config imported",
conf.lastImportedConfFile,
conf.lastImportedConfFile,
False,
1,
)

msg = "[Config] Imported new settings config"
mylog("minimal", msg)

# front end app log logging
write_notification(msg, 'info', timeNowTZ())
write_notification(msg, "info", timeNowTZ())

return pm, all_plugins, True

#-------------------------------------------------------------------------------
# -------------------------------------------------------------------------------
def read_config_file(filename):
"""
returns dict of the config file key:value pairs
"""
mylog('minimal', '[Config] reading config file')
mylog("minimal", "[Config] reading config file")
# load the variables from .conf file
code = compile(filename.read_text(), filename.name, "exec")
confDict = {} # config dictionary
confDict = {}  # config dictionary
exec(code, {"__builtins__": {}}, confDict)
return confDict
return confDict

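Because the config file is executed with empty __builtins__, only literal assignments survive; a sketch with a hypothetical file and path:

# Hypothetical app.conf contents:
#   LOG_LEVEL='verbose'
#   SCAN_SUBNETS=['192.168.1.0/24 --interface=eth0']
from pathlib import Path
c_d = read_config_file(Path("/data/config/app.conf"))  # path is illustrative
c_d["LOG_LEVEL"]  # -> 'verbose'
# Calls into builtins (e.g. open(), print()) inside the file would fail here.
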
#-------------------------------------------------------------------------------
# -------------------------------------------------------------------------------
# DEPRECATE soonest after 10/10/2024
# 🤔Idea/TODO: Check and compare versions/timestamps and only perform a replacement if config/version older than...
replacements = {
r'\bREPORT_TO\b': 'SMTP_REPORT_TO',
r'\bSYNC_api_token\b': 'API_TOKEN',
r'\bAPI_TOKEN=\'\'': f'API_TOKEN=\'t_{generate_random_string(20)}\'',
r"\bREPORT_TO\b": "SMTP_REPORT_TO",
r"\bSYNC_api_token\b": "API_TOKEN",
r"\bAPI_TOKEN=\'\'": f"API_TOKEN='t_{generate_random_string(20)}'",
}

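The \b word boundaries keep substring hits from being renamed; a quick sketch of the per-line replacement pass (the config line is hypothetical):

import re
line = "REPORT_TO='user@example.com' # SMTP_REPORT_TO already migrated"
for key, value in replacements.items():
    line = re.sub(key, value, line)
# -> "SMTP_REPORT_TO='user@example.com' # SMTP_REPORT_TO already migrated"
# The existing SMTP_REPORT_TO is untouched: '_' counts as a word character, so \bREPORT_TO\b does not match inside it.
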
@@ -463,11 +783,13 @@ def renameSettings(config_file):
    contains_old_settings = False

    # Open the original config_file for reading
    with open(str(config_file), 'r') as original_file:  # Convert config_file to a string
    with open(
        str(config_file), "r"
    ) as original_file:  # Convert config_file to a string
        for line in original_file:
            # Use regular expressions with word boundaries to check for the old setting code names
            if any(re.search(key, line) for key in replacements.keys()):
                mylog('debug', f'[Config] Old setting names found in line: ({line})')
                mylog("debug", f"[Config] Old setting names found in line: ({line})")
                contains_old_settings = True
                break  # Exit the loop if any old setting is found

@@ -477,12 +799,18 @@ def renameSettings(config_file):
        timestamp = datetime.datetime.now().strftime("%Y%m%d%H%M%S")
        backup_file = f"{config_file}_old_setting_names_{timestamp}.bak"

        mylog('debug', f'[Config] Old setting names will be replaced and a backup ({backup_file}) of the config created.')
        mylog(
            "debug",
            f"[Config] Old setting names will be replaced and a backup ({backup_file}) of the config created.",
        )

        shutil.copy(str(config_file), backup_file)  # Convert config_file to a string

        # Open the original config_file for reading and create a temporary file for writing
        with open(str(config_file), 'r') as original_file, open(str(config_file) + "_temp", 'w') as temp_file:  # Convert config_file to a string
        with (
            open(str(config_file), "r") as original_file,
            open(str(config_file) + "_temp", "w") as temp_file,
        ):  # Convert config_file to a string
            for line in original_file:
                # Use regular expressions with word boundaries for replacements
                for key, value in replacements.items():

@@ -496,13 +824,13 @@ def renameSettings(config_file):
        temp_file.close()

        # Replace the original config_file with the temporary file
        shutil.move(str(config_file) + "_temp", str(config_file))  # Convert config_file to a string
        shutil.move(
            str(config_file) + "_temp", str(config_file)
        )  # Convert config_file to a string

        # ensure correct ownership
        fixPermissions()
    else:
        mylog('debug', '[Config] No old setting names found in the file. No changes made.')




        mylog(
            "debug", "[Config] No old setting names found in the file. No changes made."
        )

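The word-boundary patterns in the replacements map avoid partial matches when old setting names are substrings of new ones. A small sketch of the per-line substitution pass, using a subset of the map above:

import re

replacements = {r'\bREPORT_TO\b': 'SMTP_REPORT_TO'}  # subset of the map above
line = "REPORT_TO='a@b.c'  # SMTP_REPORT_TO_LIST stays untouched"
for key, value in replacements.items():
    line = re.sub(key, value, line)
print(line)  # \b keeps the pattern from matching inside SMTP_REPORT_TO_LIST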
@@ -3,54 +3,55 @@ import io
import datetime
import threading
import queue
import time
import logging

# NetAlertX imports
import conf
from const import *

#-------------------------------------------------------------------------------

# -------------------------------------------------------------------------------
# duplication from helper to avoid circle
#-------------------------------------------------------------------------------
# -------------------------------------------------------------------------------
def timeNowTZ():
    if conf.tz:
        return datetime.datetime.now(conf.tz).replace(microsecond=0)
    else:
        return datetime.datetime.now().replace(microsecond=0)

#-------------------------------------------------------------------------------

# -------------------------------------------------------------------------------
# Map custom debug levels to Python logging levels
custom_to_logging_levels = {
    'none': logging.NOTSET,
    'minimal': logging.WARNING,
    'verbose': logging.INFO,
    'debug': logging.DEBUG,
    'trace': logging.DEBUG,  # Can map to DEBUG or lower custom level if needed
    "none": logging.NOTSET,
    "minimal": logging.WARNING,
    "verbose": logging.INFO,
    "debug": logging.DEBUG,
    "trace": logging.DEBUG,  # Can map to DEBUG or lower custom level if needed
}

#-------------------------------------------------------------------------------
# -------------------------------------------------------------------------------
# More verbose as the numbers go up
debugLevels = [
    ('none', 0), ('minimal', 1), ('verbose', 2), ('debug', 3), ('trace', 4)
]
debugLevels = [("none", 0), ("minimal", 1), ("verbose", 2), ("debug", 3), ("trace", 4)]

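A rough sketch of how the two tables interact (the real Logger keeps them in debugLevels and custom_to_logging_levels as above): the numeric ranks decide whether a message is emitted at all, while the mapping picks the stdlib level it is logged at.

import logging

custom_to_logging_levels = {"none": logging.NOTSET, "minimal": logging.WARNING,
                            "verbose": logging.INFO, "debug": logging.DEBUG,
                            "trace": logging.DEBUG}
numeric = dict([("none", 0), ("minimal", 1), ("verbose", 2), ("debug", 3), ("trace", 4)])

def should_emit(requested, configured):
    # emit when the configured level is at least as verbose as the requested one
    return numeric[configured] >= numeric[requested]

print(should_emit("debug", "verbose"))    # False: 'verbose' (2) < 'debug' (3)
print(should_emit("minimal", "verbose"))  # True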
# use the LOG_LEVEL from the config, may be overridden
currentLevel = conf.LOG_LEVEL

#-------------------------------------------------------------------------------
# -------------------------------------------------------------------------------
# Queue for log messages
log_queue = queue.Queue(maxsize=1000)  # Increase size to handle spikes
log_thread = None  # Will hold the thread reference

#-------------------------------------------------------------------------------

# -------------------------------------------------------------------------------
# Custom logging handler
class MyLogHandler(logging.Handler):
    def emit(self, record):
        log_entry = self.format(record)
        log_queue.put(log_entry)

#-------------------------------------------------------------------------------

# -------------------------------------------------------------------------------
# Logger class
class Logger:
    def __init__(self, LOG_LEVEL):

@@ -77,14 +78,13 @@ class Logger:
        root_logger.handlers.clear()

        my_log_handler = MyLogHandler()
        formatter = logging.Formatter('%(message)s', datefmt='%H:%M:%S')
        formatter = logging.Formatter("%(message)s", datefmt="%H:%M:%S")
        my_log_handler.setFormatter(formatter)

        root_logger.addHandler(my_log_handler)
        root_logger.setLevel(custom_to_logging_levels.get(currentLevel, logging.NOTSET))

    def mylog(self, requestedDebugLevel, *args):

        self.reqLvl = self._to_num(requestedDebugLevel)
        self.setLvl = self._to_num(currentLevel)

@@ -93,9 +93,10 @@ class Logger:

    def isAbove(self, requestedDebugLevel):
        reqLvl = self._to_num(requestedDebugLevel)
        return reqLvl is not None and self.setLvl is not None and self.setLvl >= reqLvl
        return reqLvl is not None and self.setLvl is not None and self.setLvl >= reqLvl

#-------------------------------------------------------------------------------

# -------------------------------------------------------------------------------
# Dedicated thread for writing logs
def log_writer():
    buffer = []

@@ -106,27 +107,31 @@ def log_writer():
                break
            buffer.append(log_entry)
            if len(buffer) >= 10:
                with open(logPath + "/app.log", 'a') as log_file:
                    log_file.write('\n'.join(buffer) + '\n')
                with open(logPath + "/app.log", "a") as log_file:
                    log_file.write("\n".join(buffer) + "\n")
                buffer.clear()
        except queue.Empty:
            if buffer:
                with open(logPath + "/app.log", 'a') as log_file:
                    log_file.write('\n'.join(buffer) + '\n')
                with open(logPath + "/app.log", "a") as log_file:
                    log_file.write("\n".join(buffer) + "\n")
                buffer.clear()


def start_log_writer_thread():
    global log_thread
    if log_thread is None or not log_thread.is_alive():
        log_thread = threading.Thread(target=log_writer, daemon=True)
        log_thread.start()

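A self-contained sketch of the batched-writer pattern log_writer() uses (path, timeout, and batch size are stand-ins): entries flush in groups of 10, and the queue.Empty timeout path flushes any partial batch so messages never stall in the buffer.

import os, queue, tempfile, threading

log_queue = queue.Queue(maxsize=1000)
log_file = os.path.join(tempfile.mkdtemp(), "app.log")  # stand-in for logPath

def log_writer():
    buffer = []
    while True:
        try:
            entry = log_queue.get(timeout=2)  # timeout value is a stand-in
            if entry is None:                 # sentinel ends the thread
                break
            buffer.append(entry)
            if len(buffer) >= 10:             # batch flush
                with open(log_file, "a") as f:
                    f.write("\n".join(buffer) + "\n")
                buffer.clear()
        except queue.Empty:                   # idle flush of a partial batch
            if buffer:
                with open(log_file, "a") as f:
                    f.write("\n".join(buffer) + "\n")
                buffer.clear()

threading.Thread(target=log_writer, daemon=True).start()
for i in range(25):
    log_queue.put(f"entry {i}")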
#-------------------------------------------------------------------------------

# -------------------------------------------------------------------------------
def file_print(*args):
    result = timeNowTZ().strftime('%H:%M:%S') + ' '
    result = timeNowTZ().strftime("%H:%M:%S") + " "
    for arg in args:
        if isinstance(arg, list):
            arg = ' '.join(str(a) for a in arg)  # so that new lines are handled correctly also when passing a list
            arg = " ".join(
                str(a) for a in arg
            )  # so that new lines are handled correctly also when passing a list
        result += str(arg)

    logging.log(custom_to_logging_levels.get(currentLevel, logging.NOTSET), result)
@@ -134,30 +139,34 @@ def file_print(*args):

    start_log_writer_thread()

#-------------------------------------------------------------------------------

# -------------------------------------------------------------------------------
def append_file_binary(file_path, input_data):
    with open(file_path, 'ab') as file:
    with open(file_path, "ab") as file:
        if isinstance(input_data, str):
            input_data = input_data.encode('utf-8')
            input_data = input_data.encode("utf-8")
        file.write(input_data)


def logResult(stdout, stderr):
    if stderr is not None:
        append_file_binary(logPath + '/stderr.log', stderr)
        append_file_binary(logPath + "/stderr.log", stderr)
    if stdout is not None:
        append_file_binary(logPath + '/stdout.log', stdout)
        append_file_binary(logPath + "/stdout.log", stdout)


def append_line_to_file(pPath, pText):
    if sys.version_info < (3, 0):
        file = io.open(pPath, mode='a', encoding='utf-8')
        file.write(pText.decode('unicode_escape'))
        file = io.open(pPath, mode="a", encoding="utf-8")
        file.write(pText.decode("unicode_escape"))
        file.close()
    else:
        file = open(pPath, 'a', encoding='utf-8')
        file = open(pPath, "a", encoding="utf-8")
        file.write(pText)
        file.close()

#-------------------------------------------------------------------------------

# -------------------------------------------------------------------------------
# Create default logger instance and backward-compatible global mylog
logger = Logger(conf.LOG_LEVEL)
mylog = logger.mylog

@@ -1,31 +1,27 @@
import datetime
import os
import sys
import _io
import json
import uuid
import socket
import subprocess
import requests
from yattag import indent
from json2table import convert
import time

from flask import jsonify

# Register NetAlertX directories
INSTALL_PATH="/app"
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
sys.path.extend([f"{INSTALL_PATH}/server"])

# Register NetAlertX modules
from const import apiPath
from logger import mylog
from helper import (
    timeNowTZ,
)

import conf
from const import applicationPath, logPath, apiPath, confFileName, reportTemplatesPath
from logger import logResult, mylog
from helper import generate_mac_links, removeDuplicateNewLines, timeNowTZ, get_file_content, write_file, get_setting_value, get_timezone_offset
NOTIFICATION_API_FILE = apiPath + "user_notifications.json"

NOTIFICATION_API_FILE = apiPath + 'user_notifications.json'

# Show Frontend User Notification
def write_notification(content, level='alert', timestamp=None):
def write_notification(content, level="alert", timestamp=None):
    """
    Create and append a new user notification entry to the notifications file.

@@ -39,33 +35,33 @@ def write_notification(content, level='alert', timestamp=None):
        None
    """
    if timestamp is None:
        timestamp = timeNowTZ()
        timestamp = timeNowTZ()

    # Generate GUID
    guid = str(uuid.uuid4())

    # Prepare notification dictionary
    notification = {
        'timestamp': str(timestamp),
        'guid': guid,
        'read': 0,
        'level': level,
        'content': content
        "timestamp": str(timestamp),
        "guid": guid,
        "read": 0,
        "level": level,
        "content": content,
    }

    # If file exists, load existing data, otherwise initialize as empty list
    if os.path.exists(NOTIFICATION_API_FILE):
        with open(NOTIFICATION_API_FILE, 'r') as file:
        with open(NOTIFICATION_API_FILE, "r") as file:
            # Check if the file object is of type _io.TextIOWrapper
            if isinstance(file, _io.TextIOWrapper):
                file_contents = file.read()  # Read file contents
                if file_contents == '':
                    file_contents = '[]'  # If file is empty, initialize as empty list
                if file_contents == "":
                    file_contents = "[]"  # If file is empty, initialize as empty list

                # mylog('debug', ['[Notification] User Notifications file: ', file_contents])
                notifications = json.loads(file_contents)  # Parse JSON data
            else:
                mylog('none', '[Notification] File is not of type _io.TextIOWrapper')
                mylog("none", "[Notification] File is not of type _io.TextIOWrapper")
                notifications = []
    else:
        notifications = []
@@ -74,9 +70,10 @@ def write_notification(content, level='alert', timestamp=None):
    notifications.append(notification)

    # Write updated data back to file
    with open(NOTIFICATION_API_FILE, 'w') as file:
    with open(NOTIFICATION_API_FILE, "w") as file:
        json.dump(notifications, file, indent=4)


# Trim notifications
def remove_old(keepNumberOfEntries):
    """
@@ -90,30 +87,30 @@ def remove_old(keepNumberOfEntries):
    """
    # Check if file exists
    if not os.path.exists(NOTIFICATION_API_FILE):
        mylog('info', '[Notification] No notifications file to clean.')
        mylog("info", "[Notification] No notifications file to clean.")
        return

    # Load existing notifications
    try:
        with open(NOTIFICATION_API_FILE, 'r') as file:
        with open(NOTIFICATION_API_FILE, "r") as file:
            file_contents = file.read().strip()
            if file_contents == '':
            if file_contents == "":
                notifications = []
            else:
                notifications = json.loads(file_contents)
    except Exception as e:
        mylog('none', f'[Notification] Error reading notifications file: {e}')
        mylog("none", f"[Notification] Error reading notifications file: {e}")
        return

    if not isinstance(notifications, list):
        mylog('none', '[Notification] Invalid format: not a list')
        mylog("none", "[Notification] Invalid format: not a list")
        return

    # Sort by timestamp descending
    try:
        notifications.sort(key=lambda x: x['timestamp'], reverse=True)
        notifications.sort(key=lambda x: x["timestamp"], reverse=True)
    except KeyError:
        mylog('none', '[Notification] Missing timestamp in one or more entries')
        mylog("none", "[Notification] Missing timestamp in one or more entries")
        return

    # Trim to the latest entries
@@ -121,11 +118,14 @@ def remove_old(keepNumberOfEntries):

    # Write back the trimmed list
    try:
        with open(NOTIFICATION_API_FILE, 'w') as file:
        with open(NOTIFICATION_API_FILE, "w") as file:
            json.dump(trimmed, file, indent=4)
        mylog('verbose', f'[Notification] Trimmed notifications to latest {keepNumberOfEntries}')
        mylog(
            "verbose",
            f"[Notification] Trimmed notifications to latest {keepNumberOfEntries}",
        )
    except Exception as e:
        mylog('none', f'Error writing trimmed notifications file: {e}')
        mylog("none", f"Error writing trimmed notifications file: {e}")


def mark_all_notifications_read():
@@ -162,6 +162,7 @@ def mark_all_notifications_read():
    mylog("debug", "[Notification] All notifications marked as read.")
    return {"success": True}


def delete_notifications():
    """
    Delete all notifications from the JSON file.
@@ -194,7 +195,7 @@ def get_unread_notifications():

def mark_notification_as_read(guid=None, max_attempts=3):
    """
    Mark a notification as read based on GUID.
    Mark a notification as read based on GUID.
    If guid is None, mark all notifications as read.

    Args:
@@ -208,7 +209,9 @@ def mark_notification_as_read(guid=None, max_attempts=3):

    while attempts < max_attempts:
        try:
            if os.path.exists(NOTIFICATION_API_FILE) and os.access(NOTIFICATION_API_FILE, os.R_OK | os.W_OK):
            if os.path.exists(NOTIFICATION_API_FILE) and os.access(
                NOTIFICATION_API_FILE, os.R_OK | os.W_OK
            ):
                with open(NOTIFICATION_API_FILE, "r") as f:
                    notifications = json.load(f)

@@ -222,7 +225,7 @@ def mark_notification_as_read(guid=None, max_attempts=3):

                return {"success": True}
        except Exception as e:
            mylog("none", f"[Notification] Attempt {attempts+1} failed: {e}")
            mylog("none", f"[Notification] Attempt {attempts + 1} failed: {e}")

        attempts += 1
        time.sleep(0.5)  # Sleep 0.5 seconds before retrying
@@ -231,6 +234,7 @@ def mark_notification_as_read(guid=None, max_attempts=3):
    mylog("none", f"[Notification] {error_msg}")
    return {"success": False, "error": error_msg}


def delete_notification(guid):
    """
    Delete a notification from the notifications file based on its GUID.
@@ -263,4 +267,3 @@ def delete_notification(guid):
    except Exception as e:
        mylog("none", f"[Notification] Failed to delete notification {guid}: {e}")
        return {"success": False, "error": str(e)}

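For reference, write_notification() appends entries of this shape to user_notifications.json; a hypothetical sample of the structure (content and level are illustrative):

import datetime, json, uuid

notification = {
    "timestamp": str(datetime.datetime.now().replace(microsecond=0)),
    "guid": str(uuid.uuid4()),
    "read": 0,               # unread until mark_notification_as_read()
    "level": "info",
    "content": "[Config] Imported new settings config",
}
print(json.dumps([notification], indent=4))  # the file holds a JSON list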
@@ -1,50 +1,50 @@
#---------------------------------------------------------------------------------#
# ---------------------------------------------------------------------------------#
#  NetAlertX                                                                       #
#  Open Source Network Guard / WIFI & LAN intrusion detector                       #
#  Open Source Network Guard / WIFI & LAN intrusion detector                       #
#                                                                                  #
#  reporting.py - NetAlertX Back module. Template to email reporting in HTML format #
#---------------------------------------------------------------------------------#
# ---------------------------------------------------------------------------------#
#    Puche      2021        pi.alert.application@gmail.com   GNU GPLv3             #
#    jokob-sk   2022        jokob.sk@gmail.com               GNU GPLv3             #
#    leiweibau  2022        https://github.com/leiweibau     GNU GPLv3             #
#    cvc90      2023        https://github.com/cvc90         GNU GPLv3             #
#---------------------------------------------------------------------------------#
# ---------------------------------------------------------------------------------#

import datetime
import json
import os
import sys

# Register NetAlertX directories
INSTALL_PATH="/app"
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
sys.path.extend([f"{INSTALL_PATH}/server"])

import conf
from const import applicationPath, logPath, apiPath, confFileName
from helper import timeNowTZ, get_file_content, write_file, get_timezone_offset, get_setting_value
from logger import logResult, mylog
from helper import (
    get_timezone_offset,
    get_setting_value,
)
from logger import mylog
from db.sql_safe_builder import create_safe_condition_builder

#===============================================================================
# ===============================================================================
# REPORTING
#===============================================================================
# ===============================================================================


#-------------------------------------------------------------------------------
def get_notifications (db):
# -------------------------------------------------------------------------------
def get_notifications(db):
    sql = db.sql  # TO-DO

    sql = db.sql  # TO-DO

    # Reporting section
    mylog('verbose', ['[Notification] Check if something to report'])
    mylog("verbose", ["[Notification] Check if something to report"])

    # prepare variables for JSON construction
    # prepare variables for JSON construction
    json_new_devices = []
    json_new_devices_meta = {}
    json_down_devices = []
    json_down_devices_meta = {}
    json_down_reconnected = []
    json_down_reconnected_meta = {}
    json_events = []
    json_events = []
    json_events_meta = {}
    json_plugins = []
    json_plugins_meta = {}
@@ -52,37 +52,42 @@ def get_notifications (db):
    # Disable reporting on events for devices where reporting is disabled based on the MAC address

    # Disable notifications (except down/down reconnected) on devices where devAlertEvents is disabled
    sql.execute ("""UPDATE Events SET eve_PendingAlertEmail = 0
    sql.execute("""UPDATE Events SET eve_PendingAlertEmail = 0
                    WHERE eve_PendingAlertEmail = 1 AND eve_EventType not in ('Device Down', 'Down Reconnected', 'New Device' ) AND eve_MAC IN
                        (
                            SELECT devMac FROM Devices WHERE devAlertEvents = 0
                        )""")

    # Disable down/down reconnected notifications on devices where devAlertDown is disabled
    sql.execute ("""UPDATE Events SET eve_PendingAlertEmail = 0
    sql.execute("""UPDATE Events SET eve_PendingAlertEmail = 0
                    WHERE eve_PendingAlertEmail = 1 AND eve_EventType in ('Device Down', 'Down Reconnected') AND eve_MAC IN
                        (
                            SELECT devMac FROM Devices WHERE devAlertDown = 0
                        )""")

    sections = get_setting_value('NTFPRCS_INCLUDED_SECTIONS')

    mylog('verbose', ['[Notification] Included sections: ', sections ])
    sections = get_setting_value("NTFPRCS_INCLUDED_SECTIONS")

    if 'new_devices' in sections:
    mylog("verbose", ["[Notification] Included sections: ", sections])

    if "new_devices" in sections:
        # Compose New Devices Section (no empty lines in SQL queries!)
        # Use SafeConditionBuilder to prevent SQL injection vulnerabilities
        condition_builder = create_safe_condition_builder()
        new_dev_condition_setting = get_setting_value('NTFPRCS_new_dev_condition')

        new_dev_condition_setting = get_setting_value("NTFPRCS_new_dev_condition")

        try:
            safe_condition, parameters = condition_builder.get_safe_condition_legacy(new_dev_condition_setting)
            safe_condition, parameters = condition_builder.get_safe_condition_legacy(
                new_dev_condition_setting
            )
            sqlQuery = """SELECT eve_MAC as MAC, eve_DateTime as Datetime, devLastIP as IP, eve_EventType as "Event Type", devName as "Device name", devComments as Comments FROM Events_Devices
                            WHERE eve_PendingAlertEmail = 1
                            AND eve_EventType = 'New Device' {}
                            ORDER BY eve_DateTime""".format(safe_condition)
        except Exception as e:
            mylog('verbose', ['[Notification] Error building safe condition for new devices: ', e])
            mylog(
                "verbose",
                ["[Notification] Error building safe condition for new devices: ", e],
            )
            # Fall back to safe default (no additional conditions)
            sqlQuery = """SELECT eve_MAC as MAC, eve_DateTime as Datetime, devLastIP as IP, eve_EventType as "Event Type", devName as "Device name", devComments as Comments FROM Events_Devices
                            WHERE eve_PendingAlertEmail = 1
@@ -90,23 +95,23 @@ def get_notifications (db):
                            ORDER BY eve_DateTime"""
            parameters = {}

        mylog('debug', ['[Notification] new_devices SQL query: ', sqlQuery ])
        mylog('debug', ['[Notification] new_devices parameters: ', parameters ])
        mylog("debug", ["[Notification] new_devices SQL query: ", sqlQuery])
        mylog("debug", ["[Notification] new_devices parameters: ", parameters])

        # Get the events as JSON using parameterized query
        json_obj = db.get_table_as_json(sqlQuery, parameters)

        json_new_devices_meta = {
            "title": "🆕 New devices",
            "columnNames": json_obj.columnNames
            "columnNames": json_obj.columnNames,
        }

        json_new_devices = json_obj.json["data"]
        json_new_devices = json_obj.json["data"]

    if 'down_devices' in sections:
        # Compose Devices Down Section
    if "down_devices" in sections:
        # Compose Devices Down Section
        # - select only Down Alerts with pending email of devices that didn't reconnect within the specified time window
        minutes = int(get_setting_value('NTFPRCS_alert_down_time') or 0)
        minutes = int(get_setting_value("NTFPRCS_alert_down_time") or 0)
        tz_offset = get_timezone_offset()
        sqlQuery = f"""
            SELECT devName, eve_MAC, devVendor, eve_IP, eve_DateTime, eve_EventType
@@ -123,54 +128,68 @@ def get_notifications (db):
            )
            ORDER BY down_events.eve_DateTime;
        """

        # Get the events as JSON

        # Get the events as JSON
        json_obj = db.get_table_as_json(sqlQuery)

        json_down_devices_meta = {
        json_down_devices_meta = {
            "title": "🔴 Down devices",
            "columnNames": json_obj.columnNames
            "columnNames": json_obj.columnNames,
        }
        json_down_devices = json_obj.json["data"]
        json_down_devices = json_obj.json["data"]

        mylog('debug', ['[Notification] json_down_devices: ', json.dumps(json_down_devices) ])

    if 'down_reconnected' in sections:
        # Compose Reconnected Down Section
        # - select only Devices that were previously down and are now Connected
        sqlQuery = f"""
        mylog(
            "debug",
            ["[Notification] json_down_devices: ", json.dumps(json_down_devices)],
        )

    if "down_reconnected" in sections:
        # Compose Reconnected Down Section
        # - select only Devices that were previously down and are now Connected
        sqlQuery = """
            SELECT devName, eve_MAC, devVendor, eve_IP, eve_DateTime, eve_EventType
            FROM Events_Devices AS reconnected_devices
            WHERE reconnected_devices.eve_EventType = 'Down Reconnected'
            AND reconnected_devices.eve_PendingAlertEmail = 1
            ORDER BY reconnected_devices.eve_DateTime;
        """

        # Get the events as JSON

        # Get the events as JSON
        json_obj = db.get_table_as_json(sqlQuery)

        json_down_reconnected_meta = {
            "title": "🔁 Reconnected down devices",
            "columnNames": json_obj.columnNames
            "columnNames": json_obj.columnNames,
        }
        json_down_reconnected = json_obj.json["data"]
        json_down_reconnected = json_obj.json["data"]

        mylog('debug', ['[Notification] json_down_reconnected: ', json.dumps(json_down_reconnected) ])
        mylog(
            "debug",
            [
                "[Notification] json_down_reconnected: ",
                json.dumps(json_down_reconnected),
            ],
        )

    if 'events' in sections:
    if "events" in sections:
        # Compose Events Section (no empty lines in SQL queries!)
        # Use SafeConditionBuilder to prevent SQL injection vulnerabilities
        condition_builder = create_safe_condition_builder()
        event_condition_setting = get_setting_value('NTFPRCS_event_condition')

        event_condition_setting = get_setting_value("NTFPRCS_event_condition")

        try:
            safe_condition, parameters = condition_builder.get_safe_condition_legacy(event_condition_setting)
            safe_condition, parameters = condition_builder.get_safe_condition_legacy(
                event_condition_setting
            )
            sqlQuery = """SELECT eve_MAC as MAC, eve_DateTime as Datetime, devLastIP as IP, eve_EventType as "Event Type", devName as "Device name", devComments as Comments FROM Events_Devices
                            WHERE eve_PendingAlertEmail = 1
                            AND eve_EventType IN ('Connected', 'Down Reconnected', 'Disconnected','IP Changed') {}
                            ORDER BY eve_DateTime""".format(safe_condition)
        except Exception as e:
            mylog('verbose', ['[Notification] Error building safe condition for events: ', e])
            mylog(
                "verbose",
                ["[Notification] Error building safe condition for events: ", e],
            )
            # Fall back to safe default (no additional conditions)
            sqlQuery = """SELECT eve_MAC as MAC, eve_DateTime as Datetime, devLastIP as IP, eve_EventType as "Event Type", devName as "Device name", devComments as Comments FROM Events_Devices
                            WHERE eve_PendingAlertEmail = 1
@@ -178,51 +197,43 @@ def get_notifications (db):
                            ORDER BY eve_DateTime"""
            parameters = {}

        mylog('debug', ['[Notification] events SQL query: ', sqlQuery ])
        mylog('debug', ['[Notification] events parameters: ', parameters ])

        mylog("debug", ["[Notification] events SQL query: ", sqlQuery])
        mylog("debug", ["[Notification] events parameters: ", parameters])

        # Get the events as JSON using parameterized query
        json_obj = db.get_table_as_json(sqlQuery, parameters)

        json_events_meta = {
            "title": "⚡ Events",
            "columnNames": json_obj.columnNames
        }
        json_events = json_obj.json["data"]
        json_events_meta = {"title": "⚡ Events", "columnNames": json_obj.columnNames}
        json_events = json_obj.json["data"]

    if 'plugins' in sections:
    if "plugins" in sections:
        # Compose Plugins Section
        sqlQuery = """SELECT Plugin, Object_PrimaryId, Object_SecondaryId, DateTimeChanged, Watched_Value1, Watched_Value2, Watched_Value3, Watched_Value4, Status from Plugins_Events"""

        # Get the events as JSON
        sqlQuery = """SELECT Plugin, Object_PrimaryId, Object_SecondaryId, DateTimeChanged, Watched_Value1, Watched_Value2, Watched_Value3, Watched_Value4, Status from Plugins_Events"""

        # Get the events as JSON
        json_obj = db.get_table_as_json(sqlQuery)

        json_plugins_meta = {
            "title": "🔌 Plugins",
            "columnNames": json_obj.columnNames
        }
        json_plugins = json_obj.json["data"]
        json_plugins_meta = {"title": "🔌 Plugins", "columnNames": json_obj.columnNames}
        json_plugins = json_obj.json["data"]


    final_json = {
        "new_devices": json_new_devices,
        "new_devices_meta": json_new_devices_meta,
        "down_devices": json_down_devices,
        "down_devices_meta": json_down_devices_meta,
        "down_reconnected": json_down_reconnected,
        "down_reconnected_meta": json_down_reconnected_meta,
        "events": json_events,
        "events_meta": json_events_meta,
        "plugins": json_plugins,
        "plugins_meta": json_plugins_meta,
    }
    final_json = {
        "new_devices": json_new_devices,
        "new_devices_meta": json_new_devices_meta,
        "down_devices": json_down_devices,
        "down_devices_meta": json_down_devices_meta,
        "down_reconnected": json_down_reconnected,
        "down_reconnected_meta": json_down_reconnected_meta,
        "events": json_events,
        "events_meta": json_events_meta,
        "plugins": json_plugins,
        "plugins_meta": json_plugins_meta,
    }

    return final_json


#-------------------------------------------------------------------------------
def skip_repeated_notifications (db):
# -------------------------------------------------------------------------------
def skip_repeated_notifications(db):
    """
    Skips sending alerts for devices recently notified.

@@ -235,9 +246,9 @@ def skip_repeated_notifications (db):

    # Skip repeated notifications
    # due to strftime overflow --> use "strftime / 60"
    mylog('verbose','[Skip Repeated Notifications] Skip Repeated')

    db.sql.execute ("""UPDATE Events SET eve_PendingAlertEmail = 0
    mylog("verbose", "[Skip Repeated Notifications] Skip Repeated")

    db.sql.execute("""UPDATE Events SET eve_PendingAlertEmail = 0
                    WHERE eve_PendingAlertEmail = 1 AND eve_MAC IN
                        (
                            SELECT devMac FROM Devices
@@ -247,12 +258,6 @@ def skip_repeated_notifications (db):
                                        devSkipRepeated * 60) >
                                        (strftime('%s','now','localtime')/60 )
                        )
                    """ )

                    """)

    db.commitDB()

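The "strftime / 60" comment above refers to comparing epoch times in whole minutes instead of seconds; a quick SQLite illustration of the expression used in the skip-repeated query:

import sqlite3

con = sqlite3.connect(":memory:")
minutes_now = con.execute(
    "SELECT strftime('%s','now','localtime') / 60"  # minutes since epoch
).fetchone()[0]
print(minutes_now)  # integer minute count, safe to add devSkipRepeated to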
@@ -1,14 +1,9 @@
import sys

# Register NetAlertX directories
INSTALL_PATH="/app"
sys.path.extend([f"{INSTALL_PATH}/server"])

from logger import mylog

#-------------------------------------------------------------------------------

# -------------------------------------------------------------------------------
# Device object handling (WIP)
#-------------------------------------------------------------------------------
# -------------------------------------------------------------------------------
class DeviceInstance:
    def __init__(self, db):
        self.db = db
@@ -19,7 +14,7 @@ class DeviceInstance:
            SELECT * FROM Devices
        """)
        return self.db.sql.fetchall()


    # Get all with unknown names
    def getUnknown(self):
        self.db.sql.execute("""
@@ -29,7 +24,6 @@ class DeviceInstance:

    # Get specific column value based on devMac
    def getValueWithMac(self, column_name, devMac):

        query = f"SELECT {column_name} FROM Devices WHERE devMac = ?"
        self.db.sql.execute(query, (devMac,))
        result = self.db.sql.fetchone()
@@ -41,7 +35,7 @@ class DeviceInstance:
            SELECT * FROM Devices WHERE devAlertDown = 1 and devPresentLastScan = 0
        """)
        return self.db.sql.fetchall()


    # Get all down
    def getOffline(self):
        self.db.sql.execute("""
@@ -57,7 +51,9 @@ class DeviceInstance:

    # Check if a device exists by devGUID
    def exists(self, devGUID):
        self.db.sql.execute("SELECT COUNT(*) AS count FROM Devices WHERE devGUID = ?", (devGUID,))
        self.db.sql.execute(
            "SELECT COUNT(*) AS count FROM Devices WHERE devGUID = ?", (devGUID,)
        )
        result = self.db.sql.fetchone()
        return result["count"] > 0

@@ -65,20 +61,23 @@ class DeviceInstance:
    def updateField(self, devGUID, field, value):
        if not self.exists(devGUID):
            m = f"[Device] In 'updateField': GUID {devGUID} not found."
            mylog('none', m)
            mylog("none", m)
            raise ValueError(m)

        self.db.sql.execute(f"""
        self.db.sql.execute(
            f"""
            UPDATE Devices SET {field} = ? WHERE devGUID = ?
        """, (value, devGUID))
        """,
            (value, devGUID),
        )
        self.db.commitDB()

    # Delete a device by devGUID
    def delete(self, devGUID):
        if not self.exists(devGUID):
            m = f"[Device] In 'delete': GUID {devGUID} not found."
            mylog('none', m)
            mylog("none", m)
            raise ValueError(m)

        self.db.sql.execute("DELETE FROM Devices WHERE devGUID = ?", (devGUID,))
        self.db.commitDB()
        self.db.commitDB()

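updateField() above interpolates the column name into the SQL (identifiers cannot be bound as parameters) while still binding the value. A sketch of the same pattern, with a hypothetical allow-list guarding the identifier (the allow-list is not in the original):

import sqlite3

con = sqlite3.connect(":memory:")
con.execute("CREATE TABLE Devices (devGUID TEXT, devName TEXT)")
con.execute("INSERT INTO Devices VALUES ('g1', 'printer')")

ALLOWED_FIELDS = {"devName"}  # hypothetical guard, not in the original code

def update_field(devGUID, field, value):
    if field not in ALLOWED_FIELDS:   # only the value can be a ? placeholder
        raise ValueError(field)
    con.execute(f"UPDATE Devices SET {field} = ? WHERE devGUID = ?", (value, devGUID))

update_field("g1", "devName", "router")
print(con.execute("SELECT devName FROM Devices").fetchone()[0])  # -> router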
@@ -1,25 +1,22 @@
import json
import sys
import uuid
import socket
import subprocess
from yattag import indent
from json2table import convert

# Register NetAlertX directories
INSTALL_PATH = "/app"
sys.path.extend([f"{INSTALL_PATH}/server"])

# Register NetAlertX modules
import conf
from const import applicationPath, logPath, apiPath, reportTemplatesPath
from logger import mylog, Logger
from helper import generate_mac_links, \
    removeDuplicateNewLines, \
    timeNowTZ, \
    write_file, \
    get_setting_value, \
    get_timezone_offset
from helper import (
    generate_mac_links,
    removeDuplicateNewLines,
    timeNowTZ,
    write_file,
    get_setting_value,
    get_timezone_offset,
)
from messaging.in_app import write_notification


@@ -47,38 +44,42 @@ class NotificationInstance:
        """)

        # Make sure log level is initialized correctly
        Logger(get_setting_value('LOG_LEVEL'))
        Logger(get_setting_value("LOG_LEVEL"))

        self.save()

    # Method to override processing of notifications
    def on_before_create(self, JSON, Extra):

        return JSON, Extra

    # Create a new DB entry if new notifications available, otherwise skip
    def create(self, JSON, Extra=""):

        JSON, Extra = self.on_before_create(JSON, Extra)

        # Write output data for debug
        write_file(logPath + '/report_output.json', json.dumps(JSON))
        write_file(logPath + "/report_output.json", json.dumps(JSON))

        # Check if nothing to report, end
        if JSON["new_devices"] == [] and JSON["down_devices"] == [] and JSON["events"] == [] and JSON["plugins"] == [] and JSON["down_reconnected"] == []:
        if (
            JSON["new_devices"] == []
            and JSON["down_devices"] == []
            and JSON["events"] == []
            and JSON["plugins"] == []
            and JSON["down_reconnected"] == []
        ):
            self.HasNotifications = False
        else:
            self.HasNotifications = True

        self.GUID = str(uuid.uuid4())
        self.DateTimeCreated = timeNowTZ()
        self.DateTimePushed = ""
        self.Status = "new"
        self.JSON = JSON
        self.Text = ""
        self.HTML = ""
        self.PublishedVia = ""
        self.Extra = Extra
        self.GUID = str(uuid.uuid4())
        self.DateTimeCreated = timeNowTZ()
        self.DateTimePushed = ""
        self.Status = "new"
        self.JSON = JSON
        self.Text = ""
        self.HTML = ""
        self.PublishedVia = ""
        self.Extra = Extra

        if self.HasNotifications:
            # if not notiStruc.json['data'] and not notiStruc.text and not notiStruc.html:
@@ -88,136 +89,130 @@ class NotificationInstance:

            Text = ""
            HTML = ""
            template_file_path = reportTemplatesPath + 'report_template.html'
            template_file_path = reportTemplatesPath + "report_template.html"

            # Open text Template
            mylog('verbose', ['[Notification] Open text Template'])
            template_file = open(reportTemplatesPath + 'report_template.txt', 'r')
            mylog("verbose", ["[Notification] Open text Template"])
            template_file = open(reportTemplatesPath + "report_template.txt", "r")
            mail_text = template_file.read()
            template_file.close()

            # Open html Template
            mylog('verbose', ['[Notification] Open html Template'])
            mylog("verbose", ["[Notification] Open html Template"])

            template_file = open(template_file_path, 'r')
            template_file = open(template_file_path, "r")
            mail_html = template_file.read()
            template_file.close()

            # prepare new version text
            newVersionText = ''
            newVersionText = ""
            if conf.newVersionAvailable:
                newVersionText = '🚀A new version is available.'
                newVersionText = "🚀A new version is available."

            mail_text = mail_text.replace('<NEW_VERSION>', newVersionText)
            mail_html = mail_html.replace('<NEW_VERSION>', newVersionText)
            mail_text = mail_text.replace("<NEW_VERSION>", newVersionText)
            mail_html = mail_html.replace("<NEW_VERSION>", newVersionText)

            # Report "REPORT_DATE" in Header & footer
            timeFormated = timeNowTZ().strftime('%Y-%m-%d %H:%M')
            mail_text = mail_text.replace('<REPORT_DATE>', timeFormated)
            mail_html = mail_html.replace('<REPORT_DATE>', timeFormated)
            timeFormated = timeNowTZ().strftime("%Y-%m-%d %H:%M")
            mail_text = mail_text.replace("<REPORT_DATE>", timeFormated)
            mail_html = mail_html.replace("<REPORT_DATE>", timeFormated)

            # Report "SERVER_NAME" in Header & footer
            mail_text = mail_text.replace('<SERVER_NAME>', socket.gethostname())
            mail_html = mail_html.replace('<SERVER_NAME>', socket.gethostname())
            mail_text = mail_text.replace("<SERVER_NAME>", socket.gethostname())
            mail_html = mail_html.replace("<SERVER_NAME>", socket.gethostname())

            # Report "VERSION" in Header & footer
            try:
                VERSIONFILE = subprocess.check_output(
                    ['php', applicationPath + '/front/php/templates/version.php'],
                    timeout=5
                ).decode('utf-8')
                    ["php", applicationPath + "/front/php/templates/version.php"],
                    timeout=5,
                ).decode("utf-8")
            except Exception as e:
                mylog('debug', [f'[Notification] Unable to read version.php: {e}'])
                VERSIONFILE = 'unknown'
                mylog("debug", [f"[Notification] Unable to read version.php: {e}"])
                VERSIONFILE = "unknown"

            mail_text = mail_text.replace('<BUILD_VERSION>', VERSIONFILE)
            mail_html = mail_html.replace('<BUILD_VERSION>', VERSIONFILE)
            mail_text = mail_text.replace("<BUILD_VERSION>", VERSIONFILE)
            mail_html = mail_html.replace("<BUILD_VERSION>", VERSIONFILE)

            # Report "BUILD" in Header & footer
            try:
                BUILDFILE = subprocess.check_output(
                    ['php', applicationPath + '/front/php/templates/build.php'],
                    timeout=5
                ).decode('utf-8')
                    ["php", applicationPath + "/front/php/templates/build.php"],
                    timeout=5,
                ).decode("utf-8")
            except Exception as e:
                mylog('debug', [f'[Notification] Unable to read build.php: {e}'])
                BUILDFILE = 'unknown'
                mylog("debug", [f"[Notification] Unable to read build.php: {e}"])
                BUILDFILE = "unknown"

            mail_text = mail_text.replace('<BUILD_DATE>', BUILDFILE)
            mail_html = mail_html.replace('<BUILD_DATE>', BUILDFILE)
            mail_text = mail_text.replace("<BUILD_DATE>", BUILDFILE)
            mail_html = mail_html.replace("<BUILD_DATE>", BUILDFILE)

            # Start generating the TEXT & HTML notification messages
            # new_devices
            # ---
            html, text = construct_notifications(self.JSON, "new_devices")

            mail_text = mail_text.replace('<NEW_DEVICES_TABLE>', text + '\n')
            mail_html = mail_html.replace('<NEW_DEVICES_TABLE>', html)
            mylog('verbose', ['[Notification] New Devices sections done.'])
            mail_text = mail_text.replace("<NEW_DEVICES_TABLE>", text + "\n")
            mail_html = mail_html.replace("<NEW_DEVICES_TABLE>", html)
            mylog("verbose", ["[Notification] New Devices sections done."])

            # down_devices
            # ---
            html, text = construct_notifications(self.JSON, "down_devices")

            mail_text = mail_text.replace('<DOWN_DEVICES_TABLE>', text + '\n')
            mail_html = mail_html.replace('<DOWN_DEVICES_TABLE>', html)
            mylog('verbose', ['[Notification] Down Devices sections done.'])
            mail_text = mail_text.replace("<DOWN_DEVICES_TABLE>", text + "\n")
            mail_html = mail_html.replace("<DOWN_DEVICES_TABLE>", html)
            mylog("verbose", ["[Notification] Down Devices sections done."])

            # down_reconnected
            # ---
            html, text = construct_notifications(self.JSON, "down_reconnected")

            mail_text = mail_text.replace('<DOWN_RECONNECTED_TABLE>', text + '\n')
            mail_html = mail_html.replace('<DOWN_RECONNECTED_TABLE>', html)
            mylog('verbose', ['[Notification] Reconnected Down Devices sections done.'])

            mail_text = mail_text.replace("<DOWN_RECONNECTED_TABLE>", text + "\n")
            mail_html = mail_html.replace("<DOWN_RECONNECTED_TABLE>", html)
            mylog("verbose", ["[Notification] Reconnected Down Devices sections done."])

            # events
            # ---
            html, text = construct_notifications(self.JSON, "events")

            mail_text = mail_text.replace('<EVENTS_TABLE>', text + '\n')
            mail_html = mail_html.replace('<EVENTS_TABLE>', html)
            mylog('verbose', ['[Notification] Events sections done.'])

            mail_text = mail_text.replace("<EVENTS_TABLE>", text + "\n")
            mail_html = mail_html.replace("<EVENTS_TABLE>", html)
            mylog("verbose", ["[Notification] Events sections done."])

            # plugins
            # ---
            html, text = construct_notifications(self.JSON, "plugins")

            mail_text = mail_text.replace('<PLUGINS_TABLE>', text + '\n')
            mail_html = mail_html.replace('<PLUGINS_TABLE>', html)
            mail_text = mail_text.replace("<PLUGINS_TABLE>", text + "\n")
            mail_html = mail_html.replace("<PLUGINS_TABLE>", html)

            mylog('verbose', ['[Notification] Plugins sections done.'])
            mylog("verbose", ["[Notification] Plugins sections done."])

            final_text = removeDuplicateNewLines(mail_text)

            # Create clickable MAC links
            mail_html = generate_mac_links(mail_html, conf.REPORT_DASHBOARD_URL + '/deviceDetails.php?mac=')
            mail_html = generate_mac_links(
                mail_html, conf.REPORT_DASHBOARD_URL + "/deviceDetails.php?mac="
            )

            final_html = indent(
                mail_html,
                indentation='  ',
                newline='\r\n',
                indent_text=True
                mail_html, indentation="  ", newline="\r\n", indent_text=True
            )

            send_api(self.JSON, final_text, final_html)

            # Write output data for debug
            write_file(logPath + '/report_output.txt', final_text)
            write_file(logPath + '/report_output.html', final_html)
            write_file(logPath + "/report_output.txt", final_text)
            write_file(logPath + "/report_output.html", final_html)

            mylog('minimal', ['[Notification] Updating API files'])
            mylog("minimal", ["[Notification] Updating API files"])

            self.Text = final_text
            self.HTML = final_html
            self.Text = final_text
            self.HTML = final_html

            # Notify frontend
            write_notification(f'Report:{self.GUID}', "alert", self.DateTimeCreated)
            write_notification(f"Report:{self.GUID}", "alert", self.DateTimeCreated)

            self.upsert()

@@ -236,20 +231,36 @@ class NotificationInstance:

    # create or update a notification
    def upsert(self):
        self.db.sql.execute("""
        self.db.sql.execute(
            """
            INSERT OR REPLACE INTO Notifications (GUID, DateTimeCreated, DateTimePushed, Status, JSON, Text, HTML, PublishedVia, Extra)
            VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)
        """, (self.GUID, self.DateTimeCreated, self.DateTimePushed, self.Status, json.dumps(self.JSON), self.Text, self.HTML, self.PublishedVia, self.Extra))
        """,
            (
                self.GUID,
                self.DateTimeCreated,
                self.DateTimePushed,
                self.Status,
                json.dumps(self.JSON),
                self.Text,
                self.HTML,
                self.PublishedVia,
                self.Extra,
            ),
        )

        self.save()

    # Remove notification object by GUID
    def remove(self, GUID):
        # Execute an SQL query to delete the notification with the specified GUID
        self.db.sql.execute("""
        self.db.sql.execute(
            """
            DELETE FROM Notifications
            WHERE GUID = ?
        """, (GUID,))
        """,
            (GUID,),
        )
        self.save()

    # Get all with the "new" status
@@ -262,7 +273,6 @@ class NotificationInstance:

    # Set all to "processed" status
    def setAllProcessed(self):

        # Execute an SQL query to update the status of all notifications
        self.db.sql.execute("""
            UPDATE Notifications
@@ -274,15 +284,17 @@ class NotificationInstance:

    # Clear the Pending Email flag from all events and devices
    def clearPendingEmailFlag(self):

        # Clean Pending Alert Events
        self.db.sql.execute("""
        self.db.sql.execute(
            """
            UPDATE Devices SET devLastNotification = ?
            WHERE devMac IN (
                SELECT eve_MAC FROM Events
                WHERE eve_PendingAlertEmail = 1
            )
        """, (timeNowTZ(),))
        """,
            (timeNowTZ(),),
        )

        self.db.sql.execute("""
            UPDATE Events SET eve_PendingAlertEmail = 0
@@ -290,23 +302,26 @@ class NotificationInstance:
            AND eve_EventType !='Device Down' """)

        # Clear down events flag after the reporting window passed
        minutes = int(get_setting_value('NTFPRCS_alert_down_time') or 0)
        minutes = int(get_setting_value("NTFPRCS_alert_down_time") or 0)
        tz_offset = get_timezone_offset()
        self.db.sql.execute("""
        self.db.sql.execute(
            """
            UPDATE Events
            SET eve_PendingAlertEmail = 0
            WHERE eve_PendingAlertEmail = 1
            AND eve_EventType = 'Device Down'
            AND eve_DateTime < datetime('now', ?, ?)
        """, (f"-{minutes} minutes", tz_offset))
        """,
            (f"-{minutes} minutes", tz_offset),
        )

        mylog('minimal', ['[Notification] Notifications changes: ',
                          self.db.sql.rowcount])
        mylog(
            "minimal", ["[Notification] Notifications changes: ", self.db.sql.rowcount]
        )

        # clear plugin events
        self.clearPluginEvents()


    def clearPluginEvents(self):
        # clear plugin events table
        self.db.sql.execute("DELETE FROM Plugins_Events")
@@ -321,20 +336,20 @@ class NotificationInstance:
# Reporting
# -----------------------------------------------------------------------------


# ------------------------------------------------------------------------------
def construct_notifications(JSON, section):

    jsn = JSON[section]

    # Return if empty
    if jsn == []:
        return '', ''
        return "", ""

    tableTitle = JSON[section + "_meta"]["title"]
    headers = JSON[section + "_meta"]["columnNames"]
    tableTitle = JSON[section + "_meta"]["title"]
    headers = JSON[section + "_meta"]["columnNames"]

    html = ''
    text = ''
    html = ""
    text = ""

    table_attributes = {
        "style": "border-collapse: collapse; font-size: 12px; color:#70707",
@@ -342,28 +357,32 @@ def construct_notifications(JSON, section):
        "cellspacing": 0,
        "cellpadding": "3px",
        "bordercolor": "#C0C0C0",
        "border": "1"
    }
    headerProps = "width='120px' style='color:white; font-size: 16px;' bgcolor='#64a0d6' "
        "border": "1",
    }
    headerProps = (
        "width='120px' style='color:white; font-size: 16px;' bgcolor='#64a0d6' "
    )
    thProps = "width='120px' style='color:#F0F0F0' bgcolor='#64a0d6' "

    build_direction = "TOP_TO_BOTTOM"
    text_line = '{}\t{}\n'
    text_line = "{}\t{}\n"

    if len(jsn) > 0:
        text = tableTitle + "\n---------\n"

        # Convert a JSON into an HTML table
        html = convert({"data": jsn}, build_direction=build_direction, table_attributes=table_attributes)
        html = convert(
            {"data": jsn},
            build_direction=build_direction,
            table_attributes=table_attributes,
        )

        # Cleanup the generated HTML table notification
        html = format_table(html,
                            "data",
                            headerProps,
                            tableTitle).replace('<ul>',
                                                '<ul style="list-style:none;padding-left:0">'
                                                ).replace("<td>null</td>",
                                                          "<td></td>")
        html = (
            format_table(html, "data", headerProps, tableTitle)
            .replace("<ul>", '<ul style="list-style:none;padding-left:0">')
            .replace("<td>null</td>", "<td></td>")
        )

        # prepare text-only message
        for device in jsn:
@@ -371,8 +390,8 @@ def construct_notifications(JSON, section):
                padding = ""
                if len(header) < 4:
                    padding = "\t"
                text += text_line.format(header + ': ' + padding, device[header])
            text += '\n'
                text += text_line.format(header + ": " + padding, device[header])
            text += "\n"

        # Format HTML table headers
        for header in headers:
@@ -383,18 +402,19 @@ def construct_notifications(JSON, section):

# -----------------------------------------------------------------------------
def send_api(json_final, mail_text, mail_html):
    mylog('verbose', ['[Send API] Updating notification_* files in ', apiPath])
    mylog("verbose", ["[Send API] Updating notification_* files in ", apiPath])

    write_file(apiPath + 'notification_text.txt', mail_text)
    write_file(apiPath + 'notification_text.html', mail_html)
    write_file(apiPath + 'notification_json_final.json', json.dumps(json_final))
    write_file(apiPath + "notification_text.txt", mail_text)
    write_file(apiPath + "notification_text.html", mail_html)
    write_file(apiPath + "notification_json_final.json", json.dumps(json_final))


# -----------------------------------------------------------------------------
# Replacing table headers
def format_table(html, thValue, props, newThValue=''):

    if newThValue == '':
def format_table(html, thValue, props, newThValue=""):
    if newThValue == "":
        newThValue = thValue

    return html.replace("<th>"+thValue+"</th>", "<th "+props+" >"+newThValue+"</th>")
    return html.replace(
        "<th>" + thValue + "</th>", "<th " + props + " >" + newThValue + "</th>"
    )

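A toy run of the placeholder substitution that create() performs on the report templates (the template string and values here are hypothetical):

mail_text = "NetAlertX report <REPORT_DATE> on <SERVER_NAME>\n<NEW_DEVICES_TABLE>"
mail_text = mail_text.replace("<REPORT_DATE>", "2025-01-01 10:00")
mail_text = mail_text.replace("<SERVER_NAME>", "nax-host")
mail_text = mail_text.replace("<NEW_DEVICES_TABLE>", "🆕 New devices\n---------\n")
print(mail_text)  # each <PLACEHOLDER> is swapped for its rendered section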
@@ -1,14 +1,9 @@
import sys

# Register NetAlertX directories
INSTALL_PATH="/app"
sys.path.extend([f"{INSTALL_PATH}/server"])

from logger import mylog

#-------------------------------------------------------------------------------

# -------------------------------------------------------------------------------
# Plugin object handling (WIP)
#-------------------------------------------------------------------------------
# -------------------------------------------------------------------------------
class PluginObjectInstance:
    def __init__(self, db):
        self.db = db
@@ -19,16 +14,21 @@ class PluginObjectInstance:
            SELECT * FROM Plugins_Objects
        """)
        return self.db.sql.fetchall()


    # Get plugin object by ObjectGUID
    def getByGUID(self, ObjectGUID):
        self.db.sql.execute("SELECT * FROM Plugins_Objects WHERE ObjectGUID = ?", (ObjectGUID,))
        self.db.sql.execute(
            "SELECT * FROM Plugins_Objects WHERE ObjectGUID = ?", (ObjectGUID,)
        )
        result = self.db.sql.fetchone()
        return dict(result) if result else None

    # Check if a plugin object exists by ObjectGUID
    def exists(self, ObjectGUID):
        self.db.sql.execute("SELECT COUNT(*) AS count FROM Plugins_Objects WHERE ObjectGUID = ?", (ObjectGUID,))
        self.db.sql.execute(
            "SELECT COUNT(*) AS count FROM Plugins_Objects WHERE ObjectGUID = ?",
            (ObjectGUID,),
        )
        result = self.db.sql.fetchone()
        return result["count"] > 0

@@ -36,30 +36,35 @@ class PluginObjectInstance:
    def getByPlugin(self, plugin):
        self.db.sql.execute("SELECT * FROM Plugins_Objects WHERE Plugin = ?", (plugin,))
        return self.db.sql.fetchall()


    # Get objects by status
    def getByStatus(self, status):
        self.db.sql.execute("SELECT * FROM Plugins_Objects WHERE Status = ?", (status,))
        return self.db.sql.fetchall()


    # Update a specific field for a plugin object
    def updateField(self, ObjectGUID, field, value):
        if not self.exists(ObjectGUID):
            m = f"[PluginObject] In 'updateField': GUID {ObjectGUID} not found."
            mylog('none', m)
            mylog("none", m)
            raise ValueError(m)

        self.db.sql.execute(f"""
        self.db.sql.execute(
            f"""
            UPDATE Plugins_Objects SET {field} = ? WHERE ObjectGUID = ?
        """, (value, ObjectGUID))
        """,
            (value, ObjectGUID),
        )
        self.db.commitDB()


    # Delete a plugin object by ObjectGUID
    def delete(self, ObjectGUID):
        if not self.exists(ObjectGUID):
            m = f"[PluginObject] In 'delete': GUID {ObjectGUID} not found."
            mylog('none', m)
            mylog("none", m)
            raise ValueError(m)

        self.db.sql.execute("DELETE FROM Plugins_Objects WHERE ObjectGUID = ?", (ObjectGUID,))
        self.db.sql.execute(
            "DELETE FROM Plugins_Objects WHERE ObjectGUID = ?", (ObjectGUID,)
        )
        self.db.commitDB()

@@ -1,14 +1,9 @@
|
||||
import os
|
||||
import sys
|
||||
|
||||
# Register NetAlertX directories
|
||||
INSTALL_PATH="/app"
|
||||
sys.path.extend([f"{INSTALL_PATH}/server"])
|
||||
|
||||
# Register NetAlertX modules
|
||||
from const import pluginsPath, logPath, applicationPath, reportTemplatesPath
|
||||
from const import logPath
|
||||
from logger import mylog
|
||||
|
||||
|
||||
class UserEventsQueueInstance:
|
||||
"""
|
||||
Handles the execution queue log file, allowing reading, writing,
|
||||
@@ -19,12 +14,11 @@ class UserEventsQueueInstance:
|
||||
self.log_path = logPath
|
||||
self.log_file = os.path.join(self.log_path, "execution_queue.log")
|
||||
|
||||
|
||||
def has_update_devices(self):
|
||||
lines = self.read_log()
|
||||
|
||||
for line in lines:
|
||||
if 'update_api|devices' in line:
|
||||
if "update_api|devices" in line:
|
||||
return True
|
||||
|
||||
return False
|
||||
@@ -35,7 +29,10 @@ class UserEventsQueueInstance:
|
||||
Returns an empty list if the file doesn't exist.
|
||||
"""
|
||||
if not os.path.exists(self.log_file):
|
||||
mylog('none', ['[UserEventsQueueInstance] Log file not found: ', self.log_file])
|
||||
mylog(
|
||||
"none",
|
||||
["[UserEventsQueueInstance] Log file not found: ", self.log_file],
|
||||
)
|
||||
return [] # No log file, return empty list
|
||||
with open(self.log_file, "r") as file:
|
||||
return file.readlines()
|
||||
@@ -64,7 +61,9 @@ class UserEventsQueueInstance:
|
||||
# Process the log file line by line
|
||||
with open(self.log_file, "r") as file:
|
||||
for line in file:
|
||||
columns = line.strip().split('|')[2:4] # Extract event and param columns
|
||||
columns = line.strip().split("|")[
|
||||
2:4
|
||||
] # Extract event and param columns
|
||||
if len(columns) == 2:
|
||||
event_name, _ = columns
|
||||
if event_name == event and not removed:
|
||||
@@ -76,10 +75,6 @@ class UserEventsQueueInstance:
|
||||
# Write back the remaining lines
|
||||
self.write_log(updated_lines)
|
||||
|
||||
|
||||
mylog('minimal', ['[UserEventsQueueInstance] Processed event: ', event])
|
||||
mylog("minimal", ["[UserEventsQueueInstance] Processed event: ", event])
|
||||
|
||||
return removed
|
||||
|
||||
|
||||
|
||||
|
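A quick sketch of the pipe-delimited parsing the queue reader relies on above. The code only assumes "event at index 2, parameter at index 3"; the full line layout here is a made-up example:

# Hypothetical queue line; only the [2:4] slice (event, param) is used above.
line = "2025-01-01 10:00:00|some-guid|update_api|devices"
event_name, param = line.strip().split("|")[2:4]
print(event_name, param)  # update_api devices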
server/plugin.py (1063 changes): diff suppressed because it is too large.
@@ -1,16 +1,20 @@
import os
import json

import conf
import conf
from logger import mylog
from const import pluginsPath, logPath, apiPath
from helper import timeNowTZ, get_file_content, write_file, get_setting, get_setting_value, setting_value_to_python_type
from app_state import updateState
from crypto_utils import decrypt_data, generate_deterministic_guid
from const import pluginsPath, apiPath
from helper import (
get_file_content,
get_setting_value,
setting_value_to_python_type,
)
from crypto_utils import decrypt_data

module_name = 'Plugin utils'
module_name = "Plugin utils"

#-------------------------------------------------------------------------------

# -------------------------------------------------------------------------------
def logEventStatusCounts(objName, pluginEvents):
status_counts = {} # Dictionary to store counts for each status

@@ -22,91 +26,96 @@ def logEventStatusCounts(objName, pluginEvents):
status_counts[status] = 1

for status, count in status_counts.items():
mylog('debug', [f'[{module_name}] In {objName} there are {count} events with the status "{status}" '])
mylog(
"debug",
[
f'[{module_name}] In {objName} there are {count} events with the status "{status}" '
],
)

#-------------------------------------------------------------------------------
def print_plugin_info(plugin, elements = ['display_name']):

mylog('verbose', [f'[{module_name}] ---------------------------------------------'])
# -------------------------------------------------------------------------------
def print_plugin_info(plugin, elements=["display_name"]):
mylog("verbose", [f"[{module_name}] ---------------------------------------------"])

for el in elements:
res = get_plugin_string(plugin, el)
mylog('verbose', [f'[{module_name}] ', el ,': ', res])
mylog("verbose", [f"[{module_name}] ", el, ": ", res])

#-------------------------------------------------------------------------------
# -------------------------------------------------------------------------------
# Gets the whole setting object
def get_plugin_setting_obj(plugin, function_key):

result = None

for set in plugin['settings']:
for set in plugin["settings"]:
if set["function"] == function_key:
result = set

result = set

# if result == None:
# mylog('debug', [f'[{module_name}] Setting with "function":"', function_key, '" is missing in plugin: ', get_plugin_string(plugin, 'display_name')])

return result

#-------------------------------------------------------------------------------

# -------------------------------------------------------------------------------
# Gets the setting value for a plugin from the default JSON
def get_plugin_setting_value(plugin, function_key):

result = None

for set in plugin['settings']:
for set in plugin["settings"]:
if set["function"] == function_key:
result = set

result = set

# if result == None:
# mylog('debug', [f'[{module_name}] Setting with "function":"', function_key, '" is missing in plugin: ', get_plugin_string(plugin, 'display_name')])

return result

#-------------------------------------------------------------------------------
# -------------------------------------------------------------------------------
# Get localized string value on the top JSON depth, not recursive
def get_plugin_string(props, el):
result = ""

result = ''

if el in props['localized']:
if el in props["localized"]:
for val in props[el]:
if val['language_code'] == 'en_us':
result = val['string']

if result == '':
result = 'en_us string missing'
if val["language_code"] == "en_us":
result = val["string"]

if result == "":
result = "en_us string missing"

else:
result = props[el]

return result
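A small illustration of the localized-string lookup above, against a hypothetical minimal plugin manifest (real config.json files carry more keys):

props = {
    "localized": ["display_name"],
    "display_name": [
        {"language_code": "en_us", "string": "ARP scan"},
        {"language_code": "de_de", "string": "ARP-Scan"},
    ],
}
# Mirrors get_plugin_string(props, "display_name") above
result = ""
for val in props["display_name"]:
    if val["language_code"] == "en_us":
        result = val["string"]
print(result or "en_us string missing")  # ARP scan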
#-------------------------------------------------------------------------------
# -------------------------------------------------------------------------------
# generates a comma separated list of values from a list (or a string representing a list)
def list_to_csv(arr):
tmp = ''
arrayItemStr = ''
tmp = ""
arrayItemStr = ""

mylog('debug', f'[{module_name}] Flattening the below array')
mylog('debug', arr)
mylog('debug', f'[{module_name}] isinstance(arr, list) : {isinstance(arr, list)} | isinstance(arr, str) : {isinstance(arr, str)}')
mylog("debug", f"[{module_name}] Flattening the below array")
mylog("debug", arr)
mylog(
"debug",
f"[{module_name}] isinstance(arr, list) : {isinstance(arr, list)} | isinstance(arr, str) : {isinstance(arr, str)}",
)

if isinstance(arr, str):
tmpStr = arr.replace('[','').replace(']','').replace("'", '') # removing brackets and single quotes (not allowed)

if ',' in tmpStr:
tmpStr = (
arr.replace("[", "").replace("]", "").replace("'", "")
) # removing brackets and single quotes (not allowed)

if "," in tmpStr:
# Split the string into a list and trim whitespace
cleanedStr = [tmpSubStr.strip() for tmpSubStr in tmpStr.split(',')]
cleanedStr = [tmpSubStr.strip() for tmpSubStr in tmpStr.split(",")]

# Join the list elements using a comma
result_string = ",".join(cleanedStr)
result_string = ",".join(cleanedStr)
else:
result_string = tmpStr

@@ -115,83 +124,82 @@ def list_to_csv(arr):
elif isinstance(arr, list):
for arrayItem in arr:
# only one column flattening is supported
if isinstance(arrayItem, list):
arrayItemStr = str(arrayItem[0]).replace("'", '').strip() # removing single quotes - not allowed
if isinstance(arrayItem, list):
arrayItemStr = (
str(arrayItem[0]).replace("'", "").strip()
) # removing single quotes - not allowed
else:
# is string already
arrayItemStr = arrayItem

tmp += f'{arrayItemStr},'
tmp += f"{arrayItemStr},"

tmp = tmp[:-1] # Remove last comma ','

mylog('debug', f'[{module_name}] Flattened array: {tmp}')
mylog("debug", f"[{module_name}] Flattened array: {tmp}")

return tmp

else:
mylog('none', f'[{module_name}] ⚠ ERROR Could not convert array: {arr}')
mylog("none", f"[{module_name}] ⚠ ERROR Could not convert array: {arr}")
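The same flattening rules, condensed into a self-contained sketch (an illustration, not the shipped function; logging is stripped out):

def flatten(arr):
    if isinstance(arr, str):
        s = arr.replace("[", "").replace("]", "").replace("'", "")
        return ",".join(p.strip() for p in s.split(",")) if "," in s else s
    if isinstance(arr, list):
        # only the first column of nested lists is kept, as in list_to_csv
        return ",".join(
            str(i[0]).replace("'", "").strip() if isinstance(i, list) else i
            for i in arr
        )

print(flatten("['a', 'b']"))     # a,b
print(flatten([["x", 1], "y"]))  # x,y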
#-------------------------------------------------------------------------------
# -------------------------------------------------------------------------------
# Combine plugin objects, keep user-defined values, created time, changed time if nothing changed and the index
def combine_plugin_objects(old, new):

new.userData = old.userData
new.index = old.index
new.created = old.created
def combine_plugin_objects(old, new):
new.userData = old.userData
new.index = old.index
new.created = old.created

# Keep changed time if nothing changed
if new.status in ['watched-not-changed']:
if new.status in ["watched-not-changed"]:
new.changed = old.changed

# return the new object, with some of the old values
return new

#-------------------------------------------------------------------------------
# -------------------------------------------------------------------------------
# Replace {wildcards} with parameters
def resolve_wildcards_arr(commandArr, params):

mylog('debug', [f'[{module_name}] Pre-Resolved CMD: '] + commandArr)
mylog("debug", [f"[{module_name}] Pre-Resolved CMD: "] + commandArr)

for param in params:
# mylog('debug', ['[Plugins] key : {', param[0], '}'])
# mylog('debug', ['[Plugins] resolved: ', param[1]])

i = 0

for comPart in commandArr:

commandArr[i] = comPart.replace('{' + str(param[0]) + '}', str(param[1])).replace('{s-quote}',"'")
for comPart in commandArr:
commandArr[i] = comPart.replace(
"{" + str(param[0]) + "}", str(param[1])
).replace("{s-quote}", "'")

i += 1

return commandArr
return commandArr
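How the wildcard substitution above behaves, as a quick illustration (the parameter names and command are made up):

commandArr = ["nmap", "-p", "{ports}", "{target}"]
params = [("ports", "80,443"), ("target", "192.168.1.0/24")]

for key, value in params:
    commandArr = [
        part.replace("{" + str(key) + "}", str(value)).replace("{s-quote}", "'")
        for part in commandArr
    ]
print(commandArr)  # ['nmap', '-p', '80,443', '192.168.1.0/24']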
#-------------------------------------------------------------------------------

# -------------------------------------------------------------------------------
# Function to extract layer number from "execution_order"
def get_layer(plugin):
order = plugin.get("execution_order", "Layer_N")
if order == "Layer_N":
return float('inf') # Treat as the last layer if "execution_order" is missing
return int(order.split('_')[1])
return float("inf") # Treat as the last layer if "execution_order" is missing
return int(order.split("_")[1])
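get_layer is used as a sort key, so plugins without an execution_order sink to the end; a quick demonstration:

def get_layer(plugin):
    order = plugin.get("execution_order", "Layer_N")
    if order == "Layer_N":
        return float("inf")
    return int(order.split("_")[1])

plugins = [{"unique_prefix": "B"}, {"unique_prefix": "A", "execution_order": "Layer_0"}]
print([p["unique_prefix"] for p in sorted(plugins, key=get_layer)])  # ['A', 'B']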
#-------------------------------------------------------------------------------

# -------------------------------------------------------------------------------
def get_plugins_configs(loadAll):
pluginsList = [] # Create an empty list to store plugin configurations
pluginsListSorted = [] # Sorted by "execution_order" : "Layer_0" first, Layer_N last

# Get a list of top-level directories in the specified pluginsPath
dirs = next(os.walk(pluginsPath))[1]

# Sort the directories list if needed
dirs.sort() # This will sort the directories alphabetically

# Loop through each directory (plugin folder) in dirs
for d in dirs:
# Check if the directory name does not start with "__" to skip python cache
@@ -209,72 +217,97 @@ def get_plugins_configs(loadAll):
# Fetch the list of enabled plugins from the config, default to an empty list if not set
enabledPlugins = getattr(conf, "LOADED_PLUGINS", [])

# Load all plugins if `loadAll` is True, the plugin is in the enabled list,
# Load all plugins if `loadAll` is True, the plugin is in the enabled list,
# or no specific plugins are enabled (enabledPlugins is empty)
if loadAll or plugJson["unique_prefix"] in enabledPlugins or enabledPlugins == []:

if (
loadAll
or plugJson["unique_prefix"] in enabledPlugins
or enabledPlugins == []
):
# Load the contents of the config.json file as a JSON object and append it to pluginsList
pluginsList.append(plugJson)

except (FileNotFoundError, json.JSONDecodeError) as e:
except (FileNotFoundError, json.JSONDecodeError):
# Handle the case when the file is not found or JSON decoding fails
mylog('none', [f'[{module_name}] ⚠ ERROR - JSONDecodeError or FileNotFoundError for file {config_path}'])
mylog(
"none",
[
f"[{module_name}] ⚠ ERROR - JSONDecodeError or FileNotFoundError for file {config_path}"
],
)
except Exception as e:
mylog('none', [f'[{module_name}] ⚠ ERROR - Exception for file {config_path}: {str(e)}'])

mylog(
"none",
[
f"[{module_name}] ⚠ ERROR - Exception for file {config_path}: {str(e)}"
],
)

# Sort pluginsList based on "execution_order"
pluginsListSorted = sorted(pluginsList, key=get_layer)

return pluginsListSorted # Return the sorted list of plugin configurations

#-------------------------------------------------------------------------------
# -------------------------------------------------------------------------------
def custom_plugin_decoder(pluginDict):
return namedtuple('X', pluginDict.keys())(*pluginDict.values())
return namedtuple("X", pluginDict.keys())(*pluginDict.values())

#-------------------------------------------------------------------------------
# Handle empty value
def handle_empty(value):
if value == '' or value is None:
value = 'null'

return value

# -------------------------------------------------------------------------------
# Handle empty value
def handle_empty(value):
if value == "" or value is None:
value = "null"

return value

#-------------------------------------------------------------------------------

# -------------------------------------------------------------------------------
# Get and return a plugin object based on key-value pairs
# keyValues example: getPluginObject({"Plugin":"MQTT", "Watched_Value4":"someValue"})
def getPluginObject(keyValues):

plugins_objects = apiPath + 'table_plugins_objects.json'
plugins_objects = apiPath + "table_plugins_objects.json"

try:
with open(plugins_objects, 'r') as json_file:
with open(plugins_objects, "r") as json_file:
data = json.load(json_file)

objectEntries = data.get("data", [])
objectEntries = data.get("data", [])

for item in objectEntries:
# Initialize a flag to check if all key-value pairs match
all_match = True
all_match = True

for key, value in keyValues.items():
if item.get(key) != value:
all_match = False
break # No need to continue checking if one pair doesn't match

if all_match:
return item

mylog('verbose', [f'[{module_name}] 💬 INFO - Object not found {json.dumps(keyValues)} '])
mylog(
"verbose",
[
f"[{module_name}] 💬 INFO - Object not found {json.dumps(keyValues)} "
],
)

return {}

except (FileNotFoundError, json.JSONDecodeError, ValueError) as e:
except (FileNotFoundError, json.JSONDecodeError, ValueError):
# Handle the case when the file is not found, JSON decoding fails, or data is not in the expected format
mylog('verbose', [f'[{module_name}] ⚠ ERROR - JSONDecodeError or FileNotFoundError for file {plugins_objects}'])
mylog(
"verbose",
[
f"[{module_name}] ⚠ ERROR - JSONDecodeError or FileNotFoundError for file {plugins_objects}"
],
)

return {}
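The matching loop in getPluginObject is a simple all-pairs filter; a sketch of the same logic against hypothetical data:

entries = [
    {"Plugin": "MQTT", "Watched_Value4": "someValue", "Object_PrimaryID": "dev-1"},
    {"Plugin": "SYNC", "Watched_Value4": "other", "Object_PrimaryID": "dev-2"},
]
keyValues = {"Plugin": "MQTT", "Watched_Value4": "someValue"}

match = next(
    (i for i in entries if all(i.get(k) == v for k, v in keyValues.items())), {}
)
print(match["Object_PrimaryID"])  # dev-1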
# ------------------------------------------------------------------
# decode any encoded last_result files
def decode_and_rename_files(file_dir, file_prefix):
@@ -287,8 +320,8 @@ def decode_and_rename_files(file_dir, file_prefix):

# key to decrypt data if SYNC loaded and key available
encryption_key = None
if "SYNC" in get_setting_value('LOADED_PLUGINS'):
encryption_key = get_setting_value('SYNC_encryption_key')
if "SYNC" in get_setting_value("LOADED_PLUGINS"):
encryption_key = get_setting_value("SYNC_encryption_key")

# Check for files starting with the specified prefix
matching_files = [f for f in os.listdir(file_dir) if f.startswith(file_prefix)]
@@ -296,14 +329,13 @@ def decode_and_rename_files(file_dir, file_prefix):
for filename in matching_files:
# Create the full file path
file_path = os.path.join(file_dir, filename)

# Check if the file exists
if os.path.exists(file_path):

# Check if the file name contains "encoded"
if '.encoded.' in filename and encryption_key:
if ".encoded." in filename and encryption_key:
# Decrypt the entire file
with open(file_path, 'r+') as f:
with open(file_path, "r+") as f:
encrypted_data = f.read()
decrypted_data = decrypt_data(encrypted_data, encryption_key)

@@ -313,7 +345,7 @@ def decode_and_rename_files(file_dir, file_prefix):
f.truncate()

# Rename the file e.g. from last_result.encoded.Node_1.1.log to last_result.decoded.Node_1.1.log
new_filename = filename.replace('.encoded.', '.decoded.')
new_filename = filename.replace(".encoded.", ".decoded.")
os.rename(file_path, os.path.join(file_dir, new_filename))

files_to_process.append(new_filename)
@@ -321,7 +353,7 @@ def decode_and_rename_files(file_dir, file_prefix):
else:
files_to_process.append(filename)
else:
mylog('debug', [f'[Plugins] The file {file_path} does not exist'])
mylog("debug", [f"[Plugins] The file {file_path} does not exist"])

return files_to_process

@@ -342,18 +374,18 @@ def get_set_value_for_init(plugin, c_d, setting_key):
Any: The value for the specified setting, converted to an appropriate Python type.
"""

pref = plugin["unique_prefix"]
pref = plugin["unique_prefix"]

# Step 1: Initialize the setting value as an empty string
setting_value = ''

setting_value = ""

# Step 2: Get the default setting object for the plugin's specified key
setting_obj = get_plugin_setting_obj(plugin, setting_key)

if setting_obj is not None:
# Retrieve the type and default value from the setting object
set_type = setting_obj.get('type') # Lowercase 'type'
set_value = setting_obj.get('default_value')
set_type = setting_obj.get("type") # Lowercase 'type'
set_value = setting_obj.get("default_value")

# Convert the value to the appropriate Python type
setting_value = setting_value_to_python_type(set_type, set_value)
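The encoded-file handling above boils down to decrypt-in-place plus rename. A self-contained sketch with a stand-in cipher (the real decrypt_data lives in crypto_utils and is not reproduced here):

import os, tempfile

def fake_decrypt(data, key):  # stand-in for crypto_utils.decrypt_data
    return data.swapcase()

tmp = tempfile.mkdtemp()
path = os.path.join(tmp, "last_result.encoded.Node_1.1.log")
with open(path, "w") as f:
    f.write("HELLO")

with open(path, "r+") as f:
    decrypted = fake_decrypt(f.read(), key="k")
    f.seek(0)
    f.write(decrypted)
    f.truncate()

os.rename(path, path.replace(".encoded.", ".decoded."))
print(sorted(os.listdir(tmp)))  # ['last_result.decoded.Node_1.1.log']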
@@ -1,12 +1,11 @@
import sys
import subprocess
import conf
import os
import re
from dateutil import parser

# Register NetAlertX directories
INSTALL_PATH="/app"
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
sys.path.extend([f"{INSTALL_PATH}/server"])

from helper import timeNowTZ, get_setting_value, check_IP_format
@@ -18,15 +17,20 @@ from scan.device_heuristics import guess_icon, guess_type
from db.db_helper import sanitize_SQL_input, list_to_where

# Make sure log level is initialized correctly
Logger(get_setting_value('LOG_LEVEL'))
Logger(get_setting_value("LOG_LEVEL"))

#-------------------------------------------------------------------------------

# -------------------------------------------------------------------------------
# Removing devices from the CurrentScan DB table which the user chose to ignore by MAC or IP
def exclude_ignored_devices(db):
sql = db.sql # Database interface for executing queries

mac_condition = list_to_where('OR', 'cur_MAC', 'LIKE', get_setting_value('NEWDEV_ignored_MACs'))
ip_condition = list_to_where('OR', 'cur_IP', 'LIKE', get_setting_value('NEWDEV_ignored_IPs'))
mac_condition = list_to_where(
"OR", "cur_MAC", "LIKE", get_setting_value("NEWDEV_ignored_MACs")
)
ip_condition = list_to_where(
"OR", "cur_IP", "LIKE", get_setting_value("NEWDEV_ignored_IPs")
)

# Only delete if either the MAC or IP matches an ignored condition
conditions = []
@@ -47,31 +51,31 @@ def exclude_ignored_devices(db):
else:
query = "DELETE FROM CurrentScan WHERE 1=1 AND 1=0" # No valid conditions, prevent deletion

mylog('debug', f'[New Devices] Excluding Ignored Devices Query: {query}')
mylog("debug", f"[New Devices] Excluding Ignored Devices Query: {query}")

sql.execute(query)
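The guard query above ("WHERE 1=1 AND 1=0") makes the DELETE a deliberate no-op when no ignore rules exist. A sketch of the condition assembly; the exact string list_to_where returns is an assumption here:

# Hypothetical outputs of list_to_where for two ignore lists
mac_condition = "(cur_MAC LIKE 'aa:bb:%')"
ip_condition = ""  # no ignored IPs configured

conditions = [c for c in (mac_condition, ip_condition) if c]
if conditions:
    query = "DELETE FROM CurrentScan WHERE " + " OR ".join(conditions)
else:
    query = "DELETE FROM CurrentScan WHERE 1=1 AND 1=0"  # no-op
print(query)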
#-------------------------------------------------------------------------------
def update_devices_data_from_scan (db):
sql = db.sql #TO-DO
startTime = timeNowTZ().strftime('%Y-%m-%d %H:%M:%S')

# -------------------------------------------------------------------------------
def update_devices_data_from_scan(db):
sql = db.sql # TO-DO
startTime = timeNowTZ().strftime("%Y-%m-%d %H:%M:%S")

# Update Last Connection
mylog('debug', '[Update Devices] 1 Last Connection')
mylog("debug", "[Update Devices] 1 Last Connection")
sql.execute(f"""UPDATE Devices SET devLastConnection = '{startTime}',
devPresentLastScan = 1
WHERE EXISTS (SELECT 1 FROM CurrentScan
WHERE devMac = cur_MAC) """)

# Clean no active devices
mylog('debug', '[Update Devices] 2 Clean no active devices')
mylog("debug", "[Update Devices] 2 Clean no active devices")
sql.execute("""UPDATE Devices SET devPresentLastScan = 0
WHERE NOT EXISTS (SELECT 1 FROM CurrentScan
WHERE devMac = cur_MAC) """)

# Update IP
mylog('debug', '[Update Devices] - cur_IP -> devLastIP (always updated)')
# Update IP
mylog("debug", "[Update Devices] - cur_IP -> devLastIP (always updated)")
sql.execute("""UPDATE Devices
SET devLastIP = (
SELECT cur_IP
@@ -90,9 +94,8 @@ def update_devices_data_from_scan (db):
AND cur_IP NOT IN ('', 'null', '(unknown)', '(Unknown)')
)""")

# Update only devices with empty, NULL or (u(U)nknown) vendors
mylog('debug', '[Update Devices] - cur_Vendor -> (if empty) devVendor')
mylog("debug", "[Update Devices] - cur_Vendor -> (if empty) devVendor")
sql.execute("""UPDATE Devices
SET devVendor = (
SELECT cur_Vendor
@@ -107,8 +110,8 @@ def update_devices_data_from_scan (db):
WHERE Devices.devMac = CurrentScan.cur_MAC
)""")

# Update only devices with empty or NULL devParentPort
mylog('debug', '[Update Devices] - (if not empty) cur_Port -> devParentPort')
# Update only devices with empty or NULL devParentPort
mylog("debug", "[Update Devices] - (if not empty) cur_Port -> devParentPort")
sql.execute("""UPDATE Devices
SET devParentPort = (
SELECT cur_Port
@@ -125,8 +128,10 @@ def update_devices_data_from_scan (db):
AND CurrentScan.cur_Port IS NOT NULL AND CurrentScan.cur_Port NOT IN ("", "null")
)""")

# Update only devices with empty or NULL devParentMAC
mylog('debug', '[Update Devices] - (if not empty) cur_NetworkNodeMAC -> devParentMAC')
# Update only devices with empty or NULL devParentMAC
mylog(
"debug", "[Update Devices] - (if not empty) cur_NetworkNodeMAC -> devParentMAC"
)
sql.execute("""UPDATE Devices
SET devParentMAC = (
SELECT cur_NetworkNodeMAC
@@ -144,9 +149,11 @@ def update_devices_data_from_scan (db):
)
""")

# Update only devices with empty or NULL devSite
mylog('debug', '[Update Devices] - (if not empty) cur_NetworkSite -> (if empty) devSite')
# Update only devices with empty or NULL devSite
mylog(
"debug",
"[Update Devices] - (if not empty) cur_NetworkSite -> (if empty) devSite",
)
sql.execute("""UPDATE Devices
SET devSite = (
SELECT cur_NetworkSite
@@ -162,8 +169,8 @@ def update_devices_data_from_scan (db):
AND CurrentScan.cur_NetworkSite IS NOT NULL AND CurrentScan.cur_NetworkSite NOT IN ("", "null")
)""")

# Update only devices with empty or NULL devSSID
mylog('debug', '[Update Devices] - (if not empty) cur_SSID -> (if empty) devSSID')
# Update only devices with empty or NULL devSSID
mylog("debug", "[Update Devices] - (if not empty) cur_SSID -> (if empty) devSSID")
sql.execute("""UPDATE Devices
SET devSSID = (
SELECT cur_SSID
@@ -180,7 +187,7 @@ def update_devices_data_from_scan (db):
)""")

# Update only devices with empty or NULL devType
mylog('debug', '[Update Devices] - (if not empty) cur_Type -> (if empty) devType')
mylog("debug", "[Update Devices] - (if not empty) cur_Type -> (if empty) devType")
sql.execute("""UPDATE Devices
SET devType = (
SELECT cur_Type
@@ -197,8 +204,8 @@ def update_devices_data_from_scan (db):
)""")

# Update (unknown) or (name not found) Names if available
mylog('debug','[Update Devices] - (if not empty) cur_Name -> (if empty) devName')
sql.execute (""" UPDATE Devices
mylog("debug", "[Update Devices] - (if not empty) cur_Name -> (if empty) devName")
sql.execute(""" UPDATE Devices
SET devName = COALESCE((
SELECT cur_Name
FROM CurrentScan
@@ -224,23 +231,25 @@ def update_devices_data_from_scan (db):
WHERE devVendor IS NULL OR devVendor IN ("", "null", "(unknown)", "(Unknown)")
"""

for device in sql.execute (query) :
vendor = query_MAC_vendor (device['devMac'])
if vendor != -1 and vendor != -2 :
recordsToUpdate.append ([vendor, device['devMac']])
for device in sql.execute(query):
vendor = query_MAC_vendor(device["devMac"])
if vendor != -1 and vendor != -2:
recordsToUpdate.append([vendor, device["devMac"]])

if len(recordsToUpdate) > 0:
sql.executemany ("UPDATE Devices SET devVendor = ? WHERE devMac = ? ", recordsToUpdate )
if len(recordsToUpdate) > 0:
sql.executemany(
"UPDATE Devices SET devVendor = ? WHERE devMac = ? ", recordsToUpdate
)

# Update devPresentLastScan based on NICs presence
update_devPresentLastScan_based_on_nics(db)

# Guess ICONS
recordsToUpdate = []

default_icon = get_setting_value('NEWDEV_devIcon')
default_icon = get_setting_value("NEWDEV_devIcon")

if get_setting_value('NEWDEV_replace_preset_icon'):
if get_setting_value("NEWDEV_replace_preset_icon"):
query = f"""SELECT * FROM Devices
WHERE devIcon in ('', 'null', '{default_icon}')
OR devIcon IS NULL"""
@@ -248,62 +257,97 @@ def update_devices_data_from_scan (db):
query = """SELECT * FROM Devices
WHERE devIcon in ('', 'null')
OR devIcon IS NULL"""

for device in sql.execute (query) :
# Conditional logic for devIcon guessing
devIcon = guess_icon(device['devVendor'], device['devMac'], device['devLastIP'], device['devName'], default_icon)

recordsToUpdate.append ([devIcon, device['devMac']])
for device in sql.execute(query):
# Conditional logic for devIcon guessing
devIcon = guess_icon(
device["devVendor"],
device["devMac"],
device["devLastIP"],
device["devName"],
default_icon,
)

recordsToUpdate.append([devIcon, device["devMac"]])

mylog('debug',f'[Update Devices] recordsToUpdate: {recordsToUpdate}')

if len(recordsToUpdate) > 0:
sql.executemany ("UPDATE Devices SET devIcon = ? WHERE devMac = ? ", recordsToUpdate )
mylog("debug", f"[Update Devices] recordsToUpdate: {recordsToUpdate}")

if len(recordsToUpdate) > 0:
sql.executemany(
"UPDATE Devices SET devIcon = ? WHERE devMac = ? ", recordsToUpdate
)

# Guess Type
recordsToUpdate = []
query = """SELECT * FROM Devices
WHERE devType in ('', 'null')
OR devType IS NULL"""
default_type = get_setting_value('NEWDEV_devType')

for device in sql.execute (query) :
# Conditional logic for devIcon guessing
devType = guess_type(device['devVendor'], device['devMac'], device['devLastIP'], device['devName'], default_type)

recordsToUpdate.append ([devType, device['devMac']])

if len(recordsToUpdate) > 0:
sql.executemany ("UPDATE Devices SET devType = ? WHERE devMac = ? ", recordsToUpdate )

mylog('debug','[Update Devices] Update devices end')

default_type = get_setting_value("NEWDEV_devType")

for device in sql.execute(query):
# Conditional logic for devIcon guessing
devType = guess_type(
device["devVendor"],
device["devMac"],
device["devLastIP"],
device["devName"],
default_type,
)

recordsToUpdate.append([devType, device["devMac"]])

if len(recordsToUpdate) > 0:
sql.executemany(
"UPDATE Devices SET devType = ? WHERE devMac = ? ", recordsToUpdate
)

mylog("debug", "[Update Devices] Update devices end")
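All of the conditional copies above follow one correlated-subquery shape: set the column from CurrentScan only when the existing value is empty. A runnable sketch of that pattern (table and column names as in the diff, data invented):

import sqlite3

con = sqlite3.connect(":memory:")
cur = con.cursor()
cur.execute("CREATE TABLE Devices (devMac TEXT, devSSID TEXT)")
cur.execute("CREATE TABLE CurrentScan (cur_MAC TEXT, cur_SSID TEXT)")
cur.execute("INSERT INTO Devices VALUES ('aa', ''), ('bb', 'keep-me')")
cur.execute("INSERT INTO CurrentScan VALUES ('aa', 'iot'), ('bb', 'guest')")

# Copy from the scan only where the existing value is empty
cur.execute("""UPDATE Devices
               SET devSSID = (SELECT cur_SSID FROM CurrentScan
                              WHERE Devices.devMac = CurrentScan.cur_MAC)
               WHERE (devSSID IS NULL OR devSSID IN ('', 'null'))
               AND EXISTS (SELECT 1 FROM CurrentScan
                           WHERE Devices.devMac = CurrentScan.cur_MAC)""")
print(cur.execute("SELECT * FROM Devices").fetchall())
# [('aa', 'iot'), ('bb', 'keep-me')]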
#-------------------------------------------------------------------------------
def save_scanned_devices (db):
sql = db.sql #TO-DO

# -------------------------------------------------------------------------------
def save_scanned_devices(db):
sql = db.sql # TO-DO

# Add Local MAC of default local interface
local_mac_cmd = ["/sbin/ifconfig `ip -o route get 1 | sed 's/^.*dev \\([^ ]*\\).*$/\\1/;q'` | grep ether | awk '{print $2}'"]
local_mac = subprocess.Popen (local_mac_cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT).communicate()[0].decode().strip()
local_mac_cmd = [
"/sbin/ifconfig `ip -o route get 1 | sed 's/^.*dev \\([^ ]*\\).*$/\\1/;q'` | grep ether | awk '{print $2}'"
]
local_mac = (
subprocess.Popen(
local_mac_cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT
)
.communicate()[0]
.decode()
.strip()
)

local_ip_cmd = ["ip -o route get 1 | sed 's/^.*src \\([^ ]*\\).*$/\\1/;q'"]
local_ip = subprocess.Popen (local_ip_cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT).communicate()[0].decode().strip()
local_ip = (
subprocess.Popen(
local_ip_cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT
)
.communicate()[0]
.decode()
.strip()
)

mylog('debug', ['[Save Devices] Saving this IP into the CurrentScan table:', local_ip])
mylog(
"debug", ["[Save Devices] Saving this IP into the CurrentScan table:", local_ip]
)

if check_IP_format(local_ip) == '':
local_ip = '0.0.0.0'
if check_IP_format(local_ip) == "":
local_ip = "0.0.0.0"

# Proceed if variable contains valid MAC
if check_mac_or_internet(local_mac):
sql.execute (f"""INSERT OR IGNORE INTO CurrentScan (cur_MAC, cur_IP, cur_Vendor, cur_ScanMethod) VALUES ( '{local_mac}', '{local_ip}', Null, 'local_MAC') """)
sql.execute(
f"""INSERT OR IGNORE INTO CurrentScan (cur_MAC, cur_IP, cur_Vendor, cur_ScanMethod) VALUES ( '{local_mac}', '{local_ip}', Null, 'local_MAC') """
)

#-------------------------------------------------------------------------------

# -------------------------------------------------------------------------------
def print_scan_stats(db):
sql = db.sql # TO-DO
sql = db.sql # TO-DO

query = """
SELECT
@@ -323,59 +367,71 @@ def print_scan_stats(db):
sql.execute(query)
stats = sql.fetchall()

mylog('verbose', f'[Scan Stats] Devices Detected.......: {stats[0]["devices_detected"]}')
mylog('verbose', f'[Scan Stats] New Devices............: {stats[0]["new_devices"]}')
mylog('verbose', f'[Scan Stats] Down Alerts............: {stats[0]["down_alerts"]}')
mylog('verbose', f'[Scan Stats] New Down Alerts........: {stats[0]["new_down_alerts"]}')
mylog('verbose', f'[Scan Stats] New Connections........: {stats[0]["new_connections"]}')
mylog('verbose', f'[Scan Stats] Disconnections.........: {stats[0]["disconnections"]}')
mylog('verbose', f'[Scan Stats] IP Changes.............: {stats[0]["ip_changes"]}')
mylog(
"verbose",
f"[Scan Stats] Devices Detected.......: {stats[0]['devices_detected']}",
)
mylog("verbose", f"[Scan Stats] New Devices............: {stats[0]['new_devices']}")
mylog("verbose", f"[Scan Stats] Down Alerts............: {stats[0]['down_alerts']}")
mylog(
"verbose",
f"[Scan Stats] New Down Alerts........: {stats[0]['new_down_alerts']}",
)
mylog(
"verbose",
f"[Scan Stats] New Connections........: {stats[0]['new_connections']}",
)
mylog(
"verbose", f"[Scan Stats] Disconnections.........: {stats[0]['disconnections']}"
)
mylog("verbose", f"[Scan Stats] IP Changes.............: {stats[0]['ip_changes']}")

# if str(stats[0]["new_devices"]) != '0':
mylog('trace', f' ================ DEVICES table content ================')
sql.execute('select * from Devices')
mylog("trace", " ================ DEVICES table content ================")
sql.execute("select * from Devices")
rows = sql.fetchall()
for row in rows:
row_dict = dict(row)
mylog('trace', f' {row_dict}')

mylog('trace', f' ================ CurrentScan table content ================')
sql.execute('select * from CurrentScan')
rows = sql.fetchall()
for row in rows:
row_dict = dict(row)
mylog('trace', f' {row_dict}')

mylog('trace', f' ================ Events table content where eve_PendingAlertEmail = 1 ================')
sql.execute('select * from Events where eve_PendingAlertEmail = 1')
rows = sql.fetchall()
for row in rows:
row_dict = dict(row)
mylog('trace', f' {row_dict}')
mylog("trace", f" {row_dict}")

mylog('trace', f' ================ Events table COUNT ================')
sql.execute('select count(*) from Events')
mylog("trace", " ================ CurrentScan table content ================")
sql.execute("select * from CurrentScan")
rows = sql.fetchall()
for row in rows:
row_dict = dict(row)
mylog('trace', f' {row_dict}')

mylog("trace", f" {row_dict}")

mylog('verbose', '[Scan Stats] Scan Method Statistics:')
mylog(
"trace",
" ================ Events table content where eve_PendingAlertEmail = 1 ================",
)
sql.execute("select * from Events where eve_PendingAlertEmail = 1")
rows = sql.fetchall()
for row in rows:
row_dict = dict(row)
mylog("trace", f" {row_dict}")

mylog("trace", " ================ Events table COUNT ================")
sql.execute("select count(*) from Events")
rows = sql.fetchall()
for row in rows:
row_dict = dict(row)
mylog("trace", f" {row_dict}")

mylog("verbose", "[Scan Stats] Scan Method Statistics:")
for row in stats:
if row["cur_ScanMethod"] is not None:
mylog('verbose', f' {row["cur_ScanMethod"]}: {row["scan_method_count"]}')
mylog("verbose", f" {row['cur_ScanMethod']}: {row['scan_method_count']}")
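The shell pipelines above derive the default interface's source IP (and MAC) from `ip -o route get 1`. A trimmed Python sketch of the same probe, Linux-only and illustrative (the ifconfig dependency is kept as in the diff):

import subprocess

def sh(cmd):
    # shell=True mirrors the diff; output is decoded and trimmed the same way
    return subprocess.Popen(
        cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT
    ).communicate()[0].decode().strip()

local_ip = sh("ip -o route get 1 | sed 's/^.*src \\([^ ]*\\).*$/\\1/;q'")
print(local_ip or "0.0.0.0")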
#-------------------------------------------------------------------------------
def create_new_devices (db):
sql = db.sql # TO-DO
# -------------------------------------------------------------------------------
def create_new_devices(db):
sql = db.sql # TO-DO
startTime = timeNowTZ()

# Insert events for new devices from CurrentScan (not yet in Devices)

mylog('debug', '[New Devices] Insert "New Device" Events')
mylog("debug", '[New Devices] Insert "New Device" Events')
query_new_device_events = f"""
INSERT INTO Events (
eve_MAC, eve_IP, eve_DateTime,
@@ -389,14 +445,14 @@ def create_new_devices (db):
WHERE devMac = cur_MAC
)
"""

# mylog('debug',f'[New Devices] Log Events Query: {query_new_device_events}')

sql.execute(query_new_device_events)

mylog('debug',f'[New Devices] Insert Connection into session table')
mylog("debug", "[New Devices] Insert Connection into session table")

sql.execute (f"""INSERT INTO Sessions (
sql.execute(f"""INSERT INTO Sessions (
ses_MAC, ses_IP, ses_EventTypeConnection, ses_DateTimeConnection,
ses_EventTypeDisconnection, ses_DateTimeDisconnection,
ses_StillConnected, ses_AdditionalInfo
@@ -412,12 +468,12 @@ def create_new_devices (db):
WHERE ses_MAC = cur_MAC AND ses_StillConnected = 1
)
""")

# Create new devices from CurrentScan
mylog('debug','[New Devices] 2 Create devices')
mylog("debug", "[New Devices] 2 Create devices")

# default New Device values preparation
newDevColumns = """devAlertEvents,
newDevColumns = """devAlertEvents,
devAlertDown,
devPresentLastScan,
devIsArchived,
@@ -435,41 +491,66 @@ def create_new_devices (db):
devReqNicsOnline
"""

newDevDefaults = f"""{get_setting_value('NEWDEV_devAlertEvents')},
{get_setting_value('NEWDEV_devAlertDown')},
{get_setting_value('NEWDEV_devPresentLastScan')},
{get_setting_value('NEWDEV_devIsArchived')},
{get_setting_value('NEWDEV_devIsNew')},
{get_setting_value('NEWDEV_devSkipRepeated')},
{get_setting_value('NEWDEV_devScan')},
'{sanitize_SQL_input(get_setting_value('NEWDEV_devOwner'))}',
{get_setting_value('NEWDEV_devFavorite')},
'{sanitize_SQL_input(get_setting_value('NEWDEV_devGroup'))}',
'{sanitize_SQL_input(get_setting_value('NEWDEV_devComments'))}',
{get_setting_value('NEWDEV_devLogEvents')},
'{sanitize_SQL_input(get_setting_value('NEWDEV_devLocation'))}',
'{sanitize_SQL_input(get_setting_value('NEWDEV_devCustomProps'))}',
'{sanitize_SQL_input(get_setting_value('NEWDEV_devParentRelType'))}',
{sanitize_SQL_input(get_setting_value('NEWDEV_devReqNicsOnline'))}
newDevDefaults = f"""{get_setting_value("NEWDEV_devAlertEvents")},
{get_setting_value("NEWDEV_devAlertDown")},
{get_setting_value("NEWDEV_devPresentLastScan")},
{get_setting_value("NEWDEV_devIsArchived")},
{get_setting_value("NEWDEV_devIsNew")},
{get_setting_value("NEWDEV_devSkipRepeated")},
{get_setting_value("NEWDEV_devScan")},
'{sanitize_SQL_input(get_setting_value("NEWDEV_devOwner"))}',
{get_setting_value("NEWDEV_devFavorite")},
'{sanitize_SQL_input(get_setting_value("NEWDEV_devGroup"))}',
'{sanitize_SQL_input(get_setting_value("NEWDEV_devComments"))}',
{get_setting_value("NEWDEV_devLogEvents")},
'{sanitize_SQL_input(get_setting_value("NEWDEV_devLocation"))}',
'{sanitize_SQL_input(get_setting_value("NEWDEV_devCustomProps"))}',
'{sanitize_SQL_input(get_setting_value("NEWDEV_devParentRelType"))}',
{sanitize_SQL_input(get_setting_value("NEWDEV_devReqNicsOnline"))}
"""

# Fetch data from CurrentScan skipping ignored devices by IP and MAC
query = f"""SELECT cur_MAC, cur_Name, cur_Vendor, cur_ScanMethod, cur_IP, cur_SyncHubNodeName, cur_NetworkNodeMAC, cur_PORT, cur_NetworkSite, cur_SSID, cur_Type
FROM CurrentScan """
query = """SELECT cur_MAC, cur_Name, cur_Vendor, cur_ScanMethod, cur_IP, cur_SyncHubNodeName, cur_NetworkNodeMAC, cur_PORT, cur_NetworkSite, cur_SSID, cur_Type
FROM CurrentScan """

mylog('debug',f'[New Devices] Collecting New Devices Query: {query}')
mylog("debug", f"[New Devices] Collecting New Devices Query: {query}")
current_scan_data = sql.execute(query).fetchall()

for row in current_scan_data:
cur_MAC, cur_Name, cur_Vendor, cur_ScanMethod, cur_IP, cur_SyncHubNodeName, cur_NetworkNodeMAC, cur_PORT, cur_NetworkSite, cur_SSID, cur_Type = row
(
cur_MAC,
cur_Name,
cur_Vendor,
cur_ScanMethod,
cur_IP,
cur_SyncHubNodeName,
cur_NetworkNodeMAC,
cur_PORT,
cur_NetworkSite,
cur_SSID,
cur_Type,
) = row

# Handle NoneType
cur_Name = str(cur_Name).strip() if cur_Name else '(unknown)'
cur_Type = str(cur_Type).strip() if cur_Type else get_setting_value("NEWDEV_devType")
cur_NetworkNodeMAC = cur_NetworkNodeMAC.strip() if cur_NetworkNodeMAC else ''
cur_NetworkNodeMAC = cur_NetworkNodeMAC if cur_NetworkNodeMAC and cur_MAC != "Internet" else (get_setting_value("NEWDEV_devParentMAC") if cur_MAC != "Internet" else "null")
cur_SyncHubNodeName = cur_SyncHubNodeName if cur_SyncHubNodeName and cur_SyncHubNodeName != "null" else (get_setting_value("SYNC_node_name"))
cur_Name = str(cur_Name).strip() if cur_Name else "(unknown)"
cur_Type = (
str(cur_Type).strip() if cur_Type else get_setting_value("NEWDEV_devType")
)
cur_NetworkNodeMAC = cur_NetworkNodeMAC.strip() if cur_NetworkNodeMAC else ""
cur_NetworkNodeMAC = (
cur_NetworkNodeMAC
if cur_NetworkNodeMAC and cur_MAC != "Internet"
else (
get_setting_value("NEWDEV_devParentMAC")
if cur_MAC != "Internet"
else "null"
)
)
cur_SyncHubNodeName = (
cur_SyncHubNodeName
if cur_SyncHubNodeName and cur_SyncHubNodeName != "null"
else (get_setting_value("SYNC_node_name"))
)

# Preparing the individual insert statement
sqlQuery = f"""INSERT OR IGNORE INTO Devices
@@ -509,17 +590,15 @@ def create_new_devices (db):
{newDevDefaults}
)"""

mylog('trace', f'[New Devices] Create device SQL: {sqlQuery}')
mylog("trace", f"[New Devices] Create device SQL: {sqlQuery}")

sql.execute(sqlQuery, (startTime, startTime))

mylog('debug','[New Devices] New Devices end')
mylog("debug", "[New Devices] New Devices end")
db.commitDB()
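INSERT OR IGNORE is what keeps re-discovered devices from being duplicated: the insert silently no-ops when a uniqueness constraint (here, the MAC) already holds. A self-contained sketch:

import sqlite3

con = sqlite3.connect(":memory:")
cur = con.cursor()
cur.execute("CREATE TABLE Devices (devMac TEXT PRIMARY KEY, devName TEXT)")
cur.execute("INSERT OR IGNORE INTO Devices VALUES (?, ?)", ("aa:bb", "printer"))
cur.execute("INSERT OR IGNORE INTO Devices VALUES (?, ?)", ("aa:bb", "dupe"))
print(cur.execute("SELECT * FROM Devices").fetchall())  # [('aa:bb', 'printer')]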
#-------------------------------------------------------------------------------
# -------------------------------------------------------------------------------
def update_devices_names(pm):
sql = pm.db.sql
resolver = NameResolver(pm.db)
@@ -530,7 +609,11 @@ def update_devices_names(pm):

# Retrieve last time name resolution was checked (string or datetime)
last_checked_str = pm.name_plugins_checked
last_checked_dt = parser.parse(last_checked_str) if isinstance(last_checked_str, str) else last_checked_str
last_checked_dt = (
parser.parse(last_checked_str)
if isinstance(last_checked_str, str)
else last_checked_str
)

# Collect valid state update timestamps for name-related plugins
state_times = []
@@ -545,28 +628,31 @@ def update_devices_names(pm):

# Skip if no plugin state changed since last check
if last_checked_dt and latest_state_dt and latest_state_dt <= last_checked_dt:
mylog('debug', '[Update Device Name] No relevant name plugin changes since last check — skipping update.')
mylog(
"debug",
"[Update Device Name] No relevant name plugin changes since last check — skipping update.",
)
return

nameNotFound = "(name not found)"

# Define resolution strategies in priority order
strategies = [
(resolver.resolve_dig, 'DIGSCAN'),
(resolver.resolve_mdns, 'AVAHISCAN'),
(resolver.resolve_nslookup, 'NSLOOKUP'),
(resolver.resolve_nbtlookup, 'NBTSCAN')
(resolver.resolve_dig, "DIGSCAN"),
(resolver.resolve_mdns, "AVAHISCAN"),
(resolver.resolve_nslookup, "NSLOOKUP"),
(resolver.resolve_nbtlookup, "NBTSCAN"),
]

def resolve_devices(devices, resolve_both_name_and_fqdn=True):
"""
Attempts to resolve device names and/or FQDNs using available strategies.

Parameters:
devices (list): List of devices to resolve.
resolve_both_name_and_fqdn (bool): If True, resolves both name and FQDN.
If False, resolves only FQDN.

Returns:
recordsToUpdate (list): List of [newName, newFQDN, devMac] or [newFQDN, devMac] for DB update.
recordsNotFound (list): List of [nameNotFound, devMac] for DB update.
@@ -580,65 +666,93 @@ def update_devices_names(pm):

for device in devices:
newName = nameNotFound
newFQDN = ''
newFQDN = ""

# Attempt each resolution strategy in order
for resolve_fn, label in strategies:
resolved = resolve_fn(device['devMac'], device['devLastIP'])
resolved = resolve_fn(device["devMac"], device["devLastIP"])

# Only use name if resolving both name and FQDN
newName = resolved.cleaned if resolve_both_name_and_fqdn else None
newFQDN = resolved.raw

# If a valid result is found, record it and stop further attempts
if newFQDN not in [nameNotFound, '', 'localhost.'] and ' communications error to ' not in newFQDN:
if (
newFQDN not in [nameNotFound, "", "localhost."]
and " communications error to " not in newFQDN
):
foundStats[label] += 1

if resolve_both_name_and_fqdn:
recordsToUpdate.append([newName, newFQDN, device['devMac']])
recordsToUpdate.append([newName, newFQDN, device["devMac"]])
else:
recordsToUpdate.append([newFQDN, device['devMac']])
recordsToUpdate.append([newFQDN, device["devMac"]])
break

# If no name was resolved, queue device for "(name not found)" update
if resolve_both_name_and_fqdn and newName == nameNotFound:
notFound += 1
if device['devName'] != nameNotFound:
recordsNotFound.append([nameNotFound, device['devMac']])
if device["devName"] != nameNotFound:
recordsNotFound.append([nameNotFound, device["devMac"]])

return recordsToUpdate, recordsNotFound, foundStats, notFound

# --- Step 1: Update device names for unknown devices ---
unknownDevices = device_handler.getUnknown()
if unknownDevices:
mylog('verbose', f'[Update Device Name] Trying to resolve devices without name. Unknown devices count: {len(unknownDevices)}')
mylog(
"verbose",
f"[Update Device Name] Trying to resolve devices without name. Unknown devices count: {len(unknownDevices)}",
)

# Try resolving both name and FQDN
recordsToUpdate, recordsNotFound, foundStats, notFound = resolve_devices(unknownDevices)
recordsToUpdate, recordsNotFound, foundStats, notFound = resolve_devices(
unknownDevices
)

# Log summary
mylog('verbose', f"[Update Device Name] Names Found (DIGSCAN/AVAHISCAN/NSLOOKUP/NBTSCAN): {len(recordsToUpdate)} ({foundStats['DIGSCAN']}/{foundStats['AVAHISCAN']}/{foundStats['NSLOOKUP']}/{foundStats['NBTSCAN']})")
mylog('verbose', f'[Update Device Name] Names Not Found : {notFound}')
mylog(
"verbose",
f"[Update Device Name] Names Found (DIGSCAN/AVAHISCAN/NSLOOKUP/NBTSCAN): {len(recordsToUpdate)} ({foundStats['DIGSCAN']}/{foundStats['AVAHISCAN']}/{foundStats['NSLOOKUP']}/{foundStats['NBTSCAN']})",
)
mylog("verbose", f"[Update Device Name] Names Not Found : {notFound}")

# Apply updates to database
sql.executemany("UPDATE Devices SET devName = ? WHERE devMac = ?", recordsNotFound)
sql.executemany("UPDATE Devices SET devName = ?, devFQDN = ? WHERE devMac = ?", recordsToUpdate)
sql.executemany(
"UPDATE Devices SET devName = ? WHERE devMac = ?", recordsNotFound
)
sql.executemany(
"UPDATE Devices SET devName = ?, devFQDN = ? WHERE devMac = ?",
recordsToUpdate,
)

# --- Step 2: Optionally refresh FQDN for all devices ---
if get_setting_value("REFRESH_FQDN"):
allDevices = device_handler.getAll()
if allDevices:
mylog('verbose', f'[Update FQDN] Trying to resolve FQDN. Devices count: {len(allDevices)}')
mylog(
"verbose",
f"[Update FQDN] Trying to resolve FQDN. Devices count: {len(allDevices)}",
)

# Try resolving only FQDN
recordsToUpdate, _, foundStats, notFound = resolve_devices(allDevices, resolve_both_name_and_fqdn=False)
recordsToUpdate, _, foundStats, notFound = resolve_devices(
allDevices, resolve_both_name_and_fqdn=False
)

# Log summary
mylog('verbose', f"[Update FQDN] Names Found (DIGSCAN/AVAHISCAN/NSLOOKUP/NBTSCAN): {len(recordsToUpdate)} ({foundStats['DIGSCAN']}/{foundStats['AVAHISCAN']}/{foundStats['NSLOOKUP']}/{foundStats['NBTSCAN']})")
mylog('verbose', f'[Update FQDN] Names Not Found : {notFound}')
mylog(
"verbose",
f"[Update FQDN] Names Found (DIGSCAN/AVAHISCAN/NSLOOKUP/NBTSCAN): {len(recordsToUpdate)}"+
f"({foundStats['DIGSCAN']}/{foundStats['AVAHISCAN']}/{foundStats['NSLOOKUP']}"+
f"/{foundStats['NBTSCAN']})",
)
mylog("verbose", f"[Update FQDN] Names Not Found : {notFound}")

# Apply FQDN-only updates
sql.executemany("UPDATE Devices SET devFQDN = ? WHERE devMac = ?", recordsToUpdate)
sql.executemany(
"UPDATE Devices SET devFQDN = ? WHERE devMac = ?", recordsToUpdate
)

# Commit all database changes
pm.db.commitDB()
@@ -650,7 +764,8 @@ def update_devices_names(pm):
row = sql.fetchone()
pm.name_plugins_checked = row[0] if row else None
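resolve_devices walks the strategies list in priority order and stops at the first usable answer. The shape of that loop, with stub resolvers standing in for the real NameResolver methods:

from collections import namedtuple

Resolved = namedtuple("Resolved", ["cleaned", "raw"])

def dig_stub(mac, ip):  # stand-ins for resolver.resolve_* above
    return Resolved("(name not found)", "")

def mdns_stub(mac, ip):
    return Resolved("printer", "printer.local.")

strategies = [(dig_stub, "DIGSCAN"), (mdns_stub, "AVAHISCAN")]

for resolve_fn, label in strategies:
    resolved = resolve_fn("aa:bb", "192.168.1.10")
    if resolved.raw not in ["(name not found)", "", "localhost."]:
        print(label, resolved.cleaned, resolved.raw)  # AVAHISCAN printer printer.local.
        break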
#-------------------------------------------------------------------------------

# -------------------------------------------------------------------------------
# Updates devPresentLastScan for parent devices based on the presence of their NICs
def update_devPresentLastScan_based_on_nics(db):
"""
@@ -707,65 +822,75 @@ def update_devPresentLastScan_based_on_nics(db):
# Step 3: Execute batch update
for present, mac in updates:
sql.execute(
"UPDATE Devices SET devPresentLastScan = ? WHERE devMac = ?",
(present, mac)
"UPDATE Devices SET devPresentLastScan = ? WHERE devMac = ?", (present, mac)
)

db.commitDB()
return len(updates)

#-------------------------------------------------------------------------------

# -------------------------------------------------------------------------------
# Check if the variable contains a valid MAC address or "Internet"
def check_mac_or_internet(input_str):
# Regular expression pattern for matching a MAC address
mac_pattern = r'([0-9A-Fa-f]{2}[:-][0-9A-Fa-f]{2}[:-][0-9A-Fa-f]{2}[:-][0-9A-Fa-f]{2}[:-][0-9A-Fa-f]{2}[:-][0-9A-Fa-f]{2})'
mac_pattern = r"([0-9A-Fa-f]{2}[:-][0-9A-Fa-f]{2}[:-][0-9A-Fa-f]{2}[:-][0-9A-Fa-f]{2}[:-][0-9A-Fa-f]{2}[:-][0-9A-Fa-f]{2})"

if input_str.lower() == 'internet':
if input_str.lower() == "internet":
return True
elif re.match(mac_pattern, input_str):
return True
else:
return False

#-------------------------------------------------------------------------------
# Lookup unknown vendors on devices
def query_MAC_vendor (pMAC):

# -------------------------------------------------------------------------------
# Lookup unknown vendors on devices
def query_MAC_vendor(pMAC):
pMACstr = str(pMAC)

filePath = vendorsPath

if os.path.isfile(vendorsPathNewest):
filePath = vendorsPathNewest

# Check MAC parameter
mac = pMACstr.replace (':','').lower()
if len(pMACstr) != 17 or len(mac) != 12 :
return -2 # return -2 if ignored MAC
mac = pMACstr.replace(":", "").lower()
if len(pMACstr) != 17 or len(mac) != 12:
return -2 # return -2 if ignored MAC

# Search vendor in HW Vendors DB
mac_start_string6 = mac[0:6]
mac_start_string9 = mac[0:9]
mac_start_string6 = mac[0:6]
mac_start_string9 = mac[0:9]

try:
with open(filePath, 'r') as f:
with open(filePath, "r") as f:
for line in f:
line_lower = line.lower() # Convert line to lowercase for case-insensitive matching
if line_lower.startswith(mac_start_string6):
parts = line.split('\t', 1)
line_lower = (
line.lower()
) # Convert line to lowercase for case-insensitive matching
if line_lower.startswith(mac_start_string6):
parts = line.split("\t", 1)
if len(parts) > 1:
vendor = parts[1].strip()
mylog('debug', [f"[Vendor Check] Found '{vendor}' for '{pMAC}' in {vendorsPath}"])
mylog(
"debug",
[
f"[Vendor Check] Found '{vendor}' for '{pMAC}' in {vendorsPath}"
],
)
return vendor
else:
mylog('debug', [f'[Vendor Check] ⚠ ERROR: Match found, but line could not be processed: "{line_lower}"'])
mylog(
"debug",
[
f'[Vendor Check] ⚠ ERROR: Match found, but line could not be processed: "{line_lower}"'
],
)
return -1

return -1 # MAC address not found in the database
except FileNotFoundError:
mylog('none', [f"[Vendor Check] ⚠ ERROR: Vendors file {vendorsPath} not found."])
mylog(
"none", [f"[Vendor Check] ⚠ ERROR: Vendors file {vendorsPath} not found."]
)
return -1
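query_MAC_vendor normalizes the MAC and matches its first six hex digits (the OUI) against what the diff shows to be a tab-separated vendors file; a sketch of that lookup against in-memory stand-in data:

vendors_db = "b827eb\tRaspberry Pi Foundation\n001a2b\tSomeVendor Inc\n"

pMAC = "B8:27:EB:12:34:56"
mac = pMAC.replace(":", "").lower()
oui = mac[0:6]

vendor = -1  # same "not found" sentinel as above
for line in vendors_db.splitlines():
    if line.lower().startswith(oui):
        parts = line.split("\t", 1)
        if len(parts) > 1:
            vendor = parts[1].strip()
        break
print(vendor)  # Raspberry Pi Foundation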
||||
@@ -1,18 +1,16 @@
import sys
import os
import re
import json
import base64
from pathlib import Path
from typing import Optional, List, Tuple, Dict
from typing import Optional, Tuple

# Register NetAlertX directories
INSTALL_PATH = "/app"
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
sys.path.extend([f"{INSTALL_PATH}/server"])

import conf
from const import *
from logger import mylog
from helper import timeNowTZ, get_setting_value

# Load MAC/device-type/icon rules from external file
MAC_TYPE_ICON_PATH = Path(f"{INSTALL_PATH}/back/device_heuristics_rules.json")
@@ -30,15 +28,16 @@ try:
                rule["icon_base64"] = ""
except Exception as e:
    MAC_TYPE_ICON_RULES = []
    mylog('none', f"[guess_device_attributes] Failed to load device_heuristics_rules.json: {e}")
    mylog(
        "none",
        f"[guess_device_attributes] Failed to load device_heuristics_rules.json: {e}",
    )


# -----------------------------------------
# Match device type and base64-encoded icon using MAC prefix and vendor patterns.
def match_mac_and_vendor(
    mac_clean: str,
    vendor: str,
    default_type: str,
    default_icon: str
    mac_clean: str, vendor: str, default_type: str, default_icon: str
) -> Tuple[str, str]:
    """
    Match device type and base64-encoded icon using MAC prefix and vendor patterns.
@@ -63,8 +62,7 @@ def match_mac_and_vendor(

            if mac_clean.startswith(mac_prefix):
                if not vendor_pattern or vendor_pattern in vendor:

                    mylog('debug', f"[guess_device_attributes] Matched via MAC+Vendor")
                    mylog("debug", "[guess_device_attributes] Matched via MAC+Vendor")

                    type_ = dev_type
                    icon = base64_icon or default_icon
@@ -72,14 +70,10 @@ def match_mac_and_vendor(

    return default_type, default_icon


# ---------------------------------------------------
# Match device type and base64-encoded icon using vendor patterns.
def match_vendor(
    vendor: str,
    default_type: str,
    default_icon: str
) -> Tuple[str, str]:

def match_vendor(vendor: str, default_type: str, default_icon: str) -> Tuple[str, str]:
    vendor_lc = vendor.lower()

    for rule in MAC_TYPE_ICON_RULES:
@@ -92,9 +86,8 @@ def match_vendor(
            mac_prefix = pattern.get("mac_prefix", "")
            vendor_pattern = pattern.get("vendor", "").lower()

            if vendor_pattern and vendor_pattern in vendor_lc:

                mylog('debug', f"[guess_device_attributes] Matched via Vendor")
            if vendor_pattern and vendor_pattern in vendor_lc:
                mylog("debug", "[guess_device_attributes] Matched via Vendor")

                icon = base64_icon or default_icon

@@ -102,13 +95,10 @@ def match_vendor(

    return default_type, default_icon


# ---------------------------------------------------
# Match device type and base64-encoded icon using name patterns.
def match_name(
    name: str,
    default_type: str,
    default_icon: str
) -> Tuple[str, str]:
def match_name(name: str, default_type: str, default_icon: str) -> Tuple[str, str]:
    """
    Match device type and base64-encoded icon using name patterns from global MAC_TYPE_ICON_RULES.

@@ -130,8 +120,7 @@ def match_name(
        for pattern in name_patterns:
            # Use regex search to allow pattern substrings
            if re.search(pattern, name_lower, re.IGNORECASE):

                mylog('debug', f"[guess_device_attributes] Matched via Name")
                mylog("debug", "[guess_device_attributes] Matched via Name")

                type_ = dev_type
                icon = base64_icon or default_icon
@@ -139,13 +128,10 @@ def match_name(

    return default_type, default_icon

#-------------------------------------------------------------------------------

# -------------------------------------------------------------------------------
#
def match_ip(
    ip: str,
    default_type: str,
    default_icon: str
) -> Tuple[str, str]:
def match_ip(ip: str, default_type: str, default_icon: str) -> Tuple[str, str]:
    """
    Match device type and base64-encoded icon using IP regex patterns from global JSON.

@@ -167,8 +153,7 @@ def match_ip(

        for pattern in ip_patterns:
            if re.match(pattern, ip):

                mylog('debug', f"[guess_device_attributes] Matched via IP")
                mylog("debug", "[guess_device_attributes] Matched via IP")

                type_ = dev_type
                icon = base64_icon or default_icon
@@ -176,7 +161,8 @@ def match_ip(

    return default_type, default_icon

#-------------------------------------------------------------------------------

# -------------------------------------------------------------------------------
# Guess device attributes such as type of device and associated device icon
def guess_device_attributes(
    vendor: Optional[str],
@@ -184,16 +170,19 @@ def guess_device_attributes(
    ip: Optional[str],
    name: Optional[str],
    default_icon: str,
    default_type: str
    default_type: str,
) -> Tuple[str, str]:
    mylog('debug', f"[guess_device_attributes] Guessing attributes for (vendor|mac|ip|name): ('{vendor}'|'{mac}'|'{ip}'|'{name}')")
    mylog(
        "debug",
        f"[guess_device_attributes] Guessing attributes for (vendor|mac|ip|name): ('{vendor}'|'{mac}'|'{ip}'|'{name}')",
    )

    # --- Normalize inputs ---
    vendor = str(vendor).lower().strip() if vendor else "unknown"
    mac = str(mac).upper().strip() if mac else "00:00:00:00:00:00"
    ip = str(ip).strip() if ip else "169.254.0.0"
    name = str(name).lower().strip() if name else "(unknown)"
    mac_clean = mac.replace(':', '').replace('-', '').upper()
    mac_clean = mac.replace(":", "").replace("-", "").upper()

    # # Internet shortcut
    # if mac == "INTERNET":
@@ -221,7 +210,10 @@ def guess_device_attributes(
    type_ = type_ or default_type
    icon = icon or default_icon

    mylog('debug', f"[guess_device_attributes] Guessed attributes (icon|type_): ('{icon}'|'{type_}')")
    mylog(
        "debug",
        f"[guess_device_attributes] Guessed attributes (icon|type_): ('{icon}'|'{type_}')",
    )
    return icon, type_


@@ -231,8 +223,8 @@ def guess_icon(
    mac: Optional[str],
    ip: Optional[str],
    name: Optional[str],
    default: str
) -> str:
    default: str,
) -> str:
    """
    [DEPRECATED] Guess the appropriate FontAwesome icon for a device based on its attributes.
    Use guess_device_attributes instead.
@@ -247,17 +239,18 @@ def guess_icon(
    Returns:
        str: Base64-encoded FontAwesome icon HTML string.
    """

    icon, _ = guess_device_attributes(vendor, mac, ip, name, default, "unknown_type")
    return icon


def guess_type(
    vendor: Optional[str],
    mac: Optional[str],
    ip: Optional[str],
    name: Optional[str],
    default: str
) -> str:
    default: str,
) -> str:
    """
    [DEPRECATED] Guess the device type based on its attributes.
    Use guess_device_attributes instead.
@@ -272,11 +265,11 @@ def guess_type(
    Returns:
        str: Device type.
    """

    _, type_ = guess_device_attributes(vendor, mac, ip, name, "unknown_icon", default)
    return type_


# Handler for when this is run as a program instead of called as a module.
if __name__ == "__main__":
    mylog('error', "This module is not intended to be run directly.")

    mylog("error", "This module is not intended to be run directly.")
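The matchers above are tried from most to least specific (MAC prefix plus vendor, then vendor, then name, then IP). A hypothetical device_heuristics_rules.json entry showing the fields the matchers read; the container key names are assumptions, since the full schema is not part of this diff:

# Illustrative rule shape; only mac_prefix/vendor/icon_base64 and the
# name/IP regex-list usage appear in the code above, the rest is assumed.
example_rule = {
    "type": "Router",
    "icon_base64": "",  # base64-encoded FontAwesome icon HTML, per guess_icon
    "patterns": [{"mac_prefix": "D8EB97", "vendor": "tp-link"}],
    "name_patterns": ["router", "gateway"],
    "ip_patterns": [r"^192\.168\.\d+\.1$"],
}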
@@ -1,26 +1,26 @@
import sys
import os
import re
import subprocess
import socket
import dns.resolver

# Register NetAlertX directories
INSTALL_PATH = "/app"
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
sys.path.extend([f"{INSTALL_PATH}/server"])

import conf
from const import *
from logger import mylog
from helper import get_setting_value


class ResolvedName:
    def __init__(self, raw: str = "(name not found)", cleaned: str = "(name not found)"):
    def __init__(
        self, raw: str = "(name not found)", cleaned: str = "(name not found)"
    ):
        self.raw = raw
        self.cleaned = cleaned

    def __str__(self):
        return self.cleaned


class NameResolver:
    def __init__(self, db):
        self.db = db
@@ -66,18 +66,18 @@ class NameResolver:
        return self.resolve_from_plugin("DIGSCAN", pMAC, pIP)

    def clean_device_name(self, name: str, match_ip: bool) -> str:
        mylog('debug', [f"[cleanDeviceName] input: {name}"])
        mylog("debug", [f"[cleanDeviceName] input: {name}"])

        if match_ip:
            name += " (IP match)"

        regexes = get_setting_value('NEWDEV_NAME_CLEANUP_REGEX') or []
        regexes = get_setting_value("NEWDEV_NAME_CLEANUP_REGEX") or []
        for rgx in regexes:
            mylog('trace', [f"[cleanDeviceName] applying regex: {rgx}"])
            mylog("trace", [f"[cleanDeviceName] applying regex: {rgx}"])
            name = re.sub(rgx, "", name)

        name = re.sub(r'\.$', '', name)
        name = re.sub(r"\.$", "", name)
        name = name.replace(". (IP match)", " (IP match)")

        mylog('debug', [f"[cleanDeviceName] output: {name}"])
        mylog("debug", [f"[cleanDeviceName] output: {name}"])
        return name
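clean_device_name applies each NEWDEV_NAME_CLEANUP_REGEX in order and then strips a trailing dot. The same pass in isolation, with a hypothetical regex list standing in for the real setting value:

import re

# Hypothetical cleanup pass mirroring clean_device_name above: each regex
# removes matched text, then any trailing dot is stripped.
name = "printer.lan."
for rgx in [r"\.local$", r"\.lan\.?$"]:
    name = re.sub(rgx, "", name)
name = re.sub(r"\.$", "", name)
print(name)  # -> "printer"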
@@ -1,11 +1,17 @@
import sys
import os

# Register NetAlertX directories
INSTALL_PATH="/app"
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
sys.path.extend([f"{INSTALL_PATH}/server"])

import conf
from scan.device_handling import create_new_devices, print_scan_stats, save_scanned_devices, exclude_ignored_devices, update_devices_data_from_scan
from scan.device_handling import (
    create_new_devices,
    print_scan_stats,
    save_scanned_devices,
    exclude_ignored_devices,
    update_devices_data_from_scan,
)
from helper import timeNowTZ, get_setting_value
from db.db_helper import print_table_schema
from logger import mylog, Logger
@@ -13,73 +19,75 @@ from messaging.reporting import skip_repeated_notifications


# Make sure log level is initialized correctly
Logger(get_setting_value('LOG_LEVEL'))
Logger(get_setting_value("LOG_LEVEL"))

#===============================================================================
# ===============================================================================
# SCAN NETWORK
#===============================================================================
# ===============================================================================

def process_scan (db):

def process_scan(db):
    # Apply exclusions
    mylog('verbose','[Process Scan] Exclude ignored devices')
    exclude_ignored_devices (db)
    mylog("verbose", "[Process Scan] Exclude ignored devices")
    exclude_ignored_devices(db)

    # Load current scan data
    mylog('verbose','[Process Scan] Processing scan results')
    save_scanned_devices (db)
    mylog("verbose", "[Process Scan] Processing scan results")
    save_scanned_devices(db)

    db.commitDB()

    # Print stats
    mylog('none','[Process Scan] Print Stats')
    print_scan_stats(db)
    mylog('none','[Process Scan] Stats end')

    # Create Events
    mylog('verbose','[Process Scan] Sessions Events (connect / disconnect)')
    # Print stats
    mylog("none", "[Process Scan] Print Stats")
    print_scan_stats(db)
    mylog("none", "[Process Scan] Stats end")

    # Create Events
    mylog("verbose", "[Process Scan] Sessions Events (connect / disconnect)")
    insert_events(db)

    # Create New Devices
    # after create events -> avoid 'connection' event
    mylog('verbose','[Process Scan] Creating new devices')
    create_new_devices (db)
    mylog("verbose", "[Process Scan] Creating new devices")
    create_new_devices(db)

    # Update devices info
    mylog('verbose','[Process Scan] Updating Devices Info')
    update_devices_data_from_scan (db)
    mylog("verbose", "[Process Scan] Updating Devices Info")
    update_devices_data_from_scan(db)

    # Pair session events (Connection / Disconnection)
    mylog('verbose','[Process Scan] Pairing session events (connection / disconnection) ')
    pair_sessions_events(db)

    # Sessions snapshot
    mylog('verbose','[Process Scan] Creating sessions snapshot')
    create_sessions_snapshot (db)
    mylog(
        "verbose", "[Process Scan] Pairing session events (connection / disconnection) "
    )
    pair_sessions_events(db)

    # Sessions snapshot
    mylog('verbose','[Process Scan] Inserting scan results into Online_History')
    mylog("verbose", "[Process Scan] Creating sessions snapshot")
    create_sessions_snapshot(db)

    # Sessions snapshot
    mylog("verbose", "[Process Scan] Inserting scan results into Online_History")
    insertOnlineHistory(db)

    # Skip repeated notifications
    mylog('verbose','[Process Scan] Skipping repeated notifications')
    skip_repeated_notifications (db)

    # Clear current scan as processed
    # Skip repeated notifications
    mylog("verbose", "[Process Scan] Skipping repeated notifications")
    skip_repeated_notifications(db)

    # Clear current scan as processed
    # 🐛 CurrentScan DEBUG: comment out below when debugging to keep the CurrentScan table after restarts/scan finishes
    db.sql.execute ("DELETE FROM CurrentScan")

    # Commit changes
    db.sql.execute("DELETE FROM CurrentScan")

    # Commit changes
    db.commitDB()

#-------------------------------------------------------------------------------
def pair_sessions_events (db):

    sql = db.sql #TO-DO
# -------------------------------------------------------------------------------
def pair_sessions_events(db):
    sql = db.sql  # TO-DO
    # Pair Connection / New Device events

    mylog('debug','[Pair Session] - 1 Connections / New Devices')
    sql.execute ("""UPDATE Events
    mylog("debug", "[Pair Session] - 1 Connections / New Devices")
    sql.execute("""UPDATE Events
                SET eve_PairEventRowid =
                    (SELECT ROWID
                     FROM Events AS EVE2
@@ -90,49 +98,48 @@ def pair_sessions_events (db):
                     ORDER BY EVE2.eve_DateTime ASC LIMIT 1)
                WHERE eve_EventType IN ('New Device', 'Connected', 'Down Reconnected')
                  AND eve_PairEventRowid IS NULL
                """ )
                """)

    # Pair Disconnection / Device Down
    mylog('debug','[Pair Session] - 2 Disconnections')
    sql.execute ("""UPDATE Events
    mylog("debug", "[Pair Session] - 2 Disconnections")
    sql.execute("""UPDATE Events
                SET eve_PairEventRowid =
                    (SELECT ROWID
                     FROM Events AS EVE2
                     WHERE EVE2.eve_PairEventRowid = Events.ROWID)
                WHERE eve_EventType IN ('Device Down', 'Disconnected')
                  AND eve_PairEventRowid IS NULL
                """ )
                """)


    mylog('debug','[Pair Session] Pair session end')
    mylog("debug", "[Pair Session] Pair session end")
    db.commitDB()


#-------------------------------------------------------------------------------
def create_sessions_snapshot (db):
    sql = db.sql #TO-DO
# -------------------------------------------------------------------------------
def create_sessions_snapshot(db):
    sql = db.sql  # TO-DO

    # Clean sessions snapshot
    mylog('debug','[Sessions Snapshot] - 1 Clean')
    sql.execute ("DELETE FROM SESSIONS" )
    mylog("debug", "[Sessions Snapshot] - 1 Clean")
    sql.execute("DELETE FROM SESSIONS")

    # Insert sessions
    mylog('debug','[Sessions Snapshot] - 2 Insert')
    sql.execute ("""INSERT INTO Sessions
                SELECT * FROM Convert_Events_to_Sessions""" )
    mylog("debug", "[Sessions Snapshot] - 2 Insert")
    sql.execute("""INSERT INTO Sessions
                SELECT * FROM Convert_Events_to_Sessions""")

    mylog('debug','[Sessions Snapshot] Sessions end')
    mylog("debug", "[Sessions Snapshot] Sessions end")
    db.commitDB()


#-------------------------------------------------------------------------------
def insert_events (db):
    sql = db.sql #TO-DO
    startTime = timeNowTZ()

# -------------------------------------------------------------------------------
def insert_events(db):
    sql = db.sql  # TO-DO
    startTime = timeNowTZ()

    # Check device down
    mylog('debug','[Events] - 1 - Devices down')
    sql.execute (f"""INSERT INTO Events (eve_MAC, eve_IP, eve_DateTime,
    mylog("debug", "[Events] - 1 - Devices down")
    sql.execute(f"""INSERT INTO Events (eve_MAC, eve_IP, eve_DateTime,
                        eve_EventType, eve_AdditionalInfo,
                        eve_PendingAlertEmail)
                    SELECT devMac, devLastIP, '{startTime}', 'Device Down', '', 1
@@ -144,8 +151,8 @@ def insert_events (db):
                    ) """)

    # Check new Connections or Down Reconnections
    mylog('debug','[Events] - 2 - New Connections')
    sql.execute (f""" INSERT INTO Events (eve_MAC, eve_IP, eve_DateTime,
    mylog("debug", "[Events] - 2 - New Connections")
    sql.execute(f""" INSERT INTO Events (eve_MAC, eve_IP, eve_DateTime,
                         eve_EventType, eve_AdditionalInfo,
                         eve_PendingAlertEmail)
                     SELECT DISTINCT c.cur_MAC, c.cur_IP, '{startTime}',
@@ -161,8 +168,8 @@ def insert_events (db):
                """)

    # Check disconnections
    mylog('debug','[Events] - 3 - Disconnections')
    sql.execute (f"""INSERT INTO Events (eve_MAC, eve_IP, eve_DateTime,
    mylog("debug", "[Events] - 3 - Disconnections")
    sql.execute(f"""INSERT INTO Events (eve_MAC, eve_IP, eve_DateTime,
                        eve_EventType, eve_AdditionalInfo,
                        eve_PendingAlertEmail)
                    SELECT devMac, devLastIP, '{startTime}', 'Disconnected', '',
@@ -175,19 +182,19 @@ def insert_events (db):
                    ) """)

    # Check IP Changed
    mylog('debug','[Events] - 4 - IP Changes')
    sql.execute (f"""INSERT INTO Events (eve_MAC, eve_IP, eve_DateTime,
    mylog("debug", "[Events] - 4 - IP Changes")
    sql.execute(f"""INSERT INTO Events (eve_MAC, eve_IP, eve_DateTime,
                        eve_EventType, eve_AdditionalInfo,
                        eve_PendingAlertEmail)
                    SELECT cur_MAC, cur_IP, '{startTime}', 'IP Changed',
                        'Previous IP: '|| devLastIP, devAlertEvents
                    FROM Devices, CurrentScan
                    WHERE devMac = cur_MAC
                        AND devLastIP <> cur_IP """ )
    mylog('debug','[Events] - Events end')


#-------------------------------------------------------------------------------
                        AND devLastIP <> cur_IP """)
    mylog("debug", "[Events] - Events end")


# -------------------------------------------------------------------------------
def insertOnlineHistory(db):
    sql = db.sql  # TO-DO: Implement sql object

@@ -202,14 +209,16 @@ def insertOnlineHistory(db):
            COALESCE(SUM(CASE WHEN devPresentLastScan = 0 AND devAlertDown = 1 THEN 1 ELSE 0 END), 0) AS downDevices
        FROM Devices
    """

    deviceCounts = db.read(query)[0] # Assuming db.read returns a list of rows, take the first (and only) row

    allDevices = deviceCounts['allDevices']
    archivedDevices = deviceCounts['archivedDevices']
    onlineDevices = deviceCounts['onlineDevices']
    downDevices = deviceCounts['downDevices']

    deviceCounts = db.read(query)[
        0
    ]  # Assuming db.read returns a list of rows, take the first (and only) row

    allDevices = deviceCounts["allDevices"]
    archivedDevices = deviceCounts["archivedDevices"]
    onlineDevices = deviceCounts["onlineDevices"]
    downDevices = deviceCounts["downDevices"]

    offlineDevices = allDevices - archivedDevices - onlineDevices

    # Prepare the insert query using parameterized inputs
@@ -217,15 +226,26 @@ def insertOnlineHistory(db):
        INSERT INTO Online_History (Scan_Date, Online_Devices, Down_Devices, All_Devices, Archived_Devices, Offline_Devices)
        VALUES (?, ?, ?, ?, ?, ?)
    """

    mylog('debug', f'[Presence graph] Sql query: {insert_query} with values: {scanTimestamp}, {onlineDevices}, {downDevices}, {allDevices}, {archivedDevices}, {offlineDevices}')

    # Debug output
    mylog(
        "debug",
        f"[Presence graph] Sql query: {insert_query} with values: {scanTimestamp}, {onlineDevices}, {downDevices}, {allDevices}, {archivedDevices}, {offlineDevices}",
    )

    # Debug output
    print_table_schema(db, "Online_History")

    # Insert the gathered data into the history table
    sql.execute(insert_query, (scanTimestamp, onlineDevices, downDevices, allDevices, archivedDevices, offlineDevices))
    sql.execute(
        insert_query,
        (
            scanTimestamp,
            onlineDevices,
            downDevices,
            allDevices,
            archivedDevices,
            offlineDevices,
        ),
    )

    db.commitDB()
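The Online_History write uses a parameterized INSERT rather than string interpolation. The same pattern in a self-contained form (table and values here are purely illustrative):

import sqlite3

# Parameterized insert: placeholders keep the values out of the SQL text,
# which avoids quoting bugs and SQL injection (same pattern as above).
con = sqlite3.connect(":memory:")
con.execute("CREATE TABLE Online_History (Scan_Date, Online_Devices, Down_Devices)")
con.execute(
    "INSERT INTO Online_History VALUES (?, ?, ?)",
    ("2021-02-01 00:00:00", 12, 1),
)
con.commit()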
@@ -1,43 +1,53 @@
""" class to manage schedules """
"""class to manage schedules"""

import datetime

from logger import mylog
import conf

#-------------------------------------------------------------------------------

# -------------------------------------------------------------------------------
class schedule_class:
    def __init__(self, service, scheduleObject, last_next_schedule, was_last_schedule_used, last_run = 0):
    def __init__(
        self,
        service,
        scheduleObject,
        last_next_schedule,
        was_last_schedule_used,
        last_run=0,
    ):
        self.service = service
        self.scheduleObject = scheduleObject
        self.last_next_schedule = last_next_schedule
        self.last_run = last_run
        self.was_last_schedule_used = was_last_schedule_used

    def runScheduleCheck(self):
        self.was_last_schedule_used = was_last_schedule_used

        result = False
    def runScheduleCheck(self):
        result = False

        # Initialize the last run time if never run before
        if self.last_run == 0:
            self.last_run = (datetime.datetime.now(conf.tz) - datetime.timedelta(days=365)).replace(microsecond=0)
            self.last_run = (
                datetime.datetime.now(conf.tz) - datetime.timedelta(days=365)
            ).replace(microsecond=0)

        # get the current time with the currently specified timezone
        nowTime = datetime.datetime.now(conf.tz).replace(microsecond=0)

        # Run the schedule if the current time is past the schedule time we saved last time and
        # (maybe the following check is unnecessary)
        # Run the schedule if the current time is past the schedule time we saved last time and
        # (maybe the following check is unnecessary)
        if nowTime > self.last_next_schedule:
            mylog('verbose',f'[Scheduler] run for {self.service}: YES')
            mylog("verbose", f"[Scheduler] run for {self.service}: YES")
            self.was_last_schedule_used = True
            result = True
        else:
            mylog('verbose',f'[Scheduler] run for {self.service}: NO')
            mylog("verbose", f"[Scheduler] run for {self.service}: NO")
            # mylog('debug',f'[Scheduler] - nowTime {nowTime}')
            # mylog('debug',f'[Scheduler] - self.last_next_schedule {self.last_next_schedule}')
            # mylog('debug',f'[Scheduler] - self.last_run {self.last_run}')


        if self.was_last_schedule_used:
            self.was_last_schedule_used = False
            self.last_next_schedule = self.scheduleObject.next()
            self.last_next_schedule = self.scheduleObject.next()

        return result
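Reduced to its core, the check above fires once the saved next-run time has passed and then advances that time via the schedule object. A sketch, assuming scheduleObject.next() returns the following occurrence as a timezone-aware datetime (as the class above relies on):

# Sketch of the scheduler gate above: run when 'now' passes the stored
# next-run time, then advance the stored time for the next check.
class MiniSchedule:
    def __init__(self, schedule_object, last_next_schedule):
        self.scheduleObject = schedule_object
        self.last_next_schedule = last_next_schedule

    def should_run(self, now):
        if now > self.last_next_schedule:
            self.last_next_schedule = self.scheduleObject.next()
            return True
        return False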
@@ -1,26 +1,26 @@
import sys
import sqlite3
import os
import sys

# Register NetAlertX directories
INSTALL_PATH="/app"
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
sys.path.extend([f"{INSTALL_PATH}/server"])

import conf
from logger import mylog, Logger
from helper import get_setting_value, timeNowTZ
from helper import get_setting_value
from models.device_instance import DeviceInstance
from models.plugin_object_instance import PluginObjectInstance

# Make sure log level is initialized correctly
Logger(get_setting_value('LOG_LEVEL'))
Logger(get_setting_value("LOG_LEVEL"))


from workflows.triggers import Trigger

class Action:
    """Base class for all actions."""

    def __init__(self, trigger):
        self.trigger = trigger
    def __init__(self, trigger):
        self.trigger = trigger

    def execute(self, obj):
        """Executes the action on the given object."""
@@ -37,7 +37,10 @@ class UpdateFieldAction(Action):
        self.db = db

    def execute(self):
        mylog('verbose', f"[WF] Updating field '{self.field}' to '{self.value}' for event object {self.trigger.object_type}")
        mylog(
            "verbose",
            f"[WF] Updating field '{self.field}' to '{self.value}' for event object {self.trigger.object_type}",
        )

        obj = self.trigger.object

@@ -49,19 +52,19 @@ class UpdateFieldAction(Action):

        # currently unused
        if isinstance(obj, dict) and "ObjectGUID" in obj:
            mylog('debug', f"[WF] Updating Object '{obj}' ")
            mylog("debug", f"[WF] Updating Object '{obj}' ")
            plugin_instance = PluginObjectInstance(self.db)
            plugin_instance.updateField(obj["ObjectGUID"], self.field, self.value)
            processed = True

        elif isinstance(obj, dict) and "devGUID" in obj:
            mylog('debug', f"[WF] Updating Device '{obj}' ")
            mylog("debug", f"[WF] Updating Device '{obj}' ")
            device_instance = DeviceInstance(self.db)
            device_instance.updateField(obj["devGUID"], self.field, self.value)
            processed = True

        if not processed:
            mylog('none', f"[WF] Could not process action for object: {obj}")
            mylog("none", f"[WF] Could not process action for object: {obj}")

        return obj

@@ -74,7 +77,7 @@ class DeleteObjectAction(Action):
        self.db = db

    def execute(self):
        mylog('verbose', f"[WF] Deleting event object {self.trigger.object_type}")
        mylog("verbose", f"[WF] Deleting event object {self.trigger.object_type}")

        obj = self.trigger.object

@@ -84,21 +87,21 @@ class DeleteObjectAction(Action):

        processed = False

        # currently unused
        # currently unused
        if isinstance(obj, dict) and "ObjectGUID" in obj:
            mylog('debug', f"[WF] Updating Object '{obj}' ")
            mylog("debug", f"[WF] Updating Object '{obj}' ")
            plugin_instance = PluginObjectInstance(self.db)
            plugin_instance.delete(obj["ObjectGUID"])
            processed = True

        elif isinstance(obj, dict) and "devGUID" in obj:
            mylog('debug', f"[WF] Updating Device '{obj}' ")
            mylog("debug", f"[WF] Updating Device '{obj}' ")
            device_instance = DeviceInstance(self.db)
            device_instance.delete(obj["devGUID"])
            processed = True

        if not processed:
            mylog('none', f"[WF] Could not process action for object: {obj}")
            mylog("none", f"[WF] Could not process action for object: {obj}")

        return obj

@@ -112,10 +115,14 @@ class RunPluginAction(Action):
        self.params = params

    def execute(self):

        obj = self.trigger.object

        mylog('verbose', [f"Executing plugin '{self.plugin_name}' with parameters {self.params} for object {obj}"])
        mylog(
            "verbose",
            [
                f"Executing plugin '{self.plugin_name}' with parameters {self.params} for object {obj}"
            ],
        )
        # PluginManager.run(self.plugin_name, self.parameters)
        return obj

@@ -130,7 +137,12 @@ class SendNotificationAction(Action):

    def execute(self):
        obj = self.trigger.object
        mylog('verbose', [f"Sending notification via '{self.method}': {self.message} for object {obj}"])
        mylog(
            "verbose",
            [
                f"Sending notification via '{self.method}': {self.message} for object {obj}"
            ],
        )
        # NotificationManager.send(self.method, self.message)
        return obj

@@ -144,4 +156,4 @@ class ActionGroup:
    def execute(self, obj):
        for action in self.actions:
            action.execute(obj)
        return obj
        return obj
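Every concrete action follows the same contract: take the trigger in the constructor, implement execute(), and return the object. A hypothetical subclass (not part of this commit) illustrating the shape, using the Action base class and mylog from the module above:

# Hypothetical Action subclass following the contract above: it only logs
# the triggering object and returns it unchanged.
class LogObjectAction(Action):
    def execute(self):
        obj = self.trigger.object
        mylog("verbose", [f"[WF] LogObjectAction saw object: {obj}"])
        return obj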
@@ -1,37 +1,28 @@
import datetime
import json
import uuid
import os
import sys
import pytz

# Register NetAlertX directories
INSTALL_PATH="/app"
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
sys.path.extend([f"{INSTALL_PATH}/server"])

# Register NetAlertX modules
import conf
from helper import get_setting_value, timeNowTZ
# Make sure the TIMEZONE for logging is correct
# conf.tz = pytz.timezone(get_setting_value('TIMEZONE'))

from logger import mylog, Logger, logResult
from helper import get_setting_value
from logger import Logger
from const import sql_generateGuid

# Make sure log level is initialized correctly
Logger(get_setting_value('LOG_LEVEL'))
Logger(get_setting_value("LOG_LEVEL"))

from const import applicationPath, logPath, apiPath, confFileName, sql_generateGuid
from helper import timeNowTZ

class AppEvent_obj:
    def __init__(self, db):
        self.db = db

        # Drop existing table
        # Drop existing table
        self.db.sql.execute("""DROP TABLE IF EXISTS "AppEvents" """)

        # Drop all triggers
        self.drop_all_triggers()


        # Create the AppEvents table if missing
        self.create_app_events_table()

@@ -47,7 +38,7 @@ class AppEvent_obj:
                "ObjectStatusColumn": "'devPresentLastScan'",
                "ObjectIsNew": "NEW.devIsNew",
                "ObjectIsArchived": "NEW.devIsArchived",
                "ObjectPlugin": "'DEVICES'"
                "ObjectPlugin": "'DEVICES'",
            }
        }
        # ,
@@ -66,7 +57,6 @@ class AppEvent_obj:
        # }
        }


        # Re-Create triggers dynamically
        for table, config in self.object_mapping.items():
            self.create_trigger(table, "insert", config)
@@ -130,8 +120,8 @@ class AppEvent_obj:
                SELECT 1 FROM AppEvents
                WHERE AppEventProcessed = 0
                  AND ObjectType = '{table_name}'
                  AND ObjectGUID = {manage_prefix(config['fields']['ObjectGUID'], event)}
                  AND ObjectStatus = {manage_prefix(config['fields']['ObjectStatus'], event)}
                  AND ObjectGUID = {manage_prefix(config["fields"]["ObjectGUID"], event)}
                  AND ObjectStatus = {manage_prefix(config["fields"]["ObjectStatus"], event)}
                  AND AppEventType = '{event.lower()}'
            )
            BEGIN
@@ -156,15 +146,15 @@ class AppEvent_obj:
                    DATETIME('now'),
                    FALSE,
                    '{table_name}',
                    {manage_prefix(config['fields']['ObjectGUID'], event)}, -- ObjectGUID
                    {manage_prefix(config['fields']['ObjectPrimaryID'], event)}, -- ObjectPrimaryID
                    {manage_prefix(config['fields']['ObjectSecondaryID'], event)}, -- ObjectSecondaryID
                    {manage_prefix(config['fields']['ObjectStatus'], event)}, -- ObjectStatus
                    {manage_prefix(config['fields']['ObjectStatusColumn'], event)}, -- ObjectStatusColumn
                    {manage_prefix(config['fields']['ObjectIsNew'], event)}, -- ObjectIsNew
                    {manage_prefix(config['fields']['ObjectIsArchived'], event)}, -- ObjectIsArchived
                    {manage_prefix(config['fields']['ObjectForeignKey'], event)}, -- ObjectForeignKey
                    {manage_prefix(config['fields']['ObjectPlugin'], event)}, -- ObjectForeignKey
                    {manage_prefix(config["fields"]["ObjectGUID"], event)}, -- ObjectGUID
                    {manage_prefix(config["fields"]["ObjectPrimaryID"], event)}, -- ObjectPrimaryID
                    {manage_prefix(config["fields"]["ObjectSecondaryID"], event)}, -- ObjectSecondaryID
                    {manage_prefix(config["fields"]["ObjectStatus"], event)}, -- ObjectStatus
                    {manage_prefix(config["fields"]["ObjectStatusColumn"], event)}, -- ObjectStatusColumn
                    {manage_prefix(config["fields"]["ObjectIsNew"], event)}, -- ObjectIsNew
                    {manage_prefix(config["fields"]["ObjectIsArchived"], event)}, -- ObjectIsArchived
                    {manage_prefix(config["fields"]["ObjectForeignKey"], event)}, -- ObjectForeignKey
                    {manage_prefix(config["fields"]["ObjectPlugin"], event)}, -- ObjectForeignKey
                    '{event.lower()}'
                );
            END;
@@ -178,9 +168,9 @@ class AppEvent_obj:
        # Commit changes
        self.db.commitDB()


# Manage prefixes of column names
def manage_prefix(field, event):
    if event == "delete":
        return field.replace("NEW.", "OLD.")
    return field

    return field
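manage_prefix exists because a SQLite DELETE trigger exposes only the OLD row, so NEW. column references must be rewritten before the trigger SQL is generated. For example:

# In a DELETE trigger there is no NEW row, so the reference is rewritten
# (mirrors manage_prefix above); other event types pass through unchanged.
print(manage_prefix("NEW.devGUID", "delete"))  # -> OLD.devGUID
print(manage_prefix("NEW.devGUID", "insert"))  # -> NEW.devGUID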
@@ -1,17 +1,18 @@
import re
import sys
import json
import os
import sys

# Register NetAlertX directories
INSTALL_PATH="/app"
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
sys.path.extend([f"{INSTALL_PATH}/server"])

import conf
from logger import mylog, Logger
from helper import get_setting_value, timeNowTZ
from helper import get_setting_value

# Make sure log level is initialized correctly
Logger(get_setting_value('LOG_LEVEL'))
Logger(get_setting_value("LOG_LEVEL"))


class Condition:
    """Evaluates a single condition."""
@@ -23,11 +24,13 @@ class Condition:
        self.negate = condition_json.get("negate", False)

    def evaluate(self, trigger):

        # try finding the value of the field on the event triggering this workflow or thre object triggering the app event
        appEvent_value = trigger.event[self.field] if self.field in trigger.event.keys() else None
        eveObj_value = trigger.object[self.field] if self.field in trigger.object.keys() else None

        appEvent_value = (
            trigger.event[self.field] if self.field in trigger.event.keys() else None
        )
        eveObj_value = (
            trigger.object[self.field] if self.field in trigger.object.keys() else None
        )

        # proceed only if value found
        if appEvent_value is None and eveObj_value is None:
@@ -46,7 +49,7 @@ class Condition:
            result = bool(re.match(self.value, str(obj_value)))
        else:
            m = f"[WF] Unsupported operator: {self.operator}"
            mylog('none', [m])
            mylog("none", [m])
            raise ValueError(m)

        return not result if self.negate else result
@@ -56,8 +59,10 @@ class ConditionGroup:
    """Handles condition groups with AND, OR logic, supporting nested groups."""

    def __init__(self, group_json):

        mylog('verbose', [f"[WF] ConditionGroup json.dumps(group_json): {json.dumps(group_json)}"])
        mylog(
            "verbose",
            [f"[WF] ConditionGroup json.dumps(group_json): {json.dumps(group_json)}"],
        )

        self.logic = group_json.get("logic", "AND").upper()
        self.conditions = []
@@ -77,5 +82,5 @@ class ConditionGroup:
            return any(results)
        else:
            m = f"[WF] ConditionGroup unsupported logic: {self.logic}"
            mylog('verbose', [m])
            mylog("verbose", [m])
            raise ValueError(m)
@@ -1,22 +1,21 @@
import sys
import json
import os
import sys

# Register NetAlertX directories
INSTALL_PATH="/app"
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
sys.path.extend([f"{INSTALL_PATH}/server"])

import conf
from const import fullConfFolder
import workflows.actions
from logger import mylog, Logger
from helper import get_setting_value, timeNowTZ
from helper import get_setting_value

# Make sure log level is initialized correctly
Logger(get_setting_value('LOG_LEVEL'))
Logger(get_setting_value("LOG_LEVEL"))

from workflows.triggers import Trigger
from workflows.conditions import ConditionGroup
from workflows.actions import *
from workflows.actions import DeleteObjectAction, RunPluginAction, UpdateFieldAction

class WorkflowManager:
    def __init__(self, db):
@@ -27,12 +26,12 @@ class WorkflowManager:
    def load_workflows(self):
        """Load workflows from workflows.json."""
        try:
            workflows_json_path = fullConfFolder + '/workflows.json'
            with open(workflows_json_path, 'r') as f:
            workflows_json_path = fullConfFolder + "/workflows.json"
            with open(workflows_json_path, "r") as f:
                workflows = json.load(f)
                return workflows
        except (FileNotFoundError, json.JSONDecodeError):
            mylog('none', ['[WF] Failed to load workflows.json'])
            mylog("none", ["[WF] Failed to load workflows.json"])
            return []

    def get_new_app_events(self):
@@ -43,43 +42,50 @@ class WorkflowManager:
            ORDER BY DateTimeCreated ASC
        """).fetchall()

        mylog('none', [f'[WF] get_new_app_events - new events count: {len(result)}'])
        mylog("none", [f"[WF] get_new_app_events - new events count: {len(result)}"])

        return result

    def process_event(self, event):
        """Process the events. Check if events match a workflow trigger"""


        evGuid = event["GUID"]

        mylog('verbose', [f"[WF] Processing event with GUID {evGuid}"])

        mylog("verbose", [f"[WF] Processing event with GUID {evGuid}"])

        # Check if the trigger conditions match
        for workflow in self.workflows:

            # Ensure workflow is enabled before proceeding
            if workflow.get("enabled", "No").lower() == "yes":
                wfName = workflow["name"]
                mylog('debug', [f"[WF] Checking if '{evGuid}' triggers the workflow '{wfName}'"])

                mylog(
                    "debug",
                    [f"[WF] Checking if '{evGuid}' triggers the workflow '{wfName}'"],
                )

                # construct trigger object which also evaluates if the current event triggers it
                trigger = Trigger(workflow["trigger"], event, self.db)

                if trigger.triggered:

                    mylog('verbose', [f"[WF] Event with GUID '{evGuid}' triggered the workflow '{wfName}'"])
                    mylog(
                        "verbose",
                        [
                            f"[WF] Event with GUID '{evGuid}' triggered the workflow '{wfName}'"
                        ],
                    )

                    self.execute_workflow(workflow, trigger)

        # After processing the event, mark the event as processed (set AppEventProcessed to 1)
        self.db.sql.execute("""
        self.db.sql.execute(
            """
            UPDATE AppEvents
            SET AppEventProcessed = 1
            WHERE "Index" = ?
        """, (event['Index'],)) # Pass the event's unique identifier
        """,
            (event["Index"],),
        )  # Pass the event's unique identifier
        self.db.commitDB()


    def execute_workflow(self, workflow, trigger):
        """Execute the actions in the given workflow if conditions are met."""
@@ -88,25 +94,27 @@ class WorkflowManager:

        # Ensure conditions exist
        if not isinstance(workflow.get("conditions"), list):
            m = f"[WF] workflow['conditions'] must be a list"
            mylog('none', [m])
            m = "[WF] workflow['conditions'] must be a list"
            mylog("none", [m])
            raise ValueError(m)

        # Evaluate each condition group separately
        for condition_group in workflow["conditions"]:

            evaluator = ConditionGroup(condition_group)

            if evaluator.evaluate(trigger):  # If any group evaluates to True

                mylog('none', [f"[WF] Workflow {wfName} will be executed - conditions were evaluated as TRUE"])
                mylog('debug', [f"[WF] Workflow condition_group: {condition_group}"])
                mylog(
                    "none",
                    [
                        f"[WF] Workflow {wfName} will be executed - conditions were evaluated as TRUE"
                    ],
                )
                mylog("debug", [f"[WF] Workflow condition_group: {condition_group}"])

                self.execute_actions(workflow["actions"], trigger)
                return  # Stop if a condition group succeeds

        mylog('none', ["[WF] No condition group matched. Actions not executed."])

        mylog("none", ["[WF] No condition group matched. Actions not executed."])

    def execute_actions(self, actions, trigger):
        """Execute the actions defined in a workflow."""
@@ -134,7 +142,7 @@ class WorkflowManager:

        else:
            m = f"[WF] Unsupported action type: {action['type']}"
            mylog('none', [m])
            mylog("none", [m])
            raise ValueError(m)

        action_instance.execute()  # Execute the action
@@ -147,7 +155,7 @@ class WorkflowManager:
    #     field = action["field"]
    #     value = action["value"]
    #     action_instance = UpdateFieldAction(field, value)
    #     action_instance.execute(trigger.event)
    #     action_instance.execute(trigger.event)

    # elif action["type"] == "run_plugin":
    #     # Action type is "run_plugin", so map to RunPluginAction
@@ -164,6 +172,3 @@ class WorkflowManager:
    # else:
    #     # Handle unsupported action types
    #     raise ValueError(f"Unsupported action type: {action['type']}")
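Tying the manager together, a hypothetical workflows.json entry using the keys the code above reads (name, enabled, trigger, conditions as a list of groups, actions). The inner condition and action field values are assumptions; the exact schema is defined elsewhere in the repo:

# Illustrative workflow definition; only the top-level keys are taken from
# the manager code above, the inner operator/type names are assumed.
example_workflow = {
    "name": "Flag new dummy devices",
    "enabled": "Yes",
    "trigger": {"object_type": "Devices", "event_type": "insert"},
    "conditions": [
        {
            "logic": "AND",
            "conditions": [
                {"field": "devName", "operator": "regex", "value": ".*dummy.*"}
            ],
        }
    ],
    "actions": [{"type": "update_field", "field": "devIsNew", "value": 0}],
}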
@@ -1,17 +1,17 @@
import sys
import json
import os
import sys

# Register NetAlertX directories
INSTALL_PATH="/app"
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
sys.path.extend([f"{INSTALL_PATH}/server"])

import conf
from logger import mylog, Logger
from helper import get_setting_value, timeNowTZ
from helper import get_setting_value
from database import get_array_from_sql_rows

# Make sure log level is initialized correctly
Logger(get_setting_value('LOG_LEVEL'))
Logger(get_setting_value("LOG_LEVEL"))


class Trigger:
@@ -21,16 +21,23 @@ class Trigger:
        """
        :param name: Friendly name of the trigger
        :param triggerJson: JSON trigger object {"object_type":"Devices",event_type":"update"}
        :param event: The actual event that the trigger is evaluated against
        :param event: The actual event that the trigger is evaluated against
        :param db: DB connection in case trigger matches and object needs to be retrieved
        """
        self.object_type = triggerJson["object_type"]
        self.event_type = triggerJson["event_type"]
        self.event = event  # Store the triggered event context, if provided
        self.triggered = self.object_type == event["ObjectType"] and self.event_type == event["AppEventType"]

        mylog('debug', [f"""[WF] self.triggered '{self.triggered}' for event '{get_array_from_sql_rows(event)} and trigger {json.dumps(triggerJson)}' """])
        self.triggered = (
            self.object_type == event["ObjectType"]
            and self.event_type == event["AppEventType"]
        )

        mylog(
            "debug",
            [
                f"""[WF] self.triggered '{self.triggered}' for event '{get_array_from_sql_rows(event)} and trigger {json.dumps(triggerJson)}' """
            ],
        )

        if self.triggered:
            # object type corresponds with the DB table name
@@ -42,7 +49,7 @@ class Trigger:
                refField = "ObjectGUID"
            else:
                m = f"[WF] Unsupported object_type: {self.object_type}"
                mylog('none', [m])
                mylog("none", [m])
                raise ValueError(m)

            query = f"""
@@ -50,16 +57,14 @@ class Trigger:
                {db_table}
                WHERE {refField} = '{event["ObjectGUID"]}'
            """

            mylog('debug', [query])

            mylog("debug", [query])

            result = db.sql.execute(query).fetchall()
            self.object = result[0]
        else:
            self.object = None


    def set_event(self, event):
        """Set or update the event context for this trigger"""
        self.event = event
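The trigger gate itself is a two-field comparison; in isolation:

# Sketch of the gate in Trigger.__init__ above: a workflow fires only when
# both the object type and the app-event type match the incoming event.
def is_triggered(trigger_json, event):
    return (
        trigger_json["object_type"] == event["ObjectType"]
        and trigger_json["event_type"] == event["AppEventType"]
    )

print(is_triggered(
    {"object_type": "Devices", "event_type": "insert"},
    {"ObjectType": "Devices", "AppEventType": "insert"},
))  # -> True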