Mirror of https://github.com/jokob-sk/NetAlertX.git (synced 2025-12-06 17:15:38 -08:00)

BE: short-circuit of name resolution #1251

Signed-off-by: jokob-sk <jokob.sk@gmail.com>
@@ -35,7 +35,6 @@ from database import DB
from messaging.reporting import get_notifications
from models.notification_instance import NotificationInstance
from models.user_events_queue_instance import UserEventsQueueInstance
from plugin import plugin_manager
from scan.device_handling import update_devices_names
from workflows.manager import WorkflowManager

@@ -152,15 +151,20 @@ def main ():
process_scan(db)
updateState("Scan processed", None, None, None, None, False)

# --------
# Reporting
# Name resolution
# --------------------------------------------

# run plugins before notification processing (e.g. Plugins to discover device names)
pm.run_plugin_scripts('before_name_updates')

# Resolve devices names
mylog('debug','[Main] Resolve devices names')
update_devices_names(db)
update_devices_names(pm)


# --------
# Reporting

# Check if new devices found
sql.execute (sql_new_devices)
newDevices = sql.fetchall()

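For readability, a condensed sketch of the resulting ordering in main() after this change; only lines already shown in the hunk above are used, and update_devices_names now receives the plugin manager (pm) instead of the raw db handle so it can read plugin state for the short-circuit introduced later in this commit.

    # Condensed sketch of the new flow (surrounding scaffolding omitted)
    process_scan(db)
    updateState("Scan processed", None, None, None, None, False)

    # Name resolution runs as its own step, before reporting
    pm.run_plugin_scripts('before_name_updates')   # e.g. plugins that discover device names
    update_devices_names(pm)                       # previously update_devices_names(db)

    # Reporting
    sql.execute(sql_new_devices)                   # check if new devices were found
    newDevices = sql.fetchall()
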
@@ -16,7 +16,44 @@ INSTALL_PATH="/app"
# A class to manage the application state and to provide a frontend accessible API point
# To keep an existing value pass None
class app_state_class:
def __init__(self, currentState = None, settingsSaved=None, settingsImported=None, showSpinner=False, graphQLServerStarted=0, processScan=False):
"""
Represents the current state of the application for frontend communication.

Attributes:
lastUpdated (str): Timestamp of the last update.
settingsSaved (int): Flag indicating if settings were saved.
settingsImported (int): Flag indicating if settings were imported.
showSpinner (bool): Whether the UI spinner should be shown.
processScan (bool): Whether a scan process is active.
graphQLServerStarted (int): Timestamp of GraphQL server start.
currentState (str): Current state string.
pluginsStates (dict): Per-plugin state information.
isNewVersion (bool): Flag indicating if a new version is available.
isNewVersionChecked (int): Timestamp of last version check.
"""

def __init__(self, currentState=None,
settingsSaved=None,
settingsImported=None,
showSpinner=False,
graphQLServerStarted=0,
processScan=False,
pluginsStates=None):
"""
Initialize the application state, optionally overwriting previous values.

Loads previous state from 'app_state.json' if available, otherwise initializes defaults.
New values provided via parameters overwrite previous state.

Args:
currentState (str, optional): Initial current state.
settingsSaved (int, optional): Initial settingsSaved flag.
settingsImported (int, optional): Initial settingsImported flag.
showSpinner (bool, optional): Initial showSpinner flag.
graphQLServerStarted (int, optional): Initial GraphQL server timestamp.
processScan (bool, optional): Initial processScan flag.
pluginsStates (dict, optional): Initial plugin states to merge with previous state.
"""
# json file containing the state to communicate with the frontend
stateFile = apiPath + 'app_state.json'
previousState = ""

@@ -27,7 +64,7 @@ class app_state_class:
if os.path.exists(stateFile):
try:
with open(stateFile, 'r') as json_file:
previousState = json.load(json_file)
except json.decoder.JSONDecodeError as e:
mylog('none', [f'[app_state_class] Failed to handle app_state.json: {e}'])

@@ -41,6 +78,7 @@
self.isNewVersionChecked = previousState.get("isNewVersionChecked", 0)
self.graphQLServerStarted = previousState.get("graphQLServerStarted", 0)
self.currentState = previousState.get("currentState", "Init")
self.pluginsStates = previousState.get("pluginsStates", {})
else: # init first time values
self.settingsSaved = 0
self.settingsImported = 0
@@ -50,6 +88,7 @@
self.isNewVersionChecked = int(timeNow().timestamp())
self.graphQLServerStarted = 0
self.currentState = "Init"
self.pluginsStates = {}

# Overwrite with provided parameters if supplied
if settingsSaved is not None:

@@ -64,6 +103,16 @@
self.processScan = processScan
if currentState is not None:
self.currentState = currentState
# Merge plugin states instead of overwriting
if pluginsStates is not None:
for plugin, state in pluginsStates.items():
if plugin in self.pluginsStates:
# Only update existing keys
self.pluginsStates[plugin].update(state)
else:
# Optionally ignore or add new plugin entries
# To ignore new plugins, comment out the next line
self.pluginsStates[plugin] = state

# check for new version every hour and if currently not running new version
if self.isNewVersion is False and self.isNewVersionChecked + 3600 < int(timeNow().timestamp()):

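For illustration, a standalone sketch of the merge semantics above; the dictionaries and plugin names are made up:

    # Sketch of the pluginsStates merge behaviour: existing plugin entries are
    # updated key-by-key, previously unseen plugins are added as new entries.
    previous = {"NSLOOKUP": {"totalObjects": 10, "newObjects": 1}}
    incoming = {"NSLOOKUP": {"newObjects": 3}, "DIGSCAN": {"totalObjects": 5}}

    for plugin, state in incoming.items():
        if plugin in previous:
            previous[plugin].update(state)   # merge into the existing entry
        else:
            previous[plugin] = state         # add the new plugin entry

    print(previous)
    # {'NSLOOKUP': {'totalObjects': 10, 'newObjects': 3}, 'DIGSCAN': {'totalObjects': 5}}
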
@@ -74,7 +123,7 @@
# with open(stateFile, 'w') as json_file:
# json.dump(self, json_file, cls=AppStateEncoder, indent=4)

# Remove lastUpdated from the dictionary for comparison
currentStateDict = self.__dict__.copy()
currentStateDict.pop('lastUpdated', None)

@@ -94,16 +143,47 @@

#-------------------------------------------------------------------------------
# method to update the state
def updateState(newState = None, settingsSaved = None, settingsImported = None, showSpinner = False, graphQLServerStarted = None, processScan = None):
def updateState(newState = None,
settingsSaved = None,
settingsImported = None,
showSpinner = False,
graphQLServerStarted = None,
processScan = None,
pluginsStates=None):
"""
Convenience method to create or update the app state.

Args:
newState (str, optional): Current state to set.
settingsSaved (int, optional): Flag for settings saved.
settingsImported (int, optional): Flag for settings imported.
showSpinner (bool, optional): Flag to control UI spinner.
graphQLServerStarted (int, optional): Timestamp of GraphQL server start.
processScan (bool, optional): Flag indicating if a scan is active.
pluginsStates (dict, optional): Plugin state updates.

Returns:
app_state_class: Updated state object.
"""
return app_state_class(newState, settingsSaved, settingsImported, showSpinner, graphQLServerStarted, processScan)
return app_state_class( newState,
settingsSaved,
settingsImported,
showSpinner,
graphQLServerStarted,
processScan,
pluginsStates)

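A usage sketch of the extended signature, matching how plugin.py calls it later in this commit; the prefix and counter values are illustrative only:

    # Push one plugin's freshly computed state into the shared app state
    # without clobbering other plugins' entries (see the merge logic above).
    updateState(pluginsStates={"NSLOOKUP": {
        "lastChanged": "2025-12-06 17:00:00",
        "totalObjects": 42,
        "newObjects": 3,
        "changedObjects": 39,
    }})
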
#-------------------------------------------------------------------------------
# Checks if the object has a __dict__ attribute. If it does, it assumes that it's an instance of a class and serializes its attributes dynamically.
class AppStateEncoder(json.JSONEncoder):
"""
JSON encoder for application state objects.

Automatically serializes objects with a __dict__ attribute.
"""
def default(self, obj):
if hasattr(obj, '__dict__'):
# If the object has a '__dict__', assume it's an instance of a class
return obj.__dict__
return super().default(obj)

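A self-contained sketch of how the encoder serializes arbitrary state objects; the Demo class is made up for illustration:

    import json

    class Demo:
        def __init__(self):
            self.currentState = "Init"
            self.pluginsStates = {}

    class AppStateEncoder(json.JSONEncoder):
        def default(self, obj):
            if hasattr(obj, '__dict__'):
                return obj.__dict__      # serialize class instances via their attributes
            return super().default(obj)  # defer to the default encoder otherwise

    print(json.dumps(Demo(), cls=AppStateEncoder, indent=4))
    # prints the instance attributes as a JSON object
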
@@ -195,7 +195,10 @@ def ensure_Indexes(sql) -> bool:
("idx_dev_location", "CREATE INDEX idx_dev_location ON Devices(devLocation)"),

# Settings
("idx_set_key", "CREATE INDEX idx_set_key ON Settings(setKey)")
("idx_set_key", "CREATE INDEX idx_set_key ON Settings(setKey)"),

# Plugins_Objects
("idx_plugins_plugin_mac_ip", "CREATE INDEX idx_plugins_plugin_mac_ip ON Plugins_Objects(Plugin, Object_PrimaryID, Object_SecondaryID)") # Issue #1251: Optimize name resolution lookup
]

for name, create_sql in indexes:

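A standalone sketch, using an in-memory SQLite database and a reduced stand-in schema, of the lookup shape the new composite index serves (the real Plugins_Objects table has more columns; 'ResolvedValue' is a placeholder name):

    import sqlite3

    con = sqlite3.connect(":memory:")
    # Reduced stand-in for Plugins_Objects with only the columns relevant here
    con.execute("""CREATE TABLE Plugins_Objects
                   (Plugin TEXT, Object_PrimaryID TEXT, Object_SecondaryID TEXT, ResolvedValue TEXT)""")
    con.execute("""CREATE INDEX idx_plugins_plugin_mac_ip
                   ON Plugins_Objects(Plugin, Object_PrimaryID, Object_SecondaryID)""")

    # Name resolution filters on Plugin + MAC (or Plugin + IP), which the
    # composite index can answer without scanning the whole table.
    plan = con.execute("""EXPLAIN QUERY PLAN
                          SELECT ResolvedValue FROM Plugins_Objects
                          WHERE Plugin = ? AND Object_PrimaryID = ?""",
                       ("NSLOOKUP", "00:11:22:33:44:55")).fetchall()
    print(plan)  # expected to show SEARCH ... USING INDEX idx_plugins_plugin_mac_ip
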
server/plugin.py (112 changed lines)
@@ -26,6 +26,8 @@ class plugin_manager:
def __init__(self, db, all_plugins):
self.db = db
self.all_plugins = all_plugins
self.plugin_states = {}
self.name_plugins_checked = None

# object cache of settings and schedules for faster lookups
self._cache = {}

@@ -66,20 +68,6 @@ class plugin_manager:
# 🔹 Lookup RUN setting from cache instead of calling get_plugin_setting_obj each time
run_setting = self._cache["settings"].get(prefix, {}).get("RUN")

# set = get_plugin_setting_obj(plugin, "RUN")

# mylog('debug', [f'[run_plugin_scripts] plugin: {plugin}'])
# mylog('debug', [f'[run_plugin_scripts] set: {set}'])
# if set != None and set['value'] == runType:
# if runType != "schedule":
# shouldRun = True
# elif runType == "schedule":
# # run if overdue scheduled time
# # check schedules if any contains a unique plugin prefix matching the current plugin
# for schd in conf.mySchedules:
# if schd.service == prefix:
# # Check if schedule overdue
# shouldRun = schd.runScheduleCheck()
if run_setting != None and run_setting['value'] == runType:
if runType != "schedule":
shouldRun = True

@@ -91,19 +79,6 @@ class plugin_manager:
# Check if schedule overdue
shouldRun = schd.runScheduleCheck()

# if shouldRun:
# # Header
# updateState(f"Plugin: {prefix}")

# print_plugin_info(plugin, ['display_name'])
# mylog('debug', ['[Plugins] CMD: ', get_plugin_setting_obj(plugin, "CMD")["value"]])
# execute_plugin(self.db, self.all_plugins, plugin)
# # update last run time
# if runType == "schedule":
# for schd in conf.mySchedules:
# if schd.service == prefix:
# # note the last time the scheduled plugin run was executed
# schd.last_run = timeNowTZ()
if shouldRun:
# Header
updateState(f"Plugin: {prefix}")
@@ -116,6 +91,10 @@

execute_plugin(self.db, self.all_plugins, plugin)

# Update plugin states in app_state
current_plugin_state = self.get_plugin_states(prefix) # get latest plugin state
updateState(pluginsStates={prefix: current_plugin_state.get(prefix, {})})

# update last run time
if runType == "schedule":
schd = self._cache["schedules"].get(prefix)

@@ -183,12 +162,20 @@ class plugin_manager:

mylog('minimal', ['[', timeNowTZ(), '] START Run: ', runType])

# run the plugin to run
# run the plugin
for plugin in self.all_plugins:
if plugin["unique_prefix"] == runType:

pluginName = plugin["unique_prefix"]

execute_plugin(self.db, self.all_plugins, plugin)

mylog('minimal', ['[', timeNowTZ(), '] END Run: ', runType])
# Update plugin states in app_state
current_plugin_state = self.get_plugin_states(pluginName) # get latest plugin state
updateState(pluginsStates={pluginName: current_plugin_state.get(pluginName, {})})

mylog('minimal', ['[', timeNowTZ(), '] END Run: ', runType])

return

@@ -214,6 +201,71 @@ class plugin_manager:
mylog('minimal', ['[Test] END Test: ', runType])

return

#-------------------------------------------------------------------------------
def get_plugin_states(self, plugin_name=None):
"""
Returns plugin state summary suitable for updateState(..., pluginsStates=...).
If plugin_name is provided, only calculates stats for that plugin.
Structure per plugin:
{
"lastChanged": str,
"totalObjects": int,
"newObjects": int,
"changedObjects": int
}
"""
sql = self.db.sql
plugin_states = {}

if plugin_name: # Only compute for single plugin
sql.execute("""
SELECT MAX(DateTimeChanged) AS last_changed,
COUNT(*) AS total_objects,
SUM(CASE WHEN DateTimeCreated = DateTimeChanged THEN 1 ELSE 0 END) AS new_objects
FROM Plugins_Objects
WHERE Plugin = ?
""", (plugin_name,))
row = sql.fetchone()
last_changed, total_objects, new_objects = row if row else ("", 0, 0)
new_objects = new_objects or 0 # ensure it's int
changed_objects = total_objects - new_objects

plugin_states[plugin_name] = {
"lastChanged": last_changed or "",
"totalObjects": total_objects or 0,
"newObjects": new_objects or 0,
"changedObjects": changed_objects or 0
}

# Save in memory
self.plugin_states[plugin_name] = plugin_states[plugin_name]

else: # Compute for all plugins (full refresh)
sql.execute("""
SELECT Plugin,
MAX(DateTimeChanged) AS last_changed,
COUNT(*) AS total_objects,
SUM(CASE WHEN DateTimeCreated = DateTimeChanged THEN 1 ELSE 0 END) AS new_objects
FROM Plugins_Objects
GROUP BY Plugin
""")
for plugin, last_changed, total_objects, new_objects in sql.fetchall():
new_objects = new_objects or 0 # ensure it's int
changed_objects = total_objects - new_objects
plugin_states[plugin] = {
"lastChanged": last_changed or "",
"totalObjects": total_objects or 0,
"newObjects": new_objects or 0,
"changedObjects": changed_objects or 0
}

# Save in memory
self.plugin_states = plugin_states

return plugin_states


#-------------------------------------------------------------------------------

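A self-contained sketch, on an in-memory SQLite table with made-up rows, of how this aggregation classifies objects as new (DateTimeCreated equals DateTimeChanged) versus changed:

    import sqlite3

    con = sqlite3.connect(":memory:")
    con.execute("CREATE TABLE Plugins_Objects (Plugin TEXT, DateTimeCreated TEXT, DateTimeChanged TEXT)")
    con.executemany("INSERT INTO Plugins_Objects VALUES (?, ?, ?)", [
        ("NSLOOKUP", "2025-12-06 10:00:00", "2025-12-06 10:00:00"),  # new: never changed since creation
        ("NSLOOKUP", "2025-12-05 09:00:00", "2025-12-06 11:00:00"),  # changed after creation
        ("NSLOOKUP", "2025-12-04 08:00:00", "2025-12-04 08:00:00"),  # new
    ])

    last_changed, total_objects, new_objects = con.execute("""
        SELECT MAX(DateTimeChanged),
               COUNT(*),
               SUM(CASE WHEN DateTimeCreated = DateTimeChanged THEN 1 ELSE 0 END)
        FROM Plugins_Objects
        WHERE Plugin = ?""", ("NSLOOKUP",)).fetchone()

    print(last_changed, total_objects, new_objects, total_objects - new_objects)
    # 2025-12-06 11:00:00  3  2  1   -> lastChanged, totalObjects, newObjects, changedObjects
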
@@ -516,19 +516,40 @@ def create_new_devices (db):


#-------------------------------------------------------------------------------
def update_devices_names(db):
sql = db.sql
resolver = NameResolver(db)
device_handler = DeviceInstance(db)
def update_devices_names(pm):
sql = pm.db.sql
resolver = NameResolver(pm.db)
device_handler = DeviceInstance(pm.db)

# --- Short-circuit if no plugin that resolves names changed ---
name_plugins = ["DIGSCAN", "NSLOOKUP", "NBTSCAN", "AVAHISCAN"]

# Get last check timestamp from plugin manager
last_checked = pm.name_plugins_checked

# Determine the latest 'lastChanged' timestamp among name plugins
latest_change = max(
[pm.plugin_states.get(p, {}).get("lastChanged") for p in name_plugins if pm.plugin_states.get(p)],
default=None
)

# Convert to comparable datetime if needed
from dateutil import parser
latest_change_dt = parser.parse(latest_change) if latest_change else None

# Skip if nothing changed since last check
if last_checked and latest_change_dt and latest_change_dt <= last_checked:
mylog('debug', '[Update Device Name] No relevant plugin changes since last check, skipping.')
return

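A standalone sketch of the short-circuit decision above, with illustrative timestamps; plugin_states mirrors the per-plugin structure produced by plugin_manager.get_plugin_states():

    from dateutil import parser
    from datetime import datetime

    name_plugins = ["DIGSCAN", "NSLOOKUP", "NBTSCAN", "AVAHISCAN"]

    # Illustrative values: one name plugin last changed at 10:00, the last check ran at 12:00.
    plugin_states = {"NSLOOKUP": {"lastChanged": "2025-12-06 10:00:00"}}
    last_checked = datetime(2025, 12, 6, 12, 0, 0)

    latest_change = max(
        [plugin_states.get(p, {}).get("lastChanged") for p in name_plugins if plugin_states.get(p)],
        default=None
    )
    latest_change_dt = parser.parse(latest_change) if latest_change else None

    if last_checked and latest_change_dt and latest_change_dt <= last_checked:
        print("skip name resolution")   # nothing new to resolve since the last pass
    else:
        print("run name resolution")
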
nameNotFound = "(name not found)"

# Define resolution strategies in priority order
strategies = [
(resolver.resolve_dig, 'dig'),
(resolver.resolve_mdns, 'mdns'),
(resolver.resolve_nslookup, 'nslookup'),
(resolver.resolve_nbtlookup, 'nbtlookup')
(resolver.resolve_dig, 'DIGSCAN'),
(resolver.resolve_mdns, 'AVAHISCAN'),
(resolver.resolve_nslookup, 'NSLOOKUP'),
(resolver.resolve_nbtlookup, 'NBTSCAN')
]

def resolve_devices(devices, resolve_both_name_and_fqdn=True):

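The strategy labels now match the plugin prefixes used by the short-circuit and by the foundStats log lines below; a hypothetical sketch of keeping such counters aligned with the renamed labels (resolve_devices internals are not shown in this diff):

    # Hypothetical illustration only: key the counters by each strategy's label
    # so lookups like foundStats['DIGSCAN'] stay in sync with the list above.
    labels = ['DIGSCAN', 'AVAHISCAN', 'NSLOOKUP', 'NBTSCAN']
    foundStats = {label: 0 for label in labels}
    foundStats['DIGSCAN'] += 1   # e.g. one device name resolved from DIGSCAN results
    print(foundStats)            # {'DIGSCAN': 1, 'AVAHISCAN': 0, 'NSLOOKUP': 0, 'NBTSCAN': 0}
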
@@ -590,7 +611,7 @@ def update_devices_names(db):
recordsToUpdate, recordsNotFound, foundStats, notFound = resolve_devices(unknownDevices)

# Log summary
mylog('verbose', f"[Update Device Name] Names Found (DiG/mDNS/NSLOOKUP/NBTSCAN): {len(recordsToUpdate)} ({foundStats['dig']}/{foundStats['mdns']}/{foundStats['nslookup']}/{foundStats['nbtlookup']})")
mylog('verbose', f"[Update Device Name] Names Found (DIGSCAN/AVAHISCAN/NSLOOKUP/NBTSCAN): {len(recordsToUpdate)} ({foundStats['DIGSCAN']}/{foundStats['AVAHISCAN']}/{foundStats['NSLOOKUP']}/{foundStats['NBTSCAN']})")
mylog('verbose', f'[Update Device Name] Names Not Found : {notFound}')

# Apply updates to database

@@ -607,14 +628,18 @@ def update_devices_names(db):
recordsToUpdate, _, foundStats, notFound = resolve_devices(allDevices, resolve_both_name_and_fqdn=False)

# Log summary
mylog('verbose', f"[Update FQDN] Names Found (DiG/mDNS/NSLOOKUP/NBTSCAN): {len(recordsToUpdate)} ({foundStats['dig']}/{foundStats['mdns']}/{foundStats['nslookup']}/{foundStats['nbtlookup']})")
mylog('verbose', f"[Update FQDN] Names Found (DIGSCAN/AVAHISCAN/NSLOOKUP/NBTSCAN): {len(recordsToUpdate)} ({foundStats['DIGSCAN']}/{foundStats['AVAHISCAN']}/{foundStats['NSLOOKUP']}/{foundStats['NBTSCAN']})")
mylog('verbose', f'[Update FQDN] Names Not Found : {notFound}')

# Apply FQDN-only updates
sql.executemany("UPDATE Devices SET devFQDN = ? WHERE devMac = ?", recordsToUpdate)

# Commit all database changes
db.commitDB()
pm.db.commitDB()

# --- Step 3: Log last checked time ---
# After resolving names, update last checked
pm.name_plugins_checked = timeNowTZ()

#-------------------------------------------------------------------------------
# Updates devPresentLastScan for parent devices based on the presence of their NICs

@@ -35,7 +35,7 @@ class NameResolver:
WHERE Plugin = '{plugin}' AND Object_PrimaryID = '{pMAC}'
""")
result = sql.fetchall()
self.db.commitDB()
# self.db.commitDB() # Issue #1251: Optimize name resolution lookup
if result:
raw = result[0][0]
return ResolvedName(raw, self.clean_device_name(raw, False))
@@ -46,7 +46,7 @@
WHERE Plugin = '{plugin}' AND Object_SecondaryID = '{pIP}'
""")
result = sql.fetchall()
self.db.commitDB()
# self.db.commitDB() # Issue #1251: Optimize name resolution lookup
if result:
raw = result[0][0]
return ResolvedName(raw, self.clean_device_name(raw, True))