Mirror of https://github.com/jokob-sk/NetAlertX.git (synced 2025-12-06 17:15:38 -08:00)
Merge branch next_release into main
@@ -1,21 +1,28 @@
## Overview

NMAP-scan is a command-line tool to discover and fingerprint IP hosts on the local network. The NMAP-scan (and other Network-scan plugin times using the `SCAN_SUBNETS` setting) time depends on the number of IP addresses to check so set this up carefully with the appropriate network mask and interface. Check the [subnets documentation](https://github.com/jokob-sk/NetAlertX/blob/main/docs/SUBNETS.md) for help with setting up VLANs, what VLANs are supported, or how to figure out the network mask and your interface.

**NMAP-scan** is a command-line tool used to discover and fingerprint IP hosts on your network.

The NMAP-scan (and other Network-scan plugins using the `SCAN_SUBNETS` setting) runtime depends on the number of IP addresses to check — so configure it carefully with the appropriate **network mask** and **interface**.

Refer to the [subnets documentation](https://github.com/jokob-sk/NetAlertX/blob/main/docs/SUBNETS.md) for help with setting up VLANs, understanding which VLANs are supported, and determining your network mask and interface.
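For illustration only, a minimal `SCAN_SUBNETS` entry pairing a network range with the interface to scan from might look like this (the subnet and interface below are placeholders; see the subnets documentation for the exact format expected by your setup):

```python
# One entry per subnet/VLAN; each entry combines a CIDR range with the scanning interface
SCAN_SUBNETS=['192.168.1.0/24 --interface=eth0']
```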
> [!NOTE]
> The `NMAPDEV` plugin is great for detecting the availability of devices, however ARP scan might be better covering multiple VLANS and subnets as NMAP can't pickup the MAC address from other subnets (this is an NMAP limitation) which are necessary to identify a device. You can always combine different scan methods. You can find all available network scanning options (marked as `🔍 dev scanner`) in the [Plugins overview](https://github.com/jokob-sk/NetAlertX/blob/main/docs/PLUGINS.md) readme.
> The `NMAPDEV` plugin is excellent for detecting device availability, but **ARP-scan** is better for scanning across multiple VLANs and subnets.
> NMAP cannot retrieve MAC addresses from other subnets (an NMAP limitation), which are often required to identify devices.
> You can safely combine different scan methods.
> See all available network scanning options (marked with `🔍 dev scanner`) in the [Plugins overview](https://github.com/jokob-sk/NetAlertX/blob/main/docs/PLUGINS.md).

This plugin is **not optimized for name resolution** (use `NSLOOKUP` or `AVAHISCAN` instead), but if a name is available it will appear in the **Resolved Name** column.

This plugin is not the best for name resolution (Use e.g.: `NSLOOKUP`, `AVAHISCAN` instead), however if available a name will be displayed in the `Resolved Name` column.
---

### Usage

- Go to settings and set the `SCAN_SUBNETS` setting as per [subnets documentation](https://github.com/jokob-sk/NetAlertX/blob/main/docs/SUBNETS.md).
- Enable the plugin by changing the RUN parameter from disabled to your preferred run time (usually: `schedule`).
- Specify the schedule in the `NMAPDEV_RUN_SCHD` setting
- Adjust the timeout if needed in the `NMAPDEV_RUN_TIMEOUT` setting
- If scanning remote networks you may want to enable the `NMAPDEV_FAKE_MAC` setting. Please read the setting description carefully.
- Review remaining settings
- SAVE
- Wait for the next scan to finish
1. In **Settings**, configure the `SCAN_SUBNETS` value as described in the [subnets documentation](https://github.com/jokob-sk/NetAlertX/blob/main/docs/SUBNETS.md).
   The plugin automatically **strips unsupported `--vlan` parameters** and replaces `--interface` with `-e`.
2. Enable the plugin by setting the `RUN` parameter from `disabled` to your preferred run mode (usually `schedule`).
3. Specify the schedule using the `NMAPDEV_RUN_SCHD` setting.
4. Adjust the scan timeout if necessary with the `NMAPDEV_RUN_TIMEOUT` setting.
5. If scanning **remote networks**, consider enabling the `NMAPDEV_FAKE_MAC` setting — review its description carefully before use.
6. Review all remaining settings.
7. Click **SAVE**.
8. Wait for the next scheduled scan to complete.
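As a rough sketch of what steps 2-5 amount to (assuming the usual `NMAPDEV_<SETTING>` naming and the defaults shown in this plugin's config; adjust values to your environment):

```python
NMAPDEV_RUN='schedule'            # enable the plugin on a schedule
NMAPDEV_RUN_SCHD='*/5 * * * *'    # cron-like schedule (default: every 5 minutes)
NMAPDEV_RUN_TIMEOUT=300           # per-run timeout in seconds
NMAPDEV_FAKE_MAC=False            # consider enabling only for remote subnets without reachable MACs
```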
@@ -2,7 +2,7 @@
|
||||
"code_name": "nmap_dev_scan",
|
||||
"unique_prefix": "NMAPDEV",
|
||||
"plugin_type": "device_scanner",
|
||||
"execution_order" : "Layer_3",
|
||||
"execution_order": "Layer_3",
|
||||
"enabled": true,
|
||||
"data_source": "script",
|
||||
"mapped_to_table": "CurrentScan",
|
||||
@@ -16,7 +16,11 @@
|
||||
}
|
||||
],
|
||||
"show_ui": true,
|
||||
"localized": ["display_name", "description", "icon"],
|
||||
"localized": [
|
||||
"display_name",
|
||||
"description",
|
||||
"icon"
|
||||
],
|
||||
"display_name": [
|
||||
{
|
||||
"language_code": "en_us",
|
||||
@@ -49,7 +53,11 @@
|
||||
"type": {
|
||||
"dataType": "string",
|
||||
"elements": [
|
||||
{ "elementType": "select", "elementOptions": [], "transformers": [] }
|
||||
{
|
||||
"elementType": "select",
|
||||
"elementOptions": [],
|
||||
"transformers": []
|
||||
}
|
||||
]
|
||||
},
|
||||
"default_value": "disabled",
|
||||
@@ -60,8 +68,13 @@
|
||||
"always_after_scan",
|
||||
"on_new_device"
|
||||
],
|
||||
"localized": ["name", "description"],
|
||||
"events": ["run"],
|
||||
"localized": [
|
||||
"name",
|
||||
"description"
|
||||
],
|
||||
"events": [
|
||||
"run"
|
||||
],
|
||||
"name": [
|
||||
{
|
||||
"language_code": "en_us",
|
||||
@@ -98,14 +111,21 @@
|
||||
"elements": [
|
||||
{
|
||||
"elementType": "input",
|
||||
"elementOptions": [{ "readonly": "true" }],
|
||||
"elementOptions": [
|
||||
{
|
||||
"readonly": "true"
|
||||
}
|
||||
],
|
||||
"transformers": []
|
||||
}
|
||||
]
|
||||
},
|
||||
"default_value": "python3 /app/front/plugins/nmap_dev_scan/nmap_dev.py ",
|
||||
"options": [],
|
||||
"localized": ["name", "description"],
|
||||
"localized": [
|
||||
"name",
|
||||
"description"
|
||||
],
|
||||
"name": [
|
||||
{
|
||||
"language_code": "en_us",
|
||||
@@ -142,14 +162,21 @@
|
||||
"elements": [
|
||||
{
|
||||
"elementType": "input",
|
||||
"elementOptions": [{ "type": "number" }],
|
||||
"elementOptions": [
|
||||
{
|
||||
"type": "number"
|
||||
}
|
||||
],
|
||||
"transformers": []
|
||||
}
|
||||
]
|
||||
},
|
||||
"default_value": 300,
|
||||
"options": [],
|
||||
"localized": ["name", "description"],
|
||||
"localized": [
|
||||
"name",
|
||||
"description"
|
||||
],
|
||||
"name": [
|
||||
{
|
||||
"language_code": "en_us",
|
||||
@@ -212,7 +239,10 @@
|
||||
},
|
||||
"default_value": "*/5 * * * *",
|
||||
"options": [],
|
||||
"localized": ["name", "description"],
|
||||
"localized": [
|
||||
"name",
|
||||
"description"
|
||||
],
|
||||
"name": [
|
||||
{
|
||||
"language_code": "en_us",
|
||||
@@ -249,7 +279,11 @@
|
||||
"elements": [
|
||||
{
|
||||
"elementType": "select",
|
||||
"elementOptions": [{ "multiple": "true" }],
|
||||
"elementOptions": [
|
||||
{
|
||||
"multiple": "true"
|
||||
}
|
||||
],
|
||||
"transformers": []
|
||||
}
|
||||
]
|
||||
@@ -261,7 +295,10 @@
|
||||
"Watched_Value3",
|
||||
"Watched_Value4"
|
||||
],
|
||||
"localized": ["name", "description"],
|
||||
"localized": [
|
||||
"name",
|
||||
"description"
|
||||
],
|
||||
"name": [
|
||||
{
|
||||
"language_code": "en_us",
|
||||
@@ -298,19 +335,28 @@
|
||||
"elements": [
|
||||
{
|
||||
"elementType": "select",
|
||||
"elementOptions": [{ "multiple": "true" }],
|
||||
"elementOptions": [
|
||||
{
|
||||
"multiple": "true"
|
||||
}
|
||||
],
|
||||
"transformers": []
|
||||
}
|
||||
]
|
||||
},
|
||||
"default_value": ["new"],
|
||||
"default_value": [
|
||||
"new"
|
||||
],
|
||||
"options": [
|
||||
"new",
|
||||
"watched-changed",
|
||||
"watched-not-changed",
|
||||
"missing-in-last-scan"
|
||||
],
|
||||
"localized": ["name", "description"],
|
||||
"localized": [
|
||||
"name",
|
||||
"description"
|
||||
],
|
||||
"name": [
|
||||
{
|
||||
"language_code": "en_us",
|
||||
@@ -345,12 +391,19 @@
|
||||
"type": {
|
||||
"dataType": "string",
|
||||
"elements": [
|
||||
{ "elementType": "input", "elementOptions": [], "transformers": [] }
|
||||
{
|
||||
"elementType": "input",
|
||||
"elementOptions": [],
|
||||
"transformers": []
|
||||
}
|
||||
]
|
||||
},
|
||||
"default_value": "sudo nmap -sn -PR -oX - ",
|
||||
"options": [],
|
||||
"localized": ["name", "description"],
|
||||
"localized": [
|
||||
"name",
|
||||
"description"
|
||||
],
|
||||
"name": [
|
||||
{
|
||||
"language_code": "en_us",
|
||||
@@ -371,14 +424,21 @@
|
||||
"elements": [
|
||||
{
|
||||
"elementType": "input",
|
||||
"elementOptions": [{ "type": "checkbox" }],
|
||||
"elementOptions": [
|
||||
{
|
||||
"type": "checkbox"
|
||||
}
|
||||
],
|
||||
"transformers": []
|
||||
}
|
||||
]
|
||||
},
|
||||
"default_value": false,
|
||||
"options": [],
|
||||
"localized": ["name", "description"],
|
||||
"localized": [
|
||||
"name",
|
||||
"description"
|
||||
],
|
||||
"name": [
|
||||
{
|
||||
"language_code": "en_us",
|
||||
@@ -401,7 +461,9 @@
|
||||
"type": "none",
|
||||
"default_value": "",
|
||||
"options": [],
|
||||
"localized": ["name"],
|
||||
"localized": [
|
||||
"name"
|
||||
],
|
||||
"name": [
|
||||
{
|
||||
"language_code": "en_us",
|
||||
@@ -417,7 +479,9 @@
|
||||
"type": "device_name_mac",
|
||||
"default_value": "",
|
||||
"options": [],
|
||||
"localized": ["name"],
|
||||
"localized": [
|
||||
"name"
|
||||
],
|
||||
"name": [
|
||||
{
|
||||
"language_code": "en_us",
|
||||
@@ -433,7 +497,9 @@
|
||||
"type": "device_ip",
|
||||
"default_value": "",
|
||||
"options": [],
|
||||
"localized": ["name"],
|
||||
"localized": [
|
||||
"name"
|
||||
],
|
||||
"name": [
|
||||
{
|
||||
"language_code": "en_us",
|
||||
@@ -449,7 +515,9 @@
|
||||
"type": "label",
|
||||
"default_value": "",
|
||||
"options": [],
|
||||
"localized": ["name"],
|
||||
"localized": [
|
||||
"name"
|
||||
],
|
||||
"name": [
|
||||
{
|
||||
"language_code": "en_us",
|
||||
@@ -465,7 +533,9 @@
|
||||
"type": "label",
|
||||
"default_value": "",
|
||||
"options": [],
|
||||
"localized": ["name"],
|
||||
"localized": [
|
||||
"name"
|
||||
],
|
||||
"name": [
|
||||
{
|
||||
"language_code": "en_us",
|
||||
@@ -489,7 +559,9 @@
|
||||
"type": "label",
|
||||
"default_value": "",
|
||||
"options": [],
|
||||
"localized": ["name"],
|
||||
"localized": [
|
||||
"name"
|
||||
],
|
||||
"name": [
|
||||
{
|
||||
"language_code": "en_us",
|
||||
@@ -512,7 +584,9 @@
|
||||
"type": "label",
|
||||
"default_value": "",
|
||||
"options": [],
|
||||
"localized": ["name"],
|
||||
"localized": [
|
||||
"name"
|
||||
],
|
||||
"name": [
|
||||
{
|
||||
"language_code": "en_us",
|
||||
@@ -535,7 +609,9 @@
|
||||
"type": "label",
|
||||
"default_value": "",
|
||||
"options": [],
|
||||
"localized": ["name"],
|
||||
"localized": [
|
||||
"name"
|
||||
],
|
||||
"name": [
|
||||
{
|
||||
"language_code": "en_us",
|
||||
@@ -558,7 +634,9 @@
|
||||
"type": "label",
|
||||
"default_value": "",
|
||||
"options": [],
|
||||
"localized": ["name"],
|
||||
"localized": [
|
||||
"name"
|
||||
],
|
||||
"name": [
|
||||
{
|
||||
"language_code": "en_us",
|
||||
@@ -598,7 +676,9 @@
|
||||
"replacement": "<div style='text-align:center'><i class='fa-solid fa-question'></i></div>"
|
||||
}
|
||||
],
|
||||
"localized": ["name"],
|
||||
"localized": [
|
||||
"name"
|
||||
],
|
||||
"name": [
|
||||
{
|
||||
"language_code": "en_us",
|
||||
@@ -615,4 +695,4 @@
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
@@ -116,6 +116,9 @@ def execute_scan(subnets_list, timeout, fakeMac, args):


def execute_scan_on_interface (interface, timeout, args):
    # Remove unsupported VLAN flags
    interface = re.sub(r'--vlan=\S+', '', interface).strip()

    # Prepare command arguments
    scan_args = args.split() + interface.replace('--interface=','-e ').split()
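For illustration, a minimal standalone sketch of what these two lines do to a hypothetical `SCAN_SUBNETS` entry (the subnet and interface are made up; the nmap arguments match the plugin's default command):

```python
import re

args = "sudo nmap -sn -PR -oX - "                        # default NMAPDEV nmap arguments
interface = "192.168.1.0/24 --interface=eth0 --vlan=10"  # hypothetical subnet entry

interface = re.sub(r'--vlan=\S+', '', interface).strip()  # drop the unsupported --vlan flag
scan_args = args.split() + interface.replace('--interface=', '-e ').split()

print(scan_args)
# ['sudo', 'nmap', '-sn', '-PR', '-oX', '-', '192.168.1.0/24', '-e', 'eth0']
```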
@@ -35,7 +35,6 @@ from database import DB
|
||||
from messaging.reporting import get_notifications
|
||||
from models.notification_instance import NotificationInstance
|
||||
from models.user_events_queue_instance import UserEventsQueueInstance
|
||||
from plugin import plugin_manager
|
||||
from scan.device_handling import update_devices_names
|
||||
from workflows.manager import WorkflowManager
|
||||
|
||||
@@ -152,17 +151,20 @@ def main ():
|
||||
process_scan(db)
|
||||
updateState("Scan processed", None, None, None, None, False)
|
||||
|
||||
# ------------------------------------------------------------------------------
|
||||
# Reporting
|
||||
# ------------------------------------------------------------------------------
|
||||
|
||||
# Name resolution
|
||||
# --------------------------------------------
|
||||
|
||||
# run plugins before notification processing (e.g. Plugins to discover device names)
|
||||
pm.run_plugin_scripts('before_name_updates')
|
||||
|
||||
# Resolve devices names
|
||||
mylog('debug','[Main] Resolve devices names')
|
||||
update_devices_names(db)
|
||||
update_devices_names(pm)
|
||||
|
||||
|
||||
# --------
|
||||
# Reporting
|
||||
|
||||
# Check if new devices found
|
||||
sql.execute (sql_new_devices)
|
||||
newDevices = sql.fetchall()
|
||||
|
||||
@@ -16,7 +16,44 @@ INSTALL_PATH="/app"
|
||||
# A class to manage the application state and to provide a frontend accessible API point
|
||||
# To keep an existing value pass None
|
||||
class app_state_class:
|
||||
def __init__(self, currentState = None, settingsSaved=None, settingsImported=None, showSpinner=False, graphQLServerStarted=0, processScan=False):
|
||||
"""
|
||||
Represents the current state of the application for frontend communication.
|
||||
|
||||
Attributes:
|
||||
lastUpdated (str): Timestamp of the last update.
|
||||
settingsSaved (int): Flag indicating if settings were saved.
|
||||
settingsImported (int): Flag indicating if settings were imported.
|
||||
showSpinner (bool): Whether the UI spinner should be shown.
|
||||
processScan (bool): Whether a scan process is active.
|
||||
graphQLServerStarted (int): Timestamp of GraphQL server start.
|
||||
currentState (str): Current state string.
|
||||
pluginsStates (dict): Per-plugin state information.
|
||||
isNewVersion (bool): Flag indicating if a new version is available.
|
||||
isNewVersionChecked (int): Timestamp of last version check.
|
||||
"""
|
||||
|
||||
def __init__(self, currentState=None,
|
||||
settingsSaved=None,
|
||||
settingsImported=None,
|
||||
showSpinner=None,
|
||||
graphQLServerStarted=0,
|
||||
processScan=False,
|
||||
pluginsStates=None):
|
||||
"""
|
||||
Initialize the application state, optionally overwriting previous values.
|
||||
|
||||
Loads previous state from 'app_state.json' if available, otherwise initializes defaults.
|
||||
New values provided via parameters overwrite previous state.
|
||||
|
||||
Args:
|
||||
currentState (str, optional): Initial current state.
|
||||
settingsSaved (int, optional): Initial settingsSaved flag.
|
||||
settingsImported (int, optional): Initial settingsImported flag.
|
||||
showSpinner (bool, optional): Initial showSpinner flag.
|
||||
graphQLServerStarted (int, optional): Initial GraphQL server timestamp.
|
||||
processScan (bool, optional): Initial processScan flag.
|
||||
pluginsStates (dict, optional): Initial plugin states to merge with previous state.
|
||||
"""
|
||||
# json file containing the state to communicate with the frontend
|
||||
stateFile = apiPath + 'app_state.json'
|
||||
previousState = ""
|
||||
@@ -27,7 +64,7 @@ class app_state_class:
|
||||
if os.path.exists(stateFile):
|
||||
try:
|
||||
with open(stateFile, 'r') as json_file:
|
||||
previousState = json.load(json_file)
|
||||
previousState = json.load(json_file)
|
||||
except json.decoder.JSONDecodeError as e:
|
||||
mylog('none', [f'[app_state_class] Failed to handle app_state.json: {e}'])
|
||||
|
||||
@@ -41,6 +78,7 @@ class app_state_class:
|
||||
self.isNewVersionChecked = previousState.get("isNewVersionChecked", 0)
|
||||
self.graphQLServerStarted = previousState.get("graphQLServerStarted", 0)
|
||||
self.currentState = previousState.get("currentState", "Init")
|
||||
self.pluginsStates = previousState.get("pluginsStates", {})
|
||||
else: # init first time values
|
||||
self.settingsSaved = 0
|
||||
self.settingsImported = 0
|
||||
@@ -50,6 +88,7 @@ class app_state_class:
|
||||
self.isNewVersionChecked = int(timeNow().timestamp())
|
||||
self.graphQLServerStarted = 0
|
||||
self.currentState = "Init"
|
||||
self.pluginsStates = {}
|
||||
|
||||
# Overwrite with provided parameters if supplied
|
||||
if settingsSaved is not None:
|
||||
@@ -64,6 +103,20 @@ class app_state_class:
|
||||
self.processScan = processScan
|
||||
if currentState is not None:
|
||||
self.currentState = currentState
|
||||
# Merge plugin states instead of overwriting
|
||||
if pluginsStates is not None:
|
||||
for plugin, state in pluginsStates.items():
|
||||
if plugin in self.pluginsStates:
|
||||
# Only update existing keys if both are dicts
|
||||
if isinstance(self.pluginsStates[plugin], dict) and isinstance(state, dict):
|
||||
self.pluginsStates[plugin].update(state)
|
||||
else:
|
||||
# Replace if types don't match
|
||||
self.pluginsStates[plugin] = state
|
||||
else:
|
||||
# Optionally ignore or add new plugin entries
|
||||
# To ignore new plugins, comment out the next line
|
||||
self.pluginsStates[plugin] = state
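A small standalone sketch of the merge semantics implemented above (plugin name and counters are hypothetical):

```python
# Existing per-plugin state and an incoming partial update
previous = {"NMAPDEV": {"totalObjects": 10, "newObjects": 2}}
incoming = {"NMAPDEV": {"newObjects": 5}}

for plugin, state in incoming.items():
    if plugin in previous and isinstance(previous[plugin], dict) and isinstance(state, dict):
        previous[plugin].update(state)   # merge keys instead of replacing the whole dict
    else:
        previous[plugin] = state         # add new plugins or replace mismatched types

print(previous)   # {'NMAPDEV': {'totalObjects': 10, 'newObjects': 5}}
```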
|
||||
|
||||
# check for new version every hour and if currently not running new version
|
||||
if self.isNewVersion is False and self.isNewVersionChecked + 3600 < int(timeNow().timestamp()):
|
||||
@@ -74,7 +127,7 @@ class app_state_class:
|
||||
# with open(stateFile, 'w') as json_file:
|
||||
# json.dump(self, json_file, cls=AppStateEncoder, indent=4)
|
||||
|
||||
# Remove lastUpdated from the dictionary for comparison
|
||||
# Remove lastUpdated from the dictionary for comparison
|
||||
currentStateDict = self.__dict__.copy()
|
||||
currentStateDict.pop('lastUpdated', None)
|
||||
|
||||
@@ -88,22 +141,53 @@ class app_state_class:
|
||||
except (TypeError, ValueError) as e:
|
||||
mylog('none', [f'[app_state_class] Failed to serialize object to JSON: {e}'])
|
||||
|
||||
return # Allows chaining by returning self
|
||||
return
|
||||
|
||||
|
||||
|
||||
#-------------------------------------------------------------------------------
|
||||
# method to update the state
|
||||
def updateState(newState = None, settingsSaved = None, settingsImported = None, showSpinner = False, graphQLServerStarted = None, processScan = None):
|
||||
def updateState(newState = None,
|
||||
settingsSaved = None,
|
||||
settingsImported = None,
|
||||
showSpinner = None,
|
||||
graphQLServerStarted = None,
|
||||
processScan = None,
|
||||
pluginsStates=None):
|
||||
"""
|
||||
Convenience method to create or update the app state.
|
||||
|
||||
return app_state_class(newState, settingsSaved, settingsImported, showSpinner, graphQLServerStarted, processScan)
|
||||
Args:
|
||||
newState (str, optional): Current state to set.
|
||||
settingsSaved (int, optional): Flag for settings saved.
|
||||
settingsImported (int, optional): Flag for settings imported.
|
||||
showSpinner (bool, optional): Flag to control UI spinner.
|
||||
graphQLServerStarted (int, optional): Timestamp of GraphQL server start.
|
||||
processScan (bool, optional): Flag indicating if a scan is active.
|
||||
pluginsStates (dict, optional): Plugin state updates.
|
||||
|
||||
Returns:
|
||||
app_state_class: Updated state object.
|
||||
"""
|
||||
return app_state_class( newState,
|
||||
settingsSaved,
|
||||
settingsImported,
|
||||
showSpinner,
|
||||
graphQLServerStarted,
|
||||
processScan,
|
||||
pluginsStates)
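An illustrative call (hypothetical values, assuming this module's context) that marks a scan as processed and pushes fresh stats for a single plugin:

```python
updateState("Scan processed", processScan=False,
            pluginsStates={"NMAPDEV": {"newObjects": 2}})
```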
|
||||
|
||||
|
||||
#-------------------------------------------------------------------------------
|
||||
# Checks if the object has a __dict__ attribute. If it does, it assumes that it's an instance of a class and serializes its attributes dynamically.
|
||||
class AppStateEncoder(json.JSONEncoder):
|
||||
"""
|
||||
JSON encoder for application state objects.
|
||||
|
||||
Automatically serializes objects with a __dict__ attribute.
|
||||
"""
|
||||
def default(self, obj):
|
||||
if hasattr(obj, '__dict__'):
|
||||
# If the object has a '__dict__', assume it's an instance of a class
|
||||
return obj.__dict__
|
||||
return super().default(obj)
|
||||
return super().default(obj)
|
||||
|
||||
@@ -195,7 +195,10 @@ def ensure_Indexes(sql) -> bool:
|
||||
("idx_dev_location", "CREATE INDEX idx_dev_location ON Devices(devLocation)"),
|
||||
|
||||
# Settings
|
||||
("idx_set_key", "CREATE INDEX idx_set_key ON Settings(setKey)")
|
||||
("idx_set_key", "CREATE INDEX idx_set_key ON Settings(setKey)"),
|
||||
|
||||
# Plugins_Objects
|
||||
("idx_plugins_plugin_mac_ip", "CREATE INDEX idx_plugins_plugin_mac_ip ON Plugins_Objects(Plugin, Object_PrimaryID, Object_SecondaryID)") # Issue #1251: Optimize name resolution lookup
|
||||
]
|
||||
|
||||
for name, create_sql in indexes:
|
||||
|
||||
@@ -449,8 +449,8 @@ def read_config_file(filename):
|
||||
|
||||
|
||||
#-------------------------------------------------------------------------------
|
||||
# DEPERECATED soonest after 10/10/2024
|
||||
# 🤔Idea/TODO: Check and compare versions/timestamps amd only perform a replacement if config/version older than...
|
||||
# DEPRECATE soonest after 10/10/2024
|
||||
# 🤔Idea/TODO: Check and compare versions/timestamps and only perform a replacement if config/version older than...
|
||||
replacements = {
|
||||
r'\bREPORT_TO\b': 'SMTP_REPORT_TO',
|
||||
r'\bSYNC_api_token\b': 'API_TOKEN',
|
||||
|
||||
server/plugin.py
@@ -26,6 +26,8 @@ class plugin_manager:
|
||||
def __init__(self, db, all_plugins):
|
||||
self.db = db
|
||||
self.all_plugins = all_plugins
|
||||
self.plugin_states = {}
|
||||
self.name_plugins_checked = None
|
||||
|
||||
# object cache of settings and schedules for faster lookups
|
||||
self._cache = {}
|
||||
@@ -66,20 +68,6 @@ class plugin_manager:
|
||||
# 🔹 Lookup RUN setting from cache instead of calling get_plugin_setting_obj each time
|
||||
run_setting = self._cache["settings"].get(prefix, {}).get("RUN")
|
||||
|
||||
# set = get_plugin_setting_obj(plugin, "RUN")
|
||||
|
||||
# mylog('debug', [f'[run_plugin_scripts] plugin: {plugin}'])
|
||||
# mylog('debug', [f'[run_plugin_scripts] set: {set}'])
|
||||
# if set != None and set['value'] == runType:
|
||||
# if runType != "schedule":
|
||||
# shouldRun = True
|
||||
# elif runType == "schedule":
|
||||
# # run if overdue scheduled time
|
||||
# # check schedules if any contains a unique plugin prefix matching the current plugin
|
||||
# for schd in conf.mySchedules:
|
||||
# if schd.service == prefix:
|
||||
# # Check if schedule overdue
|
||||
# shouldRun = schd.runScheduleCheck()
|
||||
if run_setting != None and run_setting['value'] == runType:
|
||||
if runType != "schedule":
|
||||
shouldRun = True
|
||||
@@ -91,19 +79,6 @@ class plugin_manager:
|
||||
# Check if schedule overdue
|
||||
shouldRun = schd.runScheduleCheck()
|
||||
|
||||
# if shouldRun:
|
||||
# # Header
|
||||
# updateState(f"Plugin: {prefix}")
|
||||
|
||||
# print_plugin_info(plugin, ['display_name'])
|
||||
# mylog('debug', ['[Plugins] CMD: ', get_plugin_setting_obj(plugin, "CMD")["value"]])
|
||||
# execute_plugin(self.db, self.all_plugins, plugin)
|
||||
# # update last run time
|
||||
# if runType == "schedule":
|
||||
# for schd in conf.mySchedules:
|
||||
# if schd.service == prefix:
|
||||
# # note the last time the scheduled plugin run was executed
|
||||
# schd.last_run = timeNowTZ()
|
||||
if shouldRun:
|
||||
# Header
|
||||
updateState(f"Plugin: {prefix}")
|
||||
@@ -116,6 +91,10 @@ class plugin_manager:
|
||||
|
||||
execute_plugin(self.db, self.all_plugins, plugin)
|
||||
|
||||
# Update plugin states in app_state
|
||||
current_plugin_state = self.get_plugin_states(prefix) # get latest plugin state
|
||||
updateState(pluginsStates={prefix: current_plugin_state.get(prefix, {})})
|
||||
|
||||
# update last run time
|
||||
if runType == "schedule":
|
||||
schd = self._cache["schedules"].get(prefix)
|
||||
@@ -183,12 +162,20 @@ class plugin_manager:
|
||||
|
||||
mylog('minimal', ['[', timeNowTZ(), '] START Run: ', runType])
|
||||
|
||||
# run the plugin to run
|
||||
# run the plugin
|
||||
for plugin in self.all_plugins:
|
||||
if plugin["unique_prefix"] == runType:
|
||||
if plugin["unique_prefix"] == runType:
|
||||
|
||||
pluginName = plugin["unique_prefix"]
|
||||
|
||||
execute_plugin(self.db, self.all_plugins, plugin)
|
||||
|
||||
mylog('minimal', ['[', timeNowTZ(), '] END Run: ', runType])
|
||||
# Update plugin states in app_state
|
||||
current_plugin_state = self.get_plugin_states(pluginName) # get latest plugin state
|
||||
updateState(pluginsStates={pluginName: current_plugin_state.get(pluginName, {})})
|
||||
|
||||
mylog('minimal', ['[', timeNowTZ(), '] END Run: ', runType])
|
||||
|
||||
return
|
||||
|
||||
|
||||
@@ -214,6 +201,76 @@ class plugin_manager:
|
||||
mylog('minimal', ['[Test] END Test: ', runType])
|
||||
|
||||
return
|
||||
|
||||
#-------------------------------------------------------------------------------
|
||||
def get_plugin_states(self, plugin_name=None):
|
||||
"""
|
||||
Returns plugin state summary suitable for updateState(..., pluginsStates=...).
|
||||
If plugin_name is provided, only calculates stats for that plugin.
|
||||
Structure per plugin:
|
||||
{
|
||||
"lastChanged": str,
|
||||
"totalObjects": int,
|
||||
"newObjects": int,
|
||||
"changedObjects": int,
|
||||
"stateUpdated": str
|
||||
}
|
||||
"""
|
||||
sql = self.db.sql
|
||||
plugin_states = {}
|
||||
|
||||
if plugin_name: # Only compute for single plugin
|
||||
sql.execute("""
|
||||
SELECT MAX(DateTimeChanged) AS last_changed,
|
||||
COUNT(*) AS total_objects,
|
||||
SUM(CASE WHEN DateTimeCreated = DateTimeChanged THEN 1 ELSE 0 END) AS new_objects,
|
||||
CURRENT_TIMESTAMP AS state_updated
|
||||
FROM Plugins_Objects
|
||||
WHERE Plugin = ?
|
||||
""", (plugin_name,))
|
||||
row = sql.fetchone()
|
||||
last_changed, total_objects, new_objects, state_updated = row if row else ("", 0, 0, "")
|
||||
new_objects = new_objects or 0 # ensure it's int
|
||||
changed_objects = total_objects - new_objects
|
||||
|
||||
plugin_states[plugin_name] = {
|
||||
"lastChanged": last_changed or "",
|
||||
"totalObjects": total_objects or 0,
|
||||
"newObjects": new_objects or 0,
|
||||
"changedObjects": changed_objects or 0,
|
||||
"stateUpdated": state_updated or ""
|
||||
}
|
||||
|
||||
# Save in memory
|
||||
self.plugin_states[plugin_name] = plugin_states[plugin_name]
|
||||
|
||||
else: # Compute for all plugins (full refresh)
|
||||
sql.execute("""
|
||||
SELECT Plugin,
|
||||
MAX(DateTimeChanged) AS last_changed,
|
||||
COUNT(*) AS total_objects,
|
||||
SUM(CASE WHEN DateTimeCreated = DateTimeChanged THEN 1 ELSE 0 END) AS new_objects,
|
||||
CURRENT_TIMESTAMP AS state_updated
|
||||
FROM Plugins_Objects
|
||||
GROUP BY Plugin
|
||||
""")
|
||||
for plugin, last_changed, total_objects, new_objects, state_updated in sql.fetchall():
|
||||
new_objects = new_objects or 0 # ensure it's int
|
||||
changed_objects = total_objects - new_objects
|
||||
plugin_states[plugin] = {
|
||||
"lastChanged": last_changed or "",
|
||||
"totalObjects": total_objects or 0,
|
||||
"newObjects": new_objects or 0,
|
||||
"changedObjects": changed_objects or 0,
|
||||
"stateUpdated": state_updated or ""
|
||||
}
|
||||
|
||||
# Save in memory
|
||||
self.plugin_states = plugin_states
|
||||
|
||||
return plugin_states
|
||||
|
||||
|
||||
|
||||
|
||||
#-------------------------------------------------------------------------------
|
||||
|
||||
@@ -3,19 +3,23 @@ import subprocess
|
||||
import conf
|
||||
import os
|
||||
import re
|
||||
from dateutil import parser
|
||||
|
||||
# Register NetAlertX directories
|
||||
INSTALL_PATH="/app"
|
||||
sys.path.extend([f"{INSTALL_PATH}/server"])
|
||||
|
||||
from helper import timeNowTZ, get_setting_value, check_IP_format
|
||||
from logger import mylog
|
||||
from logger import mylog, Logger
|
||||
from const import vendorsPath, vendorsPathNewest, sql_generateGuid
|
||||
from models.device_instance import DeviceInstance
|
||||
from scan.name_resolution import NameResolver
|
||||
from scan.device_heuristics import guess_icon, guess_type
|
||||
from db.db_helper import sanitize_SQL_input, list_to_where
|
||||
|
||||
# Make sure log level is initialized correctly
|
||||
Logger(get_setting_value('LOG_LEVEL'))
|
||||
|
||||
#-------------------------------------------------------------------------------
|
||||
# Removing devices from the CurrentScan DB table which the user chose to ignore by MAC or IP
|
||||
def exclude_ignored_devices(db):
|
||||
@@ -516,19 +520,42 @@ def create_new_devices (db):
|
||||
|
||||
|
||||
#-------------------------------------------------------------------------------
|
||||
def update_devices_names(db):
|
||||
sql = db.sql
|
||||
resolver = NameResolver(db)
|
||||
device_handler = DeviceInstance(db)
|
||||
def update_devices_names(pm):
|
||||
sql = pm.db.sql
|
||||
resolver = NameResolver(pm.db)
|
||||
device_handler = DeviceInstance(pm.db)
|
||||
|
||||
# --- Short-circuit if no name-resolution plugin has changed ---
|
||||
name_plugins = ["DIGSCAN", "NSLOOKUP", "NBTSCAN", "AVAHISCAN"]
|
||||
|
||||
# Retrieve last time name resolution was checked (string or datetime)
|
||||
last_checked_str = pm.name_plugins_checked
|
||||
last_checked_dt = parser.parse(last_checked_str) if isinstance(last_checked_str, str) else last_checked_str
|
||||
|
||||
# Collect valid state update timestamps for name-related plugins
|
||||
state_times = []
|
||||
for p in name_plugins:
|
||||
state_updated = pm.plugin_states.get(p, {}).get("stateUpdated")
|
||||
if state_updated and state_updated.strip(): # skip empty or None
|
||||
state_times.append(state_updated)
|
||||
|
||||
# Determine the latest valid stateUpdated timestamp
|
||||
latest_state_str = max(state_times, default=None)
|
||||
latest_state_dt = parser.parse(latest_state_str) if latest_state_str else None
|
||||
|
||||
# Skip if no plugin state changed since last check
|
||||
if last_checked_dt and latest_state_dt and latest_state_dt <= last_checked_dt:
|
||||
mylog('debug', '[Update Device Name] No relevant name plugin changes since last check — skipping update.')
|
||||
return
|
||||
|
||||
nameNotFound = "(name not found)"
|
||||
|
||||
# Define resolution strategies in priority order
|
||||
strategies = [
|
||||
(resolver.resolve_dig, 'dig'),
|
||||
(resolver.resolve_mdns, 'mdns'),
|
||||
(resolver.resolve_nslookup, 'nslookup'),
|
||||
(resolver.resolve_nbtlookup, 'nbtlookup')
|
||||
(resolver.resolve_dig, 'DIGSCAN'),
|
||||
(resolver.resolve_mdns, 'AVAHISCAN'),
|
||||
(resolver.resolve_nslookup, 'NSLOOKUP'),
|
||||
(resolver.resolve_nbtlookup, 'NBTSCAN')
|
||||
]
|
||||
|
||||
def resolve_devices(devices, resolve_both_name_and_fqdn=True):
|
||||
@@ -590,7 +617,7 @@ def update_devices_names(db):
|
||||
recordsToUpdate, recordsNotFound, foundStats, notFound = resolve_devices(unknownDevices)
|
||||
|
||||
# Log summary
|
||||
mylog('verbose', f"[Update Device Name] Names Found (DiG/mDNS/NSLOOKUP/NBTSCAN): {len(recordsToUpdate)} ({foundStats['dig']}/{foundStats['mdns']}/{foundStats['nslookup']}/{foundStats['nbtlookup']})")
|
||||
mylog('verbose', f"[Update Device Name] Names Found (DIGSCAN/AVAHISCAN/NSLOOKUP/NBTSCAN): {len(recordsToUpdate)} ({foundStats['DIGSCAN']}/{foundStats['AVAHISCAN']}/{foundStats['NSLOOKUP']}/{foundStats['NBTSCAN']})")
|
||||
mylog('verbose', f'[Update Device Name] Names Not Found : {notFound}')
|
||||
|
||||
# Apply updates to database
|
||||
@@ -607,14 +634,21 @@ def update_devices_names(db):
|
||||
recordsToUpdate, _, foundStats, notFound = resolve_devices(allDevices, resolve_both_name_and_fqdn=False)
|
||||
|
||||
# Log summary
|
||||
mylog('verbose', f"[Update FQDN] Names Found (DiG/mDNS/NSLOOKUP/NBTSCAN): {len(recordsToUpdate)} ({foundStats['dig']}/{foundStats['mdns']}/{foundStats['nslookup']}/{foundStats['nbtlookup']})")
|
||||
mylog('verbose', f"[Update FQDN] Names Found (DIGSCAN/AVAHISCAN/NSLOOKUP/NBTSCAN): {len(recordsToUpdate)} ({foundStats['DIGSCAN']}/{foundStats['AVAHISCAN']}/{foundStats['NSLOOKUP']}/{foundStats['NBTSCAN']})")
|
||||
mylog('verbose', f'[Update FQDN] Names Not Found : {notFound}')
|
||||
|
||||
# Apply FQDN-only updates
|
||||
sql.executemany("UPDATE Devices SET devFQDN = ? WHERE devMac = ?", recordsToUpdate)
|
||||
|
||||
# Commit all database changes
|
||||
db.commitDB()
|
||||
pm.db.commitDB()
|
||||
|
||||
# --- Step 3: Log last checked time ---
|
||||
# After resolving names, update last checked
|
||||
sql = pm.db.sql
|
||||
sql.execute("SELECT CURRENT_TIMESTAMP")
|
||||
row = sql.fetchone()
|
||||
pm.name_plugins_checked = row[0] if row else None
|
||||
|
||||
#-------------------------------------------------------------------------------
|
||||
# Updates devPresentLastScan for parent devices based on the presence of their NICs
|
||||
|
||||
@@ -35,7 +35,7 @@ class NameResolver:
|
||||
WHERE Plugin = '{plugin}' AND Object_PrimaryID = '{pMAC}'
|
||||
""")
|
||||
result = sql.fetchall()
|
||||
self.db.commitDB()
|
||||
# self.db.commitDB() # Issue #1251: Optimize name resolution lookup
|
||||
if result:
|
||||
raw = result[0][0]
|
||||
return ResolvedName(raw, self.clean_device_name(raw, False))
|
||||
@@ -46,7 +46,7 @@ class NameResolver:
|
||||
WHERE Plugin = '{plugin}' AND Object_SecondaryID = '{pIP}'
|
||||
""")
|
||||
result = sql.fetchall()
|
||||
self.db.commitDB()
|
||||
# self.db.commitDB() # Issue #1251: Optimize name resolution lookup
|
||||
if result:
|
||||
raw = result[0][0]
|
||||
return ResolvedName(raw, self.clean_device_name(raw, True))
|
||||
|
||||