Merge branch 'next_release' of https://github.com/netalertx/NetAlertX into next_release

This commit is contained in:
Jokob @NetAlertX
2026-03-15 01:42:23 +00:00
158 changed files with 7576 additions and 2892 deletions

View File

@@ -124,8 +124,10 @@ def main():
# last time any scan or maintenance/upkeep was run
conf.last_scan_run = loop_start_time
# Header
updateState("Process: Start")
# Header (also broadcasts last_scan_run to frontend via SSE / app_state.json)
updateState("Process: Start",
last_scan_run=loop_start_time.replace(microsecond=0).isoformat(),
next_scan_time="")
# Timestamp
startTime = loop_start_time
@@ -134,6 +136,17 @@ def main():
# Check if any plugins need to run on schedule
pm.run_plugin_scripts("schedule")
# Compute the next scheduled run time AFTER schedule check (which updates last_next_schedule)
# Only device_scanner plugins have meaningful next_scan times for user display
scanner_prefixes = {p["unique_prefix"] for p in all_plugins if p.get("plugin_type") == "device_scanner"}
scanner_next = [s.last_next_schedule for s in conf.mySchedules if s.service in scanner_prefixes]
# Get the earliest next scan time across all device scanners and broadcast.
# updateState validates the value is in the future before storing/broadcasting.
if scanner_next:
next_scan_dt = min(scanner_next)
updateState(next_scan_time=next_scan_dt.replace(microsecond=0).isoformat())
# determine run/scan type based on passed time
# --------------------------------------------

View File

@@ -42,6 +42,7 @@ from .dbquery_endpoint import read_query, write_query, update_query, delete_quer
from .sync_endpoint import handle_sync_post, handle_sync_get # noqa: E402 [flake8 lint suppression]
from .logs_endpoint import clean_log # noqa: E402 [flake8 lint suppression]
from .health_endpoint import get_health_status # noqa: E402 [flake8 lint suppression]
from .languages_endpoint import get_languages # noqa: E402 [flake8 lint suppression]
from models.user_events_queue_instance import UserEventsQueueInstance # noqa: E402 [flake8 lint suppression]
from models.event_instance import EventInstance # noqa: E402 [flake8 lint suppression]
@@ -95,6 +96,7 @@ from .openapi.schemas import ( # noqa: E402 [flake8 lint suppression]
DbQueryUpdateRequest, DbQueryDeleteRequest,
AddToQueueRequest, GetSettingResponse,
RecentEventsRequest, SetDeviceAliasRequest,
LanguagesResponse,
)
from .sse_endpoint import ( # noqa: E402 [flake8 lint suppression]
@@ -1962,6 +1964,34 @@ def check_health(payload=None):
}), 500
@app.route("/languages", methods=["GET"])
@validate_request(
    operation_id="get_languages",
    summary="Get Supported Languages",
    description="Returns the canonical list of supported UI languages loaded from languages.json.",
    response_model=LanguagesResponse,
    tags=["system", "languages"],
    auth_callable=is_authorized
)
def list_languages(payload=None):
    """Serve the canonical language registry loaded from languages.json.

    Maps the two failure modes of get_languages() onto HTTP 500 responses
    with distinct error payloads; everything else is a 200 with the
    registry spread into the response body.
    """
    try:
        registry = get_languages()
    except FileNotFoundError:
        body = {
            "success": False,
            "error": "languages.json not found",
            "message": "Language registry file is missing"
        }
        return jsonify(body), 500
    except ValueError as e:
        body = {
            "success": False,
            "error": str(e),
            "message": "Language registry file is malformed"
        }
        return jsonify(body), 500
    return jsonify({"success": True, **registry}), 200
# --------------------------
# Background Server Start
# --------------------------

View File

@@ -85,7 +85,7 @@ class Device(ObjectType):
devStatus = String(description="Online/Offline status")
devIsRandomMac = Int(description="Calculated: Is MAC address randomized?")
devParentChildrenCount = Int(description="Calculated: Number of children attached to this parent")
devIpLong = Int(description="Calculated: IP address in long format")
devIpLong = String(description="Calculated: IP address in long format (returned as string to support the full unsigned 32-bit range)")
devFilterStatus = String(description="Calculated: Device status for UI filtering")
devFQDN = String(description="Fully Qualified Domain Name")
devParentRelType = String(description="Relationship type to parent")
@@ -100,11 +100,15 @@ class Device(ObjectType):
devParentPortSource = String(description="Source tracking for devParentPort (USER, LOCKED, NEWDEV, or plugin prefix)")
devParentRelTypeSource = String(description="Source tracking for devParentRelType (USER, LOCKED, NEWDEV, or plugin prefix)")
devVlanSource = String(description="Source tracking for devVlan")
devFlapping = Int(description="Indicates flapping device (device changing between online/offline states frequently)")
devCanSleep = Int(description="Can this device sleep? (0 or 1). When enabled, offline periods within NTFPRCS_sleep_time are reported as Sleeping instead of Down.")
devIsSleeping = Int(description="Computed: Is device currently in a sleep window? (0 or 1)")
class DeviceResult(ObjectType):
devices = List(Device)
count = Int()
db_count = Int(description="Total device count in the database, before any status/filter/search is applied")
# --- SETTINGS ---
@@ -195,7 +199,14 @@ class Query(ObjectType):
devices_data = json.load(f)["data"]
except (FileNotFoundError, json.JSONDecodeError) as e:
mylog("none", f"[graphql_schema] Error loading devices data: {e}")
return DeviceResult(devices=[], count=0)
return DeviceResult(devices=[], count=0, db_count=0)
# Int fields that may arrive from the DB as empty strings — coerce to None
_INT_FIELDS = [
"devFavorite", "devStaticIP", "devScan", "devLogEvents", "devAlertEvents",
"devAlertDown", "devSkipRepeated", "devPresentLastScan", "devIsNew",
"devIsArchived", "devReqNicsOnline", "devFlapping", "devCanSleep", "devIsSleeping",
]
# Add dynamic fields to each device
for device in devices_data:
@@ -203,10 +214,20 @@ class Query(ObjectType):
device["devParentChildrenCount"] = get_number_of_children(
device["devMac"], devices_data
)
device["devIpLong"] = format_ip_long(device.get("devLastIP", ""))
# Return as string — IPv4 long values can exceed Int's signed 32-bit max (2,147,483,647)
device["devIpLong"] = str(format_ip_long(device.get("devLastIP", "")))
# Coerce empty strings to None so GraphQL Int serialisation doesn't fail
for _field in _INT_FIELDS:
if device.get(_field) == "":
device[_field] = None
mylog("trace", f"[graphql_schema] devices_data: {devices_data}")
# Raw DB count — before any status, filter, or search is applied.
# Used by the frontend to distinguish "no devices in DB" from "filter returned nothing".
db_count = len(devices_data)
# initialize total_count
total_count = len(devices_data)
@@ -246,7 +267,7 @@ class Query(ObjectType):
)
is_down = (
device["devPresentLastScan"] == 0 and device["devAlertDown"] and "down" in allowed_statuses
device["devPresentLastScan"] == 0 and device["devAlertDown"] and device.get("devIsSleeping", 0) == 0 and "down" in allowed_statuses
)
is_offline = (
@@ -266,7 +287,7 @@ class Query(ObjectType):
filtered.append(device)
devices_data = filtered
# 🔻 START If you change anything here, also update get_device_condition_by_status
# 🔻 START If you change anything here, also update get_device_conditions
elif status == "connected":
devices_data = [
device
@@ -281,11 +302,17 @@ class Query(ObjectType):
devices_data = [
device for device in devices_data if device["devIsNew"] == 1 and device["devIsArchived"] == 0
]
elif status == "sleeping":
devices_data = [
device
for device in devices_data
if device.get("devIsSleeping", 0) == 1 and device["devIsArchived"] == 0
]
elif status == "down":
devices_data = [
device
for device in devices_data
if device["devPresentLastScan"] == 0 and device["devAlertDown"] and device["devIsArchived"] == 0
if device["devPresentLastScan"] == 0 and device["devAlertDown"] and device.get("devIsSleeping", 0) == 0 and device["devIsArchived"] == 0
]
elif status == "archived":
devices_data = [
@@ -323,7 +350,25 @@ class Query(ObjectType):
for device in devices_data
if device["devType"] in network_dev_types and device["devPresentLastScan"] == 0 and device["devIsArchived"] == 0
]
# 🔺 END If you change anything here, also update get_device_condition_by_status
elif status == "unstable_devices":
devices_data = [
device
for device in devices_data
if device["devIsArchived"] == 0 and device["devFlapping"] == 1
]
elif status == "unstable_favorites":
devices_data = [
device
for device in devices_data
if device["devIsArchived"] == 0 and device["devFavorite"] == 1 and device["devFlapping"] == 1
]
elif status == "unstable_network_devices":
devices_data = [
device
for device in devices_data
if device["devIsArchived"] == 0 and device["devType"] in network_dev_types and device["devFlapping"] == 1
]
# 🔺 END If you change anything here, also update get_device_conditions
elif status == "all_devices":
devices_data = devices_data # keep all
@@ -399,7 +444,7 @@ class Query(ObjectType):
# Convert dict objects to Device instances to enable field resolution
devices = [Device(**device) for device in devices_data]
return DeviceResult(devices=devices, count=total_count)
return DeviceResult(devices=devices, count=total_count, db_count=db_count)
# --- SETTINGS ---
settings = Field(SettingResult, filters=List(FilterOptionsInput))
@@ -526,7 +571,7 @@ class Query(ObjectType):
language_folder = '/app/front/php/templates/language/'
if os.path.exists(language_folder):
for filename in os.listdir(language_folder):
if filename.endswith('.json'):
if filename.endswith('.json') and filename != 'languages.json':
file_lang_code = filename.replace('.json', '')
# Filter by langCode if provided

View File

@@ -0,0 +1,43 @@
"""Languages endpoint — returns the canonical language registry from languages.json."""
import json
import os
from logger import mylog
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
LANGUAGES_JSON_PATH = os.path.join(
INSTALL_PATH, "front", "php", "templates", "language", "language_definitions", "languages.json"
)
def get_languages():
    """
    Load and return the canonical language registry.

    Returns a dict with keys:
        - default (str): the fallback language code
        - languages (list[dict]): each entry has 'code' and 'display'
        - count (int): number of entries in 'languages'

    Raises:
        FileNotFoundError: if languages.json is missing
        ValueError: if the JSON is malformed or missing/invalid required fields
    """
    try:
        with open(LANGUAGES_JSON_PATH, "r", encoding="utf-8") as f:
            data = json.load(f)
    except FileNotFoundError:
        mylog("none", [f"[languages] languages.json not found at {LANGUAGES_JSON_PATH}"])
        raise
    except json.JSONDecodeError as e:
        mylog("none", [f"[languages] Failed to parse languages.json: {e}"])
        raise ValueError(f"Malformed languages.json: {e}") from e

    # Validate shape, not just key presence: a top-level non-dict (e.g. a JSON
    # list or string) or a non-list 'languages' value would otherwise slip
    # through and surface later as an unhandled TypeError instead of the
    # documented ValueError the endpoint knows how to report.
    if not isinstance(data, dict) or "default" not in data or "languages" not in data:
        raise ValueError("languages.json must contain 'default' and 'languages' keys")
    if not isinstance(data["languages"], list):
        raise ValueError("languages.json 'languages' must be a list")

    return {
        "default": data["default"],
        "languages": data["languages"],
        "count": len(data["languages"]),
    }

View File

@@ -35,7 +35,7 @@ COLUMN_NAME_PATTERN = re.compile(r"^[a-zA-Z0-9_]+$")
ALLOWED_DEVICE_COLUMNS = Literal[
"devName", "devOwner", "devType", "devVendor",
"devGroup", "devLocation", "devComments", "devFavorite",
"devParentMAC"
"devParentMAC", "devCanSleep"
]
ALLOWED_NMAP_MODES = Literal[
@@ -204,9 +204,19 @@ class DeviceInfo(BaseModel):
description="Present in last scan (0 or 1)",
json_schema_extra={"enum": [0, 1]}
)
devStatus: Optional[Literal["online", "offline"]] = Field(
devStatus: Optional[Literal["online", "offline", "sleeping"]] = Field(
None,
description="Online/Offline status"
description="Online/Offline/Sleeping status"
)
devCanSleep: Optional[int] = Field(
0,
description="Can device sleep? (0=No, 1=Yes). When enabled, offline periods within NTFPRCS_sleep_time window are shown as Sleeping.",
json_schema_extra={"enum": [0, 1]}
)
devIsSleeping: Optional[int] = Field(
0,
description="Computed: Is device currently in a sleep window? (0=No, 1=Yes)",
json_schema_extra={"enum": [0, 1]}
)
devMacSource: Optional[str] = Field(None, description="Source of devMac (USER, LOCKED, or plugin prefix)")
devNameSource: Optional[str] = Field(None, description="Source of devName")
@@ -228,14 +238,15 @@ class DeviceSearchResponse(BaseResponse):
class DeviceListRequest(BaseModel):
"""Request for listing devices by status."""
status: Optional[Literal[
"connected", "down", "favorites", "new", "archived", "all", "my",
"connected", "down", "sleeping", "favorites", "new", "archived", "all", "my",
"offline"
]] = Field(
None,
description=(
"Filter devices by status:\n"
"- connected: Active devices present in the last scan\n"
"- down: Devices with active 'Device Down' alert\n"
"- down: Devices with active 'Device Down' alert (excludes sleeping)\n"
"- sleeping: Devices in a sleep window (devCanSleep=1, offline within NTFPRCS_sleep_time)\n"
"- favorites: Devices marked as favorite\n"
"- new: Devices flagged as new\n"
"- archived: Devices moved to archive\n"
@@ -1031,6 +1042,41 @@ class GetSettingResponse(BaseResponse):
value: Any = Field(None, description="The setting value")
# =============================================================================
# LANGUAGES SCHEMAS
# =============================================================================
class LanguageEntry(BaseModel):
    """A single supported language entry from the languages.json registry."""
    # extra="allow": tolerate additional metadata keys on registry entries
    # so future additions to languages.json do not fail response validation.
    model_config = ConfigDict(extra="allow")
    code: str = Field(..., description="ISO language code (e.g. 'en_us')")
    display: str = Field(..., description="Human-readable display name (e.g. 'English (en_us)')")
class LanguagesResponse(BaseResponse):
    """Response for GET /languages — the canonical language registry."""
    # extra="allow": the endpoint spreads the registry dict into the response
    # body, so unexpected top-level keys from languages.json must not break
    # validation or schema generation.
    model_config = ConfigDict(
        extra="allow",
        json_schema_extra={
            "examples": [{
                "success": True,
                "default": "en_us",
                "count": 20,
                "languages": [
                    {"code": "en_us", "display": "English (en_us)"},
                    {"code": "de_de", "display": "German (de_de)"}
                ]
            }]
        }
    )
    default: str = Field(..., description="Default/fallback language code")
    count: int = Field(..., description="Total number of supported languages")
    languages: List[LanguageEntry] = Field(..., description="All supported languages")
# =============================================================================
# GRAPHQL SCHEMAS
# =============================================================================

View File

@@ -4,7 +4,7 @@ import json
from const import applicationPath, apiPath
from logger import mylog
from helper import checkNewVersion
from utils.datetime_utils import timeNowUTC
from utils.datetime_utils import timeNowUTC, is_datetime_future, normalizeTimeStamp
from api_server.sse_broadcast import broadcast_state_update
# Register NetAlertX directories using runtime configuration
@@ -43,7 +43,9 @@ class app_state_class:
processScan=False,
pluginsStates=None,
appVersion=None,
buildTimestamp=None
buildTimestamp=None,
last_scan_run=None,
next_scan_time=None
):
"""
Initialize the application state, optionally overwriting previous values.
@@ -89,6 +91,8 @@ class app_state_class:
self.pluginsStates = previousState.get("pluginsStates", {})
self.appVersion = previousState.get("appVersion", "")
self.buildTimestamp = previousState.get("buildTimestamp", "")
self.last_scan_run = previousState.get("last_scan_run", "")
self.next_scan_time = previousState.get("next_scan_time", "")
else: # init first time values
self.settingsSaved = 0
self.settingsImported = 0
@@ -101,6 +105,8 @@ class app_state_class:
self.pluginsStates = {}
self.appVersion = ""
self.buildTimestamp = ""
self.last_scan_run = ""
self.next_scan_time = ""
# Overwrite with provided parameters if supplied
if settingsSaved is not None:
@@ -133,6 +139,15 @@ class app_state_class:
self.appVersion = appVersion
if buildTimestamp is not None:
self.buildTimestamp = buildTimestamp
if last_scan_run is not None:
self.last_scan_run = last_scan_run
if next_scan_time is not None:
# Guard against stale/past timestamps — only store if genuinely in the future.
# This enforces correctness regardless of which caller sets next_scan_time.
if next_scan_time == "" or is_datetime_future(normalizeTimeStamp(next_scan_time)):
self.next_scan_time = next_scan_time
else:
self.next_scan_time = ""
# check for new version every hour and if currently not running new version
if self.isNewVersion is False and self.isNewVersionChecked + 3600 < int(
timeNowUTC(as_string=False).timestamp()
@@ -165,7 +180,9 @@ class app_state_class:
self.settingsImported,
timestamp=self.lastUpdated,
appVersion=self.appVersion,
buildTimestamp=self.buildTimestamp
buildTimestamp=self.buildTimestamp,
last_scan_run=self.last_scan_run,
next_scan_time=self.next_scan_time
)
except Exception as e:
mylog("none", [f"[app_state] SSE broadcast: {e}"])
@@ -183,7 +200,9 @@ def updateState(newState = None,
processScan = None,
pluginsStates=None,
appVersion=None,
buildTimestamp=None):
buildTimestamp=None,
last_scan_run=None,
next_scan_time=None):
"""
Convenience method to create or update the app state.
@@ -197,6 +216,8 @@ def updateState(newState = None,
pluginsStates (dict, optional): Plugin state updates.
appVersion (str, optional): Application version.
buildTimestamp (str, optional): Build timestamp.
last_scan_run (str, optional): ISO timestamp of last backend scan run.
next_scan_time (str, optional): ISO timestamp of next scheduled device_scanner run.
Returns:
app_state_class: Updated state object.
@@ -210,7 +231,9 @@ def updateState(newState = None,
processScan,
pluginsStates,
appVersion,
buildTimestamp
buildTimestamp,
last_scan_run,
next_scan_time
)

View File

@@ -58,70 +58,13 @@ NULL_EQUIVALENTS = ["", "null", "(unknown)", "(Unknown)", "(name not found)"]
# Convert list to SQL string: wrap each value in single quotes and escape single quotes if needed
NULL_EQUIVALENTS_SQL = ",".join("'" + v.replace("'", "''") + "'" for v in NULL_EQUIVALENTS)
# ===============================================================================
# SQL queries
# ===============================================================================
sql_devices_all = """
SELECT
rowid,
IFNULL(devMac, '') AS devMac,
IFNULL(devName, '') AS devName,
IFNULL(devOwner, '') AS devOwner,
IFNULL(devType, '') AS devType,
IFNULL(devVendor, '') AS devVendor,
IFNULL(devFavorite, '') AS devFavorite,
IFNULL(devGroup, '') AS devGroup,
IFNULL(devComments, '') AS devComments,
IFNULL(devFirstConnection, '') AS devFirstConnection,
IFNULL(devLastConnection, '') AS devLastConnection,
IFNULL(devLastIP, '') AS devLastIP,
IFNULL(devPrimaryIPv4, '') AS devPrimaryIPv4,
IFNULL(devPrimaryIPv6, '') AS devPrimaryIPv6,
IFNULL(devVlan, '') AS devVlan,
IFNULL(devForceStatus, '') AS devForceStatus,
IFNULL(devStaticIP, '') AS devStaticIP,
IFNULL(devScan, '') AS devScan,
IFNULL(devLogEvents, '') AS devLogEvents,
IFNULL(devAlertEvents, '') AS devAlertEvents,
IFNULL(devAlertDown, '') AS devAlertDown,
IFNULL(devSkipRepeated, '') AS devSkipRepeated,
IFNULL(devLastNotification, '') AS devLastNotification,
IFNULL(devPresentLastScan, 0) AS devPresentLastScan,
IFNULL(devIsNew, '') AS devIsNew,
IFNULL(devLocation, '') AS devLocation,
IFNULL(devIsArchived, '') AS devIsArchived,
IFNULL(devParentMAC, '') AS devParentMAC,
IFNULL(devParentPort, '') AS devParentPort,
IFNULL(devIcon, '') AS devIcon,
IFNULL(devGUID, '') AS devGUID,
IFNULL(devSite, '') AS devSite,
IFNULL(devSSID, '') AS devSSID,
IFNULL(devSyncHubNode, '') AS devSyncHubNode,
IFNULL(devSourcePlugin, '') AS devSourcePlugin,
IFNULL(devCustomProps, '') AS devCustomProps,
IFNULL(devFQDN, '') AS devFQDN,
IFNULL(devParentRelType, '') AS devParentRelType,
IFNULL(devReqNicsOnline, '') AS devReqNicsOnline,
IFNULL(devMacSource, '') AS devMacSource,
IFNULL(devNameSource, '') AS devNameSource,
IFNULL(devFQDNSource, '') AS devFQDNSource,
IFNULL(devLastIPSource, '') AS devLastIPSource,
IFNULL(devVendorSource, '') AS devVendorSource,
IFNULL(devSSIDSource, '') AS devSSIDSource,
IFNULL(devParentMACSource, '') AS devParentMACSource,
IFNULL(devParentPortSource, '') AS devParentPortSource,
IFNULL(devParentRelTypeSource, '') AS devParentRelTypeSource,
IFNULL(devVlanSource, '') AS devVlanSource,
CASE
WHEN devIsNew = 1 THEN 'New'
WHEN devPresentLastScan = 1 THEN 'On-line'
WHEN devPresentLastScan = 0 AND devAlertDown != 0 THEN 'Down'
WHEN devIsArchived = 1 THEN 'Archived'
WHEN devPresentLastScan = 0 THEN 'Off-line'
ELSE 'Unknown status'
END AS devStatus
FROM Devices
# All-devices listing. Per-column IFNULL defaulting and the computed fields
# (devStatus, devIsSleeping, devFlapping) now live in the DevicesView SQL
# view (built by ensure_views in db_upgrade), so a plain SELECT * suffices.
sql_devices_all = """
SELECT
*
FROM DevicesView
"""
# Application events, newest first.
sql_appevents = """select * from AppEvents order by DateTimeCreated desc"""

View File

@@ -10,7 +10,6 @@ from db.db_helper import get_table_json, json_obj
from workflows.app_events import AppEvent_obj
from db.db_upgrade import (
ensure_column,
ensure_views,
ensure_CurrentScan,
ensure_plugins_tables,
ensure_Parameters,
@@ -75,6 +74,17 @@ class DB:
# When temp_store is MEMORY (2) temporary tables and indices
# are kept as if they were in pure in-memory databases.
self.sql_connection.execute("PRAGMA temp_store=MEMORY;")
# WAL size limit: auto-checkpoint when WAL approaches this size,
# even if other connections are active. Prevents unbounded WAL growth
# on systems with multiple long-lived processes (backend, nginx, PHP-FPM).
# User-configurable via PRAGMA_JOURNAL_SIZE_LIMIT setting (default 50 MB).
try:
from helper import get_setting_value
wal_limit_mb = int(get_setting_value("PRAGMA_JOURNAL_SIZE_LIMIT", "50"))
wal_limit_bytes = wal_limit_mb * 1000000
except Exception:
wal_limit_bytes = 50000000 # 50 MB fallback
self.sql_connection.execute(f"PRAGMA journal_size_limit={wal_limit_bytes};")
self.sql_connection.text_factory = str
self.sql_connection.row_factory = sqlite3.Row
@@ -181,6 +191,8 @@ class DB:
raise RuntimeError("ensure_column(devParentRelTypeSource) failed")
if not ensure_column(self.sql, "Devices", "devVlanSource", "TEXT"):
raise RuntimeError("ensure_column(devVlanSource) failed")
if not ensure_column(self.sql, "Devices", "devCanSleep", "INTEGER"):
raise RuntimeError("ensure_column(devCanSleep) failed")
# Settings table setup
ensure_Settings(self.sql)
@@ -197,8 +209,9 @@ class DB:
# CurrentScan table setup
ensure_CurrentScan(self.sql)
# Views
ensure_views(self.sql)
# Views are created in importConfigs() after settings are committed,
# so NTFPRCS_sleep_time is available when the view is built.
# ensure_views is NOT called here.
# Indexes
ensure_Indexes(self.sql)
@@ -330,5 +343,13 @@ def get_temp_db_connection():
conn = sqlite3.connect(fullDbPath, timeout=5, isolation_level=None)
conn.execute("PRAGMA journal_mode=WAL;")
conn.execute("PRAGMA busy_timeout=5000;") # 5s wait before giving up
# Apply user-configured WAL size limit (default 50 MB in initialise.py)
try:
from helper import get_setting_value
wal_limit_mb = int(get_setting_value("PRAGMA_JOURNAL_SIZE_LIMIT", "50"))
wal_limit_bytes = wal_limit_mb * 1000000
except Exception:
wal_limit_bytes = 50000000 # 50 MB fallback
conn.execute(f"PRAGMA journal_size_limit={wal_limit_bytes};")
conn.row_factory = sqlite3.Row
return conn

View File

@@ -14,22 +14,29 @@ from const import NULL_EQUIVALENTS_SQL # noqa: E402 [flake8 lint suppression]
def get_device_conditions():
network_dev_types = ",".join("'" + v.replace("'", "''") + "'" for v in get_setting_value("NETWORK_DEVICE_TYPES"))
# DO NOT CHANGE ORDER
# Base archived condition
base_active = "devIsArchived=0"
# DO NOT CHANGE ORDER - if you add or change something update graphql endpoint as well
conditions = {
"all": "WHERE devIsArchived=0",
"my": "WHERE devIsArchived=0",
"all": f"WHERE {base_active}",
"my": f"WHERE {base_active}",
"connected": "WHERE devPresentLastScan=1",
"favorites": "WHERE devIsArchived=0 AND devFavorite=1",
"new": "WHERE devIsArchived=0 AND devIsNew=1",
"down": "WHERE devIsArchived=0 AND devAlertDown != 0 AND devPresentLastScan=0",
"offline": "WHERE devIsArchived=0 AND devPresentLastScan=0",
"favorites": f"WHERE {base_active} AND devFavorite=1",
"new": f"WHERE {base_active} AND devIsNew=1",
"sleeping": f"WHERE {base_active} AND devIsSleeping=1",
"down": f"WHERE {base_active} AND devAlertDown != 0 AND devPresentLastScan=0 AND devIsSleeping=0",
"offline": f"WHERE {base_active} AND devPresentLastScan=0",
"archived": "WHERE devIsArchived=1",
"network_devices": f"WHERE devIsArchived=0 AND devType in ({network_dev_types})",
"network_devices_down": f"WHERE devIsArchived=0 AND devType in ({network_dev_types}) AND devPresentLastScan=0",
"unknown": f"WHERE devIsArchived=0 AND devName in ({NULL_EQUIVALENTS_SQL})",
"known": f"WHERE devIsArchived=0 AND devName not in ({NULL_EQUIVALENTS_SQL})",
"favorites_offline": "WHERE devIsArchived=0 AND devFavorite=1 AND devPresentLastScan=0",
"new_online": "WHERE devIsArchived=0 AND devIsNew=1 AND devPresentLastScan=0",
"network_devices": f"WHERE {base_active} AND devType IN ({network_dev_types})",
"network_devices_down": f"WHERE {base_active} AND devType IN ({network_dev_types}) AND devPresentLastScan=0",
"unknown": f"WHERE {base_active} AND devName IN ({NULL_EQUIVALENTS_SQL})",
"known": f"WHERE {base_active} AND devName NOT IN ({NULL_EQUIVALENTS_SQL})",
"favorites_offline": f"WHERE {base_active} AND devFavorite=1 AND devPresentLastScan=0",
"new_online": f"WHERE {base_active} AND devIsNew=1 AND devPresentLastScan=1",
"unstable_devices": f"WHERE {base_active} AND devFlapping=1",
"unstable_favorites": f"WHERE {base_active} AND devFavorite=1 AND devFlapping=1",
"unstable_network_devices": f"WHERE {base_active} AND devType IN ({network_dev_types}) AND devFlapping=1",
}
return conditions

View File

@@ -28,6 +28,7 @@ EXPECTED_DEVICES_COLUMNS = [
"devLogEvents",
"devAlertEvents",
"devAlertDown",
"devCanSleep",
"devSkipRepeated",
"devLastNotification",
"devPresentLastScan",
@@ -232,6 +233,115 @@ def ensure_views(sql) -> bool:
""")
FLAP_THRESHOLD = 3
FLAP_WINDOW_HOURS = 1
# Read sleep window from settings; fall back to 30 min if not yet configured.
# Uses the same sql cursor (no separate connection) to avoid lock contention.
# Note: changing NTFPRCS_sleep_time requires a restart to take effect,
# same behaviour as FLAP_THRESHOLD / FLAP_WINDOW_HOURS.
try:
sql.execute("SELECT setValue FROM Settings WHERE setKey = 'NTFPRCS_sleep_time'")
_sleep_row = sql.fetchone()
SLEEP_MINUTES = int(_sleep_row[0]) if _sleep_row and _sleep_row[0] else 30
except Exception:
SLEEP_MINUTES = 30
sql.execute(""" DROP VIEW IF EXISTS DevicesView;""")
sql.execute(f""" CREATE VIEW DevicesView AS
-- CTE computes devIsSleeping and devFlapping so devStatus can
-- reference them without duplicating the sub-expressions.
WITH base AS (
SELECT
rowid,
LOWER(IFNULL(devMac, '')) AS devMac,
IFNULL(devName, '') AS devName,
IFNULL(devOwner, '') AS devOwner,
IFNULL(devType, '') AS devType,
IFNULL(devVendor, '') AS devVendor,
IFNULL(devFavorite, '') AS devFavorite,
IFNULL(devGroup, '') AS devGroup,
IFNULL(devComments, '') AS devComments,
IFNULL(devFirstConnection, '') AS devFirstConnection,
IFNULL(devLastConnection, '') AS devLastConnection,
IFNULL(devLastIP, '') AS devLastIP,
IFNULL(devPrimaryIPv4, '') AS devPrimaryIPv4,
IFNULL(devPrimaryIPv6, '') AS devPrimaryIPv6,
IFNULL(devVlan, '') AS devVlan,
IFNULL(devForceStatus, '') AS devForceStatus,
IFNULL(devStaticIP, '') AS devStaticIP,
IFNULL(devScan, '') AS devScan,
IFNULL(devLogEvents, '') AS devLogEvents,
IFNULL(devAlertEvents, '') AS devAlertEvents,
IFNULL(devAlertDown, 0) AS devAlertDown,
IFNULL(devCanSleep, 0) AS devCanSleep,
IFNULL(devSkipRepeated, '') AS devSkipRepeated,
IFNULL(devLastNotification, '') AS devLastNotification,
IFNULL(devPresentLastScan, 0) AS devPresentLastScan,
IFNULL(devIsNew, '') AS devIsNew,
IFNULL(devLocation, '') AS devLocation,
IFNULL(devIsArchived, '') AS devIsArchived,
LOWER(IFNULL(devParentMAC, '')) AS devParentMAC,
IFNULL(devParentPort, '') AS devParentPort,
IFNULL(devIcon, '') AS devIcon,
IFNULL(devGUID, '') AS devGUID,
IFNULL(devSite, '') AS devSite,
IFNULL(devSSID, '') AS devSSID,
IFNULL(devSyncHubNode, '') AS devSyncHubNode,
IFNULL(devSourcePlugin, '') AS devSourcePlugin,
IFNULL(devCustomProps, '') AS devCustomProps,
IFNULL(devFQDN, '') AS devFQDN,
IFNULL(devParentRelType, '') AS devParentRelType,
IFNULL(devReqNicsOnline, '') AS devReqNicsOnline,
IFNULL(devMacSource, '') AS devMacSource,
IFNULL(devNameSource, '') AS devNameSource,
IFNULL(devFQDNSource, '') AS devFQDNSource,
IFNULL(devLastIPSource, '') AS devLastIPSource,
IFNULL(devVendorSource, '') AS devVendorSource,
IFNULL(devSSIDSource, '') AS devSSIDSource,
IFNULL(devParentMACSource, '') AS devParentMACSource,
IFNULL(devParentPortSource, '') AS devParentPortSource,
IFNULL(devParentRelTypeSource, '') AS devParentRelTypeSource,
IFNULL(devVlanSource, '') AS devVlanSource,
-- devIsSleeping: opted-in, absent, and still within the sleep window
CASE
WHEN devCanSleep = 1
AND devPresentLastScan = 0
AND devLastConnection >= datetime('now', '-{SLEEP_MINUTES} minutes')
THEN 1
ELSE 0
END AS devIsSleeping,
-- devFlapping: toggling online/offline frequently within the flap window
CASE
WHEN EXISTS (
SELECT 1
FROM Events e
WHERE LOWER(e.eve_MAC) = LOWER(Devices.devMac)
AND e.eve_EventType IN ('Connected','Disconnected','Device Down','Down Reconnected')
AND e.eve_DateTime >= datetime('now', '-{FLAP_WINDOW_HOURS} hours')
GROUP BY e.eve_MAC
HAVING COUNT(*) >= {FLAP_THRESHOLD}
)
THEN 1
ELSE 0
END AS devFlapping
FROM Devices
)
SELECT *,
-- devStatus references devIsSleeping from the CTE (no duplication)
CASE
WHEN devIsNew = 1 THEN 'New'
WHEN devPresentLastScan = 1 THEN 'On-line'
WHEN devIsSleeping = 1 THEN 'Sleeping'
WHEN devAlertDown != 0 THEN 'Down'
WHEN devIsArchived = 1 THEN 'Archived'
WHEN devPresentLastScan = 0 THEN 'Off-line'
ELSE 'Unknown status'
END AS devStatus
FROM base
""")
return True
@@ -300,6 +410,10 @@ def ensure_Indexes(sql) -> bool:
"idx_dev_alertdown",
"CREATE INDEX idx_dev_alertdown ON Devices(devAlertDown)",
),
(
"idx_dev_cansleep",
"CREATE INDEX idx_dev_cansleep ON Devices(devCanSleep)",
),
("idx_dev_isnew", "CREATE INDEX idx_dev_isnew ON Devices(devIsNew)"),
(
"idx_dev_isarchived",
@@ -324,6 +438,14 @@ def ensure_Indexes(sql) -> bool:
"idx_plugins_plugin_mac_ip",
"CREATE INDEX idx_plugins_plugin_mac_ip ON Plugins_Objects(Plugin, Object_PrimaryID, Object_SecondaryID)",
), # Issue #1251: Optimize name resolution lookup
# Plugins_History: covers both the db_cleanup window function
# (PARTITION BY Plugin ORDER BY DateTimeChanged DESC) and the
# API query (SELECT * … ORDER BY DateTimeChanged DESC).
# Without this, both ops do a full 48k-row table sort on every cycle.
(
"idx_plugins_history_plugin_dt",
"CREATE INDEX idx_plugins_history_plugin_dt ON Plugins_History(Plugin, DateTimeChanged DESC)",
),
]
for name, create_sql in indexes:

View File

@@ -10,9 +10,10 @@ import uuid
# Register NetAlertX libraries
import conf
from const import fullConfPath, fullConfFolder, default_tz
from const import fullConfPath, fullConfFolder, default_tz, applicationPath
from db.db_upgrade import ensure_views
from helper import getBuildTimeStampAndVersion, collect_lang_strings, updateSubnets, generate_random_string
from utils.datetime_utils import timeNowUTC
from utils.datetime_utils import timeNowUTC, ensure_future_datetime
from app_state import updateState
from logger import mylog
from api import update_api
@@ -21,6 +22,31 @@ from plugin import plugin_manager, print_plugin_info
from utils.plugin_utils import get_plugins_configs, get_set_value_for_init
from messaging.in_app import write_notification
# ===============================================================================
# Language helpers
# ===============================================================================
_LANGUAGES_JSON = os.path.join(
applicationPath, "front", "php", "templates", "language", "language_definitions" ,"languages.json"
)
def _load_language_display_names():
    """Return a JSON-serialised list of display names from languages.json.

    Falls back to a hardcoded English-only list on any error so that
    the settings page is never broken by a missing/corrupt file.
    """
    try:
        with open(_LANGUAGES_JSON, "r", encoding="utf-8") as handle:
            registry = json.load(handle)
        display_names = [entry["display"] for entry in registry["languages"]]
        return json.dumps(display_names)
    except Exception as e:
        # Deliberately broad catch: any failure (missing file, bad JSON,
        # missing keys) must degrade to the fallback, never crash startup.
        mylog("none", [f"[languages] Failed to load languages.json, using fallback: {e}"])
        return '["English (en_us)"]'
# ===============================================================================
# Initialise user defined values
# ===============================================================================
@@ -316,6 +342,15 @@ def importConfigs(pm, db, all_plugins):
"[]",
"General",
)
conf.PRAGMA_JOURNAL_SIZE_LIMIT = ccd(
"PRAGMA_JOURNAL_SIZE_LIMIT",
50,
c_d,
"WAL size limit (MB)",
'{"dataType":"integer", "elements": [{"elementType" : "input", "elementOptions" : [{"type": "number"}] ,"transformers": []}]}',
"[]",
"General",
)
conf.REFRESH_FQDN = ccd(
"REFRESH_FQDN",
False,
@@ -401,7 +436,7 @@ def importConfigs(pm, db, all_plugins):
c_d,
"Language Interface",
'{"dataType":"string", "elements": [{"elementType" : "select", "elementOptions" : [] ,"transformers": []}]}',
"['English (en_us)', 'Arabic (ar_ar)', 'Catalan (ca_ca)', 'Czech (cs_cz)', 'German (de_de)', 'Spanish (es_es)', 'Farsi (fa_fa)', 'French (fr_fr)', 'Italian (it_it)', 'Japanese (ja_jp)', 'Norwegian (nb_no)', 'Polish (pl_pl)', 'Portuguese (pt_br)', 'Portuguese (pt_pt)', 'Russian (ru_ru)', 'Swedish (sv_sv)', 'Turkish (tr_tr)', 'Ukrainian (uk_ua)', 'Vietnamese (vi_vn)', 'Chinese (zh_cn)']", # noqa: E501 - inline JSON
_load_language_display_names(), # derived from languages.json
"UI",
)
@@ -647,9 +682,12 @@ def importConfigs(pm, db, all_plugins):
newSchedule = Cron(run_sch).schedule(
start_date=timeNowUTC(as_string=False)
)
# Get initial next schedule time, ensuring it's in the future
next_schedule_time = ensure_future_datetime(newSchedule, timeNowUTC(as_string=False))
conf.mySchedules.append(
schedule_class(
plugin["unique_prefix"], newSchedule, newSchedule.next(), False
plugin["unique_prefix"], newSchedule, next_schedule_time, False
)
)
@@ -680,7 +718,7 @@ def importConfigs(pm, db, all_plugins):
<li> Clear app cache with the <i class="fa-solid fa-rotate"></i> (reload) button in the header</li>\
<li>Go to Settings and click Save</li> </ol>\
Check out new features and what has changed in the \
<a href="https://github.com/jokob-sk/NetAlertX/releases" target="_blank">📓 release notes</a>.""",
<a href="https://github.com/netalertx/NetAlertX/releases" target="_blank">📓 release notes</a>.""",
'interrupt',
timeNowUTC()
)
@@ -699,6 +737,12 @@ def importConfigs(pm, db, all_plugins):
db.commitDB()
# Rebuild DevicesView now that settings (including NTFPRCS_sleep_time) are committed.
# This is the single call site — initDB() deliberately skips it so the view
# always gets the real user value, not an empty-Settings fallback.
ensure_views(sql)
db.commitDB()
# update only the settings datasource
update_api(db, all_plugins, True, ["settings"])

View File

@@ -126,6 +126,8 @@ def get_notifications(db):
AND eve_MAC IN (SELECT devMac FROM Devices WHERE devAlertDown = 0)
""")
alert_down_minutes = int(get_setting_value("NTFPRCS_alert_down_time") or 0)
sections = get_setting_value("NTFPRCS_INCLUDED_SECTIONS") or []
mylog("verbose", ["[Notification] Included sections: ", sections])
@@ -155,101 +157,8 @@ def get_notifications(db):
return ""
<<<<<<< Updated upstream
# -------------------------
# SQL templates
# -------------------------
sql_templates = {
"new_devices": """
SELECT
eve_MAC as MAC,
eve_DateTime as Datetime,
devLastIP as IP,
eve_EventType as "Event Type",
devName as "Device name",
devComments as Comments
FROM Events_Devices
WHERE eve_PendingAlertEmail = 1
AND eve_EventType = 'New Device' {condition}
ORDER BY eve_DateTime
""",
"down_devices": """
SELECT
devName,
eve_MAC,
devVendor,
eve_IP,
eve_DateTime,
eve_EventType
FROM Events_Devices AS down_events
WHERE eve_PendingAlertEmail = 1
AND down_events.eve_EventType = 'Device Down'
AND eve_DateTime < datetime('now', '-0 minutes')
AND NOT EXISTS (
SELECT 1
FROM Events AS connected_events
WHERE connected_events.eve_MAC = down_events.eve_MAC
AND connected_events.eve_EventType = 'Connected'
AND connected_events.eve_DateTime > down_events.eve_DateTime
)
ORDER BY down_events.eve_DateTime
""",
"down_reconnected": """
SELECT
devName,
eve_MAC,
devVendor,
eve_IP,
eve_DateTime,
eve_EventType
FROM Events_Devices AS reconnected_devices
WHERE reconnected_devices.eve_EventType = 'Down Reconnected'
AND reconnected_devices.eve_PendingAlertEmail = 1
ORDER BY reconnected_devices.eve_DateTime
""",
"events": """
SELECT
eve_MAC as MAC,
eve_DateTime as Datetime,
devLastIP as IP,
eve_EventType as "Event Type",
devName as "Device name",
devComments as Comments
FROM Events_Devices
WHERE eve_PendingAlertEmail = 1
AND eve_EventType IN ('Connected', 'Down Reconnected', 'Disconnected','IP Changed') {condition}
ORDER BY eve_DateTime
""",
"plugins": """
SELECT
Plugin,
Object_PrimaryId,
Object_SecondaryId,
DateTimeChanged,
Watched_Value1,
Watched_Value2,
Watched_Value3,
Watched_Value4,
Status
FROM Plugins_Events
"""
}
# Titles for metadata
section_titles = {
"new_devices": "🆕 New devices",
"down_devices": "🔴 Down devices",
"down_reconnected": "🔁 Reconnected down devices",
"events": "⚡ Events",
"plugins": "🔌 Plugins"
}
# Sections that support dynamic conditions
sections_with_conditions = {"new_devices", "events"}
=======
# SQL templates with placeholders for runtime values
# {condition} and {alert_down_minutes} are formatted at query time
>>>>>>> Stashed changes
# Initialize final structure
final_json = {}

View File

@@ -61,8 +61,8 @@ class DeviceInstance:
def getDown(self):
return self._fetchall("""
SELECT * FROM Devices
WHERE devAlertDown = 1 AND devPresentLastScan = 0
SELECT * FROM DevicesView
WHERE devAlertDown = 1 AND devPresentLastScan = 0 AND devIsSleeping = 0
""")
def getOffline(self):
@@ -338,7 +338,7 @@ class DeviceInstance:
for key, condition in conditions.items():
# Make sure the alias is SQL-safe (no spaces or special chars)
alias = key.replace(" ", "_").lower()
sub_queries.append(f'(SELECT COUNT(*) FROM Devices {condition}) AS "{alias}"')
sub_queries.append(f'(SELECT COUNT(*) FROM DevicesView {condition}) AS "{alias}"')
# Join all sub-selects with commas
query = "SELECT\n " + ",\n ".join(sub_queries)
@@ -360,7 +360,7 @@ class DeviceInstance:
for key, condition in conditions.items():
# Make sure the alias is SQL-safe (no spaces or special chars)
alias = key.replace(" ", "_").lower()
sub_queries.append(f'(SELECT COUNT(*) FROM Devices {condition}) AS "{alias}"')
sub_queries.append(f'(SELECT COUNT(*) FROM DevicesView {condition}) AS "{alias}"')
# Join all sub-selects with commas
query = "SELECT\n " + ",\n ".join(sub_queries)
@@ -381,7 +381,8 @@ class DeviceInstance:
# Build condition for SQL
condition = get_device_condition_by_status(status) if status else ""
query = f"SELECT * FROM Devices {condition}"
# Only DevicesView has devFlapping
query = f"SELECT * FROM DevicesView {condition}"
sql.execute(query)
table_data = []
@@ -453,7 +454,9 @@ class DeviceInstance:
"devPresenceHours": 0,
"devFQDN": "",
"devForceStatus" : "dont_force",
"devVlan": ""
"devVlan": "",
"devCanSleep": 0,
"devIsSleeping": 0
}
return device_data
@@ -462,44 +465,41 @@ class DeviceInstance:
# Fetch device info + computed fields
sql = f"""
SELECT
d.*,
CASE
WHEN d.devAlertDown != 0 AND d.devPresentLastScan = 0 THEN 'Down'
WHEN d.devPresentLastScan = 1 THEN 'On-line'
ELSE 'Off-line'
END AS devStatus,
SELECT
d.*,
LOWER(d.devMac) AS devMac,
LOWER(d.devParentMAC) AS devParentMAC,
(SELECT COUNT(*) FROM Sessions
WHERE ses_MAC = d.devMac AND (
ses_DateTimeConnection >= {period_date_sql} OR
ses_DateTimeDisconnection >= {period_date_sql} OR
ses_StillConnected = 1
)) AS devSessions,
(SELECT COUNT(*) FROM Sessions
WHERE LOWER(ses_MAC) = LOWER(d.devMac) AND (
ses_DateTimeConnection >= {period_date_sql} OR
ses_DateTimeDisconnection >= {period_date_sql} OR
ses_StillConnected = 1
)) AS devSessions,
(SELECT COUNT(*) FROM Events
WHERE eve_MAC = d.devMac AND eve_DateTime >= {period_date_sql}
AND eve_EventType NOT IN ('Connected','Disconnected')) AS devEvents,
(SELECT COUNT(*) FROM Events
WHERE LOWER(eve_MAC) = LOWER(d.devMac) AND eve_DateTime >= {period_date_sql}
AND eve_EventType NOT IN ('Connected','Disconnected')) AS devEvents,
(SELECT COUNT(*) FROM Events
WHERE eve_MAC = d.devMac AND eve_DateTime >= {period_date_sql}
AND eve_EventType = 'Device Down') AS devDownAlerts,
(SELECT COUNT(*) FROM Events
WHERE LOWER(eve_MAC) = LOWER(d.devMac) AND eve_DateTime >= {period_date_sql}
AND eve_EventType = 'Device Down') AS devDownAlerts,
(SELECT CAST(MAX(0, SUM(
julianday(IFNULL(ses_DateTimeDisconnection,'{now}')) -
julianday(CASE WHEN ses_DateTimeConnection < {period_date_sql}
THEN {period_date_sql} ELSE ses_DateTimeConnection END)
) * 24) AS INT)
FROM Sessions
WHERE ses_MAC = d.devMac
AND ses_DateTimeConnection IS NOT NULL
AND (ses_DateTimeDisconnection IS NOT NULL OR ses_StillConnected = 1)
AND (ses_DateTimeConnection >= {period_date_sql}
OR ses_DateTimeDisconnection >= {period_date_sql} OR ses_StillConnected = 1)
) AS devPresenceHours
(SELECT CAST(MAX(0, SUM(
julianday(IFNULL(ses_DateTimeDisconnection,'{now}')) -
julianday(CASE WHEN ses_DateTimeConnection < {period_date_sql}
THEN {period_date_sql} ELSE ses_DateTimeConnection END)
) * 24) AS INT)
FROM Sessions
WHERE LOWER(ses_MAC) = LOWER(d.devMac)
AND ses_DateTimeConnection IS NOT NULL
AND (ses_DateTimeDisconnection IS NOT NULL OR ses_StillConnected = 1)
AND (ses_DateTimeConnection >= {period_date_sql}
OR ses_DateTimeDisconnection >= {period_date_sql} OR ses_StillConnected = 1)
) AS devPresenceHours
FROM Devices d
WHERE d.devMac = ? OR CAST(d.rowid AS TEXT) = ?
FROM DevicesView d
WHERE LOWER(d.devMac) = LOWER(?) OR CAST(d.rowid AS TEXT) = ?
"""
conn = get_temp_db_connection()
@@ -567,7 +567,8 @@ class DeviceInstance:
"devIsArchived",
"devCustomProps",
"devForceStatus",
"devVlan"
"devVlan",
"devCanSleep"
}
# Only mark USER for tracked fields that this method actually updates.
@@ -613,12 +614,12 @@ class DeviceInstance:
devMac, devName, devOwner, devType, devVendor, devIcon,
devFavorite, devGroup, devLocation, devComments,
devParentMAC, devParentPort, devSSID, devSite,
devStaticIP, devScan, devAlertEvents, devAlertDown,
devStaticIP, devScan, devAlertEvents, devAlertDown, devCanSleep,
devParentRelType, devReqNicsOnline, devSkipRepeated,
devIsNew, devIsArchived, devLastConnection,
devFirstConnection, devLastIP, devGUID, devCustomProps,
devSourcePlugin, devForceStatus, devVlan
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
"""
values = (
@@ -640,6 +641,7 @@ class DeviceInstance:
data.get("devScan") or 0,
data.get("devAlertEvents") or 0,
data.get("devAlertDown") or 0,
data.get("devCanSleep") or 0,
data.get("devParentRelType") or "default",
data.get("devReqNicsOnline") or 0,
data.get("devSkipRepeated") or 0,
@@ -661,7 +663,7 @@ class DeviceInstance:
devName=?, devOwner=?, devType=?, devVendor=?, devIcon=?,
devFavorite=?, devGroup=?, devLocation=?, devComments=?,
devParentMAC=?, devParentPort=?, devSSID=?, devSite=?,
devStaticIP=?, devScan=?, devAlertEvents=?, devAlertDown=?,
devStaticIP=?, devScan=?, devAlertEvents=?, devAlertDown=?, devCanSleep=?,
devParentRelType=?, devReqNicsOnline=?, devSkipRepeated=?,
devIsNew=?, devIsArchived=?, devCustomProps=?, devForceStatus=?, devVlan=?
WHERE devMac=?
@@ -684,6 +686,7 @@ class DeviceInstance:
data.get("devScan") or 0,
data.get("devAlertEvents") or 0,
data.get("devAlertDown") or 0,
data.get("devCanSleep") or 0,
data.get("devParentRelType") or "default",
data.get("devReqNicsOnline") or 0,
data.get("devSkipRepeated") or 0,
@@ -817,9 +820,9 @@ class DeviceInstance:
conn = get_temp_db_connection()
cur = conn.cursor()
# Build safe SQL with column name
sql = f"UPDATE Devices SET {column_name}=? WHERE devMac=?"
cur.execute(sql, (column_value, mac))
# Convert the MAC to lowercase for comparison
sql = f"UPDATE Devices SET {column_name}=? WHERE LOWER(devMac)=?"
cur.execute(sql, (column_value, mac.lower()))
conn.commit()
if cur.rowcount > 0:

View File

@@ -218,3 +218,50 @@ class EventInstance:
# Return as list
return [row[0], row[1], row[2], row[3], row[4], row[5]]
def get_unstable_devices(self, hours: int = 1, threshold: int = 3, macs_only: bool = True):
    """
    Return unstable devices based on flap detection.

    A device is considered unstable if it has >= threshold events within the last `hours`.
    Events considered:
      - Connected
      - Disconnected
      - Device Down
      - Down Reconnected

    Args:
        hours (int): Time window in hours (default: 1)
        threshold (int): Minimum number of events to be considered unstable (default: 3)
        macs_only (bool): If True, return only MAC addresses (set). Otherwise return full rows.

    Returns:
        set[str] OR list[dict]
    """
    if hours <= 0 or threshold <= 0:
        mylog("warn", f"[Events] get_unstable_devices invalid params: hours={hours}, threshold={threshold}")
        return set() if macs_only else []

    sql = """
        SELECT eve_MAC, COUNT(*) as event_count
        FROM Events
        WHERE eve_EventType IN ('Connected','Disconnected','Device Down','Down Reconnected')
          AND eve_DateTime >= datetime('now', ?)
        GROUP BY eve_MAC
        HAVING COUNT(*) >= ?
    """

    # SQLite expects "-1 hours" format
    window = f"-{hours} hours"

    conn = self._conn()
    try:
        rows = conn.execute(sql, (window, threshold)).fetchall()
    finally:
        # Always release the connection, even if the query raises —
        # the original closed it only on the success path.
        conn.close()

    if macs_only:
        return {row["eve_MAC"] for row in rows}
    return [dict(row) for row in rows]

View File

@@ -1,4 +1,5 @@
import json
import re
import uuid
import socket
from yattag import indent
@@ -307,8 +308,16 @@ def construct_notifications(JSON, section):
build_direction = "TOP_TO_BOTTOM"
text_line = "{}\t{}\n"
# Read template settings
show_headers = get_setting_value("NTFPRCS_TEXT_SECTION_HEADERS")
if show_headers is None or show_headers == "":
show_headers = True
text_template = get_setting_value(f"NTFPRCS_TEXT_TEMPLATE_{section}") or ""
if len(jsn) > 0:
text = tableTitle + "\n---------\n"
# Section header (text)
if show_headers:
text = tableTitle + "\n---------\n"
# Convert a JSON into an HTML table
html = convert(
@@ -325,13 +334,24 @@ def construct_notifications(JSON, section):
)
# prepare text-only message
for device in jsn:
for header in headers:
padding = ""
if len(header) < 4:
padding = "\t"
text += text_line.format(header + ": " + padding, device[header])
text += "\n"
if text_template:
# Custom template: replace {FieldName} placeholders per device
for device in jsn:
line = re.sub(
r'\{(.+?)\}',
lambda m: str(device.get(m.group(1), m.group(0))),
text_template,
)
text += line + "\n"
else:
# Legacy fallback: vertical Header: Value list
for device in jsn:
for header in headers:
padding = ""
if len(header) < 4:
padding = "\t"
text += text_line.format(header + ": " + padding, device[header])
text += "\n"
# Format HTML table headers
for header in headers:

View File

@@ -49,7 +49,7 @@ class PluginObjectInstance:
"SELECT * FROM Plugins_Objects WHERE Plugin = ?", (plugin,)
)
def getLastNCreatedPerPLugin(self, plugin, entries=1):
def getLastNCreatedPerPlugin(self, plugin, entries=1):
return self._fetchall(
"""
SELECT *

View File

@@ -533,8 +533,12 @@ def print_scan_stats(db):
SELECT
(SELECT COUNT(*) FROM CurrentScan) AS devices_detected,
(SELECT COUNT(*) FROM CurrentScan WHERE NOT EXISTS (SELECT 1 FROM Devices WHERE devMac = scanMac)) AS new_devices,
(SELECT COUNT(*) FROM Devices WHERE devAlertDown != 0 AND NOT EXISTS (SELECT 1 FROM CurrentScan WHERE devMac = scanMac)) AS down_alerts,
(SELECT COUNT(*) FROM Devices WHERE devAlertDown != 0 AND devPresentLastScan = 1 AND NOT EXISTS (SELECT 1 FROM CurrentScan WHERE devMac = scanMac)) AS new_down_alerts,
(SELECT COUNT(*) FROM DevicesView WHERE devAlertDown != 0 AND devIsSleeping = 0 AND NOT EXISTS (SELECT 1 FROM CurrentScan WHERE devMac = scanMac)) AS down_alerts,
(SELECT COUNT(*) FROM DevicesView
WHERE devAlertDown != 0 AND devCanSleep = 0
AND devPresentLastScan = 1
AND NOT EXISTS (SELECT 1 FROM CurrentScan WHERE devMac = scanMac)
) AS new_down_alerts,
(SELECT COUNT(*) FROM Devices WHERE devPresentLastScan = 0) AS new_connections,
(SELECT COUNT(*) FROM Devices WHERE devPresentLastScan = 1 AND NOT EXISTS (SELECT 1 FROM CurrentScan WHERE devMac = scanMac)) AS disconnections,
(SELECT COUNT(*) FROM Devices, CurrentScan

View File

@@ -169,19 +169,39 @@ def insert_events(db):
sql = db.sql # TO-DO
startTime = timeNowUTC()
# Check device down
mylog("debug", "[Events] - 1 - Devices down")
# Check device down non-sleeping devices (immediate on first absence)
mylog("debug", "[Events] - 1a - Devices down (non-sleeping)")
sql.execute(f"""INSERT OR IGNORE INTO Events (eve_MAC, eve_IP, eve_DateTime,
eve_EventType, eve_AdditionalInfo,
eve_PendingAlertEmail)
SELECT devMac, devLastIP, '{startTime}', 'Device Down', '', 1
FROM Devices
FROM DevicesView
WHERE devAlertDown != 0
AND devCanSleep = 0
AND devPresentLastScan = 1
AND NOT EXISTS (SELECT 1 FROM CurrentScan
WHERE devMac = scanMac
) """)
# Check device down sleeping devices whose sleep window has expired
mylog("debug", "[Events] - 1b - Devices down (sleep expired)")
sql.execute(f"""INSERT OR IGNORE INTO Events (eve_MAC, eve_IP, eve_DateTime,
eve_EventType, eve_AdditionalInfo,
eve_PendingAlertEmail)
SELECT devMac, devLastIP, '{startTime}', 'Device Down', '', 1
FROM DevicesView
WHERE devAlertDown != 0
AND devCanSleep = 1
AND devIsSleeping = 0
AND devPresentLastScan = 0
AND NOT EXISTS (SELECT 1 FROM CurrentScan
WHERE devMac = scanMac)
AND NOT EXISTS (SELECT 1 FROM Events
WHERE eve_MAC = devMac
AND eve_EventType = 'Device Down'
AND eve_DateTime >= devLastConnection
) """)
# Check new Connections or Down Reconnections
mylog("debug", "[Events] - 2 - New Connections")
sql.execute(f""" INSERT OR IGNORE INTO Events (eve_MAC, eve_IP, eve_DateTime,
@@ -242,8 +262,8 @@ def insertOnlineHistory(db):
COUNT(*) AS allDevices,
COALESCE(SUM(CASE WHEN devIsArchived = 1 THEN 1 ELSE 0 END), 0) AS archivedDevices,
COALESCE(SUM(CASE WHEN devPresentLastScan = 1 THEN 1 ELSE 0 END), 0) AS onlineDevices,
COALESCE(SUM(CASE WHEN devPresentLastScan = 0 AND devAlertDown = 1 THEN 1 ELSE 0 END), 0) AS downDevices
FROM Devices
COALESCE(SUM(CASE WHEN devPresentLastScan = 0 AND devAlertDown = 1 AND devIsSleeping = 0 THEN 1 ELSE 0 END), 0) AS downDevices
FROM DevicesView
"""
deviceCounts = db.read(query)[

View File

@@ -3,7 +3,7 @@
import datetime
from logger import mylog
from utils.datetime_utils import timeNowTZ
from utils.datetime_utils import timeNowTZ, ensure_future_datetime
# -------------------------------------------------------------------------------
@@ -48,6 +48,7 @@ class schedule_class:
if self.was_last_schedule_used:
self.was_last_schedule_used = False
self.last_next_schedule = self.scheduleObject.next()
# Get the next scheduled time, ensuring it's in the future
self.last_next_schedule = ensure_future_datetime(self.scheduleObject, timeNowTZ(as_string=False))
return result

View File

@@ -88,6 +88,61 @@ def get_timezone_offset():
# Date and time methods
# -------------------------------------------------------------------------------
def is_datetime_future(dt, current_threshold=None):
    """
    Return True when *dt* lies strictly after *current_threshold*.

    Utility for validating that a datetime hasn't already passed, e.g. after
    retrieving pre-computed schedule times.

    Args:
        dt: datetime.datetime object to validate (None is treated as "not future")
        current_threshold: datetime to compare against. If None, uses timeNowUTC(as_string=False)

    Returns:
        bool: True if dt is strictly in the future, False otherwise

    Examples:
        if is_datetime_future(next_scan_dt):
            broadcast_to_frontend(next_scan_dt)
    """
    if dt is None:
        return False
    threshold = timeNowUTC(as_string=False) if current_threshold is None else current_threshold
    return dt > threshold
def ensure_future_datetime(schedule_obj, current_threshold=None):
    """
    Return the first result of schedule_obj.next() that is strictly in the future.

    Repeatedly advances the schedule until a datetime later than the threshold
    is produced — never raises on past times.

    Args:
        schedule_obj: A schedule object with a .next() method (e.g., from croniter/APScheduler)
        current_threshold: datetime to compare against. If None, uses timeNowTZ(as_string=False)

    Returns:
        datetime.datetime: A guaranteed future datetime from schedule_obj.next()

    Examples:
        newSchedule = Cron(run_sch).schedule(start_date=timeNowUTC(as_string=False))
        next_time = ensure_future_datetime(newSchedule)
    """
    threshold = current_threshold if current_threshold is not None else timeNowTZ(as_string=False)
    while True:
        candidate = schedule_obj.next()
        if candidate > threshold:
            return candidate
def normalizeTimeStamp(inputTimeStamp):
"""
Normalize various timestamp formats into a datetime.datetime object.