Merge branch 'chore_timestamps' of https://github.com/netalertx/NetAlertX into chore_timestamps

This commit is contained in:
Jokob @NetAlertX
2026-02-17 23:17:05 +00:00
60 changed files with 1575 additions and 2163 deletions

View File

@@ -56,7 +56,7 @@ default_tz = "Europe/Berlin"
NULL_EQUIVALENTS = ["", "null", "(unknown)", "(Unknown)", "(name not found)"]
# Convert list to SQL string: wrap each value in single quotes and escape single quotes if needed
NULL_EQUIVALENTS_SQL = ",".join(f"'{v.replace('\'', '\'\'')}'" for v in NULL_EQUIVALENTS)
NULL_EQUIVALENTS_SQL = ",".join("'" + v.replace("'", "''") + "'" for v in NULL_EQUIVALENTS)
# ===============================================================================

View File

@@ -12,8 +12,9 @@ from const import NULL_EQUIVALENTS_SQL # noqa: E402 [flake8 lint suppression]
def get_device_conditions():
network_dev_types = ",".join(f"'{v.replace('\'', '\'\'')}'" for v in get_setting_value("NETWORK_DEVICE_TYPES"))
network_dev_types = ",".join("'" + v.replace("'", "''") + "'" for v in get_setting_value("NETWORK_DEVICE_TYPES"))
# DO NOT CHANGE ORDER
conditions = {
"all": "WHERE devIsArchived=0",
"my": "WHERE devIsArchived=0",
@@ -27,6 +28,7 @@ def get_device_conditions():
"network_devices_down": f"WHERE devIsArchived=0 AND devType in ({network_dev_types}) AND devPresentLastScan=0",
"unknown": f"WHERE devIsArchived=0 AND devName in ({NULL_EQUIVALENTS_SQL})",
"known": f"WHERE devIsArchived=0 AND devName not in ({NULL_EQUIVALENTS_SQL})",
"favorites_offline": "WHERE devIsArchived=0 AND devFavorite=1 AND devPresentLastScan=0",
}
return conditions

View File

@@ -1,10 +1,6 @@
import sys
import os
# Register NetAlertX directories
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
sys.path.extend([f"{INSTALL_PATH}/server"])
import conf
from zoneinfo import ZoneInfo
import datetime as dt
from logger import mylog # noqa: E402 [flake8 lint suppression]
from messaging.in_app import write_notification # noqa: E402 [flake8 lint suppression]
@@ -246,6 +242,23 @@ def ensure_Indexes(sql) -> bool:
Parameters:
- sql: database cursor or connection wrapper (must support execute()).
"""
# Remove after 12/12/2026 - prevents idx_events_unique from failing - dedupe
clean_duplicate_events = """
DELETE FROM Events
WHERE rowid NOT IN (
SELECT MIN(rowid)
FROM Events
GROUP BY
eve_MAC,
eve_IP,
eve_EventType,
eve_DateTime
);
"""
sql.execute(clean_duplicate_events)
indexes = [
# Sessions
(
@@ -273,6 +286,10 @@ def ensure_Indexes(sql) -> bool:
"idx_eve_type_date",
"CREATE INDEX idx_eve_type_date ON Events(eve_EventType, eve_DateTime)",
),
(
"idx_events_unique",
"CREATE UNIQUE INDEX idx_events_unique ON Events (eve_MAC, eve_IP, eve_EventType, eve_DateTime)",
),
# Devices
("idx_dev_mac", "CREATE INDEX idx_dev_mac ON Devices(devMac)"),
(
@@ -503,26 +520,25 @@ def ensure_plugins_tables(sql) -> bool:
def is_timestamps_in_utc(sql) -> bool:
"""
Check if existing timestamps in Devices table are already in UTC format.
Strategy:
1. Sample 10 non-NULL devFirstConnection timestamps from Devices
2. For each timestamp, assume it's UTC and calculate what it would be in local time
3. Check if timestamps have a consistent offset pattern (indicating local time storage)
4. If offset is consistently > 0, they're likely local timestamps (need migration)
5. If offset is ~0 or inconsistent, they're likely already UTC (skip migration)
Returns:
bool: True if timestamps appear to be in UTC already, False if they need migration
"""
try:
# Get timezone offset in seconds
import conf
from zoneinfo import ZoneInfo
import datetime as dt
now = dt.datetime.now(dt.UTC).replace(microsecond=0)
current_offset_seconds = 0
try:
if isinstance(conf.tz, dt.tzinfo):
tz = conf.tz
@@ -532,13 +548,13 @@ def is_timestamps_in_utc(sql) -> bool:
tz = None
except Exception:
tz = None
if tz:
local_now = dt.datetime.now(tz).replace(microsecond=0)
local_offset = local_now.utcoffset().total_seconds()
utc_offset = now.utcoffset().total_seconds() if now.utcoffset() else 0
current_offset_seconds = int(local_offset - utc_offset)
# Sample timestamps from Devices table
sql.execute("""
SELECT devFirstConnection, devLastConnection, devLastNotification
@@ -546,27 +562,27 @@ def is_timestamps_in_utc(sql) -> bool:
WHERE devFirstConnection IS NOT NULL
LIMIT 10
""")
samples = []
for row in sql.fetchall():
for ts in row:
if ts:
samples.append(ts)
if not samples:
mylog("verbose", "[db_upgrade] No timestamp samples found in Devices - assuming UTC")
return True # Empty DB, assume UTC
# Parse samples and check if they have timezone info (which would indicate migration already done)
has_tz_marker = any('+' in str(ts) or 'Z' in str(ts) for ts in samples)
if has_tz_marker:
mylog("verbose", "[db_upgrade] Timestamps have timezone markers - already migrated to UTC")
return True
mylog("debug", f"[db_upgrade] Sampled {len(samples)} timestamps. Current TZ offset: {current_offset_seconds}s")
mylog("verbose", "[db_upgrade] Timestamps appear to be in system local time - migration needed")
return False
except Exception as e:
mylog("warn", f"[db_upgrade] Error checking UTC status: {e} - assuming UTC")
return True
@@ -574,63 +590,91 @@ def is_timestamps_in_utc(sql) -> bool:
def migrate_timestamps_to_utc(sql) -> bool:
"""
Migrate all timestamp columns in the database from local time to UTC.
This function determines if migration is needed based on the VERSION setting:
- Fresh installs (no VERSION): Skip migration - timestamps already UTC from timeNowUTC()
- Version >= 26.2.6: Skip migration - already using UTC timestamps
- Version < 26.2.6: Run migration - convert local timestamps to UTC
Affected tables:
- Devices: devFirstConnection, devLastConnection, devLastNotification
- Events: eve_DateTime
- Sessions: ses_DateTimeConnection, ses_DateTimeDisconnection
- Notifications: DateTimeCreated, DateTimePushed
- Online_History: Scan_Date
- Plugins_Objects: DateTimeCreated, DateTimeChanged
- Plugins_Events: DateTimeCreated, DateTimeChanged
- Plugins_History: DateTimeCreated, DateTimeChanged
- AppEvents: DateTimeCreated
Safely migrate timestamp columns from local time to UTC.
Migration rules (fail-safe):
- Default behaviour: RUN migration unless proven safe to skip
- Version > 26.2.6 → timestamps already UTC → skip
- Missing / unknown / unparsable version → migrate
- Migration flag present → skip
- Detection says already UTC → skip
Returns:
bool: True if migration completed or wasn't needed, False on error
bool: True if migration completed or not needed, False on error
"""
try:
import conf
from zoneinfo import ZoneInfo
import datetime as dt
# Check VERSION from Settings table (from previous app run)
sql.execute("SELECT setValue FROM Settings WHERE setKey = 'VERSION'")
# -------------------------------------------------
# Check migration flag (idempotency protection)
# -------------------------------------------------
try:
sql.execute("SELECT setValue FROM Settings WHERE setKey='DB_TIMESTAMPS_UTC_MIGRATED'")
result = sql.fetchone()
if result and str(result[0]) == "1":
mylog("verbose", "[db_upgrade] UTC timestamp migration already completed - skipping")
return True
except Exception:
pass
# -------------------------------------------------
# Read previous version
# -------------------------------------------------
sql.execute("SELECT setValue FROM Settings WHERE setKey='VERSION'")
result = sql.fetchone()
prev_version = result[0] if result else ""
# Fresh install: VERSION is empty → timestamps already UTC from timeNowUTC()
if not prev_version or prev_version == "" or prev_version == "unknown":
mylog("verbose", "[db_upgrade] Fresh install detected - timestamps already in UTC format")
mylog("verbose", f"[db_upgrade] Version '{prev_version}' detected.")
# Default behaviour: migrate unless proven safe
should_migrate = True
# -------------------------------------------------
# Version-based safety check
# -------------------------------------------------
if prev_version and str(prev_version).lower() != "unknown":
try:
version_parts = prev_version.lstrip('v').split('.')
major = int(version_parts[0]) if len(version_parts) > 0 else 0
minor = int(version_parts[1]) if len(version_parts) > 1 else 0
patch = int(version_parts[2]) if len(version_parts) > 2 else 0
# UTC timestamps introduced AFTER v26.2.6
if (major, minor, patch) > (26, 2, 6):
should_migrate = False
mylog(
"verbose",
f"[db_upgrade] Version {prev_version} confirmed UTC timestamps - skipping migration",
)
except (ValueError, IndexError) as e:
mylog(
"warn",
f"[db_upgrade] Could not parse version '{prev_version}': {e} - running migration as safety measure",
)
else:
mylog(
"warn",
"[db_upgrade] VERSION missing/unknown - running migration as safety measure",
)
# -------------------------------------------------
# Detection fallback
# -------------------------------------------------
if should_migrate:
try:
if is_timestamps_in_utc(sql):
mylog(
"verbose",
"[db_upgrade] Timestamps appear already UTC - skipping migration",
)
return True
except Exception as e:
mylog(
"warn",
f"[db_upgrade] UTC detection failed ({e}) - continuing with migration",
)
else:
return True
# Parse version - format: "26.2.6" or "v26.2.6"
try:
version_parts = prev_version.strip('v').split('.')
major = int(version_parts[0]) if len(version_parts) > 0 else 0
minor = int(version_parts[1]) if len(version_parts) > 1 else 0
patch = int(version_parts[2]) if len(version_parts) > 2 else 0
# UTC timestamps introduced in v26.2.6
# If upgrading from 26.2.6 or later, timestamps are already UTC
if (major > 26) or (major == 26 and minor > 2) or (major == 26 and minor == 2 and patch >= 6):
mylog("verbose", f"[db_upgrade] Version {prev_version} already uses UTC timestamps - skipping migration")
return True
mylog("verbose", f"[db_upgrade] Upgrading from {prev_version} (< v26.2.6) - migrating timestamps to UTC")
except (ValueError, IndexError) as e:
mylog("warn", f"[db_upgrade] Could not parse version '{prev_version}': {e} - checking timestamps")
# Fallback: use detection logic
if is_timestamps_in_utc(sql):
mylog("verbose", "[db_upgrade] Timestamps appear to be in UTC - skipping migration")
return True
# Get timezone offset
try:
@@ -642,15 +686,15 @@ def migrate_timestamps_to_utc(sql) -> bool:
tz = None
except Exception:
tz = None
if tz:
now_local = dt.datetime.now(tz)
offset_hours = (now_local.utcoffset().total_seconds()) / 3600
else:
offset_hours = 0
mylog("verbose", f"[db_upgrade] Starting UTC timestamp migration (offset: {offset_hours} hours)")
# List of tables and their datetime columns
timestamp_columns = {
'Devices': ['devFirstConnection', 'devLastConnection', 'devLastNotification'],
@@ -663,7 +707,7 @@ def migrate_timestamps_to_utc(sql) -> bool:
'Plugins_History': ['DateTimeCreated', 'DateTimeChanged'],
'AppEvents': ['DateTimeCreated'],
}
for table, columns in timestamp_columns.items():
try:
# Check if table exists
@@ -671,7 +715,7 @@ def migrate_timestamps_to_utc(sql) -> bool:
if not sql.fetchone():
mylog("debug", f"[db_upgrade] Table '{table}' does not exist - skipping")
continue
for column in columns:
try:
# Update non-NULL timestamps
@@ -691,22 +735,21 @@ def migrate_timestamps_to_utc(sql) -> bool:
SET {column} = DATETIME({column}, '+{abs_hours} hours', '+{abs_mins} minutes')
WHERE {column} IS NOT NULL
""")
row_count = sql.rowcount
if row_count > 0:
mylog("verbose", f"[db_upgrade] Migrated {row_count} timestamps in {table}.{column}")
except Exception as e:
mylog("warn", f"[db_upgrade] Error updating {table}.{column}: {e}")
continue
except Exception as e:
mylog("warn", f"[db_upgrade] Error processing table {table}: {e}")
continue
mylog("none", "[db_upgrade] ✓ UTC timestamp migration completed successfully")
return True
except Exception as e:
mylog("none", f"[db_upgrade] ERROR during timestamp migration: {e}")
return False

View File

@@ -401,7 +401,7 @@ def importConfigs(pm, db, all_plugins):
c_d,
"Language Interface",
'{"dataType":"string", "elements": [{"elementType" : "select", "elementOptions" : [] ,"transformers": []}]}',
"['English (en_us)', 'Arabic (ar_ar)', 'Catalan (ca_ca)', 'Czech (cs_cz)', 'German (de_de)', 'Spanish (es_es)', 'Farsi (fa_fa)', 'French (fr_fr)', 'Italian (it_it)', 'Japanese (ja_jp)', 'Norwegian (nb_no)', 'Polish (pl_pl)', 'Portuguese (pt_br)', 'Portuguese (pt_pt)', 'Russian (ru_ru)', 'Swedish (sv_sv)', 'Turkish (tr_tr)', 'Ukrainian (uk_ua)', 'Chinese (zh_cn)']", # noqa: E501 - inline JSON
"['English (en_us)', 'Arabic (ar_ar)', 'Catalan (ca_ca)', 'Czech (cs_cz)', 'German (de_de)', 'Spanish (es_es)', 'Farsi (fa_fa)', 'French (fr_fr)', 'Italian (it_it)', 'Japanese (ja_jp)', 'Norwegian (nb_no)', 'Polish (pl_pl)', 'Portuguese (pt_br)', 'Portuguese (pt_pt)', 'Russian (ru_ru)', 'Swedish (sv_sv)', 'Turkish (tr_tr)', 'Ukrainian (uk_ua)', 'Vietnamese (vi_vn)', 'Chinese (zh_cn)']", # noqa: E501 - inline JSON
"UI",
)

View File

@@ -9,7 +9,7 @@ import logging
# NetAlertX imports
import conf
from const import logPath
from utils.datetime_utils import timeNowUTC
from utils.datetime_utils import timeNowTZ
DEFAULT_LEVEL = "none"
@@ -124,12 +124,12 @@ def start_log_writer_thread():
# -------------------------------------------------------------------------------
def file_print(*args):
result = timeNowUTC(as_string=False).strftime("%H:%M:%S") + " "
result = timeNowTZ(as_string=False).strftime("%H:%M:%S") + " "
for arg in args:
if isinstance(arg, list):
arg = " ".join(
str(a) for a in arg
) # so taht new lines are handled correctly also when passing a list
) # so that new lines are handled correctly also when passing a list
result += str(arg)
logging.log(custom_to_logging_levels.get(currentLevel, logging.NOTSET), result)

View File

@@ -10,9 +10,10 @@
# cvc90 2023 https://github.com/cvc90 GNU GPLv3 #
# ---------------------------------------------------------------------------------#
import json
import os
import json
import sys
from zoneinfo import ZoneInfo
# Register NetAlertX directories
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
@@ -23,231 +24,237 @@ from helper import ( # noqa: E402 [flake8 lint suppression]
)
from logger import mylog # noqa: E402 [flake8 lint suppression]
from db.sql_safe_builder import create_safe_condition_builder # noqa: E402 [flake8 lint suppression]
from utils.datetime_utils import get_timezone_offset # noqa: E402 [flake8 lint suppression]
from utils.datetime_utils import format_date_iso # noqa: E402 [flake8 lint suppression]
import conf # noqa: E402 [flake8 lint suppression]
# ===============================================================================
# Timezone conversion
# ===============================================================================
# Explicit mapping of notification section name -> datetime columns to convert
# to the configured timezone. Sections not listed here fall back to name-based
# auto-detection via get_datetime_fields_from_columns().
DATETIME_FIELDS = {
    "new_devices": ["Datetime"],
    "down_devices": ["eve_DateTime"],
    "down_reconnected": ["eve_DateTime"],
    "events": ["Datetime"],
    "plugins": ["DateTimeChanged"],
}
def get_datetime_fields_from_columns(column_names):
    """
    Auto-detect datetime-like columns by name.

    A column qualifies when its name contains "date" or "time"
    (case-insensitive). The order of *column_names* is preserved.

    Args:
        column_names: Iterable of column name strings.

    Returns:
        list[str]: The matching column names.
    """
    matches = []
    for name in column_names:
        lowered = name.lower()
        if "date" in lowered or "time" in lowered:
            matches.append(name)
    return matches
def apply_timezone_to_json(json_obj, section=None):
    """
    Convert the datetime fields of a DB JSON result to the configured timezone.

    Uses the explicit per-section field list from DATETIME_FIELDS when
    *section* is known; otherwise auto-detects datetime-like columns by name.

    Args:
        json_obj: DB result wrapper exposing .json["data"] and .columnNames.
        section: Optional notification section name used to look up fields.

    Returns:
        list[dict]: The rows with converted datetime fields.
    """
    rows = json_obj.json["data"]
    fields = DATETIME_FIELDS.get(section)
    if not fields:
        fields = get_datetime_fields_from_columns(json_obj.columnNames)
    return apply_timezone(rows, fields)
def apply_timezone(data, fields):
    """
    Convert UTC datetime fields in a list of row dicts to the configured timezone.

    Delegates the actual conversion to format_date_iso(), which already treats
    naive timestamps as UTC and renders them in the configured timezone — so no
    timezone object needs to be resolved here. (The previous version resolved
    conf.tz via ZoneInfo but never used the result, and an invalid conf.tz
    string would raise and crash the whole function despite the "never crash"
    intent; that dead, crash-prone code is removed.)

    Args:
        data (list[dict]): Rows returned from the DB.
        fields (list[str]): Field names whose values should be converted.

    Returns:
        list[dict]: The same list, with converted fields replaced in place by
        timezone-aware ISO strings. Falsy values, non-dict rows, and values
        that fail to convert are left untouched.
    """
    if not data or not fields:
        return data

    for row in data:
        # Rows are expected to be dicts; skip anything else defensively.
        if not isinstance(row, dict):
            continue
        for field in fields:
            value = row.get(field)
            if not value:
                continue
            try:
                # format_date_iso assumes UTC for naive timestamps.
                row[field] = format_date_iso(value)
            except Exception:
                # Never crash the notification path — keep the original value.
                continue

    return data
# ===============================================================================
# REPORTING
# ===============================================================================
# -------------------------------------------------------------------------------
def get_notifications(db):
sql = db.sql # TO-DO
"""
Fetch notifications for all configured sections, applying timezone conversions.
# Reporting section
mylog("verbose", ["[Notification] Check if something to report"])
Args:
db: Database object with `.sql` for executing queries.
# prepare variables for JSON construction
json_new_devices = []
json_new_devices_meta = {}
json_down_devices = []
json_down_devices_meta = {}
json_down_reconnected = []
json_down_reconnected_meta = {}
json_events = []
json_events_meta = {}
json_plugins = []
json_plugins_meta = {}
Returns:
dict: JSON-ready dict with data and metadata for each section.
"""
sql = db.sql
# Disable reporting on events for devices where reporting is disabled based on the MAC address
mylog("verbose", "[Notification] Check if something to report")
# Disable notifications (except down/down reconnected) on devices where devAlertEvents is disabled
sql.execute("""UPDATE Events SET eve_PendingAlertEmail = 0
WHERE eve_PendingAlertEmail = 1 AND eve_EventType not in ('Device Down', 'Down Reconnected', 'New Device' ) AND eve_MAC IN
(
SELECT devMac FROM Devices WHERE devAlertEvents = 0
)""")
# Disable down/down reconnected notifications on devices where devAlertDown is disabled
sql.execute("""UPDATE Events SET eve_PendingAlertEmail = 0
WHERE eve_PendingAlertEmail = 1 AND eve_EventType in ('Device Down', 'Down Reconnected') AND eve_MAC IN
(
SELECT devMac FROM Devices WHERE devAlertDown = 0
)""")
# Disable events where reporting is disabled
sql.execute("""
UPDATE Events SET eve_PendingAlertEmail = 0
WHERE eve_PendingAlertEmail = 1
AND eve_EventType NOT IN ('Device Down', 'Down Reconnected', 'New Device')
AND eve_MAC IN (SELECT devMac FROM Devices WHERE devAlertEvents = 0)
""")
sql.execute("""
UPDATE Events SET eve_PendingAlertEmail = 0
WHERE eve_PendingAlertEmail = 1
AND eve_EventType IN ('Device Down', 'Down Reconnected')
AND eve_MAC IN (SELECT devMac FROM Devices WHERE devAlertDown = 0)
""")
sections = get_setting_value("NTFPRCS_INCLUDED_SECTIONS")
mylog("verbose", ["[Notification] Included sections: ", sections])
if "new_devices" in sections:
# Compose New Devices Section (no empty lines in SQL queries!)
# Use SafeConditionBuilder to prevent SQL injection vulnerabilities
condition_builder = create_safe_condition_builder()
new_dev_condition_setting = get_setting_value("NTFPRCS_new_dev_condition")
try:
safe_condition, parameters = condition_builder.get_safe_condition_legacy(
new_dev_condition_setting
)
sqlQuery = """SELECT
eve_MAC as MAC,
eve_DateTime as Datetime,
devLastIP as IP,
eve_EventType as "Event Type",
devName as "Device name",
devComments as Comments FROM Events_Devices
WHERE eve_PendingAlertEmail = 1
AND eve_EventType = 'New Device' {}
ORDER BY eve_DateTime""".format(safe_condition)
except (ValueError, KeyError, TypeError) as e:
mylog("verbose", ["[Notification] Error building safe condition for new devices: ", e])
# Fall back to safe default (no additional conditions)
sqlQuery = """SELECT
eve_MAC as MAC,
eve_DateTime as Datetime,
devLastIP as IP,
eve_EventType as "Event Type",
devName as "Device name",
devComments as Comments FROM Events_Devices
WHERE eve_PendingAlertEmail = 1
AND eve_EventType = 'New Device'
ORDER BY eve_DateTime"""
parameters = {}
mylog("debug", ["[Notification] new_devices SQL query: ", sqlQuery])
mylog("debug", ["[Notification] new_devices parameters: ", parameters])
# Get the events as JSON using parameterized query
json_obj = db.get_table_as_json(sqlQuery, parameters)
json_new_devices_meta = {
"title": "🆕 New devices",
"columnNames": json_obj.columnNames,
}
json_new_devices = json_obj.json["data"]
if "down_devices" in sections:
# Compose Devices Down Section
# - select only Down Alerts with pending email of devices that didn't reconnect within the specified time window
minutes = int(get_setting_value("NTFPRCS_alert_down_time") or 0)
tz_offset = get_timezone_offset()
sqlQuery = f"""
SELECT devName, eve_MAC, devVendor, eve_IP, eve_DateTime, eve_EventType
FROM Events_Devices AS down_events
WHERE eve_PendingAlertEmail = 1
AND down_events.eve_EventType = 'Device Down'
AND eve_DateTime < datetime('now', '-{minutes} minutes', '{tz_offset}')
AND NOT EXISTS (
SELECT 1
FROM Events AS connected_events
WHERE connected_events.eve_MAC = down_events.eve_MAC
AND connected_events.eve_EventType = 'Connected'
AND connected_events.eve_DateTime > down_events.eve_DateTime
)
ORDER BY down_events.eve_DateTime;
"""
# Get the events as JSON
json_obj = db.get_table_as_json(sqlQuery)
json_down_devices_meta = {
"title": "🔴 Down devices",
"columnNames": json_obj.columnNames,
}
json_down_devices = json_obj.json["data"]
mylog("debug", f"[Notification] json_down_devices: {json.dumps(json_down_devices)}")
if "down_reconnected" in sections:
# Compose Reconnected Down Section
# - select only Devices, that were previously down and now are Connected
sqlQuery = """
SELECT devName, eve_MAC, devVendor, eve_IP, eve_DateTime, eve_EventType
FROM Events_Devices AS reconnected_devices
WHERE reconnected_devices.eve_EventType = 'Down Reconnected'
AND reconnected_devices.eve_PendingAlertEmail = 1
ORDER BY reconnected_devices.eve_DateTime;
"""
# Get the events as JSON
json_obj = db.get_table_as_json(sqlQuery)
json_down_reconnected_meta = {
"title": "🔁 Reconnected down devices",
"columnNames": json_obj.columnNames,
}
json_down_reconnected = json_obj.json["data"]
mylog("debug", f"[Notification] json_down_reconnected: {json.dumps(json_down_reconnected)}")
if "events" in sections:
# Compose Events Section (no empty lines in SQL queries!)
# Use SafeConditionBuilder to prevent SQL injection vulnerabilities
condition_builder = create_safe_condition_builder()
event_condition_setting = get_setting_value("NTFPRCS_event_condition")
try:
safe_condition, parameters = condition_builder.get_safe_condition_legacy(
event_condition_setting
)
sqlQuery = """SELECT
eve_MAC as MAC,
eve_DateTime as Datetime,
devLastIP as IP,
eve_EventType as "Event Type",
devName as "Device name",
devComments as Comments FROM Events_Devices
WHERE eve_PendingAlertEmail = 1
AND eve_EventType IN ('Connected', 'Down Reconnected', 'Disconnected','IP Changed') {}
ORDER BY eve_DateTime""".format(safe_condition)
except Exception as e:
mylog("verbose", f"[Notification] Error building safe condition for events: {e}")
# Fall back to safe default (no additional conditions)
sqlQuery = """SELECT
eve_MAC as MAC,
eve_DateTime as Datetime,
devLastIP as IP,
eve_EventType as "Event Type",
devName as "Device name",
devComments as Comments FROM Events_Devices
WHERE eve_PendingAlertEmail = 1
AND eve_EventType IN ('Connected', 'Down Reconnected', 'Disconnected','IP Changed')
ORDER BY eve_DateTime"""
parameters = {}
mylog("debug", ["[Notification] events SQL query: ", sqlQuery])
mylog("debug", ["[Notification] events parameters: ", parameters])
# Get the events as JSON using parameterized query
json_obj = db.get_table_as_json(sqlQuery, parameters)
json_events_meta = {"title": "⚡ Events", "columnNames": json_obj.columnNames}
json_events = json_obj.json["data"]
if "plugins" in sections:
# Compose Plugins Section
sqlQuery = """SELECT
Plugin,
Object_PrimaryId,
Object_SecondaryId,
DateTimeChanged,
Watched_Value1,
Watched_Value2,
Watched_Value3,
Watched_Value4,
Status
from Plugins_Events"""
# Get the events as JSON
json_obj = db.get_table_as_json(sqlQuery)
json_plugins_meta = {"title": "🔌 Plugins", "columnNames": json_obj.columnNames}
json_plugins = json_obj.json["data"]
final_json = {
"new_devices": json_new_devices,
"new_devices_meta": json_new_devices_meta,
"down_devices": json_down_devices,
"down_devices_meta": json_down_devices_meta,
"down_reconnected": json_down_reconnected,
"down_reconnected_meta": json_down_reconnected_meta,
"events": json_events,
"events_meta": json_events_meta,
"plugins": json_plugins,
"plugins_meta": json_plugins_meta,
# Define SQL templates per section
sql_templates = {
"new_devices": """
SELECT
eve_MAC as MAC,
eve_DateTime as Datetime,
devLastIP as IP,
eve_EventType as "Event Type",
devName as "Device name",
devComments as Comments
FROM Events_Devices
WHERE eve_PendingAlertEmail = 1 AND eve_EventType = 'New Device' {condition}
ORDER BY eve_DateTime
""",
"down_devices": f"""
SELECT
devName,
eve_MAC,
devVendor,
eve_IP,
eve_DateTime,
eve_EventType
FROM Events_Devices AS down_events
WHERE eve_PendingAlertEmail = 1
AND down_events.eve_EventType = 'Device Down'
AND eve_DateTime < datetime('now', '-{int(get_setting_value("NTFPRCS_alert_down_time") or 0)} minutes')
AND NOT EXISTS (
SELECT 1
FROM Events AS connected_events
WHERE connected_events.eve_MAC = down_events.eve_MAC
AND connected_events.eve_EventType = 'Connected'
AND connected_events.eve_DateTime > down_events.eve_DateTime
)
ORDER BY down_events.eve_DateTime
""",
"down_reconnected": """
SELECT
devName,
eve_MAC,
devVendor,
eve_IP,
eve_DateTime,
eve_EventType
FROM Events_Devices AS reconnected_devices
WHERE reconnected_devices.eve_EventType = 'Down Reconnected'
AND reconnected_devices.eve_PendingAlertEmail = 1
ORDER BY reconnected_devices.eve_DateTime
""",
"events": """
SELECT
eve_MAC as MAC,
eve_DateTime as Datetime,
devLastIP as IP,
eve_EventType as "Event Type",
devName as "Device name",
devComments as Comments
FROM Events_Devices
WHERE eve_PendingAlertEmail = 1
AND eve_EventType IN ('Connected', 'Down Reconnected', 'Disconnected','IP Changed') {condition}
ORDER BY eve_DateTime
""",
"plugins": """
SELECT
Plugin,
Object_PrimaryId,
Object_SecondaryId,
DateTimeChanged,
Watched_Value1,
Watched_Value2,
Watched_Value3,
Watched_Value4,
Status
FROM Plugins_Events
"""
}
# Titles for metadata
section_titles = {
"new_devices": "🆕 New devices",
"down_devices": "🔴 Down devices",
"down_reconnected": "🔁 Reconnected down devices",
"events": "⚡ Events",
"plugins": "🔌 Plugins"
}
final_json = {}
# Pre-initialize final_json with all expected keys
final_json = {}
for section in ["new_devices", "down_devices", "down_reconnected", "events", "plugins"]:
final_json[section] = []
final_json[f"{section}_meta"] = {"title": section_titles.get(section, section), "columnNames": []}
# Loop through each included section
for section in sections:
try:
# Build safe condition for sections that support it
condition_builder = create_safe_condition_builder()
condition_setting = get_setting_value(f"NTFPRCS_{section}_condition")
safe_condition, parameters = condition_builder.get_safe_condition_legacy(condition_setting)
sqlQuery = sql_templates.get(section, "").format(condition=safe_condition)
except Exception:
# Fallback if safe condition fails
sqlQuery = sql_templates.get(section, "").format(condition="")
parameters = {}
mylog("debug", [f"[Notification] {section} SQL query: ", sqlQuery])
mylog("debug", [f"[Notification] {section} parameters: ", parameters])
# Fetch data as JSON
json_obj = db.get_table_as_json(sqlQuery, parameters)
mylog("debug", [f"[Notification] json_obj.json: {json.dumps(json_obj.json)}"])
# Apply timezone conversion
json_obj.json["data"] = apply_timezone_to_json(json_obj, section=section)
# Save data and metadata
final_json[section] = json_obj.json["data"]
final_json[f"{section}_meta"] = {
"title": section_titles.get(section, section),
"columnNames": json_obj.columnNames
}
mylog("debug", [f"[Notification] final_json: {json.dumps(final_json)}"])
return final_json

View File

@@ -536,6 +536,12 @@ class DeviceInstance:
normalized_mac = normalize_mac(mac)
normalized_parent_mac = normalize_mac(data.get("devParentMAC") or "")
if normalized_mac == normalized_parent_mac:
return {
"success": False,
"error": "Can't set current node as the node parent."
}
fields_updated_by_set_device_data = {
"devName",
"devOwner",

View File

@@ -88,7 +88,7 @@ class EventInstance:
def add(self, mac, ip, eventType, info="", pendingAlert=True, pairRow=None):
conn = self._conn()
conn.execute("""
INSERT INTO Events (
INSERT OR IGNORE INTO Events (
eve_MAC, eve_IP, eve_DateTime,
eve_EventType, eve_AdditionalInfo,
eve_PendingAlertEmail, eve_PairEventRowid
@@ -124,7 +124,7 @@ class EventInstance:
cur = conn.cursor()
cur.execute(
"""
INSERT INTO Events (eve_MAC, eve_IP, eve_DateTime, eve_EventType, eve_AdditionalInfo, eve_PendingAlertEmail)
INSERT OR IGNORE INTO Events (eve_MAC, eve_IP, eve_DateTime, eve_EventType, eve_AdditionalInfo, eve_PendingAlertEmail)
VALUES (?, ?, ?, ?, ?, ?)
""",
(mac, ip, start_time, event_type, additional_info, pending_alert),

View File

@@ -16,7 +16,7 @@ from helper import (
getBuildTimeStampAndVersion,
)
from messaging.in_app import write_notification
from utils.datetime_utils import timeNowUTC, get_timezone_offset
from utils.datetime_utils import timeNowUTC, timeNowTZ, get_timezone_offset
# -----------------------------------------------------------------------------
@@ -107,7 +107,7 @@ class NotificationInstance:
mail_html = mail_html.replace("NEW_VERSION", newVersionText)
# Report "REPORT_DATE" in Header & footer
timeFormated = timeNowUTC()
timeFormated = timeNowTZ()
mail_text = mail_text.replace("REPORT_DATE", timeFormated)
mail_html = mail_html.replace("REPORT_DATE", timeFormated)

View File

@@ -49,6 +49,18 @@ class PluginObjectInstance:
"SELECT * FROM Plugins_Objects WHERE Plugin = ?", (plugin,)
)
def getLastNCreatedPerPLugin(self, plugin, entries=1):
    """
    Fetch the most recently created Plugins_Objects rows for *plugin*.

    Args:
        plugin: Plugin identifier matched against Plugins_Objects.Plugin.
        entries: Maximum number of rows to return; defaults to 1 (newest only).

    Returns:
        Rows ordered by DateTimeCreated descending (newest first).
    """
    # NOTE(review): method name keeps the "PerPLugin" capitalization typo —
    # renaming would break existing callers, so it is left as-is.
    return self._fetchall(
        """
        SELECT *
        FROM Plugins_Objects
        WHERE Plugin = ?
        ORDER BY DateTimeCreated DESC
        LIMIT ?
        """,
        (plugin, entries),
    )
def getByField(self, plugPrefix, matchedColumn, matchedKey, returnFields=None):
rows = self._fetchall(
f"SELECT * FROM Plugins_Objects WHERE Plugin = ? AND {matchedColumn} = ?",

View File

@@ -606,7 +606,7 @@ def create_new_devices(db):
mylog("debug", '[New Devices] Insert "New Device" Events')
query_new_device_events = f"""
INSERT INTO Events (
INSERT OR IGNORE INTO Events (
eve_MAC, eve_IP, eve_DateTime,
eve_EventType, eve_AdditionalInfo,
eve_PendingAlertEmail

View File

@@ -171,7 +171,7 @@ def insert_events(db):
# Check device down
mylog("debug", "[Events] - 1 - Devices down")
sql.execute(f"""INSERT INTO Events (eve_MAC, eve_IP, eve_DateTime,
sql.execute(f"""INSERT OR IGNORE INTO Events (eve_MAC, eve_IP, eve_DateTime,
eve_EventType, eve_AdditionalInfo,
eve_PendingAlertEmail)
SELECT devMac, devLastIP, '{startTime}', 'Device Down', '', 1
@@ -184,7 +184,7 @@ def insert_events(db):
# Check new Connections or Down Reconnections
mylog("debug", "[Events] - 2 - New Connections")
sql.execute(f""" INSERT INTO Events (eve_MAC, eve_IP, eve_DateTime,
sql.execute(f""" INSERT OR IGNORE INTO Events (eve_MAC, eve_IP, eve_DateTime,
eve_EventType, eve_AdditionalInfo,
eve_PendingAlertEmail)
SELECT DISTINCT c.scanMac, c.scanLastIP, '{startTime}',
@@ -201,7 +201,7 @@ def insert_events(db):
# Check disconnections
mylog("debug", "[Events] - 3 - Disconnections")
sql.execute(f"""INSERT INTO Events (eve_MAC, eve_IP, eve_DateTime,
sql.execute(f"""INSERT OR IGNORE INTO Events (eve_MAC, eve_IP, eve_DateTime,
eve_EventType, eve_AdditionalInfo,
eve_PendingAlertEmail)
SELECT devMac, devLastIP, '{startTime}', 'Disconnected', '',
@@ -215,7 +215,7 @@ def insert_events(db):
# Check IP Changed
mylog("debug", "[Events] - 4 - IP Changes")
sql.execute(f"""INSERT INTO Events (eve_MAC, eve_IP, eve_DateTime,
sql.execute(f"""INSERT OR IGNORE INTO Events (eve_MAC, eve_IP, eve_DateTime,
eve_EventType, eve_AdditionalInfo,
eve_PendingAlertEmail)
SELECT scanMac, scanLastIP, '{startTime}', 'IP Changed',

View File

@@ -3,7 +3,7 @@
import datetime
from logger import mylog
from utils.datetime_utils import timeNowUTC
from utils.datetime_utils import timeNowTZ
# -------------------------------------------------------------------------------
@@ -28,11 +28,11 @@ class schedule_class:
# Initialize the last run time if never run before
if self.last_run == 0:
self.last_run = (
timeNowUTC(as_string=False) - datetime.timedelta(days=365)
timeNowTZ(as_string=False) - datetime.timedelta(days=365)
).replace(microsecond=0)
# get the current time with the currently specified timezone
nowTime = timeNowUTC(as_string=False)
nowTime = timeNowTZ(as_string=False)
# Run the schedule if the current time is past the schedule time we saved last time and
# (maybe the following check is unnecessary)

View File

@@ -47,6 +47,33 @@ def timeNowUTC(as_string=True):
return utc_now.strftime(DATETIME_PATTERN) if as_string else utc_now
def timeNowTZ(as_string=True):
    """
    Return the current time in the configured local timezone.

    Falls back to UTC when conf.tz is missing, empty, or cannot be resolved.

    Args:
        as_string: When True, return a string formatted with DATETIME_PATTERN;
                   otherwise return a timezone-aware datetime object.
    """
    # Start from the canonical, timezone-aware UTC "now".
    now_utc = timeNowUTC(as_string=False)

    # Resolve the configured timezone without ever raising.
    resolved = None
    try:
        configured = conf.tz
        if isinstance(configured, datetime.tzinfo):
            resolved = configured
        elif isinstance(configured, str) and configured:
            resolved = ZoneInfo(configured)
    except Exception:
        resolved = None

    # Convert, falling back to UTC when no usable timezone was found.
    localized = now_utc.astimezone(resolved if resolved is not None else datetime.UTC)

    if as_string:
        return localized.strftime(DATETIME_PATTERN)
    return localized
def get_timezone_offset():
if conf.tz:
now = timeNowUTC(as_string=False).astimezone(conf.tz)