@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+# !/usr/bin/env python
 #
 # -------------------------------------------------------------------------------
 # NetAlertX v2.70 / 2021-02-01
@@ -22,9 +22,9 @@ from pathlib import Path
 
 # Register NetAlertX modules
 import conf
-from const import *
-from logger import mylog
-from helper import filePermissions
+from const import fullConfPath, sql_new_devices
+from logger import mylog
+from helper import filePermissions
 from utils.datetime_utils import timeNowTZ
 from app_state import updateState
 from api import update_api
@@ -48,12 +48,12 @@ main structure of NetAlertX
     Initialise All
     Rename old settings
     start Loop forever
-        initialise loop
+        initialise loop
         (re)import config
         (re)import plugin config
         run plugins (once)
         run frontend events
-        update API
+        update API
         run plugins (scheduled)
         processing scan results
         run plugins (after Scan)
@@ -111,7 +111,7 @@ def main():
     loop_start_time = conf.loop_start_time  # TODO fix
 
     # Handle plugins executed ONCE
-    if conf.plugins_once_run == False:
+    if conf.plugins_once_run is False:
         pm.run_plugin_scripts("once")
         conf.plugins_once_run = True
 
@@ -146,7 +146,7 @@ def main():
         processScan = updateState("Check scan").processScan
         mylog("debug", [f"[MAIN] processScan: {processScan}"])
 
-        if processScan == True:
+        if processScan is True:
             mylog("debug", "[MAIN] start processing scan results")
             process_scan(db)
             updateState("Scan processed", None, None, None, None, False)
 
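Note: several hunks in this change replace `== True` / `== False` comparisons with `is True` / `is False` (flake8 E712). A minimal, self-contained sketch of why the two are not interchangeable; the variable names are illustrative only:

```python
# Sketch of the E712 distinction: `==` invokes __eq__, so non-bool values
# can compare equal to True/False; `is` only matches the actual singleton
# objects True and False.

flag = 1.0
print(flag == True)   # True  - 1.0 compares equal to True
print(flag is True)   # False - 1.0 is not the True singleton

# For tri-state flags (None/True/False), identity makes the intent explicit:
state = None
if state is False:
    print("explicitly disabled")
elif state is None:
    print("not yet decided")
```
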
@@ -1,3 +1,4 @@
+# !/usr/bin/env python
 import json
 import time
 import threading
@@ -145,8 +146,7 @@ class api_endpoint_class:
             self.needsUpdate = True
             # Only update changeDetectedWhen if it hasn't been set recently
             if not self.changeDetectedWhen or current_time > (
-                self.changeDetectedWhen
-                + datetime.timedelta(seconds=self.debounce_interval)
+                self.changeDetectedWhen + datetime.timedelta(seconds=self.debounce_interval)
             ):
                 self.changeDetectedWhen = (
                     current_time  # Set timestamp for change detection
@@ -164,8 +164,7 @@ class api_endpoint_class:
             self.needsUpdate = True
             # Only update changeDetectedWhen if it hasn't been set recently
             if not self.changeDetectedWhen or current_time > (
-                self.changeDetectedWhen
-                + datetime.timedelta(seconds=self.debounce_interval)
+                self.changeDetectedWhen + datetime.timedelta(seconds=self.debounce_interval)
             ):
                 self.changeDetectedWhen = (
                     current_time  # Initialize timestamp for new endpoint
@@ -180,17 +179,15 @@ class api_endpoint_class:
         current_time = timeNowTZ()
 
         # Debugging info to understand the issue
-        # mylog('debug', [f'[API] api_endpoint_class: {self.fileName} is_ad_hoc_user_event {self.is_ad_hoc_user_event} last_update_time={self.last_update_time}, debounce time={self.last_update_time + datetime.timedelta(seconds=self.debounce_interval)}.'])
+        # mylog('debug', [f'[API] api_endpoint_class: {self.fileName} is_ad_hoc_user_event
+        # {self.is_ad_hoc_user_event} last_update_time={self.last_update_time},
+        # debounce time={self.last_update_time + datetime.timedelta(seconds=self.debounce_interval)}.'])
 
         # Only attempt to write if the debounce time has passed
-        if forceUpdate == True or (
-            self.needsUpdate
-            and (
-                self.changeDetectedWhen is None
-                or current_time
-                > (
-                    self.changeDetectedWhen
-                    + datetime.timedelta(seconds=self.debounce_interval)
+        if forceUpdate is True or (
+            self.needsUpdate and (
+                self.changeDetectedWhen is None or current_time > (
+                    self.changeDetectedWhen + datetime.timedelta(seconds=self.debounce_interval)
                 )
             )
         ):
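Note: the condition above gates API-file writes behind a debounce window. The following is a minimal standalone sketch of the same pattern; the class and method names are illustrative, not the module's actual API:

```python
# Minimal sketch of the debounce gate: a write only happens if it is forced,
# or a change is pending and the debounce window has elapsed.
import datetime

class DebouncedWriter:
    def __init__(self, debounce_interval=5):
        self.debounce_interval = debounce_interval  # seconds
        self.needs_update = False
        self.change_detected_when = None

    def mark_changed(self, now):
        self.needs_update = True
        # Only refresh the timestamp if it was never set or the window passed,
        # so a burst of changes is coalesced into a single write.
        if not self.change_detected_when or now > (
            self.change_detected_when + datetime.timedelta(seconds=self.debounce_interval)
        ):
            self.change_detected_when = now

    def should_write(self, now, force=False):
        return force or (
            self.needs_update
            and (
                self.change_detected_when is None
                or now > self.change_detected_when + datetime.timedelta(seconds=self.debounce_interval)
            )
        )

w = DebouncedWriter()
t0 = datetime.datetime.now()
w.mark_changed(t0)
print(w.should_write(t0))                                  # False - still inside the window
print(w.should_write(t0 + datetime.timedelta(seconds=6)))  # True  - window elapsed
print(w.should_write(t0, force=True))                      # True  - forced
```
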
@@ -9,25 +9,68 @@ from flask_cors import CORS
 INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
 sys.path.extend([f"{INSTALL_PATH}/server"])
 
-from logger import mylog
-from helper import get_setting_value
-from db.db_helper import get_date_from_period
-from app_state import updateState
+from logger import mylog  # noqa: E402 [flake8 lint suppression]
+from helper import get_setting_value  # noqa: E402 [flake8 lint suppression]
+from db.db_helper import get_date_from_period  # noqa: E402 [flake8 lint suppression]
+from app_state import updateState  # noqa: E402 [flake8 lint suppression]
 
 
-from .graphql_endpoint import devicesSchema
-from .device_endpoint import get_device_data, set_device_data, delete_device, delete_device_events, reset_device_props, copy_device, update_device_column
-from .devices_endpoint import get_all_devices, delete_unknown_devices, delete_all_with_empty_macs, delete_devices, export_devices, import_csv, devices_totals, devices_by_status
-from .events_endpoint import delete_events, delete_events_older_than, get_events, create_event, get_events_totals
-from .history_endpoint import delete_online_history
-from .prometheus_endpoint import get_metric_stats
-from .sessions_endpoint import get_sessions, delete_session, create_session, get_sessions_calendar, get_device_sessions, get_session_events
-from .nettools_endpoint import wakeonlan, traceroute, speedtest, nslookup, nmap_scan, internet_info
-from .dbquery_endpoint import read_query, write_query, update_query, delete_query
-from .sync_endpoint import handle_sync_post, handle_sync_get
-from .logs_endpoint import clean_log
-from models.user_events_queue_instance import UserEventsQueueInstance
-from messaging.in_app import write_notification, mark_all_notifications_read, delete_notifications, get_unread_notifications, delete_notification, mark_notification_as_read
+from .graphql_endpoint import devicesSchema  # noqa: E402 [flake8 lint suppression]
+from .device_endpoint import (  # noqa: E402 [flake8 lint suppression]
+    get_device_data,
+    set_device_data,
+    delete_device,
+    delete_device_events,
+    reset_device_props,
+    copy_device,
+    update_device_column
+)
+from .devices_endpoint import (  # noqa: E402 [flake8 lint suppression]
+    get_all_devices,
+    delete_unknown_devices,
+    delete_all_with_empty_macs,
+    delete_devices,
+    export_devices,
+    import_csv,
+    devices_totals,
+    devices_by_status
+)
+from .events_endpoint import (  # noqa: E402 [flake8 lint suppression]
+    delete_events,
+    delete_events_older_than,
+    get_events,
+    create_event,
+    get_events_totals
+)
+from .history_endpoint import delete_online_history  # noqa: E402 [flake8 lint suppression]
+from .prometheus_endpoint import get_metric_stats  # noqa: E402 [flake8 lint suppression]
+from .sessions_endpoint import (  # noqa: E402 [flake8 lint suppression]
+    get_sessions,
+    delete_session,
+    create_session,
+    get_sessions_calendar,
+    get_device_sessions,
+    get_session_events
+)
+from .nettools_endpoint import (  # noqa: E402 [flake8 lint suppression]
+    wakeonlan,
+    traceroute,
+    speedtest,
+    nslookup,
+    nmap_scan,
+    internet_info
+)
+from .dbquery_endpoint import read_query, write_query, update_query, delete_query  # noqa: E402 [flake8 lint suppression]
+from .sync_endpoint import handle_sync_post, handle_sync_get  # noqa: E402 [flake8 lint suppression]
+from .logs_endpoint import clean_log  # noqa: E402 [flake8 lint suppression]
+from models.user_events_queue_instance import UserEventsQueueInstance  # noqa: E402 [flake8 lint suppression]
+from messaging.in_app import (  # noqa: E402 [flake8 lint suppression]
+    write_notification,
+    mark_all_notifications_read,
+    delete_notifications,
+    get_unread_notifications,
+    delete_notification,
+    mark_notification_as_read
+)
 
 # Flask application
 app = Flask(__name__)
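Note: the `# noqa: E402` markers acknowledge that these imports intentionally run after `sys.path` has been extended, which flake8 otherwise reports as "module level import not at top of file". A runnable sketch of the pattern, using a stdlib module in place of the app's own:

```python
import os
import sys

# Simulate the repo layout: modules live under <root>/server, which is not
# on sys.path until it is extended at runtime.
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
sys.path.extend([f"{INSTALL_PATH}/server"])

# Any import placed after executable code triggers flake8 E402; the diff
# opts for a targeted per-line suppression rather than reordering, because
# the sys.path change has to happen before the app modules can resolve.
import json  # noqa: E402

print(json.dumps({"path_entries": len(sys.path)}))
```
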
@@ -50,13 +93,14 @@ CORS(
     allow_headers=["Authorization", "Content-Type"],
 )
 
 
 # -------------------------------------------------------------------
 # Custom handler for 404 - Route not found
 # -------------------------------------------------------------------
 @app.errorhandler(404)
 def not_found(error):
     response = {
-        "success": False,
+        "success": False,
         "error": "API route not found",
         "message": f"The requested URL {error.description if hasattr(error, 'description') else ''} was not found on the server.",
     }
@@ -200,7 +244,7 @@ def api_get_devices():
 def api_delete_devices():
     if not is_authorized():
         return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403
 
     macs = request.json.get("macs") if request.is_json else None
 
     return delete_devices(macs)
@@ -338,7 +382,7 @@ def dbquery_read():
 
     if not raw_sql_b64:
         return jsonify({"success": False, "message": "ERROR: Missing parameters", "error": "rawSql is required"}), 400
 
     return read_query(raw_sql_b64)
 
 
@@ -350,7 +394,7 @@ def dbquery_write():
     data = request.get_json() or {}
     raw_sql_b64 = data.get("rawSql")
     if not raw_sql_b64:
-        return jsonify({"success": False, "message": "ERROR: Missing parameters", "error": "rawSql is required"}), 400
+        return jsonify({"success": False, "message": "ERROR: Missing parameters", "error": "rawSql is required"}), 400
 
     return write_query(raw_sql_b64)
 
@@ -363,7 +407,13 @@ def dbquery_update():
     data = request.get_json() or {}
     required = ["columnName", "id", "dbtable", "columns", "values"]
     if not all(data.get(k) for k in required):
-        return jsonify({"success": False, "message": "ERROR: Missing parameters", "error": "Missing required 'columnName', 'id', 'dbtable', 'columns', or 'values' query parameter"}), 400
+        return jsonify(
+            {
+                "success": False,
+                "message": "ERROR: Missing parameters",
+                "error": "Missing required 'columnName', 'id', 'dbtable', 'columns', or 'values' query parameter"
+            }
+        ), 400
 
     return update_query(
         column_name=data["columnName"],
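Note: the dbquery routes above all expect the SQL text base64-encoded under the `rawSql` key of the JSON body. A sketch of the round trip; the server-side decode inside `read_query()`/`write_query()` is assumed from the `raw_sql_b64` parameter name and the `base64` import, and is not shown in this diff:

```python
# Client-side encoding for the dbquery endpoints: the SQL text travels
# base64-encoded in the JSON body under "rawSql".
import base64
import json

sql = "SELECT devMac, devName FROM Devices LIMIT 5"
payload = {"rawSql": base64.b64encode(sql.encode()).decode()}
print(json.dumps(payload))

# What the server would plausibly do to recover the query:
decoded = base64.b64decode(payload["rawSql"]).decode()
assert decoded == sql
```
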
@@ -418,12 +468,13 @@ def api_clean_log():
 
     return clean_log(file)
 
 
 @app.route("/logs/add-to-execution-queue", methods=["POST"])
 def api_add_to_execution_queue():
 
     if not is_authorized():
         return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403
 
     queue = UserEventsQueueInstance()
 
     # Get JSON payload safely
@@ -499,7 +550,7 @@ def api_delete_old_events(days: int):
     """
     if not is_authorized():
         return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403
 
     return delete_events_older_than(days)
 
 
@@ -619,7 +670,7 @@ def api_write_notification():
 
     if not content:
         return jsonify({"success": False, "message": "ERROR: Missing parameters", "error": "Missing content"}), 400
 
     write_notification(content, level)
     return jsonify({"success": True})
 
@@ -672,7 +723,8 @@ def api_mark_notification_read(guid):
         return jsonify({"success": True})
     else:
         return jsonify({"success": False, "message": "ERROR", "error": result.get("error")}), 500
 
 
 # --------------------------
 # SYNC endpoint
 # --------------------------
 
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+# !/usr/bin/env python
 
 import os
 import base64
@@ -9,7 +9,7 @@ from flask import jsonify
 INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
 sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
 
-from database import get_temp_db_connection
+from database import get_temp_db_connection  # noqa: E402 [flake8 lint suppression]
 
 
 def read_query(raw_sql_b64):
 
@@ -1,18 +1,17 @@
-#!/usr/bin/env python
+# !/usr/bin/env python
 
 import os
 import sys
 from datetime import datetime
 from flask import jsonify, request
 
 # Register NetAlertX directories
 INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
 sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
 
-from database import get_temp_db_connection
-from helper import is_random_mac, get_setting_value
-from utils.datetime_utils import timeNowDB, format_date
-from db.db_helper import row_to_json, get_date_from_period
+from database import get_temp_db_connection  # noqa: E402 [flake8 lint suppression]
+from helper import is_random_mac, get_setting_value  # noqa: E402 [flake8 lint suppression]
+from utils.datetime_utils import timeNowDB, format_date  # noqa: E402 [flake8 lint suppression]
+from db.db_helper import row_to_json, get_date_from_period  # noqa: E402 [flake8 lint suppression]
 
 # --------------------------
 # Device Endpoints Functions
@@ -27,10 +26,10 @@ def get_device_data(mac):
     cur = conn.cursor()
 
     now = timeNowDB()
 
     # Special case for new device
     if mac.lower() == "new":
 
         device_data = {
             "devMac": "",
             "devName": "",
@@ -89,10 +88,10 @@ def get_device_data(mac):
                 ELSE 'Off-line'
             END AS devStatus,
 
-            (SELECT COUNT(*) FROM Sessions
+            (SELECT COUNT(*) FROM Sessions
                 WHERE ses_MAC = d.devMac AND (
-                    ses_DateTimeConnection >= {period_date_sql} OR
-                    ses_DateTimeDisconnection >= {period_date_sql} OR
+                    ses_DateTimeConnection >= {period_date_sql} OR
+                    ses_DateTimeDisconnection >= {period_date_sql} OR
                     ses_StillConnected = 1
             )) AS devSessions,
 
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+# !/usr/bin/env python
 
 import os
 import base64
@@ -14,16 +14,13 @@ from logger import mylog
 INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
 sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
 
-from database import get_temp_db_connection
-from db.db_helper import get_table_json, get_device_condition_by_status
-from utils.datetime_utils import format_date
+from database import get_temp_db_connection  # noqa: E402 [flake8 lint suppression]
+from db.db_helper import get_table_json, get_device_condition_by_status  # noqa: E402 [flake8 lint suppression]
 
 
 # --------------------------
 # Device Endpoints Functions
 # --------------------------
 
 
 def get_all_devices():
     """Retrieve all devices from the database."""
     conn = get_temp_db_connection()
@@ -139,7 +136,6 @@ def export_devices(export_format):
 def import_csv(file_storage=None):
     data = ""
-    skipped = []
     error = None
 
     # 1. Try JSON `content` (base64-encoded CSV)
     if request.is_json and request.json.get("content"):
 
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+# !/usr/bin/env python
 
 import os
 import sys
@@ -9,10 +9,10 @@ from flask import jsonify
 INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
 sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
 
-from database import get_temp_db_connection
-from helper import is_random_mac, mylog
-from db.db_helper import row_to_json, get_date_from_period
-from utils.datetime_utils import format_date, format_date_iso, format_event_date, ensure_datetime
+from database import get_temp_db_connection  # noqa: E402 [flake8 lint suppression]
+from helper import mylog  # noqa: E402 [flake8 lint suppression]
+from db.db_helper import row_to_json, get_date_from_period  # noqa: E402 [flake8 lint suppression]
+from utils.datetime_utils import ensure_datetime  # noqa: E402 [flake8 lint suppression]
 
 
 # --------------------------
@@ -120,14 +120,14 @@ def get_events_totals(period: str = "7 days"):
     cur = conn.cursor()
 
     sql = f"""
-        SELECT
+        SELECT
             (SELECT COUNT(*) FROM Events WHERE eve_DateTime >= {period_date_sql}) AS all_events,
-            (SELECT COUNT(*) FROM Sessions WHERE
+            (SELECT COUNT(*) FROM Sessions WHERE
                 ses_DateTimeConnection >= {period_date_sql}
                 OR ses_DateTimeDisconnection >= {period_date_sql}
                 OR ses_StillConnected = 1
             ) AS sessions,
-            (SELECT COUNT(*) FROM Sessions WHERE
+            (SELECT COUNT(*) FROM Sessions WHERE
                 (ses_DateTimeConnection IS NULL AND ses_DateTimeDisconnection >= {period_date_sql})
                 OR (ses_DateTimeDisconnection IS NULL AND ses_StillConnected = 0 AND ses_DateTimeConnection >= {period_date_sql})
             ) AS missing,
 
@@ -1,5 +1,7 @@
 import graphene
-from graphene import ObjectType, String, Int, Boolean, List, Field, InputObjectType, Argument
+from graphene import (
+    ObjectType, String, Int, Boolean, List, Field, InputObjectType, Argument
+)
 import json
 import sys
 import os
@@ -8,9 +10,9 @@ import os
 INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
 sys.path.extend([f"{INSTALL_PATH}/server"])
 
-from logger import mylog
-from const import apiPath
-from helper import (
+from logger import mylog  # noqa: E402 [flake8 lint suppression]
+from const import apiPath  # noqa: E402 [flake8 lint suppression]
+from helper import (  # noqa: E402 [flake8 lint suppression]
     is_random_mac,
     get_number_of_children,
     format_ip_long,
@@ -111,12 +113,14 @@ class SettingResult(ObjectType):
     settings = List(Setting)
     count = Int()
 
-# --- LANGSTRINGS ---
+
+# --- LANGSTRINGS ---
 
 # In-memory cache for lang strings
 _langstrings_cache = {}  # caches lists per file (core JSON or plugin)
 _langstrings_cache_mtime = {}  # tracks last modified times
 
 
 # LangString ObjectType
 class LangString(ObjectType):
     langCode = String()
@@ -128,6 +132,7 @@ class LangStringResult(ObjectType):
     langStrings = List(LangString)
     count = Int()
 
 
 # Define Query Type with Pagination Support
 class Query(ObjectType):
     # --- DEVICES ---
@@ -184,31 +189,39 @@ class Query(ObjectType):
                 if (device.get("devParentRelType") not in hidden_relationships)
             ]
 
-            devices_data = [
-                device
-                for device in devices_data
-                if (
-                    (
-                        device["devPresentLastScan"] == 1
-                        and "online" in allowed_statuses
-                    )
-                    or (device["devIsNew"] == 1 and "new" in allowed_statuses)
-                    or (
-                        device["devPresentLastScan"] == 0
-                        and device["devAlertDown"]
-                        and "down" in allowed_statuses
-                    )
-                    or (
-                        device["devPresentLastScan"] == 0
-                        and "offline" in allowed_statuses
-                    )
-                    and device["devIsArchived"] == 0
-                    or (
-                        device["devIsArchived"] == 1
-                        and "archived" in allowed_statuses
-                    )
-            ]
+            filtered = []
+
+            for device in devices_data:
+                is_online = (
+                    device["devPresentLastScan"] == 1 and "online" in allowed_statuses
+                )
+
+                is_new = (
+                    device["devIsNew"] == 1 and "new" in allowed_statuses
+                )
+
+                is_down = (
+                    device["devPresentLastScan"] == 0 and device["devAlertDown"] and "down" in allowed_statuses
+                )
+
+                is_offline = (
+                    device["devPresentLastScan"] == 0 and "offline" in allowed_statuses
+                )
+
+                is_archived = (
+                    device["devIsArchived"] == 1 and "archived" in allowed_statuses
+                )
+
+                # Matches if not archived and status matches OR it is archived and allowed
+                matches = (
+                    (is_online or is_new or is_down or is_offline) and device["devIsArchived"] == 0
+                ) or is_archived
+
+                if matches:
+                    filtered.append(device)
+
+            devices_data = filtered
 
         elif status == "connected":
             devices_data = [
                 device
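Note: besides readability, the rewrite above makes the grouping explicit that the original comprehension left to Python's operator precedence, where `and` binds tighter than `or`. A small sketch of the pitfall, with illustrative values:

```python
# Sketch of the precedence pitfall the explicit `matches` expression avoids:
# without parentheses, `and` binds tighter than `or`, so an archived guard
# only attaches to the last alternative instead of covering all of them.
online, new, down, offline = False, True, False, False
not_archived = False  # the device IS archived

implicit = online or new or down or offline and not_archived
explicit = (online or new or down or offline) and not_archived

print(implicit)  # True  - `new` escapes the archived guard
print(explicit)  # False - the guard applies to every status flag
```
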
@@ -257,8 +270,7 @@ class Query(ObjectType):
             devices_data = [
                 device
                 for device in devices_data
-                if str(device.get(filter.filterColumn, "")).lower()
-                == str(filter.filterValue).lower()
+                if str(device.get(filter.filterColumn, "")).lower() == str(filter.filterValue).lower()
             ]
 
         # Search data if a search term is provided
@@ -340,7 +352,7 @@ class Query(ObjectType):
 
         return SettingResult(settings=settings, count=len(settings))
 
-    # --- LANGSTRINGS ---
+    # --- LANGSTRINGS ---
     langStrings = Field(
         LangStringResult,
         langCode=Argument(String, required=False),
@@ -437,11 +449,11 @@ class Query(ObjectType):
             if en_fallback:
                 langStrings[i] = en_fallback[0]
 
-        mylog('trace', f'[graphql_schema] Collected {len(langStrings)} language strings '
-                       f'(langCode={langCode}, key={langStringKey}, fallback_to_en={fallback_to_en})')
+        mylog('trace', f'[graphql_schema] Collected {len(langStrings)} language strings (langCode={langCode}, key={langStringKey}, fallback_to_en={fallback_to_en})')
 
         return LangStringResult(langStrings=langStrings, count=len(langStrings))
 
 
 # helps sorting inconsistent dataset mixed integers and strings
 def mixed_type_sort_key(value):
     if value is None or value == "":
 
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+# !/usr/bin/env python
 
 import os
 import sys
@@ -8,7 +8,7 @@ from flask import jsonify
 INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
 sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
 
-from database import get_temp_db_connection
+from database import get_temp_db_connection  # noqa: E402 [flake8 lint suppression]
 
 
 # --------------------------------------------------
 
@@ -3,18 +3,18 @@ import sys
 from flask import jsonify
 
 # Register NetAlertX directories
-INSTALL_PATH="/app"
+INSTALL_PATH = os.getenv('NETALERTX_APP', '/app')
 sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
 
-from const import logPath
-from logger import mylog, Logger
-from helper import get_setting_value
-from utils.datetime_utils import timeNowDB
-from messaging.in_app import write_notification
+from const import logPath  # noqa: E402 [flake8 lint suppression]
+from logger import mylog, Logger  # noqa: E402 [flake8 lint suppression]
+from helper import get_setting_value  # noqa: E402 [flake8 lint suppression]
+from messaging.in_app import write_notification  # noqa: E402 [flake8 lint suppression]
 
 # Make sure log level is initialized correctly
 Logger(get_setting_value('LOG_LEVEL'))
 
 
 def clean_log(log_file):
     """
     Purge the content of an allowed log file within the /app/log/ directory.
@@ -55,4 +55,3 @@ def clean_log(log_file):
         mylog('none', [msg])
         write_notification(msg, 'interrupt')
         return jsonify({"success": False, "message": msg}), 500
-
@@ -6,8 +6,8 @@ import os
 INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
 sys.path.extend([f"{INSTALL_PATH}/server"])
 
-from logger import mylog
-from const import apiPath
+from logger import mylog  # noqa: E402 [flake8 lint suppression]
+from const import apiPath  # noqa: E402 [flake8 lint suppression]
 
 
 def escape_label_value(val):
 
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+# !/usr/bin/env python
 
 import os
 import sqlite3
@@ -9,10 +9,10 @@ from flask import jsonify
 INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
 sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
 
-from database import get_temp_db_connection
-from helper import is_random_mac, get_setting_value, mylog, format_ip_long
-from db.db_helper import row_to_json, get_date_from_period
-from utils.datetime_utils import format_date_iso, format_event_date, format_date_diff, parse_datetime, format_date
+from database import get_temp_db_connection  # noqa: E402 [flake8 lint suppression]
+from helper import get_setting_value, format_ip_long  # noqa: E402 [flake8 lint suppression]
+from db.db_helper import get_date_from_period  # noqa: E402 [flake8 lint suppression]
+from utils.datetime_utils import format_date_iso, format_event_date, format_date_diff, format_date  # noqa: E402 [flake8 lint suppression]
 
 
 # --------------------------
@@ -33,7 +33,7 @@ def create_session(
 
     cur.execute(
         """
-        INSERT INTO Sessions (ses_MAC, ses_IP, ses_DateTimeConnection, ses_DateTimeDisconnection,
+        INSERT INTO Sessions (ses_MAC, ses_IP, ses_DateTimeConnection, ses_DateTimeDisconnection,
                               ses_EventTypeConnection, ses_EventTypeDisconnection)
         VALUES (?, ?, ?, ?, ?, ?)
         """,
@@ -105,7 +105,7 @@ def get_sessions_calendar(start_date, end_date):
         -- If ses_EventTypeConnection is missing, backfill from last disconnection
         -- If ses_EventTypeDisconnection is missing, forward-fill from next connection
 
-        SELECT
+        SELECT
             SES1.ses_MAC, SES1.ses_EventTypeConnection, SES1.ses_DateTimeConnection,
             SES1.ses_EventTypeDisconnection, SES1.ses_DateTimeDisconnection, SES1.ses_IP,
             SES1.ses_AdditionalInfo, SES1.ses_StillConnected,
@@ -113,9 +113,9 @@ def get_sessions_calendar(start_date, end_date):
             CASE
                 WHEN SES1.ses_EventTypeConnection = '<missing event>' THEN
                     IFNULL(
-                        (SELECT MAX(SES2.ses_DateTimeDisconnection)
-                         FROM Sessions AS SES2
-                         WHERE SES2.ses_MAC = SES1.ses_MAC
+                        (SELECT MAX(SES2.ses_DateTimeDisconnection)
+                         FROM Sessions AS SES2
+                         WHERE SES2.ses_MAC = SES1.ses_MAC
                            AND SES2.ses_DateTimeDisconnection < SES1.ses_DateTimeDisconnection
                            AND SES2.ses_DateTimeDisconnection BETWEEN Date(?) AND Date(?)
                         ),
@@ -126,9 +126,9 @@ def get_sessions_calendar(start_date, end_date):
 
             CASE
                 WHEN SES1.ses_EventTypeDisconnection = '<missing event>' THEN
-                    (SELECT MIN(SES2.ses_DateTimeConnection)
-                     FROM Sessions AS SES2
-                     WHERE SES2.ses_MAC = SES1.ses_MAC
+                    (SELECT MIN(SES2.ses_DateTimeConnection)
+                     FROM Sessions AS SES2
+                     WHERE SES2.ses_MAC = SES1.ses_MAC
                        AND SES2.ses_DateTimeConnection > SES1.ses_DateTimeConnection
                        AND SES2.ses_DateTimeConnection BETWEEN Date(?) AND Date(?)
                     )
@@ -162,8 +162,7 @@ def get_sessions_calendar(start_date, end_date):
 
         # Determine color
         if (
-            row["ses_EventTypeConnection"] == "<missing event>"
-            or row["ses_EventTypeDisconnection"] == "<missing event>"
+            row["ses_EventTypeConnection"] == "<missing event>" or row["ses_EventTypeDisconnection"] == "<missing event>"
         ):
             color = "#f39c12"
         elif row["ses_StillConnected"] == 1:
@@ -205,7 +204,7 @@ def get_device_sessions(mac, period):
     cur = conn.cursor()
 
     sql = f"""
-        SELECT
+        SELECT
             IFNULL(ses_DateTimeConnection, ses_DateTimeDisconnection) AS ses_DateTimeOrder,
             ses_EventTypeConnection,
             ses_DateTimeConnection,
@@ -293,7 +292,7 @@ def get_session_events(event_type, period_date):
 
     # Base SQLs
     sql_events = f"""
-        SELECT
+        SELECT
            eve_DateTime AS eve_DateTimeOrder,
            devName,
            devOwner,
@@ -314,7 +313,7 @@ def get_session_events(event_type, period_date):
     """
 
     sql_sessions = """
-        SELECT
+        SELECT
            IFNULL(ses_DateTimeConnection, ses_DateTimeDisconnection) AS ses_DateTimeOrder,
            devName,
            devOwner,
@@ -337,8 +336,7 @@ def get_session_events(event_type, period_date):
         sql = sql_events
     elif event_type == "sessions":
         sql = (
-            sql_sessions
-            + f"""
+            sql_sessions + f"""
             WHERE (
                 ses_DateTimeConnection >= {period_date}
                 OR ses_DateTimeDisconnection >= {period_date}
@@ -348,8 +346,7 @@ def get_session_events(event_type, period_date):
         )
     elif event_type == "missing":
         sql = (
-            sql_sessions
-            + f"""
+            sql_sessions + f"""
             WHERE (
                 (ses_DateTimeConnection IS NULL AND ses_DateTimeDisconnection >= {period_date})
                 OR (ses_DateTimeDisconnection IS NULL AND ses_StillConnected = 0 AND ses_DateTimeConnection >= {period_date})
 
@@ -1,7 +1,7 @@
 import os
 import json
 
-from const import *
+from const import applicationPath, apiPath
 from logger import mylog
 from helper import checkNewVersion
 from utils.datetime_utils import timeNowDB, timeNow
@@ -32,14 +32,17 @@ class app_state_class:
         isNewVersionChecked (int): Timestamp of last version check.
     """
 
-    def __init__(self, currentState=None,
-                 settingsSaved=None,
-                 settingsImported=None,
-                 showSpinner=None,
-                 graphQLServerStarted=0,
-                 processScan=False,
-                 pluginsStates=None,
-                 appVersion=None):
+    def __init__(
+        self,
+        currentState=None,
+        settingsSaved=None,
+        settingsImported=None,
+        showSpinner=None,
+        graphQLServerStarted=0,
+        processScan=False,
+        pluginsStates=None,
+        appVersion=None
+    ):
         """
         Initialize the application state, optionally overwriting previous values.
 
@@ -62,7 +65,7 @@ class app_state_class:
 
         # Update self
         self.lastUpdated = str(timeNowDB())
 
         if os.path.exists(stateFile):
             try:
                 with open(stateFile, "r") as json_file:
@@ -73,7 +76,7 @@ class app_state_class:
                 )
 
         # Check if the file exists and recover previous values
-        if previousState != "":
+        if previousState != "":
             self.settingsSaved = previousState.get("settingsSaved", 0)
             self.settingsImported = previousState.get("settingsImported", 0)
             self.processScan = previousState.get("processScan", False)
@@ -82,9 +85,9 @@ class app_state_class:
             self.isNewVersionChecked = previousState.get("isNewVersionChecked", 0)
             self.graphQLServerStarted = previousState.get("graphQLServerStarted", 0)
             self.currentState = previousState.get("currentState", "Init")
-            self.pluginsStates = previousState.get("pluginsStates", {})
-            self.appVersion = previousState.get("appVersion", "")
-        else:  # init first time values
+            self.pluginsStates = previousState.get("pluginsStates", {})
+            self.appVersion = previousState.get("appVersion", "")
+        else:  # init first time values
             self.settingsSaved = 0
             self.settingsImported = 0
             self.showSpinner = False
@@ -158,12 +161,12 @@ class app_state_class:
 
 # -------------------------------------------------------------------------------
 # method to update the state
-def updateState(newState = None,
-                settingsSaved = None,
-                settingsImported = None,
-                showSpinner = None,
-                graphQLServerStarted = None,
-                processScan = None,
+def updateState(newState = None,
+                settingsSaved = None,
+                settingsImported = None,
+                showSpinner = None,
+                graphQLServerStarted = None,
+                processScan = None,
                 pluginsStates=None,
                 appVersion=None):
     """
@@ -182,14 +185,16 @@ def updateState(newState = None,
     Returns:
         app_state_class: Updated state object.
     """
-    return app_state_class( newState,
-                            settingsSaved,
-                            settingsImported,
-                            showSpinner,
-                            graphQLServerStarted,
-                            processScan,
-                            pluginsStates,
-                            appVersion)
+    return app_state_class(
+        newState,
+        settingsSaved,
+        settingsImported,
+        showSpinner,
+        graphQLServerStarted,
+        processScan,
+        pluginsStates,
+        appVersion
+    )
 
 
 # -------------------------------------------------------------------------------
 
@@ -52,7 +52,7 @@ default_tz = "Europe/Berlin"
 # SQL queries
 # ===============================================================================
 sql_devices_all = """
-    SELECT
+    SELECT
         rowid,
         IFNULL(devMac, '') AS devMac,
         IFNULL(devName, '') AS devName,
@@ -88,7 +88,7 @@ sql_devices_all = """
         IFNULL(devFQDN, '') AS devFQDN,
         IFNULL(devParentRelType, '') AS devParentRelType,
         IFNULL(devReqNicsOnline, '') AS devReqNicsOnline,
-        CASE
+        CASE
             WHEN devIsNew = 1 THEN 'New'
             WHEN devPresentLastScan = 1 THEN 'On-line'
             WHEN devPresentLastScan = 0 AND devAlertDown != 0 THEN 'Down'
@@ -133,7 +133,7 @@ sql_devices_tiles = """
         (SELECT COUNT(*) FROM Devices) AS "all_devices",
         -- My Devices count
         (SELECT COUNT(*) FROM MyDevicesFilter) AS my_devices
-    FROM Statuses;
+    FROM Statuses;
 """
 sql_devices_filters = """
     SELECT DISTINCT 'devSite' AS columnName, devSite AS columnValue
@@ -164,9 +164,9 @@ sql_devices_filters = """
     FROM Devices WHERE devSSID NOT IN ('', 'null') AND devSSID IS NOT NULL
     ORDER BY columnName;
 """
-sql_devices_stats = """SELECT Online_Devices as online, Down_Devices as down, All_Devices as 'all', Archived_Devices as archived,
-                        (select count(*) from Devices a where devIsNew = 1 ) as new,
-                        (select count(*) from Devices a where devName = '(unknown)' or devName = '(name not found)' ) as unknown
+sql_devices_stats = """SELECT Online_Devices as online, Down_Devices as down, All_Devices as 'all', Archived_Devices as archived,
+                        (select count(*) from Devices a where devIsNew = 1 ) as new,
+                        (select count(*) from Devices a where devName = '(unknown)' or devName = '(name not found)' ) as unknown
                        from Online_History order by Scan_Date desc limit 1"""
 sql_events_pending_alert = "SELECT * FROM Events where eve_PendingAlertEmail is not 0"
 sql_settings = "SELECT * FROM Settings"
@@ -176,23 +176,23 @@ sql_notifications_all = "SELECT * FROM Notifications"
 sql_online_history = "SELECT * FROM Online_History"
 sql_plugins_events = "SELECT * FROM Plugins_Events"
 sql_plugins_history = "SELECT * FROM Plugins_History ORDER BY DateTimeChanged DESC"
-sql_new_devices = """SELECT * FROM (
-                        SELECT eve_IP as devLastIP, eve_MAC as devMac
+sql_new_devices = """SELECT * FROM (
+                        SELECT eve_IP as devLastIP, eve_MAC as devMac
                         FROM Events_Devices
                         WHERE eve_PendingAlertEmail = 1
                         AND eve_EventType = 'New Device'
                         ORDER BY eve_DateTime ) t1
-                    LEFT JOIN
-                    ( SELECT devName, devMac as devMac_t2 FROM Devices) t2
+                    LEFT JOIN
+                    ( SELECT devName, devMac as devMac_t2 FROM Devices) t2
                     ON t1.devMac = t2.devMac_t2"""
 
 
 sql_generateGuid = """
     lower(
-        hex(randomblob(4)) || '-' || hex(randomblob(2)) || '-' || '4' ||
-        substr(hex( randomblob(2)), 2) || '-' ||
+        hex(randomblob(4)) || '-' || hex(randomblob(2)) || '-' || '4' ||
+        substr(hex( randomblob(2)), 2) || '-' ||
         substr('AB89', 1 + (abs(random()) % 4) , 1) ||
-        substr(hex(randomblob(2)), 2) || '-' ||
+        substr(hex(randomblob(2)), 2) || '-' ||
         hex(randomblob(6))
     )
 """
 
@@ -180,7 +180,7 @@ class DB:
         # Init the AppEvent database table
         AppEvent_obj(self)
 
-    # #-------------------------------------------------------------------------------
+    # # -------------------------------------------------------------------------------
     # def get_table_as_json(self, sqlQuery):
 
     #     # mylog('debug',[ '[Database] - get_table_as_json - Query: ', sqlQuery])
 
@@ -6,8 +6,8 @@ import os
 INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
 sys.path.extend([f"{INSTALL_PATH}/server"])
 
-from helper import if_byte_then_to_str
-from logger import mylog
+from helper import if_byte_then_to_str  # noqa: E402 [flake8 lint suppression]
+from logger import mylog  # noqa: E402 [flake8 lint suppression]
 
 
 # -------------------------------------------------------------------------------
 
@@ -5,8 +5,8 @@ import os
 INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
 sys.path.extend([f"{INSTALL_PATH}/server"])
 
-from logger import mylog
-from messaging.in_app import write_notification
+from logger import mylog  # noqa: E402 [flake8 lint suppression]
+from messaging.in_app import write_notification  # noqa: E402 [flake8 lint suppression]
 
 
 def ensure_column(sql, table: str, column_name: str, column_type: str) -> bool:
@@ -108,23 +108,23 @@ def ensure_views(sql) -> bool:
     - sql: database cursor or connection wrapper (must support execute() and fetchall()).
     """
     sql.execute(""" DROP VIEW IF EXISTS Events_Devices;""")
-    sql.execute(""" CREATE VIEW Events_Devices AS
-                    SELECT *
-                    FROM Events
+    sql.execute(""" CREATE VIEW Events_Devices AS
+                    SELECT *
+                    FROM Events
                     LEFT JOIN Devices ON eve_MAC = devMac;
     """)
 
     sql.execute(""" DROP VIEW IF EXISTS LatestEventsPerMAC;""")
     sql.execute("""CREATE VIEW LatestEventsPerMAC AS
                    WITH RankedEvents AS (
-                       SELECT
+                       SELECT
                            e.*,
                            ROW_NUMBER() OVER (PARTITION BY e.eve_MAC ORDER BY e.eve_DateTime DESC) AS row_num
                        FROM Events AS e
                    )
-                   SELECT
-                       e.*,
-                       d.*,
+                   SELECT
+                       e.*,
+                       d.*,
                        c.*
                    FROM RankedEvents AS e
                    LEFT JOIN Devices AS d ON e.eve_MAC = d.devMac
@@ -138,14 +138,14 @@ def ensure_views(sql) -> bool:
 
     sql.execute(""" CREATE VIEW IF NOT EXISTS LatestEventsPerMAC AS
                     WITH RankedEvents AS (
-                        SELECT
+                        SELECT
                             e.*,
                             ROW_NUMBER() OVER (PARTITION BY e.eve_MAC ORDER BY e.eve_DateTime DESC) AS row_num
                         FROM Events AS e
                     )
-                    SELECT
-                        e.*,
-                        d.*,
+                    SELECT
+                        e.*,
+                        d.*,
                         c.*
                     FROM RankedEvents AS e
                     LEFT JOIN Devices AS d ON e.eve_MAC = d.devMac
@@ -272,7 +272,7 @@ def ensure_CurrentScan(sql) -> bool:
     """
    # 🐛 CurrentScan DEBUG: comment out below when debugging to keep the CurrentScan table after restarts/scan finishes
    sql.execute("DROP TABLE IF EXISTS CurrentScan;")
-    sql.execute(""" CREATE TABLE IF NOT EXISTS CurrentScan (
+    sql.execute(""" CREATE TABLE IF NOT EXISTS CurrentScan (
                         cur_MAC STRING(50) NOT NULL COLLATE NOCASE,
                         cur_IP STRING(50) NOT NULL COLLATE NOCASE,
                         cur_Vendor STRING(250),
@@ -354,7 +354,7 @@ def ensure_plugins_tables(sql) -> bool:
     # Plugin state
     sql_Plugins_Objects = """ CREATE TABLE IF NOT EXISTS Plugins_Objects(
                         "Index" INTEGER,
-                        Plugin TEXT NOT NULL,
+                        Plugin TEXT NOT NULL,
                         Object_PrimaryID TEXT NOT NULL,
                         Object_SecondaryID TEXT NOT NULL,
                         DateTimeCreated TEXT NOT NULL,
 
@@ -18,7 +18,7 @@ from typing import Dict, List, Tuple, Any, Optional
 INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
 sys.path.extend([f"{INSTALL_PATH}/server"])
 
-from logger import mylog
+from logger import mylog  # noqa: E402 [flake8 lint suppression]
 
 
 class SafeConditionBuilder:
@@ -494,8 +494,6 @@ class SafeConditionBuilder:
         if logical_op and not self._validate_logical_operator(logical_op):
             raise ValueError(f"Invalid logical operator: {logical_op}")
 
-        # Parse values from the IN clause
-        values = []
         # Simple regex to extract quoted values
         value_pattern = r"'([^']*)'"
         matches = re.findall(value_pattern, values_str)
 
@@ -7,25 +7,22 @@ import os
 import re
 import unicodedata
 import subprocess
 import pytz
 import json
 import requests
 import base64
 import hashlib
 import random
 import email
 import string
 import ipaddress
 
 import conf
-from const import *
+from const import applicationPath, fullConfPath, fullDbPath, dbPath, confPath, apiPath
 from logger import mylog, logResult
 
 # Register NetAlertX directories using runtime configuration
 INSTALL_PATH = applicationPath
 
 
 # -------------------------------------------------------------------------------
 # File system permission handling
 # -------------------------------------------------------------------------------
@@ -58,12 +58,6 @@ def checkPermissionsOK():
     return (confR_access, dbR_access)
 
 
-# -------------------------------------------------------------------------------
-def fixPermissions():
-    # Try fixing access rights if needed
-    chmodCommands = []
-
-
 # -------------------------------------------------------------------------------
 def initialiseFile(pathToCheck, defaultFile):
     # if file not readable (missing?) try to copy over the backed-up (default) one
@@ -71,9 +62,7 @@ def initialiseFile(pathToCheck, defaultFile):
     mylog(
         "none",
         [
-            "[Setup] ("
-            + pathToCheck
-            + ") file is not readable or missing. Trying to copy over the default one."
+            "[Setup] (" + pathToCheck + ") file is not readable or missing. Trying to copy over the default one."
         ],
     )
     try:
@@ -89,22 +78,14 @@ def initialiseFile(pathToCheck, defaultFile):
             mylog(
                 "none",
                 [
-                    "[Setup] ⚠ ERROR copying ("
-                    + defaultFile
-                    + ") to ("
-                    + pathToCheck
-                    + "). Make sure the app has Read & Write access to the parent directory."
+                    "[Setup] ⚠ ERROR copying (" + defaultFile + ") to (" + pathToCheck + "). Make sure the app has Read & Write access to the parent directory."
                 ],
             )
         else:
             mylog(
                 "none",
                 [
-                    "[Setup] ("
-                    + defaultFile
-                    + ") copied over successfully to ("
-                    + pathToCheck
-                    + ")."
+                    "[Setup] (" + defaultFile + ") copied over successfully to (" + pathToCheck + ")."
                 ],
             )
 
@@ -116,10 +97,7 @@ def initialiseFile(pathToCheck, defaultFile):
         mylog(
             "none",
             [
-                "[Setup] ⚠ ERROR copying ("
-                + defaultFile
-                + "). Make sure the app has Read & Write access to "
-                + pathToCheck
+                "[Setup] ⚠ ERROR copying (" + defaultFile + "). Make sure the app has Read & Write access to " + pathToCheck
             ],
         )
         mylog("none", [e.output])
@@ -130,16 +108,13 @@ def filePermissions():
     # check and initialize .conf
     (confR_access, dbR_access) = checkPermissionsOK()  # Initial check
 
-    if confR_access == False:
+    if confR_access is False:
         initialiseFile(fullConfPath, f"{INSTALL_PATH}/back/app.conf")
 
     # check and initialize .db
-    if dbR_access == False:
+    if dbR_access is False:
         initialiseFile(fullDbPath, f"{INSTALL_PATH}/back/app.db")
 
     # last attempt
     fixPermissions()
 
 
 # -------------------------------------------------------------------------------
 # File manipulation methods
@@ -292,7 +267,7 @@ def get_setting_value(key):
             value = setting_value_to_python_type(set_type, set_value)
         else:
             value = setting_value_to_python_type(set_type, str(set_value))
 
         SETTINGS_SECONDARYCACHE[key] = value
 
     return value
@@ -382,7 +357,7 @@ def setting_value_to_python_type(set_type, set_value):
         if isinstance(set_value, str):
             try:
                 value = json.loads(set_value.replace("'", "\""))
 
             except json.JSONDecodeError as e:
                 mylog(
                     "none",
@@ -413,17 +388,12 @@ def setting_value_to_python_type(set_type, set_value):
         value = set_value
 
     elif (
-        dataType == "string"
-        and elementType == "input"
-        and any(opt.get("readonly") == "true" for opt in elementOptions)
+        dataType == "string" and elementType == "input" and any(opt.get("readonly") == "true" for opt in elementOptions)
     ):
         value = reverseTransformers(str(set_value), transformers)
 
     elif (
-        dataType == "string"
-        and elementType == "input"
-        and any(opt.get("type") == "password" for opt in elementOptions)
-        and "sha256" in transformers
+        dataType == "string" and elementType == "input" and any(opt.get("type") == "password" for opt in elementOptions) and "sha256" in transformers
     ):
         value = hashlib.sha256(set_value.encode()).hexdigest()
 
@@ -602,23 +572,23 @@ def normalize_string(text):
 # -------------------------------------------------------------------------------
 
 
-# -------------------------------------------------------------------------------------------
-def is_random_mac(mac: str) -> bool:
-    """Determine if a MAC address is random, respecting user-defined prefixes not to mark as random."""
+# # -------------------------------------------------------------------------------------------
+# def is_random_mac(mac: str) -> bool:
+#     """Determine if a MAC address is random, respecting user-defined prefixes not to mark as random."""
 
-    is_random = mac[1].upper() in ["2", "6", "A", "E"]
+#     is_random = mac[1].upper() in ["2", "6", "A", "E"]
 
-    # Get prefixes from settings
-    prefixes = get_setting_value("UI_NOT_RANDOM_MAC")
+#     # Get prefixes from settings
+#     prefixes = get_setting_value("UI_NOT_RANDOM_MAC")
 
-    # If detected as random, make sure it doesn't start with a prefix the user wants to exclude
-    if is_random:
-        for prefix in prefixes:
-            if mac.upper().startswith(prefix.upper()):
-                is_random = False
-                break
+#     # If detected as random, make sure it doesn't start with a prefix the user wants to exclude
+#     if is_random:
+#         for prefix in prefixes:
+#             if mac.upper().startswith(prefix.upper()):
+#                 is_random = False
+#                 break
 
-    return is_random
+#     return is_random
 
 
 # -------------------------------------------------------------------------------------------
@@ -653,6 +623,7 @@ def extract_ip_addresses(text):
 # -------------------------------------------------------------------------------
 # Helper function to determine if a MAC address is random
 def is_random_mac(mac):
     """Determine if a MAC address is random, respecting user-defined prefixes not to mark as random."""
+    # Check if second character matches "2", "6", "A", "E" (case insensitive)
     is_random = mac[1].upper() in ["2", "6", "A", "E"]
 
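Note: the added comment documents what the digit test encodes. The second hex digit being 2, 6, A, or E is exactly the set of low-nibble values where the locally-administered bit of the first octet is set and the multicast bit is clear, which is what randomized unicast MACs use. A standalone sketch:

```python
# Sketch of the bit-level meaning behind the 2/6/A/E digit check.
def looks_randomized(mac: str) -> bool:
    first_octet = int(mac.split(":")[0], 16)
    # bit 0 = multicast, bit 1 = locally administered; randomized unicast
    # MACs have the local bit set and the multicast bit clear.
    return (first_octet & 0b11) == 0b10

for mac in ["02:00:5e:00:53:01", "a6:11:22:33:44:55", "00:1a:2b:3c:4d:5e"]:
    by_bits = looks_randomized(mac)
    by_digit = mac[1].upper() in ["2", "6", "A", "E"]
    print(mac, by_bits, by_digit)  # the two tests agree
```
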
@@ -773,7 +744,6 @@ def getBuildTimeStampAndVersion():
     return tuple(results)
 
 
-
 # -------------------------------------------------------------------------------
 def checkNewVersion():
     mylog("debug", ["[Version check] Checking if new version available"])
 
@@ -8,9 +8,9 @@ import shutil
|
||||
import re
|
||||
|
||||
# Register NetAlertX libraries
|
||||
import conf
|
||||
from const import fullConfPath, applicationPath, fullConfFolder, default_tz
|
||||
from helper import getBuildTimeStampAndVersion, fixPermissions, collect_lang_strings, updateSubnets, isJsonObject, setting_value_to_python_type, get_setting_value, generate_random_string
|
||||
import conf
|
||||
from const import fullConfPath, fullConfFolder, default_tz
|
||||
from helper import getBuildTimeStampAndVersion, fixPermissions, collect_lang_strings, updateSubnets, generate_random_string
|
||||
from utils.datetime_utils import timeNowDB
|
||||
from app_state import updateState
|
||||
from logger import mylog
|
||||
@@ -19,7 +19,6 @@ from scheduler import schedule_class
|
||||
from plugin import plugin_manager, print_plugin_info
|
||||
from utils.plugin_utils import get_plugins_configs, get_set_value_for_init
|
||||
from messaging.in_app import write_notification
|
||||
from utils.crypto_utils import get_random_bytes
|
||||
|
||||
# ===============================================================================
|
||||
# Initialise user defined values
|
||||
@@ -59,7 +58,7 @@ def ccd(
|
||||
result = default
|
||||
|
||||
# Use existing value if already supplied, otherwise default value is used
|
||||
if forceDefault == False and key in config_dir:
|
||||
if forceDefault is False and key in config_dir:
|
||||
result = config_dir[key]
|
||||
|
||||
# Single quotes might break SQL queries, replacing them
|
||||
@@ -216,7 +215,7 @@ def importConfigs(pm, db, all_plugins):
|
||||
[],
|
||||
c_d,
|
||||
"Loaded plugins",
|
||||
'{"dataType":"array","elements":[{"elementType":"select","elementHasInputValue":1,"elementOptions":[{"multiple":"true","ordeable":"true"}],"transformers":[]},{"elementType":"button","elementOptions":[{"sourceSuffixes":[]},{"separator":""},{"cssClasses":"col-xs-12"},{"onClick":"selectChange(this)"},{"getStringKey":"Gen_Change"}],"transformers":[]}]}',
|
||||
'{"dataType":"array","elements":[{"elementType":"select","elementHasInputValue":1,"elementOptions":[{"multiple":"true","ordeable":"true"}],"transformers":[]},{"elementType":"button","elementOptions":[{"sourceSuffixes":[]},{"separator":""},{"cssClasses":"col-xs-12"},{"onClick":"selectChange(this)"},{"getStringKey":"Gen_Change"}],"transformers":[]}]}', # noqa: E501
|
||||
"[]",
|
||||
"General",
|
||||
)
|
||||
@@ -234,7 +233,7 @@ def importConfigs(pm, db, all_plugins):
|
||||
["192.168.1.0/24 --interface=eth1", "192.168.1.0/24 --interface=eth0"],
|
||||
c_d,
|
||||
"Subnets to scan",
|
||||
"""{"dataType": "array","elements": [{"elementType": "input","elementOptions": [{"placeholder": "192.168.1.0/24 --interface=eth1"},{"suffix": "_in"},{"cssClasses": "col-sm-10"},{"prefillValue": "null"}],"transformers": []},{"elementType": "button","elementOptions": [{"sourceSuffixes": ["_in"]},{"separator": ""},{"cssClasses": "col-xs-12"},{"onClick": "addList(this, false)"},{"getStringKey": "Gen_Add"}],"transformers": []},{"elementType": "select","elementHasInputValue": 1,"elementOptions": [{"multiple": "true"},{"readonly": "true"},{"editable": "true"}],"transformers": []},{"elementType": "button","elementOptions": [{"sourceSuffixes": []},{"separator": ""},{"cssClasses": "col-xs-6"},{"onClick": "removeAllOptions(this)"},{"getStringKey": "Gen_Remove_All"}],"transformers": []},{"elementType": "button","elementOptions": [{"sourceSuffixes": []},{"separator": ""},{"cssClasses": "col-xs-6"},{"onClick": "removeFromList(this)"},{"getStringKey": "Gen_Remove_Last"}],"transformers": []}]}""",
|
||||
"""{"dataType": "array","elements": [{"elementType": "input","elementOptions": [{"placeholder": "192.168.1.0/24 --interface=eth1"},{"suffix": "_in"},{"cssClasses": "col-sm-10"},{"prefillValue": "null"}],"transformers": []},{"elementType": "button","elementOptions": [{"sourceSuffixes": ["_in"]},{"separator": ""},{"cssClasses": "col-xs-12"},{"onClick": "addList(this, false)"},{"getStringKey": "Gen_Add"}],"transformers": []},{"elementType": "select","elementHasInputValue": 1,"elementOptions": [{"multiple": "true"},{"readonly": "true"},{"editable": "true"}],"transformers": []},{"elementType": "button","elementOptions": [{"sourceSuffixes": []},{"separator": ""},{"cssClasses": "col-xs-6"},{"onClick": "removeAllOptions(this)"},{"getStringKey": "Gen_Remove_All"}],"transformers": []},{"elementType": "button","elementOptions": [{"sourceSuffixes": []},{"separator": ""},{"cssClasses": "col-xs-6"},{"onClick": "removeFromList(this)"},{"getStringKey": "Gen_Remove_Last"}],"transformers": []}]}""", # noqa: E501 - inline JSON
|
||||
"[]",
|
||||
"General",
|
||||
)
|
||||
@@ -356,7 +355,7 @@ def importConfigs(pm, db, all_plugins):
],
c_d,
"Network device types",
'{"dataType":"array","elements":[{"elementType":"input","elementOptions":[{"placeholder":"Enter value"},{"suffix":"_in"},{"cssClasses":"col-sm-10"},{"prefillValue":"null"}],"transformers":[]},{"elementType":"button","elementOptions":[{"sourceSuffixes":["_in"]},{"separator":""},{"cssClasses":"col-xs-12"},{"onClick":"addList(this,false)"},{"getStringKey":"Gen_Add"}],"transformers":[]},{"elementType":"select", "elementHasInputValue":1,"elementOptions":[{"multiple":"true"},{"readonly":"true"},{"editable":"true"}],"transformers":[]},{"elementType":"button","elementOptions":[{"sourceSuffixes":[]},{"separator":""},{"cssClasses":"col-xs-6"},{"onClick":"removeAllOptions(this)"},{"getStringKey":"Gen_Remove_All"}],"transformers":[]},{"elementType":"button","elementOptions":[{"sourceSuffixes":[]},{"separator":""},{"cssClasses":"col-xs-6"},{"onClick":"removeFromList(this)"},{"getStringKey":"Gen_Remove_Last"}],"transformers":[]}]}',
'{"dataType":"array","elements":[{"elementType":"input","elementOptions":[{"placeholder":"Enter value"},{"suffix":"_in"},{"cssClasses":"col-sm-10"},{"prefillValue":"null"}],"transformers":[]},{"elementType":"button","elementOptions":[{"sourceSuffixes":["_in"]},{"separator":""},{"cssClasses":"col-xs-12"},{"onClick":"addList(this,false)"},{"getStringKey":"Gen_Add"}],"transformers":[]},{"elementType":"select", "elementHasInputValue":1,"elementOptions":[{"multiple":"true"},{"readonly":"true"},{"editable":"true"}],"transformers":[]},{"elementType":"button","elementOptions":[{"sourceSuffixes":[]},{"separator":""},{"cssClasses":"col-xs-6"},{"onClick":"removeAllOptions(this)"},{"getStringKey":"Gen_Remove_All"}],"transformers":[]},{"elementType":"button","elementOptions":[{"sourceSuffixes":[]},{"separator":""},{"cssClasses":"col-xs-6"},{"onClick":"removeFromList(this)"},{"getStringKey":"Gen_Remove_Last"}],"transformers":[]}]}', # noqa: E501 - inline JSON
"[]",
"General",
)
@@ -374,7 +373,7 @@ def importConfigs(pm, db, all_plugins):
"t_" + generate_random_string(20),
c_d,
"API token",
'{"dataType": "string","elements": [{"elementType": "input","elementHasInputValue": 1,"elementOptions": [{ "cssClasses": "col-xs-12" }],"transformers": []},{"elementType": "button","elementOptions": [{ "getStringKey": "Gen_Generate" },{ "customParams": "API_TOKEN" },{ "onClick": "generateApiToken(this, 20)" },{ "cssClasses": "col-xs-12" }],"transformers": []}]}',
'{"dataType": "string","elements": [{"elementType": "input","elementHasInputValue": 1,"elementOptions": [{ "cssClasses": "col-xs-12" }],"transformers": []},{"elementType": "button","elementOptions": [{ "getStringKey": "Gen_Generate" },{ "customParams": "API_TOKEN" },{ "onClick": "generateApiToken(this, 20)" },{ "cssClasses": "col-xs-12" }],"transformers": []}]}', # noqa: E501 - inline JSON
"[]",
"General",
)
@@ -386,7 +385,7 @@ def importConfigs(pm, db, all_plugins):
c_d,
"Language Interface",
'{"dataType":"string", "elements": [{"elementType" : "select", "elementOptions" : [] ,"transformers": []}]}',
"['English (en_us)', 'Arabic (ar_ar)', 'Catalan (ca_ca)', 'Czech (cs_cz)', 'German (de_de)', 'Spanish (es_es)', 'Farsi (fa_fa)', 'French (fr_fr)', 'Italian (it_it)', 'Japanese (ja_jp)', 'Norwegian (nb_no)', 'Polish (pl_pl)', 'Portuguese (pt_br)', 'Portuguese (pt_pt)', 'Russian (ru_ru)', 'Swedish (sv_sv)', 'Turkish (tr_tr)', 'Ukrainian (uk_ua)', 'Chinese (zh_cn)']",
"['English (en_us)', 'Arabic (ar_ar)', 'Catalan (ca_ca)', 'Czech (cs_cz)', 'German (de_de)', 'Spanish (es_es)', 'Farsi (fa_fa)', 'French (fr_fr)', 'Italian (it_it)', 'Japanese (ja_jp)', 'Norwegian (nb_no)', 'Polish (pl_pl)', 'Portuguese (pt_br)', 'Portuguese (pt_pt)', 'Russian (ru_ru)', 'Swedish (sv_sv)', 'Turkish (tr_tr)', 'Ukrainian (uk_ua)', 'Chinese (zh_cn)']", # noqa: E501 - inline JSON
"UI",
)

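These options/metadata strings are inline JSON that the frontend renders into form controls (inputs, add/remove buttons, selects). A minimal sketch of inspecting such a definition from Python; the parse_elements helper is hypothetical, for illustration only:

import json

def parse_elements(metadata_str):
    # List the UI element types declared by an inline settings definition
    metadata = json.loads(metadata_str)
    return [el.get("elementType") for el in metadata.get("elements", [])]

# e.g. the API token definition above yields ['input', 'button']
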
@@ -483,9 +482,7 @@ def importConfigs(pm, db, all_plugins):

# only include loaded plugins, and the ones that are enabled
if (
pref in conf.LOADED_PLUGINS
or plugin_run != "disabled"
or plugin_run is None
pref in conf.LOADED_PLUGINS or plugin_run != "disabled" or plugin_run is None
):
print_plugin_info(plugin, ["display_name", "description"])

@@ -524,9 +521,7 @@ def importConfigs(pm, db, all_plugins):
if "popupForm" in option:
for popup_entry in option["popupForm"]:
popup_pref = (
key
+ "_popupform_"
+ popup_entry.get("function", "")
key + "_popupform_" + popup_entry.get("function", "")
)
stringSqlParams = collect_lang_strings(
popup_entry, popup_pref, stringSqlParams
@@ -606,7 +601,7 @@ def importConfigs(pm, db, all_plugins):
# Loop through settings_override dictionary
for setting_name, value in settings_override.items():
# Ensure the value is treated as a string and passed directly
if isinstance(value, str) == False:
if isinstance(value, str) is False:
value = str(value)

# Log the value being passed
@@ -669,23 +664,31 @@ def importConfigs(pm, db, all_plugins):

# -----------------
# HANDLE APP was upgraded message - clear cache

# Check if app was upgraded

buildTimestamp, new_version = getBuildTimeStampAndVersion()
prev_version = conf.VERSION if conf.VERSION != '' else "unknown"

mylog('debug', [f"[Config] buildTimestamp | prev_version | .VERSION file: '{buildTimestamp}|{prev_version}|{new_version}'"])

if str(prev_version) != str(new_version):

mylog('none', ['[Config] App upgraded 🚀'])

mylog('none', ['[Config] App upgraded 🚀'])

# ccd(key, default, config_dir, name, inputtype, options, group, events=None, desc="", setJsonMetadata=None, overrideTemplate=None, forceDefault=False)
ccd('VERSION', new_version , c_d, '_KEEP_', '_KEEP_', '_KEEP_', '_KEEP_', None, "_KEEP_", None, None, True)

write_notification(f'[Upgrade] : App upgraded from <code>{prev_version}</code> to <code>{new_version}</code> 🚀 Please clear the cache: <ol> <li>Click OK below</li> <li>Clear the browser cache (shift + browser refresh button)</li> <li> Clear app cache with the <i class="fa-solid fa-rotate"></i> (reload) button in the header</li><li>Go to Settings and click Save</li> </ol> Check out new features and what has changed in the <a href="https://github.com/jokob-sk/NetAlertX/releases" target="_blank">📓 release notes</a>.', 'interrupt', timeNowDB())

write_notification(f'[Upgrade]: App upgraded from <code>{prev_version}</code> to \
<code>{new_version}</code> 🚀 Please clear the cache: \
<ol> <li>Click OK below</li> <li>Clear the browser cache (shift + \
browser refresh button)</li> <li> Clear app cache with the <i class="fa-solid fa-rotate"></i> \
(reload) button in the header</li><li>Go to Settings and click Save</li> </ol>\
Check out new features and what has changed in the \
<a href="https://github.com/jokob-sk/NetAlertX/releases" target="_blank">📓 release notes</a>.',
'interrupt',
timeNowDB()
)
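Given the ccd() signature documented in the comment above — ccd(key, default, config_dir, name, inputtype, options, group, ...) — a minimal usage sketch; the 'UI_DEMO' key and all of its values are hypothetical:

ccd(
    'UI_DEMO',        # key (hypothetical)
    'default-value',  # default
    c_d,              # config_dir
    'Demo setting',   # name shown in the UI
    '{"dataType":"string","elements":[{"elementType":"input","elementOptions":[],"transformers":[]}]}',
    '[]',             # options
    'General',        # group
)
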

# -----------------
# Initialization finished, update DB and API endpoints
@@ -717,13 +720,13 @@ def importConfigs(pm, db, all_plugins):
# settingsImported = None (timestamp),
# showSpinner = False (1/0),
# graphQLServerStarted = 1 (1/0))
updateState("Config imported", conf.lastImportedConfFile, conf.lastImportedConfFile, False, 1, None, None, new_version)

updateState("Config imported", conf.lastImportedConfFile, conf.lastImportedConfFile, False, 1, None, None, new_version)

msg = '[Config] Imported new settings config'
mylog('minimal', msg)

# front end app log logging
write_notification(msg, 'info', timeNowDB())
write_notification(msg, 'info', timeNowDB())

return pm, all_plugins, True

@@ -1,19 +1,14 @@
import sys
import io
import datetime
# import datetime
import threading
import queue
import logging
from zoneinfo import ZoneInfo

# Register NetAlertX directories
INSTALL_PATH="/app"

sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
# from zoneinfo import ZoneInfo

# NetAlertX imports
import conf
from const import *
from const import logPath
from utils.datetime_utils import timeNowTZ

@@ -11,13 +11,9 @@ from flask import jsonify
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
sys.path.extend([f"{INSTALL_PATH}/server"])

from const import apiPath
from logger import mylog

import conf
from const import applicationPath, logPath, apiPath, confFileName, reportTemplatesPath
from logger import mylog
from utils.datetime_utils import timeNowDB
from const import apiPath # noqa: E402 [flake8 lint suppression]
from logger import mylog # noqa: E402 [flake8 lint suppression]
from utils.datetime_utils import timeNowDB # noqa: E402 [flake8 lint suppression]

NOTIFICATION_API_FILE = apiPath + 'user_notifications.json'
@@ -38,7 +34,7 @@ def write_notification(content, level="alert", timestamp=None):
None
"""
if timestamp is None:
timestamp = timeNowDB()
timestamp = timeNowDB()

# Generate GUID
guid = str(uuid.uuid4())
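With timestamp now defaulting inside the function, callers can omit it; a minimal usage sketch:

# Timestamp defaults to timeNowDB() when omitted
write_notification("Scan finished", "info")

# An explicit timestamp still works as before
write_notification("Custom event", "alert", timeNowDB())
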
@@ -18,12 +18,12 @@ import sys
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
sys.path.extend([f"{INSTALL_PATH}/server"])

from helper import (
from helper import ( # noqa: E402 [flake8 lint suppression]
get_setting_value,
)
from logger import mylog
from db.sql_safe_builder import create_safe_condition_builder
from utils.datetime_utils import get_timezone_offset
from logger import mylog # noqa: E402 [flake8 lint suppression]
from db.sql_safe_builder import create_safe_condition_builder # noqa: E402 [flake8 lint suppression]
from utils.datetime_utils import get_timezone_offset # noqa: E402 [flake8 lint suppression]

# ===============================================================================
# REPORTING
@@ -56,14 +56,14 @@ def get_notifications(db):
WHERE eve_PendingAlertEmail = 1 AND eve_EventType not in ('Device Down', 'Down Reconnected', 'New Device' ) AND eve_MAC IN
(
SELECT devMac FROM Devices WHERE devAlertEvents = 0
)""")
)""")

# Disable down/down reconnected notifications on devices where devAlertDown is disabled
sql.execute("""UPDATE Events SET eve_PendingAlertEmail = 0
WHERE eve_PendingAlertEmail = 1 AND eve_EventType in ('Device Down', 'Down Reconnected') AND eve_MAC IN
(
SELECT devMac FROM Devices WHERE devAlertDown = 0
)""")
)""")

sections = get_setting_value("NTFPRCS_INCLUDED_SECTIONS")

@@ -79,20 +79,32 @@ def get_notifications(db):
safe_condition, parameters = condition_builder.get_safe_condition_legacy(
new_dev_condition_setting
)
sqlQuery = """SELECT eve_MAC as MAC, eve_DateTime as Datetime, devLastIP as IP, eve_EventType as "Event Type", devName as "Device name", devComments as Comments FROM Events_Devices
WHERE eve_PendingAlertEmail = 1
sqlQuery = """SELECT
eve_MAC as MAC,
eve_DateTime as Datetime,
devLastIP as IP,
eve_EventType as "Event Type",
devName as "Device name",
devComments as Comments FROM Events_Devices
WHERE eve_PendingAlertEmail = 1
AND eve_EventType = 'New Device' {}
ORDER BY eve_DateTime""".format(safe_condition)
ORDER BY eve_DateTime""".format(safe_condition)
except Exception as e:
mylog(
"verbose",
["[Notification] Error building safe condition for new devices: ", e],
)
# Fall back to safe default (no additional conditions)
sqlQuery = """SELECT eve_MAC as MAC, eve_DateTime as Datetime, devLastIP as IP, eve_EventType as "Event Type", devName as "Device name", devComments as Comments FROM Events_Devices
WHERE eve_PendingAlertEmail = 1
sqlQuery = """SELECT
eve_MAC as MAC,
eve_DateTime as Datetime,
devLastIP as IP,
eve_EventType as "Event Type",
devName as "Device name",
devComments as Comments FROM Events_Devices
WHERE eve_PendingAlertEmail = 1
AND eve_EventType = 'New Device'
ORDER BY eve_DateTime"""
ORDER BY eve_DateTime"""
parameters = {}

mylog("debug", ["[Notification] new_devices SQL query: ", sqlQuery])
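The pattern above, condensed: build the user-configurable WHERE fragment through the safe condition builder and fall back to a fixed query with empty parameters if that fails. A sketch under the assumption (consistent with the code above) that get_safe_condition_legacy() returns a (sql_fragment, params) pair:

def build_query(condition_builder, condition_setting, base_query):
    # base_query contains one '{}' slot for the optional condition fragment
    try:
        safe_condition, parameters = condition_builder.get_safe_condition_legacy(condition_setting)
        return base_query.format(safe_condition), parameters
    except Exception as e:
        mylog("verbose", ["[Notification] Error building safe condition: ", e])
        return base_query.format(""), {}  # safe default: no extra conditions
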
@@ -114,17 +126,17 @@ def get_notifications(db):
minutes = int(get_setting_value("NTFPRCS_alert_down_time") or 0)
tz_offset = get_timezone_offset()
sqlQuery = f"""
SELECT devName, eve_MAC, devVendor, eve_IP, eve_DateTime, eve_EventType
SELECT devName, eve_MAC, devVendor, eve_IP, eve_DateTime, eve_EventType
FROM Events_Devices AS down_events
WHERE eve_PendingAlertEmail = 1
AND down_events.eve_EventType = 'Device Down'
WHERE eve_PendingAlertEmail = 1
AND down_events.eve_EventType = 'Device Down'
AND eve_DateTime < datetime('now', '-{minutes} minutes', '{tz_offset}')
AND NOT EXISTS (
SELECT 1
FROM Events AS connected_events
WHERE connected_events.eve_MAC = down_events.eve_MAC
AND connected_events.eve_EventType = 'Connected'
AND connected_events.eve_DateTime > down_events.eve_DateTime
AND connected_events.eve_DateTime > down_events.eve_DateTime
)
ORDER BY down_events.eve_DateTime;
"""
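The datetime() modifiers above implement the "only alert after N minutes down" debounce: a Device Down event qualifies once its timestamp is older than now minus the configured delay, shifted by the instance timezone. For example, with illustrative values:

minutes = 10           # NTFPRCS_alert_down_time (illustrative value)
tz_offset = "+02:00"   # from get_timezone_offset() (illustrative value)

# The f-string above expands to this SQLite expression:
condition = f"eve_DateTime < datetime('now', '-{minutes} minutes', '{tz_offset}')"
# -> eve_DateTime < datetime('now', '-10 minutes', '+02:00')
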
@@ -181,20 +193,32 @@ def get_notifications(db):
safe_condition, parameters = condition_builder.get_safe_condition_legacy(
event_condition_setting
)
sqlQuery = """SELECT eve_MAC as MAC, eve_DateTime as Datetime, devLastIP as IP, eve_EventType as "Event Type", devName as "Device name", devComments as Comments FROM Events_Devices
WHERE eve_PendingAlertEmail = 1
sqlQuery = """SELECT
eve_MAC as MAC,
eve_DateTime as Datetime,
devLastIP as IP,
eve_EventType as "Event Type",
devName as "Device name",
devComments as Comments FROM Events_Devices
WHERE eve_PendingAlertEmail = 1
AND eve_EventType IN ('Connected', 'Down Reconnected', 'Disconnected','IP Changed') {}
ORDER BY eve_DateTime""".format(safe_condition)
ORDER BY eve_DateTime""".format(safe_condition)
except Exception as e:
mylog(
"verbose",
["[Notification] Error building safe condition for events: ", e],
)
# Fall back to safe default (no additional conditions)
sqlQuery = """SELECT eve_MAC as MAC, eve_DateTime as Datetime, devLastIP as IP, eve_EventType as "Event Type", devName as "Device name", devComments as Comments FROM Events_Devices
WHERE eve_PendingAlertEmail = 1
sqlQuery = """SELECT
eve_MAC as MAC,
eve_DateTime as Datetime,
devLastIP as IP,
eve_EventType as "Event Type",
devName as "Device name",
devComments as Comments FROM Events_Devices
WHERE eve_PendingAlertEmail = 1
AND eve_EventType IN ('Connected', 'Down Reconnected', 'Disconnected','IP Changed')
ORDER BY eve_DateTime"""
ORDER BY eve_DateTime"""
parameters = {}

mylog("debug", ["[Notification] events SQL query: ", sqlQuery])
@@ -208,7 +232,17 @@ def get_notifications(db):

if "plugins" in sections:
# Compose Plugins Section
sqlQuery = """SELECT Plugin, Object_PrimaryId, Object_SecondaryId, DateTimeChanged, Watched_Value1, Watched_Value2, Watched_Value3, Watched_Value4, Status from Plugins_Events"""
sqlQuery = """SELECT
Plugin,
Object_PrimaryId,
Object_SecondaryId,
DateTimeChanged,
Watched_Value1,
Watched_Value2,
Watched_Value3,
Watched_Value4,
Status
from Plugins_Events"""

# Get the events as JSON
json_obj = db.get_table_as_json(sqlQuery)

@@ -1,13 +1,12 @@
import json
import uuid
import socket
import subprocess
from yattag import indent
from json2table import convert

# Register NetAlertX modules
import conf
from const import applicationPath, logPath, apiPath, reportTemplatesPath
from const import logPath, apiPath, reportTemplatesPath
from logger import mylog, Logger
from helper import (
generate_mac_links,
@@ -62,11 +61,7 @@ class NotificationInstance:

# Check if nothing to report, end
if (
JSON["new_devices"] == []
and JSON["down_devices"] == []
and JSON["events"] == []
and JSON["plugins"] == []
and JSON["down_reconnected"] == []
JSON["new_devices"] == [] and JSON["down_devices"] == [] and JSON["events"] == [] and JSON["plugins"] == [] and JSON["down_reconnected"] == []
):
self.HasNotifications = False
else:
@@ -88,8 +83,6 @@ class NotificationInstance:
# else:
# mylog('debug', ['[Notification] notiStruc:', json.dumps(notiStruc.__dict__, indent=4)])

Text = ""
HTML = ""
template_file_path = reportTemplatesPath + "report_template.html"

# Open text Template
@@ -274,7 +267,7 @@ class NotificationInstance:
# Clear the Pending Email flag from all events and devices
def clearPendingEmailFlag(self):

# Clean Pending Alert Events
# Clean Pending Alert Events
self.db.sql.execute("""
UPDATE Devices SET devLastNotification = ?
WHERE devMac IN (

@@ -100,7 +100,7 @@ class UserEventsQueueInstance:
if not action or not isinstance(action, str):
msg = "[UserEventsQueueInstance] Invalid or missing action"
mylog('none', [msg])

return False, msg

try:
@@ -109,15 +109,11 @@ class UserEventsQueueInstance:

msg = f'[UserEventsQueueInstance] Action "{action}" added to the execution queue.'
mylog('minimal', [msg])

return True, msg

except Exception as e:
msg = f"[UserEventsQueueInstance] ERROR Failed to write to {self.log_file}: {e}"
mylog('none', [msg])

return False, msg

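For context, callers consume this queue API as a (success, message) pair; a minimal usage sketch — the enqueue method name add_event and the action string are assumed here for illustration (only has_update_devices() appears verbatim later in this diff):

queue = UserEventsQueueInstance()
ok, msg = queue.add_event("update_api|devices")  # hypothetical method and action
if not ok:
    mylog('none', [msg])  # surface the failure exactly as the queue logged it
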
server/plugin.py
@@ -9,12 +9,21 @@ from concurrent.futures import ThreadPoolExecutor, as_completed
# Register NetAlertX modules
import conf
from const import pluginsPath, logPath, applicationPath, reportTemplatesPath
from logger import mylog, Logger
from helper import get_file_content, write_file, get_setting, get_setting_value
from logger import mylog, Logger
from helper import get_file_content, get_setting, get_setting_value
from utils.datetime_utils import timeNowTZ, timeNowDB
from app_state import updateState
from api import update_api
from utils.plugin_utils import logEventStatusCounts, get_plugin_string, get_plugin_setting_obj, print_plugin_info, list_to_csv, combine_plugin_objects, resolve_wildcards_arr, handle_empty, custom_plugin_decoder, decode_and_rename_files
from utils.plugin_utils import (
logEventStatusCounts,
get_plugin_setting_obj,
print_plugin_info,
list_to_csv,
combine_plugin_objects,
resolve_wildcards_arr,
handle_empty,
decode_and_rename_files
)
from models.notification_instance import NotificationInstance
from messaging.in_app import write_notification
from models.user_events_queue_instance import UserEventsQueueInstance
@@ -57,13 +66,7 @@ class plugin_manager:
# Header
updateState("Run: Plugins")

mylog(
"debug",
[
"[Plugins] Check if any plugins need to be executed on run type: ",
runType,
],
)
mylog("debug", f"[Plugins] Check if any plugins need to be executed on run type: {runType}")

for plugin in self.all_plugins:
shouldRun = False
@@ -72,7 +75,7 @@ class plugin_manager:
# 🔹 Lookup RUN setting from cache instead of calling get_plugin_setting_obj each time
run_setting = self._cache["settings"].get(prefix, {}).get("RUN")

if run_setting != None and run_setting["value"] == runType:
if run_setting is not None and run_setting["value"] == runType:
if runType != "schedule":
shouldRun = True
elif runType == "schedule":
@@ -91,10 +94,7 @@ class plugin_manager:

# 🔹 CMD also retrieved from cache
cmd_setting = self._cache["settings"].get(prefix, {}).get("CMD")
mylog(
"debug",
["[Plugins] CMD: ", cmd_setting["value"] if cmd_setting else None],
)
mylog("debug", f"[Plugins] CMD: {cmd_setting['value'] if cmd_setting else None}")

execute_plugin(self.db, self.all_plugins, plugin)

@@ -130,13 +130,7 @@ class plugin_manager:
mylog("debug", ["[check_and_run_user_event] User Execution Queue is empty"])
return # Exit early if the log file is empty
else:
mylog(
"debug",
[
"[check_and_run_user_event] Process User Execution Queue:"
+ ", ".join(map(str, lines))
],
)
mylog("debug", "[check_and_run_user_event] Process User Execution Queue:" + ", ".join(map(str, lines)))

for line in lines:
# Extract event name and parameters from the log line
@@ -160,15 +154,7 @@ class plugin_manager:
update_api(self.db, self.all_plugins, False, param.split(","), True)

else:
mylog(
"minimal",
[
"[check_and_run_user_event] WARNING: Unhandled event in execution queue: ",
event,
" | ",
param,
],
)
mylog("minimal", f"[check_and_run_user_event] WARNING: Unhandled event in execution queue: {event} | {param}")
execution_log.finalize_event(
event
) # Finalize unknown events to remove them
@@ -183,9 +169,9 @@ class plugin_manager:

# -------------------------------------------------------------------------------
def handle_run(self, runType):

mylog('minimal', ['[', timeNowDB(), '] START Run: ', runType])

# run the plugin
for plugin in self.all_plugins:
if plugin["unique_prefix"] == runType:
@@ -201,7 +187,7 @@ class plugin_manager:
pluginsStates={pluginName: current_plugin_state.get(pluginName, {})}
)

mylog('minimal', ['[', timeNowDB(), '] END Run: ', runType])
mylog('minimal', ['[', timeNowDB(), '] END Run: ', runType])

return

@@ -210,7 +196,7 @@ class plugin_manager:
mylog("minimal", ["[", timeNowTZ(), "] [Test] START Test: ", runType])

mylog('minimal', ['[', timeNowDB(), '] [Test] START Test: ', runType])

# Prepare test samples
sample_json = json.loads(
get_file_content(reportTemplatesPath + "webhook_json_sample.json")
@@ -312,7 +298,7 @@ class plugin_param:
if param["type"] == "setting":
inputValue = get_setting(param["value"])

if inputValue != None:
if inputValue is not None:
setVal = inputValue["setValue"] # setting value
setTyp = inputValue["setType"] # setting type

@@ -337,9 +323,7 @@ class plugin_param:
resolved = list_to_csv(setVal)

else:
mylog(
"none", ["[Plugins] ⚠ ERROR: Parameter probably not converted."]
)
mylog("none", "[Plugins] ⚠ ERROR: Parameter probably not converted.")
return json.dumps(setVal)

# Get SQL result
@@ -390,15 +374,10 @@ def run_plugin(command, set_RUN_TIMEOUT, plugin):
)
except subprocess.CalledProcessError as e:
mylog("none", [e.output])
mylog("none", ["[Plugins] ⚠ ERROR - enable LOG_LEVEL=debug and check logs"])
mylog("none", "[Plugins] ⚠ ERROR - enable LOG_LEVEL=debug and check logs")
return None
except subprocess.TimeoutExpired:
mylog(
"none",
[
f"[Plugins] ⚠ ERROR - TIMEOUT - the plugin {plugin['unique_prefix']} forcefully terminated as timeout reached. Increase TIMEOUT setting and scan interval."
],
)
mylog("none", f"[Plugins] ⚠ ERROR - TIMEOUT - the plugin {plugin['unique_prefix']} forcefully terminated as timeout reached. Increase TIMEOUT setting and scan interval.")
return None

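For reference, a self-contained sketch of the guard pattern used in run_plugin above — a check_output-style call bounded by a timeout, returning None on either failure mode (command and timeout values are illustrative):

import subprocess

def run_with_timeout(command, timeout_sec):
    # Mirrors run_plugin: return output, or None on non-zero exit / timeout
    try:
        return subprocess.check_output(
            command,
            universal_newlines=True,
            stderr=subprocess.STDOUT,
            timeout=timeout_sec,
        )
    except subprocess.CalledProcessError as e:
        print(e.output)  # plugin exited non-zero; surface its output
        return None
    except subprocess.TimeoutExpired:
        print(f"Timed out after {timeout_sec}s")
        return None
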
@@ -411,11 +390,11 @@ def execute_plugin(db, all_plugins, plugin):
set = get_plugin_setting_obj(plugin, "CMD")

# handle missing "function":"CMD" setting
if set == None:
if set is None:
return

set_CMD = set["value"]

# Replace hardcoded /app paths with environment-aware path
if "/app/front/plugins" in set_CMD:
set_CMD = set_CMD.replace("/app/front/plugins", str(pluginsPath))
@@ -441,13 +420,8 @@ def execute_plugin(db, all_plugins, plugin):
for param in plugin["params"]:
tempParam = plugin_param(param, plugin, db)

if tempParam.resolved == None:
mylog(
"none",
[
f'[Plugins] The parameter "name":"{tempParam.name}" for "value": {tempParam.value} was resolved as None'
],
)
if tempParam.resolved is None:
mylog("none", f'[Plugins] The parameter "name":"{tempParam.name}" for "value": {tempParam.value} was resolved as None')

else:
# params.append( [param["name"], resolved] )
@@ -456,14 +430,9 @@ def execute_plugin(db, all_plugins, plugin):
if tempParam.multiplyTimeout:
set_RUN_TIMEOUT = set_RUN_TIMEOUT * tempParam.paramValuesCount

mylog(
"debug",
[
f'[Plugins] The parameter "name":"{param["name"]}" will multiply the timeout {tempParam.paramValuesCount} times. Total timeout: {set_RUN_TIMEOUT}s'
],
)
mylog("debug", f'[Plugins] The parameter "name":"{param["name"]}" will multiply timeout {tempParam.paramValuesCount}x. Total timeout: {set_RUN_TIMEOUT}s')

mylog("debug", ["[Plugins] Timeout: ", set_RUN_TIMEOUT])
mylog("debug", f"[Plugins] Timeout: {set_RUN_TIMEOUT}")

# build SQL query parameters to insert into the DB
sqlParams = []
@@ -475,8 +444,8 @@ def execute_plugin(db, all_plugins, plugin):
command = resolve_wildcards_arr(set_CMD.split(), params)

# Execute command
mylog("verbose", ["[Plugins] Executing: ", set_CMD])
mylog("debug", ["[Plugins] Resolved : ", command])
mylog("verbose", f"[Plugins] Executing: {set_CMD}")
mylog("debug", f"[Plugins] Resolved : {command}")

# Using ThreadPoolExecutor to handle concurrent subprocesses
with ThreadPoolExecutor(max_workers=5) as executor:
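A minimal sketch of the ThreadPoolExecutor pattern named in the comment above, collecting each subprocess result as it completes (reusing the hypothetical run_with_timeout helper sketched earlier):

from concurrent.futures import ThreadPoolExecutor, as_completed

def run_all(commands, timeout_sec):
    # Run up to five plugin commands concurrently and gather their outputs
    outputs = []
    with ThreadPoolExecutor(max_workers=5) as executor:
        futures = {executor.submit(run_with_timeout, cmd, timeout_sec): cmd for cmd in commands}
        for future in as_completed(futures):
            result = future.result()
            if result is not None:
                outputs.append(result)
    return outputs
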
@@ -521,12 +490,7 @@ def execute_plugin(db, all_plugins, plugin):
columns = line.split("|")
# There have to be 9 or 13 columns
if len(columns) not in [9, 13]:
mylog(
"none",
[
f"[Plugins] Wrong number of input values, must be 9 or 13, got {len(columns)} from: {line}"
],
)
mylog("none", f"[Plugins] Wrong number of input values, must be 9 or 13, got {len(columns)} from: {line}")
continue # Skip lines with incorrect number of columns

# Common part of the SQL parameters
@@ -581,9 +545,7 @@ def execute_plugin(db, all_plugins, plugin):
# keep current instance log file, delete all from other nodes
if filename != "last_result.log" and os.path.exists(full_path):
os.remove(full_path) # DEBUG:TODO uncomment 🐛
mylog(
"verbose", [f"[Plugins] Processed and deleted file: {full_path} "]
)
mylog("verbose", f"[Plugins] Processed and deleted file: {full_path} ")

# app-db-query
if plugin["data_source"] == "app-db-query":
@@ -591,7 +553,7 @@ def execute_plugin(db, all_plugins, plugin):
q = set_CMD.replace("{s-quote}", "'")

# Execute command
mylog("verbose", ["[Plugins] Executing: ", q])
mylog("verbose", f"[Plugins] Executing: {q}")

# set_CMD should contain a SQL query
arr = db.get_sql_array(q)
@@ -650,7 +612,7 @@ def execute_plugin(db, all_plugins, plugin):
# Append the final parameters to sqlParams
sqlParams.append(tuple(base_params))
else:
mylog("none", ["[Plugins] Skipped invalid sql result"])
mylog("none", "[Plugins] Skipped invalid sql result")

# app-db-query
if plugin["data_source"] == "sqlite-db-query":
@@ -659,19 +621,14 @@ def execute_plugin(db, all_plugins, plugin):
q = set_CMD.replace("{s-quote}", "'")

# Execute command
mylog("verbose", ["[Plugins] Executing: ", q])
mylog("verbose", f"[Plugins] Executing: {q}")

# ------- necessary settings check --------
set = get_plugin_setting_obj(plugin, "DB_PATH")

# handle missing "function":"DB_PATH" setting
if set == None:
mylog(
"none",
[
"[Plugins] ⚠ ERROR: DB_PATH setting for plugin type sqlite-db-query missing."
],
)
if set is None:
mylog("none", "[Plugins] ⚠ ERROR: DB_PATH setting for plugin type sqlite-db-query missing.")
return

fullSqlitePath = set["value"]
@@ -679,25 +636,14 @@ def execute_plugin(db, all_plugins, plugin):
# try attaching the sqlite DB
try:
sql.execute(
"ATTACH DATABASE '"
+ fullSqlitePath
+ "' AS EXTERNAL_"
+ plugin["unique_prefix"]
"ATTACH DATABASE '" + fullSqlitePath + "' AS EXTERNAL_" + plugin["unique_prefix"]
)
arr = db.get_sql_array(q)
sql.execute("DETACH DATABASE EXTERNAL_" + plugin["unique_prefix"])

except sqlite3.Error as e:
mylog(
"none",
[
f"[Plugins] ⚠ ERROR: DB_PATH setting ({fullSqlitePath}) for plugin {plugin['unique_prefix']}. Did you mount it correctly?"
],
)
mylog(
"none",
["[Plugins] ⚠ ERROR: ATTACH DATABASE failed with SQL ERROR: ", e],
)
mylog("none", f"[Plugins] ⚠ ERROR: DB_PATH setting ({fullSqlitePath}) for plugin {plugin['unique_prefix']}. Did you mount it correctly?")
mylog("none", f"[Plugins] ⚠ ERROR: ATTACH DATABASE failed with SQL ERROR: {e}")
return

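The ATTACH/DETACH pair above lets one connection query an external plugin database under a prefixed schema name. A standalone sqlite3 sketch of the same idea (file and table names are illustrative only):

import sqlite3

conn = sqlite3.connect("app.db")
try:
    conn.execute("ATTACH DATABASE 'external.db' AS EXTERNAL_DEMO")
    rows = conn.execute("SELECT * FROM EXTERNAL_DEMO.some_table").fetchall()
    conn.execute("DETACH DATABASE EXTERNAL_DEMO")
except sqlite3.Error as e:
    # Typically the external file is missing or not mounted into the container
    print(f"ATTACH DATABASE failed: {e}")
finally:
    conn.close()
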
for row in arr:
@@ -748,24 +694,14 @@ def execute_plugin(db, all_plugins, plugin):
# Append the final parameters to sqlParams
sqlParams.append(tuple(base_params))
else:
mylog("none", ["[Plugins] Skipped invalid sql result"])
mylog("none", "[Plugins] Skipped invalid sql result")

# check if the subprocess / SQL query failed / there was no valid output
if len(sqlParams) == 0:
mylog(
"none",
[
f'[Plugins] No output received from the plugin "{plugin["unique_prefix"]}"'
],
)
mylog("none", f'[Plugins] No output received from the plugin "{plugin["unique_prefix"]}"')

else:
mylog(
"verbose",
[
f"[Plugins] SUCCESS for {plugin['unique_prefix']} received {len(sqlParams)} entries"
],
)
mylog("verbose", f"[Plugins] SUCCESS for {plugin['unique_prefix']} received {len(sqlParams)} entries")
# mylog('debug', ['[Plugins] sqlParam entries: ', sqlParams])

# create objects
@@ -782,12 +718,7 @@ def execute_plugin(db, all_plugins, plugin):
# check if we need to update devices api endpoint as well to prevent long user waits on Loading...
userUpdatedDevices = UserEventsQueueInstance().has_update_devices()

mylog(
"verbose",
[
f"[Plugins] Should I update API (userUpdatedDevices): {userUpdatedDevices}"
],
)
mylog("verbose", f"[Plugins] Should I update API (userUpdatedDevices): {userUpdatedDevices}")

if userUpdatedDevices:
endpoints += ["devices"]
@@ -807,7 +738,7 @@ def process_plugin_events(db, plugin, plugEventsArr):

pluginPref = plugin["unique_prefix"]

mylog("verbose", ["[Plugins] Processing : ", pluginPref])
mylog("verbose", f"[Plugins] Processing : {pluginPref}")

try:
# Begin a transaction
@@ -827,20 +758,8 @@ def process_plugin_events(db, plugin, plugEventsArr):
for eve in plugEventsArr:
pluginEvents.append(plugin_object_class(plugin, eve))

mylog(
"debug",
[
"[Plugins] Existing objects from Plugins_Objects: ",
len(pluginObjects),
],
)
mylog(
"debug",
[
"[Plugins] Logged events from the plugin run : ",
len(pluginEvents),
],
)
mylog("debug", f"[Plugins] Existing objects from Plugins_Objects: {len(pluginObjects)}")
mylog("debug", f"[Plugins] Logged events from the plugin run : {len(pluginEvents)}")

# Loop thru all current events and update the status to "exists" if the event matches an existing object
index = 0
@@ -857,8 +776,7 @@ def process_plugin_events(db, plugin, plugEventsArr):
if tmpObjFromEvent.status == "exists":
# compare hash of the changed watched columns for uniqueness - make sure you compare the values with the same idsHash before checking watchedHash
if any(
x.idsHash == tmpObjFromEvent.idsHash
and x.watchedHash != tmpObjFromEvent.watchedHash
x.idsHash == tmpObjFromEvent.idsHash and x.watchedHash != tmpObjFromEvent.watchedHash
for x in pluginObjects
):
pluginEvents[index].status = "watched-changed"
@@ -879,7 +797,7 @@ def process_plugin_events(db, plugin, plugEventsArr):
# if wasn't missing before, mark as changed
if tmpObj.status != "missing-in-last-scan":
tmpObj.changed = timeNowDB()
tmpObj.status = "missing-in-last-scan"
tmpObj.status = "missing-in-last-scan"
# mylog('debug', [f'[Plugins] Missing from last scan (PrimaryID | SecondaryID): {tmpObj.primaryId} | {tmpObj.secondaryId}'])

# Merge existing plugin objects with newly discovered ones and update existing ones with new values
@@ -955,25 +873,17 @@ def process_plugin_events(db, plugin, plugEventsArr):
# combine all DB insert and update events into one for history
history_to_insert.append(values)

mylog("debug", ["[Plugins] pluginEvents count: ", len(pluginEvents)])
mylog("debug", ["[Plugins] pluginObjects count: ", len(pluginObjects)])
mylog("debug", f"[Plugins] pluginEvents count: {len(pluginEvents)}")
mylog("debug", f"[Plugins] pluginObjects count: {len(pluginObjects)}")

mylog(
"debug", ["[Plugins] events_to_insert count: ", len(events_to_insert)]
)
mylog(
"debug", ["[Plugins] history_to_insert count: ", len(history_to_insert)]
)
mylog(
"debug", ["[Plugins] objects_to_insert count: ", len(objects_to_insert)]
)
mylog(
"debug", ["[Plugins] objects_to_update count: ", len(objects_to_update)]
)
mylog("debug", f"[Plugins] events_to_insert count: {len(events_to_insert)}")
mylog("debug", f"[Plugins] history_to_insert count: {len(history_to_insert)}")
mylog("debug", f"[Plugins] objects_to_insert count: {len(objects_to_insert)}")
mylog("debug", f"[Plugins] objects_to_update count: {len(objects_to_update)}")

mylog("trace", ["[Plugins] objects_to_update: ", objects_to_update])
mylog("trace", ["[Plugins] events_to_insert: ", events_to_insert])
mylog("trace", ["[Plugins] history_to_insert: ", history_to_insert])
mylog("trace", f"[Plugins] objects_to_update: {objects_to_update}")
mylog("trace", f"[Plugins] events_to_insert: {events_to_insert}")
mylog("trace", f"[Plugins] history_to_insert: {history_to_insert}")

logEventStatusCounts("pluginEvents", pluginEvents)
logEventStatusCounts("pluginObjects", pluginObjects)
@@ -982,12 +892,12 @@ def process_plugin_events(db, plugin, plugEventsArr):
if objects_to_insert:
sql.executemany(
"""
INSERT INTO Plugins_Objects
("Plugin", "Object_PrimaryID", "Object_SecondaryID", "DateTimeCreated",
"DateTimeChanged", "Watched_Value1", "Watched_Value2", "Watched_Value3",
INSERT INTO Plugins_Objects
("Plugin", "Object_PrimaryID", "Object_SecondaryID", "DateTimeCreated",
"DateTimeChanged", "Watched_Value1", "Watched_Value2", "Watched_Value3",
"Watched_Value4", "Status", "Extra", "UserData", "ForeignKey", "SyncHubNodeName",
"HelpVal1", "HelpVal2", "HelpVal3", "HelpVal4",
"ObjectGUID")
"HelpVal1", "HelpVal2", "HelpVal3", "HelpVal4",
"ObjectGUID")
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
""",
objects_to_insert,
@@ -998,10 +908,10 @@ def process_plugin_events(db, plugin, plugEventsArr):
sql.executemany(
"""
UPDATE Plugins_Objects
SET "Plugin" = ?, "Object_PrimaryID" = ?, "Object_SecondaryID" = ?, "DateTimeCreated" = ?,
"DateTimeChanged" = ?, "Watched_Value1" = ?, "Watched_Value2" = ?, "Watched_Value3" = ?,
"Watched_Value4" = ?, "Status" = ?, "Extra" = ?, "UserData" = ?, "ForeignKey" = ?, "SyncHubNodeName" = ?,
"HelpVal1" = ?, "HelpVal2" = ?, "HelpVal3" = ?, "HelpVal4" = ?,
SET "Plugin" = ?, "Object_PrimaryID" = ?, "Object_SecondaryID" = ?, "DateTimeCreated" = ?,
"DateTimeChanged" = ?, "Watched_Value1" = ?, "Watched_Value2" = ?, "Watched_Value3" = ?,
"Watched_Value4" = ?, "Status" = ?, "Extra" = ?, "UserData" = ?, "ForeignKey" = ?, "SyncHubNodeName" = ?,
"HelpVal1" = ?, "HelpVal2" = ?, "HelpVal3" = ?, "HelpVal4" = ?,
"ObjectGUID" = ?
WHERE "Index" = ?
""",
@@ -1012,12 +922,12 @@ def process_plugin_events(db, plugin, plugEventsArr):
if events_to_insert:
sql.executemany(
"""
INSERT INTO Plugins_Events
("Plugin", "Object_PrimaryID", "Object_SecondaryID", "DateTimeCreated",
"DateTimeChanged", "Watched_Value1", "Watched_Value2", "Watched_Value3",
INSERT INTO Plugins_Events
("Plugin", "Object_PrimaryID", "Object_SecondaryID", "DateTimeCreated",
"DateTimeChanged", "Watched_Value1", "Watched_Value2", "Watched_Value3",
"Watched_Value4", "Status", "Extra", "UserData", "ForeignKey", "SyncHubNodeName",
"HelpVal1", "HelpVal2", "HelpVal3", "HelpVal4",
"ObjectGUID")
"ObjectGUID")
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
""",
events_to_insert,
@@ -1027,12 +937,12 @@ def process_plugin_events(db, plugin, plugEventsArr):
if history_to_insert:
sql.executemany(
"""
INSERT INTO Plugins_History
("Plugin", "Object_PrimaryID", "Object_SecondaryID", "DateTimeCreated",
"DateTimeChanged", "Watched_Value1", "Watched_Value2", "Watched_Value3",
INSERT INTO Plugins_History
("Plugin", "Object_PrimaryID", "Object_SecondaryID", "DateTimeCreated",
"DateTimeChanged", "Watched_Value1", "Watched_Value2", "Watched_Value3",
"Watched_Value4", "Status", "Extra", "UserData", "ForeignKey", "SyncHubNodeName",
"HelpVal1", "HelpVal2", "HelpVal3", "HelpVal4",
"ObjectGUID")
"ObjectGUID")
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
""",
history_to_insert,
@@ -1044,7 +954,7 @@ def process_plugin_events(db, plugin, plugEventsArr):
except Exception as e:
# Rollback the transaction in case of an error
conn.rollback()
mylog("none", ["[Plugins] ⚠ ERROR: ", e])
mylog("none", f"[Plugins] ⚠ ERROR: {e}")
raise e

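The surrounding try/except makes the batch atomic: all the executemany calls above run inside one transaction, and any failure rolls everything back before re-raising. The same pattern in isolation (table and columns are illustrative):

import sqlite3

def write_batch(conn, rows):
    # All-or-nothing insert: commit on success, roll back and re-raise on error
    try:
        conn.executemany("INSERT INTO demo_events VALUES (?, ?)", rows)
        conn.commit()
    except Exception as e:
        conn.rollback()
        raise e
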
# Perform database table mapping if enabled for the plugin
@@ -1056,7 +966,7 @@ def process_plugin_events(db, plugin, plugEventsArr):
dbTable = plugin["mapped_to_table"]

# Log a debug message indicating the mapping of objects to the database table.
mylog("debug", ["[Plugins] Mapping objects to database table: ", dbTable])
mylog("debug", f"[Plugins] Mapping objects to database table: {dbTable}")

# Initialize lists to hold mapped column names, columnsStr, and valuesStr for SQL query.
mappedCols = []
@@ -1121,8 +1031,7 @@ def process_plugin_events(db, plugin, plugEventsArr):

# Check if there's a default value specified for this column in the JSON.
if (
"mapped_to_column_data" in col
and "value" in col["mapped_to_column_data"]
"mapped_to_column_data" in col and "value" in col["mapped_to_column_data"]
):
tmpList.append(col["mapped_to_column_data"]["value"])

@@ -1133,8 +1042,8 @@ def process_plugin_events(db, plugin, plugEventsArr):
q = f"INSERT OR IGNORE INTO {dbTable} ({columnsStr}) VALUES ({valuesStr})"

# Log a debug message showing the generated SQL query for mapping.
mylog("debug", ["[Plugins] SQL query for mapping: ", q])
mylog("debug", ["[Plugins] SQL sqlParams for mapping: ", sqlParams])
mylog("debug", f"[Plugins] SQL query for mapping: {q}")
mylog("debug", f"[Plugins] SQL sqlParams for mapping: {sqlParams}")

# Execute the SQL query using 'sql.executemany()' and the 'sqlParams' list of tuples.
# This will insert multiple rows into the database in one go.

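Condensing the mapping logic these comments describe: column names are joined into columnsStr, one '?' placeholder per column into valuesStr, and the batch is written with a single executemany call. A minimal sketch with an illustrative column subset:

mappedCols = ["devMac", "devName", "devVendor"]  # illustrative subset
columnsStr = ", ".join(mappedCols)
valuesStr = ", ".join("?" for _ in mappedCols)

q = f"INSERT OR IGNORE INTO Devices ({columnsStr}) VALUES ({valuesStr})"
# -> INSERT OR IGNORE INTO Devices (devMac, devName, devVendor) VALUES (?, ?, ?)
sql.executemany(q, sqlParams)  # sqlParams: one tuple per mapped object
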
@@ -1,14 +1,6 @@
import sys
import subprocess
import os
import re
import datetime
from dateutil import parser

# Register NetAlertX directories
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
sys.path.extend([f"{INSTALL_PATH}/server"])

from helper import get_setting_value, check_IP_format
from utils.datetime_utils import timeNowDB, normalizeTimeStamp
from logger import mylog, Logger
@@ -44,7 +36,7 @@ def exclude_ignored_devices(db):
# Join conditions and prepare the query
conditions_str = " OR ".join(conditions)
if conditions_str:
query = f"""DELETE FROM CurrentScan WHERE
query = f"""DELETE FROM CurrentScan WHERE
1=1
AND (
{conditions_str}
@@ -57,22 +49,23 @@ def exclude_ignored_devices(db):

sql.execute(query)

#-------------------------------------------------------------------------------
def update_devices_data_from_scan (db):
sql = db.sql #TO-DO

# -------------------------------------------------------------------------------
def update_devices_data_from_scan(db):
sql = db.sql # TO-DO
startTime = timeNowDB()

# Update Last Connection
mylog("debug", "[Update Devices] 1 Last Connection")
sql.execute(f"""UPDATE Devices SET devLastConnection = '{startTime}',
devPresentLastScan = 1
WHERE EXISTS (SELECT 1 FROM CurrentScan
WHERE EXISTS (SELECT 1 FROM CurrentScan
WHERE devMac = cur_MAC) """)

# Clean no active devices
mylog("debug", "[Update Devices] 2 Clean no active devices")
sql.execute("""UPDATE Devices SET devPresentLastScan = 0
WHERE NOT EXISTS (SELECT 1 FROM CurrentScan
WHERE NOT EXISTS (SELECT 1 FROM CurrentScan
WHERE devMac = cur_MAC) """)

# Update IP
@@ -103,7 +96,7 @@ def update_devices_data_from_scan (db):
FROM CurrentScan
WHERE Devices.devMac = CurrentScan.cur_MAC
)
WHERE
WHERE
(devVendor IS NULL OR devVendor IN ("", "null", "(unknown)", "(Unknown)"))
AND EXISTS (
SELECT 1
@@ -116,12 +109,12 @@ def update_devices_data_from_scan (db):
sql.execute("""UPDATE Devices
SET devParentPort = (
SELECT cur_Port
FROM CurrentScan
WHERE Devices.devMac = CurrentScan.cur_MAC
FROM CurrentScan
WHERE Devices.devMac = CurrentScan.cur_MAC
)
WHERE
WHERE
(devParentPort IS NULL OR devParentPort IN ("", "null", "(unknown)", "(Unknown)"))
AND
AND
EXISTS (
SELECT 1
FROM CurrentScan
@@ -139,9 +132,9 @@ def update_devices_data_from_scan (db):
FROM CurrentScan
WHERE Devices.devMac = CurrentScan.cur_MAC
)
WHERE
WHERE
(devParentMAC IS NULL OR devParentMAC IN ("", "null", "(unknown)", "(Unknown)"))
AND
AND
EXISTS (
SELECT 1
FROM CurrentScan
@@ -161,7 +154,7 @@ def update_devices_data_from_scan (db):
FROM CurrentScan
WHERE Devices.devMac = CurrentScan.cur_MAC
)
WHERE
WHERE
(devSite IS NULL OR devSite IN ("", "null"))
AND EXISTS (
SELECT 1
@@ -178,7 +171,7 @@ def update_devices_data_from_scan (db):
FROM CurrentScan
WHERE Devices.devMac = CurrentScan.cur_MAC
)
WHERE
WHERE
(devSSID IS NULL OR devSSID IN ("", "null"))
AND EXISTS (
SELECT 1
@@ -195,7 +188,7 @@ def update_devices_data_from_scan (db):
FROM CurrentScan
WHERE Devices.devMac = CurrentScan.cur_MAC
)
WHERE
WHERE
(devType IS NULL OR devType IN ("", "null"))
AND EXISTS (
SELECT 1
@@ -208,17 +201,17 @@ def update_devices_data_from_scan (db):
mylog("debug", "[Update Devices] - (if not empty) cur_Name -> (if empty) devName")
sql.execute(""" UPDATE Devices
SET devName = COALESCE((
SELECT cur_Name
SELECT cur_Name
FROM CurrentScan
WHERE cur_MAC = devMac
AND cur_Name IS NOT NULL
AND cur_Name <> 'null'
AND cur_Name <> ''
), devName)
WHERE (devName IN ('(unknown)', '(name not found)', '')
WHERE (devName IN ('(unknown)', '(name not found)', '')
OR devName IS NULL)
AND EXISTS (
SELECT 1
SELECT 1
FROM CurrentScan
WHERE cur_MAC = devMac
AND cur_Name IS NOT NULL
@@ -425,9 +418,9 @@ def print_scan_stats(db):
mylog("verbose", f" {row['cur_ScanMethod']}: {row['scan_method_count']}")

#-------------------------------------------------------------------------------
def create_new_devices (db):
sql = db.sql # TO-DO
# -------------------------------------------------------------------------------
def create_new_devices(db):
sql = db.sql # TO-DO
startTime = timeNowDB()

# Insert events for new devices from CurrentScan (not yet in Devices)
@@ -474,36 +467,36 @@ def create_new_devices (db):
mylog("debug", "[New Devices] 2 Create devices")

# default New Device values preparation
newDevColumns = """devAlertEvents,
devAlertDown,
devPresentLastScan,
devIsArchived,
devIsNew,
devSkipRepeated,
devScan,
devOwner,
devFavorite,
devGroup,
devComments,
devLogEvents,
newDevColumns = """devAlertEvents,
devAlertDown,
devPresentLastScan,
devIsArchived,
devIsNew,
devSkipRepeated,
devScan,
devOwner,
devFavorite,
devGroup,
devComments,
devLogEvents,
devLocation,
devCustomProps,
devParentRelType,
devReqNicsOnline
"""

newDevDefaults = f"""{get_setting_value("NEWDEV_devAlertEvents")},
{get_setting_value("NEWDEV_devAlertDown")},
{get_setting_value("NEWDEV_devPresentLastScan")},
{get_setting_value("NEWDEV_devIsArchived")},
{get_setting_value("NEWDEV_devIsNew")},
{get_setting_value("NEWDEV_devSkipRepeated")},
{get_setting_value("NEWDEV_devScan")},
'{sanitize_SQL_input(get_setting_value("NEWDEV_devOwner"))}',
{get_setting_value("NEWDEV_devFavorite")},
'{sanitize_SQL_input(get_setting_value("NEWDEV_devGroup"))}',
'{sanitize_SQL_input(get_setting_value("NEWDEV_devComments"))}',
{get_setting_value("NEWDEV_devLogEvents")},
newDevDefaults = f"""{get_setting_value("NEWDEV_devAlertEvents")},
{get_setting_value("NEWDEV_devAlertDown")},
{get_setting_value("NEWDEV_devPresentLastScan")},
{get_setting_value("NEWDEV_devIsArchived")},
{get_setting_value("NEWDEV_devIsNew")},
{get_setting_value("NEWDEV_devSkipRepeated")},
{get_setting_value("NEWDEV_devScan")},
'{sanitize_SQL_input(get_setting_value("NEWDEV_devOwner"))}',
{get_setting_value("NEWDEV_devFavorite")},
'{sanitize_SQL_input(get_setting_value("NEWDEV_devGroup"))}',
'{sanitize_SQL_input(get_setting_value("NEWDEV_devComments"))}',
{get_setting_value("NEWDEV_devLogEvents")},
'{sanitize_SQL_input(get_setting_value("NEWDEV_devLocation"))}',
'{sanitize_SQL_input(get_setting_value("NEWDEV_devCustomProps"))}',
'{sanitize_SQL_input(get_setting_value("NEWDEV_devParentRelType"))}',
@@ -511,7 +504,7 @@ def create_new_devices (db):
"""

# Fetch data from CurrentScan skipping ignored devices by IP and MAC
query = """SELECT cur_MAC, cur_Name, cur_Vendor, cur_ScanMethod, cur_IP, cur_SyncHubNodeName, cur_NetworkNodeMAC, cur_PORT, cur_NetworkSite, cur_SSID, cur_Type
query = """SELECT cur_MAC, cur_Name, cur_Vendor, cur_ScanMethod, cur_IP, cur_SyncHubNodeName, cur_NetworkNodeMAC, cur_PORT, cur_NetworkSite, cur_SSID, cur_Type
FROM CurrentScan """

mylog("debug", f"[New Devices] Collecting New Devices Query: {query}")
@@ -554,40 +547,40 @@ def create_new_devices (db):
)

# Preparing the individual insert statement
sqlQuery = f"""INSERT OR IGNORE INTO Devices
sqlQuery = f"""INSERT OR IGNORE INTO Devices
(
devMac,
devName,
devMac,
devName,
devVendor,
devLastIP,
devFirstConnection,
devLastConnection,
devSyncHubNode,
devLastIP,
devFirstConnection,
devLastConnection,
devSyncHubNode,
devGUID,
devParentMAC,
devParentMAC,
devParentPort,
devSite,
devSite,
devSSID,
devType,
devSourcePlugin,
devType,
devSourcePlugin,
{newDevColumns}
)
VALUES
VALUES
(
'{sanitize_SQL_input(cur_MAC)}',
'{sanitize_SQL_input(cur_MAC)}',
'{sanitize_SQL_input(cur_Name)}',
'{sanitize_SQL_input(cur_Vendor)}',
'{sanitize_SQL_input(cur_IP)}',
?,
?,
'{sanitize_SQL_input(cur_SyncHubNodeName)}',
'{sanitize_SQL_input(cur_Vendor)}',
'{sanitize_SQL_input(cur_IP)}',
?,
?,
'{sanitize_SQL_input(cur_SyncHubNodeName)}',
{sql_generateGuid},
'{sanitize_SQL_input(cur_NetworkNodeMAC)}',
'{sanitize_SQL_input(cur_PORT)}',
'{sanitize_SQL_input(cur_NetworkSite)}',
'{sanitize_SQL_input(cur_NetworkSite)}',
'{sanitize_SQL_input(cur_SSID)}',
'{sanitize_SQL_input(cur_Type)}',
'{sanitize_SQL_input(cur_ScanMethod)}',
'{sanitize_SQL_input(cur_Type)}',
'{sanitize_SQL_input(cur_ScanMethod)}',
{newDevDefaults}
)"""

@@ -598,7 +591,8 @@ def create_new_devices (db):
mylog("debug", "[New Devices] New Devices end")
db.commitDB()

#-------------------------------------------------------------------------------

# -------------------------------------------------------------------------------
# Check if plugins data changed
def check_plugin_data_changed(pm, plugins_to_check):
"""
@@ -630,7 +624,7 @@ def check_plugin_data_changed(pm, plugins_to_check):

for plugin_name in plugins_to_check:

last_data_change = pm.plugin_states.get(plugin_name, {}).get("lastDataChange")
last_data_change = pm.plugin_states.get(plugin_name, {}).get("lastDataChange")
last_data_check = pm.plugin_checks.get(plugin_name, "")

if not last_data_change:
@@ -639,13 +633,13 @@ def check_plugin_data_changed(pm, plugins_to_check):
# Normalize and validate last_changed timestamp
last_changed_ts = normalizeTimeStamp(last_data_change)

if last_changed_ts == None:
if last_changed_ts is None:
mylog('none', f'[check_plugin_data_changed] Unexpected last_data_change timestamp for {plugin_name} (input|output): ({last_data_change}|{last_changed_ts})')

# Normalize and validate last_data_check timestamp
last_data_check_ts = normalizeTimeStamp(last_data_check)

if last_data_check_ts == None:
if last_data_check_ts is None:
mylog('none', f'[check_plugin_data_changed] Unexpected last_data_check timestamp for {plugin_name} (input|output): ({last_data_check}|{last_data_check_ts})')

# Track which plugins have newer state than last_checked
@@ -660,15 +654,19 @@ def check_plugin_data_changed(pm, plugins_to_check):

# Continue if changes detected
for p in plugins_changed:
mylog('debug', f'[check_plugin_data_changed] {p} changed (last_data_change|last_data_check): ({pm.plugin_states.get(p, {}).get("lastDataChange")}|{pm.plugin_checks.get(p)})')
mylog(
'debug',
f'[check_plugin_data_changed] {p} changed (last_data_change|last_data_check): ({pm.plugin_states.get(p, {}).get("lastDataChange")}|{pm.plugin_checks.get(p)})'
)

return True

#-------------------------------------------------------------------------------

# -------------------------------------------------------------------------------
def update_devices_names(pm):

# --- Short-circuit if no name-resolution plugin has changed ---
if check_plugin_data_changed(pm, ["DIGSCAN", "NSLOOKUP", "NBTSCAN", "AVAHISCAN"]) == False:
if check_plugin_data_changed(pm, ["DIGSCAN", "NSLOOKUP", "NBTSCAN", "AVAHISCAN"]) is False:
mylog('debug', '[Update Device Name] No relevant plugin changes since last check.')
return

@@ -676,8 +674,8 @@ def update_devices_names(pm):

sql = pm.db.sql
resolver = NameResolver(pm.db)
device_handler = DeviceInstance(pm.db)

device_handler = DeviceInstance(pm.db)

nameNotFound = "(name not found)"

# Define resolution strategies in priority order
@@ -722,8 +720,7 @@ def update_devices_names(pm):

# If a valid result is found, record it and stop further attempts
if (
newFQDN not in [nameNotFound, "", "localhost."]
and " communications error to " not in newFQDN
newFQDN not in [nameNotFound, "", "localhost."] and " communications error to " not in newFQDN
):
foundStats[label] += 1

@@ -750,14 +747,14 @@ def update_devices_names(pm):
)

# Try resolving both name and FQDN
recordsToUpdate, recordsNotFound, foundStats, notFound = resolve_devices(
recordsToUpdate, recordsNotFound, fs, notFound = resolve_devices(
unknownDevices
)

# Log summary
mylog(
"verbose",
f"[Update Device Name] Names Found (DIGSCAN/AVAHISCAN/NSLOOKUP/NBTSCAN): {len(recordsToUpdate)} ({foundStats['DIGSCAN']}/{foundStats['AVAHISCAN']}/{foundStats['NSLOOKUP']}/{foundStats['NBTSCAN']})",
f"[Update Device Name] Names Found (DIGSCAN/AVAHISCAN/NSLOOKUP/NBTSCAN): {len(recordsToUpdate)} ({fs['DIGSCAN']}/{fs['AVAHISCAN']}/{fs['NSLOOKUP']}/{fs['NBTSCAN']})",
)
mylog("verbose", f"[Update Device Name] Names Not Found : {notFound}")

@@ -780,16 +777,14 @@ def update_devices_names(pm):
)

# Try resolving only FQDN
recordsToUpdate, _, foundStats, notFound = resolve_devices(
recordsToUpdate, _, fs, notFound = resolve_devices(
allDevices, resolve_both_name_and_fqdn=False
)

# Log summary
mylog(
"verbose",
f"[Update FQDN] Names Found (DIGSCAN/AVAHISCAN/NSLOOKUP/NBTSCAN): {len(recordsToUpdate)}"+
f"({foundStats['DIGSCAN']}/{foundStats['AVAHISCAN']}/{foundStats['NSLOOKUP']}"+
f"/{foundStats['NBTSCAN']})",
f"[Update FQDN] Names Found (DIGSCAN/AVAHISCAN/NSLOOKUP/NBTSCAN): {len(recordsToUpdate)}({fs['DIGSCAN']}/{fs['AVAHISCAN']}/{fs['NSLOOKUP']}/{fs['NBTSCAN']})",
)
mylog("verbose", f"[Update FQDN] Names Not Found : {notFound}")

@@ -803,7 +798,7 @@ def update_devices_names(pm):

# --- Step 3: Log last checked time ---
# After resolving names, update last checked
pm.plugin_checks = {"DIGSCAN": timeNowDB(), "AVAHISCAN": timeNowDB(), "NSLOOKUP": timeNowDB(), "NBTSCAN": timeNowDB() }
pm.plugin_checks = {"DIGSCAN": timeNowDB(), "AVAHISCAN": timeNowDB(), "NSLOOKUP": timeNowDB(), "NBTSCAN": timeNowDB()}

# -------------------------------------------------------------------------------
@@ -901,7 +896,6 @@ def query_MAC_vendor(pMAC):

# Search vendor in HW Vendors DB
mac_start_string6 = mac[0:6]
mac_start_string9 = mac[0:9]

try:
with open(filePath, "r") as f:

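query_MAC_vendor matches both a 6- and a 9-character prefix of the normalized MAC against the vendors file. A minimal sketch of that lookup, assuming (purely for illustration) one "PREFIX vendor-name" entry per line:

def lookup_vendor(mac, file_path):
    # Normalize, then try the longer (more specific) prefix first
    mac = mac.replace(":", "").upper()
    prefixes = (mac[0:9], mac[0:6])
    with open(file_path, "r") as f:
        for line in f:
            parts = line.strip().split(maxsplit=1)
            if len(parts) == 2 and parts[0].upper() in prefixes:
                return parts[1]
    return None
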
@@ -1,16 +1,13 @@
import sys
import os
import re
import json
import base64
from pathlib import Path
from typing import Optional, Tuple
from logger import mylog

# Register NetAlertX directories
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
sys.path.extend([f"{INSTALL_PATH}/server"])

from logger import mylog

# Load MAC/device-type/icon rules from external file
MAC_TYPE_ICON_PATH = Path(f"{INSTALL_PATH}/back/device_heuristics_rules.json")
@@ -83,7 +80,7 @@ def match_vendor(vendor: str, default_type: str, default_icon: str) -> Tuple[str

for pattern in patterns:
# Only apply fallback when no MAC prefix is specified
mac_prefix = pattern.get("mac_prefix", "")
# mac_prefix = pattern.get("mac_prefix", "")
vendor_pattern = pattern.get("vendor", "").lower()

if vendor_pattern and vendor_pattern in vendor_lc:

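A condensed sketch of the vendor fallback above: each rule may carry a vendor substring, and the first case-insensitive hit decides the device type and icon. The rule structure mirrors device_heuristics_rules.json as loaded earlier in this file; the "type" and "icon" keys are assumed here for illustration:

def match_vendor_fallback(vendor, patterns, default_type, default_icon):
    # First rule whose vendor substring appears in the vendor name wins
    vendor_lc = vendor.lower()
    for pattern in patterns:
        vendor_pattern = pattern.get("vendor", "").lower()
        if vendor_pattern and vendor_pattern in vendor_lc:
            return pattern.get("type", default_type), pattern.get("icon", default_icon)
    return default_type, default_icon
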
@@ -1,11 +1,4 @@
|
||||
import sys
|
||||
import os
|
||||
import re
|
||||
|
||||
# Register NetAlertX directories
|
||||
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
|
||||
sys.path.extend([f"{INSTALL_PATH}/server"])
|
||||
|
||||
from logger import mylog
|
||||
from helper import get_setting_value
|
||||
|
||||
@@ -31,7 +24,7 @@ class NameResolver:
|
||||
|
||||
# Check by MAC
|
||||
sql.execute(f"""
|
||||
SELECT Watched_Value2 FROM Plugins_Objects
|
||||
SELECT Watched_Value2 FROM Plugins_Objects
|
||||
WHERE Plugin = '{plugin}' AND Object_PrimaryID = '{pMAC}'
|
||||
""")
|
||||
result = sql.fetchall()
|
||||
@@ -42,9 +35,9 @@ class NameResolver:
|
||||
|
||||
# Check name by IP if enabled
|
||||
if get_setting_value('NEWDEV_IP_MATCH_NAME'):
|
||||
|
||||
|
||||
sql.execute(f"""
|
||||
SELECT Watched_Value2 FROM Plugins_Objects
|
||||
SELECT Watched_Value2 FROM Plugins_Objects
|
||||
WHERE Plugin = '{plugin}' AND Object_SecondaryID = '{pIP}'
|
||||
""")
|
||||
result = sql.fetchall()
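Both lookups interpolate `plugin`, `pMAC`, and `pIP` straight into the SQL text, which works while the values come from trusted internal code. For reference, the same query expressed with bound parameters, as a self-contained sqlite3 sketch (not what this commit changes):

import sqlite3

con = sqlite3.connect(":memory:")
con.execute("CREATE TABLE Plugins_Objects (Plugin TEXT, Object_PrimaryID TEXT, Watched_Value2 TEXT)")
con.execute("INSERT INTO Plugins_Objects VALUES ('NSLOOKUP', 'aa:bb:cc:dd:ee:ff', 'host.local')")
# Placeholders let sqlite3 handle quoting instead of the f-string
result = con.execute(
    "SELECT Watched_Value2 FROM Plugins_Objects WHERE Plugin = ? AND Object_PrimaryID = ?",
    ("NSLOOKUP", "aa:bb:cc:dd:ee:ff"),
).fetchall()
print(result)  # [('host.local',)]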

@@ -1,10 +1,3 @@
import sys
import os

# Register NetAlertX directories
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
sys.path.extend([f"{INSTALL_PATH}/server"])

from scan.device_handling import (
create_new_devices,
print_scan_stats,

@@ -14,7 +7,7 @@ from scan.device_handling import (
)
from helper import get_setting_value
from db.db_helper import print_table_schema
from utils.datetime_utils import timeNowDB, timeNowTZ
from utils.datetime_utils import timeNowDB
from logger import mylog, Logger
from messaging.reporting import skip_repeated_notifications

@@ -133,20 +126,20 @@ def create_sessions_snapshot(db):
db.commitDB()


#-------------------------------------------------------------------------------
def insert_events (db):
sql = db.sql #TO-DO
startTime = timeNowDB()

# -------------------------------------------------------------------------------
def insert_events(db):
sql = db.sql # TO-DO
startTime = timeNowDB()

# Check device down
mylog("debug", "[Events] - 1 - Devices down")
sql.execute(f"""INSERT INTO Events (eve_MAC, eve_IP, eve_DateTime,
eve_EventType, eve_AdditionalInfo,
eve_PendingAlertEmail)
SELECT devMac, devLastIP, '{startTime}', 'Device Down', '', 1
FROM Devices
FROM Devices
WHERE devAlertDown != 0
AND devPresentLastScan = 1
AND devPresentLastScan = 1
AND NOT EXISTS (SELECT 1 FROM CurrentScan
WHERE devMac = cur_MAC
) """)

@@ -156,15 +149,15 @@ def insert_events (db):
sql.execute(f""" INSERT INTO Events (eve_MAC, eve_IP, eve_DateTime,
eve_EventType, eve_AdditionalInfo,
eve_PendingAlertEmail)
SELECT DISTINCT c.cur_MAC, c.cur_IP, '{startTime}',
CASE
WHEN last_event.eve_EventType = 'Device Down' and last_event.eve_PendingAlertEmail = 0 THEN 'Down Reconnected'
ELSE 'Connected'
SELECT DISTINCT c.cur_MAC, c.cur_IP, '{startTime}',
CASE
WHEN last_event.eve_EventType = 'Device Down' and last_event.eve_PendingAlertEmail = 0 THEN 'Down Reconnected'
ELSE 'Connected'
END,
'',
1
FROM CurrentScan AS c
LEFT JOIN LatestEventsPerMAC AS last_event ON c.cur_MAC = last_event.eve_MAC
FROM CurrentScan AS c
LEFT JOIN LatestEventsPerMAC AS last_event ON c.cur_MAC = last_event.eve_MAC
WHERE last_event.devPresentLastScan = 0 OR last_event.eve_MAC IS NULL
""")

@@ -190,7 +183,7 @@ def insert_events (db):
SELECT cur_MAC, cur_IP, '{startTime}', 'IP Changed',
'Previous IP: '|| devLastIP, devAlertEvents
FROM Devices, CurrentScan
WHERE devMac = cur_MAC
WHERE devMac = cur_MAC
AND devLastIP <> cur_IP """)
mylog("debug", "[Events] - Events end")

@@ -1,49 +1,43 @@
#!/usr/bin/env python
# !/usr/bin/env python

import os
import pathlib
import sys
from datetime import datetime
# from datetime import datetime
from dateutil import parser
import datetime
import re
import pytz
from pytz import timezone
from typing import Union
from zoneinfo import ZoneInfo
import email.utils

# Register NetAlertX directories
INSTALL_PATH="/app"
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])

import conf
from const import *
# from const import *


#-------------------------------------------------------------------------------
# -------------------------------------------------------------------------------
# DateTime
#-------------------------------------------------------------------------------
# -------------------------------------------------------------------------------

DATETIME_PATTERN = "%Y-%m-%d %H:%M:%S"
DATETIME_REGEX = re.compile(r'^\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}$')


def timeNowTZ():
if conf.tz:
return datetime.datetime.now(conf.tz).replace(microsecond=0)
else:
return datetime.datetime.now().replace(microsecond=0)


def timeNow():
return datetime.datetime.now().replace(microsecond=0)

def get_timezone_offset():

def get_timezone_offset():
now = datetime.datetime.now(conf.tz)
offset_hours = now.utcoffset().total_seconds() / 3600
offset_hours = now.utcoffset().total_seconds() / 3600
offset_formatted = "{:+03d}:{:02d}".format(int(offset_hours), int((offset_hours % 1) * 60))
return offset_formatted
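For reference, the `{:+03d}:{:02d}` format above pairs a signed hour with an unsigned minute, and Python's modulo keeps the fractional part positive even for negative offsets, so half-hour zones come out right. A quick worked check of the same expression with fixed offsets (no `conf.tz` needed):

for offset_hours in (5.5, -4.0, -9.5):
    # Same formatting as get_timezone_offset, with the offset supplied directly
    print("{:+03d}:{:02d}".format(int(offset_hours), int((offset_hours % 1) * 60)))
# +05:30
# -04:00
# -09:30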

def timeNowDB(local=True):
"""
Return the current time (local or UTC) as ISO 8601 for DB storage.

@@ -67,9 +61,9 @@ def timeNowDB(local=True):
return datetime.datetime.now(datetime.UTC).strftime(DATETIME_PATTERN)


#-------------------------------------------------------------------------------
# -------------------------------------------------------------------------------
# Date and time methods
#-------------------------------------------------------------------------------
# -------------------------------------------------------------------------------

def normalizeTimeStamp(inputTimeStamp):
"""

@@ -91,7 +85,7 @@ def normalizeTimeStamp(inputTimeStamp):

# Epoch timestamp (integer or float)
if isinstance(inputTimeStamp, (int, float)):
try:
try:
return datetime.datetime.fromtimestamp(inputTimeStamp)
except (OSError, OverflowError, ValueError):
return None

@@ -125,6 +119,7 @@ def format_date_iso(date1: str) -> str:
dt = datetime.datetime.fromisoformat(date1) if isinstance(date1, str) else date1
return dt.isoformat()


# -------------------------------------------------------------------------------------------
def format_event_date(date_str: str, event_type: str) -> str:
"""Format event date with fallback rules."""

@@ -135,6 +130,7 @@ def format_event_date(date_str: str, event_type: str) -> str:
else:
return "<still connected>"


# -------------------------------------------------------------------------------------------
def ensure_datetime(dt: Union[str, datetime.datetime, None]) -> datetime.datetime:
if dt is None:

@@ -157,6 +153,7 @@ def parse_datetime(dt_str):
except ValueError:
return None


def format_date(date_str: str) -> str:
try:
dt = parse_datetime(date_str)

@@ -168,13 +165,14 @@ def format_date(date_str: str) -> str:
except (ValueError, AttributeError, TypeError):
return "invalid"


def format_date_diff(date1, date2, tz_name):
"""
Return difference between two datetimes as 'Xd HH:MM'.
Uses app timezone if datetime is naive.
date2 can be None (uses now).
"""
# Get timezone from settings
# Get timezone from settings
tz = pytz.timezone(tz_name)

def parse_dt(dt):

@@ -184,8 +182,8 @@ def format_date_diff(date1, date2, tz_name):
try:
dt_parsed = email.utils.parsedate_to_datetime(dt)
except (ValueError, TypeError):
# fallback: parse ISO string
dt_parsed = datetime.datetime.fromisoformat(dt)
# fallback: parse ISO string
dt_parsed = datetime.datetime.fromisoformat(dt)
# convert naive GMT/UTC to app timezone
if dt_parsed.tzinfo is None:
dt_parsed = tz.localize(dt_parsed)

@@ -208,4 +206,4 @@ def format_date_diff(date1, date2, tz_name):
"hours": hours,
"minutes": minutes,
"total_minutes": total_minutes
}
}
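As the last hunk shows, `format_date_diff` returns a structure with hour/minute components alongside a total, rather than a bare string. A sketch of the underlying arithmetic as a plain timedelta decomposition, independent of the pytz plumbing (the `days` and `text` keys here are illustrative additions, not confirmed fields of the real return value):

import datetime

def decompose(delta: datetime.timedelta):
    # Break a timedelta into the 'Xd HH:MM' components the docstring describes
    total_minutes = int(delta.total_seconds() // 60)
    days, rem = divmod(total_minutes, 24 * 60)
    hours, minutes = divmod(rem, 60)
    return {"days": days, "hours": hours, "minutes": minutes,
            "total_minutes": total_minutes, "text": f"{days}d {hours:02d}:{minutes:02d}"}

print(decompose(datetime.timedelta(days=1, hours=3, minutes=7))["text"])  # 1d 03:07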

@@ -1,6 +1,6 @@
import os
import json

from collections import namedtuple
import conf
from logger import mylog
from utils.crypto_utils import decrypt_data

@@ -220,9 +220,7 @@ def get_plugins_configs(loadAll):
# Load all plugins if `loadAll` is True, the plugin is in the enabled list,
# or no specific plugins are enabled (enabledPlugins is empty)
if (
loadAll
or plugJson["unique_prefix"] in enabledPlugins
or enabledPlugins == []
loadAll or plugJson["unique_prefix"] in enabledPlugins or enabledPlugins == []
):
# Load the contents of the config.json file as a JSON object and append it to pluginsList
pluginsList.append(plugJson)
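The collapsed condition above means: an empty enabled-plugins list loads everything, otherwise only the listed prefixes load, and `loadAll` overrides both. A small truth-table sketch of the same predicate:

def should_load(loadAll, prefix, enabledPlugins):
    # Empty list means "no explicit selection" and is treated as load-all
    return loadAll or prefix in enabledPlugins or enabledPlugins == []

print(should_load(False, "NSLOOKUP", []))            # True (no selection made)
print(should_load(False, "NSLOOKUP", ["DIGSCAN"]))   # False (not in selection)
print(should_load(True, "NSLOOKUP", ["DIGSCAN"]))    # True (loadAll wins)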

@@ -1,11 +1,4 @@
import sqlite3
import os
import sys

# Register NetAlertX directories
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
sys.path.extend([f"{INSTALL_PATH}/server"])

from logger import mylog, Logger
from helper import get_setting_value
from models.device_instance import DeviceInstance

@@ -15,7 +8,6 @@ from models.plugin_object_instance import PluginObjectInstance
Logger(get_setting_value("LOG_LEVEL"))


class Action:
"""Base class for all actions."""


@@ -1,10 +1,3 @@
import os
import sys

# Register NetAlertX directories
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
sys.path.extend([f"{INSTALL_PATH}/server"])

from helper import get_setting_value
from logger import Logger
from const import sql_generateGuid

@@ -96,11 +89,11 @@ class AppEvent_obj:
"ObjectPrimaryID" TEXT,
"ObjectSecondaryID" TEXT,
"ObjectForeignKey" TEXT,
"ObjectIndex" TEXT,
"ObjectIsNew" BOOLEAN,
"ObjectIsArchived" BOOLEAN,
"ObjectIndex" TEXT,
"ObjectIsNew" BOOLEAN,
"ObjectIsArchived" BOOLEAN,
"ObjectStatusColumn" TEXT,
"ObjectStatus" TEXT,
"ObjectStatus" TEXT,
"AppEventType" TEXT,
"Helper1" TEXT,
"Helper2" TEXT,

@@ -117,11 +110,11 @@ class AppEvent_obj:
CREATE TRIGGER IF NOT EXISTS "{trigger_name}"
AFTER {event.upper()} ON "{table_name}"
WHEN NOT EXISTS (
SELECT 1 FROM AppEvents
WHERE AppEventProcessed = 0
SELECT 1 FROM AppEvents
WHERE AppEventProcessed = 0
AND ObjectType = '{table_name}'
AND ObjectGUID = {manage_prefix(config["fields"]["ObjectGUID"], event)}
AND ObjectStatus = {manage_prefix(config["fields"]["ObjectStatus"], event)}
AND ObjectStatus = {manage_prefix(config["fields"]["ObjectStatus"], event)}
AND AppEventType = '{event.lower()}'
)
BEGIN

@@ -142,10 +135,10 @@ class AppEvent_obj:
"AppEventType"
)
VALUES (
{sql_generateGuid},
DATETIME('now'),
FALSE,
'{table_name}',
{sql_generateGuid},
DATETIME('now'),
FALSE,
'{table_name}',
{manage_prefix(config["fields"]["ObjectGUID"], event)}, -- ObjectGUID
{manage_prefix(config["fields"]["ObjectPrimaryID"], event)}, -- ObjectPrimaryID
{manage_prefix(config["fields"]["ObjectSecondaryID"], event)}, -- ObjectSecondaryID

@@ -1,12 +1,5 @@
import re
import json
import os
import sys

# Register NetAlertX directories
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
sys.path.extend([f"{INSTALL_PATH}/server"])

from logger import mylog, Logger
from helper import get_setting_value


@@ -1,22 +1,17 @@
import json
import os
import sys

# Register NetAlertX directories
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
sys.path.extend([f"{INSTALL_PATH}/server"])

from const import fullConfFolder
from logger import mylog, Logger
from helper import get_setting_value

# Make sure log level is initialized correctly
Logger(get_setting_value("LOG_LEVEL"))

from workflows.triggers import Trigger
from workflows.conditions import ConditionGroup
from workflows.actions import DeleteObjectAction, RunPluginAction, UpdateFieldAction


# Make sure log level is initialized correctly
Logger(get_setting_value("LOG_LEVEL"))


class WorkflowManager:
def __init__(self, db):
self.db = db

@@ -1,11 +1,4 @@
import json
import os
import sys

# Register NetAlertX directories
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
sys.path.extend([f"{INSTALL_PATH}/server"])

from logger import mylog, Logger
from helper import get_setting_value
from database import get_array_from_sql_rows

@@ -28,8 +21,7 @@ class Trigger:
self.event_type = triggerJson["event_type"]
self.event = event # Store the triggered event context, if provided
self.triggered = (
self.object_type == event["ObjectType"]
and self.event_type == event["AppEventType"]
self.object_type == event["ObjectType"] and self.event_type == event["AppEventType"]
)

mylog(

@@ -53,9 +45,9 @@ class Trigger:
raise ValueError(m)

query = f"""
SELECT * FROM
SELECT * FROM
{db_table}
WHERE {refField} = '{event["ObjectGUID"]}'
WHERE {refField} = '{event["ObjectGUID"]}'
"""

mylog("debug", [query])