Mirror of https://github.com/jokob-sk/NetAlertX.git, synced 2025-12-06 17:15:38 -08:00.
@@ -23,8 +23,9 @@ from pathlib import Path
 # Register NetAlertX modules
 import conf
 from const import *
 from logger import mylog
-from helper import filePermissions, timeNowTZ
+from helper import filePermissions
+from utils.datetime_utils import timeNowTZ
 from app_state import updateState
 from api import update_api
 from scan.session_events import process_scan

@@ -21,7 +21,8 @@ from const import (
     sql_devices_filters,
 )
 from logger import mylog
-from helper import write_file, get_setting_value, timeNowTZ
+from helper import write_file, get_setting_value
+from utils.datetime_utils import timeNowTZ
 from app_state import updateState
 from models.user_events_queue_instance import UserEventsQueueInstance
@@ -14,61 +14,20 @@ from helper import get_setting_value
 from db.db_helper import get_date_from_period
 from app_state import updateState

-from api_server.graphql_endpoint import devicesSchema
-from api_server.device_endpoint import (
-    get_device_data,
-    set_device_data,
-    delete_device,
-    delete_device_events,
-    reset_device_props,
-    copy_device,
-    update_device_column,
-)
-from api_server.devices_endpoint import (
-    get_all_devices,
-    delete_unknown_devices,
-    delete_all_with_empty_macs,
-    delete_devices,
-    export_devices,
-    import_csv,
-    devices_totals,
-    devices_by_status,
-)
-from api_server.events_endpoint import (
-    delete_events,
-    delete_events_older_than,
-    get_events,
-    create_event,
-    get_events_totals,
-)
-from api_server.history_endpoint import delete_online_history
-from api_server.prometheus_endpoint import get_metric_stats
-from api_server.sessions_endpoint import (
-    get_sessions,
-    delete_session,
-    create_session,
-    get_sessions_calendar,
-    get_device_sessions,
-    get_session_events,
-)
-from api_server.nettools_endpoint import (
-    wakeonlan,
-    traceroute,
-    speedtest,
-    nslookup,
-    nmap_scan,
-    internet_info,
-)
-from api_server.dbquery_endpoint import read_query, write_query, update_query, delete_query
-from api_server.sync_endpoint import handle_sync_post, handle_sync_get
-from messaging.in_app import (
-    write_notification,
-    mark_all_notifications_read,
-    delete_notifications,
-    get_unread_notifications,
-    delete_notification,
-    mark_notification_as_read,
-)
+from .graphql_endpoint import devicesSchema
+from .device_endpoint import get_device_data, set_device_data, delete_device, delete_device_events, reset_device_props, copy_device, update_device_column
+from .devices_endpoint import get_all_devices, delete_unknown_devices, delete_all_with_empty_macs, delete_devices, export_devices, import_csv, devices_totals, devices_by_status
+from .events_endpoint import delete_events, delete_events_older_than, get_events, create_event, get_events_totals
+from .history_endpoint import delete_online_history
+from .prometheus_endpoint import get_metric_stats
+from .sessions_endpoint import get_sessions, delete_session, create_session, get_sessions_calendar, get_device_sessions, get_session_events
+from .nettools_endpoint import wakeonlan, traceroute, speedtest, nslookup, nmap_scan, internet_info
+from .dbquery_endpoint import read_query, write_query, update_query, delete_query
+from .sync_endpoint import handle_sync_post, handle_sync_get
+from .logs_endpoint import clean_log
+from models.user_events_queue_instance import UserEventsQueueInstance
+from messaging.in_app import write_notification, mark_all_notifications_read, delete_notifications, get_unread_notifications, delete_notification, mark_notification_as_read

 # Flask application
 app = Flask(__name__)
@@ -85,11 +44,24 @@ CORS(
         r"/dbquery/*": {"origins": "*"},
         r"/messaging/*": {"origins": "*"},
         r"/events/*": {"origins": "*"},
+        r"/logs/*": {"origins": "*"}
     },
     supports_credentials=True,
     allow_headers=["Authorization", "Content-Type"],
 )

+# -------------------------------------------------------------------
+# Custom handler for 404 - Route not found
+# -------------------------------------------------------------------
+@app.errorhandler(404)
+def not_found(error):
+    response = {
+        "success": False,
+        "error": "API route not found",
+        "message": f"The requested URL {error.description if hasattr(error, 'description') else ''} was not found on the server.",
+    }
+    return jsonify(response), 404
+
 # --------------------------
 # GraphQL Endpoints
 # --------------------------
@@ -107,9 +79,9 @@ def graphql_debug():
 def graphql_endpoint():
     # Check for API token in headers
     if not is_authorized():
-        msg = "[graphql_server] Unauthorized access attempt - make sure your GRAPHQL_PORT and API_TOKEN settings are correct."
-        mylog("verbose", [msg])
-        return jsonify({"error": msg}), 401
+        msg = '[graphql_server] Unauthorized access attempt - make sure your GRAPHQL_PORT and API_TOKEN settings are correct.'
+        mylog('verbose', [msg])
+        return jsonify({"success": False, "message": msg}), 401

     # Retrieve and log request data
     data = request.get_json()
@@ -137,7 +109,7 @@ def graphql_endpoint():
 @app.route("/settings/<setKey>", methods=["GET"])
 def api_get_setting(setKey):
     if not is_authorized():
-        return jsonify({"error": "Forbidden"}), 403
+        return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403
     value = get_setting_value(setKey)
     return jsonify({"success": True, "value": value})
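Every protected route below this point now returns the same 403 envelope, {"success": false, "message": "ERROR: Not authorized", "error": "Forbidden"}, in place of the earlier bare {"error": "Forbidden"}. The repeated literal could be factored into one helper; the sketch below is illustrative only (forbidden_response is a hypothetical name, not part of this changeset):

    from flask import jsonify

    def forbidden_response():
        # Hypothetical helper; the changeset itself inlines this literal in every route.
        return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403

    # Usage, matching the pattern repeated throughout this diff:
    #     if not is_authorized():
    #         return forbidden_response()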
@@ -150,51 +122,49 @@ def api_get_setting(setKey):
 @app.route("/device/<mac>", methods=["GET"])
 def api_get_device(mac):
     if not is_authorized():
-        return jsonify({"error": "Forbidden"}), 403
+        return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403
     return get_device_data(mac)


 @app.route("/device/<mac>", methods=["POST"])
 def api_set_device(mac):
     if not is_authorized():
-        return jsonify({"error": "Forbidden"}), 403
+        return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403
     return set_device_data(mac, request.json)


 @app.route("/device/<mac>/delete", methods=["DELETE"])
 def api_delete_device(mac):
     if not is_authorized():
-        return jsonify({"error": "Forbidden"}), 403
+        return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403
     return delete_device(mac)


 @app.route("/device/<mac>/events/delete", methods=["DELETE"])
 def api_delete_device_events(mac):
     if not is_authorized():
-        return jsonify({"error": "Forbidden"}), 403
+        return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403
     return delete_device_events(mac)


 @app.route("/device/<mac>/reset-props", methods=["POST"])
 def api_reset_device_props(mac):
     if not is_authorized():
-        return jsonify({"error": "Forbidden"}), 403
+        return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403
     return reset_device_props(mac, request.json)


 @app.route("/device/copy", methods=["POST"])
 def api_copy_device():
     if not is_authorized():
-        return jsonify({"error": "Forbidden"}), 403
+        return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403

     data = request.get_json() or {}
     mac_from = data.get("macFrom")
     mac_to = data.get("macTo")

     if not mac_from or not mac_to:
-        return jsonify(
-            {"success": False, "error": "macFrom and macTo are required"}
-        ), 400
+        return jsonify({"success": False, "message": "ERROR: Missing parameters", "error": "macFrom and macTo are required"}), 400

     return copy_device(mac_from, mac_to)

@@ -202,16 +172,14 @@ def api_copy_device():
 @app.route("/device/<mac>/update-column", methods=["POST"])
 def api_update_device_column(mac):
     if not is_authorized():
-        return jsonify({"error": "Forbidden"}), 403
+        return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403

     data = request.get_json() or {}
     column_name = data.get("columnName")
     column_value = data.get("columnValue")

     if not column_name or not column_value:
-        return jsonify(
-            {"success": False, "error": "columnName and columnValue are required"}
-        ), 400
+        return jsonify({"success": False, "message": "ERROR: Missing parameters", "error": "columnName and columnValue are required"}), 400

     return update_device_column(mac, column_name, column_value)

@@ -224,15 +192,15 @@ def api_update_device_column(mac):
 @app.route("/devices", methods=["GET"])
 def api_get_devices():
     if not is_authorized():
-        return jsonify({"error": "Forbidden"}), 403
+        return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403
     return get_all_devices()


 @app.route("/devices", methods=["DELETE"])
 def api_delete_devices():
     if not is_authorized():
-        return jsonify({"error": "Forbidden"}), 403
+        return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403

     macs = request.json.get("macs") if request.is_json else None

     return delete_devices(macs)
@@ -241,14 +209,14 @@ def api_delete_devices():
 @app.route("/devices/empty-macs", methods=["DELETE"])
 def api_delete_all_empty_macs():
     if not is_authorized():
-        return jsonify({"error": "Forbidden"}), 403
+        return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403
     return delete_all_with_empty_macs()


 @app.route("/devices/unknown", methods=["DELETE"])
 def api_delete_unknown_devices():
     if not is_authorized():
-        return jsonify({"error": "Forbidden"}), 403
+        return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403
     return delete_unknown_devices()

@@ -256,7 +224,7 @@ def api_delete_unknown_devices():
 @app.route("/devices/export/<format>", methods=["GET"])
 def api_export_devices(format=None):
     if not is_authorized():
-        return jsonify({"error": "Forbidden"}), 403
+        return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403

     export_format = (format or request.args.get("format", "csv")).lower()
     return export_devices(export_format)

@@ -265,21 +233,21 @@ def api_export_devices(format=None):
 @app.route("/devices/import", methods=["POST"])
 def api_import_csv():
     if not is_authorized():
-        return jsonify({"error": "Forbidden"}), 403
+        return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403
     return import_csv(request.files.get("file"))


 @app.route("/devices/totals", methods=["GET"])
 def api_devices_totals():
     if not is_authorized():
-        return jsonify({"error": "Forbidden"}), 403
+        return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403
     return devices_totals()


 @app.route("/devices/by-status", methods=["GET"])
 def api_devices_by_status():
     if not is_authorized():
-        return jsonify({"error": "Forbidden"}), 403
+        return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403

     status = request.args.get("status", "") if request.args else None
@@ -292,7 +260,7 @@ def api_devices_by_status():
 @app.route("/nettools/wakeonlan", methods=["POST"])
 def api_wakeonlan():
     if not is_authorized():
-        return jsonify({"error": "Forbidden"}), 403
+        return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403

     mac = request.json.get("devMac")
     return wakeonlan(mac)

@@ -301,7 +269,7 @@ def api_wakeonlan():
 @app.route("/nettools/traceroute", methods=["POST"])
 def api_traceroute():
     if not is_authorized():
-        return jsonify({"error": "Forbidden"}), 403
+        return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403
     ip = request.json.get("devLastIP")
     return traceroute(ip)

@@ -309,7 +277,7 @@ def api_traceroute():
 @app.route("/nettools/speedtest", methods=["GET"])
 def api_speedtest():
     if not is_authorized():
-        return jsonify({"error": "Forbidden"}), 403
+        return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403
     return speedtest()

@@ -320,11 +288,11 @@ def api_nslookup():
     Expects JSON with 'devLastIP'.
     """
     if not is_authorized():
-        return jsonify({"success": False, "error": "Forbidden"}), 403
+        return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403

     data = request.get_json(silent=True)
     if not data or "devLastIP" not in data:
-        return jsonify({"success": False, "error": "Missing 'devLastIP'"}), 400
+        return jsonify({"success": False, "message": "ERROR: Missing parameters", "error": "Missing 'devLastIP'"}), 400

     ip = data["devLastIP"]
     return nslookup(ip)

@@ -337,11 +305,11 @@ def api_nmap():
     Expects JSON with 'scan' (IP address) and 'mode' (scan mode).
     """
     if not is_authorized():
-        return jsonify({"success": False, "error": "Forbidden"}), 403
+        return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403

     data = request.get_json(silent=True)
     if not data or "scan" not in data or "mode" not in data:
-        return jsonify({"success": False, "error": "Missing 'scan' or 'mode'"}), 400
+        return jsonify({"success": False, "message": "ERROR: Missing parameters", "error": "Missing 'scan' or 'mode'"}), 400

     ip = data["scan"]
     mode = data["mode"]

@@ -351,7 +319,7 @@ def api_nmap():
 @app.route("/nettools/internetinfo", methods=["GET"])
 def api_internet_info():
     if not is_authorized():
-        return jsonify({"success": False, "error": "Forbidden"}), 403
+        return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403
     return internet_info()
@@ -363,26 +331,26 @@ def api_internet_info():
 @app.route("/dbquery/read", methods=["POST"])
 def dbquery_read():
     if not is_authorized():
-        return jsonify({"error": "Forbidden"}), 403
+        return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403

     data = request.get_json() or {}
     raw_sql_b64 = data.get("rawSql")

     if not raw_sql_b64:
-        return jsonify({"error": "rawSql is required"}), 400
+        return jsonify({"success": False, "message": "ERROR: Missing parameters", "error": "rawSql is required"}), 400

     return read_query(raw_sql_b64)


 @app.route("/dbquery/write", methods=["POST"])
 def dbquery_write():
     if not is_authorized():
-        return jsonify({"error": "Forbidden"}), 403
+        return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403

     data = request.get_json() or {}
     raw_sql_b64 = data.get("rawSql")
     if not raw_sql_b64:
-        return jsonify({"error": "rawSql is required"}), 400
+        return jsonify({"success": False, "message": "ERROR: Missing parameters", "error": "rawSql is required"}), 400

     return write_query(raw_sql_b64)

@@ -390,12 +358,12 @@ def dbquery_write():
 @app.route("/dbquery/update", methods=["POST"])
 def dbquery_update():
     if not is_authorized():
-        return jsonify({"error": "Forbidden"}), 403
+        return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403

     data = request.get_json() or {}
     required = ["columnName", "id", "dbtable", "columns", "values"]
     if not all(data.get(k) for k in required):
-        return jsonify({"error": "Missing required parameters"}), 400
+        return jsonify({"success": False, "message": "ERROR: Missing parameters", "error": "Missing required 'columnName', 'id', 'dbtable', 'columns', or 'values' query parameter"}), 400

     return update_query(
         column_name=data["columnName"],

@@ -409,12 +377,12 @@ def dbquery_update():
 @app.route("/dbquery/delete", methods=["POST"])
 def dbquery_delete():
     if not is_authorized():
-        return jsonify({"error": "Forbidden"}), 403
+        return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403

     data = request.get_json() or {}
     required = ["columnName", "id", "dbtable"]
     if not all(data.get(k) for k in required):
-        return jsonify({"error": "Missing required parameters"}), 400
+        return jsonify({"success": False, "message": "ERROR: Missing parameters", "error": "Missing required 'columnName', 'id', or 'dbtable' query parameter"}), 400

     return delete_query(
         column_name=data["columnName"],
@@ -431,10 +399,51 @@ def dbquery_delete():
 @app.route("/history", methods=["DELETE"])
 def api_delete_online_history():
     if not is_authorized():
-        return jsonify({"error": "Forbidden"}), 403
+        return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403
     return delete_online_history()


+# --------------------------
+# Logs
+# --------------------------
+
+@app.route("/logs", methods=["DELETE"])
+def api_clean_log():
+    if not is_authorized():
+        return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403
+
+    file = request.args.get("file")
+    if not file:
+        return jsonify({"success": False, "message": "ERROR: Missing parameters", "error": "Missing 'file' query parameter"}), 400
+
+    return clean_log(file)
+
+
+@app.route("/logs/add-to-execution-queue", methods=["POST"])
+def api_add_to_execution_queue():
+
+    if not is_authorized():
+        return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403
+
+    queue = UserEventsQueueInstance()
+
+    # Get JSON payload safely
+    data = request.get_json(silent=True) or {}
+    action = data.get("action")
+
+    if not action:
+        return jsonify({"success": False, "message": "ERROR: Missing parameters", "error": "Missing required 'action' field in JSON body"}), 400
+
+    success, message = queue.add_event(action)
+    status_code = 200 if success else 400
+
+    response = {"success": success, "message": message}
+    if not success:
+        response["error"] = "ERROR"
+
+    return jsonify(response), status_code
+
+
 # --------------------------
 # Device Events
 # --------------------------
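The /logs/add-to-execution-queue route above expects a JSON body with a single "action" string, which it hands to UserEventsQueueInstance.add_event(). A minimal client sketch (base URL, port, auth header, and the action value are assumptions for illustration, not taken from this diff):

    import requests

    resp = requests.post(
        "http://localhost:20212/logs/add-to-execution-queue",
        headers={"Authorization": "Bearer <API_TOKEN>"},  # hypothetical auth scheme
        json={"action": "<some_action>"},
    )
    # 200 with {"success": true, "message": ...} when add_event() accepts the action,
    # 400 with an added "error" key when it does not.
    print(resp.status_code, resp.json())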
@@ -443,7 +452,7 @@ def api_delete_online_history():
 @app.route("/events/create/<mac>", methods=["POST"])
 def api_create_event(mac):
     if not is_authorized():
-        return jsonify({"error": "Forbidden"}), 403
+        return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403

     data = request.json or {}
     ip = data.get("ip", "0.0.0.0")

@@ -462,21 +471,21 @@ def api_create_event(mac):
 @app.route("/events/<mac>", methods=["DELETE"])
 def api_events_by_mac(mac):
     if not is_authorized():
-        return jsonify({"error": "Forbidden"}), 403
+        return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403
     return delete_device_events(mac)


 @app.route("/events", methods=["DELETE"])
 def api_delete_all_events():
     if not is_authorized():
-        return jsonify({"error": "Forbidden"}), 403
+        return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403
     return delete_events()


 @app.route("/events", methods=["GET"])
 def api_get_events():
     if not is_authorized():
-        return jsonify({"error": "Forbidden"}), 403
+        return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403

     mac = request.args.get("mac")
     return get_events(mac)

@@ -489,15 +498,15 @@ def api_delete_old_events(days: int):
     Example: DELETE /events/30
     """
     if not is_authorized():
-        return jsonify({"error": "Forbidden"}), 403
+        return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403

     return delete_events_older_than(days)


 @app.route("/sessions/totals", methods=["GET"])
 def api_get_events_totals():
     if not is_authorized():
-        return jsonify({"error": "Forbidden"}), 403
+        return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403

     period = get_date_from_period(request.args.get("period", "7 days"))
     return get_events_totals(period)
@@ -511,7 +520,7 @@ def api_get_events_totals():
 @app.route("/sessions/create", methods=["POST"])
 def api_create_session():
     if not is_authorized():
-        return jsonify({"error": "Forbidden"}), 403
+        return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403

     data = request.json
     mac = data.get("mac")

@@ -522,7 +531,7 @@ def api_create_session():
     event_type_disc = data.get("event_type_disc", "Disconnected")

     if not mac or not ip or not start_time:
-        return jsonify({"success": False, "error": "Missing required parameters"}), 400
+        return jsonify({"success": False, "message": "ERROR: Missing parameters", "error": "Missing required 'mac', 'ip', or 'start_time' query parameter"}), 400

     return create_session(
         mac, ip, start_time, end_time, event_type_conn, event_type_disc

@@ -532,11 +541,11 @@ def api_create_session():
 @app.route("/sessions/delete", methods=["DELETE"])
 def api_delete_session():
     if not is_authorized():
-        return jsonify({"error": "Forbidden"}), 403
+        return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403

     mac = request.json.get("mac") if request.is_json else None
     if not mac:
-        return jsonify({"success": False, "error": "Missing MAC parameter"}), 400
+        return jsonify({"success": False, "message": "ERROR: Missing parameters", "error": "Missing 'mac' query parameter"}), 400

     return delete_session(mac)

@@ -544,7 +553,7 @@ def api_delete_session():
 @app.route("/sessions/list", methods=["GET"])
 def api_get_sessions():
     if not is_authorized():
-        return jsonify({"error": "Forbidden"}), 403
+        return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403

     mac = request.args.get("mac")
     start_date = request.args.get("start_date")

@@ -556,7 +565,7 @@ def api_get_sessions():
 @app.route("/sessions/calendar", methods=["GET"])
 def api_get_sessions_calendar():
     if not is_authorized():
-        return jsonify({"error": "Forbidden"}), 403
+        return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403

     # Query params: /sessions/calendar?start=2025-08-01&end=2025-08-21
     start_date = request.args.get("start")

@@ -568,7 +577,7 @@ def api_get_sessions_calendar():
 @app.route("/sessions/<mac>", methods=["GET"])
 def api_device_sessions(mac):
     if not is_authorized():
-        return jsonify({"error": "Forbidden"}), 403
+        return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403

     period = request.args.get("period", "1 day")
     return get_device_sessions(mac, period)

@@ -577,7 +586,7 @@ def api_device_sessions(mac):
 @app.route("/sessions/session-events", methods=["GET"])
 def api_get_session_events():
     if not is_authorized():
-        return jsonify({"error": "Forbidden"}), 403
+        return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403

     session_event_type = request.args.get("type", "all")
     period = get_date_from_period(request.args.get("period", "7 days"))

@@ -590,7 +599,7 @@ def api_get_session_events():
 @app.route("/metrics")
 def metrics():
     if not is_authorized():
-        return jsonify({"error": "Forbidden"}), 403
+        return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403

     # Return Prometheus metrics as plain text
     return Response(get_metric_stats(), mimetype="text/plain")
@@ -602,15 +611,15 @@ def metrics():
 @app.route("/messaging/in-app/write", methods=["POST"])
 def api_write_notification():
     if not is_authorized():
-        return jsonify({"error": "Forbidden"}), 403
+        return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403

     data = request.json or {}
     content = data.get("content")
     level = data.get("level", "alert")

     if not content:
-        return jsonify({"success": False, "error": "Missing content"}), 400
+        return jsonify({"success": False, "message": "ERROR: Missing parameters", "error": "Missing content"}), 400

     write_notification(content, level)
     return jsonify({"success": True})

@@ -618,7 +627,7 @@ def api_write_notification():
 @app.route("/messaging/in-app/unread", methods=["GET"])
 def api_get_unread_notifications():
     if not is_authorized():
-        return jsonify({"error": "Forbidden"}), 403
+        return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403

     return get_unread_notifications()

@@ -626,7 +635,7 @@ def api_get_unread_notifications():
 @app.route("/messaging/in-app/read/all", methods=["POST"])
 def api_mark_all_notifications_read():
     if not is_authorized():
-        return jsonify({"error": "Forbidden"}), 403
+        return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403

     return jsonify(mark_all_notifications_read())

@@ -634,7 +643,7 @@ def api_mark_all_notifications_read():
 @app.route("/messaging/in-app/delete", methods=["DELETE"])
 def api_delete_all_notifications():
     if not is_authorized():
-        return jsonify({"error": "Forbidden"}), 403
+        return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403

     return delete_notifications()

@@ -643,35 +652,34 @@ def api_delete_all_notifications():
 def api_delete_notification(guid):
     """Delete a single notification by GUID."""
     if not is_authorized():
-        return jsonify({"error": "Forbidden"}), 403
+        return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403

     result = delete_notification(guid)
     if result.get("success"):
         return jsonify({"success": True})
     else:
-        return jsonify({"success": False, "error": result.get("error")}), 500
+        return jsonify({"success": False, "message": "ERROR", "error": result.get("error")}), 500


 @app.route("/messaging/in-app/read/<guid>", methods=["POST"])
 def api_mark_notification_read(guid):
     """Mark a single notification as read by GUID."""
     if not is_authorized():
-        return jsonify({"error": "Forbidden"}), 403
+        return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403

     result = mark_notification_as_read(guid)
     if result.get("success"):
         return jsonify({"success": True})
     else:
-        return jsonify({"success": False, "error": result.get("error")}), 500
+        return jsonify({"success": False, "message": "ERROR", "error": result.get("error")}), 500
 # --------------------------
 # SYNC endpoint
 # --------------------------
 @app.route("/sync", methods=["GET", "POST"])
 def sync_endpoint():
     if not is_authorized():
-        return jsonify({"error": "Forbidden"}), 403
+        return jsonify({"success": False, "message": "ERROR: Not authorized", "error": "Forbidden"}), 403

     if request.method == "GET":
         return handle_sync_get()

@@ -681,7 +689,7 @@ def sync_endpoint():
     msg = "[sync endpoint] Method Not Allowed"
     write_notification(msg, "alert")
     mylog("verbose", [msg])
-    return jsonify({"error": "Method Not Allowed"}), 405
+    return jsonify({"success": False, "message": "ERROR: Not allowed", "error": "Method Not Allowed"}), 405


 # --------------------------
@@ -10,7 +10,8 @@ INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
 sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])

 from database import get_temp_db_connection
-from helper import is_random_mac, format_date, get_setting_value
+from helper import is_random_mac, get_setting_value
+from utils.datetime_utils import timeNowDB, format_date
 from db.db_helper import row_to_json, get_date_from_period

 # --------------------------
@@ -25,9 +26,11 @@ def get_device_data(mac):
     conn = get_temp_db_connection()
     cur = conn.cursor()

+    now = timeNowDB()
+
     # Special case for new device
     if mac.lower() == "new":
-        now = datetime.now().strftime("%Y-%m-%d %H:%M")

         device_data = {
             "devMac": "",
             "devName": "",
@@ -75,7 +78,6 @@ def get_device_data(mac):
     # Compute period date for sessions/events
     period = request.args.get("period", "")  # e.g., '7 days', '1 month', etc.
     period_date_sql = get_date_from_period(period)
-    current_date = datetime.now().strftime("%Y-%m-%d %H:%M:%S")

     # Fetch device info + computed fields
     sql = f"""
@@ -103,7 +105,7 @@ def get_device_data(mac):
                 AND eve_EventType = 'Device Down') AS devDownAlerts,

             (SELECT CAST(MAX(0, SUM(
-                julianday(IFNULL(ses_DateTimeDisconnection,'{current_date}')) -
+                julianday(IFNULL(ses_DateTimeDisconnection,'{now}')) -
                 julianday(CASE WHEN ses_DateTimeConnection < {period_date_sql}
                           THEN {period_date_sql} ELSE ses_DateTimeConnection END)
             ) * 24) AS INT)

@@ -186,10 +188,8 @@ def set_device_data(mac, data):
         data.get("devSkipRepeated", 0),
         data.get("devIsNew", 0),
         data.get("devIsArchived", 0),
-        data.get("devLastConnection", datetime.now().strftime("%Y-%m-%d %H:%M:%S")),
-        data.get(
-            "devFirstConnection", datetime.now().strftime("%Y-%m-%d %H:%M:%S")
-        ),
+        data.get("devLastConnection", timeNowDB()),
+        data.get("devFirstConnection", timeNowDB()),
         data.get("devLastIP", ""),
         data.get("devGUID", ""),
         data.get("devCustomProps", ""),
@@ -16,6 +16,7 @@ sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])

 from database import get_temp_db_connection
 from db.db_helper import get_table_json, get_device_condition_by_status
+from utils.datetime_utils import format_date


 # --------------------------
@@ -10,11 +10,9 @@ INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
 sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])

 from database import get_temp_db_connection
-from helper import (
-    mylog,
-    ensure_datetime,
-)
+from helper import is_random_mac, mylog
 from db.db_helper import row_to_json, get_date_from_period
+from utils.datetime_utils import format_date, format_date_iso, format_event_date, ensure_datetime


 # --------------------------
@@ -1,5 +1,5 @@
 import graphene
-from graphene import ObjectType, String, Int, Boolean, List, Field, InputObjectType
+from graphene import ObjectType, String, Int, Boolean, List, Field, InputObjectType, Argument
 import json
 import sys
 import os
@@ -111,6 +111,22 @@ class SettingResult(ObjectType):
     settings = List(Setting)
     count = Int()

+# --- LANGSTRINGS ---
+
+# In-memory cache for lang strings
+_langstrings_cache = {}        # caches lists per file (core JSON or plugin)
+_langstrings_cache_mtime = {}  # tracks last modified times
+
+# LangString ObjectType
+class LangString(ObjectType):
+    langCode = String()
+    langStringKey = String()
+    langStringText = String()
+
+
+class LangStringResult(ObjectType):
+    langStrings = List(LangString)
+    count = Int()
+
 # Define Query Type with Pagination Support
 class Query(ObjectType):
@@ -324,6 +340,107 @@ class Query(ObjectType):

         return SettingResult(settings=settings, count=len(settings))

+    # --- LANGSTRINGS ---
+    langStrings = Field(
+        LangStringResult,
+        langCode=Argument(String, required=False),
+        langStringKey=Argument(String, required=False)
+    )
+
+    def resolve_langStrings(self, info, langCode=None, langStringKey=None, fallback_to_en=True):
+        """
+        Collect language strings, optionally filtered by language code and/or string key.
+        Caches in memory for performance. Can fall back to 'en_us' if a string is missing.
+        """
+        global _langstrings_cache, _langstrings_cache_mtime
+
+        langStrings = []
+
+        # --- CORE JSON FILES ---
+        language_folder = '/app/front/php/templates/language/'
+        if os.path.exists(language_folder):
+            for filename in os.listdir(language_folder):
+                if filename.endswith('.json'):
+                    file_lang_code = filename.replace('.json', '')
+
+                    # Filter by langCode if provided
+                    if langCode and file_lang_code != langCode:
+                        continue
+
+                    file_path = os.path.join(language_folder, filename)
+                    file_mtime = os.path.getmtime(file_path)
+                    cache_key = f'core_{file_lang_code}'
+
+                    # Use cached data if available and not modified
+                    if cache_key in _langstrings_cache_mtime and _langstrings_cache_mtime[cache_key] == file_mtime:
+                        lang_list = _langstrings_cache[cache_key]
+                    else:
+                        try:
+                            with open(file_path, 'r', encoding='utf-8') as f:
+                                data = json.load(f)
+                            lang_list = [
+                                LangString(
+                                    langCode=file_lang_code,
+                                    langStringKey=key,
+                                    langStringText=value
+                                ) for key, value in data.items()
+                            ]
+                            _langstrings_cache[cache_key] = lang_list
+                            _langstrings_cache_mtime[cache_key] = file_mtime
+                        except (FileNotFoundError, json.JSONDecodeError) as e:
+                            mylog('none', f'[graphql_schema] Error loading core language strings from {filename}: {e}')
+                            lang_list = []
+
+                    langStrings.extend(lang_list)
+
+        # --- PLUGIN STRINGS ---
+        plugin_file = folder + 'table_plugins_language_strings.json'
+        try:
+            file_mtime = os.path.getmtime(plugin_file)
+            cache_key = 'plugin'
+            if cache_key in _langstrings_cache_mtime and _langstrings_cache_mtime[cache_key] == file_mtime:
+                plugin_list = _langstrings_cache[cache_key]
+            else:
+                with open(plugin_file, 'r', encoding='utf-8') as f:
+                    plugin_data = json.load(f).get("data", [])
+                plugin_list = [
+                    LangString(
+                        langCode=entry.get("Language_Code"),
+                        langStringKey=entry.get("String_Key"),
+                        langStringText=entry.get("String_Value")
+                    ) for entry in plugin_data
+                ]
+                _langstrings_cache[cache_key] = plugin_list
+                _langstrings_cache_mtime[cache_key] = file_mtime
+        except (FileNotFoundError, json.JSONDecodeError) as e:
+            mylog('none', f'[graphql_schema] Error loading plugin language strings from {plugin_file}: {e}')
+            plugin_list = []
+
+        # Filter plugin strings by langCode if provided
+        if langCode:
+            plugin_list = [p for p in plugin_list if p.langCode == langCode]
+
+        langStrings.extend(plugin_list)
+
+        # --- Filter by string key if requested ---
+        if langStringKey:
+            langStrings = [ls for ls in langStrings if ls.langStringKey == langStringKey]
+
+        # --- Fall back to en_us if enabled and the requested string is missing ---
+        if fallback_to_en and langCode and langCode != "en_us":
+            for i, ls in enumerate(langStrings):
+                if not ls.langStringText:  # empty string triggers fallback
+                    # try to get the en_us version
+                    en_list = _langstrings_cache.get("core_en_us", [])
+                    en_list += [p for p in _langstrings_cache.get("plugin", []) if p.langCode == "en_us"]
+                    en_fallback = [e for e in en_list if e.langStringKey == ls.langStringKey]
+                    if en_fallback:
+                        langStrings[i] = en_fallback[0]
+
+        mylog('trace', f'[graphql_schema] Collected {len(langStrings)} language strings '
+                       f'(langCode={langCode}, key={langStringKey}, fallback_to_en={fallback_to_en})')
+
+        return LangStringResult(langStrings=langStrings, count=len(langStrings))

 # helps sort inconsistent datasets mixing integers and strings
 def mixed_type_sort_key(value):
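The langStrings field registered on Query above can be exercised through the /graphql route from earlier in this diff. A hedged sketch of a client call (host, port, and auth header are assumptions; the field and argument names match the resolver above):

    import requests

    query = """
    {
      langStrings(langCode: "en_us") {
        count
        langStrings { langCode langStringKey langStringText }
      }
    }
    """
    resp = requests.post(
        "http://localhost:20212/graphql",              # hypothetical host/port
        headers={"Authorization": "Bearer <API_TOKEN>"},
        json={"query": query},
    )
    print(resp.json()["data"]["langStrings"]["count"])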
server/api_server/logs_endpoint.py (new file, 58 lines)
@@ -0,0 +1,58 @@
|
||||
import os
|
||||
import sys
|
||||
from flask import jsonify
|
||||
|
||||
# Register NetAlertX directories
|
||||
INSTALL_PATH="/app"
|
||||
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
|
||||
|
||||
from const import logPath
|
||||
from logger import mylog, Logger
|
||||
from helper import get_setting_value
|
||||
from utils.datetime_utils import timeNowDB
|
||||
from messaging.in_app import write_notification
|
||||
|
||||
# Make sure log level is initialized correctly
|
||||
Logger(get_setting_value('LOG_LEVEL'))
|
||||
|
||||
def clean_log(log_file):
|
||||
"""
|
||||
Purge the content of an allowed log file within the /app/log/ directory.
|
||||
|
||||
Args:
|
||||
log_file (str): Name of the log file to purge.
|
||||
|
||||
Returns:
|
||||
flask.Response: JSON response with success and message keys
|
||||
"""
|
||||
allowed_files = [
|
||||
'app.log', 'app_front.log', 'IP_changes.log', 'stdout.log', 'stderr.log',
|
||||
'app.php_errors.log', 'execution_queue.log', 'db_is_locked.log'
|
||||
]
|
||||
|
||||
# Validate filename if purging allowed
|
||||
if log_file not in allowed_files:
|
||||
msg = f"[clean_log] File {log_file} is not allowed to be purged"
|
||||
|
||||
mylog('none', [msg])
|
||||
write_notification(msg, 'interrupt')
|
||||
return jsonify({"success": False, "message": msg}), 400
|
||||
|
||||
log_path = os.path.join(logPath, log_file)
|
||||
|
||||
try:
|
||||
# Purge content
|
||||
with open(log_path, "w") as f:
|
||||
f.write("File manually purged\n")
|
||||
msg = f"[clean_log] File {log_file} purged successfully"
|
||||
|
||||
mylog('minimal', [msg])
|
||||
write_notification(msg, 'interrupt')
|
||||
return jsonify({"success": True, "message": msg}), 200
|
||||
except Exception as e:
|
||||
msg = f"[clean_log] ERROR Failed to purge {log_file}: {e}"
|
||||
|
||||
mylog('none', [msg])
|
||||
write_notification(msg, 'interrupt')
|
||||
return jsonify({"success": False, "message": msg}), 500
|
||||
|
||||
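clean_log() backs the DELETE /logs route registered earlier, which forwards the "file" query parameter after the authorization check. A usage sketch (host, port, and auth header are assumptions, not taken from this diff):

    import requests

    resp = requests.delete(
        "http://localhost:20212/logs",                  # hypothetical host/port
        params={"file": "app.log"},                     # must be one of the allowed_files above
        headers={"Authorization": "Bearer <API_TOKEN>"},
    )
    print(resp.json())  # {"success": true, "message": "[clean_log] File app.log purged successfully"}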
@@ -10,14 +10,9 @@ INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
 sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])

 from database import get_temp_db_connection
-from helper import (
-    format_date,
-    format_date_iso,
-    format_event_date,
-    format_date_diff,
-    format_ip_long,
-)
-from db.db_helper import get_date_from_period
+from helper import is_random_mac, get_setting_value, mylog, format_ip_long
+from db.db_helper import row_to_json, get_date_from_period
+from utils.datetime_utils import format_date_iso, format_event_date, format_date_diff, parse_datetime, format_date


 # --------------------------
@@ -231,6 +226,7 @@ def get_device_sessions(mac, period):
     cur.execute(sql, (mac,))
     rows = cur.fetchall()
     conn.close()
+    tz_name = get_setting_value("TIMEZONE") or "UTC"

     table_data = {"data": []}
@@ -255,11 +251,9 @@ def get_device_sessions(mac, period):
         ] in ("<missing event>", None):
             dur = "..."
         elif row["ses_StillConnected"]:
-            dur = format_date_diff(row["ses_DateTimeConnection"], None)["text"]
+            dur = format_date_diff(row["ses_DateTimeConnection"], None, tz_name)["text"]
         else:
-            dur = format_date_diff(
-                row["ses_DateTimeConnection"], row["ses_DateTimeDisconnection"]
-            )["text"]
+            dur = format_date_diff(row["ses_DateTimeConnection"], row["ses_DateTimeDisconnection"], tz_name)["text"]

         # Additional Info
         info = row["ses_AdditionalInfo"]
@@ -295,6 +289,7 @@ def get_session_events(event_type, period_date):
     conn = get_temp_db_connection()
     conn.row_factory = sqlite3.Row
     cur = conn.cursor()
+    tz_name = get_setting_value("TIMEZONE") or "UTC"

     # Base SQLs
     sql_events = f"""
@@ -382,11 +377,11 @@ def get_session_events(event_type, period_date):
         if event_type in ("sessions", "missing"):
             # Duration
             if row[5] and row[6]:
-                delta = format_date_diff(row[5], row[6])
+                delta = format_date_diff(row[5], row[6], tz_name)
                 row[7] = delta["text"]
                 row[8] = int(delta["total_minutes"] * 60)  # seconds
             elif row[12] == 1:
-                delta = format_date_diff(row[5], None)
+                delta = format_date_diff(row[5], None, tz_name)
                 row[7] = delta["text"]
                 row[8] = int(delta["total_minutes"] * 60)  # seconds
             else:
@@ -2,7 +2,8 @@ import os
 import base64
 from flask import jsonify, request
 from logger import mylog
-from helper import get_setting_value, timeNowTZ
+from helper import get_setting_value
+from utils.datetime_utils import timeNowDB
 from messaging.in_app import write_notification

 INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
@@ -17,22 +18,20 @@ def handle_sync_get():
         raw_data = f.read()
     except FileNotFoundError:
         msg = f"[Plugin: SYNC] Data file not found: {file_path}"
-        write_notification(msg, "alert", timeNowTZ())
+        write_notification(msg, "alert", timeNowDB())
         mylog("verbose", [msg])
         return jsonify({"error": msg}), 500

     response_data = base64.b64encode(raw_data).decode("utf-8")

-    write_notification("[Plugin: SYNC] Data sent", "info", timeNowTZ())
-    return jsonify(
-        {
-            "node_name": get_setting_value("SYNC_node_name"),
-            "status": 200,
-            "message": "OK",
-            "data_base64": response_data,
-            "timestamp": timeNowTZ(),
-        }
-    ), 200
+    write_notification("[Plugin: SYNC] Data sent", "info", timeNowDB())
+    return jsonify({
+        "node_name": get_setting_value("SYNC_node_name"),
+        "status": 200,
+        "message": "OK",
+        "data_base64": response_data,
+        "timestamp": timeNowDB()
+    }), 200


 def handle_sync_post():
@@ -65,11 +64,11 @@ def handle_sync_post():
             f.write(data)
     except Exception as e:
         msg = f"[Plugin: SYNC] Failed to store data: {e}"
-        write_notification(msg, "alert", timeNowTZ())
+        write_notification(msg, "alert", timeNowDB())
         mylog("verbose", [msg])
         return jsonify({"error": msg}), 500

     msg = f"[Plugin: SYNC] Data received ({file_path_new})"
-    write_notification(msg, "info", timeNowTZ())
+    write_notification(msg, "info", timeNowDB())
     mylog("verbose", [msg])
     return jsonify({"message": "Data received and stored successfully"}), 200
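handle_sync_get() ships the node's data base64-encoded inside the JSON envelope, so a consuming node must decode data_base64 before use. A minimal client sketch (peer URL and auth header are assumptions for illustration):

    import base64
    import requests

    resp = requests.get(
        "http://node1.local:20212/sync",                # hypothetical peer address
        headers={"Authorization": "Bearer <API_TOKEN>"},
    )
    payload = resp.json()
    raw = base64.b64decode(payload["data_base64"])  # the peer's raw data file bytes
    print(payload["node_name"], payload["timestamp"], len(raw))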
@@ -3,7 +3,8 @@ import json

 from const import *
 from logger import mylog
-from helper import timeNowTZ, timeNow, checkNewVersion
+from helper import checkNewVersion
+from utils.datetime_utils import timeNowDB, timeNow

 # Register NetAlertX directories using runtime configuration
 INSTALL_PATH = applicationPath
@@ -31,16 +32,14 @@ class app_state_class:
         isNewVersionChecked (int): Timestamp of last version check.
     """

-    def __init__(
-        self,
-        currentState=None,
-        settingsSaved=None,
-        settingsImported=None,
-        showSpinner=None,
-        graphQLServerStarted=0,
-        processScan=False,
-        pluginsStates=None,
-    ):
+    def __init__(self, currentState=None,
+                 settingsSaved=None,
+                 settingsImported=None,
+                 showSpinner=None,
+                 graphQLServerStarted=0,
+                 processScan=False,
+                 pluginsStates=None,
+                 appVersion=None):
         """
         Initialize the application state, optionally overwriting previous values.
@@ -55,14 +54,15 @@ class app_state_class:
             graphQLServerStarted (int, optional): Initial GraphQL server timestamp.
             processScan (bool, optional): Initial processScan flag.
             pluginsStates (dict, optional): Initial plugin states to merge with previous state.
+            appVersion (str, optional): Application version.
         """
         # json file containing the state to communicate with the frontend
        stateFile = apiPath + "app_state.json"
        previousState = ""

        # Update self
-        self.lastUpdated = str(timeNowTZ())
+        self.lastUpdated = str(timeNowDB())

        if os.path.exists(stateFile):
            try:
                with open(stateFile, "r") as json_file:
@@ -73,26 +73,28 @@ class app_state_class:
                 )

         # Check if the file exists and recover previous values
         if previousState != "":
             self.settingsSaved = previousState.get("settingsSaved", 0)
             self.settingsImported = previousState.get("settingsImported", 0)
             self.processScan = previousState.get("processScan", False)
             self.showSpinner = previousState.get("showSpinner", False)
             self.isNewVersion = previousState.get("isNewVersion", False)
             self.isNewVersionChecked = previousState.get("isNewVersionChecked", 0)
             self.graphQLServerStarted = previousState.get("graphQLServerStarted", 0)
             self.currentState = previousState.get("currentState", "Init")
             self.pluginsStates = previousState.get("pluginsStates", {})
+            self.appVersion = previousState.get("appVersion", "")
         else: # init first time values
             self.settingsSaved = 0
             self.settingsImported = 0
             self.showSpinner = False
             self.processScan = False
             self.isNewVersion = checkNewVersion()
             self.isNewVersionChecked = int(timeNow().timestamp())
             self.graphQLServerStarted = 0
             self.currentState = "Init"
             self.pluginsStates = {}
+            self.appVersion = ""

         # Overwrite with provided parameters if supplied
         if settingsSaved is not None:
@@ -112,9 +114,7 @@ class app_state_class:
             for plugin, state in pluginsStates.items():
                 if plugin in self.pluginsStates:
                     # Only update existing keys if both are dicts
-                    if isinstance(self.pluginsStates[plugin], dict) and isinstance(
-                        state, dict
-                    ):
+                    if isinstance(self.pluginsStates[plugin], dict) and isinstance(state, dict):
                         self.pluginsStates[plugin].update(state)
                     else:
                         # Replace if types don't match
@@ -123,7 +123,8 @@ class app_state_class:
                 # Optionally ignore or add new plugin entries
                 # To ignore new plugins, comment out the next line
                 self.pluginsStates[plugin] = state

+        if appVersion is not None:
+            self.appVersion = appVersion
         # check for new version every hour and if currently not running new version
         if self.isNewVersion is False and self.isNewVersionChecked + 3600 < int(
             timeNow().timestamp()
@@ -157,15 +158,14 @@ class app_state_class:

 # -------------------------------------------------------------------------------
 # method to update the state
-def updateState(
-    newState=None,
-    settingsSaved=None,
-    settingsImported=None,
-    showSpinner=None,
-    graphQLServerStarted=None,
-    processScan=None,
-    pluginsStates=None,
-):
+def updateState(newState=None,
+                settingsSaved=None,
+                settingsImported=None,
+                showSpinner=None,
+                graphQLServerStarted=None,
+                processScan=None,
+                pluginsStates=None,
+                appVersion=None):
     """
     Convenience method to create or update the app state.

@@ -177,19 +177,19 @@ def updateState(
         graphQLServerStarted (int, optional): Timestamp of GraphQL server start.
         processScan (bool, optional): Flag indicating if a scan is active.
         pluginsStates (dict, optional): Plugin state updates.
+        appVersion (str, optional): Application version.

     Returns:
         app_state_class: Updated state object.
     """
-    return app_state_class(
-        newState,
-        settingsSaved,
-        settingsImported,
-        showSpinner,
-        graphQLServerStarted,
-        processScan,
-        pluginsStates,
-    )
+    return app_state_class(newState,
+                           settingsSaved,
+                           settingsImported,
+                           showSpinner,
+                           graphQLServerStarted,
+                           processScan,
+                           pluginsStates,
+                           appVersion)


 # -------------------------------------------------------------------------------
server/helper.py (210 lines changed)
@@ -7,7 +7,6 @@ import os
 import re
 import unicodedata
 import subprocess
-from typing import Union
 import pytz
 import json
 import requests
@@ -26,151 +25,6 @@ from logger import mylog, logResult
|
||||
INSTALL_PATH = applicationPath
|
||||
|
||||
|
||||
# -------------------------------------------------------------------------------
|
||||
# DateTime
|
||||
# -------------------------------------------------------------------------------
# Get the current time in the current TimeZone
def timeNowTZ():
    if conf.tz:
        return datetime.datetime.now(conf.tz).replace(microsecond=0)
    else:
        return datetime.datetime.now().replace(microsecond=0)

    # if isinstance(conf.TIMEZONE, str):
    #     tz = pytz.timezone(conf.TIMEZONE)
    # else:
    #     tz = conf.TIMEZONE

    # return datetime.datetime.now(tz).replace(microsecond=0)


def timeNow():
    return datetime.datetime.now().replace(microsecond=0)


def get_timezone_offset():
    now = datetime.datetime.now(conf.tz)
    offset_hours = now.utcoffset().total_seconds() / 3600
    offset_formatted = "{:+03d}:{:02d}".format(
        int(offset_hours), int((offset_hours % 1) * 60)
    )
    return offset_formatted


# -------------------------------------------------------------------------------
# Date and time methods
# -------------------------------------------------------------------------------

# # -------------------------------------------------------------------------------------------
# def format_date(date_str: str) -> str:
#     """Format a date string as 'YYYY-MM-DD HH:MM'"""
#     dt = datetime.datetime.fromisoformat(date_str) if isinstance(date_str, str) else date_str
#     return dt.strftime('%Y-%m-%d %H:%M')

# # -------------------------------------------------------------------------------------------
# def format_date_diff(date1: str, date2: str) -> str:
#     """Return difference between two dates formatted as 'Xd HH:MM'"""
#     dt1 = datetime.datetime.fromisoformat(date1) if isinstance(date1, str) else date1
#     dt2 = datetime.datetime.fromisoformat(date2) if isinstance(date2, str) else date2
#     delta = dt2 - dt1

#     days = delta.days
#     hours, remainder = divmod(delta.seconds, 3600)
#     minutes = remainder // 60

#     return f"{days}d {hours:02}:{minutes:02}"


# -------------------------------------------------------------------------------------------
def format_date_iso(date1: str) -> str:
    """Return ISO 8601 string for a date or None if empty"""
    if date1 is None:
        return None
    dt = datetime.datetime.fromisoformat(date1) if isinstance(date1, str) else date1
    return dt.isoformat()


# -------------------------------------------------------------------------------------------
def format_event_date(date_str: str, event_type: str) -> str:
    """Format event date with fallback rules."""
    if date_str:
        return format_date(date_str)
    elif event_type == "<missing event>":
        return "<missing event>"
    else:
        return "<still connected>"


# -------------------------------------------------------------------------------------------
def ensure_datetime(dt: Union[str, datetime.datetime, None]) -> datetime.datetime:
    if dt is None:
        return timeNowTZ()
    if isinstance(dt, str):
        return datetime.datetime.fromisoformat(dt)
    return dt


def parse_datetime(dt_str):
    if not dt_str:
        return None
    try:
        # Try ISO8601 first
        return datetime.datetime.fromisoformat(dt_str)
    except ValueError:
        # Try RFC1123 / HTTP format
        try:
            return datetime.datetime.strptime(dt_str, "%a, %d %b %Y %H:%M:%S GMT")
        except ValueError:
            return None


def format_date(date_str: str) -> str:
    dt = parse_datetime(date_str)
    return dt.strftime("%Y-%m-%d %H:%M") if dt else "invalid"


def format_date_diff(date1, date2):
    """
    Return difference between two datetimes as 'Xd HH:MM'.
    Uses app timezone if datetime is naive.
    date2 can be None (uses now).
    """
    # Get timezone from settings
    tz_name = get_setting_value("TIMEZONE") or "UTC"
    tz = pytz.timezone(tz_name)

    def parse_dt(dt):
        if dt is None:
            return datetime.datetime.now(tz)
        if isinstance(dt, str):
            try:
                dt_parsed = email.utils.parsedate_to_datetime(dt)
            except Exception:
                # fallback: parse ISO string
                dt_parsed = datetime.datetime.fromisoformat(dt)
            # convert naive GMT/UTC to app timezone
            if dt_parsed.tzinfo is None:
                dt_parsed = tz.localize(dt_parsed)
            else:
                dt_parsed = dt_parsed.astimezone(tz)
            return dt_parsed
        return dt if dt.tzinfo else tz.localize(dt)

    dt1 = parse_dt(date1)
    dt2 = parse_dt(date2)

    delta = dt2 - dt1
    total_minutes = int(delta.total_seconds() // 60)
    days, rem_minutes = divmod(total_minutes, 1440)  # 1440 mins in a day
    hours, minutes = divmod(rem_minutes, 60)

    return {
        "text": f"{days}d {hours:02}:{minutes:02}",
        "days": days,
        "hours": hours,
        "minutes": minutes,
        "total_minutes": total_minutes,
    }
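# Illustrative usage sketch (editor's example based on the definitions above,
# not part of the original diff): format_date_diff() returns a dict, so callers
# can use either the preformatted text or the numeric fields:
#
#   diff = format_date_diff("2025-11-01 08:00:00", "2025-11-02 09:30:00")
#   diff["text"]           # -> '1d 01:30'
#   diff["total_minutes"]  # -> 1530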
# -------------------------------------------------------------------------------
# File system permission handling
@@ -438,10 +292,12 @@ def get_setting_value(key):
                value = setting_value_to_python_type(set_type, set_value)
            else:
                value = setting_value_to_python_type(set_type, str(set_value))

            SETTINGS_SECONDARYCACHE[key] = value

            return value

    # Otherwise fall back to retrive from json
    # Otherwise fall back to retrieve from json
    setting = get_setting(key)

    if setting is not None:
@@ -525,11 +381,8 @@ def setting_value_to_python_type(set_type, set_value):
    elif dataType == "array" and elementType == "select":
        if isinstance(set_value, str):
            try:
                value = json.loads(set_value.replace("'", '"'))

                # reverse transformations to all entries
                value = reverseTransformers(value, transformers)

                value = json.loads(set_value.replace("'", "\""))

            except json.JSONDecodeError as e:
                mylog(
                    "none",
@@ -541,7 +394,10 @@ def setting_value_to_python_type(set_type, set_value):
        elif isinstance(set_value, list):
            value = set_value

    elif dataType == "object" and elementType == "input":
        # Always apply transformers (base64, etc.) to array entries
        value = reverseTransformers(value, transformers)

    elif dataType == 'object' and elementType == 'input':
        if isinstance(set_value, str):
            try:
                value = reverseTransformers(json.loads(set_value), transformers)
@@ -887,38 +743,42 @@ def collect_lang_strings(json, pref, stringSqlParams):

# -------------------------------------------------------------------------------
# Get the value from the buildtimestamp.txt and initialize it if missing
def getBuildTimeStamp():
def getBuildTimeStampAndVersion():
    """
    Retrieves the build timestamp from 'front/buildtimestamp.txt' within the
    application directory.

    If the file does not exist, it is created and initialized with the value '0'.
    Retrieves the build timestamp and version from files within the
    application directory. Initializes them if missing.

    Returns:
        int: The integer value of the build timestamp read from the file.
             Returns 0 if the file is empty or just initialized.
        tuple: (int buildTimestamp, str version)
    """
    buildTimestamp = 0
    build_timestamp_path = os.path.join(applicationPath, "front/buildtimestamp.txt")
    files_defaults = [
        ('front/buildtimestamp.txt', '0'),
        ('.VERSION', 'unknown')
    ]

    # Ensure file exists, initialize if missing
    if not os.path.exists(build_timestamp_path):
        with open(build_timestamp_path, "w") as f:
            f.write("0")
    results = []

    # Now safely read the timestamp
    with open(build_timestamp_path, "r") as f:
        buildTimestamp = int(f.read().strip() or 0)
    for filename, default in files_defaults:
        path = os.path.join(applicationPath, filename)
        if not os.path.exists(path):
            with open(path, 'w') as f:
                f.write(default)

        with open(path, 'r') as f:
            content = f.read().strip() or default
            # Convert buildtimestamp to int, leave version as string
            value = int(content) if filename.endswith('buildtimestamp.txt') else content
            results.append(value)

    return tuple(results)

    return buildTimestamp
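# Illustrative usage sketch (editor's example, not part of the original diff):
# the function now returns a (timestamp, version) tuple instead of a bare int,
# so existing call sites unpack it:
#
#   buildTimestamp, version = getBuildTimeStampAndVersion()
#   # buildTimestamp -> e.g. 1730700000 (int), version -> e.g. 'unknown' (str)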
# -------------------------------------------------------------------------------
def checkNewVersion():
    mylog("debug", ["[Version check] Checking if new version available"])

    newVersion = False
    buildTimestamp = getBuildTimeStamp()
    buildTimestamp, _version = getBuildTimeStampAndVersion()

    try:
        response = requests.get(
@@ -946,8 +806,8 @@ def checkNewVersion():
        )

        if releaseTimestamp > buildTimestamp + 600:
            mylog("none", ["[Version check] New version of the container available!"])
            newVersion = True
            mylog('none', ["[Version check] New version of the container available!"])
            return True
        else:
            mylog("none", ["[Version check] Running the latest version."])
    else:
@@ -956,7 +816,7 @@ def checkNewVersion():
            ["[Version check] ⚠ ERROR: Received unexpected response from GitHub."],
        )

    return newVersion
    return False
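# Editor's note (an inference from the comparison above, not stated in the diff):
# the +600 gives a 10-minute grace window, i.e. a GitHub release must be at
# least 600 seconds newer than the local build timestamp before a new version
# is reported, which avoids false positives from build/release timestamp skew.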
# -------------------------------------------------------------------------------

@@ -8,23 +8,18 @@ import shutil
import re

# Register NetAlertX libraries
import conf
from const import fullConfPath, fullConfFolder, default_tz
from helper import (
    getBuildTimeStamp,
    fixPermissions,
    collect_lang_strings,
    updateSubnets,
    timeNowTZ,
    generate_random_string,
)
import conf
from const import fullConfPath, applicationPath, fullConfFolder, default_tz
from helper import getBuildTimeStampAndVersion, fixPermissions, collect_lang_strings, updateSubnets, isJsonObject, setting_value_to_python_type, get_setting_value, generate_random_string
from utils.datetime_utils import timeNowDB
from app_state import updateState
from logger import mylog
from api import update_api
from scheduler import schedule_class
from plugin import plugin_manager, print_plugin_info
from plugin_utils import get_plugins_configs, get_set_value_for_init
from utils.plugin_utils import get_plugins_configs, get_set_value_for_init
from messaging.in_app import write_notification
from utils.crypto_utils import get_random_bytes

# ===============================================================================
# Initialise user defined values
@@ -674,39 +669,23 @@ def importConfigs(pm, db, all_plugins):

    # -----------------
    # HANDLE APP was upgraded message - clear cache

    # Check if app was upgraded

    buildTimestamp = getBuildTimeStamp()
    cur_version = conf.VERSION

    mylog("debug", [f"[Config] buildTimestamp: '{buildTimestamp}'"])
    mylog("debug", [f"[Config] conf.VERSION : '{cur_version}'"])

    if str(cur_version) != str(buildTimestamp):
        mylog("none", ["[Config] App upgraded 🚀"])

    buildTimestamp, new_version = getBuildTimeStampAndVersion()
    prev_version = conf.VERSION

    mylog('debug', [f"[Config] buildTimestamp | prev_version | .VERSION file: '{buildTimestamp}|{prev_version}|{new_version}'"])

    if str(prev_version) != str(new_version):

        mylog('none', ['[Config] App upgraded 🚀'])

        # ccd(key, default, config_dir, name, inputtype, options, group, events=None, desc="", setJsonMetadata=None, overrideTemplate=None, forceDefault=False)
        ccd(
            "VERSION",
            buildTimestamp,
            c_d,
            "_KEEP_",
            "_KEEP_",
            "_KEEP_",
            "_KEEP_",
            None,
            "_KEEP_",
            None,
            None,
            True,
        )

        write_notification(
            '[Upgrade] : App upgraded 🚀 Please clear the cache: <ol> <li>Click OK below</li> <li>Clear the browser cache (shift + browser refresh button)</li> <li> Clear app cache with the <i class="fa-solid fa-rotate"></i> (reload) button in the header</li><li>Go to Settings and click Save</li> </ol> Check out new features and what has changed in the <a href="https://github.com/jokob-sk/NetAlertX/releases" target="_blank">📓 release notes</a>.',
            "interrupt",
            timeNowTZ(),
        )
        ccd('VERSION', new_version , c_d, '_KEEP_', '_KEEP_', '_KEEP_', '_KEEP_', None, "_KEEP_", None, None, True)

        write_notification(f'[Upgrade] : App upgraded from {prev_version} to {new_version} 🚀 Please clear the cache: <ol> <li>Click OK below</li> <li>Clear the browser cache (shift + browser refresh button)</li> <li> Clear app cache with the <i class="fa-solid fa-rotate"></i> (reload) button in the header</li><li>Go to Settings and click Save</li> </ol> Check out new features and what has changed in the <a href="https://github.com/jokob-sk/NetAlertX/releases" target="_blank">📓 release notes</a>.', 'interrupt', timeNowDB())


    # -----------------
    # Initialization finished, update DB and API endpoints
@@ -738,19 +717,13 @@ def importConfigs(pm, db, all_plugins):
    # settingsImported = None (timestamp),
    # showSpinner = False (1/0),
    # graphQLServerStarted = 1 (1/0))
    updateState(
        "Config imported",
        conf.lastImportedConfFile,
        conf.lastImportedConfFile,
        False,
        1,
    )

    msg = "[Config] Imported new settings config"
    mylog("minimal", msg)

    updateState("Config imported", conf.lastImportedConfFile, conf.lastImportedConfFile, False, 1, None, None, new_version)

    msg = '[Config] Imported new settings config'
    mylog('minimal', msg)

    # front end app log logging
    write_notification(msg, "info", timeNowTZ())
    write_notification(msg, 'info', timeNowDB())

    return pm, all_plugins, True
@@ -4,20 +4,17 @@ import datetime
import threading
import queue
import logging
from zoneinfo import ZoneInfo

# Register NetAlertX directories
INSTALL_PATH="/app"

sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])

# NetAlertX imports
import conf
from const import *


# -------------------------------------------------------------------------------
# duplication from helper to avoid circle
# -------------------------------------------------------------------------------
def timeNowTZ():
    if conf.tz:
        return datetime.datetime.now(conf.tz).replace(microsecond=0)
    else:
        return datetime.datetime.now().replace(microsecond=0)
from utils.datetime_utils import timeNowTZ


# -------------------------------------------------------------------------------
@@ -17,7 +17,10 @@ from helper import (
    timeNowTZ,
)

NOTIFICATION_API_FILE = apiPath + "user_notifications.json"
import conf
from const import applicationPath, logPath, apiPath, confFileName, reportTemplatesPath
from logger import mylog
from utils.datetime_utils import timeNowDB


# Show Frontend User Notification
@@ -35,7 +38,7 @@ def write_notification(content, level="alert", timestamp=None):
        None
    """
    if timestamp is None:
        timestamp = timeNowTZ()
        timestamp = timeNowDB()

    # Generate GUID
    guid = str(uuid.uuid4())
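# Illustrative usage sketch (editor's example, not part of the original diff):
# when no timestamp is passed, write_notification() now stamps the entry with
# the DB-formatted string from timeNowDB() instead of a datetime object:
#
#   write_notification("Scan finished", "info")            # timestamp defaults to timeNowDB()
#   write_notification("Device offline", "alert", timeNowDB())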
@@ -24,6 +24,7 @@ from helper import (
)
from logger import mylog
from db.sql_safe_builder import create_safe_condition_builder
from utils.datetime_utils import get_timezone_offset

# ===============================================================================
# REPORTING
@@ -12,12 +12,12 @@ from logger import mylog, Logger
from helper import (
    generate_mac_links,
    removeDuplicateNewLines,
    timeNowTZ,
    write_file,
    get_setting_value,
    get_timezone_offset,
)
from messaging.in_app import write_notification
from utils.datetime_utils import timeNowDB, get_timezone_offset


# -----------------------------------------------------------------------------
@@ -71,15 +71,15 @@ class NotificationInstance:
        else:
            self.HasNotifications = True

        self.GUID = str(uuid.uuid4())
        self.DateTimeCreated = timeNowTZ()
        self.DateTimePushed = ""
        self.Status = "new"
        self.JSON = JSON
        self.Text = ""
        self.HTML = ""
        self.PublishedVia = ""
        self.Extra = Extra
        self.GUID = str(uuid.uuid4())
        self.DateTimeCreated = timeNowDB()
        self.DateTimePushed = ""
        self.Status = "new"
        self.JSON = JSON
        self.Text = ""
        self.HTML = ""
        self.PublishedVia = ""
        self.Extra = Extra

        if self.HasNotifications:
            # if not notiStruc.json['data'] and not notiStruc.text and not notiStruc.html:
@@ -113,9 +113,9 @@ class NotificationInstance:
        mail_html = mail_html.replace("<NEW_VERSION>", newVersionText)

        # Report "REPORT_DATE" in Header & footer
        timeFormated = timeNowTZ().strftime("%Y-%m-%d %H:%M")
        mail_text = mail_text.replace("<REPORT_DATE>", timeFormated)
        mail_html = mail_html.replace("<REPORT_DATE>", timeFormated)
        timeFormated = timeNowDB()
        mail_text = mail_text.replace('<REPORT_DATE>', timeFormated)
        mail_html = mail_html.replace('<REPORT_DATE>', timeFormated)

        # Report "SERVER_NAME" in Header & footer
        mail_text = mail_text.replace("<SERVER_NAME>", socket.gethostname())
@@ -226,7 +226,7 @@ class NotificationInstance:
    # Updates the Published properties
    def updatePublishedVia(self, newPublishedVia):
        self.PublishedVia = newPublishedVia
        self.DateTimePushed = timeNowTZ()
        self.DateTimePushed = timeNowDB()
        self.upsert()

    # create or update a notification
@@ -284,17 +284,15 @@ class NotificationInstance:

    # Clear the Pending Email flag from all events and devices
    def clearPendingEmailFlag(self):
        # Clean Pending Alert Events
        self.db.sql.execute(
            """

        # Clean Pending Alert Events
        self.db.sql.execute("""
            UPDATE Devices SET devLastNotification = ?
            WHERE devMac IN (
                SELECT eve_MAC FROM Events
                WHERE eve_PendingAlertEmail = 1
            )
            """,
            (timeNowTZ(),),
        )
        """, (timeNowDB(),))

        self.db.sql.execute("""
            UPDATE Events SET eve_PendingAlertEmail = 0
@@ -1,7 +1,9 @@
import os
import uuid

from const import logPath
from logger import mylog
from utils.datetime_utils import timeNowDB


class UserEventsQueueInstance:
@@ -78,3 +80,44 @@ class UserEventsQueueInstance:
            mylog("minimal", ["[UserEventsQueueInstance] Processed event: ", event])

        return removed

    def add_event(self, action):
        """
        Append an action to the execution queue log file.

        Args:
            action (str): Description of the action to queue.

        Returns:
            tuple: (success: bool, message: str)
                success - True if the event was successfully added.
                message - Log message describing the result.
        """
        timestamp = timeNowDB()
        # Generate GUID
        guid = str(uuid.uuid4())

        if not action or not isinstance(action, str):
            msg = "[UserEventsQueueInstance] Invalid or missing action"
            mylog('none', [msg])

            return False, msg

        try:
            with open(self.log_file, "a") as f:
                f.write(f"[{timestamp}]|{guid}|{action}\n")

            msg = f'[UserEventsQueueInstance] Action "{action}" added to the execution queue.'
            mylog('minimal', [msg])

            return True, msg

        except Exception as e:
            msg = f"[UserEventsQueueInstance] ERROR Failed to write to {self.log_file}: {e}"
            mylog('none', [msg])

            return False, msg
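# Illustrative usage sketch (editor's example; the action string shown is
# hypothetical, the queue-line format comes from add_event() above):
#
#   queue = UserEventsQueueInstance()
#   ok, msg = queue.add_event("update_api")
#   # appends a line like:
#   # [2025-11-04 18:09:11]|550e8400-e29b-41d4-a716-446655440000|update_api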
@@ -8,30 +8,17 @@ from concurrent.futures import ThreadPoolExecutor, as_completed

# Register NetAlertX modules
import conf
from const import logPath, reportTemplatesPath, pluginsPath, applicationPath
from logger import mylog, Logger
from helper import (
    timeNowTZ,
    get_file_content,
    get_setting,
    get_setting_value,
)
from const import pluginsPath, logPath, applicationPath, reportTemplatesPath
from logger import mylog, Logger
from helper import get_file_content, write_file, get_setting, get_setting_value
from utils.datetime_utils import timeNowTZ, timeNowDB
from app_state import updateState
from api import update_api
from plugin_utils import (
    logEventStatusCounts,
    get_plugin_setting_obj,
    print_plugin_info,
    list_to_csv,
    combine_plugin_objects,
    resolve_wildcards_arr,
    handle_empty,
    decode_and_rename_files,
)
from utils.plugin_utils import logEventStatusCounts, get_plugin_string, get_plugin_setting_obj, print_plugin_info, list_to_csv, combine_plugin_objects, resolve_wildcards_arr, handle_empty, custom_plugin_decoder, decode_and_rename_files
from models.notification_instance import NotificationInstance
from messaging.in_app import write_notification
from models.user_events_queue_instance import UserEventsQueueInstance
from crypto_utils import generate_deterministic_guid
from utils.crypto_utils import generate_deterministic_guid


# -------------------------------------------------------------------------------
@@ -40,7 +27,7 @@ class plugin_manager:
        self.db = db
        self.all_plugins = all_plugins
        self.plugin_states = {}
        self.name_plugins_checked = None
        self.plugin_checks = {}

        # object cache of settings and schedules for faster lookups
        self._cache = {}
@@ -112,12 +99,11 @@ class plugin_manager:
                execute_plugin(self.db, self.all_plugins, plugin)

                # Update plugin states in app_state
                current_plugin_state = self.get_plugin_states(
                    prefix
                )  # get latest plugin state
                updateState(
                    pluginsStates={prefix: current_plugin_state.get(prefix, {})}
                )
                current_plugin_state = self.get_plugin_states(prefix) # get latest plugin state

                # mylog('debug', f'current_plugin_state: {current_plugin_state}')

                updateState(pluginsStates={prefix: current_plugin_state.get(prefix, {})})

                # update last run time
                if runType == "schedule":
@@ -189,26 +175,17 @@ class plugin_manager:

        # Notify user about executed events (if applicable)
        if len(executed_events) > 0 and executed_events:
            executed_events_message = ", ".join(executed_events)
            mylog(
                "minimal",
                [
                    "[check_and_run_user_event] INFO: Executed events: ",
                    executed_events_message,
                ],
            )
            write_notification(
                f"[Ad-hoc events] Events executed: {executed_events_message}",
                "interrupt",
                timeNowTZ(),
            )
            executed_events_message = ', '.join(executed_events)
            mylog('minimal', ['[check_and_run_user_event] INFO: Executed events: ', executed_events_message])
            write_notification(f"[Ad-hoc events] Events executed: {executed_events_message}", "interrupt", timeNowDB())

        return

    # -------------------------------------------------------------------------------
    def handle_run(self, runType):
        mylog("minimal", ["[", timeNowTZ(), "] START Run: ", runType])

        mylog('minimal', ['[', timeNowDB(), '] START Run: ', runType])

        # run the plugin
        for plugin in self.all_plugins:
            if plugin["unique_prefix"] == runType:
@@ -224,7 +201,7 @@ class plugin_manager:
                    pluginsStates={pluginName: current_plugin_state.get(pluginName, {})}
                )

        mylog("minimal", ["[", timeNowTZ(), "] END Run: ", runType])
        mylog('minimal', ['[', timeNowDB(), '] END Run: ', runType])

        return

@@ -232,6 +209,8 @@ class plugin_manager:
    def handle_test(self, runType):
        mylog("minimal", ["[", timeNowTZ(), "] [Test] START Test: ", runType])

        mylog('minimal', ['[', timeNowDB(), '] [Test] START Test: ', runType])

        # Prepare test samples
        sample_json = json.loads(
            get_file_content(reportTemplatesPath + "webhook_json_sample.json")
@@ -258,7 +237,7 @@ class plugin_manager:
        If plugin_name is provided, only calculates stats for that plugin.
        Structure per plugin:
        {
            "lastChanged": str,
            "lastDataChange": str,
            "totalObjects": int,
            "newObjects": int,
            "changedObjects": int,
@@ -267,32 +246,30 @@ class plugin_manager:
        """
        sql = self.db.sql
        plugin_states = {}
        now_str = timeNowDB()

        if plugin_name:  # Only compute for single plugin
            sql.execute(
                """
                SELECT MAX(DateTimeChanged) AS last_changed,
                       COUNT(*) AS total_objects,
                       SUM(CASE WHEN DateTimeCreated = DateTimeChanged THEN 1 ELSE 0 END) AS new_objects,
                       CURRENT_TIMESTAMP AS state_updated
                       SUM(CASE WHEN DateTimeCreated = DateTimeChanged THEN 1 ELSE 0 END) AS new_objects
                FROM Plugins_Objects
                WHERE Plugin = ?
                """,
                (plugin_name,),
            )
            row = sql.fetchone()
            last_changed, total_objects, new_objects, state_updated = (
                row if row else ("", 0, 0, "")
            )
            last_changed, total_objects, new_objects = row if row else ("", 0, 0)
            new_objects = new_objects or 0  # ensure it's int
            changed_objects = total_objects - new_objects

            plugin_states[plugin_name] = {
                "lastChanged": last_changed or "",
                "lastDataChange": last_changed or "",
                "totalObjects": total_objects or 0,
                "newObjects": new_objects or 0,
                "changedObjects": changed_objects or 0,
                "stateUpdated": state_updated or "",
                "stateUpdated": now_str
            }

            # Save in memory
@@ -303,26 +280,19 @@ class plugin_manager:
                SELECT Plugin,
                       MAX(DateTimeChanged) AS last_changed,
                       COUNT(*) AS total_objects,
                       SUM(CASE WHEN DateTimeCreated = DateTimeChanged THEN 1 ELSE 0 END) AS new_objects,
                       CURRENT_TIMESTAMP AS state_updated
                       SUM(CASE WHEN DateTimeCreated = DateTimeChanged THEN 1 ELSE 0 END) AS new_objects
                FROM Plugins_Objects
                GROUP BY Plugin
            """)
            for (
                plugin,
                last_changed,
                total_objects,
                new_objects,
                state_updated,
            ) in sql.fetchall():
            for plugin, last_changed, total_objects, new_objects in sql.fetchall():
                new_objects = new_objects or 0  # ensure it's int
                changed_objects = total_objects - new_objects
                plugin_states[plugin] = {
                    "lastChanged": last_changed or "",
                    "lastDataChange": last_changed or "",
                    "totalObjects": total_objects or 0,
                    "newObjects": new_objects or 0,
                    "changedObjects": changed_objects or 0,
                    "stateUpdated": state_updated or "",
                    "stateUpdated": now_str
                }

            # Save in memory
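# Illustrative sketch (editor's example; 'ARPSCAN' is a hypothetical prefix):
# after get_plugin_states() runs, each entry exposes the renamed key
# "lastDataChange" (formerly "lastChanged") plus "stateUpdated" from timeNowDB():
#
#   state = pm.plugin_states.get("ARPSCAN", {})
#   state["lastDataChange"]  # -> e.g. '2025-11-04 18:09:11'
#   state["stateUpdated"]    # -> timestamp of this stats refresh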
@@ -908,8 +878,8 @@ def process_plugin_events(db, plugin, plugEventsArr):
        if isMissing:
            # if wasn't missing before, mark as changed
            if tmpObj.status != "missing-in-last-scan":
                tmpObj.changed = timeNowTZ().strftime("%Y-%m-%d %H:%M:%S")
                tmpObj.status = "missing-in-last-scan"
                tmpObj.changed = timeNowDB()
                tmpObj.status = "missing-in-last-scan"
                # mylog('debug', [f'[Plugins] Missing from last scan (PrimaryID | SecondaryID): {tmpObj.primaryId} | {tmpObj.secondaryId}'])

    # Merge existing plugin objects with newly discovered ones and update existing ones with new values
@@ -2,13 +2,15 @@ import sys
import subprocess
import os
import re
import datetime
from dateutil import parser

# Register NetAlertX directories
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
sys.path.extend([f"{INSTALL_PATH}/server"])

from helper import timeNowTZ, get_setting_value, check_IP_format
from helper import get_setting_value, check_IP_format
from utils.datetime_utils import timeNowDB, normalizeTimeStamp
from logger import mylog, Logger
from const import vendorsPath, vendorsPathNewest, sql_generateGuid
from models.device_instance import DeviceInstance
@@ -55,11 +57,10 @@ def exclude_ignored_devices(db):

    sql.execute(query)


# -------------------------------------------------------------------------------
def update_devices_data_from_scan(db):
    sql = db.sql  # TO-DO
    startTime = timeNowTZ().strftime("%Y-%m-%d %H:%M:%S")
#-------------------------------------------------------------------------------
def update_devices_data_from_scan (db):
    sql = db.sql #TO-DO
    startTime = timeNowDB()

    # Update Last Connection
    mylog("debug", "[Update Devices] 1 Last Connection")
@@ -424,10 +425,10 @@ def print_scan_stats(db):
        mylog("verbose", f"    {row['cur_ScanMethod']}: {row['scan_method_count']}")


# -------------------------------------------------------------------------------
def create_new_devices(db):
    sql = db.sql  # TO-DO
    startTime = timeNowTZ()
#-------------------------------------------------------------------------------
def create_new_devices (db):
    sql = db.sql  # TO-DO
    startTime = timeNowDB()

    # Insert events for new devices from CurrentScan (not yet in Devices)

@@ -597,43 +598,86 @@ def create_new_devices(db):
    mylog("debug", "[New Devices] New Devices end")
    db.commitDB()

#-------------------------------------------------------------------------------
# Check if plugins data changed
def check_plugin_data_changed(pm, plugins_to_check):
    """
    Checks whether any of the specified plugins have updated data since their
    last recorded check time.

    This function compares each plugin's `lastDataChange` timestamp from
    `pm.plugin_states` with its corresponding `lastDataCheck` timestamp from
    `pm.plugin_checks`. If a plugin's data has changed more recently than it
    was last checked, it is flagged as changed.

    Args:
        pm (object): Plugin manager or state object containing:
            - plugin_states (dict): Per-plugin metadata with "lastDataChange".
            - plugin_checks (dict): Per-plugin last check timestamps.
        plugins_to_check (list[str]): List of plugin names to validate.

    Returns:
        bool: True if any plugin data has changed since last check,
              otherwise False.

    Logging:
        - Logs unexpected or invalid timestamps at level 'none'.
        - Logs when no changes are detected at level 'debug'.
        - Logs each changed plugin at level 'debug'.
    """

    plugins_changed = []

    for plugin_name in plugins_to_check:

        last_data_change = pm.plugin_states.get(plugin_name, {}).get("lastDataChange")
        last_data_check = pm.plugin_checks.get(plugin_name, "")

        if not last_data_change:
            continue

        # Normalize and validate last_changed timestamp
        last_changed_ts = normalizeTimeStamp(last_data_change)

        if last_changed_ts == None:
            mylog('none', f'[check_plugin_data_changed] Unexpected last_data_change timestamp for {plugin_name} (input|output): ({last_data_change}|{last_changed_ts})')

        # Normalize and validate last_data_check timestamp
        last_data_check_ts = normalizeTimeStamp(last_data_check)

        if last_data_check_ts == None:
            mylog('none', f'[check_plugin_data_changed] Unexpected last_data_check timestamp for {plugin_name} (input|output): ({last_data_check}|{last_data_check_ts})')

        # Track which plugins have newer state than last_checked
        if last_data_check_ts is None or last_changed_ts is None or last_changed_ts > last_data_check_ts:
            mylog('debug', f'[check_plugin_data_changed] {plugin_name} changed (last_changed_ts|last_data_check_ts): ({last_changed_ts}|{last_data_check_ts})')
            plugins_changed.append(plugin_name)

# -------------------------------------------------------------------------------
def update_devices_names(pm):
    sql = pm.db.sql
    resolver = NameResolver(pm.db)
    device_handler = DeviceInstance(pm.db)

    # --- Short-circuit if no name-resolution plugin has changed ---
    name_plugins = ["DIGSCAN", "NSLOOKUP", "NBTSCAN", "AVAHISCAN"]

    # Retrieve last time name resolution was checked (string or datetime)
    last_checked_str = pm.name_plugins_checked
    last_checked_dt = (
        parser.parse(last_checked_str)
        if isinstance(last_checked_str, str)
        else last_checked_str
    )

    # Collect valid state update timestamps for name-related plugins
    state_times = []
    for p in name_plugins:
        state_updated = pm.plugin_states.get(p, {}).get("stateUpdated")
        if state_updated and state_updated.strip():  # skip empty or None
            state_times.append(state_updated)

    # Determine the latest valid stateUpdated timestamp
    latest_state_str = max(state_times, default=None)
    latest_state_dt = parser.parse(latest_state_str) if latest_state_str else None

    # Skip if no plugin state changed since last check
    if last_checked_dt and latest_state_dt and latest_state_dt <= last_checked_dt:
        mylog(
            "debug",
            "[Update Device Name] No relevant name plugin changes since last check — skipping update.",
        )
    if len(plugins_changed) == 0:
        mylog('debug', f'[check_plugin_data_changed] No relevant plugin changes since last check for {plugins_to_check}')
        return False

    # Continue if changes detected
    for p in plugins_changed:
        mylog('debug', f'[check_plugin_data_changed] {p} changed (last_data_change|last_data_check): ({pm.plugin_states.get(p, {}).get("lastDataChange")}|{pm.plugin_checks.get(p)})')

    return True
#-------------------------------------------------------------------------------
def update_devices_names(pm):

    # --- Short-circuit if no name-resolution plugin has changed ---
    if check_plugin_data_changed(pm, ["DIGSCAN", "NSLOOKUP", "NBTSCAN", "AVAHISCAN"]) == False:
        mylog('debug', '[Update Device Name] No relevant plugin changes since last check.')
        return

    mylog('debug', '[Update Device Name] Check if unknown devices present to resolve names for or if REFRESH_FQDN enabled.')

    sql = pm.db.sql
    resolver = NameResolver(pm.db)
    device_handler = DeviceInstance(pm.db)

    nameNotFound = "(name not found)"

    # Define resolution strategies in priority order
@@ -759,10 +803,7 @@ def update_devices_names(pm):

    # --- Step 3: Log last checked time ---
    # After resolving names, update last checked
    sql = pm.db.sql
    sql.execute("SELECT CURRENT_TIMESTAMP")
    row = sql.fetchone()
    pm.name_plugins_checked = row[0] if row else None
    pm.plugin_checks = {"DIGSCAN": timeNowDB(), "AVAHISCAN": timeNowDB(), "NSLOOKUP": timeNowDB(), "NBTSCAN": timeNowDB() }


# -------------------------------------------------------------------------------
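# Editor's note (an inference from this commit, not stated in the diff): the
# single pm.name_plugins_checked timestamp is replaced by per-plugin entries in
# pm.plugin_checks, so each resolver plugin is gated independently.
# Illustrative flow:
#
#   if check_plugin_data_changed(pm, ["DIGSCAN", "NSLOOKUP", "NBTSCAN", "AVAHISCAN"]):
#       ...resolve device names...
#       pm.plugin_checks["DIGSCAN"] = timeNowDB()   # and likewise for the others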
@@ -71,7 +71,8 @@ class NameResolver:
        if match_ip:
            name += " (IP match)"

        regexes = get_setting_value("NEWDEV_NAME_CLEANUP_REGEX") or []
        regexes = get_setting_value('NEWDEV_NAME_CLEANUP_REGEX') or []
        mylog('trace', [f"[cleanDeviceName] applying regexes: {regexes}"])
        for rgx in regexes:
            mylog("trace", [f"[cleanDeviceName] applying regex: {rgx}"])
            name = re.sub(rgx, "", name)
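# Illustrative sketch (editor's example; the regex value is hypothetical):
# with NEWDEV_NAME_CLEANUP_REGEX set to [r'\.local$'], a discovered name like
# "printer.local" is cleaned to "printer":
#
#   import re
#   name = "printer.local"
#   for rgx in [r'\.local$']:
#       name = re.sub(rgx, "", name)   # -> "printer"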
@@ -12,8 +12,9 @@ from scan.device_handling import (
    exclude_ignored_devices,
    update_devices_data_from_scan,
)
from helper import timeNowTZ, get_setting_value
from helper import get_setting_value
from db.db_helper import print_table_schema
from utils.datetime_utils import timeNowDB, timeNowTZ
from logger import mylog, Logger
from messaging.reporting import skip_repeated_notifications

@@ -132,11 +133,11 @@ def create_sessions_snapshot(db):
    db.commitDB()


# -------------------------------------------------------------------------------
def insert_events(db):
    sql = db.sql  # TO-DO
    startTime = timeNowTZ()

#-------------------------------------------------------------------------------
def insert_events (db):
    sql = db.sql #TO-DO
    startTime = timeNowDB()

    # Check device down
    mylog("debug", "[Events] - 1 - Devices down")
    sql.execute(f"""INSERT INTO Events (eve_MAC, eve_IP, eve_DateTime,
@@ -198,7 +199,7 @@ def insert_events(db):
def insertOnlineHistory(db):
    sql = db.sql  # TO-DO: Implement sql object

    scanTimestamp = timeNowTZ()
    scanTimestamp = timeNowDB()

    # Query to fetch all relevant device counts in one go
    query = """
server/crypto_utils.py → server/utils/crypto_utils.py (Executable file → Normal file, 0 changes)
server/utils/datetime_utils.py (new file, 211 lines)
@@ -0,0 +1,211 @@
#!/usr/bin/env python

import os
import pathlib
import sys
from datetime import datetime
from dateutil import parser
import datetime
import re
import pytz
from pytz import timezone
from typing import Union
from zoneinfo import ZoneInfo
import email.utils

# Register NetAlertX directories
INSTALL_PATH="/app"
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])

import conf
from const import *


#-------------------------------------------------------------------------------
# DateTime
#-------------------------------------------------------------------------------

DATETIME_PATTERN = "%Y-%m-%d %H:%M:%S"
DATETIME_REGEX = re.compile(r'^\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}$')

def timeNowTZ():
    if conf.tz:
        return datetime.datetime.now(conf.tz).replace(microsecond=0)
    else:
        return datetime.datetime.now().replace(microsecond=0)

def timeNow():
    return datetime.datetime.now().replace(microsecond=0)

def get_timezone_offset():
    now = datetime.datetime.now(conf.tz)
    offset_hours = now.utcoffset().total_seconds() / 3600
    offset_formatted = "{:+03d}:{:02d}".format(int(offset_hours), int((offset_hours % 1) * 60))
    return offset_formatted

def timeNowDB(local=True):
    """
    Return the current time (local or UTC) as ISO 8601 for DB storage.
    Safe for SQLite, PostgreSQL, etc.

    Example local: '2025-11-04 18:09:11'
    Example UTC:   '2025-11-04 07:09:11'
    """
    if local:
        try:
            if isinstance(conf.tz, datetime.tzinfo):
                tz = conf.tz
            elif conf.tz:
                tz = ZoneInfo(conf.tz)
            else:
                tz = None
        except Exception:
            tz = None
        return datetime.datetime.now(tz).strftime(DATETIME_PATTERN)
    else:
        return datetime.datetime.now(datetime.UTC).strftime(DATETIME_PATTERN)
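# Illustrative usage sketch (editor's example, based on timeNowDB() above):
# the returned 'YYYY-MM-DD HH:MM:SS' strings sort lexicographically in the same
# order as the times they represent, so they can be bound directly to SQL:
#
#   ts = timeNowDB()                # e.g. '2025-11-04 18:09:11' (app timezone)
#   ts_utc = timeNowDB(local=False)
#   sql.execute("UPDATE Devices SET devLastNotification = ?", (ts,))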
#-------------------------------------------------------------------------------
# Date and time methods
#-------------------------------------------------------------------------------

def normalizeTimeStamp(inputTimeStamp):
    """
    Normalize various timestamp formats into a datetime.datetime object.

    Supports:
    - SQLite-style 'YYYY-MM-DD HH:MM:SS'
    - ISO 8601 'YYYY-MM-DDTHH:MM:SSZ'
    - Epoch timestamps (int or float)
    - datetime.datetime objects (returned as-is)
    - Empty or invalid values (returns None)
    """
    if inputTimeStamp is None:
        return None

    # Already a datetime
    if isinstance(inputTimeStamp, datetime.datetime):
        return inputTimeStamp

    # Epoch timestamp (integer or float)
    if isinstance(inputTimeStamp, (int, float)):
        try:
            return datetime.datetime.fromtimestamp(inputTimeStamp)
        except (OSError, OverflowError, ValueError):
            return None

    # String formats (SQLite / ISO8601)
    if isinstance(inputTimeStamp, str):
        inputTimeStamp = inputTimeStamp.strip()
        if not inputTimeStamp:
            return None
        try:
            # match the "2025-11-08 14:32:10" format
            pattern = DATETIME_REGEX

            if pattern.match(inputTimeStamp):
                return datetime.datetime.strptime(inputTimeStamp, DATETIME_PATTERN)
            else:
                # Handles SQLite and ISO8601 automatically
                return parser.parse(inputTimeStamp)
        except Exception:
            return None

    # Unrecognized type
    return None
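# Illustrative examples (editor's sketch; inputs chosen to match the
# docstring's supported formats):
#
#   normalizeTimeStamp("2025-11-08 14:32:10")   # -> datetime(2025, 11, 8, 14, 32, 10)
#   normalizeTimeStamp(1730700000)              # -> datetime from epoch seconds
#   normalizeTimeStamp("")                      # -> None
#   normalizeTimeStamp("not-a-date")            # -> None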
# -------------------------------------------------------------------------------------------
def format_date_iso(date1: str) -> str:
    """Return ISO 8601 string for a date or None if empty"""
    if date1 is None:
        return None
    dt = datetime.datetime.fromisoformat(date1) if isinstance(date1, str) else date1
    return dt.isoformat()

# -------------------------------------------------------------------------------------------
def format_event_date(date_str: str, event_type: str) -> str:
    """Format event date with fallback rules."""
    if date_str:
        return format_date(date_str)
    elif event_type == "<missing event>":
        return "<missing event>"
    else:
        return "<still connected>"

# -------------------------------------------------------------------------------------------
def ensure_datetime(dt: Union[str, datetime.datetime, None]) -> datetime.datetime:
    if dt is None:
        return timeNowTZ()
    if isinstance(dt, str):
        return datetime.datetime.fromisoformat(dt)
    return dt


def parse_datetime(dt_str):
    if not dt_str:
        return None
    try:
        # Try ISO8601 first
        return datetime.datetime.fromisoformat(dt_str)
    except ValueError:
        # Try RFC1123 / HTTP format
        try:
            return datetime.datetime.strptime(dt_str, '%a, %d %b %Y %H:%M:%S GMT')
        except ValueError:
            return None

def format_date(date_str: str) -> str:
    try:
        dt = parse_datetime(date_str)
        if dt.tzinfo is None:
            # Set timezone if missing — change to timezone.utc if you prefer UTC
            now = datetime.datetime.now(conf.tz)
            dt = dt.replace(tzinfo=now.astimezone().tzinfo)
        return dt.astimezone().isoformat()
    except (ValueError, AttributeError, TypeError):
        return "invalid"

def format_date_diff(date1, date2, tz_name):
    """
    Return difference between two datetimes as 'Xd HH:MM'.
    Uses app timezone if datetime is naive.
    date2 can be None (uses now).
    """
    # Get timezone from settings
    tz = pytz.timezone(tz_name)

    def parse_dt(dt):
        if dt is None:
            return datetime.datetime.now(tz)
        if isinstance(dt, str):
            try:
                dt_parsed = email.utils.parsedate_to_datetime(dt)
            except (ValueError, TypeError):
                # fallback: parse ISO string
                dt_parsed = datetime.datetime.fromisoformat(dt)
            # convert naive GMT/UTC to app timezone
            if dt_parsed.tzinfo is None:
                dt_parsed = tz.localize(dt_parsed)
            else:
                dt_parsed = dt_parsed.astimezone(tz)
            return dt_parsed
        return dt if dt.tzinfo else tz.localize(dt)

    dt1 = parse_dt(date1)
    dt2 = parse_dt(date2)

    delta = dt2 - dt1
    total_minutes = int(delta.total_seconds() // 60)
    days, rem_minutes = divmod(total_minutes, 1440)  # 1440 mins in a day
    hours, minutes = divmod(rem_minutes, 60)

    return {
        "text": f"{days}d {hours:02}:{minutes:02}",
        "days": days,
        "hours": hours,
        "minutes": minutes,
        "total_minutes": total_minutes
    }
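# Editor's note (an inference from the two copies in this commit): unlike the
# helper.py version removed above, which read the TIMEZONE setting itself via
# get_setting_value(), this copy takes tz_name as an explicit argument, keeping
# datetime_utils free of a settings dependency. Illustrative call:
#
#   format_date_diff("2025-11-01 08:00:00", None, "UTC")["text"]   # e.g. '0d 02:15'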
@@ -3,13 +3,13 @@ import json

import conf
from logger import mylog
from utils.crypto_utils import decrypt_data
from const import pluginsPath, apiPath
from helper import (
    get_file_content,
    get_setting_value,
    setting_value_to_python_type,
)
from crypto_utils import decrypt_data

module_name = "Plugin utils"