/data and /tmp standardization

This commit is contained in:
Adam Outler
2025-11-04 22:26:35 +00:00
parent 90a07c61eb
commit 5b871865db
250 changed files with 7462 additions and 4940 deletions

View File

@@ -1,30 +1,74 @@
import threading
import sys
import os
from flask import Flask, request, jsonify, Response
from flask_cors import CORS
# Register NetAlertX directories
INSTALL_PATH = "/app"
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
sys.path.extend([f"{INSTALL_PATH}/server"])
from logger import mylog
from helper import get_setting_value, timeNowTZ
from helper import get_setting_value
from db.db_helper import get_date_from_period
from app_state import updateState
from .graphql_endpoint import devicesSchema
from .device_endpoint import get_device_data, set_device_data, delete_device, delete_device_events, reset_device_props, copy_device, update_device_column
from .devices_endpoint import get_all_devices, delete_unknown_devices, delete_all_with_empty_macs, delete_devices, export_devices, import_csv, devices_totals, devices_by_status
from .events_endpoint import delete_events, delete_events_older_than, get_events, create_event, get_events_totals
from .history_endpoint import delete_online_history
from .prometheus_endpoint import get_metric_stats
from .sessions_endpoint import get_sessions, delete_session, create_session, get_sessions_calendar, get_device_sessions, get_session_events
from .nettools_endpoint import wakeonlan, traceroute, speedtest, nslookup, nmap_scan, internet_info
from .dbquery_endpoint import read_query, write_query, update_query, delete_query
from .sync_endpoint import handle_sync_post, handle_sync_get
from messaging.in_app import write_notification, mark_all_notifications_read, delete_notifications, get_unread_notifications, delete_notification, mark_notification_as_read
from api_server.graphql_endpoint import devicesSchema
from api_server.device_endpoint import (
get_device_data,
set_device_data,
delete_device,
delete_device_events,
reset_device_props,
copy_device,
update_device_column,
)
from api_server.devices_endpoint import (
get_all_devices,
delete_unknown_devices,
delete_all_with_empty_macs,
delete_devices,
export_devices,
import_csv,
devices_totals,
devices_by_status,
)
from api_server.events_endpoint import (
delete_events,
delete_events_older_than,
get_events,
create_event,
get_events_totals,
)
from api_server.history_endpoint import delete_online_history
from api_server.prometheus_endpoint import get_metric_stats
from api_server.sessions_endpoint import (
get_sessions,
delete_session,
create_session,
get_sessions_calendar,
get_device_sessions,
get_session_events,
)
from api_server.nettools_endpoint import (
wakeonlan,
traceroute,
speedtest,
nslookup,
nmap_scan,
internet_info,
)
from api_server.dbquery_endpoint import read_query, write_query, update_query, delete_query
from api_server.sync_endpoint import handle_sync_post, handle_sync_get
from messaging.in_app import (
write_notification,
mark_all_notifications_read,
delete_notifications,
get_unread_notifications,
delete_notification,
mark_notification_as_read,
)
# Flask application
app = Flask(__name__)
@@ -40,34 +84,36 @@ CORS(
r"/settings/*": {"origins": "*"},
r"/dbquery/*": {"origins": "*"},
r"/messaging/*": {"origins": "*"},
r"/events/*": {"origins": "*"}
r"/events/*": {"origins": "*"},
},
supports_credentials=True,
allow_headers=["Authorization", "Content-Type"]
allow_headers=["Authorization", "Content-Type"],
)
# --------------------------
# GraphQL Endpoints
# --------------------------
# Endpoint used when accessed via browser
@app.route("/graphql", methods=["GET"])
def graphql_debug():
# Handles GET requests
return "NetAlertX GraphQL server running."
# Endpoint for GraphQL queries
@app.route("/graphql", methods=["POST"])
def graphql_endpoint():
# Check for API token in headers
if not is_authorized():
msg = '[graphql_server] Unauthorized access attempt - make sure your GRAPHQL_PORT and API_TOKEN settings are correct.'
mylog('verbose', [msg])
msg = "[graphql_server] Unauthorized access attempt - make sure your GRAPHQL_PORT and API_TOKEN settings are correct."
mylog("verbose", [msg])
return jsonify({"error": msg}), 401
# Retrieve and log request data
data = request.get_json()
mylog('verbose', [f'[graphql_server] data: {data}'])
mylog("verbose", [f"[graphql_server] data: {data}"])
# Execute the GraphQL query
result = devicesSchema.execute(data.get("query"), variables=data.get("variables"))
@@ -82,10 +128,12 @@ def graphql_endpoint():
return jsonify(response)
# --------------------------
# Settings Endpoints
# --------------------------
@app.route("/settings/<setKey>", methods=["GET"])
def api_get_setting(setKey):
if not is_authorized():
@@ -93,40 +141,47 @@ def api_get_setting(setKey):
value = get_setting_value(setKey)
return jsonify({"success": True, "value": value})
# --------------------------
# Device Endpoints
# --------------------------
@app.route("/device/<mac>", methods=["GET"])
def api_get_device(mac):
if not is_authorized():
return jsonify({"error": "Forbidden"}), 403
return get_device_data(mac)
@app.route("/device/<mac>", methods=["POST"])
def api_set_device(mac):
if not is_authorized():
return jsonify({"error": "Forbidden"}), 403
return set_device_data(mac, request.json)
@app.route("/device/<mac>/delete", methods=["DELETE"])
def api_delete_device(mac):
if not is_authorized():
return jsonify({"error": "Forbidden"}), 403
return delete_device(mac)
@app.route("/device/<mac>/events/delete", methods=["DELETE"])
def api_delete_device_events(mac):
if not is_authorized():
return jsonify({"error": "Forbidden"}), 403
return delete_device_events(mac)
@app.route("/device/<mac>/reset-props", methods=["POST"])
def api_reset_device_props(mac):
if not is_authorized():
return jsonify({"error": "Forbidden"}), 403
return reset_device_props(mac, request.json)
@app.route("/device/copy", methods=["POST"])
def api_copy_device():
if not is_authorized():
@@ -137,10 +192,13 @@ def api_copy_device():
mac_to = data.get("macTo")
if not mac_from or not mac_to:
return jsonify({"success": False, "error": "macFrom and macTo are required"}), 400
return jsonify(
{"success": False, "error": "macFrom and macTo are required"}
), 400
return copy_device(mac_from, mac_to)
@app.route("/device/<mac>/update-column", methods=["POST"])
def api_update_device_column(mac):
if not is_authorized():
@@ -151,35 +209,42 @@ def api_update_device_column(mac):
column_value = data.get("columnValue")
if not column_name or not column_value:
return jsonify({"success": False, "error": "columnName and columnValue are required"}), 400
return jsonify(
{"success": False, "error": "columnName and columnValue are required"}
), 400
return update_device_column(mac, column_name, column_value)
# --------------------------
# Devices Collections
# --------------------------
@app.route("/devices", methods=["GET"])
def api_get_devices():
if not is_authorized():
return jsonify({"error": "Forbidden"}), 403
return get_all_devices()
@app.route("/devices", methods=["DELETE"])
def api_delete_devices():
if not is_authorized():
return jsonify({"error": "Forbidden"}), 403
macs = request.json.get("macs") if request.is_json else None
return delete_devices(macs)
@app.route("/devices/empty-macs", methods=["DELETE"])
def api_delete_all_empty_macs():
if not is_authorized():
return jsonify({"error": "Forbidden"}), 403
return delete_all_with_empty_macs()
@app.route("/devices/unknown", methods=["DELETE"])
def api_delete_unknown_devices():
if not is_authorized():
@@ -196,18 +261,21 @@ def api_export_devices(format=None):
export_format = (format or request.args.get("format", "csv")).lower()
return export_devices(export_format)
@app.route("/devices/import", methods=["POST"])
def api_import_csv():
if not is_authorized():
return jsonify({"error": "Forbidden"}), 403
return import_csv(request.files.get("file"))
@app.route("/devices/totals", methods=["GET"])
def api_devices_totals():
if not is_authorized():
return jsonify({"error": "Forbidden"}), 403
return devices_totals()
@app.route("/devices/by-status", methods=["GET"])
def api_devices_by_status():
if not is_authorized():
@@ -217,6 +285,7 @@ def api_devices_by_status():
return devices_by_status(status)
# --------------------------
# Net tools
# --------------------------
@@ -228,6 +297,7 @@ def api_wakeonlan():
mac = request.json.get("devMac")
return wakeonlan(mac)
@app.route("/nettools/traceroute", methods=["POST"])
def api_traceroute():
if not is_authorized():
@@ -235,12 +305,14 @@ def api_traceroute():
ip = request.json.get("devLastIP")
return traceroute(ip)
@app.route("/nettools/speedtest", methods=["GET"])
def api_speedtest():
if not is_authorized():
return jsonify({"error": "Forbidden"}), 403
return speedtest()
@app.route("/nettools/nslookup", methods=["POST"])
def api_nslookup():
"""
@@ -257,6 +329,7 @@ def api_nslookup():
ip = data["devLastIP"]
return nslookup(ip)
@app.route("/nettools/nmap", methods=["POST"])
def api_nmap():
"""
@@ -273,7 +346,7 @@ def api_nmap():
ip = data["scan"]
mode = data["mode"]
return nmap_scan(ip, mode)
@app.route("/nettools/internetinfo", methods=["GET"])
def api_internet_info():
@@ -286,6 +359,7 @@ def api_internet_info():
# DB query
# --------------------------
@app.route("/dbquery/read", methods=["POST"])
def dbquery_read():
if not is_authorized():
@@ -296,9 +370,9 @@ def dbquery_read():
if not raw_sql_b64:
return jsonify({"error": "rawSql is required"}), 400
return read_query(raw_sql_b64)
@app.route("/dbquery/write", methods=["POST"])
def dbquery_write():
@@ -324,12 +398,12 @@ def dbquery_update():
return jsonify({"error": "Missing required parameters"}), 400
return update_query(
column_name=data["columnName"],
ids=data["id"],
dbtable=data["dbtable"],
columns=data["columns"],
values=data["values"],
)
column_name=data["columnName"],
ids=data["id"],
dbtable=data["dbtable"],
columns=data["columns"],
values=data["values"],
)
@app.route("/dbquery/delete", methods=["POST"])
@@ -342,26 +416,30 @@ def dbquery_delete():
if not all(data.get(k) for k in required):
return jsonify({"error": "Missing required parameters"}), 400
return delete_query(
column_name=data["columnName"],
ids=data["id"],
dbtable=data["dbtable"],
)
return delete_query(
column_name=data["columnName"],
ids=data["id"],
dbtable=data["dbtable"],
)
# --------------------------
# Online history
# --------------------------
@app.route("/history", methods=["DELETE"])
def api_delete_online_history():
if not is_authorized():
return jsonify({"error": "Forbidden"}), 403
return delete_online_history()
# --------------------------
# Device Events
# --------------------------
@app.route("/events/create/<mac>", methods=["POST"])
def api_create_event(mac):
if not is_authorized():
@@ -387,12 +465,14 @@ def api_events_by_mac(mac):
return jsonify({"error": "Forbidden"}), 403
return delete_device_events(mac)
@app.route("/events", methods=["DELETE"])
def api_delete_all_events():
if not is_authorized():
return jsonify({"error": "Forbidden"}), 403
return delete_events()
@app.route("/events", methods=["GET"])
def api_get_events():
if not is_authorized():
@@ -401,6 +481,7 @@ def api_get_events():
mac = request.args.get("mac")
return get_events(mac)
@app.route("/events/<int:days>", methods=["DELETE"])
def api_delete_old_events(days: int):
"""
@@ -409,9 +490,10 @@ def api_delete_old_events(days: int):
"""
if not is_authorized():
return jsonify({"error": "Forbidden"}), 403
return delete_events_older_than(days)
@app.route("/sessions/totals", methods=["GET"])
def api_get_events_totals():
if not is_authorized():
@@ -420,10 +502,12 @@ def api_get_events_totals():
period = get_date_from_period(request.args.get("period", "7 days"))
return get_events_totals(period)
# --------------------------
# Sessions
# --------------------------
@app.route("/sessions/create", methods=["POST"])
def api_create_session():
if not is_authorized():
@@ -440,7 +524,9 @@ def api_create_session():
if not mac or not ip or not start_time:
return jsonify({"success": False, "error": "Missing required parameters"}), 400
return create_session(mac, ip, start_time, end_time, event_type_conn, event_type_disc)
return create_session(
mac, ip, start_time, end_time, event_type_conn, event_type_disc
)
@app.route("/sessions/delete", methods=["DELETE"])
@@ -466,6 +552,7 @@ def api_get_sessions():
return get_sessions(mac, start_date, end_date)
@app.route("/sessions/calendar", methods=["GET"])
def api_get_sessions_calendar():
if not is_authorized():
@@ -477,6 +564,7 @@ def api_get_sessions_calendar():
return get_sessions_calendar(start_date, end_date)
@app.route("/sessions/<mac>", methods=["GET"])
def api_device_sessions(mac):
if not is_authorized():
@@ -485,6 +573,7 @@ def api_device_sessions(mac):
period = request.args.get("period", "1 day")
return get_device_sessions(mac, period)
@app.route("/sessions/session-events", methods=["GET"])
def api_get_session_events():
if not is_authorized():
@@ -494,6 +583,7 @@ def api_get_session_events():
period = get_date_from_period(request.args.get("period", "7 days"))
return get_session_events(session_event_type, period)
# --------------------------
# Prometheus metrics endpoint
# --------------------------
@@ -503,7 +593,8 @@ def metrics():
return jsonify({"error": "Forbidden"}), 403
# Return Prometheus metrics as plain text
return Response(get_metric_stats(), mimetype="text/plain")
return Response(get_metric_stats(), mimetype="text/plain")
# --------------------------
# In-app notifications
@@ -519,10 +610,11 @@ def api_write_notification():
if not content:
return jsonify({"success": False, "error": "Missing content"}), 400
write_notification(content, level)
return jsonify({"success": True})
@app.route("/messaging/in-app/unread", methods=["GET"])
def api_get_unread_notifications():
if not is_authorized():
@@ -530,6 +622,7 @@ def api_get_unread_notifications():
return get_unread_notifications()
@app.route("/messaging/in-app/read/all", methods=["POST"])
def api_mark_all_notifications_read():
if not is_authorized():
@@ -537,6 +630,7 @@ def api_mark_all_notifications_read():
return jsonify(mark_all_notifications_read())
@app.route("/messaging/in-app/delete", methods=["DELETE"])
def api_delete_all_notifications():
if not is_authorized():
@@ -544,6 +638,7 @@ def api_delete_all_notifications():
return delete_notifications()
@app.route("/messaging/in-app/delete/<guid>", methods=["DELETE"])
def api_delete_notification(guid):
"""Delete a single notification by GUID."""
@@ -556,6 +651,7 @@ def api_delete_notification(guid):
else:
return jsonify({"success": False, "error": result.get("error")}), 500
@app.route("/messaging/in-app/read/<guid>", methods=["POST"])
def api_mark_notification_read(guid):
"""Mark a single notification as read by GUID."""
@@ -567,7 +663,8 @@ def api_mark_notification_read(guid):
return jsonify({"success": True})
else:
return jsonify({"success": False, "error": result.get("error")}), 500
# --------------------------
# SYNC endpoint
# --------------------------
@@ -586,6 +683,7 @@ def sync_endpoint():
mylog("verbose", [msg])
return jsonify({"error": "Method Not Allowed"}), 405
# --------------------------
# Background Server Start
# --------------------------
@@ -594,7 +692,7 @@ def is_authorized():
is_authorized = token == f"Bearer {get_setting_value('API_TOKEN')}"
if not is_authorized:
msg = f"[api] Unauthorized access attempt - make sure your GRAPHQL_PORT and API_TOKEN settings are correct."
msg = "[api] Unauthorized access attempt - make sure your GRAPHQL_PORT and API_TOKEN settings are correct."
write_notification(msg, "alert")
mylog("verbose", [msg])
@@ -605,19 +703,15 @@ def start_server(graphql_port, app_state):
"""Start the GraphQL server in a background thread."""
if app_state.graphQLServerStarted == 0:
mylog('verbose', [f'[graphql endpoint] Starting on port: {graphql_port}'])
mylog("verbose", [f"[graphql endpoint] Starting on port: {graphql_port}"])
# Start Flask app in a separate thread
thread = threading.Thread(
target=lambda: app.run(
host="0.0.0.0",
port=graphql_port,
debug=True,
use_reloader=False
host="0.0.0.0", port=graphql_port, debug=True, use_reloader=False
)
)
thread.start()
# Update the state to indicate the server has started
app_state = updateState("Process: Idle", None, None, None, 1)
app_state = updateState("Process: Idle", None, None, None, 1)

View File

@@ -1,20 +1,12 @@
#!/usr/bin/env python
import json
import argparse
import os
import pathlib
import base64
import re
import sys
from datetime import datetime
from flask import jsonify, request, Response
import csv
import io
from io import StringIO
from flask import jsonify
# Register NetAlertX directories
INSTALL_PATH="/app"
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from database import get_temp_db_connection
@@ -100,4 +92,4 @@ def delete_query(column_name, ids, dbtable):
conn.close()
return jsonify({"success": True, "deleted_count": deleted_count})
except Exception as e:
return jsonify({"success": False, "error": str(e)}), 400
return jsonify({"success": False, "error": str(e)}), 400

View File

@@ -1,16 +1,12 @@
#!/usr/bin/env python
import json
import subprocess
import argparse
import os
import pathlib
import sys
from datetime import datetime
from flask import jsonify, request
# Register NetAlertX directories
INSTALL_PATH="/app"
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from database import get_temp_db_connection
@@ -21,13 +17,14 @@ from db.db_helper import row_to_json, get_date_from_period
# Device Endpoints Functions
# --------------------------
def get_device_data(mac):
"""Fetch device info with children, event stats, and presence calculation."""
# Open temporary connection for this request
conn = get_temp_db_connection()
cur = conn.cursor()
# Special case for new device
if mac.lower() == "new":
now = datetime.now().strftime("%Y-%m-%d %H:%M")
@@ -71,12 +68,12 @@ def get_device_data(mac):
"devEvents": 0,
"devDownAlerts": 0,
"devPresenceHours": 0,
"devFQDN": ""
"devFQDN": "",
}
return jsonify(device_data)
# Compute period date for sessions/events
period = request.args.get('period', '') # e.g., '7 days', '1 month', etc.
period = request.args.get("period", "") # e.g., '7 days', '1 month', etc.
period_date_sql = get_date_from_period(period)
current_date = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
@@ -128,18 +125,21 @@ def get_device_data(mac):
return jsonify({"error": "Device not found"}), 404
device_data = row_to_json(list(row.keys()), row)
device_data['devFirstConnection'] = format_date(device_data['devFirstConnection'])
device_data['devLastConnection'] = format_date(device_data['devLastConnection'])
device_data['devIsRandomMAC'] = is_random_mac(device_data['devMac'])
device_data["devFirstConnection"] = format_date(device_data["devFirstConnection"])
device_data["devLastConnection"] = format_date(device_data["devLastConnection"])
device_data["devIsRandomMAC"] = is_random_mac(device_data["devMac"])
# Fetch children
cur.execute("SELECT * FROM Devices WHERE devParentMAC = ? ORDER BY devPresentLastScan DESC", ( device_data['devMac'],))
cur.execute(
"SELECT * FROM Devices WHERE devParentMAC = ? ORDER BY devPresentLastScan DESC",
(device_data["devMac"],),
)
children_rows = cur.fetchall()
children = [row_to_json(list(r.keys()), r) for r in children_rows]
children_nics = [c for c in children if c.get("devParentRelType") == "nic"]
device_data['devChildrenDynamic'] = children
device_data['devChildrenNicsDynamic'] = children_nics
device_data["devChildrenDynamic"] = children
device_data["devChildrenNicsDynamic"] = children_nics
conn.close()
@@ -187,7 +187,9 @@ def set_device_data(mac, data):
data.get("devIsNew", 0),
data.get("devIsArchived", 0),
data.get("devLastConnection", datetime.now().strftime("%Y-%m-%d %H:%M:%S")),
data.get("devFirstConnection", datetime.now().strftime("%Y-%m-%d %H:%M:%S")),
data.get(
"devFirstConnection", datetime.now().strftime("%Y-%m-%d %H:%M:%S")
),
data.get("devLastIP", ""),
data.get("devGUID", ""),
data.get("devCustomProps", ""),
@@ -206,31 +208,31 @@ def set_device_data(mac, data):
WHERE devMac=?
"""
values = (
data.get("devName", ""),
data.get("devOwner", ""),
data.get("devType", ""),
data.get("devVendor", ""),
data.get("devIcon", ""),
data.get("devFavorite", 0),
data.get("devGroup", ""),
data.get("devLocation", ""),
data.get("devComments", ""),
data.get("devParentMAC", ""),
data.get("devParentPort", ""),
data.get("devSSID", ""),
data.get("devSite", ""),
data.get("devStaticIP", 0),
data.get("devScan", 0),
data.get("devAlertEvents", 0),
data.get("devAlertDown", 0),
data.get("devParentRelType", "default"),
data.get("devReqNicsOnline", 0),
data.get("devSkipRepeated", 0),
data.get("devIsNew", 0),
data.get("devIsArchived", 0),
data.get("devCustomProps", ""),
mac
)
data.get("devName", ""),
data.get("devOwner", ""),
data.get("devType", ""),
data.get("devVendor", ""),
data.get("devIcon", ""),
data.get("devFavorite", 0),
data.get("devGroup", ""),
data.get("devLocation", ""),
data.get("devComments", ""),
data.get("devParentMAC", ""),
data.get("devParentPort", ""),
data.get("devSSID", ""),
data.get("devSite", ""),
data.get("devStaticIP", 0),
data.get("devScan", 0),
data.get("devAlertEvents", 0),
data.get("devAlertDown", 0),
data.get("devParentRelType", "default"),
data.get("devReqNicsOnline", 0),
data.get("devSkipRepeated", 0),
data.get("devIsNew", 0),
data.get("devIsArchived", 0),
data.get("devCustomProps", ""),
mac,
)
conn = get_temp_db_connection()
cur = conn.cursor()
@@ -240,7 +242,6 @@ def set_device_data(mac, data):
return jsonify({"success": True})
def delete_device(mac):
"""Delete a device by MAC."""
conn = get_temp_db_connection()
@@ -274,12 +275,13 @@ def reset_device_props(mac, data=None):
conn.close()
return jsonify({"success": True})
def update_device_column(mac, column_name, column_value):
"""
Update a specific column for a given device.
Example: update_device_column("AA:BB:CC:DD:EE:FF", "devParentMAC", "Internet")
"""
conn = get_temp_db_connection()
cur = conn.cursor()
@@ -292,11 +294,12 @@ def update_device_column(mac, column_name, column_value):
return jsonify({"success": True})
else:
return jsonify({"success": False, "error": "Device not found"}), 404
conn.close()
return jsonify({"success": True})
def copy_device(mac_from, mac_to):
"""
Copy a device entry from one MAC to another.
@@ -310,7 +313,10 @@ def copy_device(mac_from, mac_to):
cur.execute("DROP TABLE IF EXISTS temp_devices")
# Create temporary table with source device
cur.execute("CREATE TABLE temp_devices AS SELECT * FROM Devices WHERE devMac = ?", (mac_from,))
cur.execute(
"CREATE TABLE temp_devices AS SELECT * FROM Devices WHERE devMac = ?",
(mac_from,),
)
# Update temporary table to target MAC
cur.execute("UPDATE temp_devices SET devMac = ?", (mac_to,))
@@ -319,18 +325,21 @@ def copy_device(mac_from, mac_to):
cur.execute("DELETE FROM Devices WHERE devMac = ?", (mac_to,))
# Insert new entry from temporary table
cur.execute("INSERT INTO Devices SELECT * FROM temp_devices WHERE devMac = ?", (mac_to,))
cur.execute(
"INSERT INTO Devices SELECT * FROM temp_devices WHERE devMac = ?", (mac_to,)
)
# Drop temporary table
cur.execute("DROP TABLE temp_devices")
conn.commit()
return jsonify({"success": True, "message": f"Device copied from {mac_from} to {mac_to}"})
return jsonify(
{"success": True, "message": f"Device copied from {mac_from} to {mac_to}"}
)
except Exception as e:
conn.rollback()
return jsonify({"success": False, "error": str(e)})
finally:
conn.close()

View File

@@ -1,25 +1,20 @@
#!/usr/bin/env python
import json
import subprocess
import argparse
import os
import pathlib
import base64
import re
import sys
from datetime import datetime
import sqlite3
from flask import jsonify, request, Response
import csv
import io
from io import StringIO
from logger import mylog
# Register NetAlertX directories
INSTALL_PATH="/app"
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from database import get_temp_db_connection
from helper import is_random_mac, format_date, get_setting_value
from db.db_helper import get_table_json, get_device_condition_by_status
@@ -27,6 +22,7 @@ from db.db_helper import get_table_json, get_device_condition_by_status
# Device Endpoints Functions
# --------------------------
def get_all_devices():
"""Retrieve all devices from the database."""
conn = get_temp_db_connection()
@@ -41,6 +37,7 @@ def get_all_devices():
conn.close()
return jsonify({"success": True, "devices": devices})
def delete_devices(macs):
"""
Delete devices from the Devices table.
@@ -75,6 +72,7 @@ def delete_devices(macs):
return jsonify({"success": True, "deleted_count": deleted_count})
def delete_all_with_empty_macs():
"""Delete devices with empty MAC addresses."""
conn = get_temp_db_connection()
@@ -85,15 +83,19 @@ def delete_all_with_empty_macs():
conn.close()
return jsonify({"success": True, "deleted": deleted})
def delete_unknown_devices():
"""Delete devices marked as unknown."""
conn = get_temp_db_connection()
cur = conn.cursor()
cur.execute("""DELETE FROM Devices WHERE devName='(unknown)' OR devName='(name not found)'""")
cur.execute(
"""DELETE FROM Devices WHERE devName='(unknown)' OR devName='(name not found)'"""
)
conn.commit()
conn.close()
return jsonify({"success": True, "deleted": cur.rowcount})
def export_devices(export_format):
"""
Export devices from the Devices table in the desired format.
@@ -112,15 +114,12 @@ def export_devices(export_format):
list(devices_json["data"][0].keys()) if devices_json["data"] else []
)
if export_format == "json":
# Convert to standard dict for Flask JSON
return jsonify({
"data": [row for row in devices_json["data"]],
"columns": list(columns)
})
return jsonify(
{"data": [row for row in devices_json["data"]], "columns": list(columns)}
)
elif export_format == "csv":
si = StringIO()
writer = csv.DictWriter(si, fieldnames=columns, quoting=csv.QUOTE_ALL)
writer.writeheader()
@@ -135,6 +134,7 @@ def export_devices(export_format):
else:
return jsonify({"error": f"Unsupported format '{export_format}'"}), 400
def import_csv(file_storage=None):
data = ""
skipped = []
@@ -143,7 +143,9 @@ def import_csv(file_storage=None):
# 1. Try JSON `content` (base64-encoded CSV)
if request.is_json and request.json.get("content"):
try:
data = base64.b64decode(request.json["content"], validate=True).decode("utf-8")
data = base64.b64decode(request.json["content"], validate=True).decode(
"utf-8"
)
except Exception as e:
return jsonify({"error": f"Base64 decode failed: {e}"}), 400
@@ -153,7 +155,8 @@ def import_csv(file_storage=None):
# 3. Fallback: try local file (same as PHP `$file = '../../../config/devices.csv';`)
else:
local_file = "/app/config/devices.csv"
config_root = os.environ.get("NETALERTX_CONFIG", "/data/config")
local_file = os.path.join(config_root, "devices.csv")
try:
with open(local_file, "r", encoding="utf-8") as f:
data = f.read()
@@ -164,11 +167,7 @@ def import_csv(file_storage=None):
return jsonify({"error": "No CSV data found"}), 400
# --- Clean up newlines inside quoted fields ---
data = re.sub(
r'"([^"]*)"',
lambda m: m.group(0).replace("\n", " "),
data
)
data = re.sub(r'"([^"]*)"', lambda m: m.group(0).replace("\n", " "), data)
# --- Parse CSV ---
lines = data.splitlines()
@@ -202,11 +201,8 @@ def import_csv(file_storage=None):
conn.commit()
conn.close()
return jsonify({
"success": True,
"inserted": row_count,
"skipped_lines": skipped
})
return jsonify({"success": True, "inserted": row_count, "skipped_lines": skipped})
def devices_totals():
conn = get_temp_db_connection()
@@ -215,15 +211,17 @@ def devices_totals():
# Build a combined query with sub-selects for each status
query = f"""
SELECT
(SELECT COUNT(*) FROM Devices {get_device_condition_by_status('my')}) AS devices,
(SELECT COUNT(*) FROM Devices {get_device_condition_by_status('connected')}) AS connected,
(SELECT COUNT(*) FROM Devices {get_device_condition_by_status('favorites')}) AS favorites,
(SELECT COUNT(*) FROM Devices {get_device_condition_by_status('new')}) AS new,
(SELECT COUNT(*) FROM Devices {get_device_condition_by_status('down')}) AS down,
(SELECT COUNT(*) FROM Devices {get_device_condition_by_status('archived')}) AS archived
(SELECT COUNT(*) FROM Devices {get_device_condition_by_status("my")}) AS devices,
(SELECT COUNT(*) FROM Devices {get_device_condition_by_status("connected")}) AS connected,
(SELECT COUNT(*) FROM Devices {get_device_condition_by_status("favorites")}) AS favorites,
(SELECT COUNT(*) FROM Devices {get_device_condition_by_status("new")}) AS new,
(SELECT COUNT(*) FROM Devices {get_device_condition_by_status("down")}) AS down,
(SELECT COUNT(*) FROM Devices {get_device_condition_by_status("archived")}) AS archived
"""
sql.execute(query)
row = sql.fetchone() # returns a tuple like (devices, connected, favorites, new, down, archived)
row = (
sql.fetchone()
) # returns a tuple like (devices, connected, favorites, new, down, archived)
conn.close()
@@ -252,12 +250,13 @@ def devices_by_status(status=None):
if r.get("devFavorite") == 1:
dev_name = f'<span class="text-yellow">&#9733</span>&nbsp;{dev_name}'
table_data.append({
"id": r.get("devMac", ""),
"title": dev_name,
"favorite": r.get("devFavorite", 0)
})
table_data.append(
{
"id": r.get("devMac", ""),
"title": dev_name,
"favorite": r.get("devFavorite", 0),
}
)
conn.close()
return jsonify(table_data)

View File

@@ -1,20 +1,19 @@
#!/usr/bin/env python
import json
import subprocess
import argparse
import os
import pathlib
import sys
from datetime import datetime
from flask import jsonify, request
from flask import jsonify
# Register NetAlertX directories
INSTALL_PATH="/app"
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from database import get_temp_db_connection
from helper import is_random_mac, format_date, get_setting_value, format_date_iso, format_event_date, timeNowTZ, mylog, ensure_datetime
from helper import (
mylog,
ensure_datetime,
)
from db.db_helper import row_to_json, get_date_from_period
@@ -24,12 +23,12 @@ from db.db_helper import row_to_json, get_date_from_period
def create_event(
mac: str,
ip: str,
event_type: str = "Device Down",
additional_info: str = "",
mac: str,
ip: str,
event_type: str = "Device Down",
additional_info: str = "",
pending_alert: int = 1,
event_time: datetime | None = None
event_time: datetime | None = None,
):
"""
Insert a single event into the Events table and return a standardized JSON response.
@@ -42,10 +41,13 @@ def create_event(
start_time = ensure_datetime(event_time)
cur.execute("""
cur.execute(
"""
INSERT INTO Events (eve_MAC, eve_IP, eve_DateTime, eve_EventType, eve_AdditionalInfo, eve_PendingAlertEmail)
VALUES (?, ?, ?, ?, ?, ?)
""", (mac, ip, start_time, event_type, additional_info, pending_alert))
""",
(mac, ip, start_time, event_type, additional_info, pending_alert),
)
conn.commit()
conn.close()
@@ -75,6 +77,7 @@ def get_events(mac=None):
conn.close()
return jsonify({"success": True, "events": events})
def delete_events_older_than(days):
"""Delete all events older than a specified number of days"""
@@ -83,15 +86,15 @@ def delete_events_older_than(days):
# Use a parameterized query with sqlite date function
sql = "DELETE FROM Events WHERE eve_DateTime <= date('now', ?)"
cur.execute(sql, [f'-{days} days'])
cur.execute(sql, [f"-{days} days"])
conn.commit()
conn.close()
return jsonify({
"success": True,
"message": f"Deleted events older than {days} days"
})
return jsonify(
{"success": True, "message": f"Deleted events older than {days} days"}
)
def delete_events():
"""Delete all events"""
@@ -107,7 +110,6 @@ def delete_events():
return jsonify({"success": True, "message": "Deleted all events"})
def get_events_totals(period: str = "7 days"):
"""
Return counts for events and sessions totals over a given period.
@@ -143,4 +145,3 @@ def get_events_totals(period: str = "7 days"):
# Return as JSON array
result_json = [row[0], row[1], row[2], row[3], row[4], row[5]]
return jsonify(result_json)

View File

@@ -2,24 +2,32 @@ import graphene
from graphene import ObjectType, String, Int, Boolean, List, Field, InputObjectType
import json
import sys
import os
# Register NetAlertX directories
INSTALL_PATH="/app"
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
sys.path.extend([f"{INSTALL_PATH}/server"])
from logger import mylog
from const import apiPath
from helper import is_random_mac, get_number_of_children, format_ip_long, get_setting_value
from helper import (
is_random_mac,
get_number_of_children,
format_ip_long,
get_setting_value,
)
# Define a base URL with the user's home directory
folder = apiPath
folder = apiPath
# --- DEVICES ---
# --- DEVICES ---
# Pagination and Sorting Input Types
class SortOptionsInput(InputObjectType):
field = String()
order = String()
class FilterOptionsInput(InputObjectType):
filterColumn = String()
filterValue = String()
@@ -37,45 +45,45 @@ class PageQueryOptionsInput(InputObjectType):
# Device ObjectType
class Device(ObjectType):
rowid = Int()
devMac = String()
devName = String()
devOwner = String()
devType = String()
devVendor = String()
devFavorite = Int()
devGroup = String()
devComments = String()
devFirstConnection = String()
devLastConnection = String()
devLastIP = String()
devStaticIP = Int()
devScan = Int()
devLogEvents = Int()
devAlertEvents = Int()
devAlertDown = Int()
devSkipRepeated = Int()
devLastNotification = String()
devPresentLastScan = Int()
devIsNew = Int()
devLocation = String()
devIsArchived = Int()
devParentMAC = String()
devParentPort = String()
devIcon = String()
devGUID = String()
devSite = String()
devSSID = String()
devSyncHubNode = String()
devMac = String()
devName = String()
devOwner = String()
devType = String()
devVendor = String()
devFavorite = Int()
devGroup = String()
devComments = String()
devFirstConnection = String()
devLastConnection = String()
devLastIP = String()
devStaticIP = Int()
devScan = Int()
devLogEvents = Int()
devAlertEvents = Int()
devAlertDown = Int()
devSkipRepeated = Int()
devLastNotification = String()
devPresentLastScan = Int()
devIsNew = Int()
devLocation = String()
devIsArchived = Int()
devParentMAC = String()
devParentPort = String()
devIcon = String()
devGUID = String()
devSite = String()
devSSID = String()
devSyncHubNode = String()
devSourcePlugin = String()
devCustomProps = String()
devStatus = String()
devIsRandomMac = Int()
devParentChildrenCount = Int()
devIpLong = Int()
devFilterStatus = String()
devFQDN = String()
devParentRelType = String()
devReqNicsOnline = Int()
devIsRandomMac = Int()
devParentChildrenCount = Int()
devIpLong = Int()
devFilterStatus = String()
devFQDN = String()
devParentRelType = String()
devReqNicsOnline = Int()
class DeviceResult(ObjectType):
@@ -83,7 +91,8 @@ class DeviceResult(ObjectType):
count = Int()
# --- SETTINGS ---
# --- SETTINGS ---
# Setting ObjectType
class Setting(ObjectType):
@@ -102,110 +111,168 @@ class SettingResult(ObjectType):
settings = List(Setting)
count = Int()
# Define Query Type with Pagination Support
class Query(ObjectType):
# --- DEVICES ---
devices = Field(DeviceResult, options=PageQueryOptionsInput())
def resolve_devices(self, info, options=None):
# mylog('none', f'[graphql_schema] resolve_devices: {self}')
try:
with open(folder + 'table_devices.json', 'r') as f:
with open(folder + "table_devices.json", "r") as f:
devices_data = json.load(f)["data"]
except (FileNotFoundError, json.JSONDecodeError) as e:
mylog('none', f'[graphql_schema] Error loading devices data: {e}')
mylog("none", f"[graphql_schema] Error loading devices data: {e}")
return DeviceResult(devices=[], count=0)
# Add dynamic fields to each device
for device in devices_data:
device["devIsRandomMac"] = 1 if is_random_mac(device["devMac"]) else 0
device["devParentChildrenCount"] = get_number_of_children(device["devMac"], devices_data)
device["devParentChildrenCount"] = get_number_of_children(
device["devMac"], devices_data
)
device["devIpLong"] = format_ip_long(device.get("devLastIP", ""))
mylog('trace', f'[graphql_schema] devices_data: {devices_data}')
mylog("trace", f"[graphql_schema] devices_data: {devices_data}")
# initialize total_count
total_count = len(devices_data)
# Apply sorting if options are provided
if options:
# Define status-specific filtering
if options.status:
status = options.status
mylog('trace', f'[graphql_schema] Applying status filter: {status}')
mylog("trace", f"[graphql_schema] Applying status filter: {status}")
# Include devices matching criteria in UI_MY_DEVICES
allowed_statuses = get_setting_value("UI_MY_DEVICES")
hidden_relationships = get_setting_value("UI_hide_rel_types")
network_dev_types = get_setting_value("NETWORK_DEVICE_TYPES")
allowed_statuses = get_setting_value("UI_MY_DEVICES")
hidden_relationships = get_setting_value("UI_hide_rel_types")
network_dev_types = get_setting_value("NETWORK_DEVICE_TYPES")
mylog('trace', f'[graphql_schema] allowed_statuses: {allowed_statuses}')
mylog('trace', f'[graphql_schema] hidden_relationships: {hidden_relationships}')
mylog('trace', f'[graphql_schema] network_dev_types: {network_dev_types}')
mylog("trace", f"[graphql_schema] allowed_statuses: {allowed_statuses}")
mylog(
"trace",
f"[graphql_schema] hidden_relationships: {hidden_relationships}",
)
mylog(
"trace", f"[graphql_schema] network_dev_types: {network_dev_types}"
)
# Filtering based on the "status"
if status == "my_devices":
devices_data = [
device for device in devices_data
if ( device.get("devParentRelType") not in hidden_relationships)
device
for device in devices_data
if (device.get("devParentRelType") not in hidden_relationships)
]
devices_data = [
device for device in devices_data
device
for device in devices_data
if (
(device["devPresentLastScan"] == 1 and 'online' in allowed_statuses) or
(device["devIsNew"] == 1 and 'new' in allowed_statuses) or
(device["devPresentLastScan"] == 0 and device["devAlertDown"] and 'down' in allowed_statuses) or
(device["devPresentLastScan"] == 0 and 'offline' in allowed_statuses) and device["devIsArchived"] == 0 or
(device["devIsArchived"] == 1 and 'archived' in allowed_statuses)
(
device["devPresentLastScan"] == 1
and "online" in allowed_statuses
)
or (device["devIsNew"] == 1 and "new" in allowed_statuses)
or (
device["devPresentLastScan"] == 0
and device["devAlertDown"]
and "down" in allowed_statuses
)
or (
device["devPresentLastScan"] == 0
and "offline" in allowed_statuses
)
and device["devIsArchived"] == 0
or (
device["devIsArchived"] == 1
and "archived" in allowed_statuses
)
)
]
elif status == "connected":
devices_data = [device for device in devices_data if device["devPresentLastScan"] == 1]
devices_data = [
device
for device in devices_data
if device["devPresentLastScan"] == 1
]
elif status == "favorites":
devices_data = [device for device in devices_data if device["devFavorite"] == 1]
devices_data = [
device for device in devices_data if device["devFavorite"] == 1
]
elif status == "new":
devices_data = [device for device in devices_data if device["devIsNew"] == 1]
devices_data = [
device for device in devices_data if device["devIsNew"] == 1
]
elif status == "down":
devices_data = [
device for device in devices_data
device
for device in devices_data
if device["devPresentLastScan"] == 0 and device["devAlertDown"]
]
elif status == "archived":
devices_data = [device for device in devices_data if device["devIsArchived"] == 1]
devices_data = [
device
for device in devices_data
if device["devIsArchived"] == 1
]
elif status == "offline":
devices_data = [device for device in devices_data if device["devPresentLastScan"] == 0]
devices_data = [
device
for device in devices_data
if device["devPresentLastScan"] == 0
]
elif status == "network_devices":
devices_data = [device for device in devices_data if device["devType"] in network_dev_types]
devices_data = [
device
for device in devices_data
if device["devType"] in network_dev_types
]
elif status == "all_devices":
devices_data = devices_data # keep all
devices_data = devices_data # keep all
# additional filters
if options.filters:
for filter in options.filters:
if filter.filterColumn and filter.filterValue:
devices_data = [
device for device in devices_data
if str(device.get(filter.filterColumn, "")).lower() == str(filter.filterValue).lower()
device
for device in devices_data
if str(device.get(filter.filterColumn, "")).lower()
== str(filter.filterValue).lower()
]
# Search data if a search term is provided
if options.search:
# Define static list of searchable fields
searchable_fields = [
"devName", "devMac", "devOwner", "devType", "devVendor", "devLastIP",
"devGroup", "devComments", "devLocation", "devStatus", "devSSID",
"devSite", "devSourcePlugin", "devSyncHubNode", "devFQDN", "devParentRelType", "devParentMAC"
"devName",
"devMac",
"devOwner",
"devType",
"devVendor",
"devLastIP",
"devGroup",
"devComments",
"devLocation",
"devStatus",
"devSSID",
"devSite",
"devSourcePlugin",
"devSyncHubNode",
"devFQDN",
"devParentRelType",
"devParentMAC",
]
search_term = options.search.lower()
devices_data = [
device for device in devices_data
device
for device in devices_data
if any(
search_term in str(device.get(field, "")).lower()
for field in searchable_fields # Search only predefined fields
@@ -218,12 +285,14 @@ class Query(ObjectType):
devices_data = sorted(
devices_data,
key=lambda x: mixed_type_sort_key(
x.get(sort_option.field).lower() if isinstance(x.get(sort_option.field), str) else x.get(sort_option.field)
x.get(sort_option.field).lower()
if isinstance(x.get(sort_option.field), str)
else x.get(sort_option.field)
),
reverse=(sort_option.order.lower() == "desc")
reverse=(sort_option.order.lower() == "desc"),
)
# capture total count after all the filtering and searching, BEFORE pagination
# capture total count after all the filtering and searching, BEFORE pagination
total_count = len(devices_data)
# Then apply pagination
@@ -234,24 +303,21 @@ class Query(ObjectType):
# Convert dict objects to Device instances to enable field resolution
devices = [Device(**device) for device in devices_data]
return DeviceResult(devices=devices, count=total_count)
# --- SETTINGS ---
settings = Field(SettingResult)
# --- SETTINGS ---
settings = Field(SettingResult)
def resolve_settings(root, info):
try:
with open(folder + 'table_settings.json', 'r') as f:
with open(folder + "table_settings.json", "r") as f:
settings_data = json.load(f)["data"]
except (FileNotFoundError, json.JSONDecodeError) as e:
mylog('none', f'[graphql_schema] Error loading settings data: {e}')
mylog("none", f"[graphql_schema] Error loading settings data: {e}")
return SettingResult(settings=[], count=0)
mylog('trace', f'[graphql_schema] settings_data: {settings_data}')
mylog("trace", f"[graphql_schema] settings_data: {settings_data}")
# Convert to Setting objects
settings = [Setting(**setting) for setting in settings_data]
@@ -259,15 +325,15 @@ class Query(ObjectType):
return SettingResult(settings=settings, count=len(settings))
# helps sorting inconsistent dataset mixed integers and strings
def mixed_type_sort_key(value):
if value is None or value == "":
return (2, '') # Place None or empty strings last
return (2, "") # Place None or empty strings last
try:
return (0, int(value)) # Integers get priority
except (ValueError, TypeError):
return (1, str(value)) # Strings come next
# Schema Definition
devicesSchema = graphene.Schema(query=Query)

View File

@@ -1,26 +1,21 @@
#!/usr/bin/env python
import json
import subprocess
import argparse
import os
import pathlib
import sys
from datetime import datetime
from flask import jsonify, request
from flask import jsonify
# Register NetAlertX directories
INSTALL_PATH="/app"
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from database import get_temp_db_connection
from helper import is_random_mac, format_date, get_setting_value
# --------------------------------------------------
# Online History Activity Endpoints Functions
# --------------------------------------------------
def delete_online_history():
"""Delete all online history activity"""
@@ -32,4 +27,4 @@ def delete_online_history():
conn.commit()
conn.close()
return jsonify({"success": True, "message": "Deleted online history"})
return jsonify({"success": True, "message": "Deleted online history"})

View File

@@ -6,26 +6,27 @@ import shutil
import os
from flask import jsonify
# Register NetAlertX directories
INSTALL_PATH = "/app"
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
# Resolve speedtest-cli path once at module load and validate it.
# We do this once to avoid repeated PATH lookups and to fail fast when
# the binary isn't available or executable.
SPEEDTEST_CLI_PATH = None
def _get_speedtest_cli_path():
"""Resolve and validate the speedtest-cli executable path."""
path = shutil.which("speedtest-cli")
if path is None:
raise RuntimeError(
"speedtest-cli not found in PATH. Please install it: pip install speedtest-cli"
"speedtest-cli not found in PATH. Please install it: "
"pip install speedtest-cli"
)
if not os.access(path, os.X_OK):
raise RuntimeError(f"speedtest-cli found at {path} but is not executable")
raise RuntimeError(
f"speedtest-cli found at {path} but is not executable"
)
return path
try:
SPEEDTEST_CLI_PATH = _get_speedtest_cli_path()
except Exception as e:
@@ -33,22 +34,32 @@ except Exception as e:
print(f"Warning: {e}", file=sys.stderr)
SPEEDTEST_CLI_PATH = None
def wakeonlan(mac):
def wakeonlan(mac):
# Validate MAC
if not re.match(r'^([0-9A-Fa-f]{2}[:-]){5}([0-9A-Fa-f]{2})$', mac):
if not re.match(r"^([0-9A-Fa-f]{2}[:-]){5}([0-9A-Fa-f]{2})$", mac):
return jsonify({"success": False, "error": f"Invalid MAC: {mac}"}), 400
try:
result = subprocess.run(
["wakeonlan", mac],
capture_output=True,
text=True,
check=True
["wakeonlan", mac], capture_output=True, text=True, check=True
)
return jsonify(
{
"success": True,
"message": "WOL packet sent",
"output": result.stdout.strip(),
}
)
return jsonify({"success": True, "message": "WOL packet sent", "output": result.stdout.strip()})
except subprocess.CalledProcessError as e:
return jsonify({"success": False, "error": "Failed to send WOL packet", "details": e.stderr.strip()}), 500
return jsonify(
{
"success": False,
"error": "Failed to send WOL packet",
"details": e.stderr.strip(),
}
), 500
def traceroute(ip):
"""
@@ -77,10 +88,10 @@ def traceroute(ip):
# --------------------------
try:
result = subprocess.run(
["traceroute", ip], # Command and argument
capture_output=True, # Capture stdout/stderr
text=True, # Return output as string
check=True # Raise CalledProcessError on non-zero exit
["traceroute", ip], # Command and argument
capture_output=True, # Capture stdout/stderr
text=True, # Return output as string
check=True, # Raise CalledProcessError on non-zero exit
)
# Return success response with traceroute output
return jsonify({"success": True, "output": result.stdout.strip()})
@@ -90,11 +101,13 @@ def traceroute(ip):
# --------------------------
except subprocess.CalledProcessError as e:
# Return 500 if traceroute fails
return jsonify({
"success": False,
"error": "Traceroute failed",
"details": e.stderr.strip()
}), 500
return jsonify(
{
"success": False,
"error": "Traceroute failed",
"details": e.stderr.strip(),
}
), 500
def speedtest():
@@ -105,10 +118,12 @@ def speedtest():
# If the CLI wasn't found at module load, return a 503 so the caller
# knows the service is unavailable rather than failing unpredictably.
if SPEEDTEST_CLI_PATH is None:
return jsonify({
"success": False,
"error": "speedtest-cli is not installed or not found in PATH"
}), 503
return jsonify(
{
"success": False,
"error": "speedtest-cli is not installed or not found in PATH",
}
), 503
try:
# Run speedtest-cli command using the resolved absolute path
@@ -116,7 +131,7 @@ def speedtest():
[SPEEDTEST_CLI_PATH, "--secure", "--simple"],
capture_output=True,
text=True,
check=True
check=True,
)
# Return each line as a list
@@ -124,18 +139,22 @@ def speedtest():
return jsonify({"success": True, "output": output_lines})
except subprocess.CalledProcessError as e:
return jsonify({
"success": False,
"error": "Speedtest failed",
"details": e.stderr.strip()
}), 500
return jsonify(
{
"success": False,
"error": "Speedtest failed",
"details": e.stderr.strip(),
}
), 500
except Exception as e:
return jsonify({
"success": False,
"error": "Failed to run speedtest",
"details": str(e)
}), 500
return jsonify(
{
"success": False,
"error": "Failed to run speedtest",
"details": str(e),
}
), 500
def nslookup(ip):
@@ -147,29 +166,25 @@ def nslookup(ip):
try:
ipaddress.ip_address(ip)
except ValueError:
return jsonify({
"success": False,
"error": "Invalid IP address"
}), 400
return jsonify({"success": False, "error": "Invalid IP address"}), 400
try:
# Run nslookup command
result = subprocess.run(
["nslookup", ip],
capture_output=True,
text=True,
check=True
["nslookup", ip], capture_output=True, text=True, check=True
)
output_lines = result.stdout.strip().split("\n")
return jsonify({"success": True, "output": output_lines})
except subprocess.CalledProcessError as e:
return jsonify({
"success": False,
"error": "nslookup failed",
"details": e.stderr.strip()
}), 500
return jsonify(
{
"success": False,
"error": "nslookup failed",
"details": e.stderr.strip(),
}
), 500
def nmap_scan(ip, mode):
@@ -186,24 +201,20 @@ def nmap_scan(ip, mode):
try:
ipaddress.ip_address(ip)
except ValueError:
return jsonify({
"success": False,
"error": "Invalid IP address"
}), 400
return jsonify({"success": False, "error": "Invalid IP address"}), 400
# Map scan modes to nmap arguments
mode_args = {
"fast": ["-F"],
"normal": [],
"detail": ["-A"],
"skipdiscovery": ["-Pn"]
"skipdiscovery": ["-Pn"],
}
if mode not in mode_args:
return jsonify({
"success": False,
"error": f"Invalid scan mode '{mode}'"
}), 400
return jsonify(
{"success": False, "error": f"Invalid scan mode '{mode}'"}
), 400
try:
# Build and run nmap command
@@ -212,23 +223,22 @@ def nmap_scan(ip, mode):
cmd,
capture_output=True,
text=True,
check=True
check=True,
)
output_lines = result.stdout.strip().split("\n")
return jsonify({
"success": True,
"mode": mode,
"ip": ip,
"output": output_lines
})
return jsonify(
{"success": True, "mode": mode, "ip": ip, "output": output_lines}
)
except subprocess.CalledProcessError as e:
return jsonify({
"success": False,
"error": "nmap scan failed",
"details": e.stderr.strip()
}), 500
return jsonify(
{
"success": False,
"error": "nmap scan failed",
"details": e.stderr.strip(),
}
), 500
def internet_info():
@@ -242,7 +252,7 @@ def internet_info():
["curl", "-s", "https://ipinfo.io"],
capture_output=True,
text=True,
check=True
check=True,
)
output = result.stdout.strip()
@@ -250,13 +260,20 @@ def internet_info():
raise ValueError("Empty response from ipinfo.io")
# Clean up the JSON-like string by removing { } , and "
cleaned_output = output.replace("{", "").replace("}", "").replace(",", "").replace('"', "")
cleaned_output = (
output.replace("{", "")
.replace("}", "")
.replace(",", "")
.replace('"', "")
)
return jsonify({"success": True, "output": cleaned_output})
except (subprocess.CalledProcessError, ValueError) as e:
return jsonify({
"success": False,
"error": "Failed to fetch internet info",
"details": str(e)
}), 500
return jsonify(
{
"success": False,
"error": "Failed to fetch internet info",
"details": str(e),
}
), 500

View File

@@ -1,51 +1,54 @@
import json
import sys
import os
# Register NetAlertX directories
INSTALL_PATH = "/app"
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
sys.path.extend([f"{INSTALL_PATH}/server"])
from logger import mylog
from const import apiPath
from helper import is_random_mac, get_number_of_children, format_ip_long, get_setting_value
def escape_label_value(val):
"""
Escape special characters for Prometheus labels.
"""
return str(val).replace('\\', '\\\\').replace('\n', '\\n').replace('"', '\\"')
return str(val).replace("\\", "\\\\").replace("\n", "\\n").replace('"', '\\"')
# Define a base URL with the user's home directory
folder = apiPath
def get_metric_stats():
output = []
# 1. Dashboard totals
try:
with open(folder + 'table_devices_tiles.json', 'r') as f:
with open(folder + "table_devices_tiles.json", "r") as f:
tiles_data = json.load(f)["data"]
if isinstance(tiles_data, list) and tiles_data:
totals = tiles_data[0]
output.append(f'netalertx_connected_devices {totals.get("connected", 0)}')
output.append(f'netalertx_offline_devices {totals.get("offline", 0)}')
output.append(f'netalertx_down_devices {totals.get("down", 0)}')
output.append(f'netalertx_new_devices {totals.get("new", 0)}')
output.append(f'netalertx_archived_devices {totals.get("archived", 0)}')
output.append(f'netalertx_favorite_devices {totals.get("favorites", 0)}')
output.append(f'netalertx_my_devices {totals.get("my_devices", 0)}')
output.append(f"netalertx_connected_devices {totals.get('connected', 0)}")
output.append(f"netalertx_offline_devices {totals.get('offline', 0)}")
output.append(f"netalertx_down_devices {totals.get('down', 0)}")
output.append(f"netalertx_new_devices {totals.get('new', 0)}")
output.append(f"netalertx_archived_devices {totals.get('archived', 0)}")
output.append(f"netalertx_favorite_devices {totals.get('favorites', 0)}")
output.append(f"netalertx_my_devices {totals.get('my_devices', 0)}")
else:
output.append("# Unexpected format in table_devices_tiles.json")
except (FileNotFoundError, json.JSONDecodeError) as e:
mylog('none', f'[metrics] Error loading tiles data: {e}')
mylog("none", f"[metrics] Error loading tiles data: {e}")
output.append(f"# Error loading tiles data: {e}")
except Exception as e:
output.append(f"# General error loading dashboard totals: {e}")
# 2. Device-level metrics
try:
with open(folder + 'table_devices.json', 'r') as f:
with open(folder + "table_devices.json", "r") as f:
data = json.load(f)
devices = data.get("data", [])
@@ -68,7 +71,7 @@ def get_metric_stats():
)
except (FileNotFoundError, json.JSONDecodeError) as e:
mylog('none', f'[metrics] Error loading devices data: {e}')
mylog("none", f"[metrics] Error loading devices data: {e}")
output.append(f"# Error loading devices data: {e}")
except Exception as e:
output.append(f"# General error processing device metrics: {e}")

View File

@@ -1,39 +1,49 @@
#!/usr/bin/env python
import json
import subprocess
import argparse
import os
import pathlib
import sqlite3
import time
import sys
from datetime import datetime
from flask import jsonify, request
from flask import jsonify
# Register NetAlertX directories
INSTALL_PATH="/app"
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from database import get_temp_db_connection
from helper import is_random_mac, format_date, get_setting_value, format_date_iso, format_event_date, mylog, timeNowTZ, format_date_diff, format_ip_long, parse_datetime
from db.db_helper import row_to_json, get_date_from_period
from helper import (
format_date,
format_date_iso,
format_event_date,
format_date_diff,
format_ip_long,
)
from db.db_helper import get_date_from_period
# --------------------------
# Sessions Endpoints Functions
# --------------------------
# -------------------------------------------------------------------------------------------
def create_session(mac, ip, start_time, end_time=None, event_type_conn="Connected", event_type_disc="Disconnected"):
def create_session(
mac,
ip,
start_time,
end_time=None,
event_type_conn="Connected",
event_type_disc="Disconnected",
):
"""Insert a new session into Sessions table"""
conn = get_temp_db_connection()
cur = conn.cursor()
cur.execute("""
cur.execute(
"""
INSERT INTO Sessions (ses_MAC, ses_IP, ses_DateTimeConnection, ses_DateTimeDisconnection,
ses_EventTypeConnection, ses_EventTypeDisconnection)
VALUES (?, ?, ?, ?, ?, ?)
""", (mac, ip, start_time, end_time, event_type_conn, event_type_disc))
""",
(mac, ip, start_time, end_time, event_type_conn, event_type_disc),
)
conn.commit()
conn.close()
@@ -83,7 +93,6 @@ def get_sessions(mac=None, start_date=None, end_date=None):
return jsonify({"success": True, "sessions": table_data})
def get_sessions_calendar(start_date, end_date):
"""
Fetch sessions between a start and end date for calendar display.
@@ -137,7 +146,19 @@ def get_sessions_calendar(start_date, end_date):
OR SES1.ses_StillConnected = 1
"""
cur.execute(sql, (start_date, end_date, start_date, end_date, start_date, end_date, start_date, end_date))
cur.execute(
sql,
(
start_date,
end_date,
start_date,
end_date,
start_date,
end_date,
start_date,
end_date,
),
)
rows = cur.fetchall()
table_data = []
@@ -145,7 +166,10 @@ def get_sessions_calendar(start_date, end_date):
row = dict(r)
# Determine color
if row["ses_EventTypeConnection"] == "<missing event>" or row["ses_EventTypeDisconnection"] == "<missing event>":
if (
row["ses_EventTypeConnection"] == "<missing event>"
or row["ses_EventTypeDisconnection"] == "<missing event>"
):
color = "#f39c12"
elif row["ses_StillConnected"] == 1:
color = "#00a659"
@@ -160,21 +184,22 @@ def get_sessions_calendar(start_date, end_date):
)
# Append calendar entry
table_data.append({
"resourceId": row["ses_MAC"],
"title": "",
"start": format_date_iso(row["ses_DateTimeConnectionCorrected"]),
"end": format_date_iso(row["ses_DateTimeDisconnectionCorrected"]),
"color": color,
"tooltip": tooltip,
"className": "no-border"
})
table_data.append(
{
"resourceId": row["ses_MAC"],
"title": "",
"start": format_date_iso(row["ses_DateTimeConnectionCorrected"]),
"end": format_date_iso(row["ses_DateTimeDisconnectionCorrected"]),
"color": color,
"tooltip": tooltip,
"className": "no-border",
}
)
conn.close()
return jsonify({"success": True, "sessions": table_data})
def get_device_sessions(mac, period):
"""
Fetch device sessions for a given MAC address and period.
@@ -203,7 +228,6 @@ def get_device_sessions(mac, period):
)
"""
cur.execute(sql, (mac,))
rows = cur.fetchall()
conn.close()
@@ -226,12 +250,16 @@ def get_device_sessions(mac, period):
end = format_date(row["ses_DateTimeDisconnection"])
# Duration
if row["ses_EventTypeConnection"] in ("<missing event>", None) or row["ses_EventTypeDisconnection"] in ("<missing event>", None):
if row["ses_EventTypeConnection"] in ("<missing event>", None) or row[
"ses_EventTypeDisconnection"
] in ("<missing event>", None):
dur = "..."
elif row["ses_StillConnected"]:
dur = format_date_diff(row["ses_DateTimeConnection"], None)["text"]
else:
dur = format_date_diff(row["ses_DateTimeConnection"], row["ses_DateTimeDisconnection"])["text"]
dur = format_date_diff(
row["ses_DateTimeConnection"], row["ses_DateTimeDisconnection"]
)["text"]
# Additional Info
info = row["ses_AdditionalInfo"]
@@ -239,15 +267,17 @@ def get_device_sessions(mac, period):
info = f"{row['ses_EventTypeConnection']}: {info}"
# Push row data
table_data["data"].append({
"ses_MAC": mac,
"ses_DateTimeOrder": row["ses_DateTimeOrder"],
"ses_Connection": ini,
"ses_Disconnection": end,
"ses_Duration": dur,
"ses_IP": row["ses_IP"],
"ses_Info": info,
})
table_data["data"].append(
{
"ses_MAC": mac,
"ses_DateTimeOrder": row["ses_DateTimeOrder"],
"ses_Connection": ini,
"ses_Disconnection": end,
"ses_Duration": dur,
"ses_IP": row["ses_IP"],
"ses_Info": info,
}
)
# Control no rows
if not table_data["data"]:
@@ -255,10 +285,7 @@ def get_device_sessions(mac, period):
sessions = table_data["data"]
return jsonify({
"success": True,
"sessions": sessions
})
return jsonify({"success": True, "sessions": sessions})
def get_session_events(event_type, period_date):
@@ -291,7 +318,7 @@ def get_session_events(event_type, period_date):
WHERE eve_DateTime >= {period_date}
"""
sql_sessions = f"""
sql_sessions = """
SELECT
IFNULL(ses_DateTimeConnection, ses_DateTimeDisconnection) AS ses_DateTimeOrder,
devName,
@@ -314,20 +341,26 @@ def get_session_events(event_type, period_date):
if event_type == "all":
sql = sql_events
elif event_type == "sessions":
sql = sql_sessions + f"""
sql = (
sql_sessions
+ f"""
WHERE (
ses_DateTimeConnection >= {period_date}
OR ses_DateTimeDisconnection >= {period_date}
OR ses_StillConnected = 1
)
"""
)
elif event_type == "missing":
sql = sql_sessions + f"""
sql = (
sql_sessions
+ f"""
WHERE (
(ses_DateTimeConnection IS NULL AND ses_DateTimeDisconnection >= {period_date})
OR (ses_DateTimeDisconnection IS NULL AND ses_StillConnected = 0 AND ses_DateTimeConnection >= {period_date})
)
"""
)
elif event_type == "voided":
sql = sql_events + ' AND eve_EventType LIKE "VOIDED%"'
elif event_type == "new":
@@ -335,7 +368,7 @@ def get_session_events(event_type, period_date):
elif event_type == "down":
sql = sql_events + ' AND eve_EventType = "Device Down"'
else:
sql = sql_events + ' AND 1=0'
sql = sql_events + " AND 1=0"
cur.execute(sql)
rows = cur.fetchall()

View File

@@ -5,7 +5,8 @@ from logger import mylog
from helper import get_setting_value, timeNowTZ
from messaging.in_app import write_notification
INSTALL_PATH = "/app"
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
def handle_sync_get():
"""Handle GET requests for SYNC (NODE → HUB)."""
@@ -23,13 +24,15 @@ def handle_sync_get():
response_data = base64.b64encode(raw_data).decode("utf-8")
write_notification("[Plugin: SYNC] Data sent", "info", timeNowTZ())
return jsonify({
"node_name": get_setting_value("SYNC_node_name"),
"status": 200,
"message": "OK",
"data_base64": response_data,
"timestamp": timeNowTZ()
}), 200
return jsonify(
{
"node_name": get_setting_value("SYNC_node_name"),
"status": 200,
"message": "OK",
"data_base64": response_data,
"timestamp": timeNowTZ(),
}
), 200
def handle_sync_post():
@@ -42,18 +45,19 @@ def handle_sync_post():
os.makedirs(storage_path, exist_ok=True)
encoded_files = [
f for f in os.listdir(storage_path)
f
for f in os.listdir(storage_path)
if f.startswith(f"last_result.{plugin}.encoded.{node_name}")
]
decoded_files = [
f for f in os.listdir(storage_path)
f
for f in os.listdir(storage_path)
if f.startswith(f"last_result.{plugin}.decoded.{node_name}")
]
file_count = len(encoded_files + decoded_files) + 1
file_path_new = os.path.join(
storage_path,
f"last_result.{plugin}.encoded.{node_name}.{file_count}.log"
storage_path, f"last_result.{plugin}.encoded.{node_name}.{file_count}.log"
)
try: