From 840bfe32d203be059d9bcb1d2d2d837884c4c808 Mon Sep 17 00:00:00 2001 From: jokob-sk Date: Thu, 14 Aug 2025 14:28:10 +1000 Subject: [PATCH] sync plugin endpoint refactor --- docs/PLUGINS.md | 3 +- front/js/settings_utils.js | 36 ++++---- front/plugins/sync/README.md | 2 +- front/plugins/sync/config.json | 4 +- front/plugins/sync/sync.py | 87 +++++++++++-------- server/api_server/api_server_start.py | 38 ++++++-- ...{graphql_schema.py => graphql_endpoint.py} | 0 ...heus_metrics.py => prometheus_endpoint.py} | 0 server/api_server/sync_endpoint.py | 71 +++++++++++++++ server/messaging/in_app.py | 65 +++++++------- 10 files changed, 207 insertions(+), 99 deletions(-) rename server/api_server/{graphql_schema.py => graphql_endpoint.py} (100%) rename server/api_server/{prometheus_metrics.py => prometheus_endpoint.py} (100%) create mode 100755 server/api_server/sync_endpoint.py diff --git a/docs/PLUGINS.md b/docs/PLUGINS.md index dfa52ac0..c9093d2c 100755 --- a/docs/PLUGINS.md +++ b/docs/PLUGINS.md @@ -79,10 +79,11 @@ Device-detecting plugins insert values into the `CurrentScan` database table. T | `SETPWD` | [set_password](https://github.com/jokob-sk/NetAlertX/tree/main/front/plugins/set_password/) | ⚙ | Set password | | Yes | | `SMTP` | [_publisher_email](https://github.com/jokob-sk/NetAlertX/tree/main/front/plugins/_publisher_email/) | ▶️ | Email notifications | | | | `SNMPDSC` | [snmp_discovery](https://github.com/jokob-sk/NetAlertX/tree/main/front/plugins/snmp_discovery/) | 🔍/📥 | SNMP device import & sync | | | -| `SYNC` | [sync](https://github.com/jokob-sk/NetAlertX/tree/main/front/plugins/sync/) | 🔍/⚙/📥 | Sync & import from NetAlertX instances | 🖧 🔄 | Yes | +| `SYNC` | [sync](https://github.com/jokob-sk/NetAlertX/tree/main/front/plugins/sync/) | 🔍/⚙/📥 | Sync & import from NetAlertX instances | 🖧 🔄 | Yes | | `TELEGRAM` | [_publisher_telegram](https://github.com/jokob-sk/NetAlertX/tree/main/front/plugins/_publisher_telegram/) | ▶️ | Telegram notifications | | | | `UI` | [ui_settings](https://github.com/jokob-sk/NetAlertX/tree/main/front/plugins/ui_settings/) | ♻ | UI specific settings | | Yes | | `UNFIMP` | [unifi_import](https://github.com/jokob-sk/NetAlertX/tree/main/front/plugins/unifi_import/) | 🔍/📥/🆎 | UniFi device import & sync | 🖧 | | +| `UNIFIAPI` | [unifi_api_import](https://github.com/jokob-sk/NetAlertX/tree/main/front/plugins/unifi_api_import/) | 🔍/📥/🆎 | UniFi device import (SM API, multi-site) | | | | `VNDRPDT` | [vendor_update](https://github.com/jokob-sk/NetAlertX/tree/main/front/plugins/vendor_update/) | ⚙ | Vendor database update | | | | `WEBHOOK` | [_publisher_webhook](https://github.com/jokob-sk/NetAlertX/tree/main/front/plugins/_publisher_webhook/) | ▶️ | Webhook notifications | | | | `WEBMON` | [website_monitor](https://github.com/jokob-sk/NetAlertX/tree/main/front/plugins/website_monitor/) | ♻ | Website down monitoring | | | diff --git a/front/js/settings_utils.js b/front/js/settings_utils.js index 8d604d78..c0056a66 100755 --- a/front/js/settings_utils.js +++ b/front/js/settings_utils.js @@ -314,16 +314,14 @@ function addViaPopupForm(element) { console.log(`toId | curValue: ${toId} | ${curValue}`); showModalPopupForm( - ` ${getString( - "Gen_Update_Value" - )}`, // title - getString("settings_update_item_warning"), // message - getString("Gen_Cancel"), // btnCancel - getString("Gen_Add"), // btnOK - null, // curValue - popupFormJson, // popupform - toId, // parentSettingKey - element // triggeredBy + ` ${getString("Gen_Add")}`, // title + "", // message + 
getString("Gen_Cancel"), // btnCancel + getString("Gen_Add"), // btnOK + null, // curValue + popupFormJson, // popupform + toId, // parentSettingKey + element // triggeredBy ); // flag something changes to prevent navigating from page @@ -470,16 +468,14 @@ function initListInteractionOptions(element) { const popupFormJson = parsed.find(obj => "popupForm" in obj)?.popupForm ?? null; showModalPopupForm( - ` ${getString( - "Gen_Update_Value" - )}`, // title - getString("settings_update_item_warning"), // message - getString("Gen_Cancel"), // btnCancel - getString("Gen_Update"), // btnOK - curValue, // curValue - popupFormJson, // popupform - toId, // parentSettingKey - this // triggeredBy + ` ${getString("Gen_Update_Value")}`, // title + "", // message + getString("Gen_Cancel"), // btnCancel + getString("Gen_Update"), // btnOK + curValue, // curValue + popupFormJson, // popupform + toId, // parentSettingKey + this // triggeredBy ); } else { // Fallback to normal field input diff --git a/front/plugins/sync/README.md b/front/plugins/sync/README.md index f0d73dbc..fb6f51c1 100755 --- a/front/plugins/sync/README.md +++ b/front/plugins/sync/README.md @@ -115,7 +115,7 @@ Initially, I had one virtual machine (VM) with 6 network cards, one for each VLA 2. Set the schedule (5 minutes works for me). 3. **API Token**: Use any string, but it must match the clients (e.g., `abc123`). 4. **Encryption Key**: Use any string, but it must match the clients (e.g., `abc123`). -5. Under **Nodes**, add the full URL for each client, e.g., `http://192.168.1.20.20211/`. +5. Under **Nodes**, add the full URL for each client, e.g., `http://192.168.1.20.20212/`, where the port `20212` is the value of the `GRAPHQL_PORT` setting of the given node (client) 6. **Node Name**: Leave blank. 7. Check **Sync Devices**. diff --git a/front/plugins/sync/config.json b/front/plugins/sync/config.json index 0f97305c..5e216264 100755 --- a/front/plugins/sync/config.json +++ b/front/plugins/sync/config.json @@ -245,7 +245,7 @@ "description": [ { "language_code": "en_us", - "string": "If specified, the hub will pull Devices data from the listed nodes. The API_TOKEN and SYNC_encryption_key must be set to the same value across the hub and all the nodes to ensure proper authentication and communication." + "string": "If specified, the hub will pull Devices data from the listed nodes. The API_TOKEN and SYNC_encryption_key must be set to the same value across the hub and all the nodes to ensure proper authentication and communication. Add full host URL and use the value of the GRAPHQL_PORT setting of the target, as the port." } ] }, @@ -271,7 +271,7 @@ "description": [ { "language_code": "en_us", - "string": "The URL of the hub (target instance). Set on the Node. Without a trailig slash, for example http://192.168.1.82:20211" + "string": "The URL of the hub (target instance) with the targets GRAPHQL_PORT set as port. Set on the Node. 
Without a trailig slash, for example http://192.168.1.82:20212" } ] }, diff --git a/front/plugins/sync/sync.py b/front/plugins/sync/sync.py index 00197b37..02fe054a 100755 --- a/front/plugins/sync/sync.py +++ b/front/plugins/sync/sync.py @@ -265,66 +265,81 @@ def main(): return 0 +# ------------------------------------------------------------------ +# Data retrieval methods +api_endpoints = [ + f"/sync", # New Python-based endpoint + f"/plugins/sync/hub.php" # Legacy PHP endpoint +] # send data to the HUB def send_data(api_token, file_content, encryption_key, file_path, node_name, pref, hub_url): - # Encrypt the log data using the encryption_key + """Send encrypted data to HUB, preferring /sync endpoint and falling back to PHP version.""" encrypted_data = encrypt_data(file_content, encryption_key) - mylog('verbose', [f'[{pluginName}] Sending encrypted_data: "{encrypted_data}"']) - # Prepare the data payload for the POST request data = { 'data': encrypted_data, 'file_path': file_path, 'plugin': pref, 'node_name': node_name } - # Set the authorization header with the API token headers = {'Authorization': f'Bearer {api_token}'} - api_endpoint = f"{hub_url}/plugins/sync/hub.php" - response = requests.post(api_endpoint, data=data, headers=headers) - mylog('verbose', [f'[{pluginName}] response: "{response}"']) + for endpoint in api_endpoints: + + final_endpoint = hub_url + endpoint + + try: + response = requests.post(final_endpoint, data=data, headers=headers, timeout=5) + mylog('verbose', [f'[{pluginName}] Tried endpoint: {final_endpoint}, status: {response.status_code}']) + + if response.status_code == 200: + message = f'[{pluginName}] Data for "{file_path}" sent successfully via {final_endpoint}' + mylog('verbose', [message]) + write_notification(message, 'info', timeNowTZ()) + return True + + except requests.RequestException as e: + mylog('verbose', [f'[{pluginName}] Error calling {final_endpoint}: {e}']) + + # If all endpoints fail + message = f'[{pluginName}] Failed to send data for "{file_path}" via all endpoints' + mylog('verbose', [message]) + write_notification(message, 'alert', timeNowTZ()) + return False + - if response.status_code == 200: - message = f'[{pluginName}] Data for "{file_path}" sent successfully' - mylog('verbose', [message]) - write_notification(message, 'info', timeNowTZ()) - else: - message = f'[{pluginName}] Failed to send data for "{file_path}" (Status code: {response.status_code})' - mylog('verbose', [message]) - write_notification(message, 'alert', timeNowTZ()) - # get data from the nodes to the HUB def get_data(api_token, node_url): + """Get data from NODE, preferring /sync endpoint and falling back to PHP version.""" mylog('verbose', [f'[{pluginName}] Getting data from node: "{node_url}"']) - - # Set the authorization header with the API token headers = {'Authorization': f'Bearer {api_token}'} - api_endpoint = f"{node_url}/plugins/sync/hub.php" - response = requests.get(api_endpoint, headers=headers) - # mylog('verbose', [f'[{pluginName}] response: "{response.text}"']) + for endpoint in api_endpoints: + + final_endpoint = node_url + endpoint - if response.status_code == 200: try: - # Parse JSON response - response_json = response.json() - - return response_json + response = requests.get(final_endpoint, headers=headers, timeout=5) + mylog('verbose', [f'[{pluginName}] Tried endpoint: {final_endpoint}, status: {response.status_code}']) - except json.JSONDecodeError: - message = f'[{pluginName}] Failed to parse JSON response from "{node_url}"' - mylog('verbose', 
[message]) - write_notification(message, 'alert', timeNowTZ()) - return "" + if response.status_code == 200: + try: + return response.json() + except json.JSONDecodeError: + message = f'[{pluginName}] Failed to parse JSON from {final_endpoint}' + mylog('verbose', [message]) + write_notification(message, 'alert', timeNowTZ()) + return "" + except requests.RequestException as e: + mylog('verbose', [f'[{pluginName}] Error calling {final_endpoint}: {e}']) - else: - message = f'[{pluginName}] Failed to send data for "{node_url}" (Status code: {response.status_code})' - mylog('verbose', [message]) - write_notification(message, 'alert', timeNowTZ()) - return "" + # If all endpoints fail + message = f'[{pluginName}] Failed to get data from "{node_url}" via all endpoints' + mylog('verbose', [message]) + write_notification(message, 'alert', timeNowTZ()) + return "" diff --git a/server/api_server/api_server_start.py b/server/api_server/api_server_start.py index 7502e399..2a6d0f09 100755 --- a/server/api_server/api_server_start.py +++ b/server/api_server/api_server_start.py @@ -1,9 +1,9 @@ import threading from flask import Flask, request, jsonify, Response from flask_cors import CORS -from .graphql_schema import devicesSchema -from .prometheus_metrics import getMetricStats -from graphene import Schema +from .graphql_endpoint import devicesSchema +from .prometheus_endpoint import getMetricStats +from .sync_endpoint import handle_sync_post, handle_sync_get import sys # Register NetAlertX directories @@ -57,21 +57,43 @@ def metrics(): # Check for API token in headers if not is_authorized(): - msg = '[metrics] Unauthorized access attempt - make sure your GRAPHQL_PORT and API_TOKEN settings are correct.' - mylog('verbose', [msg]) - return jsonify({"error": msg}), 401 + return jsonify({"error": "Forbidden"}), 403 # Return Prometheus metrics as plain text return Response(getMetricStats(), mimetype="text/plain") +# -------------------------- +# SYNC endpoint +# -------------------------- +@app.route("/sync", methods=["GET", "POST"]) +def sync_endpoint(): + if not is_authorized(): + return jsonify({"error": "Forbidden"}), 403 + + if request.method == "GET": + return handle_sync_get() + elif request.method == "POST": + return handle_sync_post() + else: + msg = "[sync endpoint] Method Not Allowed" + write_notification(msg, "alert") + mylog("verbose", [msg]) + return jsonify({"error": "Method Not Allowed"}), 405 # -------------------------- # Background Server Start # -------------------------- def is_authorized(): token = request.headers.get("Authorization") - return token == f"Bearer {get_setting_value('API_TOKEN')}" + is_authorized = token == f"Bearer {get_setting_value('API_TOKEN')}" + + if not is_authorized: + msg = f"[api] Unauthorized access attempt - make sure your GRAPHQL_PORT and API_TOKEN settings are correct." 
+ write_notification(msg, "alert") + mylog("verbose", [msg]) + + return is_authorized def start_server(graphql_port, app_state): @@ -79,7 +101,7 @@ def start_server(graphql_port, app_state): if app_state.graphQLServerStarted == 0: - mylog('verbose', [f'[graphql_server] Starting on port: {graphql_port}']) + mylog('verbose', [f'[graphql endpoint] Starting on port: {graphql_port}']) # Start Flask app in a separate thread thread = threading.Thread( diff --git a/server/api_server/graphql_schema.py b/server/api_server/graphql_endpoint.py similarity index 100% rename from server/api_server/graphql_schema.py rename to server/api_server/graphql_endpoint.py diff --git a/server/api_server/prometheus_metrics.py b/server/api_server/prometheus_endpoint.py similarity index 100% rename from server/api_server/prometheus_metrics.py rename to server/api_server/prometheus_endpoint.py diff --git a/server/api_server/sync_endpoint.py b/server/api_server/sync_endpoint.py new file mode 100755 index 00000000..66e8f0f1 --- /dev/null +++ b/server/api_server/sync_endpoint.py @@ -0,0 +1,71 @@ +import os +import base64 +from flask import jsonify, request +from logger import mylog +from helper import get_setting_value, timeNowTZ +from messaging.in_app import write_notification + +INSTALL_PATH = "/app" + +def handle_sync_get(): + """Handle GET requests for SYNC (NODE → HUB).""" + file_path = INSTALL_PATH + "/api/table_devices.json" + + try: + with open(file_path, "rb") as f: + raw_data = f.read() + except FileNotFoundError: + msg = f"[Plugin: SYNC] Data file not found: {file_path}" + write_notification(msg, "alert", timeNowTZ()) + mylog("verbose", [msg]) + return jsonify({"error": msg}), 500 + + response_data = base64.b64encode(raw_data).decode("utf-8") + + write_notification("[Plugin: SYNC] Data sent", "info", timeNowTZ()) + return jsonify({ + "node_name": get_setting_value("SYNC_node_name"), + "status": 200, + "message": "OK", + "data_base64": response_data, + "timestamp": timeNowTZ() + }), 200 + + +def handle_sync_post(): + """Handle POST requests for SYNC (HUB receiving from NODE).""" + data = request.form.get("data", "") + node_name = request.form.get("node_name", "") + plugin = request.form.get("plugin", "") + + storage_path = INSTALL_PATH + "/log/plugins" + os.makedirs(storage_path, exist_ok=True) + + encoded_files = [ + f for f in os.listdir(storage_path) + if f.startswith(f"last_result.{plugin}.encoded.{node_name}") + ] + decoded_files = [ + f for f in os.listdir(storage_path) + if f.startswith(f"last_result.{plugin}.decoded.{node_name}") + ] + file_count = len(encoded_files + decoded_files) + 1 + + file_path_new = os.path.join( + storage_path, + f"last_result.{plugin}.encoded.{node_name}.{file_count}.log" + ) + + try: + with open(file_path_new, "w") as f: + f.write(data) + except Exception as e: + msg = f"[Plugin: SYNC] Failed to store data: {e}" + write_notification(msg, "alert", timeNowTZ()) + mylog("verbose", [msg]) + return jsonify({"error": msg}), 500 + + msg = f"[Plugin: SYNC] Data received ({file_path_new})" + write_notification(msg, "info", timeNowTZ()) + mylog("verbose", [msg]) + return jsonify({"message": "Data received and stored successfully"}), 200 diff --git a/server/messaging/in_app.py b/server/messaging/in_app.py index 4ad99871..29e727ad 100755 --- a/server/messaging/in_app.py +++ b/server/messaging/in_app.py @@ -24,43 +24,46 @@ from helper import generate_mac_links, removeDuplicateNewLines, timeNowTZ, get_f NOTIFICATION_API_FILE = apiPath + 'user_notifications.json' # Show Frontend User 
Notification -def write_notification(content, level, timestamp): +def write_notification(content, level='alert', timestamp=None): - # Generate GUID - guid = str(uuid.uuid4()) + if timestamp is None: + timestamp = timeNowTZ() - # Prepare notification dictionary - notification = { - 'timestamp': str(timestamp), - 'guid': guid, - 'read': 0, - 'level': level, - 'content': content - } + # Generate GUID + guid = str(uuid.uuid4()) - # If file exists, load existing data, otherwise initialize as empty list - if os.path.exists(NOTIFICATION_API_FILE): - with open(NOTIFICATION_API_FILE, 'r') as file: - # Check if the file object is of type _io.TextIOWrapper - if isinstance(file, _io.TextIOWrapper): - file_contents = file.read() # Read file contents - if file_contents == '': - file_contents = '[]' # If file is empty, initialize as empty list + # Prepare notification dictionary + notification = { + 'timestamp': str(timestamp), + 'guid': guid, + 'read': 0, + 'level': level, + 'content': content + } - # mylog('debug', ['[Notification] User Notifications file: ', file_contents]) - notifications = json.loads(file_contents) # Parse JSON data - else: - mylog('none', '[Notification] File is not of type _io.TextIOWrapper') - notifications = [] - else: - notifications = [] + # If file exists, load existing data, otherwise initialize as empty list + if os.path.exists(NOTIFICATION_API_FILE): + with open(NOTIFICATION_API_FILE, 'r') as file: + # Check if the file object is of type _io.TextIOWrapper + if isinstance(file, _io.TextIOWrapper): + file_contents = file.read() # Read file contents + if file_contents == '': + file_contents = '[]' # If file is empty, initialize as empty list - # Append new notification - notifications.append(notification) + # mylog('debug', ['[Notification] User Notifications file: ', file_contents]) + notifications = json.loads(file_contents) # Parse JSON data + else: + mylog('none', '[Notification] File is not of type _io.TextIOWrapper') + notifications = [] + else: + notifications = [] - # Write updated data back to file - with open(NOTIFICATION_API_FILE, 'w') as file: - json.dump(notifications, file, indent=4) + # Append new notification + notifications.append(notification) + + # Write updated data back to file + with open(NOTIFICATION_API_FILE, 'w') as file: + json.dump(notifications, file, indent=4) # Trim notifications def remove_old(keepNumberOfEntries):
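Reviewer note: the sketch below shows how a hub could pull device data from a node through the new `/sync` endpoint, with the same fallback to the legacy PHP endpoint that the refactored `get_data()` uses. It is a minimal illustration, not the plugin's code: `NODE_URL` and `API_TOKEN` are placeholder values, and the Base64 decode step is shown only to demonstrate the `data_base64` field that `handle_sync_get()` returns (the raw contents of `table_devices.json`).

```python
import base64
import json

import requests

# Hypothetical values - replace with the node's URL (GRAPHQL_PORT as the port)
# and the shared API_TOKEN.
NODE_URL = "http://192.168.1.82:20212"
API_TOKEN = "abc123"

# Same preference order as the refactored sync plugin:
# new Python endpoint first, legacy PHP endpoint second.
API_ENDPOINTS = ["/sync", "/plugins/sync/hub.php"]


def pull_devices(node_url: str, api_token: str):
    """Return the node's device data (parsed JSON) or None if every endpoint fails."""
    headers = {"Authorization": f"Bearer {api_token}"}

    for endpoint in API_ENDPOINTS:
        url = node_url + endpoint
        try:
            response = requests.get(url, headers=headers, timeout=5)
        except requests.RequestException as exc:
            print(f"Error calling {url}: {exc}")
            continue

        if response.status_code != 200:
            print(f"{url} returned status {response.status_code}")
            continue

        payload = response.json()
        # The new endpoint wraps the data: handle_sync_get() returns the raw
        # table_devices.json Base64-encoded in the 'data_base64' field.
        if "data_base64" in payload:
            return json.loads(base64.b64decode(payload["data_base64"]))
        return payload  # legacy endpoint response, returned as-is


if __name__ == "__main__":
    devices = pull_devices(NODE_URL, API_TOKEN)
    print("No data" if devices is None else f"Pulled {len(devices)} top-level entries")
```

Both endpoints expect the shared `API_TOKEN` as a Bearer header; with this patch an unauthorized request to `/sync` is rejected with HTTP 403 and a user notification is written via `write_notification()`.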
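The reverse direction, a node pushing to the hub, is a form POST that `handle_sync_post()` accepts. The sketch below keeps the payload as plain Base64 so it stays self-contained; the real plugin encrypts `file_content` with `encrypt_data()` and the shared `SYNC_encryption_key` before sending. `HUB_URL`, `API_TOKEN` and the node name are placeholders.

```python
import base64

import requests

# Hypothetical values - replace with the hub URL (GRAPHQL_PORT as the port)
# and the shared API_TOKEN.
HUB_URL = "http://192.168.1.82:20212"
API_TOKEN = "abc123"


def push_devices(hub_url: str, api_token: str, file_content: bytes) -> bool:
    """POST a payload to the hub's /sync endpoint; True on HTTP 200."""
    # NOTE: the actual plugin sends encrypt_data(file_content, SYNC_encryption_key);
    # plain Base64 is used here only to keep the sketch runnable on its own.
    payload = base64.b64encode(file_content).decode("utf-8")

    data = {
        "data": payload,
        "file_path": "/app/api/table_devices.json",
        "plugin": "SYNC",
        "node_name": "node-01",  # hypothetical node name
    }
    headers = {"Authorization": f"Bearer {api_token}"}

    response = requests.post(f"{hub_url}/sync", data=data, headers=headers, timeout=5)
    return response.status_code == 200


if __name__ == "__main__":
    ok = push_devices(HUB_URL, API_TOKEN, b'{"devices": []}')
    print("Stored on hub" if ok else "Push failed")
```

On success the hub stores the received payload under `/app/log/plugins/` as `last_result.<plugin>.encoded.<node_name>.<n>.log`, incrementing `<n>` per existing file, exactly as `handle_sync_post()` does.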
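Finally, the `write_notification()` refactor makes `level` and `timestamp` optional (`'alert'` and `timeNowTZ()` by default), which is what allows the API server to log unauthorized access attempts with a two-argument call. A short usage sketch, assuming it runs inside the NetAlertX server process where these modules are importable:

```python
from helper import timeNowTZ
from messaging.in_app import write_notification

# Explicit arguments still work as before.
write_notification("[Plugin: SYNC] Data received", "info", timeNowTZ())

# After the refactor the trailing arguments may be omitted:
# level defaults to 'alert' and timestamp defaults to timeNowTZ().
write_notification("[api] Unauthorized access attempt")
```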