sync plugin endpoint refactor

This commit is contained in:
jokob-sk
2025-08-14 14:28:10 +10:00
parent f33ef9861b
commit 840bfe32d2
10 changed files with 207 additions and 99 deletions

View File

@@ -79,10 +79,11 @@ Device-detecting plugins insert values into the `CurrentScan` database table. T
| `SETPWD` | [set_password](https://github.com/jokob-sk/NetAlertX/tree/main/front/plugins/set_password/) | ⚙ | Set password | | Yes |
| `SMTP` | [_publisher_email](https://github.com/jokob-sk/NetAlertX/tree/main/front/plugins/_publisher_email/) | ▶️ | Email notifications | | |
| `SNMPDSC` | [snmp_discovery](https://github.com/jokob-sk/NetAlertX/tree/main/front/plugins/snmp_discovery/) | 🔍/📥 | SNMP device import & sync | | |
| `SYNC` | [sync](https://github.com/jokob-sk/NetAlertX/tree/main/front/plugins/sync/) | 🔍/⚙/📥 | Sync & import from NetAlertX instances | 🖧 🔄 | Yes |
| `TELEGRAM` | [_publisher_telegram](https://github.com/jokob-sk/NetAlertX/tree/main/front/plugins/_publisher_telegram/) | ▶️ | Telegram notifications | | |
| `UI` | [ui_settings](https://github.com/jokob-sk/NetAlertX/tree/main/front/plugins/ui_settings/) | ♻ | UI specific settings | | Yes |
| `UNFIMP` | [unifi_import](https://github.com/jokob-sk/NetAlertX/tree/main/front/plugins/unifi_import/) | 🔍/📥/🆎 | UniFi device import & sync | 🖧 | |
| `UNIFIAPI` | [unifi_api_import](https://github.com/jokob-sk/NetAlertX/tree/main/front/plugins/unifi_api_import/) | 🔍/📥/🆎 | UniFi device import (SM API, multi-site) | | |
| `VNDRPDT` | [vendor_update](https://github.com/jokob-sk/NetAlertX/tree/main/front/plugins/vendor_update/) | ⚙ | Vendor database update | | |
| `WEBHOOK` | [_publisher_webhook](https://github.com/jokob-sk/NetAlertX/tree/main/front/plugins/_publisher_webhook/) | ▶️ | Webhook notifications | | |
| `WEBMON` | [website_monitor](https://github.com/jokob-sk/NetAlertX/tree/main/front/plugins/website_monitor/) | ♻ | Website down monitoring | | |

View File

@@ -314,16 +314,14 @@ function addViaPopupForm(element) {
  console.log(`toId | curValue: ${toId} | ${curValue}`);

  showModalPopupForm(
    `<i class="fa-solid fa-square-plus"></i> ${getString("Gen_Add")}`, // title
    "", // message
    getString("Gen_Cancel"), // btnCancel
    getString("Gen_Add"), // btnOK
    null, // curValue
    popupFormJson, // popupform
    toId, // parentSettingKey
    element // triggeredBy
  );

  // flag something changes to prevent navigating from page
@@ -470,16 +468,14 @@ function initListInteractionOptions(element) {
  const popupFormJson = parsed.find(obj => "popupForm" in obj)?.popupForm ?? null;

  showModalPopupForm(
    `<i class="fa fa-pen-to-square"></i> ${getString("Gen_Update_Value")}`, // title
    "", // message
    getString("Gen_Cancel"), // btnCancel
    getString("Gen_Update"), // btnOK
    curValue, // curValue
    popupFormJson, // popupform
    toId, // parentSettingKey
    this // triggeredBy
  );
} else {
  // Fallback to normal field input

View File

@@ -115,7 +115,7 @@ Initially, I had one virtual machine (VM) with 6 network cards, one for each VLA
2. Set the schedule (5 minutes works for me).
3. **API Token**: Use any string, but it must match the clients (e.g., `abc123`).
4. **Encryption Key**: Use any string, but it must match the clients (e.g., `abc123`).
5. Under **Nodes**, add the full URL for each client, e.g., `http://192.168.1.20:20212/`, where the port `20212` is the value of the `GRAPHQL_PORT` setting of the given node (client); a quick reachability check is sketched after this list.
6. **Node Name**: Leave blank.
7. Check **Sync Devices**.
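
Before adding a node under **Nodes**, it can help to confirm the node's sync endpoint is reachable on its `GRAPHQL_PORT`. The following is a minimal sketch assuming the example values above (host `192.168.1.20`, port `20212`, token `abc123`); adjust them to your own deployment.

import requests

node_host = "192.168.1.20"
graphql_port = 20212                 # the node's GRAPHQL_PORT setting
api_token = "abc123"                 # must match API_TOKEN on hub and nodes

node_url = f"http://{node_host}:{graphql_port}"
headers = {"Authorization": f"Bearer {api_token}"}

try:
    # /sync is the new Python endpoint introduced in this commit;
    # a 200 response means port and token are configured correctly.
    resp = requests.get(f"{node_url}/sync", headers=headers, timeout=5)
    print(f"{node_url}/sync -> {resp.status_code}")
except requests.RequestException as exc:
    print(f"Node not reachable: {exc}")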

View File

@@ -245,7 +245,7 @@
"description": [ "description": [
{ {
"language_code": "en_us", "language_code": "en_us",
"string": "If specified, the hub will pull Devices data from the listed nodes. The <code>API_TOKEN</code> and <code>SYNC_encryption_key</code> must be set to the same value across the hub and all the nodes to ensure proper authentication and communication." "string": "If specified, the hub will pull Devices data from the listed nodes. The <code>API_TOKEN</code> and <code>SYNC_encryption_key</code> must be set to the same value across the hub and all the nodes to ensure proper authentication and communication. Add full host URL and use the value of the <code>GRAPHQL_PORT</code> setting of the target, as the port."
} }
] ]
}, },
@@ -271,7 +271,7 @@
"description": [ "description": [
{ {
"language_code": "en_us", "language_code": "en_us",
"string": "The URL of the hub (target instance). Set on the Node. Without a trailig slash, for example <code>http://192.168.1.82:20211</code>" "string": "The URL of the hub (target instance) with the targets <code>GRAPHQL_PORT</code> set as port. Set on the Node. Without a trailig slash, for example <code>http://192.168.1.82:20212</code>"
} }
] ]
}, },
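
To make the two descriptions above concrete, here is a hedged example of what the resulting values might look like. The setting keys shown are illustrative (use the names presented in the Settings UI); the important part is the URL shape: full host plus the target's `GRAPHQL_PORT`, with no trailing slash on the hub URL.

# Illustrative values only - set on the hub:
SYNC_nodes = ['http://192.168.1.20:20212', 'http://192.168.1.30:20212']

# Illustrative value only - set on each node:
SYNC_hub_url = 'http://192.168.1.82:20212'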

View File

@@ -265,66 +265,81 @@ def main():
    return 0

# ------------------------------------------------------------------
# Data retrieval methods

api_endpoints = [
    f"/sync",                   # New Python-based endpoint
    f"/plugins/sync/hub.php"    # Legacy PHP endpoint
]

# send data to the HUB
def send_data(api_token, file_content, encryption_key, file_path, node_name, pref, hub_url):
    """Send encrypted data to HUB, preferring /sync endpoint and falling back to PHP version."""
    encrypted_data = encrypt_data(file_content, encryption_key)

    mylog('verbose', [f'[{pluginName}] Sending encrypted_data: "{encrypted_data}"'])

    data = {
        'data': encrypted_data,
        'file_path': file_path,
        'plugin': pref,
        'node_name': node_name
    }

    headers = {'Authorization': f'Bearer {api_token}'}

    for endpoint in api_endpoints:
        final_endpoint = hub_url + endpoint
        try:
            response = requests.post(final_endpoint, data=data, headers=headers, timeout=5)
            mylog('verbose', [f'[{pluginName}] Tried endpoint: {final_endpoint}, status: {response.status_code}'])

            if response.status_code == 200:
                message = f'[{pluginName}] Data for "{file_path}" sent successfully via {final_endpoint}'
                mylog('verbose', [message])
                write_notification(message, 'info', timeNowTZ())
                return True
        except requests.RequestException as e:
            mylog('verbose', [f'[{pluginName}] Error calling {final_endpoint}: {e}'])

    # If all endpoints fail
    message = f'[{pluginName}] Failed to send data for "{file_path}" via all endpoints'
    mylog('verbose', [message])
    write_notification(message, 'alert', timeNowTZ())
    return False


# get data from the nodes to the HUB
def get_data(api_token, node_url):
    """Get data from NODE, preferring /sync endpoint and falling back to PHP version."""
    mylog('verbose', [f'[{pluginName}] Getting data from node: "{node_url}"'])

    headers = {'Authorization': f'Bearer {api_token}'}

    for endpoint in api_endpoints:
        final_endpoint = node_url + endpoint
        try:
            response = requests.get(final_endpoint, headers=headers, timeout=5)
            mylog('verbose', [f'[{pluginName}] Tried endpoint: {final_endpoint}, status: {response.status_code}'])

            if response.status_code == 200:
                try:
                    return response.json()
                except json.JSONDecodeError:
                    message = f'[{pluginName}] Failed to parse JSON from {final_endpoint}'
                    mylog('verbose', [message])
                    write_notification(message, 'alert', timeNowTZ())
                    return ""
        except requests.RequestException as e:
            mylog('verbose', [f'[{pluginName}] Error calling {final_endpoint}: {e}'])

    # If all endpoints fail
    message = f'[{pluginName}] Failed to get data from "{node_url}" via all endpoints'
    mylog('verbose', [message])
    write_notification(message, 'alert', timeNowTZ())
    return ""

View File

@@ -1,9 +1,9 @@
import threading
from flask import Flask, request, jsonify, Response
from flask_cors import CORS
from .graphql_endpoint import devicesSchema
from .prometheus_endpoint import getMetricStats
from .sync_endpoint import handle_sync_post, handle_sync_get
import sys

# Register NetAlertX directories
@@ -57,21 +57,43 @@ def metrics():
    # Check for API token in headers
    if not is_authorized():
        return jsonify({"error": "Forbidden"}), 403

    # Return Prometheus metrics as plain text
    return Response(getMetricStats(), mimetype="text/plain")


# --------------------------
# SYNC endpoint
# --------------------------
@app.route("/sync", methods=["GET", "POST"])
def sync_endpoint():
    if not is_authorized():
        return jsonify({"error": "Forbidden"}), 403

    if request.method == "GET":
        return handle_sync_get()
    elif request.method == "POST":
        return handle_sync_post()
    else:
        msg = "[sync endpoint] Method Not Allowed"
        write_notification(msg, "alert")
        mylog("verbose", [msg])
        return jsonify({"error": "Method Not Allowed"}), 405


# --------------------------
# Background Server Start
# --------------------------
def is_authorized():
    token = request.headers.get("Authorization")
    is_authorized = token == f"Bearer {get_setting_value('API_TOKEN')}"

    if not is_authorized:
        msg = f"[api] Unauthorized access attempt - make sure your GRAPHQL_PORT and API_TOKEN settings are correct."
        write_notification(msg, "alert")
        mylog("verbose", [msg])

    return is_authorized


def start_server(graphql_port, app_state):
@@ -79,7 +101,7 @@ def start_server(graphql_port, app_state):
    if app_state.graphQLServerStarted == 0:

        mylog('verbose', [f'[graphql endpoint] Starting on port: {graphql_port}'])

        # Start Flask app in a separate thread
        thread = threading.Thread(
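
A hedged smoke test of the new `/sync` route and the refactored `is_authorized()` helper, using Flask's test client; the import path of `app` is an assumption and should be adjusted to the actual module layout.

from api_server import app            # assumed import path
from helper import get_setting_value

client = app.test_client()

# No token -> is_authorized() fails and the route returns 403
assert client.get("/sync").status_code == 403

# Valid token -> GET is dispatched to handle_sync_get()
headers = {"Authorization": f"Bearer {get_setting_value('API_TOKEN')}"}
resp = client.get("/sync", headers=headers)
print(resp.status_code, resp.json)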

View File

@@ -0,0 +1,71 @@
import os
import base64
from flask import jsonify, request

from logger import mylog
from helper import get_setting_value, timeNowTZ
from messaging.in_app import write_notification

INSTALL_PATH = "/app"


def handle_sync_get():
    """Handle GET requests for SYNC (NODE → HUB)."""
    file_path = INSTALL_PATH + "/api/table_devices.json"

    try:
        with open(file_path, "rb") as f:
            raw_data = f.read()
    except FileNotFoundError:
        msg = f"[Plugin: SYNC] Data file not found: {file_path}"
        write_notification(msg, "alert", timeNowTZ())
        mylog("verbose", [msg])
        return jsonify({"error": msg}), 500

    response_data = base64.b64encode(raw_data).decode("utf-8")

    write_notification("[Plugin: SYNC] Data sent", "info", timeNowTZ())

    return jsonify({
        "node_name": get_setting_value("SYNC_node_name"),
        "status": 200,
        "message": "OK",
        "data_base64": response_data,
        "timestamp": timeNowTZ()
    }), 200


def handle_sync_post():
    """Handle POST requests for SYNC (HUB receiving from NODE)."""
    data = request.form.get("data", "")
    node_name = request.form.get("node_name", "")
    plugin = request.form.get("plugin", "")

    storage_path = INSTALL_PATH + "/log/plugins"
    os.makedirs(storage_path, exist_ok=True)

    encoded_files = [
        f for f in os.listdir(storage_path)
        if f.startswith(f"last_result.{plugin}.encoded.{node_name}")
    ]
    decoded_files = [
        f for f in os.listdir(storage_path)
        if f.startswith(f"last_result.{plugin}.decoded.{node_name}")
    ]
    file_count = len(encoded_files + decoded_files) + 1

    file_path_new = os.path.join(
        storage_path,
        f"last_result.{plugin}.encoded.{node_name}.{file_count}.log"
    )

    try:
        with open(file_path_new, "w") as f:
            f.write(data)
    except Exception as e:
        msg = f"[Plugin: SYNC] Failed to store data: {e}"
        write_notification(msg, "alert", timeNowTZ())
        mylog("verbose", [msg])
        return jsonify({"error": msg}), 500

    msg = f"[Plugin: SYNC] Data received ({file_path_new})"
    write_notification(msg, "info", timeNowTZ())
    mylog("verbose", [msg])

    return jsonify({"message": "Data received and stored successfully"}), 200

View File

@@ -24,43 +24,46 @@ from helper import generate_mac_links, removeDuplicateNewLines, timeNowTZ, get_f
NOTIFICATION_API_FILE = apiPath + 'user_notifications.json'

# Show Frontend User Notification
def write_notification(content, level='alert', timestamp=None):

    if timestamp is None:
        timestamp = timeNowTZ()

    # Generate GUID
    guid = str(uuid.uuid4())

    # Prepare notification dictionary
    notification = {
        'timestamp': str(timestamp),
        'guid': guid,
        'read': 0,
        'level': level,
        'content': content
    }

    # If file exists, load existing data, otherwise initialize as empty list
    if os.path.exists(NOTIFICATION_API_FILE):
        with open(NOTIFICATION_API_FILE, 'r') as file:
            # Check if the file object is of type _io.TextIOWrapper
            if isinstance(file, _io.TextIOWrapper):
                file_contents = file.read()  # Read file contents
                if file_contents == '':
                    file_contents = '[]'  # If file is empty, initialize as empty list
                # mylog('debug', ['[Notification] User Notifications file: ', file_contents])
                notifications = json.loads(file_contents)  # Parse JSON data
            else:
                mylog('none', '[Notification] File is not of type _io.TextIOWrapper')
                notifications = []
    else:
        notifications = []

    # Append new notification
    notifications.append(notification)

    # Write updated data back to file
    with open(NOTIFICATION_API_FILE, 'w') as file:
        json.dump(notifications, file, indent=4)


# Trim notifications
def remove_old(keepNumberOfEntries):
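
A brief usage note on the relaxed signature, as used by the sync endpoint code above; `level` and `timestamp` now default to `'alert'` and `timeNowTZ()` respectively.

write_notification("[Plugin: SYNC] Data received")           # level='alert', timestamp=now
write_notification("[Plugin: SYNC] Data sent", "info")       # explicit level, timestamp=now
write_notification("Custom event", "info", timeNowTZ())      # fully explicit (pre-refactor style)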