Frontend user events rewrite v0.1

This commit is contained in:
Jokob-sk
2023-10-25 08:11:57 +11:00
parent e434a686c6
commit 0ed24dac0a
9 changed files with 175 additions and 169 deletions

View File

@@ -729,35 +729,66 @@ class plugin_object_class:
#===============================================================================
#-------------------------------------------------------------------------------
def check_and_run_user_event(db, pluginsState):
    """Drain the front-end execution queue and dispatch each queued event.

    Reads logPath/execution_queue.log, where the front-end appends one event
    per line as '|'-separated columns; columns at indices 2 and 3 hold the
    event name and its parameter.  Supported events are 'test' and 'run',
    dispatched to handle_test / handle_run.  The queue file is truncated
    once all lines have been processed.

    Args:
        db:           database wrapper, passed through to the handlers
        pluginsState: current plugins state object

    Returns:
        The (possibly updated by the handlers) pluginsState.
    """
    # Check if the log file exists; nothing queued if the front-end
    # has not created it yet
    logFile = os.path.join(logPath, "execution_queue.log")

    if not os.path.exists(logFile):
        return pluginsState

    with open(logFile, "r") as file:
        lines = file.readlines()

    for line in lines:
        # Split the line by '|', and take the third and fourth columns
        # (indices 2 and 3): event name and its parameter
        columns = line.strip().split('|')[2:4]

        # Skip malformed lines rather than aborting the whole queue
        if len(columns) != 2:
            continue

        event, param = columns

        if event == 'test':
            pluginsState = handle_test(param, db, pluginsState)
        if event == 'run':
            pluginsState = handle_run(param, db, pluginsState)

    # Clear the log file so processed events are not re-run next cycle
    open(logFile, "w").close()

    return pluginsState
#-------------------------------------------------------------------------------
def handle_run(runType, db, pluginsState):

View File

@@ -1,8 +0,0 @@
""" Publishers for Pi.Alert """
"""
each publisher exposes:
- check_config () returning True / False
- send (message) returning True / False
"""

View File

@@ -1,111 +0,0 @@
import json
import subprocess
import hashlib
import hmac
import conf
from const import logPath
from helper import noti_obj, write_file
from logger import logResult, mylog
#-------------------------------------------------------------------------------
def check_config():
    """Return True when the webhook publisher is configured (WEBHOOK_URL is set)."""
    if conf.WEBHOOK_URL != '':
        return True

    # No target URL configured — log the misconfiguration and refuse to publish
    mylog('none', ['[Check Config] Error: Webhook service not set up correctly. Check your pialert.conf WEBHOOK_* variables.'])
    return False
#-------------------------------------------------------------------------------
def send (msg: noti_obj):
    """Deliver a notification to the configured webhook endpoint via curl.

    The request body is built from *msg* according to conf.WEBHOOK_PAYLOAD
    ('json', 'html' or 'text') and truncated to conf.WEBHOOK_SIZE bytes.
    The JSON wrapper is Slack-compatible so one payload shape works for
    Slack, Discord (via its /slack endpoint) and generic receivers.
    When conf.WEBHOOK_SECRET is set, the request carries an HMAC-SHA256
    signature header.

    Args:
        msg: noti_obj exposing .json, .html and .text renderings of the
             notification.
    """
    # limit = 1024 * 1024 # 1MB limit (1024 bytes * 1024 bytes = 1MB)
    limit = conf.WEBHOOK_SIZE

    # use data type based on specified payload type
    if conf.WEBHOOK_PAYLOAD == 'json':
        # truncate_json recursively traverses the JSON object and drops
        # nodes whose serialized size (json.dumps) exceeds the limit,
        # keeping only the nodes that fit.
        json_data = msg.json
        json_str = json.dumps(json_data)

        if len(json_str) <= limit:
            payloadData = json_data
        else:
            def truncate_json(obj):
                if isinstance(obj, dict):
                    return {
                        key: truncate_json(value)
                        for key, value in obj.items()
                        if len(json.dumps(value)) <= limit
                    }
                elif isinstance(obj, list):
                    return [
                        truncate_json(item)
                        for item in obj
                        if len(json.dumps(item)) <= limit
                    ]
                else:
                    return obj

            payloadData = truncate_json(json_data)

    elif conf.WEBHOOK_PAYLOAD == 'html':
        if len(msg.html) > limit:
            payloadData = msg.html[:limit] + " <h1>(text was truncated)</h1>"
        else:
            payloadData = msg.html

    else:
        # 'text', plus any unrecognized WEBHOOK_PAYLOAD value, falls back to
        # the plain-text body.  (Previously an unknown value left payloadData
        # unassigned and raised NameError when building _json_payload below.)
        if len(msg.text) > limit:
            payloadData = msg.text[:limit] + " (text was truncated)"
        else:
            payloadData = msg.text

    # Define slack-compatible payload
    _json_payload = { "text": payloadData } if conf.WEBHOOK_PAYLOAD == 'text' else {
        "username": "Pi.Alert",
        "text": "There are new notifications",
        "attachments": [{
            "title": "Pi.Alert Notifications",
            "title_link": conf.REPORT_DASHBOARD_URL,
            "text": payloadData
        }]
    }

    # DEBUG - Write the json payload into a log file for debugging
    write_file (logPath + '/webhook_payload.json', json.dumps(_json_payload))

    # Using the Slack-Compatible Webhook endpoint for Discord so that the same payload can be used for both
    # Consider: curl has the ability to load in data to POST from a file + piping
    if(conf.WEBHOOK_URL.startswith('https://discord.com/api/webhooks/') and not conf.WEBHOOK_URL.endswith("/slack")):
        _WEBHOOK_URL = f"{conf.WEBHOOK_URL}/slack"
        curlParams = ["curl","-i","-H", "Content-Type:application/json" ,"-d", json.dumps(_json_payload), _WEBHOOK_URL]
    else:
        _WEBHOOK_URL = conf.WEBHOOK_URL
        curlParams = ["curl","-i","-X", conf.WEBHOOK_REQUEST_METHOD , "-H", "Content-Type:application/json", "-d", json.dumps(_json_payload), _WEBHOOK_URL]

    # Add HMAC signature if configured
    if(conf.WEBHOOK_SECRET != ''):
        # NOTE(review): the signature is computed over the compact
        # (separators=(',', ':')) serialization while the request body uses
        # default json.dumps spacing — the receiver must re-compact before
        # verifying; confirm this matches the receiver's expectation.
        h = hmac.new(conf.WEBHOOK_SECRET.encode("UTF-8"), json.dumps(_json_payload, separators=(',', ':')).encode(), hashlib.sha256).hexdigest()
        curlParams.insert(4,"-H")
        curlParams.insert(5,f"X-Webhook-Signature: sha256={h}")

    try:
        # Execute CURL call
        mylog('debug', ['[send_webhook] curlParams: ', curlParams])
        result = subprocess.run(curlParams, capture_output=True, text=True)

        stdout = result.stdout
        stderr = result.stderr

        # Write stdout and stderr into .log files for debugging if needed
        mylog('debug', ['[send_webhook] stdout: ', stdout])
        mylog('debug', ['[send_webhook] stderr: ', stderr])

        # logResult(stdout, stderr) # TO-DO should be changed to mylog
    except subprocess.CalledProcessError as e:
        # NOTE(review): subprocess.run() without check=True never raises
        # CalledProcessError, so this handler is effectively dead; consider
        # catching OSError (e.g. curl binary missing) instead.
        # An error occurred, handle it
        mylog('none', ['[send_webhook] Error: ', e.output])

View File

@@ -24,10 +24,6 @@ from const import pialertPath, logPath, apiPath
from helper import noti_obj, generate_mac_links, removeDuplicateNewLines, timeNowTZ, hide_email, updateState, get_file_content, write_file
from logger import logResult, mylog, print_log
from publishers.webhook import (check_config as webhook_check_config,
send as send_webhook)
#===============================================================================
# REPORTING
@@ -178,7 +174,7 @@ def get_notifications (db):
notiStruc = construct_notifications(db, sqlQuery, "New devices")
# collect "new_devices" for the webhook json
# collect "new_devices" for the json
json_new_devices = notiStruc.json["data"]
mail_text = mail_text.replace ('<NEW_DEVICES_TABLE>', notiStruc.text + '\n')
@@ -194,7 +190,7 @@ def get_notifications (db):
notiStruc = construct_notifications(db, sqlQuery, "Down devices")
# collect "down_devices" for the webhook json
# collect "down_devices" for the json
json_down_devices = notiStruc.json["data"]
mail_text = mail_text.replace ('<DOWN_DEVICES_TABLE>', notiStruc.text + '\n')
@@ -211,7 +207,7 @@ def get_notifications (db):
notiStruc = construct_notifications(db, sqlQuery, "Events")
# collect "events" for the webhook json
# collect "events" for the json
json_events = notiStruc.json["data"]
mail_text = mail_text.replace ('<EVENTS_TABLE>', notiStruc.text + '\n')
@@ -224,7 +220,7 @@ def get_notifications (db):
notiStruc = construct_notifications(db, sqlQuery, "Plugins")
# collect "plugins" for the webhook json
# collect "plugins" for the json
json_plugins = notiStruc.json["data"]
mail_text = mail_text.replace ('<PLUGINS_TABLE>', notiStruc.text + '\n')