Optimizing API updates #193

This commit is contained in:
Jokob-sk
2023-03-19 12:22:25 +11:00
parent 7024cd22de
commit 032b787b66


@@ -95,6 +95,7 @@ log_timestamp = time_started
lastTimeImported = 0
sql_connection = None
#-------------------------------------------------------------------------------
def timeNow():
    return datetime.datetime.now().replace(microsecond=0)
@@ -3273,12 +3274,13 @@ def to_binary_sensor(input):
# API
#===============================================================================
def update_api(isNotification = False, updateOnlyDataSources = []):
    mylog('verbose', [' [API] Updating files in /front/api'])

    folder = pialertPath + '/front/api/'

    if isNotification:
        # Update last notification alert in all formats
        mylog('verbose', [' [API] Updating notification_* files in /front/api'])
        write_file(folder + 'notification_text.txt' , mail_text)
        write_file(folder + 'notification_text.html' , mail_html)
        write_file(folder + 'notification_json_final.json' , json.dumps(json_final))
@@ -3286,7 +3288,7 @@ def update_api(isNotification = False, updateOnlyDataSources = []):

    # Save plugins
    write_file(folder + 'plugins.json' , json.dumps({"data" : plugins}))

    # prepare database tables we want to expose
    dataSourcesSQLs = [
        ["devices", sql_devices_all],
        ["nmap_scan", sql_nmap_scan_all],
@@ -3305,9 +3307,57 @@ def update_api(isNotification = False, updateOnlyDataSources = []):

        if updateOnlyDataSources == [] or dsSQL[0] in updateOnlyDataSources:
            api_endpoint_class(dsSQL[1], folder + 'table_' + dsSQL[0] + '.json')
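The loop above only refreshes the table_<name>.json endpoints that were requested: an empty updateOnlyDataSources list means every data source, otherwise only the named ones are regenerated through the api_endpoint_class defined below. A rough, standalone sketch of that filtering pattern (the query string and helper callables here are illustrative placeholders, not the project's actual ones):

import json

def refresh_endpoints(data_sources, fetch_json, write_file, only_sources=None):
    """Regenerate table_<name>.json files, optionally limited to a subset of sources."""
    for name, sql in data_sources:
        # an empty/None filter mirrors updateOnlyDataSources == [] (update everything)
        if not only_sources or name in only_sources:
            data = fetch_json(sql)                        # run the query, get JSON-serializable rows
            write_file('table_' + name + '.json', json.dumps(data))

# Example wiring with stand-in callables:
# refresh_endpoints([("devices", "SELECT * FROM Devices")],
#                   fetch_json=lambda q: {"data": []},
#                   write_file=lambda path, s: print("would write", path))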
#-------------------------------------------------------------------------------
apiEndpoints = []

class api_endpoint_class:
    def __init__(self, sql, path):

        global apiEndpoints

        self.sql = sql
        self.jsonData = get_table_as_json(sql).json
        self.path = path
        self.fileName = path.split('/')[-1]
        self.hash = hash(json.dumps(self.jsonData))

        # check if the endpoint needs to be updated
        found = False
        changed = False
        changedIndex = -1
        index = 0

        # search previous endpoint states to check if API needs updating
        for endpoint in apiEndpoints:
            # match sql and API endpoint path
            if endpoint.sql == self.sql and endpoint.path == self.path:
                found = True
                if endpoint.hash != self.hash:
                    changed = True
                    changedIndex = index
            index = index + 1

        # check if API endpoints have changed or if it's a new one
        if not found or changed:
            mylog('verbose', [f' [API] Updating {self.fileName} file in /front/api'])
            write_file(self.path, json.dumps(self.jsonData))

            if not found:
                apiEndpoints.append(self)
            elif changed and changedIndex != -1 and changedIndex < len(apiEndpoints):
                # update hash
                apiEndpoints[changedIndex].hash = self.hash
            else:
                mylog('info', [f' [API] ERROR Updating {self.fileName}'])
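The class keeps a module-level apiEndpoints list of previously generated endpoints and only rewrites a file when the hash of its serialized JSON changes, so unchanged tables cost no disk writes. A minimal sketch of that change-detection idea in isolation (hypothetical names, and a plain dict standing in for the list of endpoint objects):

import json

_endpoint_hashes = {}    # (sql, path) -> hash of the last payload written

def write_if_changed(sql, path, data, write_file):
    """Write the endpoint file only when its content hash differs from the cached one."""
    payload = json.dumps(data)
    new_hash = hash(payload)
    key = (sql, path)

    if _endpoint_hashes.get(key) == new_hash:
        return False                      # unchanged, skip the write

    write_file(path, payload)             # caller-supplied writer
    _endpoint_hashes[key] = new_hash      # remember the state for the next run
    return True

Keying the cache on (sql, path) avoids the linear scan over apiEndpoints, at the cost of diverging slightly from the original list-of-objects structure.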
#-------------------------------------------------------------------------------
def get_table_as_json(sqlQuery):
@@ -3327,8 +3377,7 @@ def get_table_as_json(sqlQuery):
#-------------------------------------------------------------------------------
class json_struc:
    def __init__(self, jsn, columnNames):
        self.json = jsn
        self.columnNames = columnNames