Plugins 0.1 - Basic definition

Jokob-sk
2023-02-05 13:02:38 +11:00
parent fd11cc30f5
commit 1cb5375a92
7 changed files with 643 additions and 1 deletion


@@ -314,7 +314,7 @@ def importConfig ():
 TIMEZONE = ccd('TIMEZONE', 'Europe/Berlin' , c_d, 'Time zone', 'text', '', 'General')
 PIALERT_WEB_PROTECTION = ccd('PIALERT_WEB_PROTECTION', False , c_d, 'Enable logon', 'boolean', '', 'General')
 PIALERT_WEB_PASSWORD = ccd('PIALERT_WEB_PASSWORD', '8d969eef6ecad3c29a3a629280e686cf0c3f5d5a86aff3ca12020c923adc6c92' , c_d, 'Logon password', 'readonly', '', 'General')
-INCLUDED_SECTIONS = ccd('INCLUDED_SECTIONS', ['internet', 'new_devices', 'down_devices', 'events', 'ports'] , c_d, 'Notify on', 'multiselect', "['internet', 'new_devices', 'down_devices', 'events', 'ports']", 'General')
+INCLUDED_SECTIONS = ccd('INCLUDED_SECTIONS', ['internet', 'new_devices', 'down_devices', 'events', 'ports'] , c_d, 'Notify on', 'multiselect', "['internet', 'new_devices', 'down_devices', 'events', 'ports', 'plugins']", 'General')
 SCAN_CYCLE_MINUTES = ccd('SCAN_CYCLE_MINUTES', 5 , c_d, 'Scan cycle delay (m)', 'integer', '', 'General')
 DAYS_TO_KEEP_EVENTS = ccd('DAYS_TO_KEEP_EVENTS', 90 , c_d, 'Delete events days', 'integer', '', 'General')
 REPORT_DASHBOARD_URL = ccd('REPORT_DASHBOARD_URL', 'http://pi.alert/' , c_d, 'PiAlert URL', 'text', '', 'General')
@@ -3042,6 +3042,38 @@ def upgradeDB ():
PRIMARY KEY("Index" AUTOINCREMENT) PRIMARY KEY("Index" AUTOINCREMENT)
); );
""") """)
# Plugin state
sql_Plugins_State = """ CREATE TABLE IF NOT EXISTS Plugins_State(
"Index" INTEGER,
Plugin TEXT NOT NULL,
Object_PrimaryID TEXT NOT NULL,
Object_SecondaryID TEXT NOT NULL,
DateTime TEXT NOT NULL,
Watched_Value1 TEXT NOT NULL,
Watched_Value2 TEXT NOT NULL,
Watched_Value3 TEXT NOT NULL,
Watched_Value4 TEXT NOT NULL,
Extra TEXT NOT NULL,
PRIMARY KEY("Index" AUTOINCREMENT)
); """
# sql.execute(sql_Plugins_State)
# Plugin execution results
sql_Plugin_Events = """ CREATE TABLE IF NOT EXISTS Plugins_Events(
"Index" INTEGER,
Plugin TEXT NOT NULL,
Object_PrimaryID TEXT NOT NULL,
Object_SecondaryID TEXT NOT NULL,
DateTime TEXT NOT NULL,
Watched_Value1 TEXT NOT NULL,
Watched_Value2 TEXT NOT NULL,
Watched_Value3 TEXT NOT NULL,
Watched_Value4 TEXT NOT NULL,
Processed TEXT NOT NULL,
PRIMARY KEY("Index" AUTOINCREMENT)
); """
# sql.execute(sql_Plugin_Events)
commitDB ()

front/plugins/README.md Executable file

@@ -0,0 +1,188 @@
## Overview
PiAlert comes with a simple plugin system to feed events from third-party scripts into the UI and then send notifications if desired.
If you wish to develop a plugin, please check the existing plugin structure.
## Plugin file structure overview
| File | Required | Description |
|----------------------|----------------------|----------------------|
| `config.json` | yes | Contains the plugin configuration including the settings available to the user. |
| `script.py` | yes | The Python script itself |
| `last_result.log` | yes | The file used to interface between PiAlert and the plugin (script). Should contain a set of testing data. |
| `script.log` | no | Logging output (recommended) |
| `README.md` | no | Any setup considerations or overview |
More on specific files below.
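For orientation, a plugin folder might look like the sketch below. This is an assumption based on this commit's `website_monitor` example (the folder name is taken from the plugin's `code_name` and sits under `front/plugins/`, next to this README):
```
front/plugins/
├── README.md
└── website_monitor/
    ├── config.json
    ├── script.py
    ├── last_result.log
    └── script.log
```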
### last_result.log
Used to interface between PiAlert and the plugin (script). After every scan it should contain only the results from the latest scan/execution.
- The format is a `csv`-like file with the pipe `|` separator. 8 (eight) values need to be supplied, so every line needs to contain 7 pipe separators. Empty values are represented by `null`.
- Don't include a header row for these "columns".
- Every scan result / event entry needs to be on a new line.
- The table below lists which "columns" need to be present in the script results and whether each value is required (a minimal script sketch for producing this file follows the examples).
- The order of these "columns" can't be changed.
| Order | Represented Column | Required | Description |
|----------------------|----------------------|----------------------|----------------------|
| 0 | `Object_PrimaryID` | yes | The primary ID that Events are grouped under. Should be UNIQUE within the last result set (i.e. within `last_result.log`) |
| 1 | `Object_SecondaryID` | no | Optional secondary ID used to create a relationship to other entities, such as a MAC address |
| 2 | `DateTime` | yes | When the event occurred, in the format `2023-01-02 15:56:30` |
| 3 | `Watched_Value1` | yes | A watched value; users can receive notifications if it changes compared to the previously saved entry. For example, an IP address |
| 4 | `Watched_Value2` | no | As above |
| 5 | `Watched_Value3` | no | As above |
| 6 | `Watched_Value4` | no | As above |
| 7 | `Extra` | no | Any other data you want to pass and display in PiAlert and the notifications |
#### Examples
Valid CSV:
```csv
https://www.google.com|null|2023-01-02 15:56:30|200|0.7898|null|null|null
https://www.duckduckgo.com|192.168.0.1|2023-01-02 15:56:30|200|0.9898|null|null|Best search engine
```
Invalid CSV with different errors on each line:
```csv
https://www.google.com|null|2023-01-02 15:56:30|200|0.7898||null|null
https://www.duckduckgo.com|null|2023-01-02 15:56:30|200|0.9898|null|null|Best search engine|
|https://www.duckduckgo.com|null|2023-01-02 15:56:30|200|0.9898|null|null|Best search engine
null|192.168.1.1|2023-01-02 15:56:30|200|0.9898|null|null|Best search engine
https://www.duckduckgo.com|192.168.1.1|2023-01-02 15:56:30|null|0.9898|null|null|Best search engine
https://www.google.com|null|2023-01-02 15:56:30|200|0.7898|||
https://www.google.com|null|2023-01-02 15:56:30|200|0.7898|
```
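The snippet below is a minimal sketch of how a plugin script could produce this file. The function name, the result dictionary keys and the output path are illustrative placeholders, not part of any PiAlert API:
```python
from datetime import datetime

def write_last_result(results, path="last_result.log"):
    """Write scan results in the expected 8-value, pipe-separated format."""
    lines = []
    for r in results:
        # 'r' is assumed to be a dict filled by the plugin's own scan logic
        values = [
            r.get("primary_id"),    # Object_PrimaryID   (required)
            r.get("secondary_id"),  # Object_SecondaryID
            datetime.now().strftime("%Y-%m-%d %H:%M:%S"),  # DateTime (required)
            r.get("watched1"),      # Watched_Value1     (required)
            r.get("watched2"),      # Watched_Value2
            r.get("watched3"),      # Watched_Value3
            r.get("watched4"),      # Watched_Value4
            r.get("extra"),         # Extra
        ]
        # Empty values have to be written as the literal string "null"
        lines.append("|".join("null" if v in (None, "") else str(v) for v in values))
    # Overwrite the file so it only ever holds the latest scan results
    with open(path, "w") as f:
        f.write("\n".join(lines) + "\n")
```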
### config.json
#### Supported settings types
- `RUN`
- `RUN_SCHD`
- `API_SQL`
- `TIMEOUT`
- `NOTIFY_ON`
- `ARGS`
#### Example
```json
{
"code_name": "website_monitor",
"settings_short_prefix": "WEBMON",
"display_name" : "Website monitor",
"font_awesome_icon_classses": "fa-solid fa-globe",
"description": {
"en_us" : "This plugin is to monitor status changes of different services or websites."
},
"database_column_aliases":{
"Plugins_Events":{
"Index":{
"en_us" : "Index"
},
"Object_PrimaryID":{
"en_us" : "Monitored URL"
},
"DateTime":{
"en_us" : "Checked on"
},
"Watched_Value1":{
"en_us" : "Status code"
},
"Watched_Value2":{
"en_us" : "Latency"
}
}
},
"settings":[
{
"type": "RUN",
"default_value":"none",
"options": ["none","once","schedule"],
"name" : {
"en_us" : "Schedule"
},
"description":
{
"en_us" : "Enable a regular scan of your services. If you select <code>schedule</code> the scheduling settings from below are applied. If you select <code>once</code> the scan is run only once on start of the application (container) for the time specified in <a href=\"#WEBMON_TIMEOUT\"><code>WEBMON_TIMEOUT</code> setting</a>."
}
},
{
"type": "RUN_SCHD",
"default_value":"0 2 * * *",
"name" : {
"en_us" : "Schedule"
},
"description":
{
"en_us" : "Only enabled if you select <code>schedule</code> in the <a href=\"#WEBMON_RUN\"><code>WEBMON_RUN</code> setting</a>. Make sure you enter the schedule in the correct cron-like format (e.g. validate at <a href=\"https://crontab.guru/\" target=\"_blank\">crontab.guru</a>). For example entering <code>0 4 * * *</code> will run the scan after 4 am in the <a onclick=\"toggleAllSettings()\" href=\"#TIMEZONE\"><code>TIMEZONE</code> you set above</a>. Will be run NEXT time the time passes."
}
},
{
"type": "API_SQL",
"default_value":"SELECT * FROM plugin_website_monitor",
"name" : {
"en_us" : "API endpoint"
},
"description":
{
"en_us" : "You can specify a custom SQL query which will generate a JSON file and then expose it via the <a href=\"/api/plugin_website_monitor.json\" target=\"_blank\"><code>plugin_website_monitor.json</code> file endpoint</a>."
}
},
{
"type": "TIMEOUT",
"default_value":5,
"name" : {
"en_us" : "Run timeout"
},
"description":
{
"en_us" : "Maximum time in seconds to wait for a Website monitor check to finish for any url."
}
},
{
"type": "NOTIFY_ON",
"default_value":["Watched_Value1"],
"options": ["Watched_Value1","Watched_Value2","Watched_Value3","Watched_Value4"],
"name" : {
"en_us" : "Notify on"
},
"description":
{
"en_us" : "Send a notification if selected values change. Use <code>CTRL + Click</code> to select/deselect. <ul> <li><code>Watched_Value1</code> is response status code (e.g.: 200, 404)</li><li><code>Watched_Value2</code> is Latency (not recommended)</li><li><code>Watched_Value3</code> unused </li><li><code>Watched_Value4</code> unused </li></ul>"
}
},
{
"type": "ARGS",
"default_value":"",
"name" : {
"en_us" : "Run timeout"
},
"description":
{
"en_us" : "Change the <a href=\"https://linux.die.net/man/1/dig\" target=\"_blank\">dig utility</a> arguments if you have issues resolving your Internet IP. Arguments are added at the end of the following command: <code>dig +short </code>."
}
}
]
}
```
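Note how the descriptions above link to anchors such as `#WEBMON_RUN` and `#WEBMON_TIMEOUT`: the user-facing setting key appears to be the plugin's `settings_short_prefix` joined to the setting `type`. A minimal sketch of that assumption:
```python
# Assumption inferred from the anchors used in the descriptions above,
# not a documented API: setting key = settings_short_prefix + "_" + type
settings_short_prefix = "WEBMON"
for setting_type in ["RUN", "RUN_SCHD", "API_SQL", "TIMEOUT", "NOTIFY_ON", "ARGS"]:
    print(f"{settings_short_prefix}_{setting_type}")  # WEBMON_RUN, WEBMON_RUN_SCHD, ...
```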

@@ -0,0 +1,104 @@
{
"code_name": "website_monitor",
"settings_short_prefix": "WEBMON",
"display_name" : "Website monitor",
"font_awesome_icon_classses": "fa-solid fa-globe",
"description": {
"en_us" : "This plugin is to monitor status changes of different services or websites."
},
"database_column_aliases":{
"Plugins_Events":{
"Index":{
"en_us" : "Index"
},
"Object_PrimaryID":{
"en_us" : "Monitored URL"
},
"DateTime":{
"en_us" : "Checked on"
},
"Watched_Value1":{
"en_us" : "Status code"
},
"Watched_Value2":{
"en_us" : "Latency"
}
}
},
"settings":[
{
"type": "RUN",
"default_value":"none",
"options": ["none","once","schedule"],
"name" : {
"en_us" : "Schedule"
},
"description":
{
"en_us" : "Enable a regular scan of your services. If you select <code>schedule</code> the scheduling settings from below are applied. If you select <code>once</code> the scan is run only once on start of the application (container) for the time specified in <a href=\"#WEBMON_TIMEOUT\"><code>WEBMON_TIMEOUT</code> setting</a>."
}
},
{
"type": "RUN_SCHD",
"default_value":"0 2 * * *",
"name" : {
"en_us" : "Schedule"
},
"description":
{
"en_us" : "Only enabled if you select <code>schedule</code> in the <a href=\"#WEBMON_RUN\"><code>WEBMON_RUN</code> setting</a>. Make sure you enter the schedule in the correct cron-like format (e.g. validate at <a href=\"https://crontab.guru/\" target=\"_blank\">crontab.guru</a>). For example entering <code>0 4 * * *</code> will run the scan after 4 am in the <a onclick=\"toggleAllSettings()\" href=\"#TIMEZONE\"><code>TIMEZONE</code> you set above</a>. Will be run NEXT time the time passes."
}
},
{
"type": "API_SQL",
"default_value":"SELECT * FROM plugin_website_monitor",
"name" : {
"en_us" : "API endpoint"
},
"description":
{
"en_us" : "You can specify a custom SQL query which will generate a JSON file and then expose it via the <a href=\"/api/plugin_website_monitor.json\" target=\"_blank\"><code>plugin_website_monitor.json</code> file endpoint</a>."
}
},
{
"type": "TIMEOUT",
"default_value":5,
"name" : {
"en_us" : "Run timeout"
},
"description":
{
"en_us" : "Maximum time in seconds to wait for a Website monitor check to finish for any url."
}
},
{
"type": "NOTIFY_ON",
"default_value":["Watched_Value1"],
"options": ["Watched_Value1","Watched_Value2","Watched_Value3","Watched_Value4"],
"name" : {
"en_us" : "Notify on"
},
"description":
{
"en_us" : "Send a notification if selected values change. Use <code>CTRL + Click</code> to select/deselect. <ul> <li><code>Watched_Value1</code> is response status code (e.g.: 200, 404)</li><li><code>Watched_Value2</code> is Latency (not recommended)</li><li><code>Watched_Value3</code> unused </li><li><code>Watched_Value4</code> unused </li></ul>"
}
},
{
"type": "ARGS",
"default_value":"",
"name" : {
"en_us" : "Run timeout"
},
"description":
{
"en_us" : "Change the <a href=\"https://linux.die.net/man/1/dig\" target=\"_blank\">dig utility</a> arguments if you have issues resolving your Internet IP. Arguments are added at the end of the following command: <code>dig +short </code>."
}
}
]
}

@@ -0,0 +1,2 @@
https://www.google.com|null|2023-01-02 15:56:30|404|0.7898|null|null|null
https://www.duckduckgo.com|192.168.0.1|2023-01-02 15:56:30|200|0.9898|null|null|Best search engine

@@ -0,0 +1,316 @@
#!/usr/bin/env python
from __future__ import unicode_literals
from time import sleep, time, strftime
import requests
import io
#import smtplib
import sys
#from smtp_config import sender, password, receivers, host, port
from requests.packages.urllib3.exceptions import InsecureRequestWarning
import sqlite3
import pwd
import os
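# The prototype keeps its own standalone SQLite database ("monitoring.db",
# resolved relative to the current working directory).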
con = sqlite3.connect("monitoring.db")
cur = con.cursor()
#DELAY = 60 # Delay between site queries
EMAIL_INTERVAL = 1800 # Delay between alert emails
last_email_time = {} # Monitored sites and timestamp of last alert sent
# Message template for alert
MESSAGE = """From: {sender}
To: {receivers}
Subject: Monitor Service Notification
You are being notified that {site} is experiencing a {status} status!
"""
# Workflow
def main():
global cur
global con
prepare_service_monitoring_env()
service_monitoring()
print_service_monitoring_changes()
# prepare_service_monitoring_notification()
# -----------------------------------------------------------------------------
def prepare_service_monitoring_env ():
global con
global cur
sql_create_table = """ CREATE TABLE IF NOT EXISTS Services_Events(
moneve_URL TEXT NOT NULL,
moneve_DateTime TEXT NOT NULL,
moneve_StatusCode NUMERIC NOT NULL,
moneve_Latency TEXT NOT NULL
); """
cur.execute(sql_create_table)
sql_create_table = """ CREATE TABLE IF NOT EXISTS Services_CurrentScan(
cur_URL TEXT NOT NULL,
cur_DateTime TEXT NOT NULL,
cur_StatusCode NUMERIC NOT NULL,
cur_Latency TEXT NOT NULL,
cur_AlertEvents INTEGER DEFAULT 0,
cur_AlertDown INTEGER DEFAULT 0,
cur_StatusChanged INTEGER DEFAULT 0,
cur_LatencyChanged INTEGER DEFAULT 0
); """
cur.execute(sql_create_table)
sql_create_table = """ CREATE TABLE IF NOT EXISTS Services(
mon_URL TEXT NOT NULL,
mon_MAC TEXT,
mon_LastStatus NUMERIC NOT NULL,
mon_LastLatency TEXT NOT NULL,
mon_LastScan TEXT NOT NULL,
mon_Tags TEXT,
mon_AlertEvents INTEGER DEFAULT 0,
mon_AlertDown INTEGER DEFAULT 0,
PRIMARY KEY(mon_URL)
); """
cur.execute(sql_create_table)
# Update Service with lastLatency, lastScan and lastStatus
# -----------------------------------------------------------------------------
def set_service_update(_mon_URL, _mon_lastScan, _mon_lastStatus, _mon_lastLatence,):
global con
global cur
sqlite_insert = """UPDATE Services SET mon_LastScan=?, mon_LastStatus=?, mon_LastLatency=? WHERE mon_URL=?;"""
table_data = (_mon_lastScan, _mon_lastStatus, _mon_lastLatence, _mon_URL)
cur.execute(sqlite_insert, table_data)
con.commit()
# Insert Services_Events with moneve_URL, moneve_DateTime, moneve_StatusCode and moneve_Latency
# -----------------------------------------------------------------------------
def set_services_events(_moneve_URL, _moneve_DateTime, _moneve_StatusCode, _moneve_Latency):
global con
global cur
sqlite_insert = """INSERT INTO Services_Events
(moneve_URL, moneve_DateTime, moneve_StatusCode, moneve_Latency)
VALUES (?, ?, ?, ?);"""
table_data = (_moneve_URL, _moneve_DateTime, _moneve_StatusCode, _moneve_Latency)
cur.execute(sqlite_insert, table_data)
con.commit()
# Insert Services_CurrentScan with cur_URL, cur_DateTime, cur_StatusCode and cur_Latency (change flags derived from the last known state in Services)
# -----------------------------------------------------------------------------
def set_services_current_scan(_cur_URL, _cur_DateTime, _cur_StatusCode, _cur_Latency):
global con
global cur
cur.execute("SELECT * FROM Services WHERE mon_URL = ?", [_cur_URL])
rows = cur.fetchall()
for row in rows:
_mon_AlertEvents = row[6]
_mon_AlertDown = row[7]
_mon_StatusCode = row[2]
_mon_Latency = row[3]
if _mon_StatusCode != _cur_StatusCode:
_cur_StatusChanged = 1
else:
_cur_StatusChanged = 0
if _mon_Latency == "99999" and _mon_Latency != _cur_Latency:
_cur_LatencyChanged = 1
elif _cur_Latency == "99999" and _mon_Latency != _cur_Latency:
_cur_LatencyChanged = 1
else:
_cur_LatencyChanged = 0
sqlite_insert = """INSERT INTO Services_CurrentScan
(cur_URL, cur_DateTime, cur_StatusCode, cur_Latency, cur_AlertEvents, cur_AlertDown, cur_StatusChanged, cur_LatencyChanged)
VALUES (?, ?, ?, ?, ?, ?, ?, ?);"""
table_data = (_cur_URL, _cur_DateTime, _cur_StatusCode, _cur_Latency, _mon_AlertEvents, _mon_AlertDown, _cur_StatusChanged, _cur_LatencyChanged)
cur.execute(sqlite_insert, table_data)
con.commit()
# -----------------------------------------------------------------------------
def service_monitoring_log(site, status, latency):
# global monitor_logfile
# Log status message to log file
with open('monitor.log', 'a') as monitor_logfile:
monitor_logfile.write("{} | {} | {} | {}\n".format(strftime("%Y-%m-%d %H:%M:%S"),
site,
status,
latency,
)
)
# -----------------------------------------------------------------------------
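# NOTE: send_alert depends on smtplib and on sender/password/receivers/host/port
# from the smtp_config import, both commented out above; it is not called in the
# current prototype flow.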
def send_alert(site, status):
"""If more than EMAIL_INTERVAL seconds since last email, resend email"""
if (time() - last_email_time[site]) > EMAIL_INTERVAL:
try:
smtpObj = smtplib.SMTP(host, port) # Set up SMTP object
smtpObj.starttls()
smtpObj.login(sender, password)
smtpObj.sendmail(sender,
receivers,
MESSAGE.format(sender=sender,
receivers=", ".join(receivers),
site=site,
status=status
)
)
last_email_time[site] = time() # Update time of last email
print("Successfully sent email")
except smtplib.SMTPException:
print("Error sending email ({}:{})".format(host, port))
# -----------------------------------------------------------------------------
def check_services_health(site):
# Enable self signed SSL
requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
"""Send GET request to input site and return status code"""
try:
resp = requests.get(site, verify=False, timeout=10)
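        # resp.elapsed is a datetime.timedelta; str() renders it as "H:MM:SS.ffffff",
        # so the third ':'-separated field below is the seconds component.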
latency = resp.elapsed
latency_str = str(latency)
latency_str_seconds = latency_str.split(":")
format_latency_str = latency_str_seconds[2]
if format_latency_str[0] == "0" and format_latency_str[1] != "." :
format_latency_str = format_latency_str[1:]
return resp.status_code, format_latency_str
    except requests.exceptions.SSLError:
        # An SSL error still means the check failed; report the site as unreachable
        return 503, "99999"
    except:
        latency = "99999"
        return 503, latency
# ----------------------------------------------------------------------------- Duplicate
def get_username():
return pwd.getpwuid(os.getuid())[0]
# -----------------------------------------------------------------------------
def get_services_list():
global cur
global con
with open('monitor.log', 'a') as monitor_logfile:
monitor_logfile.write("... Get Services List\n")
monitor_logfile.close()
cur.execute("SELECT mon_URL FROM Services")
rows = cur.fetchall()
sites = []
for row in rows:
sites.append(row[0])
return sites
# -----------------------------------------------------------------------------
def flush_services_current_scan():
global cur
global con
with open('monitor.log', 'a') as monitor_logfile:
monitor_logfile.write("... Flush previous scan results\n")
monitor_logfile.close()
cur.execute("DELETE FROM Services_CurrentScan")
con.commit()
# -----------------------------------------------------------------------------
def print_service_monitoring_changes():
global cur
global con
print("Services Monitoring Changes")
changedStatusCode = cur.execute("SELECT COUNT() FROM Services_CurrentScan WHERE cur_StatusChanged = 1").fetchone()[0]
print("... Changed StatusCodes: ", str(changedStatusCode))
changedLatency = cur.execute("SELECT COUNT() FROM Services_CurrentScan WHERE cur_LatencyChanged = 1").fetchone()[0]
print("... Changed Reachability: ", str(changedLatency))
with open('monitor.log', 'a') as monitor_logfile:
monitor_logfile.write("\nServices Monitoring Changes:\n")
monitor_logfile.write("... Changed StatusCodes: " + str(changedStatusCode))
monitor_logfile.write("\n... Changed Reachability: " + str(changedLatency))
monitor_logfile.write("\n")
monitor_logfile.close()
# -----------------------------------------------------------------------------
# def prepare_service_monitoring_notification():
# global cur
# global con
# -----------------------------------------------------------------------------
def service_monitoring():
global cur
global con
# Empty Log and write new header
print("Prepare Services Monitoring")
print("... Prepare Logfile")
with open('monitor.log', 'w') as monitor_logfile:
monitor_logfile.write("Pi.Alert [Prototype]:\n---------------------------------------------------------\n")
monitor_logfile.write("Current User: %s \n\n" % get_username())
monitor_logfile.write("Monitor Web-Services\n")
monitor_logfile.write("Timestamp: " + strftime("%Y-%m-%d %H:%M:%S") + "\n")
monitor_logfile.close()
print("... Get Services List")
sites = get_services_list()
print("... Flush previous scan results")
flush_services_current_scan()
print("Start Services Monitoring")
with open('monitor.log', 'a') as monitor_logfile:
monitor_logfile.write("\nStart Services Monitoring\n\n| Timestamp | URL | StatusCode | ResponseTime |\n-----------------------------------------------\n")
monitor_logfile.close()
for site in sites:
last_email_time[site] = 0 # Initialize timestamp as 0
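    # while/else: the body ends with 'break', so it runs at most once; the
    # 'else' branch only executes when there are no sites to monitor at all.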
while sites:
for site in sites:
status,latency = check_services_health(site)
scantime = strftime("%Y-%m-%d %H:%M:%S")
# Debugging
# print("{} - {} STATUS: {} ResponseTime: {}".format(strftime("%Y-%m-%d %H:%M:%S"),
# site,
# status,
# latency)
# )
# Write Logfile
service_monitoring_log(site, status, latency)
# Insert Services_Events with moneve_URL, moneve_DateTime, moneve_StatusCode and moneve_Latency
set_services_events(site, scantime, status, latency)
# Insert Services_CurrentScan with cur_URL, cur_DateTime, cur_StatusCode and cur_Latency
set_services_current_scan(site, scantime, status, latency)
sys.stdout.flush()
# Update Service with lastLatency, lastScan and lastStatus after comparing with Services_CurrentScan
set_service_update(site, scantime, status, latency)
break
else:
with open('monitor.log', 'a') as monitor_logfile:
monitor_logfile.write("\n\nNo site(s) to monitor!")
monitor_logfile.close()
#===============================================================================
# BEGIN
#===============================================================================
if __name__ == '__main__':
sys.exit(main())