mirror of https://github.com/jokob-sk/NetAlertX.git

Merge pull request #249 from jokob-sk/split-up-work-2023-05-30

Split up work 2023 05 30
1 .gitignore (vendored)
@@ -2,6 +2,7 @@
 .DS_Store
 config/pialert.conf
 db/*
 db/pialert.db
 front/log/*
+front/plugins/**/*.log
 **/%40eaDir/
@@ -1,7 +1,8 @@
 FROM debian:bullseye-slim

 # default UID and GID
-ENV USER=pi USER_ID=1000 USER_GID=1000 TZ=Europe/London PORT=20211
+ENV USER=pi USER_ID=1000 USER_GID=1000 PORT=20211
+#TZ=Europe/London

 # Todo: figure out why using a workdir instead of full paths doesn't work
 # Todo: do we still need all these packages? I can already see sudo, which isn't needed
@@ -46,3 +47,8 @@ RUN rm /etc/nginx/sites-available/default \
 ENTRYPOINT ["tini", "--"]

 CMD ["/home/pi/pialert/dockerfiles/start.sh"]
+
+
+
+
+## command to build docker: DOCKER_BUILDKIT=1 docker build . --iidfile dockerID
@@ -51,13 +51,14 @@ The system continuously scans the network for, **New devices**, **New connection
 - Theme Selection (blue, red, green, yellow, black, purple) and Light/Dark-Mode Switch
 - DB maintenance, Backup, Restore tools and CSV Export / Import
 - Simple login Support
-- 🌟(Experimental) [Plugin system](https://github.com/jokob-sk/Pi.Alert/tree/main/front/plugins)
+- 🌟[Plugin system](https://github.com/jokob-sk/Pi.Alert/tree/main/front/plugins)
   - Create custom plugins with automatically generated settings and UI.
   - Monitor anything for changes
   - Check the [instructions](https://github.com/jokob-sk/Pi.Alert/tree/main/front/plugins) carefully if you are up for a challenge! Current plugins include:
     - Detecting Rogue DHCP servers via NMAP
     - Monitoring HTTP status changes of domains/URLs
     - Import devices from DHCP.leases files, a UniFi controller, or an SNMP-enabled router
     - Creation of dummy devices to visualize your [network map](https://github.com/jokob-sk/Pi.Alert/blob/main/docs/NETWORK_TREE.md)

 | ![Screen 1][screen1] | ![Screen 2][screen2] | ![Screen 5][screen5] |
 |----------------------|----------------------|----------------------|
@@ -86,7 +87,8 @@ The system continuously scans the network for, **New devices**, **New connection
 - [leiweibau](https://github.com/leiweibau/Pi.Alert): Dark mode (and much more)
 - [Macleykun](https://github.com/Macleykun): Help with Dockerfile clean-up
 - [Final-Hawk](https://github.com/Final-Hawk): Help with NTFY, styling and other fixes
 - [TeroRERO](https://github.com/terorero): Spanish translation
+- [Data-Monkey](https://github.com/Data-Monkey): Split-up of the python.py file and more
 - Please see the [Git contributors](https://github.com/jokob-sk/Pi.Alert/graphs/contributors) for a full list of people and their contributions to the project

 ## ☕ Support me
4368 back/pialert.py
File diff suppressed because it is too large
@@ -7,18 +7,21 @@ services:
     network_mode: "host"
     restart: unless-stopped
     volumes:
       - ${APP_DATA_LOCATION}/pialert2/config:/home/pi/pialert/config
       # - ${APP_DATA_LOCATION}/pialert_dev/config:/home/pi/pialert/config
       - ${APP_DATA_LOCATION}/pialert/config:/home/pi/pialert/config
       # - ${APP_DATA_LOCATION}/pialert/db/pialert.db:/home/pi/pialert/db/pialert.db
       - ${APP_DATA_LOCATION}/pialert2/db:/home/pi/pialert/db
       # - ${APP_DATA_LOCATION}/pialert_dev/db:/home/pi/pialert/db
       - ${APP_DATA_LOCATION}/pialert/db:/home/pi/pialert/db
       # (optional) useful for debugging if you have issues setting up the container
       - ${LOGS_LOCATION}:/home/pi/pialert/front/log
       # DELETE START anyone trying to use this file: comment out / delete BELOW lines, they are only for development purposes
       - ${APP_DATA_LOCATION}/pialert/dhcp_samples/dhcp1.leases:/mnt/dhcp1.leases
       - ${APP_DATA_LOCATION}/pialert/dhcp_samples/dhcp2.leases:/mnt/dhcp2.leases
       - ${DEV_LOCATION}/back/pialert.py:/home/pi/pialert/back/pialert.py
       - ${DEV_LOCATION}/back/report_template.html:/home/pi/pialert/back/report_template.html
       - ${DEV_LOCATION}/back/report_template_new_version.html:/home/pi/pialert/back/report_template_new_version.html
       - ${DEV_LOCATION}/back/report_template.txt:/home/pi/pialert/back/report_template.txt
       # - ${DEV_LOCATION}/back/pialert.py:/home/pi/pialert/back/pialert.py
       - ${DEV_LOCATION}/pialert:/home/pi/pialert/pialert
       # - ${DEV_LOCATION}/back/report_template.html:/home/pi/pialert/back/report_template.html
       # - ${DEV_LOCATION}/back/report_template_new_version.html:/home/pi/pialert/back/report_template_new_version.html
       # - ${DEV_LOCATION}/back/report_template.txt:/home/pi/pialert/back/report_template.txt
       - ${DEV_LOCATION}/pholus:/home/pi/pialert/pholus
       - ${DEV_LOCATION}/dockerfiles:/home/pi/pialert/dockerfiles
       - ${APP_DATA_LOCATION}/pialert/php.ini:/etc/php/7.4/fpm/php.ini
@@ -1,15 +1,15 @@
 #!/bin/sh
 /home/pi/pialert/dockerfiles/user-mapping.sh

-# if custom variables not set we do not need to do anything
-if [ -n "${TZ}" ]; then
-  FILECONF=/home/pi/pialert/config/pialert.conf
-  if [ -f "$FILECONF" ]; then
-    sed -ie "s|Europe/Berlin|${TZ}|g" /home/pi/pialert/config/pialert.conf
-  else
-    sed -ie "s|Europe/Berlin|${TZ}|g" /home/pi/pialert/back/pialert.conf_bak
-  fi
-fi
+# # if custom variables not set we do not need to do anything
+# if [ -n "${TZ}" ]; then
+#   FILECONF=/home/pi/pialert/config/pialert.conf
+#   if [ -f "$FILECONF" ]; then
+#     sed -ie "s|Europe/Berlin|${TZ}|g" /home/pi/pialert/config/pialert.conf
+#   else
+#     sed -ie "s|Europe/Berlin|${TZ}|g" /home/pi/pialert/back/pialert.conf_bak
+#   fi
+# fi

 if [ -n "${PORT}" ]; then
   sed -ie 's/listen 20211/listen '${PORT}'/g' /etc/nginx/sites-available/default
@@ -28,4 +28,6 @@ chmod -R a+rw /home/pi/pialert/config
 /etc/init.d/nginx start

 # cron -f
-python /home/pi/pialert/back/pialert.py
+#python /home/pi/pialert/back/pialert.py
+# echo "[DEBUG] DATA MONKEY VERSION ..."
+python /home/pi/pialert/pialert/
@@ -77,6 +77,16 @@ If you submit a PR please:
 4. New features code should ideally be re-usable for different purposes, not be for a very narrow use-case.
 5. New functionality should ideally be implemented via the Plugins system, if possible.

 Suggested test cases:

 - Blank setup with no DB or config
 - Existing DB / config
 - Sending a notification (e.g. delete a device and wait for a scan to run) and testing all notification gateways, especially:
   - Email, Apprise (e.g. via Telegram), webhook (e.g. via Discord), MQTT (e.g. via Home Assistant)
 - Saving settings
 - Test a couple of plugins
 - Check the Error log for anything unusual

 Some additional context:

 * Permanent settings/config is stored in the `pialert.conf` file
0 front/buildtimestamp.txt (executable file)
@@ -116,6 +116,7 @@ $lang['en_us'] = array(
 'Presence_CalHead_quarter' => 'quarter',
 'Presence_CalHead_month' => 'month',
 'Presence_CalHead_week' => 'week',
+'Presence_CalHead_day' => 'day',

 //////////////////////////////////////////////////////////////////
 // Events Page
@@ -690,4 +690,4 @@ $lang['es_es'] = array(
 'API_CUSTOM_SQL_description' => 'Puede especificar una consulta SQL personalizada que generará un archivo JSON y luego lo expondrá a través del <a href="/api/table_custom_endpoint.json" target="_blank">archivo <code>table_custom_endpoint.json</code></a>.',

 );
 ?>
@@ -45,7 +45,7 @@
         }],
         "description": [{
             "language_code":"en_us",
-            "string" : "When enabled, \"once\" is the preferred option. It runs at startup and after every save of the config here.<br> Changes will only show in the devices <b> after the next scan!</b>"
+            "string" : "When enabled, ONCE is the preferred option. It runs at startup and after every save of the config here.<br> Changes will only show in the devices <b> after the next scan!</b>"
         }]
     },
     {
@@ -214,4 +214,4 @@
         }]
     }
 ]
 }
@@ -234,7 +234,7 @@ function initializeCalendar () {
     header: {
       left   : 'prev,next today',
       center : 'title',
-      right  : 'timelineYear,timelineMonth,timelineWeek'
+      right  : 'timelineYear,timelineMonth,timelineWeek,timelineDay'
     },
     defaultView : 'timelineMonth',
     height      : 'auto',
@@ -286,6 +286,13 @@ function initializeCalendar () {
       buttonText      : '<?= lang('Presence_CalHead_week');?>',
       slotLabelFormat : 'D',
       slotDuration    : '24:00:01'
     },
+    timelineDay: {
+      type            : 'timeline',
+      duration        : { day: 1 },
+      buttonText      : '<?= lang('Presence_CalHead_day');?>',
+      slotLabelFormat : 'H',
+      slotDuration    : '00:30:00'
+    }
   },

@@ -305,6 +312,15 @@ function initializeCalendar () {

     if (date.format('YYYY-MM-DD') == moment().format('YYYY-MM-DD')) {
       cell.addClass('fc-today'); };

+    if ($('#calendar').fullCalendar('getView').name == 'timelineDay') {
+      cell.removeClass('fc-sat');
+      cell.removeClass('fc-sun');
+      cell.removeClass('fc-today');
+      if (date.format('YYYY-MM-DD HH') == moment().format('YYYY-MM-DD HH')) {
+        cell.addClass('fc-today');
+      }
+    };
   },

   resourceRender: function (resourceObj, labelTds, bodyTds) {
14 pialert/README.md (executable file)
@@ -0,0 +1,14 @@
# Pi.Alert all split into modules

I am trying to split this big original file into modules, which gives me some nice challenges to solve.
Since the original code is all in one file, the original author took quite a few shortcuts by defining lots of variables as globals.
These need to be changed now.

Here is the main structure:

| Module | Description |
|--------|-------------|
| pialert.py | The MAIN program of Pi.Alert |
| const.py | A place to define the constants for Pi.Alert, like the log path or config path. |
| conf.py | conf.py holds the configuration variables and makes them available to all modules. It is also the <b>workaround</b> for the global variables until I can work them out. |
| api.py | |
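The conf.py workaround described above leans on a standard Python property: a module is imported exactly once per process, so module-level names in `conf` behave like shared mutable state for every importer. A minimal, runnable sketch of the idea (the stand-in module is built with `types.ModuleType` purely for demonstration):

```python
import sys
import types

# stand-in for conf.py: a module whose attributes act as process-wide state
conf = types.ModuleType('conf')
conf.SCAN_CYCLE_MINUTES = 5      # a real Pi.Alert default (see conf.py below)
sys.modules['conf'] = conf

import conf as conf_again        # any importer receives the same module object

conf.SCAN_CYCLE_MINUTES = 1           # a write in one module ...
print(conf_again.SCAN_CYCLE_MINUTES)  # ... is visible to every other importer -> 1
```

The one trap is `from conf import SCAN_CYCLE_MINUTES`, which copies the value at import time and stops tracking later writes, which is presumably why the split-up modules consistently use `import conf` instead.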
1 pialert/__init__.py (executable file)
@@ -0,0 +1 @@
""" __init__ for Pi.Alert """
314 pialert/__main__.py (executable file)
@@ -0,0 +1,314 @@
#!/usr/bin/env python
#
#-------------------------------------------------------------------------------
#  Pi.Alert v2.70  /  2021-02-01
#  Open Source Network Guard / WIFI & LAN intrusion detector
#
#  pialert.py - Back module. Network scanner
#-------------------------------------------------------------------------------
#  Puche 2021 / 2022+ jokob jokob@duck.com GNU GPLv3
#-------------------------------------------------------------------------------


#===============================================================================
# IMPORTS
#===============================================================================
#from __future__ import print_function

import sys
import time
import datetime
import multiprocessing

# pialert modules
import conf
from const import *
from logger import mylog
from helper import filePermissions, isNewVersion, timeNow, updateState
from api import update_api
from networkscan import process_scan, scan_network
from initialise import importConfigs
from mac_vendor import update_devices_MAC_vendors
from database import DB, get_all_devices
from reporting import check_and_run_event, send_notifications
from plugin import run_plugin_scripts

# different scanners
from scanners.pholusscan import performPholusScan
from scanners.nmapscan import performNmapScan
from scanners.internet import check_internet_IP
#===============================================================================
#===============================================================================
# MAIN
#===============================================================================
#===============================================================================
"""
main structure of Pi Alert

Initialise All
start Loop forever
    initialise loop
        (re)import config
        (re)import plugin config
    run plugins (once)
    run frontend events
    update API
    run scans
        run plugins (scheduled)
        check internet IP
        check vendor
        run PHOLUS
        run NMAP
        run "scan_network()"
            ARP Scan
            PiHole copy db
            PiHole DHCP leases
        processing scan results
        run plugins (after Scan)
    reporting
    cleanup
end loop
"""
def main ():
    mylog('debug', ['[MAIN] Setting up ...'])

    conf.time_started = datetime.datetime.now()
    conf.cycle = ""
    conf.check_report = [1, "internet_IP", "update_vendors_silent"]
    conf.plugins_once_run = False

    pialert_start_time = timeNow()

    # to be deleted if not used
    conf.log_timestamp = conf.time_started
    #cron_instance = Cron()

    # timestamps of last execution times
    startTime = conf.time_started
    now_minus_24h = conf.time_started - datetime.timedelta(hours = 24)

    # set these times to the past to force the first run
    last_network_scan = now_minus_24h
    last_internet_IP_scan = now_minus_24h
    last_scan_run = now_minus_24h
    last_cleanup = now_minus_24h
    last_update_vendors = conf.time_started - datetime.timedelta(days = 6) # update vendors 24h after first run and then once a week
    last_version_check = now_minus_24h

    # indicates if a new version is available
    conf.newVersionAvailable = False

    # check file permissions and fix if required
    filePermissions()

    # Open DB once and keep open
    # Opening / closing the DB frequently actually causes more issues
    db = DB()  # instance of class DB
    db.open()
    sql = db.sql # To-Do replace with the db class

    # Upgrade DB if needed
    db.upgradeDB()
    #===============================================================================
    # This is the main loop of Pi.Alert
    #===============================================================================
    while True:

        # update time started
        time_started = datetime.datetime.now() # not sure why we need this ...
        loop_start_time = timeNow()
        mylog('debug', '[MAIN] Starting loop')

        # re-load user configuration and plugins
        importConfigs(db)

        # check if new version is available / only check once an hour
        # if newVersionAvailable is already true the function does nothing and returns true again
        if last_version_check + datetime.timedelta(hours=1) < loop_start_time :
            conf.newVersionAvailable = isNewVersion(conf.newVersionAvailable)

        # Handle plugins executed ONCE
        if conf.ENABLE_PLUGINS and conf.plugins_once_run == False:
            run_plugin_scripts(db, 'once')
            conf.plugins_once_run = True

        # check if there is a front end initiated event which needs to be executed
        check_and_run_event(db)

        # Update API endpoints
        update_api(db)

        # proceed if 1 minute passed
        if last_scan_run + datetime.timedelta(minutes=1) < loop_start_time :

            # last time any scan or maintenance/upkeep was run
            last_scan_run = time_started

            # Header
            updateState(db,"Process: Start")

            # Timestamp
            startTime = time_started
            startTime = startTime.replace (microsecond=0)

            # Check if any plugins need to run on schedule
            if conf.ENABLE_PLUGINS:
                run_plugin_scripts(db,'schedule')

            # determine run/scan type based on passed time
            # --------------------------------------------

            # check for changes in Internet IP
            if last_internet_IP_scan + datetime.timedelta(minutes=3) < time_started:
                conf.cycle = 'internet_IP'
                last_internet_IP_scan = time_started
                check_internet_IP(db)

            # Update vendors once a week
            if last_update_vendors + datetime.timedelta(days = 7) < time_started:
                last_update_vendors = time_started
                conf.cycle = 'update_vendors'
                mylog('verbose', ['[MAIN] cycle:',conf.cycle])
                update_devices_MAC_vendors(db)

            # Execute scheduled or one-off Pholus scan if enabled and run conditions fulfilled
            if conf.PHOLUS_RUN == "schedule" or conf.PHOLUS_RUN == "once":

                pholusSchedule = [sch for sch in conf.mySchedules if sch.service == "pholus"][0]
                run = False

                # run once after application starts
                if conf.PHOLUS_RUN == "once" and pholusSchedule.last_run == 0:
                    run = True

                # run if overdue scheduled time
                if conf.PHOLUS_RUN == "schedule":
                    run = pholusSchedule.runScheduleCheck()

                if run:
                    pholusSchedule.last_run = datetime.datetime.now(conf.tz).replace(microsecond=0)
                    performPholusScan(db, conf.PHOLUS_RUN_TIMEOUT, conf.userSubnets)

            # Execute scheduled or one-off Nmap scan if enabled and run conditions fulfilled
            if conf.NMAP_RUN == "schedule" or conf.NMAP_RUN == "once":

                nmapSchedule = [sch for sch in conf.mySchedules if sch.service == "nmap"][0]
                run = False

                # run once after application starts
                if conf.NMAP_RUN == "once" and nmapSchedule.last_run == 0:
                    run = True

                # run if overdue scheduled time
                if conf.NMAP_RUN == "schedule":
                    run = nmapSchedule.runScheduleCheck()

                if run:
                    nmapSchedule.last_run = timeNow()
                    performNmapScan(db, get_all_devices(db))

            # Perform a network scan via arp-scan or pihole
            if last_network_scan + datetime.timedelta(minutes=conf.SCAN_CYCLE_MINUTES) < time_started:
                last_network_scan = time_started
                conf.cycle = 1 # network scan
                mylog('verbose', ['[MAIN] cycle:',conf.cycle])
                updateState(db,"Scan: Network")

                # scan_network()
                # DEBUG start ++++++++++++++++++++++++++++++++++++++++++++++++++++++
                # Start scan_network as a process

                # pass the callable and its arguments separately; writing
                # target=scan_network(db) would run the scan in this process
                p = multiprocessing.Process(target=scan_network, args=(db,))
                p.start()

                # Wait for 3600 seconds (max 1h) or until the process finishes
                p.join(3600)

                # If the process is still active
                if p.is_alive():
                    mylog('none', "[MAIN] scan_network running too long - let's kill it")

                    # Terminate - may not work if process is stuck for good
                    p.terminate()
                    # OR Kill - will work for sure, no chance for process to finish nicely however
                    # p.kill()

                    p.join()

                # DEBUG end ++++++++++++++++++++++++++++++++++++++++++++++++++++++
                # Run plugin scripts which are set to run every time after a scan finishes
                if conf.ENABLE_PLUGINS:
                    run_plugin_scripts(db,'always_after_scan')

                # --------------------------------------------------
                # process all the scanned data into new devices
                mylog('debug', "[MAIN] start processing scan results")
                process_scan (db, conf.arpscan_devices )

            # Reporting
            if conf.cycle in conf.check_report:
                # Check if new devices found
                sql.execute (sql_new_devices)
                newDevices = sql.fetchall()
                db.commitDB()

                # new devices were found
                if len(newDevices) > 0:
                    # run all plugins registered to be run when new devices are found
                    if conf.ENABLE_PLUGINS:
                        run_plugin_scripts(db, 'on_new_device')

                    # Scan newly found devices with Nmap if enabled
                    if conf.NMAP_ACTIVE and len(newDevices) > 0:
                        performNmapScan( db, newDevices)

                # send all configured notifications
                send_notifications(db)

            # clean up the DB once a day
            if last_cleanup + datetime.timedelta(hours = 24) < time_started:
                last_cleanup = time_started
                conf.cycle = 'cleanup'
                mylog('verbose', ['[MAIN] cycle:',conf.cycle])
                db.cleanup_database(startTime, conf.DAYS_TO_KEEP_EVENTS, conf.PHOLUS_DAYS_DATA)

            # Commit SQL
            db.commitDB()

            # Final message
            if conf.cycle != "":
                action = str(conf.cycle)
                if action == "1":
                    action = "network_scan"
                mylog('verbose', ['[MAIN] Last action: ', action])
                conf.cycle = ""
                mylog('verbose', ['[MAIN] cycle:',conf.cycle])

            # Footer
            updateState(db,"Process: Wait")
            mylog('verbose', ['[MAIN] Process: Wait'])
        else:
            # do something
            conf.cycle = ""
            mylog('verbose', ['[MAIN] waiting to start next loop'])

        #loop
        time.sleep(5) # wait for N seconds


#===============================================================================
# BEGIN
#===============================================================================
if __name__ == '__main__':
    mylog('debug', ['[__main__] Welcome to Pi.Alert'])
    sys.exit(main())
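Every task in the loop above is gated by the same elapsed-interval check, with the "last run" timestamps pre-dated by 24 hours so everything fires on the first pass. Distilled into a sketch (the `is_due` helper is illustrative, not part of the codebase):

```python
import datetime

def is_due(last_run, minutes, now):
    # a task is due once its full interval has elapsed since the last run
    return last_run + datetime.timedelta(minutes=minutes) < now

now = datetime.datetime.now()
last_network_scan = now - datetime.timedelta(hours=24)  # forces the first run

if is_due(last_network_scan, 5, now):   # 5 = the SCAN_CYCLE_MINUTES default
    last_network_scan = now             # stamp first so a slow scan cannot re-trigger
    # ... run the scan ...
```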
96 pialert/api.py (executable file)
@@ -0,0 +1,96 @@
import json

# pialert modules
import conf
from const import (apiPath, sql_devices_all, sql_nmap_scan_all, sql_pholus_scan_all, sql_events_pending_alert,
                   sql_settings, sql_plugins_events, sql_plugins_history, sql_plugins_objects, sql_language_strings)
from logger import mylog
from helper import write_file

apiEndpoints = []

#===============================================================================
# API
#===============================================================================
def update_api(db, isNotification = False, updateOnlyDataSources = []):

    mylog('verbose', ['[API] Update API starting'])
    # return

    folder = apiPath

    # update notifications moved to reporting send_api()

    # Save plugins
    if conf.ENABLE_PLUGINS:
        write_file(folder + 'plugins.json' , json.dumps({"data" : conf.plugins}))

    # prepare database tables we want to expose
    dataSourcesSQLs = [
        ["devices", sql_devices_all],
        ["nmap_scan", sql_nmap_scan_all],
        ["pholus_scan", sql_pholus_scan_all],
        ["events_pending_alert", sql_events_pending_alert],
        ["settings", sql_settings],
        ["plugins_events", sql_plugins_events],
        ["plugins_history", sql_plugins_history],
        ["plugins_objects", sql_plugins_objects],
        ["language_strings", sql_language_strings],
        ["custom_endpoint", conf.API_CUSTOM_SQL],
    ]

    # Save selected database tables
    for dsSQL in dataSourcesSQLs:

        if updateOnlyDataSources == [] or dsSQL[0] in updateOnlyDataSources:

            api_endpoint_class(db, dsSQL[1], folder + 'table_' + dsSQL[0] + '.json')


#-------------------------------------------------------------------------------
class api_endpoint_class:

    def __init__(self, db, query, path):

        global apiEndpoints
        self.db = db
        self.query = query
        self.jsonData = db.get_table_as_json(self.query).json
        self.path = path
        self.fileName = path.split('/')[-1]
        self.hash = hash(json.dumps(self.jsonData))

        # check if the endpoint needs to be updated
        found = False
        changed = False
        changedIndex = -1
        index = 0

        # search previous endpoint states to check if the API needs updating
        for endpoint in apiEndpoints:
            # match sql and API endpoint path
            if endpoint.query == self.query and endpoint.path == self.path:
                found = True
                if endpoint.hash != self.hash:
                    changed = True
                    changedIndex = index

            index = index + 1

        # check if API endpoints have changed or if it's a new one
        if not found or changed:

            mylog('verbose', [f'[API] Updating {self.fileName} file in /front/api'])

            write_file(self.path, json.dumps(self.jsonData))

            if not found:
                apiEndpoints.append(self)
            elif changed and changedIndex != -1 and changedIndex < len(apiEndpoints):
                # update hash
                apiEndpoints[changedIndex].hash = self.hash
            else:
                mylog('info', [f'[API] ERROR Updating {self.fileName}'])
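Because update_api materializes each query as a static JSON file under front/api, consumers never need a DB connection. A minimal sketch of reading one generated endpoint (the path assumes the default pialertPath from const.py; table_devices.json is the file produced for sql_devices_all):

```python
import json

# written by api_endpoint_class as {"data": [ {column: value, ...}, ... ]}
with open('/home/pi/pialert/front/api/table_devices.json') as f:
    devices = json.load(f)['data']

for dev in devices:
    print(dev['dev_MAC'], dev['dev_Name'], dev['dev_LastIP'])
```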
125 pialert/conf.py (executable file)
@@ -0,0 +1,125 @@
""" config related functions for Pi.Alert """

# These are global variables, not config items, and should not exist!
mySettings = []
mySettingsSQLsafe = []
debug_force_notification = False
cycle = 1
userSubnets = []
mySchedules = [] # bad solution for global - TO-DO
plugins = []     # bad solution for global - TO-DO
tz = ''

# modified time of the most recently imported config file
# set to a small value to force import at first run
lastImportedConfFile = 1.1

plugins_once_run = False
newVersionAvailable = False
time_started = ''
check_report = []
log_timestamp = 0
arpscan_devices = []
# for MQTT
mqtt_connected_to_broker = False
mqtt_sensors = []
client = None # mqtt client
# for notifications
changedPorts_json_struc = None


# ACTUAL CONFIGURATION ITEMS set to defaults

# General
ENABLE_ARPSCAN = True
SCAN_SUBNETS = ['192.168.1.0/24 --interface=eth1', '192.168.1.0/24 --interface=eth0']
LOG_LEVEL = 'verbose'
TIMEZONE = 'Europe/Berlin'
ENABLE_PLUGINS = True
PIALERT_WEB_PROTECTION = False
PIALERT_WEB_PASSWORD = '8d969eef6ecad3c29a3a629280e686cf0c3f5d5a86aff3ca12020c923adc6c92'
INCLUDED_SECTIONS = ['internet', 'new_devices', 'down_devices', 'events', 'ports']
SCAN_CYCLE_MINUTES = 5
DAYS_TO_KEEP_EVENTS = 90
REPORT_DASHBOARD_URL = 'http://pi.alert/'
DIG_GET_IP_ARG = '-4 myip.opendns.com @resolver1.opendns.com'
UI_LANG = 'English'
UI_PRESENCE = ['online', 'offline', 'archived']

# Email
REPORT_MAIL = False
SMTP_SERVER = ''
SMTP_PORT = 587
REPORT_TO = 'user@gmail.com'
REPORT_FROM = 'Pi.Alert <user@gmail.com>'
SMTP_SKIP_LOGIN = False
SMTP_USER = ''
SMTP_PASS = ''
SMTP_SKIP_TLS = False
SMTP_FORCE_SSL = False

# Webhooks
REPORT_WEBHOOK = False
WEBHOOK_URL = ''
WEBHOOK_PAYLOAD = 'json'
WEBHOOK_REQUEST_METHOD = 'GET'

# Apprise
REPORT_APPRISE = False
APPRISE_HOST = ''
APPRISE_URL = ''
APPRISE_PAYLOAD = 'html'

# NTFY
REPORT_NTFY = False
NTFY_HOST = 'https://ntfy.sh'
NTFY_TOPIC = ''
NTFY_USER = ''
NTFY_PASSWORD = ''

# PUSHSAFER
REPORT_PUSHSAFER = False
PUSHSAFER_TOKEN = 'ApiKey'

# MQTT
REPORT_MQTT = False
MQTT_BROKER = ''
MQTT_PORT = 1883
MQTT_USER = ''
MQTT_PASSWORD = ''
MQTT_QOS = 0
MQTT_DELAY_SEC = 2

# DynDNS
DDNS_ACTIVE = False
DDNS_DOMAIN = 'your_domain.freeddns.org'
DDNS_USER = 'dynu_user'
DDNS_PASSWORD = 'A0000000B0000000C0000000D0000000'
DDNS_UPDATE_URL = 'https://api.dynu.com/nic/update?'

# PiHole
PIHOLE_ACTIVE = False
DHCP_ACTIVE = False

# PHOLUS
PHOLUS_ACTIVE = False
PHOLUS_TIMEOUT = 20
PHOLUS_FORCE = False
PHOLUS_RUN = 'once'
PHOLUS_RUN_TIMEOUT = 600
PHOLUS_RUN_SCHD = '0 4 * * *'
PHOLUS_DAYS_DATA = 0

# Nmap
NMAP_ACTIVE = True
NMAP_TIMEOUT = 150
NMAP_RUN = 'once'
NMAP_RUN_SCHD = '0 2 * * *'
NMAP_ARGS = '-p -10000 --max-parallelism 100'

# API
API_CUSTOM_SQL = 'SELECT * FROM Devices WHERE dev_PresentLastScan = 0'
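The lastImportedConfFile marker near the top is what keeps the per-loop re-import cheap: importConfigs (defined in initialise.py, which is not part of this diff) can compare the config file's modification time against it and skip parsing when nothing changed. A sketch of that check, assuming this is how the marker is consumed:

```python
import os

import conf
from const import fullConfPath

def config_needs_reload():
    # the initial value 1.1 is older than any real mtime,
    # so the very first loop always triggers an import
    mtime = os.path.getmtime(fullConfPath)
    if mtime > conf.lastImportedConfFile:
        conf.lastImportedConfFile = mtime
        return True
    return False
```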
54 pialert/const.py (executable file)
@@ -0,0 +1,54 @@
""" CONSTANTS for Pi.Alert """

#===============================================================================
# PATHS
#===============================================================================
pialertPath = '/home/pi/pialert'
#pialertPath ='/home/roland/repos/Pi.Alert'

confPath = "/config/pialert.conf"
dbPath = '/db/pialert.db'

pluginsPath    = pialertPath + '/front/plugins'
logPath        = pialertPath + '/front/log'
apiPath        = pialertPath + '/front/api/'
fullConfPath   = pialertPath + confPath
fullDbPath     = pialertPath + dbPath
fullPholusPath = pialertPath + '/pholus/pholus3.py'

vendorsDB = '/usr/share/arp-scan/ieee-oui.txt'
piholeDB = '/etc/pihole/pihole-FTL.db'
piholeDhcpleases = '/etc/pihole/dhcp.leases'

#===============================================================================
# SQL queries
#===============================================================================
sql_devices_all = """select dev_MAC, dev_Name, dev_DeviceType, dev_Vendor, dev_Group,
                        dev_FirstConnection, dev_LastConnection, dev_LastIP, dev_StaticIP,
                        dev_PresentLastScan, dev_LastNotification, dev_NewDevice,
                        dev_Network_Node_MAC_ADDR, dev_Network_Node_port,
                        dev_Icon from Devices"""
sql_devices_stats = """SELECT Online_Devices as online, Down_Devices as down, All_Devices as 'all', Archived_Devices as archived,
                        (select count(*) from Devices a where dev_NewDevice = 1 ) as new,
                        (select count(*) from Devices a where dev_Name = '(unknown)' or dev_Name = '(name not found)' ) as unknown
                        from Online_History order by Scan_Date desc limit 1"""
sql_nmap_scan_all = "SELECT * FROM Nmap_Scan"
sql_pholus_scan_all = "SELECT * FROM Pholus_Scan"
sql_events_pending_alert = "SELECT * FROM Events where eve_PendingAlertEmail is not 0"
sql_settings = "SELECT * FROM Settings"
sql_plugins_objects = "SELECT * FROM Plugins_Objects"
sql_language_strings = "SELECT * FROM Plugins_Language_Strings"
sql_plugins_events = "SELECT * FROM Plugins_Events"
sql_plugins_history = "SELECT * FROM Plugins_History ORDER BY 'Index' DESC"
sql_new_devices = """SELECT * FROM (
                        SELECT eve_IP as dev_LastIP, eve_MAC as dev_MAC
                        FROM Events_Devices
                        WHERE eve_PendingAlertEmail = 1
                        AND eve_EventType = 'New Device'
                        ORDER BY eve_DateTime ) t1
                     LEFT JOIN
                        ( SELECT dev_Name, dev_MAC as dev_MAC_t2 FROM Devices) t2
                     ON t1.dev_MAC = t2.dev_MAC_t2"""
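The sql_new_devices query is worth a second look: the inner SELECT collects pending "New Device" events (which only carry MAC and IP), and the LEFT JOIN attaches the device's current dev_Name so a notification can show a friendly name, or NULL for a device that was never named. A quick, illustrative way to run it directly (sqlite3 is standard library; fullDbPath is defined above):

```python
import sqlite3

from const import fullDbPath, sql_new_devices  # assumes the pialert package dir is on sys.path

conn = sqlite3.connect(fullDbPath)
conn.row_factory = sqlite3.Row

for row in conn.execute(sql_new_devices):
    # dev_Name is None when the joined Devices row has no name yet
    print(row['dev_MAC'], row['dev_LastIP'], row['dev_Name'])
```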
472 pialert/database.py (executable file)
@@ -0,0 +1,472 @@
""" all things database to support Pi.Alert """

import sqlite3

# pialert modules
from const import fullDbPath, sql_devices_stats, sql_devices_all
from logger import mylog
from helper import json_struc, initOrSetParam, row_to_json, timeNow #, updateState


class DB():
    """
    DB Class to provide the basic database interactions.
    Open / Commit / Close / read / write
    """

    def __init__(self):
        self.sql = None
        self.sql_connection = None

    #-------------------------------------------------------------------------------
    def open (self):
        # Check if DB is open
        if self.sql_connection != None :
            mylog('debug','openDB: database already open')
            return

        mylog('none', '[Database] Opening DB' )
        # Open DB and Cursor
        try:
            self.sql_connection = sqlite3.connect (fullDbPath, isolation_level=None)
            self.sql_connection.execute('pragma journal_mode=wal')
            self.sql_connection.text_factory = str
            self.sql_connection.row_factory = sqlite3.Row
            self.sql = self.sql_connection.cursor()
        except sqlite3.Error as e:
            mylog('none',[ '[Database] - Open DB Error: ', e])

    #-------------------------------------------------------------------------------
    def commitDB (self):
        if self.sql_connection == None :
            mylog('debug','commitDB: database is not open')
            return False

        # Commit changes to DB
        self.sql_connection.commit()
        return True

    #-------------------------------------------------------------------------------
    def get_sql_array(self, query):
        if self.sql_connection == None :
            mylog('debug','getQueryArray: database is not open')
            return

        self.sql.execute(query)
        rows = self.sql.fetchall()
        #self.commitDB()

        # convert result into list of lists
        arr = []
        for row in rows:
            r_temp = []
            for column in row:
                r_temp.append(column)
            arr.append(r_temp)

        return arr
    #===============================================================================
    # Cleanup / upkeep database
    #===============================================================================
    def cleanup_database (self, startTime, DAYS_TO_KEEP_EVENTS, PHOLUS_DAYS_DATA):
        """
        Cleaning out old records from the tables that don't need to keep all data.
        """
        # Header
        #updateState(self,"Upkeep: Clean DB")
        mylog('verbose', ['[DB Cleanup] Upkeep Database:' ])

        # Cleanup Online History
        mylog('verbose', ['[DB Cleanup] Online_History: Delete all but keep latest 150 entries'])
        self.sql.execute ("""DELETE from Online_History where "Index" not in (
                            SELECT "Index" from Online_History
                            order by Scan_Date desc limit 150)""")
        mylog('verbose', ['[DB Cleanup] Optimize Database'])
        # Cleanup Events (concatenate the retention window into the SQL;
        # a literal "+str(...)+" inside the string would never be evaluated)
        mylog('verbose', ['[DB Cleanup] Events: Delete all older than '+str(DAYS_TO_KEEP_EVENTS)+' days'])
        self.sql.execute ("""DELETE FROM Events
                            WHERE eve_DateTime <= date('now', '-""" + str(DAYS_TO_KEEP_EVENTS) + """ day')""")
        # Cleanup Plugin Events History
        mylog('verbose', ['[DB Cleanup] Plugin Events History: Delete all older than '+str(DAYS_TO_KEEP_EVENTS)+' days'])
        self.sql.execute ("""DELETE FROM Plugins_History
                            WHERE DateTimeChanged <= date('now', '-""" + str(DAYS_TO_KEEP_EVENTS) + """ day')""")
        # Cleanup Pholus_Scan
        if PHOLUS_DAYS_DATA != 0:
            mylog('verbose', ['[DB Cleanup] Pholus_Scan: Delete all older than ' + str(PHOLUS_DAYS_DATA) + ' days'])
            # improvement possibility: keep at least N per mac
            self.sql.execute ("""DELETE FROM Pholus_Scan
                                WHERE Time <= date('now', '-""" + str(PHOLUS_DAYS_DATA) + """ day')""")

        # De-Dupe (de-duplicate - remove duplicate entries) from the Pholus_Scan table
        mylog('verbose', ['[DB Cleanup] Pholus_Scan: Delete all duplicates'])
        self.sql.execute ("""DELETE FROM Pholus_Scan
                            WHERE rowid > (
                                SELECT MIN(rowid) FROM Pholus_Scan p2
                                WHERE Pholus_Scan.MAC = p2.MAC
                                AND Pholus_Scan.Value = p2.Value
                                AND Pholus_Scan.Record_Type = p2.Record_Type
                            );""")
        # De-Dupe (de-duplicate - remove duplicate entries) from the Nmap_Scan table
        mylog('verbose', ['[DB Cleanup] Nmap_Scan: Delete all duplicates'])
        self.sql.execute ("""DELETE FROM Nmap_Scan
                            WHERE rowid > (
                                SELECT MIN(rowid) FROM Nmap_Scan p2
                                WHERE Nmap_Scan.MAC = p2.MAC
                                AND Nmap_Scan.Port = p2.Port
                                AND Nmap_Scan.State = p2.State
                                AND Nmap_Scan.Service = p2.Service
                            );""")

        # Shrink DB
        mylog('verbose', ['[DB Cleanup] Shrink Database'])
        self.sql.execute ("VACUUM;")
        self.commitDB()
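The de-duplication statements above use a common SQLite idiom: within each group of identical rows, keep the one with the lowest rowid and delete the rest. A self-contained demonstration of the same pattern on a toy table:

```python
import sqlite3

conn = sqlite3.connect(':memory:')
conn.execute('CREATE TABLE t (MAC TEXT, Value TEXT)')
conn.executemany('INSERT INTO t VALUES (?, ?)',
                 [('aa:bb', 'x'), ('aa:bb', 'x'), ('cc:dd', 'y')])

# keep the earliest row (lowest rowid) per (MAC, Value) group
conn.execute("""DELETE FROM t
                WHERE rowid > (SELECT MIN(rowid) FROM t t2
                               WHERE t.MAC = t2.MAC AND t.Value = t2.Value)""")

print(conn.execute('SELECT COUNT(*) FROM t').fetchone()[0])  # -> 2
```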
    #-------------------------------------------------------------------------------
    def upgradeDB(self):
        """
        Check the current tables in the DB and upgrade them if necessary
        """

        # indicates if the Online_History table is available
        onlineHistoryAvailable = self.sql.execute("""
            SELECT name FROM sqlite_master WHERE type='table'
            AND name='Online_History';
            """).fetchall() != []

        # Check if it is incompatible (Check if table has all required columns)
        isIncompatible = False

        if onlineHistoryAvailable :
            isIncompatible = self.sql.execute ("""
                SELECT COUNT(*) AS CNTREC FROM pragma_table_info('Online_History') WHERE name='Archived_Devices'
                """).fetchone()[0] == 0

        # Drop table if available, but incompatible
        if onlineHistoryAvailable and isIncompatible:
            mylog('none','[upgradeDB] Table is incompatible, Dropping the Online_History table')
            self.sql.execute("DROP TABLE Online_History;")
            onlineHistoryAvailable = False

        if onlineHistoryAvailable == False :
            self.sql.execute("""
            CREATE TABLE "Online_History" (
                "Index"            INTEGER,
                "Scan_Date"        TEXT,
                "Online_Devices"   INTEGER,
                "Down_Devices"     INTEGER,
                "All_Devices"      INTEGER,
                "Archived_Devices" INTEGER,
                PRIMARY KEY("Index" AUTOINCREMENT)
            );
            """)

        # Alter Devices table
        # dev_Network_Node_MAC_ADDR column
        dev_Network_Node_MAC_ADDR_missing = self.sql.execute ("""
            SELECT COUNT(*) AS CNTREC FROM pragma_table_info('Devices') WHERE name='dev_Network_Node_MAC_ADDR'
            """).fetchone()[0] == 0

        if dev_Network_Node_MAC_ADDR_missing :
            mylog('verbose', ["[upgradeDB] Adding dev_Network_Node_MAC_ADDR to the Devices table"])
            self.sql.execute("""
            ALTER TABLE "Devices" ADD "dev_Network_Node_MAC_ADDR" TEXT
            """)

        # dev_Network_Node_port column
        dev_Network_Node_port_missing = self.sql.execute ("""
            SELECT COUNT(*) AS CNTREC FROM pragma_table_info('Devices') WHERE name='dev_Network_Node_port'
            """).fetchone()[0] == 0

        if dev_Network_Node_port_missing :
            mylog('verbose', ["[upgradeDB] Adding dev_Network_Node_port to the Devices table"])
            self.sql.execute("""
            ALTER TABLE "Devices" ADD "dev_Network_Node_port" INTEGER
            """)

        # dev_Icon column
        dev_Icon_missing = self.sql.execute ("""
            SELECT COUNT(*) AS CNTREC FROM pragma_table_info('Devices') WHERE name='dev_Icon'
            """).fetchone()[0] == 0

        if dev_Icon_missing :
            mylog('verbose', ["[upgradeDB] Adding dev_Icon to the Devices table"])
            self.sql.execute("""
            ALTER TABLE "Devices" ADD "dev_Icon" TEXT
            """)

        # indicates if the Settings table is available
        settingsMissing = self.sql.execute("""
            SELECT name FROM sqlite_master WHERE type='table'
            AND name='Settings';
            """).fetchone() == None

        # Re-creating Settings table
        mylog('verbose', ["[upgradeDB] Re-creating Settings table"])

        if settingsMissing == False:
            self.sql.execute("DROP TABLE Settings;")

        self.sql.execute("""
        CREATE TABLE "Settings" (
            "Code_Name"    TEXT,
            "Display_Name" TEXT,
            "Description"  TEXT,
            "Type"         TEXT,
            "Options"      TEXT,
            "RegEx"        TEXT,
            "Value"        TEXT,
            "Group"        TEXT,
            "Events"       TEXT
        );
        """)

        # indicates if the Pholus_Scan table is available
        pholusScanMissing = self.sql.execute("""
            SELECT name FROM sqlite_master WHERE type='table'
            AND name='Pholus_Scan';
            """).fetchone() == None

        # if pholusScanMissing == False:
        #     # Re-creating Pholus_Scan table
        #     self.sql.execute("DROP TABLE Pholus_Scan;")
        #     pholusScanMissing = True

        if pholusScanMissing:
            mylog('verbose', ["[upgradeDB] Re-creating Pholus_Scan table"])
            self.sql.execute("""
            CREATE TABLE "Pholus_Scan" (
                "Index"        INTEGER,
                "Info"         TEXT,
                "Time"         TEXT,
                "MAC"          TEXT,
                "IP_v4_or_v6"  TEXT,
                "Record_Type"  TEXT,
                "Value"        TEXT,
                "Extra"        TEXT,
                PRIMARY KEY("Index" AUTOINCREMENT)
            );
            """)

        # indicates if the Nmap_Scan table is available
        nmapScanMissing = self.sql.execute("""
            SELECT name FROM sqlite_master WHERE type='table'
            AND name='Nmap_Scan';
            """).fetchone() == None

        # Re-creating Parameters table
        mylog('verbose', ["[upgradeDB] Re-creating Parameters table"])
        self.sql.execute("DROP TABLE Parameters;")

        self.sql.execute("""
        CREATE TABLE "Parameters" (
            "par_ID"    TEXT PRIMARY KEY,
            "par_Value" TEXT
        );
        """)

        # Initialize Parameters if unavailable
        initOrSetParam(self, 'Back_App_State','Initializing')

        # if nmapScanMissing == False:
        #     # Re-creating Nmap_Scan table
        #     self.sql.execute("DROP TABLE Nmap_Scan;")
        #     nmapScanMissing = True

        if nmapScanMissing:
            mylog('verbose', ["[upgradeDB] Re-creating Nmap_Scan table"])
            self.sql.execute("""
            CREATE TABLE "Nmap_Scan" (
                "Index"   INTEGER,
                "MAC"     TEXT,
                "Port"    TEXT,
                "Time"    TEXT,
                "State"   TEXT,
                "Service" TEXT,
                "Extra"   TEXT,
                PRIMARY KEY("Index" AUTOINCREMENT)
            );
            """)

        # Plugin state
        sql_Plugins_Objects = """ CREATE TABLE IF NOT EXISTS Plugins_Objects(
            "Index"            INTEGER,
            Plugin             TEXT NOT NULL,
            Object_PrimaryID   TEXT NOT NULL,
            Object_SecondaryID TEXT NOT NULL,
            DateTimeCreated    TEXT NOT NULL,
            DateTimeChanged    TEXT NOT NULL,
            Watched_Value1     TEXT NOT NULL,
            Watched_Value2     TEXT NOT NULL,
            Watched_Value3     TEXT NOT NULL,
            Watched_Value4     TEXT NOT NULL,
            Status             TEXT NOT NULL,
            Extra              TEXT NOT NULL,
            UserData           TEXT NOT NULL,
            ForeignKey         TEXT NOT NULL,
            PRIMARY KEY("Index" AUTOINCREMENT)
        ); """
        self.sql.execute(sql_Plugins_Objects)

        # Plugin execution results
        sql_Plugins_Events = """ CREATE TABLE IF NOT EXISTS Plugins_Events(
            "Index"            INTEGER,
            Plugin             TEXT NOT NULL,
            Object_PrimaryID   TEXT NOT NULL,
            Object_SecondaryID TEXT NOT NULL,
            DateTimeCreated    TEXT NOT NULL,
            DateTimeChanged    TEXT NOT NULL,
            Watched_Value1     TEXT NOT NULL,
            Watched_Value2     TEXT NOT NULL,
            Watched_Value3     TEXT NOT NULL,
            Watched_Value4     TEXT NOT NULL,
            Status             TEXT NOT NULL,
            Extra              TEXT NOT NULL,
            UserData           TEXT NOT NULL,
            ForeignKey         TEXT NOT NULL,
            PRIMARY KEY("Index" AUTOINCREMENT)
        ); """
        self.sql.execute(sql_Plugins_Events)

        # Plugin execution history
        sql_Plugins_History = """ CREATE TABLE IF NOT EXISTS Plugins_History(
            "Index"            INTEGER,
            Plugin             TEXT NOT NULL,
            Object_PrimaryID   TEXT NOT NULL,
            Object_SecondaryID TEXT NOT NULL,
            DateTimeCreated    TEXT NOT NULL,
            DateTimeChanged    TEXT NOT NULL,
            Watched_Value1     TEXT NOT NULL,
            Watched_Value2     TEXT NOT NULL,
            Watched_Value3     TEXT NOT NULL,
            Watched_Value4     TEXT NOT NULL,
            Status             TEXT NOT NULL,
            Extra              TEXT NOT NULL,
            UserData           TEXT NOT NULL,
            ForeignKey         TEXT NOT NULL,
            PRIMARY KEY("Index" AUTOINCREMENT)
        ); """
        self.sql.execute(sql_Plugins_History)

        # Dynamically generated language strings
        # indicates if the Plugins_Language_Strings table is available
        languageStringsMissing = self.sql.execute("""
            SELECT name FROM sqlite_master WHERE type='table'
            AND name='Plugins_Language_Strings';
            """).fetchone() == None

        if languageStringsMissing == False:
            self.sql.execute("DROP TABLE Plugins_Language_Strings;")

        self.sql.execute(""" CREATE TABLE IF NOT EXISTS Plugins_Language_Strings(
            "Index"       INTEGER,
            Language_Code TEXT NOT NULL,
            String_Key    TEXT NOT NULL,
            String_Value  TEXT NOT NULL,
            Extra         TEXT NOT NULL,
            PRIMARY KEY("Index" AUTOINCREMENT)
        ); """)

        self.commitDB()
    #-------------------------------------------------------------------------------
    def get_table_as_json(self, sqlQuery):

        mylog('debug',[ '[Database] - get_table_as_json - Query: ', sqlQuery])
        try:
            self.sql.execute(sqlQuery)
            columnNames = list(map(lambda x: x[0], self.sql.description))
            rows = self.sql.fetchall()
        except sqlite3.Error as e:
            mylog('none',[ '[Database] - SQL ERROR: ', e])
            return None

        result = {"data":[]}
        for row in rows:
            tmp = row_to_json(columnNames, row)
            result["data"].append(tmp)

        mylog('debug',[ '[Database] - get_table_as_json - returning ', len(rows), " rows with columns: ", columnNames])
        return json_struc(result, columnNames)

    #-------------------------------------------------------------------------------
    # reference from here: https://codereview.stackexchange.com/questions/241043/interface-class-for-sqlite-databases
    #-------------------------------------------------------------------------------
    def read(self, query, *args):
        """check the query and arguments are aligned and are read only"""
        mylog('debug',[ '[Database] - Read All: SELECT Query: ', query, " params: ", args])
        try:
            assert query.count('?') == len(args)
            assert query.upper().strip().startswith('SELECT')
            self.sql.execute(query, args)
            rows = self.sql.fetchall()
            return rows
        except AssertionError:
            mylog('none',[ '[Database] - ERROR: inconsistent query and/or arguments.', query, " params: ", args])
        except sqlite3.Error as e:
            mylog('none',[ '[Database] - SQL ERROR: ', e])
        return None

    def read_one(self, query, *args):
        """
        call read() with the same arguments but only return the first row.
        should only be used when a single-row result is expected
        """

        mylog('debug',[ '[Database] - Read One: ', query, " params: ", args])
        rows = self.read(query, *args)
        if len(rows) == 1:
            return rows[0]

        if len(rows) > 1:
            mylog('none',[ '[Database] - Warning!: query returns multiple rows, only the first row is passed on!', query, " params: ", args])
            return rows[0]
        # empty result set
        return None
#-------------------------------------------------------------------------------
def get_device_stats(db):
    # columns = ["online","down","all","archived","new","unknown"]
    return db.read_one(sql_devices_stats)

#-------------------------------------------------------------------------------
def get_all_devices(db):
    return db.read(sql_devices_all)

#-------------------------------------------------------------------------------
def insertOnlineHistory(db):
    sql = db.sql #TO-DO
    startTime = timeNow()
    # Add to History

    # only run this if the scans have run
    scanCount = db.read_one("SELECT count(*) FROM CurrentScan")
    if scanCount[0] == 0 :
        mylog('debug',[ '[insertOnlineHistory] - nothing to do, CurrentScan empty'])
        return 0

    History_All = db.read("SELECT * FROM Devices")
    History_All_Devices = len(History_All)

    History_Archived = db.read("SELECT * FROM Devices WHERE dev_Archived = 1")
    History_Archived_Devices = len(History_Archived)

    History_Online = db.read("SELECT * FROM CurrentScan")
    History_Online_Devices = len(History_Online)
    History_Offline_Devices = History_All_Devices - History_Archived_Devices - History_Online_Devices

    sql.execute ("INSERT INTO Online_History (Scan_Date, Online_Devices, Down_Devices, All_Devices, Archived_Devices) "+
                 "VALUES ( ?, ?, ?, ?, ?)", (startTime, History_Online_Devices, History_Offline_Devices, History_All_Devices, History_Archived_Devices ) )
    db.commitDB()
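A short usage sketch of the class and helpers above, in the way the new __main__.py consumes them (the query shown is illustrative):

```python
from database import DB, get_device_stats

db = DB()
db.open()       # connects to fullDbPath and enables WAL journal mode
db.upgradeDB()  # creates or migrates tables as needed

# read() asserts the statement is a SELECT and that placeholders match the args
rows = db.read('SELECT dev_Name FROM Devices WHERE dev_PresentLastScan = ?', 1)

stats = get_device_stats(db)  # one Row: online, down, all, archived, new, unknown
db.commitDB()
```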
432 pialert/device.py (executable file)
@@ -0,0 +1,432 @@
import subprocess

import conf
from helper import timeNow
from scanners.internet import check_IP_format, get_internet_IP
from logger import mylog, print_log
from mac_vendor import query_MAC_vendor
from scanners.pholusscan import performPholusScan, resolve_device_name_dig, resolve_device_name_pholus

#-------------------------------------------------------------------------------
def save_scanned_devices (db, p_arpscan_devices, p_cycle_interval):
    sql = db.sql #TO-DO
    cycle = 1 # always 1, only one cycle supported

    # Delete previous scan data
    sql.execute ("DELETE FROM CurrentScan WHERE cur_ScanCycle = ?",
                (cycle,))

    if len(p_arpscan_devices) > 0:
        # Insert new arp-scan devices
        sql.executemany ("INSERT INTO CurrentScan (cur_ScanCycle, cur_MAC, "+
                         " cur_IP, cur_Vendor, cur_ScanMethod) "+
                         "VALUES ("+ str(cycle) + ", :mac, :ip, :hw, 'arp-scan')",
                         p_arpscan_devices)

    # Insert Pi-hole devices
    startTime = timeNow()
    sql.execute ("""INSERT INTO CurrentScan (cur_ScanCycle, cur_MAC,
                        cur_IP, cur_Vendor, cur_ScanMethod)
                    SELECT ?, PH_MAC, PH_IP, PH_Vendor, 'Pi-hole'
                    FROM PiHole_Network
                    WHERE PH_LastQuery >= ?
                    AND NOT EXISTS (SELECT 'X' FROM CurrentScan
                                    WHERE cur_MAC = PH_MAC
                                    AND cur_ScanCycle = ? )""",
                    (cycle,
                     (int(startTime.strftime('%s')) - 60 * p_cycle_interval),
                     cycle) )

    # Check Internet connectivity
    internet_IP = get_internet_IP( conf.DIG_GET_IP_ARG )
    # TESTING - Force IP
    # internet_IP = ""
    if internet_IP != "" :
        sql.execute ("""INSERT INTO CurrentScan (cur_ScanCycle, cur_MAC, cur_IP, cur_Vendor, cur_ScanMethod)
                        VALUES (?, 'Internet', ?, Null, 'queryDNS') """, (cycle, internet_IP) )

    # #76 Add Local MAC of default local interface
    # BUGFIX #106 - Device that pialert is running on
    # local_mac_cmd = ["bash -lc ifconfig `ip route list default | awk {'print $5'}` | grep ether | awk '{print $2}'"]
    # local_mac_cmd = ["/sbin/ifconfig `ip route list default | sort -nk11 | head -1 | awk {'print $5'}` | grep ether | awk '{print $2}'"]
    local_mac_cmd = ["/sbin/ifconfig `ip -o route get 1 | sed 's/^.*dev \\([^ ]*\\).*$/\\1/;q'` | grep ether | awk '{print $2}'"]
    local_mac = subprocess.Popen (local_mac_cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT).communicate()[0].decode().strip()

    # local_dev_cmd = ["ip -o route get 1 | sed 's/^.*dev \\([^ ]*\\).*$/\\1/;q'"]
    # local_dev = subprocess.Popen (local_dev_cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT).communicate()[0].decode().strip()

    # local_ip_cmd = ["ip route list default | awk {'print $7'}"]
    local_ip_cmd = ["ip -o route get 1 | sed 's/^.*src \\([^ ]*\\).*$/\\1/;q'"]
    local_ip = subprocess.Popen (local_ip_cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT).communicate()[0].decode().strip()

    mylog('debug', ['[Save Devices] Saving this IP into the CurrentScan table:', local_ip])

    if check_IP_format(local_ip) == '':
        local_ip = '0.0.0.0'

    # Check if the local MAC has been detected with other methods
    sql.execute ("SELECT COUNT(*) FROM CurrentScan WHERE cur_ScanCycle = ? AND cur_MAC = ? ", (cycle, local_mac) )
    if sql.fetchone()[0] == 0 :
        sql.execute ("INSERT INTO CurrentScan (cur_ScanCycle, cur_MAC, cur_IP, cur_Vendor, cur_ScanMethod) "+
                     "VALUES ( ?, ?, ?, Null, 'local_MAC') ", (cycle, local_mac, local_ip) )
#-------------------------------------------------------------------------------
def print_scan_stats (db):
    sql = db.sql #TO-DO

    # Devices Detected
    sql.execute ("""SELECT COUNT(*) FROM CurrentScan
                    WHERE cur_ScanCycle = ? """,
                    (conf.cycle,))
    mylog('verbose', ['[Scan Stats] Devices Detected.......: ', str (sql.fetchone()[0]) ])

    # Devices arp-scan
    sql.execute ("""SELECT COUNT(*) FROM CurrentScan
                    WHERE cur_ScanMethod='arp-scan' AND cur_ScanCycle = ? """,
                    (conf.cycle,))
    mylog('verbose', ['[Scan Stats] arp-scan detected..: ', str (sql.fetchone()[0]) ])

    # Devices Pi-hole
    sql.execute ("""SELECT COUNT(*) FROM CurrentScan
                    WHERE cur_ScanMethod='PiHole' AND cur_ScanCycle = ? """,
                    (conf.cycle,))
    mylog('verbose', ['[Scan Stats] Pi-hole detected...: +' + str (sql.fetchone()[0]) ])

    # New Devices
    sql.execute ("""SELECT COUNT(*) FROM CurrentScan
                    WHERE cur_ScanCycle = ?
                      AND NOT EXISTS (SELECT 1 FROM Devices
                                      WHERE dev_MAC = cur_MAC) """,
                    (conf.cycle,))
    mylog('verbose', ['[Scan Stats] New Devices........: ' + str (sql.fetchone()[0]) ])

    # Devices in this ScanCycle
    sql.execute ("""SELECT COUNT(*) FROM Devices, CurrentScan
                    WHERE dev_MAC = cur_MAC AND dev_ScanCycle = cur_ScanCycle
                      AND dev_ScanCycle = ? """,
                    (conf.cycle,))

    mylog('verbose', ['[Scan Stats] Devices in this cycle..: ' + str (sql.fetchone()[0]) ])

    # Down Alerts
    sql.execute ("""SELECT COUNT(*) FROM Devices
                    WHERE dev_AlertDeviceDown = 1
                      AND dev_ScanCycle = ?
                      AND NOT EXISTS (SELECT 1 FROM CurrentScan
                                      WHERE dev_MAC = cur_MAC
                                        AND dev_ScanCycle = cur_ScanCycle) """,
                    (conf.cycle,))
    mylog('verbose', ['[Scan Stats] Down Alerts........: ' + str (sql.fetchone()[0]) ])

    # New Down Alerts
    sql.execute ("""SELECT COUNT(*) FROM Devices
                    WHERE dev_AlertDeviceDown = 1
                      AND dev_PresentLastScan = 1
                      AND dev_ScanCycle = ?
                      AND NOT EXISTS (SELECT 1 FROM CurrentScan
                                      WHERE dev_MAC = cur_MAC
                                        AND dev_ScanCycle = cur_ScanCycle) """,
                    (conf.cycle,))
    mylog('verbose', ['[Scan Stats] New Down Alerts....: ' + str (sql.fetchone()[0]) ])

    # New Connections
    sql.execute ("""SELECT COUNT(*) FROM Devices, CurrentScan
                    WHERE dev_MAC = cur_MAC AND dev_ScanCycle = cur_ScanCycle
                      AND dev_PresentLastScan = 0
                      AND dev_ScanCycle = ? """,
                    (conf.cycle,))
    mylog('verbose', ['[Scan Stats] New Connections....: ' + str ( sql.fetchone()[0]) ])

    # Disconnections
    sql.execute ("""SELECT COUNT(*) FROM Devices
                    WHERE dev_PresentLastScan = 1
                      AND dev_ScanCycle = ?
                      AND NOT EXISTS (SELECT 1 FROM CurrentScan
                                      WHERE dev_MAC = cur_MAC
                                        AND dev_ScanCycle = cur_ScanCycle) """,
                    (conf.cycle,))
    mylog('verbose', ['[Scan Stats] Disconnections.....: ' + str ( sql.fetchone()[0]) ])

    # IP Changes
    sql.execute ("""SELECT COUNT(*) FROM Devices, CurrentScan
                    WHERE dev_MAC = cur_MAC AND dev_ScanCycle = cur_ScanCycle
                      AND dev_ScanCycle = ?
                      AND dev_LastIP <> cur_IP """,
                    (conf.cycle,))
    mylog('verbose', ['[Scan Stats] IP Changes.........: ' + str ( sql.fetchone()[0]) ])

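# Reading note for print_scan_stats (editorial, not original code): the "down"
# and "disconnected" counters are anti-joins -- a Devices row with no matching
# CurrentScan row in this cycle -- differing only in dev_AlertDeviceDown;
# "new connections" is the mirror image, a join where dev_PresentLastScan = 0.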
#-------------------------------------------------------------------------------
def create_new_devices (db):
    sql = db.sql # TO-DO
    startTime = timeNow()

    # arpscan - Insert events for new devices
    mylog('debug','[New Devices] New devices - 1 Events')
    sql.execute ("""INSERT INTO Events (eve_MAC, eve_IP, eve_DateTime,
                        eve_EventType, eve_AdditionalInfo,
                        eve_PendingAlertEmail)
                    SELECT cur_MAC, cur_IP, ?, 'New Device', cur_Vendor, 1
                    FROM CurrentScan
                    WHERE cur_ScanCycle = ?
                      AND NOT EXISTS (SELECT 1 FROM Devices
                                      WHERE dev_MAC = cur_MAC) """,
                    (startTime, conf.cycle) )

    mylog('debug','[New Devices] Insert Connection into session table')
    sql.execute ("""INSERT INTO Sessions (ses_MAC, ses_IP, ses_EventTypeConnection, ses_DateTimeConnection,
                        ses_EventTypeDisconnection, ses_DateTimeDisconnection, ses_StillConnected, ses_AdditionalInfo)
                    SELECT cur_MAC, cur_IP,'Connected',?, NULL , NULL ,1, cur_Vendor
                    FROM CurrentScan
                    WHERE cur_ScanCycle = ?
                      AND NOT EXISTS (SELECT 1 FROM Sessions
                                      WHERE ses_MAC = cur_MAC) """,
                    (startTime, conf.cycle) )

    # arpscan - Create new devices
    mylog('debug','[New Devices] 2 Create devices')
    sql.execute ("""INSERT INTO Devices (dev_MAC, dev_name, dev_Vendor,
                        dev_LastIP, dev_FirstConnection, dev_LastConnection,
                        dev_ScanCycle, dev_AlertEvents, dev_AlertDeviceDown,
                        dev_PresentLastScan)
                    SELECT cur_MAC, '(unknown)', cur_Vendor, cur_IP, ?, ?,
                        1, 1, 0, 1
                    FROM CurrentScan
                    WHERE cur_ScanCycle = ?
                      AND NOT EXISTS (SELECT 1 FROM Devices
                                      WHERE dev_MAC = cur_MAC) """,
                    (startTime, startTime, conf.cycle) )

    # Pi-hole - Insert events for new devices
    # NOT STRICTLY NECESSARY (devices can be created through CurrentScan)
    # Bugfix #2 - Pi-hole devices w/o IP
    mylog('debug','[New Devices] 3 Pi-hole Events')
    sql.execute ("""INSERT INTO Events (eve_MAC, eve_IP, eve_DateTime,
                        eve_EventType, eve_AdditionalInfo,
                        eve_PendingAlertEmail)
                    SELECT PH_MAC, IFNULL (PH_IP,'-'), ?, 'New Device',
                        '(Pi-Hole) ' || PH_Vendor, 1
                    FROM PiHole_Network
                    WHERE NOT EXISTS (SELECT 1 FROM Devices
                                      WHERE dev_MAC = PH_MAC) """,
                    (startTime, ) )

    # Pi-hole - Create New Devices
    # Bugfix #2 - Pi-hole devices w/o IP
    mylog('debug','[New Devices] 4 Pi-hole Create devices')
    sql.execute ("""INSERT INTO Devices (dev_MAC, dev_name, dev_Vendor,
                        dev_LastIP, dev_FirstConnection, dev_LastConnection,
                        dev_ScanCycle, dev_AlertEvents, dev_AlertDeviceDown,
                        dev_PresentLastScan)
                    SELECT PH_MAC, PH_Name, PH_Vendor, IFNULL (PH_IP,'-'),
                        ?, ?, 1, 1, 0, 1
                    FROM PiHole_Network
                    WHERE NOT EXISTS (SELECT 1 FROM Devices
                                      WHERE dev_MAC = PH_MAC) """,
                    (startTime, startTime) )

    # DHCP Leases - Insert events for new devices
    mylog('debug','[New Devices] 5 DHCP Leases Events')
    sql.execute ("""INSERT INTO Events (eve_MAC, eve_IP, eve_DateTime,
                        eve_EventType, eve_AdditionalInfo,
                        eve_PendingAlertEmail)
                    SELECT DHCP_MAC, DHCP_IP, ?, 'New Device', '(DHCP lease)',1
                    FROM DHCP_Leases
                    WHERE NOT EXISTS (SELECT 1 FROM Devices
                                      WHERE dev_MAC = DHCP_MAC) """,
                    (startTime, ) )

    # DHCP Leases - Create New Devices
    mylog('debug','[New Devices] 6 DHCP Leases Create devices')
    # BUGFIX #23 - Duplicated MAC in DHCP.Leases
    # TEST - Force duplicated MAC
    # sql.execute ("""INSERT INTO DHCP_Leases VALUES
    #                 (1610700000, 'TEST1', '10.10.10.1', 'Test 1', '*')""")
    # sql.execute ("""INSERT INTO DHCP_Leases VALUES
    #                 (1610700000, 'TEST2', '10.10.10.2', 'Test 2', '*')""")
    sql.execute ("""INSERT INTO Devices (dev_MAC, dev_name, dev_LastIP,
                        dev_Vendor, dev_FirstConnection, dev_LastConnection,
                        dev_ScanCycle, dev_AlertEvents, dev_AlertDeviceDown,
                        dev_PresentLastScan)
                    SELECT DISTINCT DHCP_MAC,
                        (SELECT DHCP_Name FROM DHCP_Leases AS D2
                         WHERE D2.DHCP_MAC = D1.DHCP_MAC
                         ORDER BY DHCP_DateTime DESC LIMIT 1),
                        (SELECT DHCP_IP FROM DHCP_Leases AS D2
                         WHERE D2.DHCP_MAC = D1.DHCP_MAC
                         ORDER BY DHCP_DateTime DESC LIMIT 1),
                        '(unknown)', ?, ?, 1, 1, 0, 1
                    FROM DHCP_Leases AS D1
                    WHERE NOT EXISTS (SELECT 1 FROM Devices
                                      WHERE dev_MAC = DHCP_MAC) """,
                    (startTime, startTime) )

    # (superseded by the de-duplicating query above)
    # sql.execute ("""INSERT INTO Devices (dev_MAC, dev_name, dev_Vendor,
    #                     dev_LastIP, dev_FirstConnection, dev_LastConnection,
    #                     dev_ScanCycle, dev_AlertEvents, dev_AlertDeviceDown,
    #                     dev_PresentLastScan)
    #                 SELECT DHCP_MAC, DHCP_Name, '(unknown)', DHCP_IP, ?, ?,
    #                     1, 1, 0, 1
    #                 FROM DHCP_Leases
    #                 WHERE NOT EXISTS (SELECT 1 FROM Devices
    #                                   WHERE dev_MAC = DHCP_MAC) """,
    #                 (startTime, startTime) )
    mylog('debug','[New Devices] New Devices end')
    db.commitDB()

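# Why DISTINCT plus correlated sub-selects above (BUGFIX #23): DHCP.leases can
# hold several rows per MAC, and for each MAC the sub-selects pick the name/IP
# of the newest lease. Illustrative rows (made-up values):
#   (1610700000, 'AA:BB:CC:DD:EE:FF', '10.10.10.1', 'old-name', '*')
#   (1610800000, 'AA:BB:CC:DD:EE:FF', '10.10.10.2', 'new-name', '*')
# yield one new device: ('AA:BB:CC:DD:EE:FF', 'new-name', '10.10.10.2').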
#-------------------------------------------------------------------------------
def update_devices_data_from_scan (db):
    sql = db.sql #TO-DO
    startTime = timeNow()
    # Update Last Connection
    mylog('debug','[Update Devices] 1 Last Connection')
    sql.execute ("""UPDATE Devices SET dev_LastConnection = ?,
                        dev_PresentLastScan = 1
                    WHERE dev_ScanCycle = ?
                      AND dev_PresentLastScan = 0
                      AND EXISTS (SELECT 1 FROM CurrentScan
                                  WHERE dev_MAC = cur_MAC
                                    AND dev_ScanCycle = cur_ScanCycle) """,
                    (startTime, conf.cycle))

    # Clean devices that are no longer active
    mylog('debug','[Update Devices] 2 Clean no active devices')
    sql.execute ("""UPDATE Devices SET dev_PresentLastScan = 0
                    WHERE dev_ScanCycle = ?
                      AND NOT EXISTS (SELECT 1 FROM CurrentScan
                                      WHERE dev_MAC = cur_MAC
                                        AND dev_ScanCycle = cur_ScanCycle) """,
                    (conf.cycle,))

    # Update IP & Vendor
    mylog('debug','[Update Devices] - 3 LastIP & Vendor')
    sql.execute ("""UPDATE Devices
                    SET dev_LastIP = (SELECT cur_IP FROM CurrentScan
                                      WHERE dev_MAC = cur_MAC
                                        AND dev_ScanCycle = cur_ScanCycle),
                        dev_Vendor = (SELECT cur_Vendor FROM CurrentScan
                                      WHERE dev_MAC = cur_MAC
                                        AND dev_ScanCycle = cur_ScanCycle)
                    WHERE dev_ScanCycle = ?
                      AND EXISTS (SELECT 1 FROM CurrentScan
                                  WHERE dev_MAC = cur_MAC
                                    AND dev_ScanCycle = cur_ScanCycle) """,
                    (conf.cycle,))

    # Pi-hole Network - Update (unknown) Name
    mylog('debug','[Update Devices] - 4 Unknown Name')
    sql.execute ("""UPDATE Devices
                    SET dev_NAME = (SELECT PH_Name FROM PiHole_Network
                                    WHERE PH_MAC = dev_MAC)
                    WHERE (dev_Name IN ("(unknown)", "(name not found)", "" )
                       OR dev_Name IS NULL)
                      AND EXISTS (SELECT 1 FROM PiHole_Network
                                  WHERE PH_MAC = dev_MAC
                                    AND PH_NAME IS NOT NULL
                                    AND PH_NAME <> '') """)

    # DHCP Leases - Update (unknown) Name
    sql.execute ("""UPDATE Devices
                    SET dev_NAME = (SELECT DHCP_Name FROM DHCP_Leases
                                    WHERE DHCP_MAC = dev_MAC)
                    WHERE (dev_Name IN ("(unknown)", "(name not found)", "" )
                       OR dev_Name IS NULL)
                      AND EXISTS (SELECT 1 FROM DHCP_Leases
                                  WHERE DHCP_MAC = dev_MAC)""")

    # DHCP Leases - Vendor
    mylog('debug','[Update Devices] - 5 Vendor')

    recordsToUpdate = []
    query = """SELECT * FROM Devices
               WHERE dev_Vendor = '(unknown)' OR dev_Vendor =''
                  OR dev_Vendor IS NULL"""

    for device in sql.execute (query) :
        vendor = query_MAC_vendor (device['dev_MAC'])
        if vendor != -1 and vendor != -2 :
            recordsToUpdate.append ([vendor, device['dev_MAC']])

    sql.executemany ("UPDATE Devices SET dev_Vendor = ? WHERE dev_MAC = ? ",
                     recordsToUpdate )

    # clean up the DHCP leases table
    sql.execute ("DELETE FROM DHCP_Leases")
    mylog('debug','[Update Devices] Update devices end')

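# Sentinel note for the vendor loop above: query_MAC_vendor (mac_vendor.py)
# returns -1 when the OUI is not in the vendors DB and -2 for a malformed
# MAC, so only genuine vendor strings are queued into recordsToUpdate.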
#-------------------------------------------------------------------------------
def update_devices_names (db):
    sql = db.sql #TO-DO
    # Initialize variables
    recordsToUpdate = []
    recordsNotFound = []

    ignored = 0
    notFound = 0

    foundDig = 0
    foundPholus = 0

    # BUGFIX #97 - Updating name of devices w/o IP
    sql.execute ("SELECT * FROM Devices WHERE dev_Name IN ('(unknown)','', '(name not found)') AND dev_LastIP <> '-'")
    unknownDevices = sql.fetchall()
    db.commitDB()

    # perform a Pholus scan if (unknown) devices were found
    if conf.PHOLUS_ACTIVE and (len(unknownDevices) > 0 or conf.PHOLUS_FORCE):
        performPholusScan(db, conf.PHOLUS_TIMEOUT, conf.userSubnets)

    # skip the checks below if there are no unknown devices
    if len(unknownDevices) == 0 and conf.PHOLUS_FORCE == False:
        return

    # Devices without a name
    mylog('verbose', '[Update Device Name] Trying to resolve devices without name')

    # get names from the Pholus scan
    sql.execute ('SELECT * FROM Pholus_Scan where "Record_Type"="Answer"')
    pholusResults = list(sql.fetchall())
    db.commitDB()

    # Number of entries from previous Pholus scans
    mylog('verbose', ['[Update Device Name] Pholus entries from prev scans: ', len(pholusResults)])

    for device in unknownDevices:
        newName = -1

        # Resolve device name with DiG
        newName = resolve_device_name_dig (device['dev_MAC'], device['dev_LastIP'])

        # count
        if newName != -1:
            foundDig += 1

        # Resolve with Pholus
        if newName == -1:
            newName = resolve_device_name_pholus (device['dev_MAC'], device['dev_LastIP'], pholusResults)
            # count
            if newName != -1:
                foundPholus += 1

        # if still not found, update the name so we can distinguish the devices we already tried
        if newName == -1 :
            recordsNotFound.append (["(name not found)", device['dev_MAC']])
        else:
            # a name was found with DiG or Pholus
            recordsToUpdate.append ([newName, device['dev_MAC']])

    # Print log
    mylog('verbose', ['[Update Device Name] Names Found (DiG/Pholus): ', len(recordsToUpdate), " (",foundDig,"/",foundPholus ,")"] )
    mylog('verbose', ['[Update Device Name] Names Not Found         : ', len(recordsNotFound)] )

    # update devices that could not be resolved with (name not found)
    sql.executemany ("UPDATE Devices SET dev_Name = ? WHERE dev_MAC = ? ", recordsNotFound )
    # update the names of devices we were able to resolve
    sql.executemany ("UPDATE Devices SET dev_Name = ? WHERE dev_MAC = ? ", recordsToUpdate )
    db.commitDB()

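# --- illustrative sketch, not part of the module ------------------------------
# The fallback chain in update_devices_names, simplified; -1 is the module's
# "not found" sentinel.
def _resolve_name_sketch(dig_result, pholus_result):
    if dig_result != -1:
        return dig_result           # DiG (reverse DNS) answer wins
    if pholus_result != -1:
        return pholus_result        # Pholus (mDNS/NetBIOS) answer second
    return '(name not found)'       # marker so the device is not retried

assert _resolve_name_sketch(-1, 'printer.local') == 'printer.local'
assert _resolve_name_sketch(-1, -1) == '(name not found)'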
323
pialert/helper.py
Executable file
@@ -0,0 +1,323 @@
""" Collection of generic functions to support Pi.Alert """

import io
import sys
import datetime
import os
import re
import subprocess
from pytz import timezone
from datetime import timedelta
import json
import time
from pathlib import Path
import requests

import conf
from const import *
from logger import mylog, logResult


#-------------------------------------------------------------------------------
def timeNow():
    return datetime.datetime.now().replace(microsecond=0)

#-------------------------------------------------------------------------------
def timeNowTZ():
    return datetime.datetime.now(conf.tz).replace(microsecond=0)

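# Note: timeNow() is naive local time while timeNowTZ() is timezone-aware
# (conf.tz); Python raises TypeError when the two flavours are compared, so
# each code path should stick to one of them.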
#-------------------------------------------------------------------------------
def updateState(db, newState):

    # ?? Why is the state written to the DB?

    #sql = db.sql

    mylog('debug', '[updateState] changing state to: "' + newState +'"')
    db.sql.execute ("UPDATE Parameters SET par_Value='"+ newState +"' WHERE par_ID='Back_App_State'")

    db.commitDB()

#-------------------------------------------------------------------------------
def updateSubnets(scan_subnets):
    subnets = []

    # multiple interfaces
    if type(scan_subnets) is list:
        for interface in scan_subnets :
            subnets.append(interface)
    # one interface only
    else:
        subnets.append(scan_subnets)

    return subnets

#-------------------------------------------------------------------------------
# check RW access of DB and config file
def checkPermissionsOK():
    #global confR_access, confW_access, dbR_access, dbW_access

    confR_access = (os.access(fullConfPath, os.R_OK))
    confW_access = (os.access(fullConfPath, os.W_OK))
    dbR_access = (os.access(fullDbPath, os.R_OK))
    dbW_access = (os.access(fullDbPath, os.W_OK))

    mylog('none', ['\n'])
    mylog('none', ['Permissions check (all should be True)'])
    mylog('none', ['------------------------------------------------'])
    mylog('none', [ " " , confPath , " | " , " READ | " , confR_access])
    mylog('none', [ " " , confPath , " | " , " WRITE | " , confW_access])
    mylog('none', [ " " , dbPath , " | " , " READ | " , dbR_access])
    mylog('none', [ " " , dbPath , " | " , " WRITE | " , dbW_access])
    mylog('none', ['------------------------------------------------'])

    #return dbR_access and dbW_access and confR_access and confW_access
    return (confR_access, dbR_access)

#-------------------------------------------------------------------------------
def fixPermissions():
    # Try fixing access rights if needed
    chmodCommands = []

    chmodCommands.append(['sudo', 'chmod', 'a+rw', '-R', fullDbPath])
    chmodCommands.append(['sudo', 'chmod', 'a+rw', '-R', fullConfPath])

    for com in chmodCommands:
        # Execute command
        mylog('none', ["[Setup] Attempting to fix permissions."])
        try:
            # try running a subprocess
            result = subprocess.check_output (com, universal_newlines=True)
        except subprocess.CalledProcessError as e:
            # An error occurred, handle it
            mylog('none', ["[Setup] Fix failed. Execute this command manually inside the container: ", ' '.join(com)])
            mylog('none', [e.output])

#-------------------------------------------------------------------------------
def initialiseFile(pathToCheck, defaultFile):
    # if the file is not readable (missing?) try to copy over the backed-up (default) one
    if str(os.access(pathToCheck, os.R_OK)) == "False":
        mylog('none', ["[Setup] ("+ pathToCheck +") file is not readable or missing. Trying to copy over the default one."])
        try:
            # try running a subprocess
            p = subprocess.Popen(["cp", defaultFile , pathToCheck], stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
            stdout, stderr = p.communicate()

            if str(os.access(pathToCheck, os.R_OK)) == "False":
                mylog('none', ["[Setup] Error copying ("+defaultFile+") to ("+pathToCheck+"). Make sure the app has Read & Write access to the parent directory."])
            else:
                mylog('none', ["[Setup] ("+defaultFile+") copied over successfully to ("+pathToCheck+")."])

            # write stdout and stderr into .log files for debugging if needed
            logResult (stdout, stderr) # TO-DO should be changed to mylog
        except subprocess.CalledProcessError as e:
            # An error occurred, handle it
            mylog('none', ["[Setup] Error copying ("+defaultFile+"). Make sure the app has Read & Write access to " + pathToCheck])
            mylog('none', [e.output])

def filePermissions():
    # check and initialize pialert.conf
    (confR_access, dbR_access) = checkPermissionsOK() # Initial check

    if confR_access == False:
        initialiseFile(fullConfPath, "/home/pi/pialert/back/pialert.conf_bak" )

    # check and initialize pialert.db
    if dbR_access == False:
        initialiseFile(fullDbPath, "/home/pi/pialert/back/pialert.db_bak")

    # last attempt
    fixPermissions()

#-------------------------------------------------------------------------------

def bytes_to_string(value):
    # if the value is of type bytes, convert it to a string
    if isinstance(value, bytes):
        value = value.decode('utf-8')
    return value

#-------------------------------------------------------------------------------

def if_byte_then_to_str(input):
    if isinstance(input, bytes):
        input = input.decode('utf-8')
    input = bytes_to_string(re.sub('[^a-zA-Z0-9-_\s]', '', str(input)))
    return input

#-------------------------------------------------------------------------------
def collect_lang_strings(db, json, pref):

    for prop in json["localized"]:
        for language_string in json[prop]:
            import_language_string(db, language_string["language_code"], pref + "_" + prop, language_string["string"])


#-------------------------------------------------------------------------------
# Creates a JSON object from a DB row
def row_to_json(names, row):

    rowEntry = {}

    for name in names:
        rowEntry[name] = if_byte_then_to_str(row[name])

    return rowEntry

#-------------------------------------------------------------------------------
def import_language_string(db, code, key, value, extra = ""):

    db.sql.execute ("""INSERT INTO Plugins_Language_Strings ("Language_Code", "String_Key", "String_Value", "Extra") VALUES (?, ?, ?, ?)""", (str(code), str(key), str(value), str(extra)))

    db.commitDB()

#-------------------------------------------------------------------------------
def checkIPV4(ip):
    """ Validate an IPv4 address """
    ipRegex = "^((25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9]?[0-9])\.){3}(25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9]?[0-9])$"

    if(re.search(ipRegex, ip)):
        return True
    else:
        return False

#-------------------------------------------------------------------------------
def isNewVersion(newVersion: bool):

    if newVersion == False:

        f = open(pialertPath + '/front/buildtimestamp.txt', 'r')
        buildTimestamp = int(f.read().strip())
        f.close()

        data = ""

        try:
            url = requests.get("https://api.github.com/repos/jokob-sk/Pi.Alert/releases")
            text = url.text
            data = json.loads(text)
        except requests.exceptions.ConnectionError as e:
            mylog('info', [" Couldn't check for new release."])
            data = ""

        # make sure we received a valid response and not an API rate limit exceeded message
        if data != "" and len(data) > 0 and isinstance(data, list) and "published_at" in data[0]:

            dateTimeStr = data[0]["published_at"]

            releaseTimestamp = int(datetime.datetime.strptime(dateTimeStr, '%Y-%m-%dT%H:%M:%SZ').strftime('%s'))

            if releaseTimestamp > buildTimestamp + 600:
                mylog('none', [" New version of the container available!"])
                newVersion = True
                # updateState(db, 'Back_New_Version_Available', str(newVersionAvailable)) ## TO-DO add this back in but avoid a circular ref with database

    return newVersion

#-------------------------------------------------------------------------------
def hide_email(email):
    m = email.split('@')

    if len(m) == 2:
        return f'{m[0][0]}{"*"*(len(m[0])-2)}{m[0][-1] if len(m[0]) > 1 else ""}@{m[1]}'

    return email

#-------------------------------------------------------------------------------
def removeDuplicateNewLines(text):
    if "\n\n\n" in text:
        return removeDuplicateNewLines(text.replace("\n\n\n", "\n\n"))
    else:
        return text

#-------------------------------------------------------------------------------

def add_json_list (row, list):
    new_row = []
    for column in row :
        column = bytes_to_string(column)

        new_row.append(column)

    list.append(new_row)

    return list

#-------------------------------------------------------------------------------

def sanitize_string(input):
    if isinstance(input, bytes):
        input = input.decode('utf-8')
    value = bytes_to_string(re.sub('[^a-zA-Z0-9-_\s]', '', str(input)))
    return value


#-------------------------------------------------------------------------------
def generate_mac_links (html, deviceUrl):

    p = re.compile(r'(?:[0-9a-fA-F]:?){12}')

    MACs = re.findall(p, html)

    for mac in MACs:
        html = html.replace('<td>' + mac + '</td>','<td><a href="' + deviceUrl + mac + '">' + mac + '</a></td>')

    return html


#-------------------------------------------------------------------------------
def initOrSetParam(db, parID, parValue):
    sql = db.sql

    sql.execute ("INSERT INTO Parameters(par_ID, par_Value) VALUES('"+str(parID)+"', '"+str(parValue)+"') ON CONFLICT(par_ID) DO UPDATE SET par_Value='"+str(parValue)+"' where par_ID='"+str(parID)+"'")

    db.commitDB()

#-------------------------------------------------------------------------------
class json_struc:
    def __init__(self, jsn, columnNames):
        self.json = jsn
        self.columnNames = columnNames


#-------------------------------------------------------------------------------
def get_file_content(path):

    f = open(path, 'r')
    content = f.read()
    f.close()

    return content

#-------------------------------------------------------------------------------
def write_file (pPath, pText):
    # Write the text using the correct Python version
    if sys.version_info < (3, 0):
        file = io.open (pPath , mode='w', encoding='utf-8')
        file.write ( pText.decode('unicode_escape') )
        file.close()
    else:
        file = open (pPath, 'w', encoding='utf-8')
        if pText is None:
            pText = ""
        file.write (pText)
        file.close()

#-------------------------------------------------------------------------------
class noti_struc:
    def __init__(self, json, text, html):
        self.json = json
        self.text = text
        self.html = html

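# --- illustrative smoke test, not part of helper.py ---------------------------
# Assumes the pialert folder is on sys.path (importing helper pulls in
# conf/const as side effects), e.g. when run inside the container.
from helper import checkIPV4, hide_email, removeDuplicateNewLines

assert checkIPV4('192.168.1.254') is True
assert checkIPV4('256.1.1.1') is False
assert hide_email('john.doe@example.com') == 'j******e@example.com'
assert removeDuplicateNewLines('a\n\n\n\nb') == 'a\n\nb'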
256
pialert/initialise.py
Executable file
@@ -0,0 +1,256 @@

import os
import time
from pytz import timezone
from cron_converter import Cron
from pathlib import Path
import datetime

import conf
from const import fullConfPath
from helper import collect_lang_strings, updateSubnets, initOrSetParam
from logger import mylog
from api import update_api
from scheduler import schedule_class
from plugin import get_plugins_configs, print_plugin_info

#===============================================================================
# Initialise user defined values
#===============================================================================
# We need access to the DB to save new values, so DB access methods must be defined first
#-------------------------------------------------------------------------------

#-------------------------------------------------------------------------------
# Import user values
# ccd = Check the Config Dictionary
def ccd(key, default, config_dir, name, inputtype, options, group, events=[], desc = "", regex = ""):
    result = default

    # use the existing value if already supplied, otherwise the default value is used
    if key in config_dir:
        result = config_dir[key]

    if inputtype == 'text':
        result = result.replace('\'', "{s-quote}")

    conf.mySettingsSQLsafe.append((key, name, desc, inputtype, options, regex, str(result), group, str(events)))
    conf.mySettings.append((key, name, desc, inputtype, options, regex, result, group, str(events)))

    return result

#-------------------------------------------------------------------------------

def importConfigs (db):

    sql = db.sql

    # get the config file name
    config_file = Path(fullConfPath)

    # Only import the file if it was modified since the last import.
    # This avoids time zone issues as we just compare the previous timestamp to the current timestamp
    mylog('debug', ['[Import Config] checking config file '])
    mylog('debug', ['[Import Config] lastImportedConfFile :', conf.lastImportedConfFile])
    mylog('debug', ['[Import Config] file modified time   :', os.path.getmtime(config_file)])

    if (os.path.getmtime(config_file) == conf.lastImportedConfFile) :
        mylog('debug', ['[Import Config] skipping config file import'])
        return

    conf.lastImportedConfFile = os.path.getmtime(config_file)

    mylog('debug', ['[Import Config] importing config file'])
    conf.mySettings = []        # reset settings
    conf.mySettingsSQLsafe = [] # same as above but safe to be passed into a SQL query

    c_d = read_config_file(config_file)

    # Import each setting if found in the dictionary
    # General
    conf.ENABLE_ARPSCAN = ccd('ENABLE_ARPSCAN', True , c_d, 'Enable arpscan', 'boolean', '', 'General', ['run'])
    conf.SCAN_SUBNETS = ccd('SCAN_SUBNETS', ['192.168.1.0/24 --interface=eth1', '192.168.1.0/24 --interface=eth0'] , c_d, 'Subnets to scan', 'subnets', '', 'General')
    conf.LOG_LEVEL = ccd('LOG_LEVEL', 'verbose' , c_d, 'Log verbosity', 'selecttext', "['none', 'minimal', 'verbose', 'debug']", 'General')
    conf.TIMEZONE = ccd('TIMEZONE', 'Europe/Berlin' , c_d, 'Time zone', 'text', '', 'General')
    conf.ENABLE_PLUGINS = ccd('ENABLE_PLUGINS', True , c_d, 'Enable plugins', 'boolean', '', 'General')
    conf.PIALERT_WEB_PROTECTION = ccd('PIALERT_WEB_PROTECTION', False , c_d, 'Enable logon', 'boolean', '', 'General')
    conf.PIALERT_WEB_PASSWORD = ccd('PIALERT_WEB_PASSWORD', '8d969eef6ecad3c29a3a629280e686cf0c3f5d5a86aff3ca12020c923adc6c92' , c_d, 'Logon password', 'readonly', '', 'General')
    conf.INCLUDED_SECTIONS = ccd('INCLUDED_SECTIONS', ['internet', 'new_devices', 'down_devices', 'events', 'ports'] , c_d, 'Notify on', 'multiselect', "['internet', 'new_devices', 'down_devices', 'events', 'ports', 'plugins']", 'General')
    conf.SCAN_CYCLE_MINUTES = ccd('SCAN_CYCLE_MINUTES', 5 , c_d, 'Scan cycle delay (m)', 'integer', '', 'General')
    conf.DAYS_TO_KEEP_EVENTS = ccd('DAYS_TO_KEEP_EVENTS', 90 , c_d, 'Delete events days', 'integer', '', 'General')
    conf.REPORT_DASHBOARD_URL = ccd('REPORT_DASHBOARD_URL', 'http://pi.alert/' , c_d, 'PiAlert URL', 'text', '', 'General')
    conf.DIG_GET_IP_ARG = ccd('DIG_GET_IP_ARG', '-4 myip.opendns.com @resolver1.opendns.com' , c_d, 'DIG arguments', 'text', '', 'General')
    conf.UI_LANG = ccd('UI_LANG', 'English' , c_d, 'Language Interface', 'selecttext', "['English', 'German', 'Spanish']", 'General')
    conf.UI_PRESENCE = ccd('UI_PRESENCE', ['online', 'offline', 'archived'] , c_d, 'Include in presence', 'multiselect', "['online', 'offline', 'archived']", 'General')

    # Email
    conf.REPORT_MAIL = ccd('REPORT_MAIL', False , c_d, 'Enable email', 'boolean', '', 'Email', ['test'])
    conf.SMTP_SERVER = ccd('SMTP_SERVER', '' , c_d,'SMTP server URL', 'text', '', 'Email')
    conf.SMTP_PORT = ccd('SMTP_PORT', 587 , c_d, 'SMTP port', 'integer', '', 'Email')
    conf.REPORT_TO = ccd('REPORT_TO', 'user@gmail.com' , c_d, 'Email to', 'text', '', 'Email')
    conf.REPORT_FROM = ccd('REPORT_FROM', 'Pi.Alert <user@gmail.com>' , c_d, 'Email from', 'text', '', 'Email')
    conf.SMTP_SKIP_LOGIN = ccd('SMTP_SKIP_LOGIN', False , c_d, 'SMTP skip login', 'boolean', '', 'Email')
    conf.SMTP_USER = ccd('SMTP_USER', '' , c_d, 'SMTP user', 'text', '', 'Email')
    conf.SMTP_PASS = ccd('SMTP_PASS', '' , c_d, 'SMTP password', 'password', '', 'Email')
    conf.SMTP_SKIP_TLS = ccd('SMTP_SKIP_TLS', False , c_d, 'SMTP skip TLS', 'boolean', '', 'Email')
    conf.SMTP_FORCE_SSL = ccd('SMTP_FORCE_SSL', False , c_d, 'Force SSL', 'boolean', '', 'Email')

    # Webhooks
    conf.REPORT_WEBHOOK = ccd('REPORT_WEBHOOK', False , c_d, 'Enable Webhooks', 'boolean', '', 'Webhooks', ['test'])
    conf.WEBHOOK_URL = ccd('WEBHOOK_URL', '' , c_d, 'Target URL', 'text', '', 'Webhooks')
    conf.WEBHOOK_PAYLOAD = ccd('WEBHOOK_PAYLOAD', 'json' , c_d, 'Payload type', 'selecttext', "['json', 'html', 'text']", 'Webhooks')
    conf.WEBHOOK_REQUEST_METHOD = ccd('WEBHOOK_REQUEST_METHOD', 'GET' , c_d, 'Req type', 'selecttext', "['GET', 'POST', 'PUT']", 'Webhooks')

    # Apprise
    conf.REPORT_APPRISE = ccd('REPORT_APPRISE', False , c_d, 'Enable Apprise', 'boolean', '', 'Apprise', ['test'])
    conf.APPRISE_HOST = ccd('APPRISE_HOST', '' , c_d, 'Apprise host URL', 'text', '', 'Apprise')
    conf.APPRISE_URL = ccd('APPRISE_URL', '' , c_d, 'Apprise notification URL', 'text', '', 'Apprise')
    conf.APPRISE_PAYLOAD = ccd('APPRISE_PAYLOAD', 'html' , c_d, 'Payload type', 'selecttext', "['html', 'text']", 'Apprise')

    # NTFY
    conf.REPORT_NTFY = ccd('REPORT_NTFY', False , c_d, 'Enable NTFY', 'boolean', '', 'NTFY', ['test'])
    conf.NTFY_HOST = ccd('NTFY_HOST', 'https://ntfy.sh' , c_d, 'NTFY host URL', 'text', '', 'NTFY')
    conf.NTFY_TOPIC = ccd('NTFY_TOPIC', '' , c_d, 'NTFY topic', 'text', '', 'NTFY')
    conf.NTFY_USER = ccd('NTFY_USER', '' , c_d, 'NTFY user', 'text', '', 'NTFY')
    conf.NTFY_PASSWORD = ccd('NTFY_PASSWORD', '' , c_d, 'NTFY password', 'password', '', 'NTFY')

    # PUSHSAFER
    conf.REPORT_PUSHSAFER = ccd('REPORT_PUSHSAFER', False , c_d, 'Enable PUSHSAFER', 'boolean', '', 'PUSHSAFER', ['test'])
    conf.PUSHSAFER_TOKEN = ccd('PUSHSAFER_TOKEN', 'ApiKey' , c_d, 'PUSHSAFER token', 'text', '', 'PUSHSAFER')

    # MQTT
    conf.REPORT_MQTT = ccd('REPORT_MQTT', False , c_d, 'Enable MQTT', 'boolean', '', 'MQTT')
    conf.MQTT_BROKER = ccd('MQTT_BROKER', '' , c_d, 'MQTT broker', 'text', '', 'MQTT')
    conf.MQTT_PORT = ccd('MQTT_PORT', 1883 , c_d, 'MQTT broker port', 'integer', '', 'MQTT')
    conf.MQTT_USER = ccd('MQTT_USER', '' , c_d, 'MQTT user', 'text', '', 'MQTT')
    conf.MQTT_PASSWORD = ccd('MQTT_PASSWORD', '' , c_d, 'MQTT password', 'password', '', 'MQTT')
    conf.MQTT_QOS = ccd('MQTT_QOS', 0 , c_d, 'MQTT Quality of Service', 'selectinteger', "['0', '1', '2']", 'MQTT')
    conf.MQTT_DELAY_SEC = ccd('MQTT_DELAY_SEC', 2 , c_d, 'MQTT delay', 'selectinteger', "['2', '3', '4', '5']", 'MQTT')

    # DynDNS
    conf.DDNS_ACTIVE = ccd('DDNS_ACTIVE', False , c_d, 'Enable DynDNS', 'boolean', '', 'DynDNS')
    conf.DDNS_DOMAIN = ccd('DDNS_DOMAIN', 'your_domain.freeddns.org' , c_d, 'DynDNS domain URL', 'text', '', 'DynDNS')
    conf.DDNS_USER = ccd('DDNS_USER', 'dynu_user' , c_d, 'DynDNS user', 'text', '', 'DynDNS')
    conf.DDNS_PASSWORD = ccd('DDNS_PASSWORD', 'A0000000B0000000C0000000D0000000' , c_d, 'DynDNS password', 'password', '', 'DynDNS')
    conf.DDNS_UPDATE_URL = ccd('DDNS_UPDATE_URL', 'https://api.dynu.com/nic/update?' , c_d, 'DynDNS update URL', 'text', '', 'DynDNS')

    # PiHole
    conf.PIHOLE_ACTIVE = ccd('PIHOLE_ACTIVE', False, c_d, 'Enable PiHole mapping', 'boolean', '', 'PiHole')
    conf.DHCP_ACTIVE = ccd('DHCP_ACTIVE', False , c_d, 'Enable PiHole DHCP', 'boolean', '', 'PiHole')

    # Pholus
    conf.PHOLUS_ACTIVE = ccd('PHOLUS_ACTIVE', False , c_d, 'Enable Pholus scans', 'boolean', '', 'Pholus')
    conf.PHOLUS_TIMEOUT = ccd('PHOLUS_TIMEOUT', 20 , c_d, 'Pholus timeout', 'integer', '', 'Pholus')
    conf.PHOLUS_FORCE = ccd('PHOLUS_FORCE', False , c_d, 'Pholus force check', 'boolean', '', 'Pholus')
    conf.PHOLUS_RUN = ccd('PHOLUS_RUN', 'once' , c_d, 'Pholus enable schedule', 'selecttext', "['none', 'once', 'schedule']", 'Pholus')
    conf.PHOLUS_RUN_TIMEOUT = ccd('PHOLUS_RUN_TIMEOUT', 600 , c_d, 'Pholus timeout schedule', 'integer', '', 'Pholus')
    conf.PHOLUS_RUN_SCHD = ccd('PHOLUS_RUN_SCHD', '0 4 * * *' , c_d, 'Pholus schedule', 'text', '', 'Pholus')
    conf.PHOLUS_DAYS_DATA = ccd('PHOLUS_DAYS_DATA', 0 , c_d, 'Pholus keep days', 'integer', '', 'Pholus')

    # Nmap
    conf.NMAP_ACTIVE = ccd('NMAP_ACTIVE', True , c_d, 'Enable Nmap scans', 'boolean', '', 'Nmap')
    conf.NMAP_TIMEOUT = ccd('NMAP_TIMEOUT', 150 , c_d, 'Nmap timeout', 'integer', '', 'Nmap')
    conf.NMAP_RUN = ccd('NMAP_RUN', 'none' , c_d, 'Nmap enable schedule', 'selecttext', "['none', 'once', 'schedule']", 'Nmap')
    conf.NMAP_RUN_SCHD = ccd('NMAP_RUN_SCHD', '0 2 * * *' , c_d, 'Nmap schedule', 'text', '', 'Nmap')
    conf.NMAP_ARGS = ccd('NMAP_ARGS', '-p -10000' , c_d, 'Nmap custom arguments', 'text', '', 'Nmap')

    # API
    conf.API_CUSTOM_SQL = ccd('API_CUSTOM_SQL', 'SELECT * FROM Devices WHERE dev_PresentLastScan = 0' , c_d, 'Custom endpoint', 'text', '', 'API')

    # Re-init the timezone in case it changed
    conf.tz = timezone(conf.TIMEZONE)

    # reset schedules
    conf.mySchedules = []

    # init the Pholus schedule
    pholusSchedule = Cron(conf.PHOLUS_RUN_SCHD).schedule(start_date=datetime.datetime.now(conf.tz))
    conf.mySchedules.append(schedule_class("pholus", pholusSchedule, pholusSchedule.next(), False))

    # init the Nmap schedule
    nmapSchedule = Cron(conf.NMAP_RUN_SCHD).schedule(start_date=datetime.datetime.now(conf.tz))
    conf.mySchedules.append(schedule_class("nmap", nmapSchedule, nmapSchedule.next(), False))

    # Format and prepare the list of subnets
    conf.userSubnets = updateSubnets(conf.SCAN_SUBNETS)

    # Plugins START
    # -----------------
    if conf.ENABLE_PLUGINS:
        conf.plugins = get_plugins_configs()

        mylog('none', ['[Config] Plugins: Number of dynamically loaded plugins: ', len(conf.plugins)])

        # handle plugins
        for plugin in conf.plugins:
            pref = plugin["unique_prefix"]
            print_plugin_info(plugin, ['display_name','description'])

            # if plugin["enabled"] == 'true':

            # collect plugin-level language strings
            collect_lang_strings(db, plugin, pref)

            for set in plugin["settings"]:
                setFunction = set["function"]
                # Setting code name / key
                key = pref + "_" + setFunction

                v = ccd(key, set["default_value"], c_d, set["name"][0]["string"], set["type"] , str(set["options"]), pref)

                # Save the user defined value into the object
                set["value"] = v

                # Setup schedules
                if setFunction == 'RUN_SCHD':
                    newSchedule = Cron(v).schedule(start_date=datetime.datetime.now(conf.tz))
                    conf.mySchedules.append(schedule_class(pref, newSchedule, newSchedule.next(), False))

                # Collect settings-related language strings
                collect_lang_strings(db, set, pref + "_" + set["function"])

        conf.plugins_once_run = False
    # -----------------
    # Plugins END

    # Insert settings into the DB
    sql.execute ("DELETE FROM Settings")
    sql.executemany ("""INSERT INTO Settings ("Code_Name", "Display_Name", "Description", "Type", "Options",
                            "RegEx", "Value", "Group", "Events" ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)""", conf.mySettingsSQLsafe)

    # Used to determine the next import
    conf.lastTimeImported = time.time()

    # Used to display a message in the UI when old (outdated) settings are loaded
    initOrSetParam(db, "Back_Settings_Imported",(round(time.time() * 1000),) )

    #commitDB(sql_connection)
    db.commitDB()

    # update only the settings datasource
    update_api(db, False, ["settings"])
    # TO-DO: this creates a circular reference between API and HELPER!

    mylog('info', '[Config] Imported new config')

#-------------------------------------------------------------------------------
def read_config_file(filename):
    """
    Returns a dict of the config file's key:value pairs
    """
    mylog('info', '[Config] reading config file')
    # load the variables from pialert.conf
    code = compile(filename.read_text(), filename.name, "exec")
    confDict = {} # config dictionary
    exec(code, {"__builtins__": {}}, confDict)
    return confDict

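# --- illustrative sketch, not part of initialise.py ---------------------------
# The pattern above in isolation: pialert.conf is plain Python assignments
# executed with empty __builtins__ (a light guard, not a real sandbox), so
# each key = value line becomes a dictionary entry.
code = compile("SCAN_CYCLE_MINUTES = 5\nUI_LANG = 'English'", "<pialert.conf>", "exec")
confDict = {}
exec(code, {"__builtins__": {}}, confDict)
assert confDict["SCAN_CYCLE_MINUTES"] == 5
assert confDict["UI_LANG"] == 'English'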
97
pialert/logger.py
Executable file
@@ -0,0 +1,97 @@
""" Collection of functions to support all logging for Pi.Alert """
import sys
import io
import datetime

import conf
from const import *

#-------------------------------------------------------------------------------
# duplicated from helper to avoid a circular import
#-------------------------------------------------------------------------------
def timeNow():
    return datetime.datetime.now().replace(microsecond=0)


#-------------------------------------------------------------------------------
debugLevels = [
    ('none', 0), ('minimal', 1), ('verbose', 2), ('debug', 3)
]

def mylog(requestedDebugLevel, n):

    setLvl = 0
    reqLvl = 0

    # Get the debug urgency/relative weight
    for lvl in debugLevels:
        if conf.LOG_LEVEL == lvl[0]:
            setLvl = lvl[1]
        if requestedDebugLevel == lvl[0]:
            reqLvl = lvl[1]

    if reqLvl <= setLvl:
        file_print (*n)

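# Behaviour of the filter above, assuming conf.LOG_LEVEL = 'minimal':
#   mylog('none',    [...])  -> printed    (0 <= 1)
#   mylog('minimal', [...])  -> printed    (1 <= 1)
#   mylog('verbose', [...])  -> suppressed (2 >  1)
#   mylog('debug',   [...])  -> suppressed (3 >  1)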
#-------------------------------------------------------------------------------
def file_print (*args):

    result = timeNow().strftime ('%H:%M:%S') + ' '

    for arg in args:
        result += str(arg)
    print(result)

    file = open(logPath + "/pialert.log", "a")
    file.write(result + '\n')
    file.close()

#-------------------------------------------------------------------------------
def print_log (pText):

    # Check that debug logging is active
    if not conf.LOG_LEVEL == 'debug' :
        return

    # Current Time
    log_timestamp2 = datetime.datetime.now(conf.tz).replace(microsecond=0)

    # Print line + time + elapsed time + text
    file_print ('[LOG_LEVEL=debug] ',
        # log_timestamp2, ' ',
        log_timestamp2.strftime ('%H:%M:%S'), ' ',
        pText)

    # Save the current time to calculate the elapsed time until the next log
    conf.log_timestamp = log_timestamp2

    return pText


#-------------------------------------------------------------------------------
def append_file_binary (pPath, input):
    file = open (pPath, 'ab')
    file.write (input)
    file.close()


#-------------------------------------------------------------------------------
def logResult (stdout, stderr):
    if stderr != None:
        append_file_binary (logPath + '/stderr.log', stderr)
    if stdout != None:
        append_file_binary (logPath + '/stdout.log', stdout)

#-------------------------------------------------------------------------------
def append_line_to_file (pPath, pText):
    # append the line using the correct Python version
    if sys.version_info < (3, 0):
        file = io.open (pPath , mode='a', encoding='utf-8')
        file.write ( pText.decode('unicode_escape') )
        file.close()
    else:
        file = open (pPath, 'a', encoding='utf-8')
        file.write (pText)
        file.close()

102
pialert/mac_vendor.py
Executable file
@@ -0,0 +1,102 @@

import subprocess

from const import pialertPath, vendorsDB
from helper import timeNow, updateState
from logger import mylog


#===============================================================================
# UPDATE DEVICE MAC VENDORS
#===============================================================================

def update_devices_MAC_vendors (db, pArg = ''):
    sql = db.sql # TO-DO
    # Header
    updateState(db,"Upkeep: Vendors")
    mylog('verbose', ['[', timeNow(), '] Upkeep - Update HW Vendors:' ])

    # Update the vendors DB (iab & oui)
    mylog('verbose', [' Updating vendors DB (iab & oui)'])
    update_args = ['sh', pialertPath + '/update_vendors.sh', pArg]

    try:
        # try running a subprocess
        update_output = subprocess.check_output (update_args)
    except subprocess.CalledProcessError as e:
        # An error occurred, handle it
        mylog('none', [' FAILED: Updating vendors DB, set LOG_LEVEL=debug for more info'])
        mylog('none', [e.output])

    # Initialize variables
    recordsToUpdate = []
    ignored = 0
    notFound = 0

    # All devices loop
    mylog('verbose', [' Searching devices vendor'])
    for device in sql.execute ("""SELECT * FROM Devices
                                  WHERE dev_Vendor = '(unknown)'
                                     OR dev_Vendor =''
                                     OR dev_Vendor IS NULL""") :
        # Search vendor in HW Vendors DB
        vendor = query_MAC_vendor (device['dev_MAC'])
        if vendor == -1 :
            notFound += 1
        elif vendor == -2 :
            ignored += 1
        else :
            recordsToUpdate.append ([vendor, device['dev_MAC']])

    # Print log
    mylog('verbose', [" Devices Ignored: ", ignored])
    mylog('verbose', [" Vendors Not Found:", notFound])
    mylog('verbose', [" Vendors updated: ", len(recordsToUpdate) ])

    # update devices
    sql.executemany ("UPDATE Devices SET dev_Vendor = ? WHERE dev_MAC = ? ",
                     recordsToUpdate )

    # Commit DB
    db.commitDB()

    if len(recordsToUpdate) > 0:
        return True
    else:
        return False

#-------------------------------------------------------------------------------
def query_MAC_vendor (pMAC):
    try :
        # BUGFIX #6 - Fix pMAC parameter passed as a number
        pMACstr = str(pMAC)

        # Check the MAC parameter
        mac = pMACstr.replace (':','')
        if len(pMACstr) != 17 or len(mac) != 12 :
            return -2

        # Search vendor in HW Vendors DB
        mac = mac[0:6]
        grep_args = ['grep', '-i', mac, vendorsDB]
        # Execute command
        try:
            # try running a subprocess
            grep_output = subprocess.check_output (grep_args)
        except subprocess.CalledProcessError as e:
            # An error occurred, handle it
            mylog('none', ["[Mac Vendor Check] Error: ", e.output])
            grep_output = " There was an error, check logs for details"

        # Return Vendor
        vendor = grep_output[7:]
        vendor = vendor.rstrip()
        return vendor

    # not found
    except subprocess.CalledProcessError :
        return -1

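# --- illustrative sketch, not part of mac_vendor.py ---------------------------
# The MAC -> OUI step of query_MAC_vendor in isolation: the first six hex
# digits identify the vendor; malformed MACs map to the "ignored" sentinel
# (-2 in the real function, None here).
def _mac_to_oui_sketch(mac):
    digits = mac.replace(':', '')
    if len(mac) != 17 or len(digits) != 12:
        return None
    return digits[:6]

assert _mac_to_oui_sketch('b8:27:eb:12:34:56') == 'b827eb'   # a Raspberry Pi OUI
assert _mac_to_oui_sketch('not-a-mac') is None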
327
pialert/networkscan.py
Executable file
@@ -0,0 +1,327 @@


import conf
from scanners.arpscan import execute_arpscan
from scanners.pihole import copy_pihole_network, read_DHCP_leases
from database import insertOnlineHistory
from device import create_new_devices, print_scan_stats, save_scanned_devices, update_devices_data_from_scan, update_devices_names
from helper import timeNow
from logger import mylog
from reporting import skip_repeated_notifications


#===============================================================================
# SCAN NETWORK
#===============================================================================

def scan_network (db):
    sql = db.sql #TO-DO

    # Header
    # moved updateState to the main loop
    # updateState(db,"Scan: Network")
    mylog('verbose', ['[Network Scan] Scan Devices:' ])

    # Query ScanCycle properties
    scanCycle_data = query_ScanCycle_Data (db, True)
    if scanCycle_data is None:
        mylog('none', ['\n'])
        mylog('none', ['[Network Scan]*************** ERROR ***************'])
        mylog('none', ['[Network Scan] ScanCycle %s not found' % conf.cycle ])
        mylog('none', ['[Network Scan] Exiting...\n'])
        return False

    db.commitDB()

    # arp-scan command
    conf.arpscan_devices = []
    if conf.ENABLE_ARPSCAN:
        mylog('verbose','[Network Scan] arp-scan start')
        conf.arpscan_devices = execute_arpscan (conf.userSubnets)
        mylog('verbose','[Network Scan] arp-scan ends')

    # Pi-hole method
    if conf.PIHOLE_ACTIVE :
        mylog('verbose','[Network Scan] Pi-hole start')
        copy_pihole_network(db)
        db.commitDB()

    # DHCP Leases method
    if conf.DHCP_ACTIVE :
        mylog('verbose','[Network Scan] DHCP Leases start')
        read_DHCP_leases (db)
        db.commitDB()

def process_scan (db, arpscan_devices = conf.arpscan_devices ):
    # note: the default value is evaluated once at import time; callers should
    # pass the current arpscan_devices explicitly to avoid stale results

    # Query ScanCycle properties
    scanCycle_data = query_ScanCycle_Data (db, True)
    if scanCycle_data is None:
        mylog('none', ['\n'])
        mylog('none', ['[Process Scan]*************** ERROR ***************'])
        mylog('none', ['[Process Scan] ScanCycle %s not found' % conf.cycle ])
        mylog('none', ['[Process Scan] Exiting...\n'])
        return False

    db.commitDB()

    # ScanCycle data
    cycle_interval = scanCycle_data['cic_EveryXmin']

    # Load the current scan data
    mylog('verbose','[Process Scan] Processing scan results')
    save_scanned_devices (db, arpscan_devices, cycle_interval)

    # Print stats
    mylog('none','[Process Scan] Print Stats')
    print_scan_stats(db)
    mylog('none','[Process Scan] Stats end')

    # Create Events
    mylog('verbose','[Process Scan] Updating DB Info')
    mylog('verbose','[Process Scan] Sessions Events (connect / disconnect)')
    insert_events(db)

    # Create New Devices
    # after creating events -> avoids a 'connection' event
    mylog('verbose','[Process Scan] Creating new devices')
    create_new_devices (db)

    # Update devices info
    mylog('verbose','[Process Scan] Updating Devices Info')
    update_devices_data_from_scan (db)

    # Resolve devices names
    mylog('verbose','[Process Scan] Resolve devices names')
    update_devices_names(db)

    # Void false connection - disconnections
    mylog('verbose','[Process Scan] Voiding false (ghost) disconnections')
    void_ghost_disconnections (db)

    # Pair session events (Connection / Disconnection)
    mylog('verbose','[Process Scan] Pairing session events (connection / disconnection) ')
    pair_sessions_events(db)

    # Sessions snapshot
    mylog('verbose','[Process Scan] Creating sessions snapshot')
    create_sessions_snapshot (db)

    # Online history
    mylog('verbose','[Process Scan] Inserting scan results into Online_History')
    insertOnlineHistory(db)

    # Skip repeated notifications
    mylog('verbose','[Process Scan] Skipping repeated notifications')
    skip_repeated_notifications (db)

    # Commit changes
    db.commitDB()

    # moved plugin execution to the main loop
    # if ENABLE_PLUGINS:
    #     run_plugin_scripts(db,'always_after_scan')

#-------------------------------------------------------------------------------
def query_ScanCycle_Data (db, pOpenCloseDB = False, cycle = 1):
    # Query Data
    db.sql.execute ("""SELECT cic_arpscanCycles, cic_EveryXmin
                       FROM ScanCycles
                       WHERE cic_ID = ? """, (cycle,))
    sqlRow = db.sql.fetchone()

    # Return Row
    return sqlRow

#-------------------------------------------------------------------------------
def void_ghost_disconnections (db):
    sql = db.sql #TO-DO
    startTime = timeNow()
    # Void connect ghost events (a disconnect event exists in the last X min.)
    mylog('debug','[Void Ghost Con] - 1 Connect ghost events')
    sql.execute ("""UPDATE Events SET eve_PairEventRowid = Null,
                        eve_EventType = 'VOIDED - ' || eve_EventType
                    WHERE eve_MAC != 'Internet'
                      AND eve_EventType = 'Connected'
                      AND eve_DateTime = ?
                      AND eve_MAC IN (
                          SELECT Events.eve_MAC
                          FROM CurrentScan, Devices, ScanCycles, Events
                          WHERE cur_ScanCycle = ?
                            AND dev_MAC = cur_MAC
                            AND dev_ScanCycle = cic_ID
                            AND cic_ID = cur_ScanCycle
                            AND eve_MAC = cur_MAC
                            AND eve_EventType = 'Disconnected'
                            AND eve_DateTime >=
                                DATETIME (?, '-' || cic_EveryXmin ||' minutes')
                      ) """,
                    (startTime, conf.cycle, startTime) )

    # Void connect paired events
    mylog('debug','[Void Ghost Con] - 2 Paired events')
    sql.execute ("""UPDATE Events SET eve_PairEventRowid = Null
                    WHERE eve_MAC != 'Internet'
                      AND eve_PairEventRowid IN (
                          SELECT Events.RowID
                          FROM CurrentScan, Devices, ScanCycles, Events
                          WHERE cur_ScanCycle = ?
                            AND dev_MAC = cur_MAC
                            AND dev_ScanCycle = cic_ID
                            AND cic_ID = cur_ScanCycle
                            AND eve_MAC = cur_MAC
                            AND eve_EventType = 'Disconnected'
                            AND eve_DateTime >=
                                DATETIME (?, '-' || cic_EveryXmin ||' minutes')
                      ) """,
                    (conf.cycle, startTime) )

    # Void disconnect ghost events
    mylog('debug','[Void Ghost Con] - 3 Disconnect ghost events')
    sql.execute ("""UPDATE Events SET eve_PairEventRowid = Null,
                        eve_EventType = 'VOIDED - '|| eve_EventType
                    WHERE eve_MAC != 'Internet'
                      AND ROWID IN (
                          SELECT Events.RowID
                          FROM CurrentScan, Devices, ScanCycles, Events
                          WHERE cur_ScanCycle = ?
                            AND dev_MAC = cur_MAC
                            AND dev_ScanCycle = cic_ID
                            AND cic_ID = cur_ScanCycle
                            AND eve_MAC = cur_MAC
                            AND eve_EventType = 'Disconnected'
                            AND eve_DateTime >=
                                DATETIME (?, '-' || cic_EveryXmin ||' minutes')
                      ) """,
                    (conf.cycle, startTime) )
    mylog('debug','[Void Ghost Con] Void Ghost Connections end')
    db.commitDB()

#-------------------------------------------------------------------------------
def pair_sessions_events (db):
    sql = db.sql #TO-DO

    # NOT NECESSARY FOR INCREMENTAL UPDATE
    # print_log ('Pair session - 1 Clean')
    # sql.execute ("""UPDATE Events
    #                 SET eve_PairEventRowid = NULL
    #                 WHERE eve_EventType IN ('New Device', 'Connected')
    #              """ )

    # Pair Connection / New Device events
    mylog('debug','[Pair Session] - 1 Connections / New Devices')
    sql.execute ("""UPDATE Events
                    SET eve_PairEventRowid =
                        (SELECT ROWID
                         FROM Events AS EVE2
                         WHERE EVE2.eve_EventType IN ('New Device', 'Connected',
                             'Device Down', 'Disconnected')
                           AND EVE2.eve_MAC = Events.eve_MAC
                           AND EVE2.eve_Datetime > Events.eve_DateTime
                         ORDER BY EVE2.eve_DateTime ASC LIMIT 1)
                    WHERE eve_EventType IN ('New Device', 'Connected')
                      AND eve_PairEventRowid IS NULL
                 """ )

    # Pair Disconnection / Device Down
    mylog('debug','[Pair Session] - 2 Disconnections')
    sql.execute ("""UPDATE Events
                    SET eve_PairEventRowid =
                        (SELECT ROWID
                         FROM Events AS EVE2
                         WHERE EVE2.eve_PairEventRowid = Events.ROWID)
                    WHERE eve_EventType IN ('Device Down', 'Disconnected')
                      AND eve_PairEventRowid IS NULL
                 """ )
    mylog('debug','[Pair Session] Pair session end')

    db.commitDB()

#-------------------------------------------------------------------------------
def create_sessions_snapshot (db):
    sql = db.sql #TO-DO

    # Clean the sessions snapshot
    mylog('debug','[Sessions Snapshot] - 1 Clean')
    sql.execute ("DELETE FROM SESSIONS" )

    # Insert sessions
    mylog('debug','[Sessions Snapshot] - 2 Insert')
    sql.execute ("""INSERT INTO Sessions
                    SELECT * FROM Convert_Events_to_Sessions""" )

    mylog('debug','[Sessions Snapshot] Sessions end')
    db.commitDB()

#-------------------------------------------------------------------------------
def insert_events (db):
    sql = db.sql #TO-DO
    startTime = timeNow()

    # Check device down
    mylog('debug','[Events] - 1 - Devices down')
    sql.execute ("""INSERT INTO Events (eve_MAC, eve_IP, eve_DateTime,
                        eve_EventType, eve_AdditionalInfo,
                        eve_PendingAlertEmail)
                    SELECT dev_MAC, dev_LastIP, ?, 'Device Down', '', 1
                    FROM Devices
                    WHERE dev_AlertDeviceDown = 1
                      AND dev_PresentLastScan = 1
                      AND dev_ScanCycle = ?
                      AND NOT EXISTS (SELECT 1 FROM CurrentScan
                                      WHERE dev_MAC = cur_MAC
                                        AND dev_ScanCycle = cur_ScanCycle) """,
                    (startTime, conf.cycle) )

    # Check new connections
    mylog('debug','[Events] - 2 - New Connections')
    sql.execute ("""INSERT INTO Events (eve_MAC, eve_IP, eve_DateTime,
                        eve_EventType, eve_AdditionalInfo,
                        eve_PendingAlertEmail)
                    SELECT cur_MAC, cur_IP, ?, 'Connected', '', dev_AlertEvents
                    FROM Devices, CurrentScan
                    WHERE dev_MAC = cur_MAC AND dev_ScanCycle = cur_ScanCycle
                      AND dev_PresentLastScan = 0
                      AND dev_ScanCycle = ? """,
                    (startTime, conf.cycle) )

    # Check disconnections
    mylog('debug','[Events] - 3 - Disconnections')
    sql.execute ("""INSERT INTO Events (eve_MAC, eve_IP, eve_DateTime,
                        eve_EventType, eve_AdditionalInfo,
                        eve_PendingAlertEmail)
                    SELECT dev_MAC, dev_LastIP, ?, 'Disconnected', '',
                        dev_AlertEvents
                    FROM Devices
                    WHERE dev_AlertDeviceDown = 0
                      AND dev_PresentLastScan = 1
                      AND dev_ScanCycle = ?
                      AND NOT EXISTS (SELECT 1 FROM CurrentScan
                                      WHERE dev_MAC = cur_MAC
                                        AND dev_ScanCycle = cur_ScanCycle) """,
                    (startTime, conf.cycle) )

    # Check IP changes
    mylog('debug','[Events] - 4 - IP Changes')
    sql.execute ("""INSERT INTO Events (eve_MAC, eve_IP, eve_DateTime,
                        eve_EventType, eve_AdditionalInfo,
                        eve_PendingAlertEmail)
                    SELECT cur_MAC, cur_IP, ?, 'IP Changed',
                        'Previous IP: '|| dev_LastIP, dev_AlertEvents
                    FROM Devices, CurrentScan
                    WHERE dev_MAC = cur_MAC AND dev_ScanCycle = cur_ScanCycle
                      AND dev_ScanCycle = ?
                      AND dev_LastIP <> cur_IP """,
                    (startTime, conf.cycle) )
    mylog('debug','[Events] - Events end')

588
pialert/plugin.py
Executable file
@@ -0,0 +1,588 @@

import os
import json
import subprocess
import datetime
from collections import namedtuple

# pialert modules
import conf
from const import pluginsPath, logPath
from logger import mylog
from helper import timeNow, updateState, get_file_content, write_file
from api import update_api

#-------------------------------------------------------------------------------
def run_plugin_scripts(db, runType):

    # Header
    updateState(db,"Run: Plugins")

    mylog('debug', ['[Plugins] Check if any plugins need to be executed on run type: ', runType])

    for plugin in conf.plugins:

        shouldRun = False

        set = get_plugin_setting(plugin, "RUN")
        if set != None and set['value'] == runType:
            if runType != "schedule":
                shouldRun = True
            elif runType == "schedule":
                # run if the scheduled time is overdue
                prefix = plugin["unique_prefix"]

                # check if any schedule contains a unique plugin prefix matching the current plugin
                for schd in conf.mySchedules:
                    if schd.service == prefix:
                        # Check if the schedule is overdue
                        shouldRun = schd.runScheduleCheck()
                        if shouldRun:
                            # note the last time the scheduled plugin run was executed
                            schd.last_run = timeNow()

        if shouldRun:

            print_plugin_info(plugin, ['display_name'])
            mylog('debug', ['[Plugins] CMD: ', get_plugin_setting(plugin, "CMD")["value"]])
            execute_plugin(db, plugin)

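# Illustrative sketch (not part of the original file): the RUN setting that
# run_plugin_scripts() checks lives in each plugin's config.json. A minimal
# entry could look like this (the object shape follows the settings accessors
# used in this module; the concrete values are hypothetical):
#
#   { "function": "RUN", "type": "selecttext", "value": "schedule" }
#
# With value == "schedule", the plugin only executes once its matching entry
# in conf.mySchedules (looked up via unique_prefix) reports it as overdue.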
#-------------------------------------------------------------------------------
def get_plugins_configs():

    pluginsList = []

    # only top-level directories are required, so no os.walk loop is needed
    dirs = next(os.walk(pluginsPath))[1]

    for d in dirs:                   # loop over plugin directories
        if not d.startswith( "__" ): # ignore __pycache__
            pluginsList.append(json.loads(get_file_content(pluginsPath + "/" + d + '/config.json')))

    return pluginsList

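# Illustrative sketch (not part of the original file): get_plugins_configs()
# assumes a layout with one config.json per plugin folder, e.g. (folder names
# hypothetical):
#
#   front/plugins/
#       dhcp_leases/config.json
#       unifi_import/config.json
#       __pycache__/             <- skipped by the startswith("__") check
#
# Judging by the accessors in this module, each config.json carries at least
# "unique_prefix", "code_name", "data_source", "settings" and localized
# strings such as "display_name".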
#-------------------------------------------------------------------------------
def print_plugin_info(plugin, elements = ['display_name']):

    mylog('verbose', ['[Plugins] ---------------------------------------------'])

    for el in elements:
        res = get_plugin_string(plugin, el)
        mylog('verbose', ['[Plugins] ', el ,': ', res])


#-------------------------------------------------------------------------------
# Gets the whole setting object
def get_plugin_setting(plugin, function_key):

    result = None

    for set in plugin['settings']:
        if set["function"] == function_key:
            result = set

    if result == None:
        mylog('none', ['[Plugins] Setting with "function":"', function_key, '" is missing in plugin: ', get_plugin_string(plugin, 'display_name')])

    return result

#-------------------------------------------------------------------------------
# Returns the whole setting tuple
def get_setting(key):
    result = None
    # index order: key, name, desc, inputtype, options, regex, result, group, events
    for set in conf.mySettings:
        if set[0] == key:
            result = set

    if result is None:
        mylog('info', [' Error - setting_missing - Setting not found for key: ', key])
        mylog('info', [' Error - logging the settings into file: ', logPath + '/setting_missing.json'])
        write_file (logPath + '/setting_missing.json', json.dumps({ 'data' : conf.mySettings}))

    return result

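# Illustrative sketch (not part of the original file): given the index order
# documented above, a conf.mySettings entry is a plain tuple, e.g. (values
# hypothetical):
#
#   ('SCAN_SUBNETS',                          # 0 key
#    'Subnets to scan',                       # 1 name
#    'Comma separated subnets',               # 2 desc
#    'list',                                  # 3 inputtype
#    '',                                      # 4 options
#    '',                                      # 5 regex
#    ['192.168.1.0/24 --interface=eth0'],     # 6 result (the actual value)
#    'Scan',                                  # 7 group
#    [])                                      # 8 events
#
# get_setting_value() further down returns index 6 of this tuple.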
#-------------------------------------------------------------------------------
# Get localized string value on the top JSON depth, not recursive
def get_plugin_string(props, el):

    result = ''

    if el in props['localized']:
        for val in props[el]:
            if val['language_code'] == 'en_us':
                result = val['string']

        if result == '':
            result = 'en_us string missing'

    else:
        result = props[el]

    return result


#-------------------------------------------------------------------------------
# Executes the plugin command specified in the setting with the function "CMD"
def execute_plugin(db, plugin):
    sql = db.sql

    # ------- necessary settings check --------
    set = get_plugin_setting(plugin, "CMD")

    # handle a missing "function":"CMD" setting
    if set == None:
        return

    set_CMD = set["value"]

    set = get_plugin_setting(plugin, "RUN_TIMEOUT")

    # handle a missing "function":"RUN_TIMEOUT" setting by defaulting to 10s
    if set == None:
        set_RUN_TIMEOUT = 10
    else:
        set_RUN_TIMEOUT = set["value"]

    mylog('debug', ['[Plugins] Timeout: ', set_RUN_TIMEOUT])

    # Prepare custom params
    params = []

    if "params" in plugin:
        for param in plugin["params"]:
            resolved = ""

            # Get the setting value
            if param["type"] == "setting":
                resolved = get_setting(param["value"])

                if resolved != None:
                    resolved = plugin_param_from_glob_set(resolved)

            # Get the SQL result
            if param["type"] == "sql":
                resolved = flatten_array(db.get_sql_array(param["value"]))

            if resolved == None:
                mylog('none', ['[Plugins] The parameter "name":"', param["name"], '" was resolved as None'])

            else:
                params.append( [param["name"], resolved] )


    # build SQL query parameters to insert into the DB
    sqlParams = []

    # python-script
    if plugin['data_source'] == 'python-script':
        # ------- prepare params --------
        # prepare the command from plugin settings and custom parameters
        command = resolve_wildcards_arr(set_CMD.split(), params)

        # Execute the command
        mylog('verbose', ['[Plugins] Executing: ', set_CMD])
        mylog('debug', ['[Plugins] Resolved : ', command])

        try:
            # try running a subprocess with a forced timeout in case the subprocess hangs
            output = subprocess.check_output (command, universal_newlines=True, stderr=subprocess.STDOUT, timeout=(set_RUN_TIMEOUT))
        except subprocess.CalledProcessError as e:
            # An error occurred, handle it
            mylog('none', [e.output])
            mylog('none', ['[Plugins] Error - enable LOG_LEVEL=debug and check logs'])
        except subprocess.TimeoutExpired as timeErr:
            mylog('none', ['[Plugins] TIMEOUT - the process was forcefully terminated because the timeout was reached'])

        # check the last run output
        f = open(pluginsPath + '/' + plugin["code_name"] + '/last_result.log', 'r+')
        newLines = f.read().split('\n')
        f.close()

        # cleanup - select only lines containing a separator to filter out unnecessary data
        newLines = list(filter(lambda x: '|' in x, newLines))

        for line in newLines:
            columns = line.split("|")
            # There always have to be 9 columns
            if len(columns) == 9:
                sqlParams.append((plugin["unique_prefix"], columns[0], columns[1], 'null', columns[2], columns[3], columns[4], columns[5], columns[6], 0, columns[7], 'null', columns[8]))
            else:
                mylog('none', ['[Plugins]: Skipped invalid line in the output: ', line])

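    # Illustrative sketch (not part of the original file): a valid
    # last_result.log line is 9 pipe-separated values, mapped above to
    # Object_PrimaryID, Object_SecondaryID, DateTimeChanged, Watched_Value1-4,
    # Extra and ForeignKey. A hypothetical rogue-DHCP plugin line:
    #
    #   192.168.1.1|02:42:ac:11:00:02|2023-05-30 08:00:00|dhcp-server|up|0|0|extra-info|02:42:ac:11:00:02
    #
    # Anything that does not split into exactly 9 columns is skipped and logged.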
    # pialert-db-query
    if plugin['data_source'] == 'pialert-db-query':
        # replace single-quote wildcards
        q = set_CMD.replace("{s-quote}", '\'')

        # Execute the query
        mylog('verbose', ['[Plugins] Executing: ', q])

        # set_CMD should contain a SQL query
        arr = db.get_sql_array (q)

        for row in arr:
            # There always have to be 9 columns
            if len(row) == 9 and (row[0] in ['','null']) == False :
                sqlParams.append((plugin["unique_prefix"], row[0], handle_empty(row[1]), 'null', row[2], row[3], row[4], handle_empty(row[5]), handle_empty(row[6]), 0, row[7], 'null', row[8]))
            else:
                mylog('none', ['[Plugins]: Skipped invalid SQL result'])


    # check if the subprocess / SQL query failed / there was no valid output
    if len(sqlParams) == 0:
        mylog('none', ['[Plugins] No output received from the plugin ', plugin["unique_prefix"], ' - enable LOG_LEVEL=debug and check logs'])
        return
    else:
        mylog('verbose', ['[Plugins]: SUCCESS, received ', len(sqlParams), ' entries'])

    # process results if any
    if len(sqlParams) > 0:
        sql.executemany ("""INSERT INTO Plugins_Events ("Plugin", "Object_PrimaryID", "Object_SecondaryID", "DateTimeCreated", "DateTimeChanged", "Watched_Value1", "Watched_Value2", "Watched_Value3", "Watched_Value4", "Status" ,"Extra", "UserData", "ForeignKey") VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)""", sqlParams)
        db.commitDB()
        sql.executemany ("""INSERT INTO Plugins_History ("Plugin", "Object_PrimaryID", "Object_SecondaryID", "DateTimeCreated", "DateTimeChanged", "Watched_Value1", "Watched_Value2", "Watched_Value3", "Watched_Value4", "Status" ,"Extra", "UserData", "ForeignKey") VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)""", sqlParams)
        db.commitDB()

        process_plugin_events(db, plugin)

        # update API endpoints
        update_api(db, False, ["plugins_events","plugins_objects"])

#-------------------------------------------------------------------------------
def custom_plugin_decoder(pluginDict):
    return namedtuple('X', pluginDict.keys())(*pluginDict.values())

#-------------------------------------------------------------------------------
# Handle an empty value
def handle_empty(value):
    if value == '' or value is None:
        value = 'null'

    return value


#-------------------------------------------------------------------------------
# Flattens a setting to make it passable to a script
def plugin_param_from_glob_set(globalSetting):

    setVal = globalSetting[6] # setting value
    setTyp = globalSetting[3] # setting type

    noConversion    = ['text', 'integer', 'boolean', 'password', 'readonly', 'selectinteger', 'selecttext' ]
    arrayConversion = ['multiselect', 'list']

    if setTyp in noConversion:
        return setVal

    if setTyp in arrayConversion:
        return flatten_array(setVal)

#-------------------------------------------------------------------------------
# Gets the setting value
def get_plugin_setting_value(plugin, function_key):

    resultObj = get_plugin_setting(plugin, function_key)

    if resultObj != None:
        return resultObj["value"]

    return None

#-------------------------------------------------------------------------------
# Returns the setting value
def get_setting_value(key):

    set = get_setting(key)

    if set is not None:

        setVal = set[6] # setting value

        return setVal

    return ''

#-------------------------------------------------------------------------------
def flatten_array(arr):

    tmp = ''

    mylog('debug', arr)

    for arrayItem in arr:
        # only one-column flattening is supported
        if isinstance(arrayItem, list):
            arrayItem = str(arrayItem[0])

        tmp += arrayItem + ','

    tmp = tmp.replace("'","") # No single quotes allowed

    return tmp[:-1] # Remove the last comma ','


#-------------------------------------------------------------------------------
# Replace {wildcards} with parameters
def resolve_wildcards_arr(commandArr, params):

    mylog('debug', ['[Plugins]: Pre-Resolved CMD: '] + commandArr)

    for param in params:

        i = 0

        for comPart in commandArr:

            commandArr[i] = comPart.replace('{' + param[0] + '}', param[1]).replace('{s-quote}',"'")

            i += 1

    return commandArr

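# Illustrative sketch (not part of the original file): with a params list of
# [['subnets', '192.168.1.0/24']], resolve_wildcards_arr() turns
#
#   ['python3', '/plugin/script.py', 'subnets={subnets}']
#
# into
#
#   ['python3', '/plugin/script.py', 'subnets=192.168.1.0/24']
#
# and {s-quote} placeholders become literal single quotes. The path and
# parameter name here are hypothetical.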
#-------------------------------------------------------------------------------
# Combine plugin objects; keep user-defined values, the created time, the changed time if nothing changed, and the index
def combine_plugin_objects(old, new):

    new.userData = old.userData
    new.index    = old.index
    new.created  = old.created

    # Keep the changed time if nothing changed
    if new.status in ['watched-not-changed']:
        new.changed = old.changed

    # return the new object, with some of the old values
    return new

#-------------------------------------------------------------------------------
# Check if watched values changed for the given plugin
def process_plugin_events(db, plugin):
    sql = db.sql

    pluginPref = plugin["unique_prefix"]

    mylog('debug', ['[Plugins] Processing : ', pluginPref])

    plugObjectsArr = db.get_sql_array ("SELECT * FROM Plugins_Objects where Plugin = '" + str(pluginPref)+"'")
    plugEventsArr  = db.get_sql_array ("SELECT * FROM Plugins_Events where Plugin = '" + str(pluginPref)+"'")

    pluginObjects = []
    pluginEvents  = []

    for obj in plugObjectsArr:
        pluginObjects.append(plugin_object_class(plugin, obj))

    existingPluginObjectsCount = len(pluginObjects)

    mylog('debug', ['[Plugins] Existing objects        : ', existingPluginObjectsCount])
    mylog('debug', ['[Plugins] New and existing events : ', len(plugEventsArr)])

    # set the status as new - will be changed later if conditions are fulfilled, e.g. an entry is found
    for eve in plugEventsArr:
        tmpObject = plugin_object_class(plugin, eve)
        tmpObject.status = "new"
        pluginEvents.append(tmpObject)

    # Update the status to "exists"
    index = 0
    for tmpObjFromEvent in pluginEvents:

        # compare the hash of the IDs for uniqueness
        # (bug fix: compare against the current event, not the loop variable of the previous loop)
        if any(x.idsHash == tmpObjFromEvent.idsHash for x in pluginObjects):
            mylog('debug', ['[Plugins] Found existing object'])
            pluginEvents[index].status = "exists"
        index += 1

    # Loop through the events and update the ones that exist to determine if the watched columns changed
    index = 0
    for tmpObjFromEvent in pluginEvents:

        if tmpObjFromEvent.status == "exists":

            # compare the hash of the watched columns of the matching object for uniqueness
            # (bug fix: match the object by idsHash and compare the current event's watchedHash)
            if any(x.idsHash == tmpObjFromEvent.idsHash and x.watchedHash != tmpObjFromEvent.watchedHash for x in pluginObjects):
                pluginEvents[index].status = "watched-changed"
            else:
                pluginEvents[index].status = "watched-not-changed"
        index += 1

    # Merge existing plugin objects with newly discovered ones and update the existing ones with new values
    for eveObj in pluginEvents:
        if eveObj.status == 'new':
            pluginObjects.append(eveObj)
        else:
            index = 0
            for plugObj in pluginObjects:
                # find the corresponding object for the event and merge
                if plugObj.idsHash == eveObj.idsHash:
                    pluginObjects[index] = combine_plugin_objects(plugObj, eveObj)

                index += 1

    # Update the DB
    # ----------------------------
    # Update the Plugins_Objects table
    for plugObj in pluginObjects:

        createdTime = plugObj.created

        if plugObj.status == 'new':

            createdTime = plugObj.changed

            sql.execute ("INSERT INTO Plugins_Objects (Plugin, Object_PrimaryID, Object_SecondaryID, DateTimeCreated, DateTimeChanged, Watched_Value1, Watched_Value2, Watched_Value3, Watched_Value4, Status, Extra, UserData, ForeignKey) VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?)", (plugObj.pluginPref, plugObj.primaryId , plugObj.secondaryId , createdTime, plugObj.changed , plugObj.watched1 , plugObj.watched2 , plugObj.watched3 , plugObj.watched4 , plugObj.status , plugObj.extra, plugObj.userData, plugObj.foreignKey ))
        else:
            sql.execute (f"UPDATE Plugins_Objects set Plugin = '{plugObj.pluginPref}', DateTimeChanged = '{plugObj.changed}', Watched_Value1 = '{plugObj.watched1}', Watched_Value2 = '{plugObj.watched2}', Watched_Value3 = '{plugObj.watched3}', Watched_Value4 = '{plugObj.watched4}', Status = '{plugObj.status}', Extra = '{plugObj.extra}', ForeignKey = '{plugObj.foreignKey}' WHERE \"Index\" = {plugObj.index}")

    # Update the Plugins_Events table with the new statuses
    sql.execute (f'DELETE FROM Plugins_Events where Plugin = "{pluginPref}"')

    for plugObj in pluginEvents:

        createdTime = plugObj.created

        # use the same datetime for created and changed if this is a new entry
        if plugObj.status == 'new':
            createdTime = plugObj.changed

        # insert events only if they are to be reported on
        if plugObj.status in get_plugin_setting_value(plugin, "REPORT_ON"):

            sql.execute ("INSERT INTO Plugins_Events (Plugin, Object_PrimaryID, Object_SecondaryID, DateTimeCreated, DateTimeChanged, Watched_Value1, Watched_Value2, Watched_Value3, Watched_Value4, Status, Extra, UserData, ForeignKey) VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?)", (plugObj.pluginPref, plugObj.primaryId , plugObj.secondaryId , createdTime, plugObj.changed , plugObj.watched1 , plugObj.watched2 , plugObj.watched3 , plugObj.watched4 , plugObj.status , plugObj.extra, plugObj.userData, plugObj.foreignKey ))

    # Perform database table mapping if enabled for the plugin
    if len(pluginEvents) > 0 and "mapped_to_table" in plugin:

        sqlParams = []

        dbTable = plugin['mapped_to_table']

        mylog('debug', ['[Plugins] Mapping objects to database table: ', dbTable])

        # collect all columns to be mapped
        mappedCols = []
        columnsStr = ''
        valuesStr  = ''

        for clmn in plugin['database_column_definitions']:
            if 'mapped_to_column' in clmn:
                mappedCols.append(clmn)
                columnsStr = f'{columnsStr}, "{clmn["mapped_to_column"]}"'
                valuesStr  = f'{valuesStr}, ?'

        if len(columnsStr) > 0:
            columnsStr = columnsStr[1:] # remove the first ','
            valuesStr  = valuesStr[1:]  # remove the first ','

        # map the column names to plugin object event values
        for plgEv in pluginEvents:

            tmpList = []

            for col in mappedCols:
                if col['column'] == 'Index':
                    tmpList.append(plgEv.index)
                elif col['column'] == 'Plugin':
                    tmpList.append(plgEv.pluginPref)
                elif col['column'] == 'Object_PrimaryID':
                    tmpList.append(plgEv.primaryId)
                elif col['column'] == 'Object_SecondaryID':
                    tmpList.append(plgEv.secondaryId)
                elif col['column'] == 'DateTimeCreated':
                    tmpList.append(plgEv.created)
                elif col['column'] == 'DateTimeChanged':
                    tmpList.append(plgEv.changed)
                elif col['column'] == 'Watched_Value1':
                    tmpList.append(plgEv.watched1)
                elif col['column'] == 'Watched_Value2':
                    tmpList.append(plgEv.watched2)
                elif col['column'] == 'Watched_Value3':
                    tmpList.append(plgEv.watched3)
                elif col['column'] == 'Watched_Value4':
                    tmpList.append(plgEv.watched4)
                elif col['column'] == 'UserData':
                    tmpList.append(plgEv.userData)
                elif col['column'] == 'Extra':
                    tmpList.append(plgEv.extra)
                elif col['column'] == 'Status':
                    tmpList.append(plgEv.status)

            sqlParams.append(tuple(tmpList))

        q = f'INSERT into {dbTable} ({columnsStr}) VALUES ({valuesStr})'

        mylog('debug', ['[Plugins] SQL query for mapping: ', q ])

        sql.executemany (q, sqlParams)

    db.commitDB()

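# Illustrative sketch (not part of the original file): the mapping above is
# driven by config.json entries along these lines (values hypothetical), which
# would copy each plugin event's primary ID and Watched_Value1 into the
# cur_MAC and cur_IP columns of the CurrentScan table:
#
#   "mapped_to_table": "CurrentScan",
#   "database_column_definitions": [
#       { "column": "Object_PrimaryID", "mapped_to_column": "cur_MAC" },
#       { "column": "Watched_Value1",   "mapped_to_column": "cur_IP"  }
#   ]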
#-------------------------------------------------------------------------------
class plugin_object_class:
    def __init__(self, plugin, objDbRow):
        self.index       = objDbRow[0]
        self.pluginPref  = objDbRow[1]
        self.primaryId   = objDbRow[2]
        self.secondaryId = objDbRow[3]
        self.created     = objDbRow[4]
        self.changed     = objDbRow[5]
        self.watched1    = objDbRow[6]
        self.watched2    = objDbRow[7]
        self.watched3    = objDbRow[8]
        self.watched4    = objDbRow[9]
        self.status      = objDbRow[10]
        self.extra       = objDbRow[11]
        self.userData    = objDbRow[12]
        self.foreignKey  = objDbRow[13]

        self.idsHash = str(self.primaryId) + str(self.secondaryId)

        self.watchedClmns = []
        self.watchedIndxs = []

        setObj = get_plugin_setting(plugin, 'WATCH')

        indexNameColumnMapping = [(6, 'Watched_Value1' ), (7, 'Watched_Value2' ), (8, 'Watched_Value3' ), (9, 'Watched_Value4' )]

        if setObj is not None:

            self.watchedClmns = setObj["value"]

            for clmName in self.watchedClmns:
                for mapping in indexNameColumnMapping:
                    # bug fix: compare against the individual mapping, not the whole list
                    if clmName == mapping[1]:
                        self.watchedIndxs.append(mapping[0])

        tmp = ''
        for indx in self.watchedIndxs:
            tmp += str(objDbRow[indx])

        self.watchedHash = str(hash(tmp))

8
pialert/publishers/__init__.py
Executable file
@@ -0,0 +1,8 @@

""" Publishers for Pi.Alert """

"""
each publisher exposes:

 - check_config () returning True / False
 - send (message) returning True / False
"""
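# Illustrative sketch (not part of the original file): a minimal publisher
# module following the interface described above; the service name and config
# variable are hypothetical:
#
#   import conf
#   from logger import mylog
#   from helper import noti_struc
#
#   def check_config():
#       if conf.EXAMPLE_URL == '':
#           mylog('none', ['[Check Config] Error: Example service not set up correctly.'])
#           return False
#       return True
#
#   def send(msg: noti_struc):
#       # deliver msg.text / msg.html / msg.json to the service
#       return True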
42
pialert/publishers/apprise.py
Executable file
@@ -0,0 +1,42 @@

import json
import subprocess

import conf
from helper import noti_struc
from logger import logResult, mylog

#-------------------------------------------------------------------------------
def check_config():
    if conf.APPRISE_URL == '' or conf.APPRISE_HOST == '':
        mylog('none', ['[Check Config] Error: Apprise service not set up correctly. Check your pialert.conf APPRISE_* variables.'])
        return False
    else:
        return True

#-------------------------------------------------------------------------------
def send (msg: noti_struc):
    html = msg.html
    text = msg.text

    # Define an Apprise-compatible payload (https://github.com/caronc/apprise-api#stateless-solution)
    payload = html

    if conf.APPRISE_PAYLOAD == 'text':
        payload = text

    _json_payload={
        "urls": conf.APPRISE_URL,
        "title": "Pi.Alert Notifications",
        "format": conf.APPRISE_PAYLOAD,
        "body": payload
    }

    try:
        # try running a subprocess
        p = subprocess.Popen(["curl","-i","-X", "POST" ,"-H", "Content-Type:application/json" ,"-d", json.dumps(_json_payload), conf.APPRISE_HOST], stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
        stdout, stderr = p.communicate()
        # write stdout and stderr into .log files for debugging if needed
        logResult (stdout, stderr) # TO-DO should be changed to mylog
    except subprocess.CalledProcessError as e:
        # An error occurred, handle it
        mylog('none', [e.output])
90
pialert/publishers/email.py
Executable file
@@ -0,0 +1,90 @@

""" Pi.Alert module to send notification emails """

from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
import smtplib

import conf
from helper import hide_email, noti_struc
from logger import mylog, print_log

#-------------------------------------------------------------------------------
def check_config ():
    if conf.SMTP_SERVER == '' or conf.REPORT_FROM == '' or conf.REPORT_TO == '':
        mylog('none', ['[Email Check Config] Error: Email service not set up correctly. Check your pialert.conf SMTP_*, REPORT_FROM and REPORT_TO variables.'])
        return False
    else:
        return True

#-------------------------------------------------------------------------------
def send (msg: noti_struc):

    pText = msg.text
    pHTML = msg.html

    mylog('debug', '[Send Email] REPORT_TO: ' + hide_email(str(conf.REPORT_TO)) + ' SMTP_USER: ' + hide_email(str(conf.SMTP_USER)))

    # Compose the email
    msg = MIMEMultipart('alternative')
    msg['Subject'] = 'Pi.Alert Report'
    msg['From'] = conf.REPORT_FROM
    msg['To'] = conf.REPORT_TO
    msg.attach (MIMEText (pText, 'plain'))
    msg.attach (MIMEText (pHTML, 'html'))

    failedAt = ''

    failedAt = print_log ('SMTP try')

    try:
        # Send the mail
        failedAt = print_log('Trying to open connection to ' + str(conf.SMTP_SERVER) + ':' + str(conf.SMTP_PORT))

        if conf.SMTP_FORCE_SSL:
            failedAt = print_log('SMTP_FORCE_SSL == True so using .SMTP_SSL()')
            if conf.SMTP_PORT == 0:
                failedAt = print_log('SMTP_PORT == 0 so sending .SMTP_SSL(SMTP_SERVER)')
                smtp_connection = smtplib.SMTP_SSL(conf.SMTP_SERVER)
            else:
                failedAt = print_log('SMTP_PORT != 0 so sending .SMTP_SSL(SMTP_SERVER, SMTP_PORT)')
                smtp_connection = smtplib.SMTP_SSL(conf.SMTP_SERVER, conf.SMTP_PORT)

        else:
            failedAt = print_log('SMTP_FORCE_SSL == False so using .SMTP()')
            if conf.SMTP_PORT == 0:
                failedAt = print_log('SMTP_PORT == 0 so sending .SMTP(SMTP_SERVER)')
                smtp_connection = smtplib.SMTP (conf.SMTP_SERVER)
            else:
                failedAt = print_log('SMTP_PORT != 0 so sending .SMTP(SMTP_SERVER, SMTP_PORT)')
                smtp_connection = smtplib.SMTP (conf.SMTP_SERVER, conf.SMTP_PORT)

        failedAt = print_log('Setting SMTP debug level')

        # Log the communication between the SMTP server and client if the level is set to debug
        if conf.LOG_LEVEL == 'debug':
            smtp_connection.set_debuglevel(1)

        failedAt = print_log( 'Sending .ehlo()')
        smtp_connection.ehlo()

        if not conf.SMTP_SKIP_TLS:
            failedAt = print_log('SMTP_SKIP_TLS == False so sending .starttls()')
            smtp_connection.starttls()
            failedAt = print_log('SMTP_SKIP_TLS == False so sending .ehlo()')
            smtp_connection.ehlo()
        if not conf.SMTP_SKIP_LOGIN:
            failedAt = print_log('SMTP_SKIP_LOGIN == False so sending .login()')
            smtp_connection.login (conf.SMTP_USER, conf.SMTP_PASS)

        failedAt = print_log('Sending .sendmail()')
        smtp_connection.sendmail (conf.REPORT_FROM, conf.REPORT_TO, msg.as_string())
        smtp_connection.quit()
    except smtplib.SMTPAuthenticationError as e:
        mylog('none', ['  ERROR: Failed at - ', failedAt])
        mylog('none', ['  ERROR: Couldn\'t connect to the SMTP server (SMTPAuthenticationError), skipping Email (enable LOG_LEVEL=debug for more logging)'])
    except smtplib.SMTPServerDisconnected as e:
        mylog('none', ['  ERROR: Failed at - ', failedAt])
        mylog('none', ['  ERROR: Couldn\'t connect to the SMTP server (SMTPServerDisconnected), skipping Email (enable LOG_LEVEL=debug for more logging)'])

    mylog('debug', '[Send Email] Last executed - ' + str(failedAt))
245
pialert/publishers/mqtt.py
Executable file
@@ -0,0 +1,245 @@

import time
import re
from paho.mqtt import client as mqtt_client

import conf
from logger import mylog
from database import get_all_devices, get_device_stats
from helper import bytes_to_string, sanitize_string


#-------------------------------------------------------------------------------
# MQTT
#-------------------------------------------------------------------------------

#-------------------------------------------------------------------------------
def check_config():
    if conf.MQTT_BROKER == '' or conf.MQTT_PORT == '' or conf.MQTT_USER == '' or conf.MQTT_PASSWORD == '':
        mylog('none', ['[Check Config] Error: MQTT service not set up correctly. Check your pialert.conf MQTT_* variables.'])
        return False
    else:
        return True


#-------------------------------------------------------------------------------
class sensor_config:
    def __init__(self, deviceId, deviceName, sensorType, sensorName, icon):
        self.deviceId   = deviceId
        self.deviceName = deviceName
        self.sensorType = sensorType
        self.sensorName = sensorName
        self.icon       = icon
        self.hash       = str(hash(str(deviceId) + str(deviceName)+ str(sensorType)+ str(sensorName)+ str(icon)))

#-------------------------------------------------------------------------------
def publish_mqtt(client, topic, message):
    status = 1
    while status != 0:
        result = client.publish(
            topic=topic,
            payload=message,
            qos=conf.MQTT_QOS,
            retain=True,
        )

        status = result[0]

        if status != 0:
            mylog('info', ["Waiting to reconnect to MQTT broker"])
            time.sleep(0.1)
    return True

#-------------------------------------------------------------------------------
def create_generic_device(client):

    deviceName = 'PiAlert'
    deviceId   = 'pialert'

    create_sensor(client, deviceId, deviceName, 'sensor', 'online', 'wifi-check')
    create_sensor(client, deviceId, deviceName, 'sensor', 'down', 'wifi-cancel')
    create_sensor(client, deviceId, deviceName, 'sensor', 'all', 'wifi')
    create_sensor(client, deviceId, deviceName, 'sensor', 'archived', 'wifi-lock')
    create_sensor(client, deviceId, deviceName, 'sensor', 'new', 'wifi-plus')
    create_sensor(client, deviceId, deviceName, 'sensor', 'unknown', 'wifi-alert')


#-------------------------------------------------------------------------------
def create_sensor(client, deviceId, deviceName, sensorType, sensorName, icon):

    new_sensor_config = sensor_config(deviceId, deviceName, sensorType, sensorName, icon)

    # check if the config is already in the list; if not, add it, otherwise skip
    is_unique = True

    for sensor in conf.mqtt_sensors:
        if sensor.hash == new_sensor_config.hash:
            is_unique = False
            break

    # publish if unique
    if is_unique:
        publish_sensor(client, new_sensor_config)


#-------------------------------------------------------------------------------
def publish_sensor(client, sensorConf):

    message = '{ \
                "name":"'+ sensorConf.deviceName +' '+sensorConf.sensorName+'", \
                "state_topic":"system-sensors/'+sensorConf.sensorType+'/'+sensorConf.deviceId+'/state", \
                "value_template":"{{value_json.'+sensorConf.sensorName+'}}", \
                "unique_id":"'+sensorConf.deviceId+'_sensor_'+sensorConf.sensorName+'", \
                "device": \
                    { \
                        "identifiers": ["'+sensorConf.deviceId+'_sensor"], \
                        "manufacturer": "PiAlert", \
                        "name":"'+sensorConf.deviceName+'" \
                    }, \
                "icon":"mdi:'+sensorConf.icon+'" \
                }'

    topic='homeassistant/'+sensorConf.sensorType+'/'+sensorConf.deviceId+'/'+sensorConf.sensorName+'/config'

    # add the sensor to the global list to keep track of successfully added sensors
    if publish_mqtt(client, topic, message):
        # hack: delay adding to the queue in case the process is restarted and
        # previous publish processes were aborted (it takes ~2s to update a
        # sensor config on the broker)
        time.sleep(conf.MQTT_DELAY_SEC)
        conf.mqtt_sensors.append(sensorConf)

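# Illustrative sketch (not part of the original file): for a device sensor
# created as create_sensor(client, 'mac_02_42_ac_11_00_02', 'MyNAS', 'sensor',
# 'last_ip', 'ip-network'), publish_sensor() emits a Home Assistant MQTT
# discovery message roughly like this (device name and MAC hypothetical):
#
#   topic:   homeassistant/sensor/mac_02_42_ac_11_00_02/last_ip/config
#   payload: { "name": "MyNAS last_ip",
#              "state_topic": "system-sensors/sensor/mac_02_42_ac_11_00_02/state",
#              "value_template": "{{value_json.last_ip}}",
#              "unique_id": "mac_02_42_ac_11_00_02_sensor_last_ip",
#              "device": { "identifiers": ["mac_02_42_ac_11_00_02_sensor"],
#                          "manufacturer": "PiAlert", "name": "MyNAS" },
#              "icon": "mdi:ip-network" }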
#-------------------------------------------------------------------------------
def mqtt_create_client():
    def on_disconnect(client, userdata, rc):
        conf.mqtt_connected_to_broker = False

    def on_connect(client, userdata, flags, rc):

        if rc == 0:
            mylog('verbose', ["        Connected to broker"])
            conf.mqtt_connected_to_broker = True # Signal connection
        else:
            mylog('none', ["        Connection failed"])
            conf.mqtt_connected_to_broker = False


    client = mqtt_client.Client('PiAlert') # Set the connecting client ID
    client.username_pw_set(conf.MQTT_USER, conf.MQTT_PASSWORD)
    client.on_connect    = on_connect
    client.on_disconnect = on_disconnect
    client.connect(conf.MQTT_BROKER, conf.MQTT_PORT)
    client.loop_start()

    return client

#-------------------------------------------------------------------------------
def mqtt_start(db):

    if conf.mqtt_connected_to_broker == False:
        conf.mqtt_connected_to_broker = True
        conf.client = mqtt_create_client()

    client = conf.client

    # General stats

    # Create a generic device for the overall stats
    create_generic_device(client)

    # Get the data
    row = get_device_stats(db)

    columns = ["online","down","all","archived","new","unknown"]

    payload = ""

    # Update the values
    for column in columns:
        payload += '"'+column+'": ' + str(row[column]) +','

    # Publish (wrap into {} and remove the last ',' from above)
    publish_mqtt(client, "system-sensors/sensor/pialert/state",
                 '{ '+ payload[:-1] +' }'
                )

    # Specific devices

    # Get all devices
    devices = get_all_devices(db)

    sec_delay = len(devices) * int(conf.MQTT_DELAY_SEC)*5

    mylog('info', ["        Estimated delay: ", (sec_delay), 's ', '(', round(sec_delay/60,1) , 'min)' ])

    for device in devices:

        # Create devices in Home Assistant - send the config messages
        deviceId          = 'mac_' + device["dev_MAC"].replace(" ", "").replace(":", "_").lower()
        deviceNameDisplay = re.sub(r'[^a-zA-Z0-9-_\s]', '', device["dev_Name"])

        create_sensor(client, deviceId, deviceNameDisplay, 'sensor', 'last_ip', 'ip-network')
        create_sensor(client, deviceId, deviceNameDisplay, 'binary_sensor', 'is_present', 'wifi')
        create_sensor(client, deviceId, deviceNameDisplay, 'sensor', 'mac_address', 'folder-key-network')
        create_sensor(client, deviceId, deviceNameDisplay, 'sensor', 'is_new', 'bell-alert-outline')
        create_sensor(client, deviceId, deviceNameDisplay, 'sensor', 'vendor', 'cog')

        # update the device sensors in Home Assistant
        publish_mqtt(client, 'system-sensors/sensor/'+deviceId+'/state',
                     '{ \
                        "last_ip": "' + device["dev_LastIP"] +'", \
                        "is_new": "' + str(device["dev_NewDevice"]) +'", \
                        "vendor": "' + sanitize_string(device["dev_Vendor"]) +'", \
                        "mac_address": "' + str(device["dev_MAC"]) +'" \
                     }'
                    )

        publish_mqtt(client, 'system-sensors/binary_sensor/'+deviceId+'/state',
                     '{ \
                        "is_present": "' + to_binary_sensor(str(device["dev_PresentLastScan"])) +'"\
                     }'
                    )


#===============================================================================
# Home Assistant UTILs
#===============================================================================
def to_binary_sensor(input):
    # In HA a binary sensor returns ON or OFF
    result = "OFF"

    # bytestring
    if isinstance(input, str):
        if input == "1":
            result = "ON"
    elif isinstance(input, int):
        if input == 1:
            result = "ON"
    elif isinstance(input, bool):
        if input == True:
            result = "ON"
    elif isinstance(input, bytes):
        if bytes_to_string(input) == "1":
            result = "ON"
    return result
43
pialert/publishers/ntfy.py
Executable file
@@ -0,0 +1,43 @@

import requests
from base64 import b64encode

import conf
from logger import mylog
from helper import noti_struc

#-------------------------------------------------------------------------------
def check_config():
    if conf.NTFY_HOST == '' or conf.NTFY_TOPIC == '':
        mylog('none', ['[Check Config] Error: NTFY service not set up correctly. Check your pialert.conf NTFY_* variables.'])
        return False
    else:
        return True

#-------------------------------------------------------------------------------
def send (msg: noti_struc):

    headers = {
        "Title": "Pi.Alert Notification",
        "Actions": "view, Open Dashboard, "+ conf.REPORT_DASHBOARD_URL,
        "Priority": "urgent",
        "Tags": "warning"
    }
    # if username and password are set, generate a basic auth hash and update the headers
    if conf.NTFY_USER != "" and conf.NTFY_PASSWORD != "":
        # Generate the hash for basic auth
        basichash = b64encode(bytes(conf.NTFY_USER + ':' + conf.NTFY_PASSWORD, "utf-8")).decode("ascii")

        # add the authorization header with the hash
        headers["Authorization"] = "Basic {}".format(basichash)

    try:
        requests.post("{}/{}".format( conf.NTFY_HOST, conf.NTFY_TOPIC),
                      data=msg.text,
                      headers=headers)
    except requests.exceptions.RequestException as e:
        mylog('none', ['[NTFY] Error: ', e])
        return -1

    return 0
33
pialert/publishers/pushsafer.py
Executable file
@@ -0,0 +1,33 @@

import requests

import conf
from helper import noti_struc
from logger import mylog

#-------------------------------------------------------------------------------
def check_config():
    if conf.PUSHSAFER_TOKEN == 'ApiKey':
        mylog('none', ['[Check Config] Error: Pushsafer service not set up correctly. Check your pialert.conf PUSHSAFER_TOKEN variable.'])
        return False
    else:
        return True

#-------------------------------------------------------------------------------
def send ( msg:noti_struc ):
    _Text = msg.text
    url = 'https://www.pushsafer.com/api'
    post_fields = {
        "t"  : 'Pi.Alert Message',
        "m"  : _Text,
        "s"  : 11,
        "v"  : 3,
        "i"  : 148,
        "c"  : '#ef7f7f',
        "d"  : 'a',
        "u"  : conf.REPORT_DASHBOARD_URL,
        "ut" : 'Open Pi.Alert',
        "k"  : conf.PUSHSAFER_TOKEN,
    }
    requests.post(url, data=post_fields)
98
pialert/publishers/webhook.py
Executable file
@@ -0,0 +1,98 @@

import json
import subprocess

import conf
from const import logPath
from helper import noti_struc, write_file
from logger import logResult, mylog

#-------------------------------------------------------------------------------
def check_config():
    if conf.WEBHOOK_URL == '':
        mylog('none', ['[Check Config] Error: Webhook service not set up correctly. Check your pialert.conf WEBHOOK_* variables.'])
        return False
    else:
        return True

#-------------------------------------------------------------------------------
def send (msg: noti_struc):

    # use the data type based on the specified payload type
    if conf.WEBHOOK_PAYLOAD == 'json':
        payloadData = msg.json
    if conf.WEBHOOK_PAYLOAD == 'html':
        payloadData = msg.html
    if conf.WEBHOOK_PAYLOAD == 'text':
        payloadData = to_text(msg.json) # TO-DO can we just send msg.text?

    # Define a Slack-compatible payload
    _json_payload = { "text": payloadData } if conf.WEBHOOK_PAYLOAD == 'text' else {
        "username": "Pi.Alert",
        "text": "There are new notifications",
        "attachments": [{
            "title": "Pi.Alert Notifications",
            "title_link": conf.REPORT_DASHBOARD_URL,
            "text": payloadData
        }]
    }

    # DEBUG - Write the json payload into a log file for debugging
    write_file (logPath + '/webhook_payload.json', json.dumps(_json_payload))

    # Use the Slack-compatible webhook endpoint for Discord so that the same payload can be used for both
    if(conf.WEBHOOK_URL.startswith('https://discord.com/api/webhooks/') and not conf.WEBHOOK_URL.endswith("/slack")):
        _WEBHOOK_URL = f"{conf.WEBHOOK_URL}/slack"
        curlParams = ["curl","-i","-H", "Content-Type:application/json" ,"-d", json.dumps(_json_payload), _WEBHOOK_URL]
    else:
        _WEBHOOK_URL = conf.WEBHOOK_URL
        curlParams = ["curl","-i","-X", conf.WEBHOOK_REQUEST_METHOD ,"-H", "Content-Type:application/json" ,"-d", json.dumps(_json_payload), _WEBHOOK_URL]

    # execute the curl call
    try:
        # try running a subprocess
        mylog('debug', ['[send_webhook] curlParams: ', curlParams])
        p = subprocess.Popen(curlParams, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)

        stdout, stderr = p.communicate()

        # write stdout and stderr into .log files for debugging if needed
        logResult (stdout, stderr) # TO-DO should be changed to mylog
    except subprocess.CalledProcessError as e:
        # An error occurred, handle it
        mylog('none', ['[send_webhook]', e.output])

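# Illustrative sketch (not part of the original file): with
# WEBHOOK_PAYLOAD == 'html', the Slack-compatible payload defined above
# serializes to roughly the following JSON (dashboard URL hypothetical):
#
#   {
#     "username": "Pi.Alert",
#     "text": "There are new notifications",
#     "attachments": [{
#       "title": "Pi.Alert Notifications",
#       "title_link": "http://pi.alert/",
#       "text": "<table>...rendered report...</table>"
#     }]
#   }
#
# For Discord webhook URLs, the "/slack" suffix added above lets Discord
# accept this same Slack-style body.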
#-------------------------------------------------------------------------------
def to_text(_json):
    payloadData = ""
    if len(_json['internet']) > 0 and 'internet' in conf.INCLUDED_SECTIONS:
        payloadData += "INTERNET\n"
        for event in _json['internet']:
            payloadData += event[3] + ' on ' + event[2] + '. ' + event[4] + '. New address:' + event[1] + '\n'

    if len(_json['new_devices']) > 0 and 'new_devices' in conf.INCLUDED_SECTIONS:
        payloadData += "NEW DEVICES:\n"
        for event in _json['new_devices']:
            if event[4] is None:
                event[4] = event[11]
            payloadData += event[1] + ' - ' + event[4] + '\n'

    if len(_json['down_devices']) > 0 and 'down_devices' in conf.INCLUDED_SECTIONS:
        write_file (logPath + '/down_devices_example.log', _json['down_devices'])
        payloadData += 'DOWN DEVICES:\n'
        for event in _json['down_devices']:
            if event[4] is None:
                event[4] = event[11]
            payloadData += event[1] + ' - ' + event[4] + '\n'

    if len(_json['events']) > 0 and 'events' in conf.INCLUDED_SECTIONS:
        payloadData += "EVENTS:\n"
        for event in _json['events']:
            if event[8] != "Internet":
                payloadData += event[8] + " on " + event[1] + " " + event[3] + " at " + event[2] + "\n"

    return payloadData
520
pialert/reporting.py
Executable file
@@ -0,0 +1,520 @@

import datetime
import json
import socket
import subprocess
import requests
from json2table import convert

# pialert modules
import conf
from const import pialertPath, logPath, apiPath
from helper import noti_struc, generate_mac_links, removeDuplicateNewLines, timeNow, hide_email, updateState, get_file_content, write_file
from logger import logResult, mylog, print_log

from publishers.email import (check_config as email_check_config,
                              send as send_email )
from publishers.ntfy import (check_config as ntfy_check_config,
                             send as send_ntfy )
from publishers.apprise import (check_config as apprise_check_config,
                                send as send_apprise)
from publishers.webhook import (check_config as webhook_check_config,
                                send as send_webhook)
from publishers.pushsafer import (check_config as pushsafer_check_config,
                                  send as send_pushsafer)
from publishers.mqtt import (check_config as mqtt_check_config,
                             mqtt_start )

#===============================================================================
# REPORTING
#===============================================================================
# create a json for webhook and mqtt notifications to provide further integration options

json_final = []

#-------------------------------------------------------------------------------
def construct_notifications(db, sqlQuery, tableTitle, skipText = False, suppliedJsonStruct = None):

    if suppliedJsonStruct is None and sqlQuery == "":
        return noti_struc("", "", "")

    table_attributes = {"style" : "border-collapse: collapse; font-size: 12px; color:#707070", "width" : "100%", "cellspacing" : 0, "cellpadding" : "3px", "bordercolor" : "#C0C0C0", "border":"1"}
    headerProps = "width='120px' style='color:blue; font-size: 16px;' bgcolor='#909090' "
    thProps = "width='120px' style='color:#F0F0F0' bgcolor='#909090' "

    build_direction = "TOP_TO_BOTTOM"
    text_line = '{}\t{}\n'

    if suppliedJsonStruct is None:
        json_struc = db.get_table_as_json(sqlQuery)
    else:
        json_struc = suppliedJsonStruct

    jsn  = json_struc.json
    html = ""
    text = ""

    if len(jsn["data"]) > 0:
        text = tableTitle + "\n---------\n"

        html = convert(jsn, build_direction=build_direction, table_attributes=table_attributes)
        html = format_table(html, "data", headerProps, tableTitle).replace('<ul>','<ul style="list-style:none;padding-left:0">')

        headers = json_struc.columnNames

        # prepare the text-only message
        if skipText == False:

            for device in jsn["data"]:
                for header in headers:
                    padding = ""
                    if len(header) < 4:
                        padding = "\t"
                    text += text_line.format ( header + ': ' + padding, device[header])
                text += '\n'

        # Format the HTML table headers
        for header in headers:
            html = format_table(html, header, thProps)

    return noti_struc(jsn, text, html)


def send_notifications (db, INCLUDED_SECTIONS = conf.INCLUDED_SECTIONS):

    sql = db.sql #TO-DO
    global mail_text, mail_html, json_final, changedPorts_json_struc, partial_html, partial_txt, partial_json

    deviceUrl      = conf.REPORT_DASHBOARD_URL + '/deviceDetails.php?mac='
    plugins_report = False

    # Reporting section
    mylog('verbose', ['[Notification] Check if something to report'])

    # prepare the variables for the JSON construction
    json_internet     = []
    json_new_devices  = []
    json_down_devices = []
    json_events       = []
    json_ports        = []
    json_plugins      = []

    # Disable reporting on events for devices where reporting is disabled based on the MAC address
    sql.execute ("""UPDATE Events SET eve_PendingAlertEmail = 0
                    WHERE eve_PendingAlertEmail = 1 AND eve_EventType != 'Device Down' AND eve_MAC IN
                        (
                            SELECT dev_MAC FROM Devices WHERE dev_AlertEvents = 0
                        )""")
    sql.execute ("""UPDATE Events SET eve_PendingAlertEmail = 0
                    WHERE eve_PendingAlertEmail = 1 AND eve_EventType = 'Device Down' AND eve_MAC IN
                        (
                            SELECT dev_MAC FROM Devices WHERE dev_AlertDeviceDown = 0
                        )""")

    # Open the text template
    mylog('verbose', ['[Notification] Open text Template'])
    template_file = open(pialertPath + '/back/report_template.txt', 'r')
    mail_text = template_file.read()
    template_file.close()

    # Open the HTML template
    mylog('verbose', ['[Notification] Open html Template'])
    template_file = open(pialertPath + '/back/report_template.html', 'r')
    if conf.newVersionAvailable :
        template_file = open(pialertPath + '/back/report_template_new_version.html', 'r')

    mail_html = template_file.read()
    template_file.close()

    # Report header & footer
    timeFormated = timeNow().strftime ('%Y-%m-%d %H:%M')
    mail_text = mail_text.replace ('<REPORT_DATE>', timeFormated)
    mail_html = mail_html.replace ('<REPORT_DATE>', timeFormated)

    mail_text = mail_text.replace ('<SERVER_NAME>', socket.gethostname() )
    mail_html = mail_html.replace ('<SERVER_NAME>', socket.gethostname() )

    mylog('verbose', ['[Notification] included sections: ',INCLUDED_SECTIONS])
    if 'internet' in INCLUDED_SECTIONS:
        # Compose the Internet section
        sqlQuery = """SELECT eve_MAC as MAC, eve_IP as IP, eve_DateTime as Datetime, eve_EventType as "Event Type", eve_AdditionalInfo as "More info" FROM Events
                      WHERE eve_PendingAlertEmail = 1 AND eve_MAC = 'Internet'
                      ORDER BY eve_DateTime"""

        notiStruc = construct_notifications(db, sqlQuery, "Internet IP change")

        # collect "internet" (IP changes) for the webhook json
        json_internet = notiStruc.json["data"]

        mail_text = mail_text.replace ('<SECTION_INTERNET>', notiStruc.text + '\n')
        mail_html = mail_html.replace ('<INTERNET_TABLE>', notiStruc.html)
        mylog('verbose', ['[Notification] Internet sections done.'])

    if 'new_devices' in INCLUDED_SECTIONS:
        # Compose the New Devices section
        sqlQuery = """SELECT eve_MAC as MAC, eve_DateTime as Datetime, dev_LastIP as IP, eve_EventType as "Event Type", dev_Name as "Device name", dev_Comments as Comments FROM Events_Devices
                      WHERE eve_PendingAlertEmail = 1
                        AND eve_EventType = 'New Device'
                      ORDER BY eve_DateTime"""

        notiStruc = construct_notifications(db, sqlQuery, "New devices")

        # collect "new_devices" for the webhook json
        json_new_devices = notiStruc.json["data"]

        mail_text = mail_text.replace ('<SECTION_NEW_DEVICES>', notiStruc.text + '\n')
        mail_html = mail_html.replace ('<NEW_DEVICES_TABLE>', notiStruc.html)
        mylog('verbose', ['[Notification] New Devices sections done.'])

    if 'down_devices' in INCLUDED_SECTIONS:
        # Compose the Devices Down section
        sqlQuery = """SELECT eve_MAC as MAC, eve_DateTime as Datetime, dev_LastIP as IP, eve_EventType as "Event Type", dev_Name as "Device name", dev_Comments as Comments FROM Events_Devices
                      WHERE eve_PendingAlertEmail = 1
                        AND eve_EventType = 'Device Down'
                      ORDER BY eve_DateTime"""

        notiStruc = construct_notifications(db, sqlQuery, "Down devices")

        # collect "down_devices" for the webhook json
        json_down_devices = notiStruc.json["data"]

        mail_text = mail_text.replace ('<SECTION_DEVICES_DOWN>', notiStruc.text + '\n')
        mail_html = mail_html.replace ('<DOWN_DEVICES_TABLE>', notiStruc.html)
        mylog('verbose', ['[Notification] Down Devices sections done.'])

    if 'events' in INCLUDED_SECTIONS:
        # Compose the Events section
        sqlQuery = """SELECT eve_MAC as MAC, eve_DateTime as Datetime, dev_LastIP as IP, eve_EventType as "Event Type", dev_Name as "Device name", dev_Comments as Comments FROM Events_Devices
                      WHERE eve_PendingAlertEmail = 1
                        AND eve_EventType IN ('Connected','Disconnected',
                                              'IP Changed')
                      ORDER BY eve_DateTime"""

        notiStruc = construct_notifications(db, sqlQuery, "Events")

        # collect "events" for the webhook json
        json_events = notiStruc.json["data"]

        mail_text = mail_text.replace ('<SECTION_EVENTS>', notiStruc.text + '\n')
        mail_html = mail_html.replace ('<EVENTS_TABLE>', notiStruc.html)
        mylog('verbose', ['[Notification] Events sections done.'])

    if 'ports' in INCLUDED_SECTIONS:
        # collect "ports" for the webhook json
        mylog('verbose', ['[Notification] Ports: conf.changedPorts_json_struc:', conf.changedPorts_json_struc])
        if conf.changedPorts_json_struc is not None:
            json_ports = conf.changedPorts_json_struc.json["data"]

        notiStruc = construct_notifications(db, "", "Ports", True, conf.changedPorts_json_struc)
        mylog('verbose', ['[Notification] Ports: notiStruc:', notiStruc ])
        mail_html = mail_html.replace ('<PORTS_TABLE>', notiStruc.html)

        portsTxt = ""
        if conf.changedPorts_json_struc is not None:
            portsTxt = "Ports \n---------\n Ports changed! Check PiAlert for details!\n"

        mail_text = mail_text.replace ('<PORTS_TABLE>', portsTxt )
        mylog('verbose', ['[Notification] Ports sections done.'])

    if 'plugins' in INCLUDED_SECTIONS and conf.ENABLE_PLUGINS:
        # Compose the Plugins section
        sqlQuery = """SELECT Plugin, Object_PrimaryId, Object_SecondaryId, DateTimeChanged, Watched_Value1, Watched_Value2, Watched_Value3, Watched_Value4, Status from Plugins_Events"""

        notiStruc = construct_notifications(db, sqlQuery, "Plugins")

        # collect "plugins" for the webhook json
        json_plugins = notiStruc.json["data"]

        mail_text = mail_text.replace ('<PLUGINS_TABLE>', notiStruc.text + '\n')
        mail_html = mail_html.replace ('<PLUGINS_TABLE>', notiStruc.html)

        # check if we need to report something
        plugins_report = len(json_plugins) > 0
        mylog('verbose', ['[Notification] Plugins sections done.'])

    json_final = {
        "internet": json_internet,
        "new_devices": json_new_devices,
        "down_devices": json_down_devices,
        "events": json_events,
        "ports": json_ports,
        "plugins": json_plugins,
    }

    mail_text = removeDuplicateNewLines(mail_text)

    # Create clickable MAC links
    mail_html = generate_mac_links (mail_html, deviceUrl)

    # Write the output emails for debugging
    write_file (logPath + '/report_output.json', json.dumps(json_final))
    write_file (logPath + '/report_output.txt', mail_text)
    write_file (logPath + '/report_output.html', mail_html)

    # Send the notifications
    if json_internet != [] or json_new_devices != [] or json_down_devices != [] or json_events != [] or json_ports != [] or conf.debug_force_notification or plugins_report:

        mylog('none', ['[Notification] Changes detected, sending reports'])

        msg = noti_struc(json_final, mail_text, mail_html)

        mylog('info', ['[Notification] Updating API files'])
        send_api()

        if conf.REPORT_MAIL and check_config('email'):
            updateState(db,"Send: Email")
            mylog('info', ['[Notification] Sending report by Email'])
            send_email (msg)
        else :
            mylog('verbose', ['[Notification] Skip email'])
        if conf.REPORT_APPRISE and check_config('apprise'):
            updateState(db,"Send: Apprise")
            mylog('info', ['[Notification] Sending report by Apprise'])
            send_apprise (msg)
        else :
            mylog('verbose', ['[Notification] Skip Apprise'])
        if conf.REPORT_WEBHOOK and check_config('webhook'):
            updateState(db,"Send: Webhook")
            mylog('info', ['[Notification] Sending report by Webhook'])
            send_webhook (msg)
        else :
            mylog('verbose', ['[Notification] Skip webhook'])
        if conf.REPORT_NTFY and check_config('ntfy'):
            updateState(db,"Send: NTFY")
            mylog('info', ['[Notification] Sending report by NTFY'])
            send_ntfy (msg)
        else :
            mylog('verbose', ['[Notification] Skip NTFY'])
        if conf.REPORT_PUSHSAFER and check_config('pushsafer'):
            updateState(db,"Send: PUSHSAFER")
            mylog('info', ['[Notification] Sending report by PUSHSAFER'])
            send_pushsafer (msg)
        else :
            mylog('verbose', ['[Notification] Skip PUSHSAFER'])
        # Update MQTT entities
        if conf.REPORT_MQTT and check_config('mqtt'):
            updateState(db,"Send: MQTT")
            mylog('info', ['[Notification] Establishing MQTT thread'])
            mqtt_start(db)
        else :
            mylog('verbose', ['[Notification] Skip MQTT'])
    else :
        mylog('verbose', ['[Notification] No changes to report'])

    # Clean up the pending alert events
    sql.execute ("""UPDATE Devices SET dev_LastNotification = ?
                    WHERE dev_MAC IN (SELECT eve_MAC FROM Events
                                      WHERE eve_PendingAlertEmail = 1)
                 """, (datetime.datetime.now(),) )
    sql.execute ("""UPDATE Events SET eve_PendingAlertEmail = 0
                    WHERE eve_PendingAlertEmail = 1""")

    # clear the plugin events
    sql.execute ("DELETE FROM Plugins_Events")

    conf.changedPorts_json_struc = None

    # DEBUG - print the number of rows updated
    mylog('info', ['[Notification] Notifications changes: ', sql.rowcount])

    # Commit changes
    db.commitDB()

#-------------------------------------------------------------------------------
def check_config(service):

    if service == 'email':
        return email_check_config()

    # if conf.SMTP_SERVER == '' or conf.REPORT_FROM == '' or conf.REPORT_TO == '':
    #     mylog('none', ['[Check Config] Error: Email service not set up correctly. Check your pialert.conf SMTP_*, REPORT_FROM and REPORT_TO variables.'])
    #     return False
    # else:
    #     return True

    if service == 'apprise':
        return apprise_check_config()

    # if conf.APPRISE_URL == '' or conf.APPRISE_HOST == '':
    #     mylog('none', ['[Check Config] Error: Apprise service not set up correctly. Check your pialert.conf APPRISE_* variables.'])
    #     return False
    # else:
    #     return True

    if service == 'webhook':
        return webhook_check_config()

    # if conf.WEBHOOK_URL == '':
    #     mylog('none', ['[Check Config] Error: Webhook service not set up correctly. Check your pialert.conf WEBHOOK_* variables.'])
    #     return False
    # else:
    #     return True

    if service == 'ntfy':
        return ntfy_check_config()

    # if conf.NTFY_HOST == '' or conf.NTFY_TOPIC == '':
    #     mylog('none', ['[Check Config] Error: NTFY service not set up correctly. Check your pialert.conf NTFY_* variables.'])
    #     return False
    # else:
    #     return True

    if service == 'pushsafer':
        return pushsafer_check_config()

    if service == 'mqtt':
        return mqtt_check_config()

#-------------------------------------------------------------------------------
def format_table(html, thValue, props, newThValue=''):

    if newThValue == '':
        newThValue = thValue

    return html.replace("<th>" + thValue + "</th>", "<th " + props + " >" + newThValue + "</th>")

#-------------------------------------------------------------------------------
def format_report_section(pActive, pSection, pTable, pText, pHTML):

    # Replace the section placeholders with the generated content
    if pActive:
        conf.mail_text = conf.mail_text.replace('<' + pTable + '>', pText)
        conf.mail_html = conf.mail_html.replace('<' + pTable + '>', pHTML)

        conf.mail_text = remove_tag(conf.mail_text, pSection)
        conf.mail_html = remove_tag(conf.mail_html, pSection)
    else:
        conf.mail_text = remove_section(conf.mail_text, pSection)
        conf.mail_html = remove_section(conf.mail_html, pSection)

#-------------------------------------------------------------------------------
def remove_section(pText, pSection):
    # Search for the section in the text
    if pText.find('<' + pSection + '>') >= 0 \
    and pText.find('</' + pSection + '>') >= 0:
        # return the text without the section
        return pText[:pText.find('<' + pSection + '>')] + \
               pText[pText.find('</' + pSection + '>') + len(pSection) + 3:]
    else:
        # return the text unchanged
        return pText

#-------------------------------------------------------------------------------
def remove_tag(pText, pTag):
    # return the text without the tag
    return pText.replace('<' + pTag + '>', '').replace('</' + pTag + '>', '')


#-------------------------------------------------------------------------------
# Reporting
#-------------------------------------------------------------------------------

#-------------------------------------------------------------------------------
def send_api():
    mylog('verbose', ['[Send API] Updating notification_* files in ', apiPath])

    write_file(apiPath + 'notification_text.txt', mail_text)
    write_file(apiPath + 'notification_text.html', mail_html)
    write_file(apiPath + 'notification_json_final.json', json.dumps(json_final))

#-------------------------------------------------------------------------------
def skip_repeated_notifications(db):

    # Skip repeated notifications
    # due to strftime overflow, compare minutes ("strftime / 60") instead of seconds
    mylog('verbose', '[Skip Repeated Notifications] Skip Repeated start')
    db.sql.execute("""UPDATE Events SET eve_PendingAlertEmail = 0
                        WHERE eve_PendingAlertEmail = 1 AND eve_MAC IN
                            (
                                SELECT dev_MAC FROM Devices
                                WHERE dev_LastNotification IS NOT NULL
                                  AND dev_LastNotification <>""
                                  AND (strftime("%s", dev_LastNotification)/60 +
                                       dev_SkipRepeated * 60) >
                                      (strftime('%s','now','localtime')/60 )
                            )
                    """)
    mylog('verbose', '[Skip Repeated Notifications] Skip Repeated end')

    db.commitDB()

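#-------------------------------------------------------------------------------
# Worked example of the mute window above (a sketch; dev_SkipRepeated appears
# to hold hours, hence the * 60 to convert to minutes): with dev_SkipRepeated = 1
# and a last notification at 10:00, events for that device stay muted while
# minutes(10:00) + 60 > minutes(now), i.e. until 11:00.
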
#===============================================================================
# UTIL
#===============================================================================

#-------------------------------------------------------------------------------
def check_and_run_event(db):
    sql = db.sql # TO-DO
    sql.execute(""" select * from Parameters where par_ID = "Front_Event" """)
    rows = sql.fetchall()

    event, param = ['', '']
    if len(rows) > 0 and rows[0]['par_Value'] != 'finished':
        event = rows[0]['par_Value'].split('|')[0]
        param = rows[0]['par_Value'].split('|')[1]
    else:
        return

    if event == 'test':
        handle_test(param)
    if event == 'run':
        handle_run(param)

    # clear the event execution flag
    sql.execute("UPDATE Parameters SET par_Value='finished' WHERE par_ID='Front_Event'")

    # commit to the DB
    db.commitDB()

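#-------------------------------------------------------------------------------
# The Front_Event parameter is written by the web UI as '<event>|<parameter>',
# e.g. 'test|REPORT_MAIL' to send a sample email report or 'run|ENABLE_ARPSCAN'
# to force an immediate scan (a sketch of the format, based on the handlers below).
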
#-------------------------------------------------------------------------------
def handle_run(runType):
    global last_network_scan

    mylog('info', ['[', timeNow(), '] START Run: ', runType])

    if runType == 'ENABLE_ARPSCAN':
        # back-date the last scan by 24h so the next loop iteration triggers a scan
        last_network_scan = conf.time_started - datetime.timedelta(hours=24)

    mylog('info', ['[', timeNow(), '] END Run: ', runType])

#-------------------------------------------------------------------------------
def handle_test(testType):

    mylog('info', ['[', timeNow(), '] START Test: ', testType])

    # Open the text sample
    sample_txt = get_file_content(pialertPath + '/back/report_sample.txt')

    # Open the html sample
    sample_html = get_file_content(pialertPath + '/back/report_sample.html')

    # Open the json sample and get only the payload part
    sample_json_payload = json.loads(get_file_content(pialertPath + '/back/webhook_json_sample.json'))[0]["body"]["attachments"][0]["text"]

    sample_msg = noti_struc(sample_json_payload, sample_txt, sample_html)

    if testType == 'REPORT_MAIL':
        send_email(sample_msg)
    if testType == 'REPORT_WEBHOOK':
        send_webhook(sample_msg)
    if testType == 'REPORT_APPRISE':
        send_apprise(sample_msg)
    if testType == 'REPORT_NTFY':
        send_ntfy(sample_msg)
    if testType == 'REPORT_PUSHSAFER':
        send_pushsafer(sample_msg)

    mylog('info', ['[Test Publishers] END Test: ', testType])

57
pialert/scanners/arpscan.py
Executable file
@@ -0,0 +1,57 @@
import re
import subprocess

from logger import mylog

#-------------------------------------------------------------------------------
def execute_arpscan(userSubnets):

    # combined output of possibly multiple interfaces
    arpscan_output = ""

    # scan each interface
    for interface in userSubnets:
        arpscan_output += execute_arpscan_on_interface(interface)

    # Search for IP + MAC + Vendor as a regular expression
    re_ip = r'(?P<ip>((2[0-5]|1[0-9]|[0-9])?[0-9]\.){3}((2[0-5]|1[0-9]|[0-9])?[0-9]))'
    re_mac = r'(?P<mac>([0-9a-fA-F]{2}[:-]){5}([0-9a-fA-F]{2}))'
    re_hw = r'(?P<hw>.*)'
    re_pattern = re.compile(re_ip + r'\s+' + re_mac + r'\s' + re_hw)

    # Create a dict per discovered device
    devices_list = [device.groupdict()
                    for device in re.finditer(re_pattern, arpscan_output)]

    # Delete duplicate MACs
    unique_mac = []
    unique_devices = []

    for device in devices_list:
        if device['mac'] not in unique_mac:
            unique_mac.append(device['mac'])
            unique_devices.append(device)

    # return the de-duplicated list
    mylog('debug', ['[ARP Scan] Completed, found ', len(unique_devices), ' devices'])
    return unique_devices

#-------------------------------------------------------------------------------
def execute_arpscan_on_interface(interface):
    # Prepare command arguments
    subnets = interface.strip().split()
    # Retry is 6 to avoid false offline devices
    mylog('debug', ['[ARP Scan] - arpscan command: sudo arp-scan --ignoredups --retry=6 ', str(subnets)])
    arpscan_args = ['sudo', 'arp-scan', '--ignoredups', '--retry=6'] + subnets

    # Execute the command
    try:
        # try running a subprocess
        result = subprocess.check_output(arpscan_args, universal_newlines=True)
    except subprocess.CalledProcessError as e:
        # An error occurred, handle it
        mylog('none', ['[ARP Scan]', e.output])
        result = ""

    mylog('debug', ['[ARP Scan] on interface completed with results: ', result])
    return result

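#-------------------------------------------------------------------------------
# Minimal usage sketch (assumes arp-scan is installed, sudo is password-less,
# and the interface name matches your host - adjust before running):
#
#   devices = execute_arpscan(['192.168.1.0/24 --interface=eth0'])
#   for d in devices:
#       print(d['ip'], d['mac'], d['hw'])
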
195
pialert/scanners/internet.py
Executable file
@@ -0,0 +1,195 @@
""" internet related functions to support Pi.Alert """

import subprocess
import re

# pialert modules

import conf
from helper import timeNow, updateState
from logger import append_line_to_file, mylog
from const import logPath


# need to find a better way to deal with settings!
#global DDNS_ACTIVE, DDNS_DOMAIN, DDNS_UPDATE_URL, DDNS_USER, DDNS_PASSWORD


#===============================================================================
# INTERNET IP CHANGE
#===============================================================================
def check_internet_IP(db):

    # Header
    updateState(db, "Scan: Internet IP")
    mylog('verbose', ['[Internet IP] Check Internet IP started'])

    # Get the Internet IP
    mylog('verbose', ['[Internet IP] - Retrieving Internet IP'])
    internet_IP = get_internet_IP(conf.DIG_GET_IP_ARG)
    # TESTING - Force IP
    # internet_IP = "1.2.3.4"

    # Check the result is an IP
    if internet_IP == "":
        mylog('none', ['[Internet IP] Error retrieving Internet IP'])
        mylog('none', ['[Internet IP] Exiting...'])
        return False
    mylog('verbose', ['[Internet IP] IP: ', internet_IP])

    # Get the previously stored IP
    mylog('verbose', ['[Internet IP] Retrieving previous IP:'])
    previous_IP = get_previous_internet_IP(db)
    mylog('verbose', ['[Internet IP] ', previous_IP])

    # Check for an IP change
    if internet_IP != previous_IP:
        mylog('info', ['[Internet IP] New internet IP: ', internet_IP])
        save_new_internet_IP(db, internet_IP)
    else:
        mylog('verbose', ['[Internet IP] No changes to perform'])

    # Get the Dynamic DNS IP
    if conf.DDNS_ACTIVE:
        mylog('verbose', ['[DDNS] Retrieving Dynamic DNS IP'])
        dns_IP = get_dynamic_DNS_IP()

        # Check the Dynamic DNS IP
        if dns_IP == "" or dns_IP == "0.0.0.0":
            mylog('none', ['[DDNS] Error retrieving Dynamic DNS IP'])
            mylog('none', ['[DDNS] ', dns_IP])

        # Check for a DNS change
        if dns_IP != internet_IP:
            mylog('none', ['[DDNS] Updating Dynamic DNS IP'])
            message = set_dynamic_DNS_IP()
            mylog('none', ['[DDNS] ', message])
        else:
            mylog('verbose', ['[DDNS] No changes to perform'])
    else:
        mylog('verbose', ['[DDNS] Skipping Dynamic DNS update'])

#-------------------------------------------------------------------------------
def get_internet_IP(DIG_GET_IP_ARG):
    # BUGFIX #46 - curl http://ipv4.icanhazip.com repeatedly is very slow
    # Using 'dig' instead
    dig_args = ['dig', '+short'] + DIG_GET_IP_ARG.strip().split()
    try:
        cmd_output = subprocess.check_output(dig_args, universal_newlines=True)
    except subprocess.CalledProcessError as e:
        mylog('none', [e.output])
        cmd_output = '' # no internet

    # Check the result is an IP
    IP = check_IP_format(cmd_output)

    # Handle an invalid response
    if IP == '':
        IP = '0.0.0.0'

    return IP

#-------------------------------------------------------------------------------
def get_previous_internet_IP(db):

    previous_IP = '0.0.0.0'

    # get the previous internet IP stored in the DB
    db.sql.execute("SELECT dev_LastIP FROM Devices WHERE dev_MAC = 'Internet' ")
    result = db.sql.fetchone()

    db.commitDB()

    if result is not None and len(result) > 0:
        previous_IP = result[0]

    # return the previous IP
    return previous_IP


#-------------------------------------------------------------------------------
def save_new_internet_IP(db, pNewIP):
    # Log the new IP into the logfile
    append_line_to_file(logPath + '/IP_changes.log',
                        '[' + str(timeNow()) + ']\t' + pNewIP + '\n')

    prevIp = get_previous_internet_IP(db)
    # Save the event
    db.sql.execute("""INSERT INTO Events (eve_MAC, eve_IP, eve_DateTime,
                                          eve_EventType, eve_AdditionalInfo,
                                          eve_PendingAlertEmail)
                      VALUES ('Internet', ?, ?, 'Internet IP Changed',
                              'Previous Internet IP: '|| ?, 1) """,
                   (pNewIP, timeNow(), prevIp))

    # Save the new IP
    db.sql.execute("""UPDATE Devices SET dev_LastIP = ?
                      WHERE dev_MAC = 'Internet' """,
                   (pNewIP,))

    # commit changes
    db.commitDB()

#-------------------------------------------------------------------------------
def check_IP_format(pIP):
    # Check the IP format
    IPv4SEG = r'(?:25[0-5]|(?:2[0-4]|1{0,1}[0-9]){0,1}[0-9])'
    IPv4ADDR = r'(?:(?:' + IPv4SEG + r'\.){3,3}' + IPv4SEG + r')'
    IP = re.search(IPv4ADDR, pIP)

    # Return an empty string if no IP was found
    if IP is None:
        return ""

    # Return the IP
    return IP.group(0)

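#-------------------------------------------------------------------------------
# Example of the extraction above (a sketch): check_IP_format('"93.184.216.34"\n')
# returns '93.184.216.34', while check_IP_format('no address here') returns ''.
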
#-------------------------------------------------------------------------------
def get_dynamic_DNS_IP():
    # Using the OpenDNS server
    # dig_args = ['dig', '+short', DDNS_DOMAIN, '@resolver1.opendns.com']

    # Using the default DNS server
    dig_args = ['dig', '+short', conf.DDNS_DOMAIN]

    try:
        # try running a subprocess
        dig_output = subprocess.check_output(dig_args, universal_newlines=True)
    except subprocess.CalledProcessError as e:
        # An error occurred, handle it
        mylog('none', ['[DDNS] ERROR - ', e.output])
        dig_output = '' # probably no internet

    # Check the result is an IP
    IP = check_IP_format(dig_output)

    # Handle an invalid response
    if IP == '':
        IP = '0.0.0.0'

    return IP

#-------------------------------------------------------------------------------
def set_dynamic_DNS_IP():
    try:
        # Update the Dynamic DNS IP via the provider's update URL
        curl_output = subprocess.check_output(['curl', '-s',
                                               conf.DDNS_UPDATE_URL +
                                               'username=' + conf.DDNS_USER +
                                               '&password=' + conf.DDNS_PASSWORD +
                                               '&hostname=' + conf.DDNS_DOMAIN],
                                              universal_newlines=True)
    except subprocess.CalledProcessError as e:
        # An error occurred, handle it
        mylog('none', ['[DDNS] ERROR - ', e.output])
        curl_output = ""

    return curl_output

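#-------------------------------------------------------------------------------
# The composed URL looks like this sketch (hypothetical values; note that
# DDNS_UPDATE_URL must already end in '?' or '&' for the concatenation to work):
#
#   https://dynupdate.example.com/nic/update?username=me&password=secret&hostname=home.example.com
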
211
pialert/scanners/nmapscan.py
Executable file
@@ -0,0 +1,211 @@

import subprocess

import conf
from const import logPath, sql_nmap_scan_all
from helper import json_struc, timeNow, updateState
from logger import append_line_to_file, mylog
#-------------------------------------------------------------------------------


class nmap_entry:
    def __init__(self, mac, time, port, state, service, name='', extra='', index=0):
        self.mac = mac
        self.time = time
        self.port = port
        self.state = state
        self.service = service
        self.name = name
        self.extra = extra
        self.index = index
        # identity used to detect changed ports: MAC + port + state + service
        self.hash = str(mac) + str(port) + str(state) + str(service)

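#-------------------------------------------------------------------------------
# Two entries count as the same port observation when their hash strings match,
# e.g. (a sketch):
#
#   a = nmap_entry('aa:bb:cc:dd:ee:ff', timeNow(), '80/tcp', 'open', 'http')
#   b = nmap_entry('aa:bb:cc:dd:ee:ff', timeNow(), '80/tcp', 'open', 'http')
#   assert a.hash == b.hash   # same device/port/state/service
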
#-------------------------------------------------------------------------------
def performNmapScan(db, devicesToScan):
    """
    run an nmap scan on a list of devices
    discovers open ports and keeps track of existing and new open ports
    """
    if len(devicesToScan) > 0:

        timeoutSec = conf.NMAP_TIMEOUT

        devTotal = len(devicesToScan)

        updateState(db, "Scan: Nmap")

        mylog('verbose', ['[NMAP Scan] Scan: Nmap for max ', str(timeoutSec), 's (' + str(round(int(timeoutSec) / 60, 1)) + 'min) per device'])
        mylog('verbose', ["[NMAP Scan] Estimated max delay: ", (devTotal * int(timeoutSec)), 's ', '(', round((devTotal * int(timeoutSec))/60, 1), 'min)'])

        devIndex = 0
        for device in devicesToScan:
            # Execute the command
            output = ""
            # prepare arguments from the user supplied ones
            nmapArgs = ['nmap'] + conf.NMAP_ARGS.split() + [device["dev_LastIP"]]

            progress = ' (' + str(devIndex+1) + '/' + str(devTotal) + ')'

            try:
                # try running a subprocess with a forced timeout (+ 30 seconds) in case the subprocess hangs
                output = subprocess.check_output(nmapArgs, universal_newlines=True, stderr=subprocess.STDOUT, timeout=(timeoutSec + 30))
            except subprocess.CalledProcessError as e:
                # An error occurred, handle it
                mylog('none', ["[NMAP Scan] ", e.output])
                mylog('none', ["[NMAP Scan] Error - Nmap Scan - check logs", progress])
            except subprocess.TimeoutExpired as timeErr:
                mylog('verbose', ['[NMAP Scan] Nmap TIMEOUT - the process was forcefully terminated as the timeout was reached for ', device["dev_LastIP"], progress])

            if output == "": # check if the subprocess failed
                mylog('info', ['[NMAP Scan] Nmap FAIL for ', device["dev_LastIP"], progress, ' check logs for details'])
            else:
                mylog('verbose', ['[NMAP Scan] Nmap SUCCESS for ', device["dev_LastIP"], progress])

            devIndex += 1

            # check the last run output
            newLines = output.split('\n')

            # regular logging
            for line in newLines:
                append_line_to_file(logPath + '/pialert_nmap.log', line + '\n')

            # collect ports / new Nmap entries
            newEntriesTmp = []

            index = 0
            startCollecting = False
            duration = ""
            for line in newLines:
                if 'Starting Nmap' in line:
                    if len(newLines) > index+1 and 'Note: Host seems down' in newLines[index+1]:
                        break # this entry is empty
                elif 'PORT' in line and 'STATE' in line and 'SERVICE' in line:
                    startCollecting = True # the header of the ports table marks the start
                elif startCollecting and len(line.split()) == 3:
                    newEntriesTmp.append(nmap_entry(device["dev_MAC"], timeNow(), line.split()[0], line.split()[1], line.split()[2], device["dev_Name"]))
                elif 'Nmap done' in line:
                    startCollecting = False # end reached
                    duration = line.split('scanned in ')[1]
                index += 1
            mylog('verbose', ['[NMAP Scan] Ports found by NMAP: ', len(newEntriesTmp)])
            process_discovered_ports(db, device, newEntriesTmp)
        # end of the for loop


def process_discovered_ports(db, device, discoveredPorts):
    """
    process the ports discovered by nmap
    compare them to the previous ports
    update the DB
    raise notifications
    """
    sql = db.sql # TO-DO
    # previous Nmap entries
    oldEntries = []
    changedPortsTmp = []

    mylog('verbose', ['[NMAP Scan] Process ports found by NMAP: ', len(discoveredPorts)])

    if len(discoveredPorts) > 0:

        # get all current NMAP ports from the DB
        rows = db.read(sql_nmap_scan_all)

        for row in rows:
            # only collect entries matching the current MAC address
            if row["MAC"] == device["dev_MAC"]:
                oldEntries.append(nmap_entry(row["MAC"], row["Time"], row["Port"], row["State"], row["Service"], device["dev_Name"], row["Extra"], row["Index"]))

        newEntries = []

        # Collect all entries that don't match the ones in the DB
        for discoveredPort in discoveredPorts:

            found = False

            # Check if the new entry is already available in oldEntries and skip it if so
            for oldEntry in oldEntries:
                if discoveredPort.hash == oldEntry.hash:
                    found = True

            if not found:
                newEntries.append(discoveredPort)


        mylog('verbose', ['[NMAP Scan] Nmap newly discovered or changed ports: ', len(newEntries)])

        # collect new ports, find the corresponding old entry and return it for notification purposes
        # also update the DB with the new values after deleting the old ones
        if len(newEntries) > 0:

            # params to build the SQL query
            params = []
            indexesToDelete = ""

            columnNames = ["Name", "MAC", "Port", "State", "Service", "Extra", "NewOrOld"]

            # Find the old entry matching the new entry hash
            for newEntry in newEntries:

                foundEntry = None

                for oldEntry in oldEntries:
                    if oldEntry.hash == newEntry.hash:
                        indexesToDelete = indexesToDelete + str(oldEntry.index) + ','
                        foundEntry = oldEntry

                # Old entry found
                if foundEntry is not None:
                    # Build params for the sql query
                    params.append((newEntry.mac, newEntry.time, newEntry.port, newEntry.state, newEntry.service, foundEntry.extra))
                    # Build JSON for the API and notifications
                    changedPortsTmp.append({
                        "Name"     : foundEntry.name,
                        "MAC"      : newEntry.mac,
                        "Port"     : newEntry.port,
                        "State"    : newEntry.state,
                        "Service"  : newEntry.service,
                        "Extra"    : foundEntry.extra,
                        "NewOrOld" : "New values"
                    })
                    changedPortsTmp.append({
                        "Name"     : foundEntry.name,
                        "MAC"      : foundEntry.mac,
                        "Port"     : foundEntry.port,
                        "State"    : foundEntry.state,
                        "Service"  : foundEntry.service,
                        "Extra"    : foundEntry.extra,
                        "NewOrOld" : "Old values"
                    })
                # New entry - no matching old entry found
                else:
                    # Build params for the sql query
                    params.append((newEntry.mac, newEntry.time, newEntry.port, newEntry.state, newEntry.service, ''))
                    # Build JSON for the API and notifications
                    changedPortsTmp.append({
                        "Name"     : "New device",
                        "MAC"      : newEntry.mac,
                        "Port"     : newEntry.port,
                        "State"    : newEntry.state,
                        "Service"  : newEntry.service,
                        "Extra"    : "",
                        "NewOrOld" : "New device"
                    })

            conf.changedPorts_json_struc = json_struc({"data": changedPortsTmp}, columnNames)

            # Delete the old entries if available
            if len(indexesToDelete) > 0:
                sql.execute("DELETE FROM Nmap_Scan where \"Index\" in (" + indexesToDelete[:-1] + ")")
                db.commitDB()

            # Insert the new values into the DB
            sql.executemany("""INSERT INTO Nmap_Scan ("MAC", "Time", "Port", "State", "Service", "Extra") VALUES (?, ?, ?, ?, ?, ?)""", params)
            db.commitDB()


200
pialert/scanners/pholusscan.py
Executable file
@@ -0,0 +1,200 @@
import subprocess
import re

from const import fullPholusPath, logPath
from helper import checkIPV4, timeNow, updateState
from logger import mylog

#-------------------------------------------------------------------------------

def performPholusScan(db, timeoutSec, userSubnets):
    sql = db.sql # TO-DO
    # scan every interface
    for subnet in userSubnets:

        temp = subnet.split("--interface=")

        if len(temp) != 2:
            mylog('none', ["[PholusScan] Skip scan (need the subnet in the format '192.168.1.0/24 --interface=eth0'), got: ", subnet])
            return

        mask = temp[0].strip()
        interface = temp[1].strip()

        # logging & updating the app state
        updateState(db, "Scan: Pholus")
        mylog('none', ['[PholusScan] Scan: Pholus for ', str(timeoutSec), 's (' + str(round(int(timeoutSec) / 60, 1)) + 'min)'])
        mylog('verbose', ["[PholusScan] Pholus scan on [interface] ", interface, " [mask] ", mask])

        # the scan always lasts 2x as long, so the desired user time from the settings needs to be halved
        adjustedTimeout = str(round(int(timeoutSec) / 2, 0))

        # python3 -m trace --trace /home/pi/pialert/pholus/pholus3.py eth1 -rdns_scanning 192.168.1.0/24 -stimeout 600
        pholus_args = ['python3', fullPholusPath, interface, "-rdns_scanning", mask, "-stimeout", adjustedTimeout]

        # Execute the command
        output = ""

        try:
            # try running a subprocess with a forced timeout (+ 30 seconds) in case the subprocess hangs
            output = subprocess.check_output(pholus_args, universal_newlines=True, stderr=subprocess.STDOUT, timeout=(timeoutSec + 30))
        except subprocess.CalledProcessError as e:
            # An error occurred, handle it
            mylog('none', ['[PholusScan]', e.output])
            mylog('none', ["[PholusScan] Error - Pholus Scan - check logs"])
        except subprocess.TimeoutExpired as timeErr:
            mylog('none', ['[PholusScan] Pholus TIMEOUT - the process was forcefully terminated as the timeout was reached'])

        if output == "": # check if the subprocess failed
            mylog('none', ['[PholusScan] Scan: Pholus FAIL - check logs'])
        else:
            mylog('verbose', ['[PholusScan] Scan: Pholus SUCCESS'])

        # check the last run output
        f = open(logPath + '/pialert_pholus_lastrun.log', 'r+')
        newLines = f.read().split('\n')
        f.close()

        # cleanup - select only lines containing a separator to filter out unnecessary data
        newLines = list(filter(lambda x: '|' in x, newLines))

        # build the SQL query parameters to insert into the DB
        params = []

        for line in newLines:
            columns = line.split("|")
            if len(columns) == 4:
                params.append((interface + " " + mask, timeNow(), columns[0].replace(" ", ""), columns[1].replace(" ", ""), columns[2].replace(" ", ""), columns[3], ''))

        if len(params) > 0:
            sql.executemany("""INSERT INTO Pholus_Scan ("Info", "Time", "MAC", "IP_v4_or_v6", "Record_Type", "Value", "Extra") VALUES (?, ?, ?, ?, ?, ?, ?)""", params)
            db.commitDB()

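#-------------------------------------------------------------------------------
# Timeout example (a sketch): with timeoutSec = 600 the effective -stimeout
# passed to pholus3.py is str(round(600 / 2, 0)) == '300.0', i.e. half the
# configured time, because the scan phase runs twice.
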
#-------------------------------------------------------------------------------
def cleanResult(pResult):
    # pResult renamed from str to avoid shadowing the built-in
    # alternative: pResult.split('.')[0]
    pResult = pResult.replace("._airplay", "")
    pResult = pResult.replace("._tcp", "")
    pResult = pResult.replace(".local", "")
    pResult = pResult.replace("._esphomelib", "")
    pResult = pResult.replace("._googlecast", "")
    pResult = pResult.replace(".lan", "")
    pResult = pResult.replace(".home", "")
    pResult = re.sub(r'-[a-fA-F0-9]{32}', '', pResult) # removing the last part of e.g. Nest-Audio-ff77ff77ff77ff77ff77ff77ff77ff77
    # remove trailing dots
    if pResult.endswith('.'):
        pResult = pResult[:-1]

    return pResult

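#-------------------------------------------------------------------------------
# Example (a sketch): cleanResult('Nest-Audio-ff77ff77ff77ff77ff77ff77ff77ff77._googlecast._tcp.local.')
# strips the mDNS suffixes and the 32-char hex tail and returns 'Nest-Audio'.
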
# Disclaimer - I'm interfacing with a script I didn't write (pholus3.py) so it's possible I'm missing types of answers
# it's also possible the pholus3.py script can be adjusted to provide a better output to interface with it
# Hit me with a PR if you know how! :)
def resolve_device_name_pholus(pMAC, pIP, allRes):

    pholusMatchesIndexes = []

    index = 0
    for result in allRes:
        # limiting the entries used for name resolution to the ones containing the current IP (v4 only)
        if result["MAC"] == pMAC and result["Record_Type"] == "Answer" and result["IP_v4_or_v6"] == pIP and '._googlezone' not in result["Value"]:
            # found entries with a matching MAC address, let's collect the indexes
            pholusMatchesIndexes.append(index)

        index += 1

    # return if nothing was found
    if len(pholusMatchesIndexes) == 0:
        return -1

    # we have some entries, let's try to select the most useful one

    # airplay matches contain a lot of information
    # Matches for example:
    # Brand Tv (50)._airplay._tcp.local. TXT Class:32769 "acl=0 deviceid=66:66:66:66:66:66 features=0x77777,0x38BCB46 rsf=0x3 fv=p20.T-FFFFFF-03.1 flags=0x204 model=XXXX manufacturer=Brand serialNumber=XXXXXXXXXXX protovers=1.1 srcvers=777.77.77 pi=FF:FF:FF:FF:FF:FF psi=00000000-0000-0000-0000-FFFFFFFFFF gid=00000000-0000-0000-0000-FFFFFFFFFF gcgl=0 pk=AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
    for i in pholusMatchesIndexes:
        if checkIPV4(allRes[i]['IP_v4_or_v6']) and '._airplay._tcp.local. TXT Class:32769' in str(allRes[i]["Value"]):
            return allRes[i]["Value"].split('._airplay._tcp.local. TXT Class:32769')[0]

    # second best - contains airplay
    # Matches for example:
    # _airplay._tcp.local. PTR Class:IN "Brand Tv (50)._airplay._tcp.local."
    for i in pholusMatchesIndexes:
        if checkIPV4(allRes[i]['IP_v4_or_v6']) and '_airplay._tcp.local. PTR Class:IN' in allRes[i]["Value"] and ('._googlecast') not in allRes[i]["Value"]:
            return cleanResult(allRes[i]["Value"].split('"')[1])

    # Contains PTR Class:32769
    # Matches for example:
    # 3.1.168.192.in-addr.arpa. PTR Class:32769 "MyPc.local."
    for i in pholusMatchesIndexes:
        if checkIPV4(allRes[i]['IP_v4_or_v6']) and 'PTR Class:32769' in allRes[i]["Value"]:
            return cleanResult(allRes[i]["Value"].split('"')[1])

    # Contains AAAA Class:IN
    # Matches for example:
    # DESKTOP-SOMEID.local. AAAA Class:IN "fe80::fe80:fe80:fe80:fe80"
    for i in pholusMatchesIndexes:
        if checkIPV4(allRes[i]['IP_v4_or_v6']) and 'AAAA Class:IN' in allRes[i]["Value"]:
            return cleanResult(allRes[i]["Value"].split('.local.')[0])

    # Contains _googlecast._tcp.local. PTR Class:IN
    # Matches for example:
    # _googlecast._tcp.local. PTR Class:IN "Nest-Audio-ff77ff77ff77ff77ff77ff77ff77ff77._googlecast._tcp.local."
    for i in pholusMatchesIndexes:
        if checkIPV4(allRes[i]['IP_v4_or_v6']) and '_googlecast._tcp.local. PTR Class:IN' in allRes[i]["Value"] and ('Google-Cast-Group') not in allRes[i]["Value"]:
            return cleanResult(allRes[i]["Value"].split('"')[1])

    # Contains A Class:32769
    # Matches for example:
    # Android.local. A Class:32769 "192.168.1.6"
    for i in pholusMatchesIndexes:
        if checkIPV4(allRes[i]['IP_v4_or_v6']) and ' A Class:32769' in allRes[i]["Value"]:
            return cleanResult(allRes[i]["Value"].split(' A Class:32769')[0])

    # Contains PTR Class:IN
    # Matches for example:
    # _esphomelib._tcp.local. PTR Class:IN "ceiling-light-1._esphomelib._tcp.local."
    for i in pholusMatchesIndexes:
        if checkIPV4(allRes[i]['IP_v4_or_v6']) and 'PTR Class:IN' in allRes[i]["Value"]:
            return cleanResult(allRes[i]["Value"].split('"')[1])

    return -1

#-------------------------------------------------------------------------------

def resolve_device_name_dig(pMAC, pIP):

    newName = ""

    try:
        # reverse DNS lookup
        dig_args = ['dig', '+short', '-x', pIP]

        # Execute the command
        try:
            # try running a subprocess
            newName = subprocess.check_output(dig_args, universal_newlines=True)
        except subprocess.CalledProcessError as e:
            # An error occurred, handle it
            mylog('none', ['[device_name_dig] ', e.output])
            # newName = "Error - check logs"
            return -1

        # Check the returned value
        newName = newName.strip()

        if len(newName) == 0:
            return -1

        # Cleanup
        newName = cleanResult(newName)

        if newName == "" or len(newName) == 0:
            return -1

        # Return the new name
        return newName

    # not found
    except subprocess.CalledProcessError:
        return -1

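#-------------------------------------------------------------------------------
# Example (a sketch): for pIP = '192.168.1.10', `dig +short -x 192.168.1.10`
# might print 'mypc.lan.' - after strip() and cleanResult() the function
# returns 'mypc'.
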
94
pialert/scanners/pihole.py
Executable file
@@ -0,0 +1,94 @@
""" module to import the db and leases from PiHole """

import sqlite3

import conf
from const import piholeDB, piholeDhcpleases
from logger import mylog

#-------------------------------------------------------------------------------
def copy_pihole_network(db):
    """
    attach the PiHole database and copy the PiHole_Network table across into the PiAlert DB
    """

    sql = db.sql # TO-DO
    # Open the Pi-hole DB
    mylog('debug', '[PiHole Network] - attach PiHole DB')

    try:
        sql.execute("ATTACH DATABASE '" + piholeDB + "' AS PH")
    except sqlite3.Error as e:
        mylog('none', ['[PiHole Network] - SQL ERROR: ', e])


    # Copy the Pi-hole Network table

    try:
        sql.execute("DELETE FROM PiHole_Network")

        # just for reporting
        new_devices = []
        sql.execute("""SELECT hwaddr, macVendor, lastQuery,
                            (SELECT name FROM PH.network_addresses
                             WHERE network_id = id ORDER BY lastseen DESC, ip),
                            (SELECT ip FROM PH.network_addresses
                             WHERE network_id = id ORDER BY lastseen DESC, ip)
                       FROM PH.network
                       WHERE hwaddr NOT LIKE 'ip-%'
                         AND hwaddr <> '00:00:00:00:00:00' """)
        new_devices = sql.fetchall()

        # insert into the PiAlert DB
        sql.execute("""INSERT INTO PiHole_Network (PH_MAC, PH_Vendor, PH_LastQuery,
                                                   PH_Name, PH_IP)
                       SELECT hwaddr, macVendor, lastQuery,
                            (SELECT name FROM PH.network_addresses
                             WHERE network_id = id ORDER BY lastseen DESC, ip),
                            (SELECT ip FROM PH.network_addresses
                             WHERE network_id = id ORDER BY lastseen DESC, ip)
                       FROM PH.network
                       WHERE hwaddr NOT LIKE 'ip-%'
                         AND hwaddr <> '00:00:00:00:00:00' """)
        sql.execute("""UPDATE PiHole_Network SET PH_Name = '(unknown)'
                        WHERE PH_Name IS NULL OR PH_Name = '' """)
        # Close the Pi-hole DB
        sql.execute("DETACH PH")

    except sqlite3.Error as e:
        mylog('none', ['[PiHole Network] - SQL ERROR: ', e])

    db.commitDB()

    mylog('debug', ['[PiHole Network] - completed - found ', len(new_devices), ' devices'])
    return str(sql.rowcount) != "0"

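#-------------------------------------------------------------------------------
# Note on the query above (as used by this code; a sketch of the Pi-hole FTL
# schema it reads): PH.network holds one row per known client (id, hwaddr,
# macVendor, lastQuery) and PH.network_addresses one row per address
# (network_id, ip, name, lastseen); the correlated subqueries pick the most
# recently seen name/IP per client.
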
#-------------------------------------------------------------------------------

#-------------------------------------------------------------------------------
def read_DHCP_leases(db):
    """
    read the PiHole DHCP leases file and insert all records into the DHCP_Leases table.
    """
    mylog('debug', '[PiHole DHCP] - read DHCP_Leases file')
    # Read the DHCP leases
    # Bugfix #1 - dhcp.leases: lines with a different number of columns (keep 5-column lines only)
    data = []
    reporting = False
    with open(piholeDhcpleases, 'r') as f:
        for line in f:
            reporting = True
            row = line.rstrip().split()
            if len(row) == 5:
                data.append(row)

    # Insert into the PiAlert table
    db.sql.executemany("""INSERT INTO DHCP_Leases (DHCP_DateTime, DHCP_MAC,
                                                   DHCP_IP, DHCP_Name, DHCP_MAC2)
                          VALUES (?, ?, ?, ?, ?)
                       """, data)
    db.commitDB()

    mylog('debug', ['[PiHole DHCP] - completed - added ', len(data), ' devices.'])
    return reporting

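#-------------------------------------------------------------------------------
# A dnsmasq-style lease line (a sketch of the 5 columns mapped above):
#
#   1685443200 aa:bb:cc:dd:ee:ff 192.168.1.23 my-laptop 01:aa:bb:cc:dd:ee:ff
#   (expiry)   (MAC)             (IP)         (name)    (client-id)
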
41
pialert/scheduler.py
Executable file
@@ -0,0 +1,41 @@
""" class to manage schedules """
import datetime

from logger import mylog, print_log
import conf

#-------------------------------------------------------------------------------
class schedule_class:
    def __init__(self, service, scheduleObject, last_next_schedule, was_last_schedule_used, last_run=0):
        self.service = service
        self.scheduleObject = scheduleObject
        self.last_next_schedule = last_next_schedule
        self.last_run = last_run
        self.was_last_schedule_used = was_last_schedule_used

    def runScheduleCheck(self):

        result = False

        # Initialize the last run time if never run before
        if self.last_run == 0:
            self.last_run = (datetime.datetime.now(conf.tz) - datetime.timedelta(days=365)).replace(microsecond=0)

        # get the current time in the currently specified timezone
        nowTime = datetime.datetime.now(conf.tz).replace(microsecond=0)

        # Run the schedule if the current time is past the schedule time we saved last time and
        # (maybe the following check is unnecessary:)
        # the last run happened before that saved schedule time
        if nowTime > self.last_next_schedule and self.last_run < self.last_next_schedule:
            mylog('debug', f'[Scheduler] - Scheduler run for {self.service}: YES')
            self.was_last_schedule_used = True
            result = True
        else:
            mylog('debug', f'[Scheduler] - Scheduler run for {self.service}: NO')

        if self.was_last_schedule_used:
            self.was_last_schedule_used = False
            self.last_next_schedule = self.scheduleObject.next()

        return result

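#-------------------------------------------------------------------------------
# Usage sketch (assumes a croniter-style scheduleObject whose .next() returns
# the next run time as a timezone-aware datetime - the names are illustrative):
#
#   sched = schedule_class('PHOLUS', cron_obj, cron_obj.next(), False)
#   if sched.runScheduleCheck():
#       ...  # trigger the scheduled Pholus scan
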
1
test/__init__.py
Executable file
@@ -0,0 +1 @@
""" tests for Pi.Alert """
29
test/test_helper.py
Executable file
@@ -0,0 +1,29 @@
import sys
import pathlib

sys.path.append(str(pathlib.Path(__file__).parent.parent.resolve()) + "/pialert/")


import datetime

from helper import timeNow, updateSubnets


# -------------------------------------------------------------------------------
def test_helper():
    assert timeNow() == datetime.datetime.now().replace(microsecond=0)


# -------------------------------------------------------------------------------
def test_updateSubnets():
    # test a single subnet
    subnet = "192.168.1.0/24 --interface=eth0"
    result = updateSubnets(subnet)
    assert type(result) is list
    assert len(result) == 1

    # test multiple subnets
    subnet = ["192.168.1.0/24 --interface=eth0", "192.168.2.0/24 --interface=eth1"]
    result = updateSubnets(subnet)
    assert type(result) is list
    assert len(result) == 2