Jokob-sk
2023-06-10 21:48:11 +10:00
20 changed files with 314 additions and 200 deletions

.gitignore vendored
View File

@@ -2,6 +2,7 @@
.DS_Store
config/pialert.conf
db/*
+db/pialert.db
front/log/*
front/plugins/**/*.log
**/%40eaDir/

View File

@@ -51,13 +51,14 @@ The system continuously scans the network for **New devices**, **New connection
- Theme Selection (blue, red, green, yellow, black, purple) and Light/Dark-Mode Switch
- DB maintenance, Backup, Restore tools and CSV Export / Import
- Simple login Support
-- 🌟(Experimental) [Plugin system](https://github.com/jokob-sk/Pi.Alert/tree/main/front/plugins)
+- 🌟[Plugin system](https://github.com/jokob-sk/Pi.Alert/tree/main/front/plugins)
  - Create custom plugins with automatically generated settings and UI.
  - Monitor anything for changes
  - Check the [instructions](https://github.com/jokob-sk/Pi.Alert/tree/main/front/plugins) carefully if you are up for a challenge! Current plugins include:
    - Detecting Rogue DHCP servers via NMAP
    - Monitoring HTTP status changes of domains/URLs
    - Import devices from DHCP.leases files, a UniFi controller, or an SNMP enabled router
+   - Creation of dummy devices to visualize your [network map](https://github.com/jokob-sk/Pi.Alert/blob/main/docs/NETWORK_TREE.md)

| ![Screen 1][screen1] | ![Screen 2][screen2] | ![Screen 5][screen5] |
|----------------------|----------------------|----------------------|
@@ -87,6 +88,7 @@ The system continuously scans the network for **New devices**, **New connection
- [Macleykun](https://github.com/Macleykun): Help with Dockerfile clean-up
- [Final-Hawk](https://github.com/Final-Hawk): Help with NTFY, styling and other fixes
- [TeroRERO](https://github.com/terorero): Spanish translation
+- [Data-Monkey](https://github.com/Data-Monkey): Split-up of the pialert.py file and more
- Please see the [Git contributors](https://github.com/jokob-sk/Pi.Alert/graphs/contributors) for a full list of people and their contributions to the project

## ☕ Support me

Binary file not shown.

View File

@@ -7,18 +7,21 @@ services:
    network_mode: "host"
    restart: unless-stopped
    volumes:
-     - ${APP_DATA_LOCATION}/pialert2/config:/home/pi/pialert/config
+     # - ${APP_DATA_LOCATION}/pialert_dev/config:/home/pi/pialert/config
+     - ${APP_DATA_LOCATION}/pialert/config:/home/pi/pialert/config
      # - ${APP_DATA_LOCATION}/pialert/db/pialert.db:/home/pi/pialert/db/pialert.db
-     - ${APP_DATA_LOCATION}/pialert2/db:/home/pi/pialert/db
+     # - ${APP_DATA_LOCATION}/pialert_dev/db:/home/pi/pialert/db
+     - ${APP_DATA_LOCATION}/pialert/db:/home/pi/pialert/db
      # (optional) useful for debugging if you have issues setting up the container
      - ${LOGS_LOCATION}:/home/pi/pialert/front/log
      # DELETE START anyone trying to use this file: comment out / delete BELOW lines, they are only for development purposes
      - ${APP_DATA_LOCATION}/pialert/dhcp_samples/dhcp1.leases:/mnt/dhcp1.leases
      - ${APP_DATA_LOCATION}/pialert/dhcp_samples/dhcp2.leases:/mnt/dhcp2.leases
-     - ${DEV_LOCATION}/back/pialert.py:/home/pi/pialert/back/pialert.py
-     - ${DEV_LOCATION}/back/report_template.html:/home/pi/pialert/back/report_template.html
-     - ${DEV_LOCATION}/back/report_template_new_version.html:/home/pi/pialert/back/report_template_new_version.html
-     - ${DEV_LOCATION}/back/report_template.txt:/home/pi/pialert/back/report_template.txt
+     # - ${DEV_LOCATION}/back/pialert.py:/home/pi/pialert/back/pialert.py
+     - ${DEV_LOCATION}/pialert:/home/pi/pialert/pialert
+     # - ${DEV_LOCATION}/back/report_template.html:/home/pi/pialert/back/report_template.html
+     # - ${DEV_LOCATION}/back/report_template_new_version.html:/home/pi/pialert/back/report_template_new_version.html
+     # - ${DEV_LOCATION}/back/report_template.txt:/home/pi/pialert/back/report_template.txt
      - ${DEV_LOCATION}/pholus:/home/pi/pialert/pholus
      - ${DEV_LOCATION}/dockerfiles:/home/pi/pialert/dockerfiles
      - ${APP_DATA_LOCATION}/pialert/php.ini:/etc/php/7.4/fpm/php.ini

View File

@@ -77,6 +77,16 @@ If you submit a PR please:
4. New features code should ideally be re-usable for different purposes, not for a very narrow use-case.
5. New functionality should ideally be implemented via the Plugins system, if possible.

+Suggested test cases:
+
+- Blank setup with no DB or config
+- Existing DB / config
+- Sending a notification (e.g. delete a device and wait for a scan to run) and testing all notification gateways, especially:
+  - Email, Apprise (e.g. via Telegram), webhook (e.g. via Discord), MQTT (e.g. via Home Assistant)
+- Saving settings
+- Test a couple of plugins
+- Check the Error log for anything unusual

Some additional context:

* Permanent settings/config is stored in the `pialert.conf` file

View File

@@ -116,6 +116,7 @@ $lang['en_us'] = array(
'Presence_CalHead_quarter' => 'quarter',
'Presence_CalHead_month' => 'month',
'Presence_CalHead_week' => 'week',
+'Presence_CalHead_day' => 'day',

//////////////////////////////////////////////////////////////////
// Events Page

View File

@@ -234,7 +234,7 @@ function initializeCalendar () {
header: {
    left   : 'prev,next today',
    center : 'title',
-   right  : 'timelineYear,timelineMonth,timelineWeek'
+   right  : 'timelineYear,timelineMonth,timelineWeek,timelineDay'
},
defaultView : 'timelineMonth',
height      : 'auto',
@@ -286,6 +286,13 @@ function initializeCalendar () {
    buttonText      : '<?= lang('Presence_CalHead_week');?>',
    slotLabelFormat : 'D',
    slotDuration    : '24:00:01'
+},
+timelineDay: {
+    type            : 'timeline',
+    duration        : { day: 1 },
+    buttonText      : '<?= lang('Presence_CalHead_day');?>',
+    slotLabelFormat : 'H',
+    slotDuration    : '00:30:00'
}
},
@@ -305,6 +312,15 @@ function initializeCalendar () {
if (date.format('YYYY-MM-DD') == moment().format('YYYY-MM-DD')) {
    cell.addClass ('fc-today'); };
+if ($('#calendar').fullCalendar('getView').name == 'timelineDay') {
+    cell.removeClass('fc-sat');
+    cell.removeClass('fc-sun');
+    cell.removeClass('fc-today');
+    if (date.format('YYYY-MM-DD HH') == moment().format('YYYY-MM-DD HH')) {
+        cell.addClass('fc-today');
+    }
+};
},
resourceRender: function (resourceObj, labelTds, bodyTds) {

View File

@@ -13,10 +13,9 @@
#===============================================================================
# IMPORTS
#===============================================================================
-from __future__ import print_function
+#from __future__ import print_function
import sys
-from collections import namedtuple
import time
import datetime
import multiprocessing
@@ -25,7 +24,7 @@ import multiprocessing
import conf
from const import *
from logger import mylog
-from helper import filePermissions, isNewVersion, timeNow, timeNowTZ, updateState
+from helper import filePermissions, isNewVersion, timeNow, updateState
from api import update_api
from networkscan import process_scan, scan_network
from initialise import importConfigs
@@ -75,6 +74,7 @@ main structure of Pi Alert
"""
def main ():

+   mylog('debug', ['[MAIN] Setting up ...'])
    conf.time_started = datetime.datetime.now()
    conf.cycle = ""
@@ -109,7 +109,7 @@ def main ():
    # Open DB once and keep open
    # Opening / closing DB frequently actually causes more issues
    db = DB()  # instance of class DB
-   db.openDB()
+   db.open()
    sql = db.sql  # To-Do replace with the db class

    # Upgrade DB if needed
@@ -119,13 +119,12 @@ def main ():
    #===============================================================================
    # This is the main loop of Pi.Alert
    #===============================================================================
    while True:

        # update time started
        time_started = datetime.datetime.now()  # not sure why we need this ...
        loop_start_time = timeNow()
-       mylog('debug', '[MAIN] Stating loop')
+       mylog('debug', '[MAIN] Starting loop')

        # re-load user configuration and plugins
        importConfigs(db)
@@ -311,4 +310,5 @@ def main ():
# BEGIN
#===============================================================================
if __name__ == '__main__':
+   mylog('debug', ['[__main__] Welcome to Pi.Alert'])
    sys.exit(main())

View File

@@ -10,7 +10,11 @@ userSubnets = []
mySchedules = []  # bad solution for global - TO-DO
plugins = []      # bad solution for global - TO-DO
tz = ''

-lastTimeImported = 0  # Time the config was last imported
+# modified time of the most recently imported config file;
+# set to a small value to force an import on first run
+lastImportedConfFile = 1.1

plugins_once_run = False
newVersionAvailable = False
time_started = ''
@@ -20,10 +24,12 @@ arpscan_devices = []
# for MQTT
mqtt_connected_to_broker = False
mqtt_sensors = []
+client = None  # mqtt client

# for notifications
changedPorts_json_struc = None

# ACTUAL CONFIGURATION ITEMS set to defaults

# General

View File

@@ -26,8 +26,15 @@ piholeDhcpleases = '/etc/pihole/dhcp.leases'
#===============================================================================
# SQL queries
#===============================================================================
-sql_devices_all = "select dev_MAC, dev_Name, dev_DeviceType, dev_Vendor, dev_Group, dev_FirstConnection, dev_LastConnection, dev_LastIP, dev_StaticIP, dev_PresentLastScan, dev_LastNotification, dev_NewDevice, dev_Network_Node_MAC_ADDR, dev_Network_Node_port, dev_Icon from Devices"
-sql_devices_stats = "SELECT Online_Devices as online, Down_Devices as down, All_Devices as 'all', Archived_Devices as archived, (select count(*) from Devices a where dev_NewDevice = 1 ) as new, (select count(*) from Devices a where dev_Name = '(unknown)' or dev_Name = '(name not found)' ) as unknown from Online_History order by Scan_Date desc limit 1"
+sql_devices_all = """select dev_MAC, dev_Name, dev_DeviceType, dev_Vendor, dev_Group,
+                       dev_FirstConnection, dev_LastConnection, dev_LastIP, dev_StaticIP,
+                       dev_PresentLastScan, dev_LastNotification, dev_NewDevice,
+                       dev_Network_Node_MAC_ADDR, dev_Network_Node_port,
+                       dev_Icon from Devices"""
+sql_devices_stats = """SELECT Online_Devices as online, Down_Devices as down, All_Devices as 'all', Archived_Devices as archived,
+                         (select count(*) from Devices a where dev_NewDevice = 1 ) as new,
+                         (select count(*) from Devices a where dev_Name = '(unknown)' or dev_Name = '(name not found)' ) as unknown
+                       from Online_History order by Scan_Date desc limit 1"""
sql_nmap_scan_all = "SELECT * FROM Nmap_Scan"
sql_pholus_scan_all = "SELECT * FROM Pholus_Scan"
sql_events_pending_alert = "SELECT * FROM Events where eve_PendingAlertEmail is not 0"
@@ -36,12 +43,12 @@ sql_plugins_objects = "SELECT * FROM Plugins_Objects"
sql_language_strings = "SELECT * FROM Plugins_Language_Strings"
sql_plugins_events = "SELECT * FROM Plugins_Events"
sql_plugins_history = "SELECT * FROM Plugins_History ORDER BY 'Index' DESC"
-sql_new_devices = """SELECT * FROM ( SELECT eve_IP as dev_LastIP, eve_MAC as dev_MAC FROM Events_Devices
+sql_new_devices = """SELECT * FROM (
+                        SELECT eve_IP as dev_LastIP, eve_MAC as dev_MAC
+                        FROM Events_Devices
                         WHERE eve_PendingAlertEmail = 1
                         AND eve_EventType = 'New Device'
                         ORDER BY eve_DateTime ) t1
                     LEFT JOIN
-                    (
-                        SELECT dev_Name, dev_MAC as dev_MAC_t2 FROM Devices
-                    ) t2
+                    ( SELECT dev_Name, dev_MAC as dev_MAC_t2 FROM Devices) t2
                     ON t1.dev_MAC = t2.dev_MAC_t2"""

View File

@@ -15,13 +15,17 @@ from helper import json_struc, initOrSetParam, row_to_json, timeNow #, updateSta
class DB():
+   """
+   DB class to provide the basic database interactions:
+   Open / Commit / Close / read / write.
+   """

    def __init__(self):
        self.sql = None
        self.sql_connection = None

    #-------------------------------------------------------------------------------
-   def openDB (self):
+   def open (self):
        # Check if DB is open
        if self.sql_connection != None :
            mylog('debug','openDB: database already open')
@@ -29,11 +33,15 @@ class DB():
        mylog('none', '[Database] Opening DB' )

        # Open DB and Cursor
+       try:
            self.sql_connection = sqlite3.connect (fullDbPath, isolation_level=None)
-           self.sql_connection.execute('pragma journal_mode=wal')
+           # self.sql_connection.execute('pragma journal_mode=wal')
            self.sql_connection.text_factory = str
            self.sql_connection.row_factory = sqlite3.Row
            self.sql = self.sql_connection.cursor()
+       except sqlite3.Error as e:
+           mylog('none',[ '[Database] - Open DB Error: ', e])
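
Setting `row_factory = sqlite3.Row` is what lets callers read result columns by name rather than position. A minimal standalone sketch of the effect (the in-memory DB and table here are illustrative only):

```python
import sqlite3

conn = sqlite3.connect(":memory:")
conn.row_factory = sqlite3.Row                   # rows now support name-based access
cur = conn.cursor()
cur.execute("CREATE TABLE Devices (dev_MAC TEXT, dev_Name TEXT)")
cur.execute("INSERT INTO Devices VALUES ('aa:bb:cc:dd:ee:ff', 'router')")

row = cur.execute("SELECT * FROM Devices").fetchone()
print(row["dev_Name"])                           # by column name: 'router'
print(row[0])                                    # positional access still works
```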
    #-------------------------------------------------------------------------------
    def commitDB (self):
@@ -69,30 +77,36 @@ class DB():
    # Cleanup / upkeep database
    #===============================================================================
    def cleanup_database (self, startTime, DAYS_TO_KEEP_EVENTS, PHOLUS_DAYS_DATA):
+       """
+       Clean out old records from the tables that don't need to keep all data.
+       """
        # Header
        #updateState(self,"Upkeep: Clean DB")
-       mylog('verbose', ['[', startTime, '] Upkeep Database:' ])
+       mylog('verbose', ['[DB Cleanup] Upkeep Database:' ])

        # Cleanup Online History
-       mylog('verbose', ['    Online_History: Delete all but keep latest 150 entries'])
-       self.sql.execute ("""DELETE from Online_History where "Index" not in ( SELECT "Index" from Online_History order by Scan_Date desc limit 150)""")
-       mylog('verbose', ['    Optimize Database'])
+       mylog('verbose', ['[DB Cleanup] Online_History: Delete all but keep latest 150 entries'])
+       self.sql.execute ("""DELETE from Online_History where "Index" not in (
+                               SELECT "Index" from Online_History
+                               order by Scan_Date desc limit 150)""")
+       mylog('verbose', ['[DB Cleanup] Optimize Database'])

        # Cleanup Events
-       mylog('verbose', ['    Events: Delete all older than '+str(DAYS_TO_KEEP_EVENTS)+' days'])
-       self.sql.execute ("DELETE FROM Events WHERE eve_DateTime <= date('now', '-"+str(DAYS_TO_KEEP_EVENTS)+" day')")
+       mylog('verbose', ['[DB Cleanup] Events: Delete all older than '+str(DAYS_TO_KEEP_EVENTS)+' days'])
+       self.sql.execute ("""DELETE FROM Events
+                            WHERE eve_DateTime <= date('now', '-""" + str(DAYS_TO_KEEP_EVENTS) + """ day')""")

        # Cleanup Plugin Events History
-       mylog('verbose', ['    Plugin Events History: Delete all older than '+str(DAYS_TO_KEEP_EVENTS)+' days'])
-       self.sql.execute ("DELETE FROM Plugins_History WHERE DateTimeChanged <= date('now', '-"+str(DAYS_TO_KEEP_EVENTS)+" day')")
+       mylog('verbose', ['[DB Cleanup] Plugin Events History: Delete all older than '+str(DAYS_TO_KEEP_EVENTS)+' days'])
+       self.sql.execute ("""DELETE FROM Plugins_History
+                            WHERE DateTimeChanged <= date('now', '-""" + str(DAYS_TO_KEEP_EVENTS) + """ day')""")

        # Cleanup Pholus_Scan
        if PHOLUS_DAYS_DATA != 0:
-           mylog('verbose', ['    Pholus_Scan: Delete all older than ' + str(PHOLUS_DAYS_DATA) + ' days'])
-           self.sql.execute ("DELETE FROM Pholus_Scan WHERE Time <= date('now', '-"+ str(PHOLUS_DAYS_DATA) +" day')") # improvement possibility: keep at least N per mac
+           mylog('verbose', ['[DB Cleanup] Pholus_Scan: Delete all older than ' + str(PHOLUS_DAYS_DATA) + ' days'])
+           # improvement possibility: keep at least N per mac
+           self.sql.execute ("""DELETE FROM Pholus_Scan
+                                WHERE Time <= date('now', '-""" + str(PHOLUS_DAYS_DATA) + """ day')""")

        # De-Dupe (de-duplicate - remove duplicate entries) from the Pholus_Scan table
-       mylog('verbose', ['    Pholus_Scan: Delete all duplicates'])
+       mylog('verbose', ['[DB Cleanup] Pholus_Scan: Delete all duplicates'])
        self.sql.execute ("""DELETE FROM Pholus_Scan
                    WHERE rowid > (
                        SELECT MIN(rowid) FROM Pholus_Scan p2
@@ -100,7 +114,6 @@ class DB():
                        AND Pholus_Scan.Value = p2.Value
                        AND Pholus_Scan.Record_Type = p2.Record_Type
                    );""")

        # De-Dupe (de-duplicate - remove duplicate entries) from the Nmap_Scan table
        mylog('verbose', ['    Nmap_Scan: Delete all duplicates'])
        self.sql.execute ("""DELETE FROM Nmap_Scan
@@ -115,15 +128,16 @@ class DB():
        # Shrink DB
        mylog('verbose', ['    Shrink Database'])
        self.sql.execute ("VACUUM;")

        self.commitDB()

    #-------------------------------------------------------------------------------
    def upgradeDB(self):
-       sql = self.sql #TO-DO
+       """
+       Check the current tables in the DB and upgrade them if necessary.
+       """
        # indicates if the Online_History table is available
-       onlineHistoryAvailable = sql.execute("""
+       onlineHistoryAvailable = self.sql.execute("""
            SELECT name FROM sqlite_master WHERE type='table'
            AND name='Online_History';
            """).fetchall() != []
@@ -132,18 +146,18 @@ class DB():
        isIncompatible = False

        if onlineHistoryAvailable :
-           isIncompatible = sql.execute ("""
+           isIncompatible = self.sql.execute ("""
              SELECT COUNT(*) AS CNTREC FROM pragma_table_info('Online_History') WHERE name='Archived_Devices'
              """).fetchone()[0] == 0

        # Drop table if available, but incompatible
        if onlineHistoryAvailable and isIncompatible:
            mylog('none','[upgradeDB] Table is incompatible, Dropping the Online_History table')
-           sql.execute("DROP TABLE Online_History;")
+           self.sql.execute("DROP TABLE Online_History;")
            onlineHistoryAvailable = False

        if onlineHistoryAvailable == False :
-           sql.execute("""
+           self.sql.execute("""
            CREATE TABLE "Online_History" (
                "Index" INTEGER,
                "Scan_Date" TEXT,
@@ -157,40 +171,40 @@ class DB():
        # Alter Devices table
        # dev_Network_Node_MAC_ADDR column
-       dev_Network_Node_MAC_ADDR_missing = sql.execute ("""
+       dev_Network_Node_MAC_ADDR_missing = self.sql.execute ("""
          SELECT COUNT(*) AS CNTREC FROM pragma_table_info('Devices') WHERE name='dev_Network_Node_MAC_ADDR'
          """).fetchone()[0] == 0

        if dev_Network_Node_MAC_ADDR_missing :
            mylog('verbose', ["[upgradeDB] Adding dev_Network_Node_MAC_ADDR to the Devices table"])
-           sql.execute("""
+           self.sql.execute("""
            ALTER TABLE "Devices" ADD "dev_Network_Node_MAC_ADDR" TEXT
            """)

        # dev_Network_Node_port column
-       dev_Network_Node_port_missing = sql.execute ("""
+       dev_Network_Node_port_missing = self.sql.execute ("""
          SELECT COUNT(*) AS CNTREC FROM pragma_table_info('Devices') WHERE name='dev_Network_Node_port'
          """).fetchone()[0] == 0

        if dev_Network_Node_port_missing :
            mylog('verbose', ["[upgradeDB] Adding dev_Network_Node_port to the Devices table"])
-           sql.execute("""
+           self.sql.execute("""
            ALTER TABLE "Devices" ADD "dev_Network_Node_port" INTEGER
            """)

        # dev_Icon column
-       dev_Icon_missing = sql.execute ("""
+       dev_Icon_missing = self.sql.execute ("""
          SELECT COUNT(*) AS CNTREC FROM pragma_table_info('Devices') WHERE name='dev_Icon'
          """).fetchone()[0] == 0

        if dev_Icon_missing :
            mylog('verbose', ["[upgradeDB] Adding dev_Icon to the Devices table"])
-           sql.execute("""
+           self.sql.execute("""
            ALTER TABLE "Devices" ADD "dev_Icon" TEXT
            """)

        # indicates if the Settings table is available
-       settingsMissing = sql.execute("""
+       settingsMissing = self.sql.execute("""
            SELECT name FROM sqlite_master WHERE type='table'
            AND name='Settings';
            """).fetchone() == None
@@ -199,9 +213,9 @@ class DB():
        mylog('verbose', ["[upgradeDB] Re-creating Settings table"])

        if settingsMissing == False:
-           sql.execute("DROP TABLE Settings;")
+           self.sql.execute("DROP TABLE Settings;")

-       sql.execute("""
+       self.sql.execute("""
        CREATE TABLE "Settings" (
            "Code_Name" TEXT,
            "Display_Name" TEXT,
@@ -216,19 +230,19 @@ class DB():
        """)

        # indicates if the Pholus_Scan table is available
-       pholusScanMissing = sql.execute("""
+       pholusScanMissing = self.sql.execute("""
            SELECT name FROM sqlite_master WHERE type='table'
            AND name='Pholus_Scan';
            """).fetchone() == None

        # if pholusScanMissing == False:
        #     # Re-creating Pholus_Scan table
-       #     sql.execute("DROP TABLE Pholus_Scan;")
+       #     self.sql.execute("DROP TABLE Pholus_Scan;")
        #     pholusScanMissing = True

        if pholusScanMissing:
            mylog('verbose', ["[upgradeDB] Re-creating Pholus_Scan table"])
-           sql.execute("""
+           self.sql.execute("""
            CREATE TABLE "Pholus_Scan" (
                "Index" INTEGER,
                "Info" TEXT,
@@ -243,16 +257,16 @@ class DB():
        """)

        # indicates if the Nmap_Scan table is available
-       nmapScanMissing = sql.execute("""
+       nmapScanMissing = self.sql.execute("""
            SELECT name FROM sqlite_master WHERE type='table'
            AND name='Nmap_Scan';
            """).fetchone() == None

        # Re-creating Parameters table
        mylog('verbose', ["[upgradeDB] Re-creating Parameters table"])
-       sql.execute("DROP TABLE Parameters;")
-       sql.execute("""
+       self.sql.execute("DROP TABLE Parameters;")
+       self.sql.execute("""
        CREATE TABLE "Parameters" (
            "par_ID" TEXT PRIMARY KEY,
            "par_Value" TEXT
@@ -264,12 +278,12 @@ class DB():
        # if nmapScanMissing == False:
        #     # Re-creating Nmap_Scan table
-       #     sql.execute("DROP TABLE Nmap_Scan;")
+       #     self.sql.execute("DROP TABLE Nmap_Scan;")
        #     nmapScanMissing = True

        if nmapScanMissing:
            mylog('verbose', ["[upgradeDB] Re-creating Nmap_Scan table"])
-           sql.execute("""
+           self.sql.execute("""
            CREATE TABLE "Nmap_Scan" (
                "Index" INTEGER,
                "MAC" TEXT,
@@ -300,7 +314,7 @@ class DB():
            ForeignKey TEXT NOT NULL,
            PRIMARY KEY("Index" AUTOINCREMENT)
        ); """
-       sql.execute(sql_Plugins_Objects)
+       self.sql.execute(sql_Plugins_Objects)

        # Plugin execution results
        sql_Plugins_Events = """ CREATE TABLE IF NOT EXISTS Plugins_Events(
@@ -320,7 +334,7 @@ class DB():
            ForeignKey TEXT NOT NULL,
            PRIMARY KEY("Index" AUTOINCREMENT)
        ); """
-       sql.execute(sql_Plugins_Events)
+       self.sql.execute(sql_Plugins_Events)

        # Plugin execution history
        sql_Plugins_History = """ CREATE TABLE IF NOT EXISTS Plugins_History(
@@ -340,19 +354,19 @@ class DB():
            ForeignKey TEXT NOT NULL,
            PRIMARY KEY("Index" AUTOINCREMENT)
        ); """
-       sql.execute(sql_Plugins_History)
+       self.sql.execute(sql_Plugins_History)

        # Dynamically generated language strings
        # indicates if the Language_Strings table is available
-       languageStringsMissing = sql.execute("""
+       languageStringsMissing = self.sql.execute("""
            SELECT name FROM sqlite_master WHERE type='table'
            AND name='Plugins_Language_Strings';
            """).fetchone() == None

        if languageStringsMissing == False:
-           sql.execute("DROP TABLE Plugins_Language_Strings;")
+           self.sql.execute("DROP TABLE Plugins_Language_Strings;")

-       sql.execute(""" CREATE TABLE IF NOT EXISTS Plugins_Language_Strings(
+       self.sql.execute(""" CREATE TABLE IF NOT EXISTS Plugins_Language_Strings(
            "Index" INTEGER,
            Language_Code TEXT NOT NULL,
            String_Key TEXT NOT NULL,
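
The repeated `pragma_table_info` probes above implement idempotent migrations: check whether a table or column exists and create it only if absent, so `upgradeDB` can run safely on every start. A condensed sketch of the same pattern (assumes SQLite 3.16+ for table-valued pragmas; the table and column names are illustrative only):

```python
import sqlite3

def add_column_if_missing(cur, table, column, col_type):
    # DDL identifiers cannot be bound as parameters, hence the f-string;
    # only use this with trusted/validated names
    missing = cur.execute(
        f"SELECT COUNT(*) FROM pragma_table_info('{table}') WHERE name = ?",
        (column,)
    ).fetchone()[0] == 0
    if missing:
        cur.execute(f'ALTER TABLE "{table}" ADD "{column}" {col_type}')
    return missing

conn = sqlite3.connect(":memory:")
cur = conn.cursor()
cur.execute('CREATE TABLE "Devices" ("dev_MAC" TEXT)')
print(add_column_if_missing(cur, "Devices", "dev_Icon", "TEXT"))  # True: column added
print(add_column_if_missing(cur, "Devices", "dev_Icon", "TEXT"))  # False: already present
```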
@@ -389,7 +403,7 @@ class DB():
    #-------------------------------------------------------------------------------
    def read(self, query, *args):
        """check the query and arguments are aligned and are read only"""
-       mylog('debug',[ '[Database] - SELECT Query: ', query, " params: ", args])
+       mylog('debug',[ '[Database] - Read All: SELECT Query: ', query, " params: ", args])
        try:
            assert query.count('?') == len(args)
            assert query.upper().strip().startswith('SELECT')
@@ -402,11 +416,29 @@ class DB():
            mylog('none',[ '[Database] - SQL ERROR: ', e])
            return None

+   def read_one(self, query, *args):
+       """
+       Call read() with the same arguments but return only the first row.
+       Should only be used when a single-row result is expected.
+       """
+       mylog('debug',[ '[Database] - Read One: ', query, " params: ", args])
+       rows = self.read(query, *args)
+       if not rows:
+           # empty result set (or read() returned None after an error)
+           return None
+       if len(rows) > 1:
+           mylog('none',[ '[Database] - Warning!: query returned multiple rows, only the first row is passed on!', query, " params: ", args])
+       return rows[0]
#-------------------------------------------------------------------------------
def get_device_stats(db):
    # columns = ["online","down","all","archived","new","unknown"]
-   return db.read(sql_devices_stats)
+   return db.read_one(sql_devices_stats)

#-------------------------------------------------------------------------------
def get_all_devices(db):
    return db.read(sql_devices_all)
@@ -414,18 +446,24 @@ def get_all_devices(db):
#-------------------------------------------------------------------------------
#-------------------------------------------------------------------------------
-def insertOnlineHistory(db, cycle):
+def insertOnlineHistory(db):
    sql = db.sql  # TO-DO
    startTime = timeNow()

    # Add to History
+   # only run this if a scan has already populated CurrentScan
+   scanCount = db.read_one("SELECT count(*) FROM CurrentScan")
+   if scanCount[0] == 0 :
+       mylog('debug',[ '[insertOnlineHistory] - nothing to do, CurrentScan empty'])
+       return 0

    History_All = db.read("SELECT * FROM Devices")
    History_All_Devices = len(History_All)

    History_Archived = db.read("SELECT * FROM Devices WHERE dev_Archived = 1")
    History_Archived_Devices = len(History_Archived)

-   History_Online = db.read("SELECT * FROM CurrentScan WHERE cur_ScanCycle = ? ", cycle)
+   History_Online = db.read("SELECT * FROM CurrentScan")
    History_Online_Devices = len(History_Online)
    History_Offline_Devices = History_All_Devices - History_Archived_Devices - History_Online_Devices

View File

@@ -7,8 +7,8 @@ from pathlib import Path
import datetime

import conf
-from const import *
+from const import fullConfPath
-from helper import collect_lang_strings, timeNow, updateSubnets, initOrSetParam
+from helper import collect_lang_strings, updateSubnets, initOrSetParam
from logger import mylog
from api import update_api
from scheduler import schedule_class
@@ -43,15 +43,26 @@ def importConfigs (db):
    sql = db.sql

-   lastTimeImported = 0
-   # get config file
+   # get config file name
    config_file = Path(fullConfPath)

-   # Skip import if last time of import is NEWER than file age
-   if (os.path.getmtime(config_file) < lastTimeImported) :
+   # Only import the file if it was modified since the last import.
+   # This avoids time-zone issues: we just compare the previously stored
+   # modification timestamp to the current one.
+   mylog('debug', ['[Import Config] checking config file'])
+   mylog('debug', ['[Import Config] lastImportedConfFile :', conf.lastImportedConfFile])
+   mylog('debug', ['[Import Config] file modified time   :', os.path.getmtime(config_file)])
+   if (os.path.getmtime(config_file) == conf.lastImportedConfFile) :
+       mylog('debug', ['[Import Config] skipping config file import'])
        return

+   conf.lastImportedConfFile = os.path.getmtime(config_file)
+   mylog('debug', ['[Import Config] importing config file'])

    conf.mySettings = []  # reset settings
    conf.mySettingsSQLsafe = []  # same as above but safe to be passed into a SQL query
@@ -237,6 +248,7 @@ def read_config_file(filename):
    """
    Returns a dict of the config file's key:value pairs.
    """
+   mylog('info', '[Config] reading config file')

    # load the variables from pialert.conf
    code = compile(filename.read_text(), filename.name, "exec")
    confDict = {}  # config dictionary
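
The equality check on `getmtime` above sidesteps time-zone and clock arithmetic entirely: the stored value is only ever compared against a later reading of the same file attribute, never against "now". A minimal sketch of the pattern (function and variable names are illustrative, not the project's API):

```python
import os

_last_mtime = 1.1   # sentinel that cannot match a real mtime, so the first call imports

def import_if_changed(path):
    """Re-read the config only when the file's mtime differs from the stored one."""
    global _last_mtime
    mtime = os.path.getmtime(path)
    if mtime == _last_mtime:
        return False            # unchanged since the last import: skip
    _last_mtime = mtime
    # ... parse and apply the config file here ...
    return True
```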

View File

@@ -88,7 +88,7 @@ def query_MAC_vendor (pMAC):
        grep_output = subprocess.check_output (grep_args)
    except subprocess.CalledProcessError as e:
        # An error occurred, handle it
-       mylog('none', [e.output])
+       mylog('none', ["[Mac Vendor Check] Error: ", e.output])
        grep_output = " There was an error, check logs for details"

    # Return Vendor

View File

@@ -36,8 +36,6 @@ def scan_network (db):
    db.commitDB()

    # arp-scan command
    conf.arpscan_devices = []
    if conf.ENABLE_ARPSCAN:
@@ -117,7 +115,7 @@ def process_scan (db, arpscan_devices = conf.arpscan_devices ):
    # Sessions snapshot
    mylog('verbose','[Process Scan] Inserting scan results into Online_History')
-   insertOnlineHistory(db,conf.cycle)
+   insertOnlineHistory(db)

    # Skip repeated notifications
    mylog('verbose','[Process Scan] Skipping repeated notifications')

View File

@@ -12,14 +12,14 @@ from helper import timeNow, updateState, get_file_content, write_file
from api import update_api

#-------------------------------------------------------------------------------
-def run_plugin_scripts(db, runType, plugins = conf.plugins):
+def run_plugin_scripts(db, runType):

    # Header
    updateState(db,"Run: Plugins")

    mylog('debug', ['[Plugins] Check if any plugins need to be executed on run type: ', runType])

-   for plugin in plugins:
+   for plugin in conf.plugins:

        shouldRun = False
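
Dropping the `plugins = conf.plugins` default (and reading `conf.plugins` inside the loop) matters because Python evaluates default arguments once, at function definition time: the old signature froze whatever object `conf.plugins` referenced at import, so plugins loaded later by `importConfigs` were invisible. A small demonstration of the pitfall:

```python
class Conf:            # stand-in for the conf module
    plugins = []

conf = Conf()

def run_frozen(plugins=conf.plugins):   # default object bound here, at def time
    return list(plugins)

def run_live():                         # looks up conf.plugins at call time
    return list(conf.plugins)

conf.plugins = ["nmap_dhcp"]            # importConfigs-style rebinding later

print(run_frozen())   # [] - still the stale list captured at definition
print(run_live())     # ['nmap_dhcp']
```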

View File

@@ -146,8 +146,9 @@ def mqtt_start(db):
    if conf.mqtt_connected_to_broker == False:
        conf.mqtt_connected_to_broker = True

-       client = mqtt_create_client()
+       conf.client = mqtt_create_client()
+       client = conf.client

    # General stats

    # Create a generic device for overall stats
@@ -175,7 +176,7 @@ def mqtt_start(db):
    # Specific devices
    # Get all devices
-   devices = get_all_devices()
+   devices = get_all_devices(db)

    sec_delay = len(devices) * int(conf.MQTT_DELAY_SEC)*5

View File

@@ -32,6 +32,12 @@ def send (msg: noti_struc):
    # add authorization header with hash
    headers["Authorization"] = "Basic {}".format(basichash)

+   try:
        requests.post("{}/{}".format( conf.NTFY_HOST, conf.NTFY_TOPIC),
-           data=msg.html,
+           data=msg.text,
            headers=headers)
+   except requests.exceptions.RequestException as e:
+       mylog('none', ['[NTFY] Error: ', e])
+       return -1
+   return 0
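
The new `try`/`except` plus the `-1`/`0` return lets the caller distinguish a failed gateway from a successful one. For reference, the same POST can be exercised standalone; a hedged sketch against the public ntfy service (URL and topic are placeholders, not project settings):

```python
import requests

try:
    resp = requests.post(
        "https://ntfy.sh/pialert-demo-topic",    # placeholder host/topic
        data="Pi.Alert test notification",
        headers={"Title": "Pi.Alert"},
        timeout=10,
    )
    resp.raise_for_status()                      # surface HTTP errors as exceptions
except requests.exceptions.RequestException as e:
    print("[NTFY] Error:", e)
```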

View File

@@ -51,7 +51,7 @@ def send (msg: noti_struc):
    # execute CURL call
    try:
        # try running a subprocess
-       mylog('debug', '[send_webhook] curlParams: '+ curlParams)
+       mylog('debug', ['[send_webhook] curlParams: ', curlParams])
        p = subprocess.Popen(curlParams, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
        stdout, stderr = p.communicate()

View File

@@ -285,7 +285,7 @@ def send_notifications (db, INCLUDED_SECTIONS = conf.INCLUDED_SECTIONS):
    if conf.REPORT_WEBHOOK and check_config('webhook'):
        updateState(db,"Send: Webhook")
        mylog('info', ['[Notification] Sending report by Webhook'])
-       send_webhook (json_final, mail_text)
+       send_webhook (msg)
    else :
        mylog('verbose', ['[Notification] Skip webhook'])

    if conf.REPORT_NTFY and check_config('ntfy'):
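
Passing the whole `msg` object instead of `(json_final, mail_text)` gives every gateway the same interface. The real `noti_struc` is defined elsewhere in the codebase; a hypothetical sketch inferred from the `msg.text` / `msg.html` usage visible in this commit:

```python
from dataclasses import dataclass, field

@dataclass
class NotiStruc:
    json: dict = field(default_factory=dict)   # structured payload, e.g. for webhooks
    text: str = ""                             # plain-text body, e.g. for ntfy
    html: str = ""                             # rich body, e.g. for email

msg = NotiStruc(json={"new_devices": []},
                text="1 new device found",
                html="<b>1 new device found</b>")
# every gateway now receives the same argument:
# send_webhook(msg), send(msg), ...
```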

View File

@@ -27,6 +27,19 @@ def copy_pihole_network (db):
    try:
        sql.execute ("DELETE FROM PiHole_Network")

+       # just for reporting
+       new_devices = []
+       sql.execute ( """SELECT hwaddr, macVendor, lastQuery,
+                           (SELECT name FROM PH.network_addresses
+                            WHERE network_id = id ORDER BY lastseen DESC, ip),
+                           (SELECT ip FROM PH.network_addresses
+                            WHERE network_id = id ORDER BY lastseen DESC, ip)
+                        FROM PH.network
+                        WHERE hwaddr NOT LIKE 'ip-%'
+                        AND hwaddr <> '00:00:00:00:00:00' """)
+       new_devices = sql.fetchall()

+       # insert into PiAlert DB
        sql.execute ("""INSERT INTO PiHole_Network (PH_MAC, PH_Vendor, PH_LastQuery,
                            PH_Name, PH_IP)
                        SELECT hwaddr, macVendor, lastQuery,
@@ -47,7 +60,7 @@ def copy_pihole_network (db):
    db.commitDB()

-   mylog('debug',[ '[PiHole Network] - completed - found ', sql.rowcount, ' devices'])
+   mylog('debug',[ '[PiHole Network] - completed - found ', len(new_devices), ' devices'])

    return str(sql.rowcount) != "0"
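
The `PH.` prefix in these queries implies the Pi-hole database has been ATTACHed to the Pi.Alert connection before the copy runs. A minimal sketch of that cross-database pattern (paths are illustrative; assumes the target table already exists):

```python
import sqlite3

conn = sqlite3.connect("/home/pi/pialert/db/pialert.db")
cur = conn.cursor()

# expose the Pi-hole DB under the PH schema name used in the queries above
cur.execute("ATTACH DATABASE '/etc/pihole/pihole-FTL.db' AS PH")

cur.execute("""INSERT INTO PiHole_Network (PH_MAC, PH_Vendor)
               SELECT hwaddr, macVendor
               FROM PH.network
               WHERE hwaddr NOT LIKE 'ip-%'""")

conn.commit()
cur.execute("DETACH DATABASE PH")
```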