mirror of https://github.com/jokob-sk/NetAlertX.git
synced 2025-12-07 09:36:05 -08:00
changed to __main__.py and scanners folder
@@ -1,7 +1,8 @@
 FROM debian:bullseye-slim

 # default UID and GID
-ENV USER=pi USER_ID=1000 USER_GID=1000 TZ=Europe/London PORT=20211
+ENV USER=pi USER_ID=1000 USER_GID=1000 PORT=20211
+#TZ=Europe/London

 # Todo, figure out why using a workdir instead of full paths don't work
 # Todo, do we still need all these packages? I can already see sudo which isn't needed
@@ -46,3 +47,8 @@ RUN rm /etc/nginx/sites-available/default \
 ENTRYPOINT ["tini", "--"]

 CMD ["/home/pi/pialert/dockerfiles/start.sh"]
+
+## command to build docker: DOCKER_BUILDKIT=1 docker build . --iidfile dockerID
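Note on the TZ change above: the timezone is no longer baked into the image as an ENV default; it is expected to arrive at container start (e.g. docker run -e TZ=...). A minimal Python sketch of that lookup-with-fallback pattern; the fallback values are taken from the pre-change Dockerfile:

    import os

    # Prefer the value injected at container start; fall back to the old defaults.
    tz = os.environ.get('TZ', 'Europe/London')
    port = int(os.environ.get('PORT', '20211'))

    print(f'TZ={tz} PORT={port}')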
@@ -1,15 +1,15 @@
 #!/bin/sh
 /home/pi/pialert/dockerfiles/user-mapping.sh

-# if custom variables not set we do not need to do anything
-if [ -n "${TZ}" ]; then
-  FILECONF=/home/pi/pialert/config/pialert.conf
-  if [ -f "$FILECONF" ]; then
-    sed -ie "s|Europe/Berlin|${TZ}|g" /home/pi/pialert/config/pialert.conf
-  else
-    sed -ie "s|Europe/Berlin|${TZ}|g" /home/pi/pialert/back/pialert.conf_bak
-  fi
-fi
+# # if custom variables not set we do not need to do anything
+# if [ -n "${TZ}" ]; then
+#   FILECONF=/home/pi/pialert/config/pialert.conf
+#   if [ -f "$FILECONF" ]; then
+#     sed -ie "s|Europe/Berlin|${TZ}|g" /home/pi/pialert/config/pialert.conf
+#   else
+#     sed -ie "s|Europe/Berlin|${TZ}|g" /home/pi/pialert/back/pialert.conf_bak
+#   fi
+# fi

 if [ -n "${PORT}" ]; then
   sed -ie 's/listen 20211/listen '${PORT}'/g' /etc/nginx/sites-available/default
@@ -29,5 +29,5 @@ chmod -R a+rw /home/pi/pialert/config

 # cron -f
 #python /home/pi/pialert/back/pialert.py
-echo "DATA MONKEY VERSION ..."
-python /home/pi/pialert/pialert/pialert.py
+echo "[DEBUG] DATA MONKEY VERSION ..."
+python /home/pi/pialert/pialert/
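The start command now points python at the pialert/ package directory instead of a single script. That only works because the interpreter executes a __main__.py found in the target directory; a minimal sketch of what such a file looks like (the main() import is an assumption based on this commit's message, not the repo's verbatim file):

    # pialert/__main__.py - run via `python /home/pi/pialert/pialert/`
    # Python adds the directory to sys.path and executes this file as __main__.

    from pialert import main  # assumes pialert.py in the same folder defines main()

    main()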
@@ -30,26 +30,50 @@ from api import update_api
 from networkscan import scan_network
 from initialise import importConfigs
 from mac_vendor import update_devices_MAC_vendors
-from database import DB, get_all_devices, upgradeDB, sql_new_devices
+from database import DB, get_all_devices, sql_new_devices
 from reporting import check_and_run_event, send_notifications
 from plugin import run_plugin_scripts

 # different scanners
-from pholusscan import performPholusScan
-from nmapscan import performNmapScan
-from internet import check_internet_IP
+from scanners.pholusscan import performPholusScan
+from scanners.nmapscan import performNmapScan
+from scanners.internet import check_internet_IP

 # Global variables
 changedPorts_json_struc = None


 #===============================================================================
 #===============================================================================
 # MAIN
 #===============================================================================
 #===============================================================================
+"""
+main structure of Pi Alert
+
+    Initialise All
+    start Loop forever
+        initialise loop
+            (re)import config
+            (re)import plugin config
+        run plugins (once)
+        run frontend events
+        update API
+        run scans
+            run plugins (scheduled)
+            check internet IP
+            check vendor
+            run PHOLUS
+            run NMAP
+            run "scan_network()"
+                ARP Scan
+                PiHole copy db
+                PiHole DHCP leases
+            processing scan results
+        run plugins (after Scan)
+        reporting
+        cleanup
+    end loop
+"""

 def main ():

@@ -102,7 +126,7 @@ def main ():
         # update time started
         time_started = datetime.datetime.now() # not sure why we need this ...
         loop_start_time = timeNow()
-        mylog('debug', ['[ +++++++ ', timeNow(), '] [MAIN] Stating loop'])
+        mylog('debug', '[MAIN] Starting loop')

         # re-load user configuration and plugins
         importConfigs(db)
@@ -153,8 +177,8 @@ def main ():
         if last_update_vendors + datetime.timedelta(days = 7) < time_started:
             last_update_vendors = time_started
             conf.cycle = 'update_vendors'
-            mylog('verbose', ['[', timeNow(), '] cycle:',conf.cycle])
-            update_devices_MAC_vendors()
+            mylog('verbose', ['[MAIN] cycle:',conf.cycle])
+            update_devices_MAC_vendors(db)

         # Execute scheduled or one-off Pholus scan if enabled and run conditions fulfilled
         if conf.PHOLUS_RUN == "schedule" or conf.PHOLUS_RUN == "once":
@@ -183,7 +207,7 @@ def main ():
             run = False

             # run once after application starts
-            if conf.NMAP_RUN == "once" and conf.nmapSchedule.last_run == 0:
+            if conf.NMAP_RUN == "once" and nmapSchedule.last_run == 0:
                 run = True

             # run if overdue scheduled time
@@ -191,14 +215,14 @@ def main ():
                 run = nmapSchedule.runScheduleCheck()

             if run:
-                conf.nmapSchedule.last_run = timeNow()
+                nmapSchedule.last_run = timeNow()
                 performNmapScan(db, get_all_devices(db))

         # Perform a network scan via arp-scan or pihole
         if last_network_scan + datetime.timedelta(minutes=conf.SCAN_CYCLE_MINUTES) < time_started:
             last_network_scan = time_started
             conf.cycle = 1 # network scan
-            mylog('verbose', ['[', timeNow(), '] cycle:',conf.cycle])
+            mylog('verbose', ['[MAIN] cycle:',conf.cycle])
             updateState(db,"Scan: Network")

             # scan_network()
@@ -214,8 +238,7 @@ def main ():

             # If thread is still active
             if p.is_alive():
-                print("DEBUG scan_network running too long - let\'s kill it")
-                mylog('info', [' DEBUG scan_network running too long - let\'s kill it'])
+                mylog('none', "[MAIN] scan_network running too long - let\'s kill it")

                 # Terminate - may not work if process is stuck for good
                 p.terminate()
@@ -254,7 +277,7 @@ def main ():
         if last_cleanup + datetime.timedelta(hours = 24) < time_started:
             last_cleanup = time_started
             conf.cycle = 'cleanup'
-            mylog('verbose', ['[', timeNow(), '] cycle:',conf.cycle])
+            mylog('verbose', ['[MAIN] cycle:',conf.cycle])
             db.cleanup_database(startTime, conf.DAYS_TO_KEEP_EVENTS, conf.PHOLUS_DAYS_DATA)

         # Commit SQL
@@ -265,17 +288,17 @@ def main ():
             action = str(conf.cycle)
             if action == "1":
                 action = "network_scan"
-            mylog('verbose', ['[', timeNow(), '] Last action: ', action])
+            mylog('verbose', ['[MAIN] Last action: ', action])
             conf.cycle = ""
-            mylog('verbose', ['[', timeNow(), '] cycle:',conf.cycle])
+            mylog('verbose', ['[MAIN] cycle:',conf.cycle])

             # Footer
             updateState(db,"Process: Wait")
-            mylog('verbose', ['[', timeNow(), '] Process: Wait'])
+            mylog('verbose', ['[MAIN] Process: Wait'])
         else:
             # do something
             conf.cycle = ""
-            mylog('verbose', ['[', timeNow(), '] [MAIN] waiting to start next loop'])
+            mylog('verbose', ['[MAIN] waiting to start next loop'])

         # loop
         time.sleep(5) # wait for N seconds
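Reviewer note: the loop above delegates scan_network to a child process and force-terminates it when it overruns. The pattern in isolation, as a runnable sketch (the 30 s budget and the sleep stand in for the real scan):

    import multiprocessing
    import time

    def scan_network():
        time.sleep(60)  # stand-in for a scan that hangs or overruns

    if __name__ == '__main__':
        p = multiprocessing.Process(target=scan_network)
        p.start()
        p.join(timeout=30)   # give the scan 30 s to finish

        if p.is_alive():     # still running -> kill it, as the main loop does
            p.terminate()    # may not work if the process is stuck for good
            p.join()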
@@ -2,11 +2,11 @@ import json

 # pialert modules
+import conf
 from const import pialertPath
 from logger import mylog
 from files import write_file
 from database import *
-from conf import ENABLE_PLUGINS, API_CUSTOM_SQL

 apiEndpoints = []

@@ -14,7 +14,7 @@ apiEndpoints = []
 # API
 #===============================================================================
 def update_api(isNotification = False, updateOnlyDataSources = []):
-    mylog('verbose', [' [API] Update API not ding anything for now !'])
+    mylog('verbose', [' [API] Update API not doing anything for now !'])
     return

     folder = pialertPath + '/front/api/'
@@ -28,7 +28,7 @@ def update_api(isNotification = False, updateOnlyDataSources = []):
     write_file(folder + 'notification_json_final.json' , json.dumps(json_final))

     # Save plugins
-    if ENABLE_PLUGINS:
+    if conf.ENABLE_PLUGINS:
         write_file(folder + 'plugins.json' , json.dumps({"data" : plugins}))

     # prepare database tables we want to expose
@@ -42,7 +42,7 @@ def update_api(isNotification = False, updateOnlyDataSources = []):
         ["plugins_history", sql_plugins_history],
         ["plugins_objects", sql_plugins_objects],
         ["language_strings", sql_language_strings],
-        ["custom_endpoint", API_CUSTOM_SQL],
+        ["custom_endpoint", conf.API_CUSTOM_SQL],
     ]

     # Save selected database tables
@@ -57,12 +57,12 @@ def update_api(isNotification = False, updateOnlyDataSources = []):


 class api_endpoint_class:
-    def __init__(self, sql, path):
+    def __init__(self, db, path):

         global apiEndpoints

-        self.sql = sql
-        self.jsonData = get_table_as_json(sql).json
+        self.db = db
+        self.sql = db.sql
+        self.jsonData = db.get_table_as_json( self.sql).json
         self.path = path
         self.fileName = path.split('/')[-1]
         self.hash = hash(json.dumps(self.jsonData))
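Reviewer note: several hunks in this commit replace `from conf import X` with attribute access `conf.X`. The reason: a from-import copies the value once at import time, so settings reloaded later (e.g. by importConfigs()) are never seen through the stale name. Runnable sketch; the fake conf module stands in for the real one:

    import sys
    import types

    # Stand-in for the real conf.py so this sketch runs on its own
    conf = types.ModuleType('conf')
    conf.ENABLE_PLUGINS = False
    sys.modules['conf'] = conf

    from conf import ENABLE_PLUGINS   # snapshot taken here

    conf.ENABLE_PLUGINS = True        # e.g. importConfigs() reloading settings

    print(ENABLE_PLUGINS)             # False - stale copy from import time
    print(conf.ENABLE_PLUGINS)        # True  - attribute access sees the change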
@@ -48,7 +48,7 @@ class DB():
             mylog('debug','openDB: databse already open')
             return

-        mylog('none', 'Opening DB' )
+        mylog('none', '[Database] Opening DB' )
         # Open DB and Cursor
         self.sql_connection = sqlite3.connect (fullDbPath, isolation_level=None)
         self.sql_connection.execute('pragma journal_mode=wal') #
@@ -60,12 +60,11 @@ class DB():
     def commitDB (self):
         if self.sql_connection == None :
             mylog('debug','commitDB: databse is not open')
-            return
-
-        # mylog('debug','commitDB: comiting DB changes')
+            return False

         # Commit changes to DB
         self.sql_connection.commit()
+        return True

     #-------------------------------------------------------------------------------
     def get_sql_array(self, query):
@@ -75,7 +74,7 @@ class DB():

         self.sql.execute(query)
         rows = self.sql.fetchall()
-        self.commitDB()
+        #self.commitDB()

         # convert result into list of lists
         arr = []
@@ -386,14 +385,14 @@ class DB():
         self.commitDB()


-#-------------------------------------------------------------------------------
-def get_table_as_json(db, sqlQuery):
+    #-------------------------------------------------------------------------------
+    def get_table_as_json(self, sqlQuery):

-    db.sql.execute(sqlQuery)
+        self.sql.execute(sqlQuery)

-    columnNames = list(map(lambda x: x[0], db.sql.description))
+        columnNames = list(map(lambda x: x[0], self.sql.description))

-    rows = db.sql.fetchall()
+        rows = self.sql.fetchall()

         result = {"data":[]}
@@ -402,277 +401,32 @@ def get_table_as_json(db, sqlQuery):
         result["data"].append(tmp)
     return json_struc(result, columnNames)

-
-#-------------------------------------------------------------------------------
-def upgradeDB(db: DB()):
-    sql = db.sql #TO-DO
-
-    # indicates, if Online_History table is available
-    onlineHistoryAvailable = db.sql.execute("""
-        SELECT name FROM sqlite_master WHERE type='table'
-        AND name='Online_History';
-    """).fetchall() != []
-
-    # Check if it is incompatible (Check if table has all required columns)
-    isIncompatible = False
-
-    if onlineHistoryAvailable :
-        isIncompatible = sql.execute ("""
-            SELECT COUNT(*) AS CNTREC FROM pragma_table_info('Online_History') WHERE name='Archived_Devices'
-        """).fetchone()[0] == 0
-
-    # Drop table if available, but incompatible
-    if onlineHistoryAvailable and isIncompatible:
-        mylog('none','[upgradeDB] Table is incompatible, Dropping the Online_History table')
-        sql.execute("DROP TABLE Online_History;")
-        onlineHistoryAvailable = False
-
-    if onlineHistoryAvailable == False :
-        sql.execute("""
-            CREATE TABLE "Online_History" (
-                "Index" INTEGER,
-                "Scan_Date" TEXT,
-                "Online_Devices" INTEGER,
-                "Down_Devices" INTEGER,
-                "All_Devices" INTEGER,
-                "Archived_Devices" INTEGER,
-                PRIMARY KEY("Index" AUTOINCREMENT)
-            );
-        """)
-
-    # Alter Devices table
-    # dev_Network_Node_MAC_ADDR column
-    dev_Network_Node_MAC_ADDR_missing = sql.execute ("""
-        SELECT COUNT(*) AS CNTREC FROM pragma_table_info('Devices') WHERE name='dev_Network_Node_MAC_ADDR'
-    """).fetchone()[0] == 0
-
-    if dev_Network_Node_MAC_ADDR_missing :
-        mylog('verbose', ["[upgradeDB] Adding dev_Network_Node_MAC_ADDR to the Devices table"])
-        sql.execute("""
-            ALTER TABLE "Devices" ADD "dev_Network_Node_MAC_ADDR" TEXT
-        """)
-
-    # dev_Network_Node_port column
-    dev_Network_Node_port_missing = sql.execute ("""
-        SELECT COUNT(*) AS CNTREC FROM pragma_table_info('Devices') WHERE name='dev_Network_Node_port'
-    """).fetchone()[0] == 0
-
-    if dev_Network_Node_port_missing :
-        mylog('verbose', ["[upgradeDB] Adding dev_Network_Node_port to the Devices table"])
-        sql.execute("""
-            ALTER TABLE "Devices" ADD "dev_Network_Node_port" INTEGER
-        """)
-
-    # dev_Icon column
-    dev_Icon_missing = sql.execute ("""
-        SELECT COUNT(*) AS CNTREC FROM pragma_table_info('Devices') WHERE name='dev_Icon'
-    """).fetchone()[0] == 0
-
-    if dev_Icon_missing :
-        mylog('verbose', ["[upgradeDB] Adding dev_Icon to the Devices table"])
-        sql.execute("""
-            ALTER TABLE "Devices" ADD "dev_Icon" TEXT
-        """)
-
-    # indicates, if Settings table is available
-    settingsMissing = sql.execute("""
-        SELECT name FROM sqlite_master WHERE type='table'
-        AND name='Settings';
-    """).fetchone() == None
-
-    # Re-creating Settings table
-    mylog('verbose', ["[upgradeDB] Re-creating Settings table"])
-
-    if settingsMissing == False:
-        sql.execute("DROP TABLE Settings;")
-
-    sql.execute("""
-        CREATE TABLE "Settings" (
-            "Code_Name" TEXT,
-            "Display_Name" TEXT,
-            "Description" TEXT,
-            "Type" TEXT,
-            "Options" TEXT,
-            "RegEx" TEXT,
-            "Value" TEXT,
-            "Group" TEXT,
-            "Events" TEXT
-        );
-    """)
-
-    # indicates, if Pholus_Scan table is available
-    pholusScanMissing = sql.execute("""
-        SELECT name FROM sqlite_master WHERE type='table'
-        AND name='Pholus_Scan';
-    """).fetchone() == None
-
-    # if pholusScanMissing == False:
-    #     # Re-creating Pholus_Scan table
-    #     sql.execute("DROP TABLE Pholus_Scan;")
-    #     pholusScanMissing = True
-
-    if pholusScanMissing:
-        mylog('verbose', ["[upgradeDB] Re-creating Pholus_Scan table"])
-        sql.execute("""
-            CREATE TABLE "Pholus_Scan" (
-                "Index" INTEGER,
-                "Info" TEXT,
-                "Time" TEXT,
-                "MAC" TEXT,
-                "IP_v4_or_v6" TEXT,
-                "Record_Type" TEXT,
-                "Value" TEXT,
-                "Extra" TEXT,
-                PRIMARY KEY("Index" AUTOINCREMENT)
-            );
-        """)
-
-    # indicates, if Nmap_Scan table is available
-    nmapScanMissing = sql.execute("""
-        SELECT name FROM sqlite_master WHERE type='table'
-        AND name='Nmap_Scan';
-    """).fetchone() == None
-
-    # Re-creating Parameters table
-    mylog('verbose', ["[upgradeDB] Re-creating Parameters table"])
-    sql.execute("DROP TABLE Parameters;")
-
-    sql.execute("""
-        CREATE TABLE "Parameters" (
-            "par_ID" TEXT PRIMARY KEY,
-            "par_Value" TEXT
-        );
-    """)
-
-    # Initialize Parameters if unavailable
-    initOrSetParam(db, 'Back_App_State','Initializing')
-
-    # if nmapScanMissing == False:
-    #     # Re-creating Nmap_Scan table
-    #     sql.execute("DROP TABLE Nmap_Scan;")
-    #     nmapScanMissing = True
-
-    if nmapScanMissing:
-        mylog('verbose', ["[upgradeDB] Re-creating Nmap_Scan table"])
-        sql.execute("""
-            CREATE TABLE "Nmap_Scan" (
-                "Index" INTEGER,
-                "MAC" TEXT,
-                "Port" TEXT,
-                "Time" TEXT,
-                "State" TEXT,
-                "Service" TEXT,
-                "Extra" TEXT,
-                PRIMARY KEY("Index" AUTOINCREMENT)
-            );
-        """)
-
-    # Plugin state
-    sql_Plugins_Objects = """ CREATE TABLE IF NOT EXISTS Plugins_Objects(
-        "Index" INTEGER,
-        Plugin TEXT NOT NULL,
-        Object_PrimaryID TEXT NOT NULL,
-        Object_SecondaryID TEXT NOT NULL,
-        DateTimeCreated TEXT NOT NULL,
-        DateTimeChanged TEXT NOT NULL,
-        Watched_Value1 TEXT NOT NULL,
-        Watched_Value2 TEXT NOT NULL,
-        Watched_Value3 TEXT NOT NULL,
-        Watched_Value4 TEXT NOT NULL,
-        Status TEXT NOT NULL,
-        Extra TEXT NOT NULL,
-        UserData TEXT NOT NULL,
-        ForeignKey TEXT NOT NULL,
-        PRIMARY KEY("Index" AUTOINCREMENT)
-    ); """
-    sql.execute(sql_Plugins_Objects)
-
-    # Plugin execution results
-    sql_Plugins_Events = """ CREATE TABLE IF NOT EXISTS Plugins_Events(
-        "Index" INTEGER,
-        Plugin TEXT NOT NULL,
-        Object_PrimaryID TEXT NOT NULL,
-        Object_SecondaryID TEXT NOT NULL,
-        DateTimeCreated TEXT NOT NULL,
-        DateTimeChanged TEXT NOT NULL,
-        Watched_Value1 TEXT NOT NULL,
-        Watched_Value2 TEXT NOT NULL,
-        Watched_Value3 TEXT NOT NULL,
-        Watched_Value4 TEXT NOT NULL,
-        Status TEXT NOT NULL,
-        Extra TEXT NOT NULL,
-        UserData TEXT NOT NULL,
-        ForeignKey TEXT NOT NULL,
-        PRIMARY KEY("Index" AUTOINCREMENT)
-    ); """
-    sql.execute(sql_Plugins_Events)
-
-    # Plugin execution history
-    sql_Plugins_History = """ CREATE TABLE IF NOT EXISTS Plugins_History(
-        "Index" INTEGER,
-        Plugin TEXT NOT NULL,
-        Object_PrimaryID TEXT NOT NULL,
-        Object_SecondaryID TEXT NOT NULL,
-        DateTimeCreated TEXT NOT NULL,
-        DateTimeChanged TEXT NOT NULL,
-        Watched_Value1 TEXT NOT NULL,
-        Watched_Value2 TEXT NOT NULL,
-        Watched_Value3 TEXT NOT NULL,
-        Watched_Value4 TEXT NOT NULL,
-        Status TEXT NOT NULL,
-        Extra TEXT NOT NULL,
-        UserData TEXT NOT NULL,
-        ForeignKey TEXT NOT NULL,
-        PRIMARY KEY("Index" AUTOINCREMENT)
-    ); """
-    sql.execute(sql_Plugins_History)
-
-    # Dynamically generated language strings
-    # indicates, if Language_Strings table is available
-    languageStringsMissing = sql.execute("""
-        SELECT name FROM sqlite_master WHERE type='table'
-        AND name='Plugins_Language_Strings';
-    """).fetchone() == None
-
-    if languageStringsMissing == False:
-        sql.execute("DROP TABLE Plugins_Language_Strings;")
-
-    sql.execute(""" CREATE TABLE IF NOT EXISTS Plugins_Language_Strings(
-        "Index" INTEGER,
-        Language_Code TEXT NOT NULL,
-        String_Key TEXT NOT NULL,
-        String_Value TEXT NOT NULL,
-        Extra TEXT NOT NULL,
-        PRIMARY KEY("Index" AUTOINCREMENT)
-    ); """)
-
-    db.commitDB()
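Reviewer note: the removed upgradeDB relied on SQLite's pragma_table_info to keep schema migrations idempotent - add a column only when a count query says it is missing. The check in isolation, as a runnable sketch:

    import sqlite3

    conn = sqlite3.connect(':memory:')
    sql = conn.cursor()
    sql.execute('CREATE TABLE "Devices" ("dev_MAC" TEXT)')

    # 0 matching rows in pragma_table_info means the column does not exist yet
    dev_Icon_missing = sql.execute("""
        SELECT COUNT(*) AS CNTREC FROM pragma_table_info('Devices') WHERE name='dev_Icon'
    """).fetchone()[0] == 0

    if dev_Icon_missing:
        sql.execute('ALTER TABLE "Devices" ADD "dev_Icon" TEXT')

    print(dev_Icon_missing)  # True on a fresh table, False if run again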
+    #-------------------------------------------------------------------------------
+    # reference from here: https://codereview.stackexchange.com/questions/241043/interface-class-for-sqlite-databases
+    #-------------------------------------------------------------------------------
+    def read(self, query, *args):
+        """check the query and arguments are aligned and are read only"""
+        mylog('debug',[ '[Database] - SELECT Query: ', query, " params: ", args])
+        try:
+            assert query.count('?') == len(args)
+            assert query.upper().strip().startswith('SELECT')
+            self.sql.execute(query, args)
+            rows = self.sql.fetchall()
+            return rows
+        except AssertionError:
+            mylog('none',[ '[Database] - ERROR: inconsistent query and/or arguments.', query, " params: ", args])
+        except sqlite3.Error as e:
+            mylog('none',[ '[Database] - SQL ERROR: ', e])
+        return None


 #-------------------------------------------------------------------------------
 def get_device_stats(db):
-    sql = db.sql #TO-DO
-    # columns = ["online","down","all","archived","new","unknown"]
-    sql.execute(sql_devices_stats)
-
-    row = sql.fetchone()
-    db.commitDB()
-
-    return row
+    return db.read(sql_devices_stats)

 #-------------------------------------------------------------------------------
 def get_all_devices(db):
-    sql = db.sql #TO-DO
-    sql.execute(sql_devices_all)
-
-    row = sql.fetchall()
-
-    db.commitDB()
-    return row
+    return db.read(sql_devices_all)

 #-------------------------------------------------------------------------------
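The new DB.read() validates two things before touching the cursor: the number of '?' placeholders matches the arguments, and the statement is read-only. Trimmed to its core as a standalone function (runnable sketch):

    import sqlite3

    def read(cursor, query, *args):
        """Run a read-only query; refuse writes and mismatched placeholders."""
        assert query.count('?') == len(args)               # placeholders == args
        assert query.upper().strip().startswith('SELECT')  # read-only guard
        cursor.execute(query, args)
        return cursor.fetchall()

    conn = sqlite3.connect(':memory:')
    cur = conn.cursor()
    cur.execute("CREATE TABLE Devices (dev_MAC TEXT, dev_Archived INTEGER)")
    cur.execute("INSERT INTO Devices VALUES ('aa:bb:cc:dd:ee:ff', 0)")

    print(read(cur, "SELECT * FROM Devices WHERE dev_Archived = ?", 0))
    # read(cur, "DELETE FROM Devices") would raise AssertionError: not a SELECT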
@@ -681,20 +435,17 @@ def insertOnlineHistory(db, cycle):
     sql = db.sql #TO-DO
     startTime = timeNow()
     # Add to History
-    sql.execute("SELECT * FROM Devices")
-    History_All = sql.fetchall()
-
+    History_All = db.read("SELECT * FROM Devices")
     History_All_Devices = len(History_All)

-    sql.execute("SELECT * FROM Devices WHERE dev_Archived = 1")
-    History_Archived = sql.fetchall()
+    History_Archived = db.read("SELECT * FROM Devices WHERE dev_Archived = 1")
     History_Archived_Devices = len(History_Archived)

-    sql.execute("""SELECT * FROM CurrentScan WHERE cur_ScanCycle = ? """, (cycle,))
-    History_Online = sql.fetchall()
+    History_Online = db.read("SELECT * FROM CurrentScan WHERE cur_ScanCycle = ? ", cycle)
     History_Online_Devices = len(History_Online)
     History_Offline_Devices = History_All_Devices - History_Archived_Devices - History_Online_Devices

     sql.execute ("INSERT INTO Online_History (Scan_Date, Online_Devices, Down_Devices, All_Devices, Archived_Devices) "+
                  "VALUES ( ?, ?, ?, ?, ?)", (startTime, History_Online_Devices, History_Offline_Devices, History_All_Devices, History_Archived_Devices ) )
     db.commitDB()
@@ -6,10 +6,10 @@ import subprocess

 import conf
 from helper import timeNow
-from internet import check_IP_format, get_internet_IP
+from scanners.internet import check_IP_format, get_internet_IP
 from logger import mylog, print_log
 from mac_vendor import query_MAC_vendor
-from pholusscan import performPholusScan, resolve_device_name_dig, resolve_device_name_pholus
+from scanners.pholusscan import performPholusScan, resolve_device_name_dig, resolve_device_name_pholus

 #-------------------------------------------------------------------------------

@@ -64,7 +64,7 @@ def save_scanned_devices (db, p_arpscan_devices, p_cycle_interval):
     local_ip_cmd = ["ip -o route get 1 | sed 's/^.*src \\([^ ]*\\).*$/\\1/;q'"]
     local_ip = subprocess.Popen (local_ip_cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT).communicate()[0].decode().strip()

-    mylog('debug', [' Saving this IP into the CurrentScan table:', local_ip])
+    mylog('debug', ['[Save Devices] Saving this IP into the CurrentScan table:', local_ip])

     if check_IP_format(local_ip) == '':
         local_ip = '0.0.0.0'
@@ -82,19 +82,19 @@ def print_scan_stats (db):
     sql.execute ("""SELECT COUNT(*) FROM CurrentScan
                     WHERE cur_ScanCycle = ? """,
                 (conf.cycle,))
-    mylog('verbose', [' Devices Detected.......: ', str (sql.fetchone()[0]) ])
+    mylog('verbose', ['[Scan Stats] Devices Detected.......: ', str (sql.fetchone()[0]) ])

     # Devices arp-scan
     sql.execute ("""SELECT COUNT(*) FROM CurrentScan
                     WHERE cur_ScanMethod='arp-scan' AND cur_ScanCycle = ? """,
                 (conf.cycle,))
-    mylog('verbose', [' arp-scan detected..: ', str (sql.fetchone()[0]) ])
+    mylog('verbose', ['[Scan Stats] arp-scan detected..: ', str (sql.fetchone()[0]) ])

     # Devices Pi-hole
     sql.execute ("""SELECT COUNT(*) FROM CurrentScan
                     WHERE cur_ScanMethod='PiHole' AND cur_ScanCycle = ? """,
                 (conf.cycle,))
-    mylog('verbose', [' Pi-hole detected...: +' + str (sql.fetchone()[0]) ])
+    mylog('verbose', ['[Scan Stats] Pi-hole detected...: +' + str (sql.fetchone()[0]) ])

     # New Devices
     sql.execute ("""SELECT COUNT(*) FROM CurrentScan
@@ -102,7 +102,7 @@ def print_scan_stats (db):
                     AND NOT EXISTS (SELECT 1 FROM Devices
                                     WHERE dev_MAC = cur_MAC) """,
                 (conf.cycle,))
-    mylog('verbose', [' New Devices........: ' + str (sql.fetchone()[0]) ])
+    mylog('verbose', ['[Scan Stats] New Devices........: ' + str (sql.fetchone()[0]) ])

     # Devices in this ScanCycle
     sql.execute ("""SELECT COUNT(*) FROM Devices, CurrentScan
@@ -110,7 +110,7 @@ def print_scan_stats (db):
                     AND dev_ScanCycle = ? """,
                 (conf.cycle,))

-    mylog('verbose', [' Devices in this cycle..: ' + str (sql.fetchone()[0]) ])
+    mylog('verbose', ['[Scan Stats] Devices in this cycle..: ' + str (sql.fetchone()[0]) ])

     # Down Alerts
     sql.execute ("""SELECT COUNT(*) FROM Devices
@@ -120,7 +120,7 @@ def print_scan_stats (db):
                                     WHERE dev_MAC = cur_MAC
                                     AND dev_ScanCycle = cur_ScanCycle) """,
                 (conf.cycle,))
-    mylog('verbose', [' Down Alerts........: ' + str (sql.fetchone()[0]) ])
+    mylog('verbose', ['[Scan Stats] Down Alerts........: ' + str (sql.fetchone()[0]) ])

     # New Down Alerts
     sql.execute ("""SELECT COUNT(*) FROM Devices
@@ -131,7 +131,7 @@ def print_scan_stats (db):
                                     WHERE dev_MAC = cur_MAC
                                     AND dev_ScanCycle = cur_ScanCycle) """,
                 (conf.cycle,))
-    mylog('verbose', [' New Down Alerts....: ' + str (sql.fetchone()[0]) ])
+    mylog('verbose', ['[Scan Stats] New Down Alerts....: ' + str (sql.fetchone()[0]) ])

     # New Connections
     sql.execute ("""SELECT COUNT(*) FROM Devices, CurrentScan
@@ -139,7 +139,7 @@ def print_scan_stats (db):
                     AND dev_PresentLastScan = 0
                     AND dev_ScanCycle = ? """,
                 (conf.cycle,))
-    mylog('verbose', [' New Connections....: ' + str ( sql.fetchone()[0]) ])
+    mylog('verbose', ['[Scan Stats] New Connections....: ' + str ( sql.fetchone()[0]) ])

     # Disconnections
     sql.execute ("""SELECT COUNT(*) FROM Devices
@@ -149,7 +149,7 @@ def print_scan_stats (db):
                                     WHERE dev_MAC = cur_MAC
                                     AND dev_ScanCycle = cur_ScanCycle) """,
                 (conf.cycle,))
-    mylog('verbose', [' Disconnections.....: ' + str ( sql.fetchone()[0]) ])
+    mylog('verbose', ['[Scan Stats] Disconnections.....: ' + str ( sql.fetchone()[0]) ])

     # IP Changes
     sql.execute ("""SELECT COUNT(*) FROM Devices, CurrentScan
@@ -157,7 +157,7 @@ def print_scan_stats (db):
                     AND dev_ScanCycle = ?
                     AND dev_LastIP <> cur_IP """,
                 (conf.cycle,))
-    mylog('verbose', [' IP Changes.........: ' + str ( sql.fetchone()[0]) ])
+    mylog('verbose', ['[Scan Stats] IP Changes.........: ' + str ( sql.fetchone()[0]) ])


@@ -395,7 +395,7 @@ def update_devices_names (db):
     db.commitDB()

     # Number of entries from previous Pholus scans
-    mylog('verbose', '[Update Device Name] Pholus entries from prev scans: ', len(pholusResults))
+    mylog('verbose', ['[Update Device Name] Pholus entries from prev scans: ', len(pholusResults)])

     for device in unknownDevices:
         newName = -1
@@ -422,8 +422,8 @@ def update_devices_names (db):
             recordsToUpdate.append ([newName, device['dev_MAC']])

     # Print log
-    mylog('verbose', '[Update Device Name] Names Found (DiG/Pholus): ', len(recordsToUpdate), " (",foundDig,"/",foundPholus ,")" )
-    mylog('verbose', '[Update Device Name] Names Not Found : ', len(recordsNotFound) )
+    mylog('verbose', ['[Update Device Name] Names Found (DiG/Pholus): ', len(recordsToUpdate), " (",foundDig,"/",foundPholus ,")"] )
+    mylog('verbose', ['[Update Device Name] Names Not Found : ', len(recordsNotFound)] )

     # update not found devices with (name not found)
     sql.executemany ("UPDATE Devices SET dev_Name = ? WHERE dev_MAC = ? ", recordsNotFound )
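Reviewer note: the last two hunks wrap the message parts in a list because mylog takes exactly two arguments; passing the parts as extra positionals raises a TypeError. Runnable sketch - the mylog body here is assumed from how it is called throughout this diff, not the repo's verbatim implementation:

    def mylog(level, message):
        # Assumed shape: `message` is a string or a list of parts to join
        if isinstance(message, list):
            message = ''.join(str(part) for part in message)
        print(f'[{level}] {message}')

    mylog('verbose', ['[Update Device Name] Names Not Found : ', 3])   # OK

    # Pre-fix call shape - a third positional argument:
    # mylog('verbose', '[Update Device Name] Names Not Found : ', 3)
    # TypeError: mylog() takes 2 positional arguments but 3 were given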
@@ -32,14 +32,12 @@ def updateState(db, newState):

     #sql = db.sql

-    mylog('debug', ' [updateState] changing state to: "' + newState +'"')
+    mylog('debug', '[updateState] changing state to: "' + newState +'"')
     db.sql.execute ("UPDATE Parameters SET par_Value='"+ newState +"' WHERE par_ID='Back_App_State'")

     db.commitDB()
 #-------------------------------------------------------------------------------
 def updateSubnets(scan_subnets):

     # remove old list
     subnets = []

     # multiple interfaces
@@ -64,8 +62,8 @@ def checkPermissionsOK():
     dbR_access = (os.access(fullDbPath, os.R_OK))
     dbW_access = (os.access(fullDbPath, os.W_OK))

-    mylog('none', ['\n Permissions check (All should be True)'])
+    mylog('none', ['\n'])
+    mylog('none', ['Permissions check (All should be True)'])
     mylog('none', ['------------------------------------------------'])
     mylog('none', [ " " , confPath , " | " , " READ | " , confR_access])
     mylog('none', [ " " , confPath , " | " , " WRITE | " , confW_access])
@@ -175,13 +175,12 @@ def importConfigs (db):
     if conf.ENABLE_PLUGINS:
         conf.plugins = get_plugins_configs()

-        mylog('none', ['[', timeNow(), '] Plugins: Number of dynamically loaded plugins: ', len(conf.plugins)])
+        mylog('none', ['[Config] Plugins: Number of dynamically loaded plugins: ', len(conf.plugins)])

         # handle plugins
         for plugin in conf.plugins:
-            print_plugin_info(plugin, ['display_name','description'])
-
             pref = plugin["unique_prefix"]
+            print_plugin_info(plugin, ['display_name','description'])

             # if plugin["enabled"] == 'true':
@@ -232,5 +231,4 @@ def importConfigs (db):
     # update_api(False, ["settings"])
     # TO DO this creates a circular reference between API and HELPER !

-    mylog('info', ['[', timeNow(), '] Config: Imported new config'])
+    mylog('info', '[Config] Imported new config')
@@ -1,12 +1,12 @@


 import conf
-from arpscan import execute_arpscan
+from scanners.arpscan import execute_arpscan
+from scanners.pihole import copy_pihole_network, read_DHCP_leases
 from database import insertOnlineHistory
 from device import create_new_devices, print_scan_stats, save_scanned_devices, update_devices_data_from_scan, update_devices_names
 from helper import timeNow
-from logger import mylog, print_log
-from pihole import copy_pihole_network, read_DHCP_leases
+from logger import mylog
 from reporting import skip_repeated_notifications


@@ -18,19 +18,20 @@ from reporting import skip_repeated_notifications

 def scan_network (db):
     sql = db.sql #TO-DO
     reporting = False

     # Header
     # moved updateState to main loop
     # updateState(db,"Scan: Network")
-    mylog('verbose', ['[', timeNow(), '] Scan Devices:' ])
+    mylog('verbose', ['[Network Scan] Scan Devices:' ])

     # Query ScanCycle properties
     scanCycle_data = query_ScanCycle_Data (db, True)
     if scanCycle_data is None:
-        mylog('none', ['\n*************** ERROR ***************'])
-        mylog('none', ['ScanCycle %s not found' % conf.cycle ])
-        mylog('none', [' Exiting...\n'])
+        mylog('none', ['\n'])
+        mylog('none', ['[Network Scan]*************** ERROR ***************'])
+        mylog('none', ['[Network Scan] ScanCycle %s not found' % conf.cycle ])
+        mylog('none', ['[Network Scan] Exiting...\n'])
         return False

     db.commitDB()
@@ -57,6 +58,7 @@ def scan_network (db):
     read_DHCP_leases (db)
     db.commitDB()

     # Load current scan data
+    mylog('verbose','[Network Scan] Processing scan results')
     save_scanned_devices (db, arpscan_devices, cycle_interval)
@@ -111,7 +113,6 @@ def scan_network (db):
     # if ENABLE_PLUGINS:
    #     run_plugin_scripts(db,'always_after_scan')

     return reporting
-
 #-------------------------------------------------------------------------------
 def query_ScanCycle_Data (db, pOpenCloseDB = False, cycle = 1):
@@ -21,7 +21,7 @@ def timeNow():


 #-------------------------------------------------------------------------------
-def run_plugin_scripts(db, runType):
+def run_plugin_scripts(db, runType, plugins = conf.plugins):

     # global plugins, tz, mySchedules

@@ -30,7 +30,7 @@ def run_plugin_scripts(db, runType):

     mylog('debug', [' [Plugins] Check if any plugins need to be executed on run type: ', runType])

-    for plugin in conf.plugins:
+    for plugin in plugins:

         shouldRun = False
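Reviewer note: plugins = conf.plugins as a default argument is evaluated once, when plugin.py is imported. If importConfigs() later rebinds conf.plugins to a new list, callers that rely on the default still see the old one. Runnable sketch of the pitfall; the fake conf module stands in for the real one:

    import sys
    import types

    conf = types.ModuleType('conf')
    conf.plugins = []                 # state when `def` executes
    sys.modules['conf'] = conf

    def run_plugin_scripts(runType, plugins=conf.plugins):
        # `plugins` defaults to the list object captured at definition time,
        # not to whatever conf.plugins points at now.
        return len(plugins)

    conf.plugins = [{'unique_prefix': 'NEWDEV'}]     # config reloaded later

    print(run_plugin_scripts('once'))                # 0 - stale default
    print(run_plugin_scripts('once', conf.plugins))  # 1 - explicit argument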
@@ -13,7 +13,7 @@ from json2table import convert
 # pialert modules
 import conf
 from const import pialertPath, logPath
-from database import get_table_as_json
+#from database import get_table_as_json
 from files import get_file_content, write_file
 from helper import generate_mac_links, isNewVersion, removeDuplicateNewLines, timeNow, hide_email, json_struc, updateState
 from logger import logResult, mylog, print_log
@@ -52,7 +52,7 @@ def construct_notifications(db, sqlQuery, tableTitle, skipText = False, suppliedJsonStruct = None):
     text_line = '{}\t{}\n'

     if suppliedJsonStruct is None:
-        json_struc = get_table_as_json(db, sqlQuery)
+        json_struc = db.get_table_as_json(sqlQuery)
     else:
         json_struc = suppliedJsonStruct
@@ -96,7 +96,7 @@ def send_notifications (db):
     plugins_report = False

     # Reporting section
-    mylog('verbose', [' Check if something to report'])
+    mylog('verbose', ['[Notification] Check if something to report'])

     # prepare variables for JSON construction
     json_internet = []
@@ -254,47 +254,47 @@ def send_notifications (db):

         # update_api(True) # TO-DO

-        mylog('none', [' Changes detected, sending reports'])
+        mylog('none', ['[Notification] Changes detected, sending reports'])

         if conf.REPORT_MAIL and check_config('email'):
             updateState(db,"Send: Email")
-            mylog('info', [' Sending report by Email'])
+            mylog('info', ['[Notification] Sending report by Email'])
             send_email (mail_text, mail_html)
         else :
-            mylog('verbose', [' Skip email'])
+            mylog('verbose', ['[Notification] Skip email'])
         if conf.REPORT_APPRISE and check_config('apprise'):
             updateState(db,"Send: Apprise")
-            mylog('info', [' Sending report by Apprise'])
+            mylog('info', ['[Notification] Sending report by Apprise'])
             send_apprise (mail_html, mail_text)
         else :
-            mylog('verbose', [' Skip Apprise'])
+            mylog('verbose', ['[Notification] Skip Apprise'])
         if conf.REPORT_WEBHOOK and check_config('webhook'):
             updateState(db,"Send: Webhook")
-            mylog('info', [' Sending report by Webhook'])
+            mylog('info', ['[Notification] Sending report by Webhook'])
             send_webhook (json_final, mail_text)
         else :
-            mylog('verbose', [' Skip webhook'])
+            mylog('verbose', ['[Notification] Skip webhook'])
         if conf.REPORT_NTFY and check_config('ntfy'):
             updateState(db,"Send: NTFY")
-            mylog('info', [' Sending report by NTFY'])
+            mylog('info', ['[Notification] Sending report by NTFY'])
             send_ntfy (mail_text)
         else :
-            mylog('verbose', [' Skip NTFY'])
+            mylog('verbose', ['[Notification] Skip NTFY'])
         if conf.REPORT_PUSHSAFER and check_config('pushsafer'):
             updateState(db,"Send: PUSHSAFER")
-            mylog('info', [' Sending report by PUSHSAFER'])
+            mylog('info', ['[Notification] Sending report by PUSHSAFER'])
             send_pushsafer (mail_text)
         else :
-            mylog('verbose', [' Skip PUSHSAFER'])
+            mylog('verbose', ['[Notification] Skip PUSHSAFER'])
         # Update MQTT entities
         if conf.REPORT_MQTT and check_config('mqtt'):
             updateState(db,"Send: MQTT")
-            mylog('info', [' Establishing MQTT thread'])
+            mylog('info', ['[Notification] Establishing MQTT thread'])
             mqtt_start()
         else :
-            mylog('verbose', [' Skip MQTT'])
+            mylog('verbose', ['[Notification] Skip MQTT'])
     else :
-        mylog('verbose', [' No changes to report'])
+        mylog('verbose', ['[Notification] No changes to report'])

     # Clean Pending Alert Events
     sql.execute ("""UPDATE Devices SET dev_LastNotification = ?
@@ -310,7 +310,7 @@ def send_notifications (db):
     changedPorts_json_struc = None

     # DEBUG - print number of rows updated
-    mylog('info', ['[', timeNow(), '] Notifications: ', sql.rowcount])
+    mylog('info', ['[Notification] Notifications changes: ', sql.rowcount])

     # Commit changes
     db.commitDB()
@@ -321,42 +321,42 @@ def check_config(service):

     if service == 'email':
         if conf.SMTP_SERVER == '' or conf.REPORT_FROM == '' or conf.REPORT_TO == '':
-            mylog('none', [' Error: Email service not set up correctly. Check your pialert.conf SMTP_*, REPORT_FROM and REPORT_TO variables.'])
+            mylog('none', ['[Check Config] Error: Email service not set up correctly. Check your pialert.conf SMTP_*, REPORT_FROM and REPORT_TO variables.'])
             return False
         else:
             return True

     if service == 'apprise':
         if conf.APPRISE_URL == '' or conf.APPRISE_HOST == '':
-            mylog('none', [' Error: Apprise service not set up correctly. Check your pialert.conf APPRISE_* variables.'])
+            mylog('none', ['[Check Config] Error: Apprise service not set up correctly. Check your pialert.conf APPRISE_* variables.'])
             return False
         else:
             return True

     if service == 'webhook':
         if conf.WEBHOOK_URL == '':
-            mylog('none', [' Error: Webhook service not set up correctly. Check your pialert.conf WEBHOOK_* variables.'])
+            mylog('none', ['[Check Config] Error: Webhook service not set up correctly. Check your pialert.conf WEBHOOK_* variables.'])
             return False
         else:
             return True

     if service == 'ntfy':
         if conf.NTFY_HOST == '' or conf.NTFY_TOPIC == '':
-            mylog('none', [' Error: NTFY service not set up correctly. Check your pialert.conf NTFY_* variables.'])
+            mylog('none', ['[Check Config] Error: NTFY service not set up correctly. Check your pialert.conf NTFY_* variables.'])
             return False
         else:
             return True

     if service == 'pushsafer':
         if conf.PUSHSAFER_TOKEN == 'ApiKey':
-            mylog('none', [' Error: Pushsafer service not set up correctly. Check your pialert.conf PUSHSAFER_TOKEN variable.'])
+            mylog('none', ['[Check Config] Error: Pushsafer service not set up correctly. Check your pialert.conf PUSHSAFER_TOKEN variable.'])
             return False
         else:
             return True

     if service == 'mqtt':
         if conf.MQTT_BROKER == '' or conf.MQTT_PORT == '' or conf.MQTT_USER == '' or conf.MQTT_PASSWORD == '':
-            mylog('none', [' Error: MQTT service not set up correctly. Check your pialert.conf MQTT_* variables.'])
+            mylog('none', ['[Check Config] Error: MQTT service not set up correctly. Check your pialert.conf MQTT_* variables.'])
             return False
         else:
             return True
@@ -3,7 +3,6 @@ import subprocess

 from logger import mylog

-
 #-------------------------------------------------------------------------------
 def execute_arpscan (userSubnets):

@@ -34,6 +33,7 @@ def execute_arpscan (userSubnets):
             unique_devices.append(device)

     # return list
+    mylog('debug', ['[ARP Scan] Completed found ', len(unique_devices) ,' devices ' ])
     return unique_devices

 #-------------------------------------------------------------------------------
@@ -41,6 +41,7 @@ def execute_arpscan_on_interface (interface):
     # Prepare command arguments
     subnets = interface.strip().split()
     # Retry is 6 to avoid false offline devices
+    mylog('debug', ['[ARP Scan] - arpscan command: sudo arp-scan --ignoredups --retry=6 ', str(subnets)])
     arpscan_args = ['sudo', 'arp-scan', '--ignoredups', '--retry=6'] + subnets

     # Execute command
@@ -49,7 +50,8 @@ def execute_arpscan_on_interface (interface):
         result = subprocess.check_output (arpscan_args, universal_newlines=True)
     except subprocess.CalledProcessError as e:
         # An error occured, handle it
-        mylog('none', [e.output])
+        mylog('none', ['[ARP Scan]', e.output])
         result = ""

+    mylog('debug', ['[ARP Scan] on Interface Completed with results: ', result])
     return result
@@ -23,53 +23,53 @@ def check_internet_IP ( db ):

     # Header
     updateState(db,"Scan: Internet IP")
-    mylog('verbose', ['[', timeNow(), '] Check Internet IP:'])
+    mylog('verbose', ['[Internet IP] Check Internet IP started'])

     # Get Internet IP
-    mylog('verbose', [' Retrieving Internet IP:'])
+    mylog('verbose', ['[Internet IP] - Retrieving Internet IP'])
     internet_IP = get_internet_IP(conf.DIG_GET_IP_ARG)
     # TESTING - Force IP
     # internet_IP = "1.2.3.4"

     # Check result = IP
     if internet_IP == "" :
-        mylog('none', [' Error retrieving Internet IP'])
-        mylog('none', [' Exiting...'])
+        mylog('none', ['[Internet IP] Error retrieving Internet IP'])
+        mylog('none', ['[Internet IP] Exiting...'])
         return False
-    mylog('verbose', [' ', internet_IP])
+    mylog('verbose', ['[Internet IP] IP: ', internet_IP])

     # Get previous stored IP
-    mylog('verbose', [' Retrieving previous IP:'])
+    mylog('verbose', ['[Internet IP] Retrieving previous IP:'])
     previous_IP = get_previous_internet_IP (db)
-    mylog('verbose', [' ', previous_IP])
+    mylog('verbose', ['[Internet IP] ', previous_IP])

     # Check IP Change
     if internet_IP != previous_IP :
-        mylog('info', [' New internet IP: ', internet_IP])
+        mylog('info', ['[Internet IP] New internet IP: ', internet_IP])
         save_new_internet_IP (db, internet_IP)

     else :
-        mylog('verbose', [' No changes to perform'])
+        mylog('verbose', ['[Internet IP] No changes to perform'])

     # Get Dynamic DNS IP
     if conf.DDNS_ACTIVE :
-        mylog('verbose', [' Retrieving Dynamic DNS IP'])
+        mylog('verbose', ['[DDNS] Retrieving Dynamic DNS IP'])
         dns_IP = get_dynamic_DNS_IP()

         # Check Dynamic DNS IP
         if dns_IP == "" or dns_IP == "0.0.0.0" :
-            mylog('info', [' Error retrieving Dynamic DNS IP'])
-            mylog('info', [' ', dns_IP])
+            mylog('none', ['[DDNS] Error retrieving Dynamic DNS IP'])
+            mylog('none', ['[DDNS] ', dns_IP])

         # Check DNS Change
         if dns_IP != internet_IP :
-            mylog('info', [' Updating Dynamic DNS IP'])
+            mylog('none', ['[DDNS] Updating Dynamic DNS IP'])
             message = set_dynamic_DNS_IP ()
-            mylog('info', [' ', message])
+            mylog('none', ['[DDNS] ', message])
         else :
-            mylog('verbose', [' No changes to perform'])
+            mylog('verbose', ['[DDNS] No changes to perform'])
     else :
-        mylog('verbose', [' Skipping Dynamic DNS update'])
+        mylog('verbose', ['[DDNS] Skipping Dynamic DNS update'])



@@ -164,7 +164,7 @@ def get_dynamic_DNS_IP ():
         dig_output = subprocess.check_output (dig_args, universal_newlines=True)
     except subprocess.CalledProcessError as e:
         # An error occured, handle it
-        mylog('none', [e.output])
+        mylog('none', ['[DDNS] ERROR - ', e.output])
         dig_output = '' # probably no internet

     # Check result is an IP
@@ -189,7 +189,7 @@ def set_dynamic_DNS_IP ():
                         universal_newlines=True)
     except subprocess.CalledProcessError as e:
         # An error occured, handle it
-        mylog('none', [e.output])
+        mylog('none', ['[DDNS] ERROR - ',e.output])
         curl_output = ""

     return curl_output
@@ -15,7 +15,7 @@ def performPholusScan (db, timeoutSec, userSubnets):
         temp = subnet.split("--interface=")

         if len(temp) != 2:
-            mylog('none', [" Skip scan (need subnet in format '192.168.1.0/24 --inteface=eth0'), got: ", subnet])
+            mylog('none', ["[PholusScan] Skip scan (need subnet in format '192.168.1.0/24 --interface=eth0'), got: ", subnet])
             return

         mask = temp[0].strip()
@@ -23,8 +23,8 @@ def performPholusScan (db, timeoutSec, userSubnets):

         # logging & updating app state
         updateState(db,"Scan: Pholus")
-        mylog('info', ['[', timeNow(), '] Scan: Pholus for ', str(timeoutSec), 's ('+ str(round(int(timeoutSec) / 60, 1)) +'min)'])
-        mylog('verbose', [" Pholus scan on [interface] ", interface, " [mask] " , mask])
+        mylog('none', ['[PholusScan] Scan: Pholus for ', str(timeoutSec), 's ('+ str(round(int(timeoutSec) / 60, 1)) +'min)'])
+        mylog('verbose', ["[PholusScan] Pholus scan on [interface] ", interface, " [mask] " , mask])

         # the scan always lasts 2x as long, so the desired user time from settings needs to be halved
         adjustedTimeout = str(round(int(timeoutSec) / 2, 0))
@@ -40,15 +40,15 @@ def performPholusScan (db, timeoutSec, userSubnets):
             output = subprocess.check_output (pholus_args, universal_newlines=True, stderr=subprocess.STDOUT, timeout=(timeoutSec + 30))
         except subprocess.CalledProcessError as e:
             # An error occured, handle it
-            mylog('none', [e.output])
-            mylog('none', [" Error - Pholus Scan - check logs"])
+            mylog('none', ['[PholusScan] ', e.output])
+            mylog('none', ["[PholusScan] Error - Pholus Scan - check logs"])
         except subprocess.TimeoutExpired as timeErr:
-            mylog('none', [' Pholus TIMEOUT - the process forcefully terminated as timeout reached'])
+            mylog('none', ['[PholusScan] Pholus TIMEOUT - the process forcefully terminated as timeout reached'])

         if output == "": # check if the subprocess failed
-            mylog('none', ['[', timeNow(), '] Scan: Pholus FAIL - check logs'])
+            mylog('none', ['[PholusScan] Scan: Pholus FAIL - check logs'])
         else:
-            mylog('verbose', ['[', timeNow(), '] Scan: Pholus SUCCESS'])
+            mylog('verbose', ['[PholusScan] Scan: Pholus SUCCESS'])

         # check the last run output
         f = open(logPath + '/pialert_pholus_lastrun.log', 'r+')
@@ -176,7 +176,7 @@ def resolve_device_name_dig (pMAC, pIP):
         newName = subprocess.check_output (dig_args, universal_newlines=True)
     except subprocess.CalledProcessError as e:
         # An error occured, handle it
-        mylog('none', [e.output])
+        mylog('none', ['[device_name_dig] ', e.output])
         # newName = "Error - check logs"
         return -1
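The Pholus wrapper above bounds the child process with a timeout and handles the two failure paths (non-zero exit and timeout) separately. A minimal standalone version of that call pattern:

    import subprocess

    def run_bounded(args, timeout_sec):
        """Run a command, capturing stderr too; return '' on error or timeout."""
        try:
            return subprocess.check_output(
                args, universal_newlines=True,
                stderr=subprocess.STDOUT, timeout=timeout_sec)
        except subprocess.CalledProcessError as e:
            print('[sketch] non-zero exit:', e.output)
        except subprocess.TimeoutExpired:
            print('[sketch] TIMEOUT - process forcefully terminated')
        return ''

    print(run_bounded(['echo', 'hello'], 5))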
@@ -1,6 +1,10 @@
 """ module to import db and leases from PiHole """

+import sqlite3
+
 import conf
 from const import piholeDB, piholeDhcpleases
 from logger import mylog

 #-------------------------------------------------------------------------------
 def copy_pihole_network (db):
@@ -10,10 +14,19 @@ def copy_pihole_network (db):

     sql = db.sql # TO-DO
     # Open Pi-hole DB
+    mylog('debug', '[PiHole Network] - attach PiHole DB')
+
+    try:
         sql.execute ("ATTACH DATABASE '"+ piholeDB +"' AS PH")
+    except sqlite3.Error as e:
+        mylog('none',[ '[PiHole Network] - SQL ERROR: ', e])

     # Copy Pi-hole Network table
+    try:
         sql.execute ("DELETE FROM PiHole_Network")

         sql.execute ("""INSERT INTO PiHole_Network (PH_MAC, PH_Vendor, PH_LastQuery,
                                                     PH_Name, PH_IP)
                         SELECT hwaddr, macVendor, lastQuery,
@@ -28,8 +41,13 @@ def copy_pihole_network (db):
                         WHERE PH_Name IS NULL OR PH_Name = '' """)
         # Close Pi-hole DB
         sql.execute ("DETACH PH")
-    db.commit()
+
+    except sqlite3.Error as e:
+        mylog('none',[ '[PiHole Network] - SQL ERROR: ', e])

+    db.commitDB()

+    mylog('debug',[ '[PiHole Network] - completed - found ',sql.rowcount, ' devices'])
     return str(sql.rowcount) != "0"

 #-------------------------------------------------------------------------------
@@ -37,11 +55,11 @@ def read_DHCP_leases (db):
     """
     read the PiHole DHCP file and insert all records into the DHCP_Leases table.
     """
-    sql = db.sql # TO-DO
+    mylog('debug', '[PiHole DHCP] - read DHCP_Leases file')
     # Read DHCP Leases
     # Bugfix #1 - dhcp.leases: lines with different number of columns (5 col)
     data = []
     reporting = False
     with open(piholeDhcpleases, 'r') as f:
         for line in f:
             reporting = True
@@ -50,8 +68,11 @@ def read_DHCP_leases (db):
             data.append (row)

     # Insert into PiAlert table
-    sql.executemany ("""INSERT INTO DHCP_Leases (DHCP_DateTime, DHCP_MAC,
+    db.sql.executemany ("""INSERT INTO DHCP_Leases (DHCP_DateTime, DHCP_MAC,
                                                   DHCP_IP, DHCP_Name, DHCP_MAC2)
                          VALUES (?, ?, ?, ?, ?)
                       """, data)
-    db.commit()
+    db.commitDB()

+    mylog('debug', ['[PiHole DHCP] - completed - added ',len(data), ' devices.'])
     return reporting
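Reviewer note: copy_pihole_network pulls rows straight out of Pi-hole's own SQLite file by attaching it to the PiAlert connection under an alias. The core pattern in isolation, as a runnable sketch with throwaway files; the table and column names are illustrative, not Pi-hole's real schema:

    import os
    import sqlite3
    import tempfile

    # Stand-in for pihole-FTL.db
    path = os.path.join(tempfile.mkdtemp(), 'pihole.db')
    ph = sqlite3.connect(path)
    ph.execute("CREATE TABLE network (hwaddr TEXT, name TEXT)")
    ph.execute("INSERT INTO network VALUES ('aa:bb:cc:dd:ee:ff', 'printer')")
    ph.commit()
    ph.close()

    # Stand-in for pialert.db
    main = sqlite3.connect(':memory:')
    main.execute("CREATE TABLE PiHole_Network (PH_MAC TEXT, PH_Name TEXT)")

    # Attach the foreign DB under an alias, copy rows across, detach again
    main.execute(f"ATTACH DATABASE '{path}' AS PH")
    main.execute("INSERT INTO PiHole_Network SELECT hwaddr, name FROM PH.network")
    main.execute("DETACH PH")

    print(main.execute("SELECT * FROM PiHole_Network").fetchall())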