diff --git a/dockerfiles/start.sh b/dockerfiles/start.sh
index 44535788..4ff77756 100755
--- a/dockerfiles/start.sh
+++ b/dockerfiles/start.sh
@@ -28,4 +28,6 @@ chmod -R a+rw /home/pi/pialert/config
/etc/init.d/nginx start
# cron -f
-python /home/pi/pialert/back/pialert.py
+#python /home/pi/pialert/back/pialert.py
+echo "DATA MONKEY VERSION ..."
+python /home/pi/pialert/pialert/pialert.py
diff --git a/front/plugins/undiscoverables/config.json b/front/plugins/undiscoverables/config.json
index de03f76f..fd316f49 100755
--- a/front/plugins/undiscoverables/config.json
+++ b/front/plugins/undiscoverables/config.json
@@ -45,7 +45,7 @@
}],
"description": [{
"language_code":"en_us",
- "string" : "When enabled, \"once\" is the preferred option. It runs at startup and after every save of the config here.
Changes will only show in the devices after the next scan!"
+ "string" : "When enabled, ONCE is the preferred option. It runs at startup and after every save of the config here.
Changes will only show in the devices after the next scan!"
}]
},
{
@@ -214,4 +214,4 @@
}]
}
]
-}
+}
\ No newline at end of file
diff --git a/pialert/conf.py b/pialert/conf.py
index a5e651e1..69157e1d 100644
--- a/pialert/conf.py
+++ b/pialert/conf.py
@@ -15,6 +15,8 @@ plugins_once_run = False
newVersionAvailable = False
time_started = ''
check_report = []
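+# elapsed-time reference used by logger.print_log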
+log_timestamp = 0
+
# ACTUAL CONFIGURATION ITEMS set to defaults
@@ -103,9 +105,9 @@ PHOLUS_DAYS_DATA = 0
# Nmap
NMAP_ACTIVE = True
NMAP_TIMEOUT = 150
-NMAP_RUN = 'none'
+NMAP_RUN = 'once'
NMAP_RUN_SCHD = '0 2 * * *'
-NMAP_ARGS = '-p -10000'
+NMAP_ARGS = '-p -10000 --max-parallelism 100'
# API
API_CUSTOM_SQL = 'SELECT * FROM Devices WHERE dev_PresentLastScan = 0'
\ No newline at end of file
diff --git a/pialert/const.py b/pialert/const.py
index 132a88b8..ab720c0a 100644
--- a/pialert/const.py
+++ b/pialert/const.py
@@ -3,8 +3,8 @@
#===============================================================================
# PATHS
#===============================================================================
-# pialertPath = '/home/pi/pialert'
-pialertPath ='/home/roland/repos/Pi.Alert'
+pialertPath = '/home/pi/pialert'
+#pialertPath ='/home/roland/repos/Pi.Alert'
confPath = "/config/pialert.conf"
dbPath = '/db/pialert.db'
diff --git a/pialert/database.py b/pialert/database.py
index 2e0d433a..06857214 100644
--- a/pialert/database.py
+++ b/pialert/database.py
@@ -56,7 +56,6 @@ class DB():
self.sql_connection.row_factory = sqlite3.Row
self.sql = self.sql_connection.cursor()
-
#-------------------------------------------------------------------------------
def commitDB (self):
if self.sql_connection == None :
@@ -88,7 +87,6 @@ class DB():
return arr
-
#===============================================================================
# Cleanup / upkeep database
#===============================================================================
@@ -142,12 +140,250 @@ class DB():
self.commitDB()
+ #-------------------------------------------------------------------------------
+ def upgradeDB(self):
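+ """Upgrade the SQLite schema in place: create missing tables, add missing columns and drop incompatible ones."""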
+ sql = self.sql #TO-DO
+ # indicates if the Online_History table is available
+ onlineHistoryAvailable = sql.execute("""
+ SELECT name FROM sqlite_master WHERE type='table'
+ AND name='Online_History';
+ """).fetchall() != []
+ # Check if it is incompatible (Check if table has all required columns)
+ isIncompatible = False
+
+ if onlineHistoryAvailable :
+ isIncompatible = sql.execute ("""
+ SELECT COUNT(*) AS CNTREC FROM pragma_table_info('Online_History') WHERE name='Archived_Devices'
+ """).fetchone()[0] == 0
+
+ # Drop table if available, but incompatible
+ if onlineHistoryAvailable and isIncompatible:
+ mylog('none','[upgradeDB] Table is incompatible, dropping the Online_History table')
+ sql.execute("DROP TABLE Online_History;")
+ onlineHistoryAvailable = False
+ if onlineHistoryAvailable == False :
+ sql.execute("""
+ CREATE TABLE "Online_History" (
+ "Index" INTEGER,
+ "Scan_Date" TEXT,
+ "Online_Devices" INTEGER,
+ "Down_Devices" INTEGER,
+ "All_Devices" INTEGER,
+ "Archived_Devices" INTEGER,
+ PRIMARY KEY("Index" AUTOINCREMENT)
+ );
+ """)
+ # Alter Devices table
+ # dev_Network_Node_MAC_ADDR column
+ dev_Network_Node_MAC_ADDR_missing = sql.execute ("""
+ SELECT COUNT(*) AS CNTREC FROM pragma_table_info('Devices') WHERE name='dev_Network_Node_MAC_ADDR'
+ """).fetchone()[0] == 0
+ if dev_Network_Node_MAC_ADDR_missing :
+ mylog('verbose', ["[upgradeDB] Adding dev_Network_Node_MAC_ADDR to the Devices table"])
+ sql.execute("""
+ ALTER TABLE "Devices" ADD "dev_Network_Node_MAC_ADDR" TEXT
+ """)
+ # dev_Network_Node_port column
+ dev_Network_Node_port_missing = sql.execute ("""
+ SELECT COUNT(*) AS CNTREC FROM pragma_table_info('Devices') WHERE name='dev_Network_Node_port'
+ """).fetchone()[0] == 0
+
+ if dev_Network_Node_port_missing :
+ mylog('verbose', ["[upgradeDB] Adding dev_Network_Node_port to the Devices table"])
+ sql.execute("""
+ ALTER TABLE "Devices" ADD "dev_Network_Node_port" INTEGER
+ """)
+
+ # dev_Icon column
+ dev_Icon_missing = sql.execute ("""
+ SELECT COUNT(*) AS CNTREC FROM pragma_table_info('Devices') WHERE name='dev_Icon'
+ """).fetchone()[0] == 0
+
+ if dev_Icon_missing :
+ mylog('verbose', ["[upgradeDB] Adding dev_Icon to the Devices table"])
+ sql.execute("""
+ ALTER TABLE "Devices" ADD "dev_Icon" TEXT
+ """)
+
+ # indicates if the Settings table is available
+ settingsMissing = sql.execute("""
+ SELECT name FROM sqlite_master WHERE type='table'
+ AND name='Settings';
+ """).fetchone() == None
+
+ # Re-creating Settings table
+ mylog('verbose', ["[upgradeDB] Re-creating Settings table"])
+
+ if settingsMissing == False:
+ sql.execute("DROP TABLE Settings;")
+
+ sql.execute("""
+ CREATE TABLE "Settings" (
+ "Code_Name" TEXT,
+ "Display_Name" TEXT,
+ "Description" TEXT,
+ "Type" TEXT,
+ "Options" TEXT,
+ "RegEx" TEXT,
+ "Value" TEXT,
+ "Group" TEXT,
+ "Events" TEXT
+ );
+ """)
+
+ # indicates if the Pholus_Scan table is available
+ pholusScanMissing = sql.execute("""
+ SELECT name FROM sqlite_master WHERE type='table'
+ AND name='Pholus_Scan';
+ """).fetchone() == None
+
+ # if pholusScanMissing == False:
+ # # Re-creating Pholus_Scan table
+ # sql.execute("DROP TABLE Pholus_Scan;")
+ # pholusScanMissing = True
+
+ if pholusScanMissing:
+ mylog('verbose', ["[upgradeDB] Re-creating Pholus_Scan table"])
+ sql.execute("""
+ CREATE TABLE "Pholus_Scan" (
+ "Index" INTEGER,
+ "Info" TEXT,
+ "Time" TEXT,
+ "MAC" TEXT,
+ "IP_v4_or_v6" TEXT,
+ "Record_Type" TEXT,
+ "Value" TEXT,
+ "Extra" TEXT,
+ PRIMARY KEY("Index" AUTOINCREMENT)
+ );
+ """)
+
+ # indicates if the Nmap_Scan table is available
+ nmapScanMissing = sql.execute("""
+ SELECT name FROM sqlite_master WHERE type='table'
+ AND name='Nmap_Scan';
+ """).fetchone() == None
+
+ # Re-creating Parameters table
+ mylog('verbose', ["[upgradeDB] Re-creating Parameters table"])
+ sql.execute("DROP TABLE Parameters;")
+
+ sql.execute("""
+ CREATE TABLE "Parameters" (
+ "par_ID" TEXT PRIMARY KEY,
+ "par_Value" TEXT
+ );
+ """)
+
+ # Initialize Parameters if unavailable
+ initOrSetParam(self, 'Back_App_State','Initializing')
+
+ # if nmapScanMissing == False:
+ # # Re-creating Nmap_Scan table
+ # sql.execute("DROP TABLE Nmap_Scan;")
+ # nmapScanMissing = True
+
+ if nmapScanMissing:
+ mylog('verbose', ["[upgradeDB] Re-creating Nmap_Scan table"])
+ sql.execute("""
+ CREATE TABLE "Nmap_Scan" (
+ "Index" INTEGER,
+ "MAC" TEXT,
+ "Port" TEXT,
+ "Time" TEXT,
+ "State" TEXT,
+ "Service" TEXT,
+ "Extra" TEXT,
+ PRIMARY KEY("Index" AUTOINCREMENT)
+ );
+ """)
+
+ # Plugin state
+ sql_Plugins_Objects = """ CREATE TABLE IF NOT EXISTS Plugins_Objects(
+ "Index" INTEGER,
+ Plugin TEXT NOT NULL,
+ Object_PrimaryID TEXT NOT NULL,
+ Object_SecondaryID TEXT NOT NULL,
+ DateTimeCreated TEXT NOT NULL,
+ DateTimeChanged TEXT NOT NULL,
+ Watched_Value1 TEXT NOT NULL,
+ Watched_Value2 TEXT NOT NULL,
+ Watched_Value3 TEXT NOT NULL,
+ Watched_Value4 TEXT NOT NULL,
+ Status TEXT NOT NULL,
+ Extra TEXT NOT NULL,
+ UserData TEXT NOT NULL,
+ ForeignKey TEXT NOT NULL,
+ PRIMARY KEY("Index" AUTOINCREMENT)
+ ); """
+ sql.execute(sql_Plugins_Objects)
+
+ # Plugin execution results
+ sql_Plugins_Events = """ CREATE TABLE IF NOT EXISTS Plugins_Events(
+ "Index" INTEGER,
+ Plugin TEXT NOT NULL,
+ Object_PrimaryID TEXT NOT NULL,
+ Object_SecondaryID TEXT NOT NULL,
+ DateTimeCreated TEXT NOT NULL,
+ DateTimeChanged TEXT NOT NULL,
+ Watched_Value1 TEXT NOT NULL,
+ Watched_Value2 TEXT NOT NULL,
+ Watched_Value3 TEXT NOT NULL,
+ Watched_Value4 TEXT NOT NULL,
+ Status TEXT NOT NULL,
+ Extra TEXT NOT NULL,
+ UserData TEXT NOT NULL,
+ ForeignKey TEXT NOT NULL,
+ PRIMARY KEY("Index" AUTOINCREMENT)
+ ); """
+ sql.execute(sql_Plugins_Events)
+
+ # Plugin execution history
+ sql_Plugins_History = """ CREATE TABLE IF NOT EXISTS Plugins_History(
+ "Index" INTEGER,
+ Plugin TEXT NOT NULL,
+ Object_PrimaryID TEXT NOT NULL,
+ Object_SecondaryID TEXT NOT NULL,
+ DateTimeCreated TEXT NOT NULL,
+ DateTimeChanged TEXT NOT NULL,
+ Watched_Value1 TEXT NOT NULL,
+ Watched_Value2 TEXT NOT NULL,
+ Watched_Value3 TEXT NOT NULL,
+ Watched_Value4 TEXT NOT NULL,
+ Status TEXT NOT NULL,
+ Extra TEXT NOT NULL,
+ UserData TEXT NOT NULL,
+ ForeignKey TEXT NOT NULL,
+ PRIMARY KEY("Index" AUTOINCREMENT)
+ ); """
+ sql.execute(sql_Plugins_History)
+
+ # Dynamically generated language strings
+ # indicates if the Plugins_Language_Strings table is available
+ languageStringsMissing = sql.execute("""
+ SELECT name FROM sqlite_master WHERE type='table'
+ AND name='Plugins_Language_Strings';
+ """).fetchone() == None
+
+ if languageStringsMissing == False:
+ sql.execute("DROP TABLE Plugins_Language_Strings;")
+
+ sql.execute(""" CREATE TABLE IF NOT EXISTS Plugins_Language_Strings(
+ "Index" INTEGER,
+ Language_Code TEXT NOT NULL,
+ String_Key TEXT NOT NULL,
+ String_Value TEXT NOT NULL,
+ Extra TEXT NOT NULL,
+ PRIMARY KEY("Index" AUTOINCREMENT)
+ ); """)
+
+ self.commitDB()
#-------------------------------------------------------------------------------
diff --git a/pialert/device.py b/pialert/device.py
index 6d52ad34..e22ab380 100644
--- a/pialert/device.py
+++ b/pialert/device.py
@@ -9,7 +9,7 @@ from helper import timeNow
from internet import check_IP_format, get_internet_IP
from logger import mylog, print_log
from mac_vendor import query_MAC_vendor
-from pholusscan import performPholusScan, resolve_device_name_pholus
+from pholusscan import performPholusScan, resolve_device_name_dig, resolve_device_name_pholus
#-------------------------------------------------------------------------------
@@ -167,7 +167,7 @@ def create_new_devices (db):
startTime = timeNow()
# arpscan - Insert events for new devices
- print_log ('New devices - 1 Events')
+ mylog('debug','[New Devices] 1 Events')
sql.execute ("""INSERT INTO Events (eve_MAC, eve_IP, eve_DateTime,
eve_EventType, eve_AdditionalInfo,
eve_PendingAlertEmail)
@@ -178,7 +178,7 @@ def create_new_devices (db):
WHERE dev_MAC = cur_MAC) """,
(startTime, conf.cycle) )
- print_log ('New devices - Insert Connection into session table')
+ mylog('debug','[New Devices] Insert Connection into session table')
sql.execute ("""INSERT INTO Sessions (ses_MAC, ses_IP, ses_EventTypeConnection, ses_DateTimeConnection,
ses_EventTypeDisconnection, ses_DateTimeDisconnection, ses_StillConnected, ses_AdditionalInfo)
SELECT cur_MAC, cur_IP,'Connected',?, NULL , NULL ,1, cur_Vendor
@@ -189,7 +189,7 @@ def create_new_devices (db):
(startTime, conf.cycle) )
# arpscan - Create new devices
- print_log ('New devices - 2 Create devices')
+ mylog('debug','[New Devices] 2 Create devices')
sql.execute ("""INSERT INTO Devices (dev_MAC, dev_name, dev_Vendor,
dev_LastIP, dev_FirstConnection, dev_LastConnection,
dev_ScanCycle, dev_AlertEvents, dev_AlertDeviceDown,
@@ -205,7 +205,7 @@ def create_new_devices (db):
# Pi-hole - Insert events for new devices
# NOT STRICTLY NECESSARY (Devices can be created through Current_Scan)
# Bugfix #2 - Pi-hole devices w/o IP
- print_log ('New devices - 3 Pi-hole Events')
+ mylog('debug','[New Devices] 3 Pi-hole Events')
sql.execute ("""INSERT INTO Events (eve_MAC, eve_IP, eve_DateTime,
eve_EventType, eve_AdditionalInfo,
eve_PendingAlertEmail)
@@ -218,7 +218,7 @@ def create_new_devices (db):
# Pi-hole - Create New Devices
# Bugfix #2 - Pi-hole devices w/o IP
- print_log ('New devices - 4 Pi-hole Create devices')
+ mylog('debug','[New Devices] 4 Pi-hole Create devices')
sql.execute ("""INSERT INTO Devices (dev_MAC, dev_name, dev_Vendor,
dev_LastIP, dev_FirstConnection, dev_LastConnection,
dev_ScanCycle, dev_AlertEvents, dev_AlertDeviceDown,
@@ -231,7 +231,7 @@ def create_new_devices (db):
(startTime, startTime) )
# DHCP Leases - Insert events for new devices
- print_log ('New devices - 5 DHCP Leases Events')
+ mylog('debug','[New Devices] 5 DHCP Leases Events')
sql.execute ("""INSERT INTO Events (eve_MAC, eve_IP, eve_DateTime,
eve_EventType, eve_AdditionalInfo,
eve_PendingAlertEmail)
@@ -242,7 +242,7 @@ def create_new_devices (db):
(startTime, ) )
# DHCP Leases - Create New Devices
- print_log ('New devices - 6 DHCP Leases Create devices')
+ mylog('debug','[New Devices] 6 DHCP Leases Create devices')
# BUGFIX #23 - Duplicated MAC in DHCP.Leases
# TEST - Force Duplicated MAC
# sql.execute ("""INSERT INTO DHCP_Leases VALUES
@@ -276,7 +276,7 @@ def create_new_devices (db):
# WHERE NOT EXISTS (SELECT 1 FROM Devices
# WHERE dev_MAC = DHCP_MAC) """,
# (startTime, startTime) )
- print_log ('New Devices end')
+ mylog('debug','[New Devices] New Devices end')
db.commitDB()
@@ -285,7 +285,7 @@ def update_devices_data_from_scan (db):
sql = db.sql #TO-DO
startTime = timeNow()
# Update Last Connection
- print_log ('Update devices - 1 Last Connection')
+ mylog('debug','[Update Devices] 1 Last Connection')
sql.execute ("""UPDATE Devices SET dev_LastConnection = ?,
dev_PresentLastScan = 1
WHERE dev_ScanCycle = ?
@@ -296,7 +296,7 @@ def update_devices_data_from_scan (db):
(startTime, conf.cycle))
# Clean no active devices
- print_log ('Update devices - 2 Clean no active devices')
+ mylog('debug','[Update Devices] 2 Clean no active devices')
sql.execute ("""UPDATE Devices SET dev_PresentLastScan = 0
WHERE dev_ScanCycle = ?
AND NOT EXISTS (SELECT 1 FROM CurrentScan
@@ -305,7 +305,7 @@ def update_devices_data_from_scan (db):
(conf.cycle,))
# Update IP & Vendor
- print_log ('Update devices - 3 LastIP & Vendor')
+ mylog('debug','[Update Devices] 3 LastIP & Vendor')
sql.execute ("""UPDATE Devices
SET dev_LastIP = (SELECT cur_IP FROM CurrentScan
WHERE dev_MAC = cur_MAC
@@ -320,7 +320,7 @@ def update_devices_data_from_scan (db):
(conf.cycle,))
# Pi-hole Network - Update (unknown) Name
- print_log ('Update devices - 4 Unknown Name')
+ mylog('debug','[Update Devices] 4 Unknown Name')
sql.execute ("""UPDATE Devices
SET dev_NAME = (SELECT PH_Name FROM PiHole_Network
WHERE PH_MAC = dev_MAC)
@@ -341,7 +341,7 @@ def update_devices_data_from_scan (db):
WHERE DHCP_MAC = dev_MAC)""")
# DHCP Leases - Vendor
- print_log ('Update devices - 5 Vendor')
+ mylog('debug','[Update Devices] 5 Vendor')
recordsToUpdate = []
query = """SELECT * FROM Devices
@@ -358,7 +358,7 @@ def update_devices_data_from_scan (db):
# clean-up device leases table
sql.execute ("DELETE FROM DHCP_Leases")
- print_log ('Update devices end')
+ mylog('debug','[Update Devices] Update devices end')
#-------------------------------------------------------------------------------
def update_devices_names (db):
@@ -387,7 +387,7 @@ def update_devices_names (db):
return
# Devices without name
- mylog('verbose', [' Trying to resolve devices without name'])
+ mylog('verbose', '[Update Device Name] Trying to resolve devices without name')
# get names from Pholus scan
sql.execute ('SELECT * FROM Pholus_Scan where "Record_Type"="Answer"')
@@ -395,13 +395,13 @@ def update_devices_names (db):
db.commitDB()
# Number of entries from previous Pholus scans
- mylog('verbose', [" Pholus entries from prev scans: ", len(pholusResults)])
+ mylog('verbose', ['[Update Device Name] Pholus entries from prev scans: ', len(pholusResults)])
for device in unknownDevices:
newName = -1
# Resolve device name with DiG
- newName = resolve_device_name_pholus (device['dev_MAC'], device['dev_LastIP'])
+ newName = resolve_device_name_dig (device['dev_MAC'], device['dev_LastIP'])
# count
if newName != -1:
@@ -422,13 +422,11 @@ def update_devices_names (db):
recordsToUpdate.append ([newName, device['dev_MAC']])
# Print log
- mylog('verbose', [" Names Found (DiG/Pholus): ", len(recordsToUpdate), " (",foundDig,"/",foundPholus ,")" ])
- mylog('verbose', [" Names Not Found : ", len(recordsNotFound) ])
+ mylog('verbose', ['[Update Device Name] Names Found (DiG/Pholus): ', len(recordsToUpdate), ' (', foundDig, '/', foundPholus, ')'])
+ mylog('verbose', ['[Update Device Name] Names Not Found : ', len(recordsNotFound)])
# update not found devices with (name not found)
sql.executemany ("UPDATE Devices SET dev_Name = ? WHERE dev_MAC = ? ", recordsNotFound )
# update names of devices which we were able to resolve
sql.executemany ("UPDATE Devices SET dev_Name = ? WHERE dev_MAC = ? ", recordsToUpdate )
db.commitDB()
-
-
diff --git a/pialert/helper.py b/pialert/helper.py
index 24047a54..85906df7 100644
--- a/pialert/helper.py
+++ b/pialert/helper.py
@@ -4,7 +4,6 @@ import datetime
import os
import re
import subprocess
-from cron_converter import Cron
from pytz import timezone
from datetime import timedelta
import json
diff --git a/pialert/logger.py b/pialert/logger.py
index d090f79d..8b6cf060 100644
--- a/pialert/logger.py
+++ b/pialert/logger.py
@@ -3,14 +3,22 @@ import sys
import io
import datetime
+import conf
from const import *
+#-------------------------------------------------------------------------------
+# duplicated from helper to avoid a circular import
+#-------------------------------------------------------------------------------
+def timeNowTZ():
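+ """Return the current time without microseconds; timezone-aware when conf.tz is set."""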
+ if conf.tz == '':
+ return datetime.datetime.now().replace(microsecond=0)
+ else:
+ return datetime.datetime.now(conf.tz).replace(microsecond=0)
#-------------------------------------------------------------------------------
debugLevels = [
('none', 0), ('minimal', 1), ('verbose', 2), ('debug', 3)
]
-LOG_LEVEL = 'debug'
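+# the active log level now comes from conf.LOG_LEVEL (loaded from the user configuration)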
def mylog(requestedDebugLevel, n):
@@ -19,7 +27,7 @@ def mylog(requestedDebugLevel, n):
# Get debug urgency/relative weight
for lvl in debugLevels:
- if LOG_LEVEL == lvl[0]:
+ if conf.LOG_LEVEL == lvl[0]:
setLvl = lvl[1]
if requestedDebugLevel == lvl[0]:
reqLvl = lvl[1]
@@ -30,7 +38,7 @@ def mylog(requestedDebugLevel, n):
#-------------------------------------------------------------------------------
def file_print (*args):
- result = ''
+ result = timeNowTZ().strftime ('%H:%M:%S') + ' '
for arg in args:
result += str(arg)
@@ -42,14 +50,13 @@ def file_print (*args):
#-------------------------------------------------------------------------------
def print_log (pText):
- global log_timestamp
# Check LOG active
- if not LOG_LEVEL == 'debug' :
+ if not conf.LOG_LEVEL == 'debug' :
return
# Current Time
- log_timestamp2 = datetime.datetime.now().replace(microsecond=0)
+ log_timestamp2 = datetime.datetime.now(conf.tz).replace(microsecond=0)
# Print line + time + elapsed time + text
file_print ('[LOG_LEVEL=debug] ',
@@ -59,7 +66,7 @@ def print_log (pText):
# Save current time to calculate elapsed time until next log
- log_timestamp = log_timestamp2
+ conf.log_timestamp = log_timestamp2
return pText
diff --git a/pialert/networkscan.py b/pialert/networkscan.py
index 3471bdb4..ac4dfbac 100644
--- a/pialert/networkscan.py
+++ b/pialert/networkscan.py
@@ -41,67 +41,67 @@ def scan_network (db):
# arp-scan command
arpscan_devices = []
if conf.ENABLE_ARPSCAN:
- mylog('verbose', [' arp-scan start'])
+ mylog('verbose','[Network Scan] arp-scan start')
arpscan_devices = execute_arpscan (conf.userSubnets)
- print_log ('arp-scan ends')
+ mylog('verbose','[Network Scan] arp-scan ends')
# Pi-hole method
if conf.PIHOLE_ACTIVE :
- mylog('verbose', [' Pi-hole start'])
+ mylog('verbose','[Network Scan] Pi-hole start')
copy_pihole_network(db)
db.commitDB()
# DHCP Leases method
if conf.DHCP_ACTIVE :
- mylog('verbose', [' DHCP Leases start'])
+ mylog('verbose','[Network Scan] DHCP Leases start')
read_DHCP_leases (db)
db.commitDB()
# Load current scan data
- mylog('verbose', [' Processing scan results'])
+ mylog('verbose','[Network Scan] Processing scan results')
save_scanned_devices (db, arpscan_devices, cycle_interval)
# Print stats
- mylog ('none', 'Print Stats')
+ mylog('none','[Network Scan] Print Stats')
print_scan_stats(db)
- mylog ('none', 'Stats end')
+ mylog('none','[Network Scan] Stats end')
# Create Events
- mylog('verbose', [' Updating DB Info'])
- mylog('verbose', [' Sessions Events (connect / discconnect)'])
+ mylog('verbose','[Network Scan] Updating DB Info')
+ mylog('verbose','[Network Scan] Sessions Events (connect / disconnect)')
insert_events(db)
# Create New Devices
# after create events -> avoid 'connection' event
- mylog('verbose', [' Creating new devices'])
+ mylog('verbose','[Network Scan] Creating new devices')
create_new_devices (db)
# Update devices info
- mylog('verbose', [' Updating Devices Info'])
+ mylog('verbose','[Network Scan] Updating Devices Info')
update_devices_data_from_scan (db)
# Resolve devices names
- print_log (' Resolve devices names')
+ mylog('verbose','[Network Scan] Resolve devices names')
update_devices_names(db)
# Void false connection - disconnections
- mylog('verbose', [' Voiding false (ghost) disconnections'])
+ mylog('verbose','[Network Scan] Voiding false (ghost) disconnections')
void_ghost_disconnections (db)
# Pair session events (Connection / Disconnection)
- mylog('verbose', [' Pairing session events (connection / disconnection) '])
+ mylog('verbose','[Network Scan] Pairing session events (connection / disconnection) ')
pair_sessions_events(db)
# Sessions snapshot
- mylog('verbose', [' Creating sessions snapshot'])
+ mylog('verbose','[Network Scan] Creating sessions snapshot')
create_sessions_snapshot (db)
# Sessions snapshot
- mylog('verbose', [' Inserting scan results into Online_History'])
+ mylog('verbose','[Network Scan] Inserting scan results into Online_History')
insertOnlineHistory(db,conf.cycle)
# Skip repeated notifications
- mylog('verbose', [' Skipping repeated notifications'])
+ mylog('verbose','[Network Scan] Skipping repeated notifications')
skip_repeated_notifications (db)
# Commit changes
@@ -131,7 +131,7 @@ def void_ghost_disconnections (db):
sql = db.sql #TO-DO
startTime = timeNow()
# Void connect ghost events (disconnect event exists in last X min.)
- print_log ('Void - 1 Connect ghost events')
+ mylog('debug','[Void Ghost Con] - 1 Connect ghost events')
sql.execute ("""UPDATE Events SET eve_PairEventRowid = Null,
eve_EventType ='VOIDED - ' || eve_EventType
WHERE eve_MAC != 'Internet'
@@ -152,7 +152,7 @@ def void_ghost_disconnections (db):
(startTime, conf.cycle, startTime) )
# Void connect paired events
- print_log ('Void - 2 Paired events')
+ mylog('debug','[Void Ghost Con] - 2 Paired events')
sql.execute ("""UPDATE Events SET eve_PairEventRowid = Null
WHERE eve_MAC != 'Internet'
AND eve_PairEventRowid IN (
@@ -170,7 +170,7 @@ def void_ghost_disconnections (db):
(conf.cycle, startTime) )
# Void disconnect ghost events
- print_log ('Void - 3 Disconnect ghost events')
+ mylog('debug','[Void Ghost Con] - 3 Disconnect ghost events')
sql.execute ("""UPDATE Events SET eve_PairEventRowid = Null,
eve_EventType = 'VOIDED - '|| eve_EventType
WHERE eve_MAC != 'Internet'
@@ -187,7 +187,7 @@ def void_ghost_disconnections (db):
DATETIME (?, '-' || cic_EveryXmin ||' minutes')
) """,
(conf.cycle, startTime) )
- print_log ('Void end')
+ mylog('debug','[Void Ghost Con] Void Ghost Connections end')
db.commitDB()
#-------------------------------------------------------------------------------
@@ -203,7 +203,7 @@ def pair_sessions_events (db):
# Pair Connection / New Device events
- print_log ('Pair session - 1 Connections / New Devices')
+ mylog('debug','[Pair Session] - 1 Connections / New Devices')
sql.execute ("""UPDATE Events
SET eve_PairEventRowid =
(SELECT ROWID
@@ -218,7 +218,7 @@ def pair_sessions_events (db):
""" )
# Pair Disconnection / Device Down
- print_log ('Pair session - 2 Disconnections')
+ mylog('debug','[Pair Session] - 2 Disconnections')
sql.execute ("""UPDATE Events
SET eve_PairEventRowid =
(SELECT ROWID
@@ -227,7 +227,7 @@ def pair_sessions_events (db):
WHERE eve_EventType IN ('Device Down', 'Disconnected')
AND eve_PairEventRowid IS NULL
""" )
- print_log ('Pair session end')
+ mylog('debug','[Pair Session] Pair session end')
db.commitDB()
@@ -236,15 +236,15 @@ def create_sessions_snapshot (db):
sql = db.sql #TO-DO
# Clean sessions snapshot
- print_log ('Sessions Snapshot - 1 Clean')
+ mylog('debug','[Sessions Snapshot] - 1 Clean')
sql.execute ("DELETE FROM SESSIONS" )
# Insert sessions
- print_log ('Sessions Snapshot - 2 Insert')
+ mylog('debug','[Sessions Snapshot] - 2 Insert')
sql.execute ("""INSERT INTO Sessions
SELECT * FROM Convert_Events_to_Sessions""" )
- print_log ('Sessions end')
+ mylog('debug','[Sessions Snapshot] Sessions end')
db.commitDB()
@@ -254,7 +254,7 @@ def insert_events (db):
startTime = timeNow()
# Check device down
- print_log ('Events 1 - Devices down')
+ mylog('debug','[Events] - 1 - Devices down')
sql.execute ("""INSERT INTO Events (eve_MAC, eve_IP, eve_DateTime,
eve_EventType, eve_AdditionalInfo,
eve_PendingAlertEmail)
@@ -269,7 +269,7 @@ def insert_events (db):
(startTime, conf.cycle) )
# Check new connections
- print_log ('Events 2 - New Connections')
+ mylog('debug','[Events] - 2 - New Connections')
sql.execute ("""INSERT INTO Events (eve_MAC, eve_IP, eve_DateTime,
eve_EventType, eve_AdditionalInfo,
eve_PendingAlertEmail)
@@ -281,7 +281,7 @@ def insert_events (db):
(startTime, conf.cycle) )
# Check disconnections
- print_log ('Events 3 - Disconnections')
+ mylog('debug','[Events] - 3 - Disconnections')
sql.execute ("""INSERT INTO Events (eve_MAC, eve_IP, eve_DateTime,
eve_EventType, eve_AdditionalInfo,
eve_PendingAlertEmail)
@@ -297,7 +297,7 @@ def insert_events (db):
(startTime, conf.cycle) )
# Check IP Changed
- print_log ('Events 4 - IP Changes')
+ mylog('debug','[Events] - 4 - IP Changes')
sql.execute ("""INSERT INTO Events (eve_MAC, eve_IP, eve_DateTime,
eve_EventType, eve_AdditionalInfo,
eve_PendingAlertEmail)
@@ -308,6 +308,4 @@ def insert_events (db):
AND dev_ScanCycle = ?
AND dev_LastIP <> cur_IP """,
(startTime, conf.cycle) )
- print_log ('Events end')
-
-
+ mylog('debug','[Events] - Events end')
\ No newline at end of file
diff --git a/pialert/pialert.py b/pialert/pialert.py
index 5c5916f3..9afaea84 100755
--- a/pialert/pialert.py
+++ b/pialert/pialert.py
@@ -15,26 +15,18 @@
#===============================================================================
from __future__ import print_function
-
import sys
from collections import namedtuple
import time
import datetime
-from datetime import timedelta
-import json
-from pathlib import Path
-from cron_converter import Cron
-from json2table import convert
import multiprocessing
-
# pialert modules
import conf
from const import *
from logger import mylog
-from helper import filePermissions, isNewVersion, timeNow, updateState
+from helper import filePermissions, isNewVersion, timeNow, timeNowTZ, updateState
from api import update_api
-from files import get_file_content
from networkscan import scan_network
from initialise import importConfigs
from mac_vendor import update_devices_MAC_vendors
@@ -70,8 +62,8 @@ def main ():
# to be deleted if not used
- log_timestamp = conf.time_started
- cron_instance = Cron()
+ conf.log_timestamp = conf.time_started
+ #cron_instance = Cron()
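+ # cron_converter is no longer used; scheduled runs are handled by scheduler.schedule_class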
# timestamps of last execution times
startTime = conf.time_started
@@ -98,7 +90,7 @@ def main ():
sql = db.sql # To-Do replace with the db class
# Upgrade DB if needed
- upgradeDB(db)
+ db.upgradeDB()
#===============================================================================
@@ -110,7 +102,7 @@ def main ():
# update time started
time_started = datetime.datetime.now() # not sure why we need this ...
loop_start_time = timeNow()
- mylog('debug', ['[', timeNow(), '] [MAIN] Stating loop'])
+ mylog('debug', ['[ +++++++ ', timeNow(), '] [MAIN] Starting loop'])
# re-load user configuration and plugins
importConfigs(db)
@@ -153,15 +145,15 @@ def main ():
# check for changes in Internet IP
if last_internet_IP_scan + datetime.timedelta(minutes=3) < time_started:
- cycle = 'internet_IP'
+ conf.cycle = 'internet_IP'
last_internet_IP_scan = time_started
check_internet_IP(db)
# Update vendors once a week
if last_update_vendors + datetime.timedelta(days = 7) < time_started:
last_update_vendors = time_started
- cycle = 'update_vendors'
- mylog('verbose', ['[', timeNow(), '] cycle:',cycle])
+ conf.cycle = 'update_vendors'
+ mylog('verbose', ['[', timeNow(), '] cycle:',conf.cycle])
update_devices_MAC_vendors()
# Execute scheduled or one-off Pholus scan if enabled and run conditions fulfilled
@@ -199,14 +191,14 @@ def main ():
run = nmapSchedule.runScheduleCheck()
if run:
- conf.nmapSchedule.last_run = datetime.datetime.now(conf.tz).replace(microsecond=0)
+ conf.nmapSchedule.last_run = timeNow()
performNmapScan(db, get_all_devices(db))
# Perform a network scan via arp-scan or pihole
if last_network_scan + datetime.timedelta(minutes=conf.SCAN_CYCLE_MINUTES) < time_started:
last_network_scan = time_started
- cycle = 1 # network scan
- mylog('verbose', ['[', timeNow(), '] cycle:',cycle])
+ conf.cycle = 1 # network scan
+ mylog('verbose', ['[', timeNow(), '] cycle:',conf.cycle])
updateState(db,"Scan: Network")
# scan_network()
@@ -261,48 +253,34 @@ def main ():
# clean up the DB once a day
if last_cleanup + datetime.timedelta(hours = 24) < time_started:
last_cleanup = time_started
- cycle = 'cleanup'
- mylog('verbose', ['[', timeNow(), '] cycle:',cycle])
+ conf.cycle = 'cleanup'
+ mylog('verbose', ['[', timeNow(), '] cycle:',conf.cycle])
db.cleanup_database(startTime, conf.DAYS_TO_KEEP_EVENTS, conf.PHOLUS_DAYS_DATA)
# Commit SQL
db.commitDB()
# Final message
- if cycle != "":
- action = str(cycle)
+ if conf.cycle != "":
+ action = str(conf.cycle)
if action == "1":
action = "network_scan"
mylog('verbose', ['[', timeNow(), '] Last action: ', action])
- cycle = ""
- mylog('verbose', ['[', timeNow(), '] cycle:',cycle])
+ conf.cycle = ""
+ mylog('verbose', ['[', timeNow(), '] cycle:',conf.cycle])
# Footer
updateState(db,"Process: Wait")
mylog('verbose', ['[', timeNow(), '] Process: Wait'])
else:
# do something
- cycle = ""
+ conf.cycle = ""
mylog('verbose', ['[', timeNow(), '] [MAIN] waiting to start next loop'])
#loop
time.sleep(5) # wait 5 seconds before the next loop iteration
-
-
-
-
-
-
-
-
-#-------------------------------------------------------------------------------
-#-------------------------------------------------------------------------------
-# Plugins
-#-------------------------------------------------------------------------------
-
-
#===============================================================================
# BEGIN
#===============================================================================
diff --git a/pialert/plugin.py b/pialert/plugin.py
index a9c685ee..ec0904aa 100644
--- a/pialert/plugin.py
+++ b/pialert/plugin.py
@@ -55,7 +55,7 @@ def run_plugin_scripts(db, runType):
print_plugin_info(plugin, ['display_name'])
mylog('debug', [' [Plugins] CMD: ', get_plugin_setting(plugin, "CMD")["value"]])
- execute_plugin(plugin)
+ execute_plugin(db, plugin)
@@ -70,8 +70,9 @@ def get_plugins_configs():
# for root, dirs, files in os.walk(pluginsPath):
dirs = next(os.walk(pluginsPath))[1]
- for d in dirs: # Loop over directories, not files
- pluginsList.append(json.loads(get_file_content(pluginsPath + "/" + d + '/config.json')))
+ for d in dirs: # Loop over directories, not files
+ if not d.startswith( "__" ): # ignore __pycache__
+ pluginsList.append(json.loads(get_file_content(pluginsPath + "/" + d + '/config.json')))
return pluginsList
@@ -126,9 +127,9 @@ def get_plugin_string(props, el):
result = ''
if el in props['localized']:
- for str in props[el]:
- if str['language_code'] == 'en_us':
- result = str['string']
+ for val in props[el]:
+ if val['language_code'] == 'en_us':
+ result = val['string']
if result == '':
result = 'en_us string missing'
@@ -265,7 +266,7 @@ def execute_plugin(db, plugin):
sql.executemany ("""INSERT INTO Plugins_History ("Plugin", "Object_PrimaryID", "Object_SecondaryID", "DateTimeCreated", "DateTimeChanged", "Watched_Value1", "Watched_Value2", "Watched_Value3", "Watched_Value4", "Status" ,"Extra", "UserData", "ForeignKey") VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)""", sqlParams)
db.commitDB()
- process_plugin_events(plugin)
+ process_plugin_events(db, plugin)
# update API endpoints
# update_api(False, ["plugins_events","plugins_objects"]) # TO-DO - remover circular reference
@@ -304,7 +305,7 @@ def plugin_param_from_glob_set(globalSetting):
# Gets the setting value
def get_plugin_setting_value(plugin, function_key):
- resultObj = get_plugin_string(plugin, function_key)
+ resultObj = get_plugin_setting(plugin, function_key)
if resultObj != None:
return resultObj["value"]
@@ -386,7 +387,7 @@ def combine_plugin_objects(old, new):
def process_plugin_events(db, plugin):
sql = db.sql
- global pluginObjects, pluginEvents
+ ##global pluginObjects, pluginEvents
pluginPref = plugin["unique_prefix"]
@@ -449,9 +450,8 @@ def process_plugin_events(db, plugin):
index += 1
-# Update the DB
+ # Update the DB
# ----------------------------
-
# Update the Plugin_Objects
for plugObj in pluginObjects:
@@ -477,7 +477,7 @@ def process_plugin_events(db, plugin):
createdTime = plugObj.changed
# insert only events if they are to be reported on
- if plugObj.status in get_plugin_setting_value(plugin, "REPORT_ON"):
+ if plugObj.status in get_plugin_setting_value(plugin, "REPORT_ON"):
sql.execute ("INSERT INTO Plugins_Events (Plugin, Object_PrimaryID, Object_SecondaryID, DateTimeCreated, DateTimeChanged, Watched_Value1, Watched_Value2, Watched_Value3, Watched_Value4, Status, Extra, UserData, ForeignKey) VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?)", (plugObj.pluginPref, plugObj.primaryId , plugObj.secondaryId , createdTime, plugObj.changed , plugObj.watched1 , plugObj.watched2 , plugObj.watched3 , plugObj.watched4 , plugObj.status , plugObj.extra, plugObj.userData, plugObj.foreignKey ))
diff --git a/pialert/reporting.py b/pialert/reporting.py
index 4b6de43c..8ee72af2 100644
--- a/pialert/reporting.py
+++ b/pialert/reporting.py
@@ -407,9 +407,7 @@ def remove_tag (pText, pTag):
#-------------------------------------------------------------------------------
def send_email (pText, pHTML):
- # Print more info for debugging if LOG_LEVEL == 'debug'
- if conf.LOG_LEVEL == 'debug':
- print_log ('REPORT_TO: ' + hide_email(str(conf.REPORT_TO)) + ' SMTP_USER: ' + hide_email(str(conf.SMTP_USER)))
+ mylog('debug', '[Send Email] REPORT_TO: ' + hide_email(str(conf.REPORT_TO)) + ' SMTP_USER: ' + hide_email(str(conf.SMTP_USER)))
# Compose email
msg = MIMEMultipart('alternative')
@@ -473,7 +471,7 @@ def send_email (pText, pHTML):
mylog('none', [' ERROR: Failed at - ', failedAt])
mylog('none', [' ERROR: Couldn\'t connect to the SMTP server (SMTPServerDisconnected), skipping Email (enable LOG_LEVEL=debug for more logging)'])
- print_log(' DEBUG: Last executed - ' + str(failedAt))
+ mylog('debug', '[Send Email] Last executed - ' + str(failedAt))
#-------------------------------------------------------------------------------
def send_ntfy (_Text):
@@ -548,7 +546,7 @@ def send_webhook (_json, _html):
# execute CURL call
try:
# try running a subprocess
- mylog('debug', curlParams)
+ mylog('debug', ['[send_webhook] curlParams: ', curlParams])
p = subprocess.Popen(curlParams, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
stdout, stderr = p.communicate()
@@ -557,7 +555,7 @@ def send_webhook (_json, _html):
logResult (stdout, stderr) # TO-DO should be changed to mylog
except subprocess.CalledProcessError as e:
# An error occurred, handle it
- mylog('none', [e.output])
+ mylog('none', ['[send_webhook]', e.output])
#-------------------------------------------------------------------------------
def send_apprise (html, text):
@@ -622,7 +620,7 @@ def skip_repeated_notifications (db):
# Skip repeated notifications
# due to strftime overflow --> use "strftime / 60"
- print_log ('Skip Repeated')
+ mylog('verbose','[Skip Repeated Notifications] Skip Repeated')
db.sql.execute ("""UPDATE Events SET eve_PendingAlertEmail = 0
WHERE eve_PendingAlertEmail = 1 AND eve_MAC IN
(
@@ -634,7 +632,7 @@ def skip_repeated_notifications (db):
(strftime('%s','now','localtime')/60 )
)
""" )
- print_log ('Skip Repeated end')
+ mylog('verbose','[Skip Repeated Notifications] Skip Repeated end')
db.commitDB()
diff --git a/pialert/scheduler.py b/pialert/scheduler.py
index b0cbc2ca..2350ded0 100644
--- a/pialert/scheduler.py
+++ b/pialert/scheduler.py
@@ -1,7 +1,7 @@
""" class to manage schedules """
import datetime
-from logger import print_log
+from logger import mylog, print_log
import conf
#-------------------------------------------------------------------------------
@@ -28,11 +28,11 @@ class schedule_class:
# (maybe the following check is unnecessary:)
# if the last run is past the last time we run a scheduled Pholus scan
if nowTime > self.last_next_schedule and self.last_run < self.last_next_schedule:
- print_log(f'Scheduler run for {self.service}: YES')
+ mylog('debug',f'[Scheduler] - Scheduler run for {self.service}: YES')
self.was_last_schedule_used = True
result = True
else:
- print_log(f'Scheduler run for {self.service}: NO')
+ mylog('debug',f'[Scheduler] - Scheduler run for {self.service}: NO')
if self.was_last_schedule_used:
self.was_last_schedule_used = False