added default DB

fixed re-import of config file
Author: Data-Monkey
Date:   2023-06-03 21:42:04 +10:00
parent 007611c429
commit 6bb891f830
6 changed files with 181 additions and 147 deletions
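Note: The config re-import fix in initialise.py (below) replaces a "file newer than last import" comparison with a strict equality check on the file's modification time. A condensed, hypothetical sketch of the new guard, standalone and outside the Pi.Alert codebase:

    import os

    last_imported_mtime = 1.1  # sentinel; no real file has this mtime (see conf.py below)

    def import_config_if_changed(path, do_import):
        """Re-import only when the file's mtime differs from the recorded one."""
        global last_imported_mtime
        mtime = os.path.getmtime(path)
        if mtime == last_imported_mtime:
            return False  # unchanged since the last import -> skip
        last_imported_mtime = mtime
        do_import(path)
        return True

Comparing for equality rather than "older/newer" sidesteps time zone issues, because the stored value is simply the previous reading of the same clock.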

BIN  db/pialert.db  (Executable file → Normal file; binary file not shown)

View File: pialert.py

@@ -13,10 +13,9 @@
 #===============================================================================
 # IMPORTS
 #===============================================================================
-from __future__ import print_function
+#from __future__ import print_function
 import sys
-from collections import namedtuple
 import time
 import datetime
 import multiprocessing
@@ -25,7 +24,7 @@ import multiprocessing
 import conf
 from const import *
 from logger import mylog
-from helper import filePermissions, isNewVersion, timeNow, timeNowTZ, updateState
+from helper import filePermissions, isNewVersion, timeNow, updateState
 from api import update_api
 from networkscan import process_scan, scan_network
 from initialise import importConfigs
@@ -75,6 +74,7 @@ main structure of Pi Alert
 """
 def main ():
+    mylog('debug', ['[MAIN] Setting up ...'])
     conf.time_started = datetime.datetime.now()
     conf.cycle = ""
@@ -109,7 +109,7 @@ def main ():
     # Open DB once and keep open
     # Opening / closing DB frequently actually casues more issues
     db = DB() # instance of class DB
-    db.openDB()
+    db.open()
     sql = db.sql # To-Do replace with the db class

     # Upgrade DB if needed
@@ -119,6 +119,7 @@ def main ():
     #===============================================================================
     # This is the main loop of Pi.Alert
     #===============================================================================
+    mylog('debug', ['[Import Config TEST outside loop] lastImportedConfFile :', conf.lastImportedConfFile])

     while True:
@@ -128,6 +129,7 @@ def main ():
         mylog('debug', '[MAIN] Stating loop')

         # re-load user configuration and plugins
+        mylog('debug', ['[Import Config TEST inside loop] lastImportedConfFile :', conf.lastImportedConfFile])
         importConfigs(db)

         # check if new version is available / only check once an hour
@@ -311,4 +313,5 @@ def main ():
 # BEGIN
 #===============================================================================
 if __name__ == '__main__':
+    mylog('debug', ['[__main__] Welcome to Pi.Alert'])
     sys.exit(main())

View File: conf.py

@@ -10,7 +10,11 @@ userSubnets = []
 mySchedules = [] # bad solution for global - TO-DO
 plugins = [] # bad solution for global - TO-DO
 tz = ''

-lastTimeImported = 0 # Time the config was last imported
+# modified time of the most recently imported config file
+# set to a small value to force import at first run
+lastImportedConfFile = 1.1

 plugins_once_run = False
 newVersionAvailable = False
 time_started = ''
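Note: Why 1.1 is a safe initial value (reasoning added here, not part of the commit): os.path.getmtime() returns seconds since the Unix epoch, so no real config file can carry an mtime of 1.1; the equality check in importConfigs therefore never matches on the first pass, and the initial import always runs.

    import os, tempfile

    # A freshly written file's mtime can never equal the 1.1 sentinel.
    with tempfile.NamedTemporaryFile(delete=False) as f:
        path = f.name
    print(os.path.getmtime(path) == 1.1)  # False -> first import always runs
    os.remove(path)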

View File: const.py

@@ -26,7 +26,11 @@ piholeDhcpleases = '/etc/pihole/dhcp.leases'
 #===============================================================================
 # SQL queries
 #===============================================================================
-sql_devices_all = "select dev_MAC, dev_Name, dev_DeviceType, dev_Vendor, dev_Group, dev_FirstConnection, dev_LastConnection, dev_LastIP, dev_StaticIP, dev_PresentLastScan, dev_LastNotification, dev_NewDevice, dev_Network_Node_MAC_ADDR, dev_Network_Node_port, dev_Icon from Devices"
+sql_devices_all = """select dev_MAC, dev_Name, dev_DeviceType, dev_Vendor, dev_Group,
+                    dev_FirstConnection, dev_LastConnection, dev_LastIP, dev_StaticIP,
+                    dev_PresentLastScan, dev_LastNotification, dev_NewDevice,
+                    dev_Network_Node_MAC_ADDR, dev_Network_Node_port,
+                    dev_Icon from Devices"""
 sql_devices_stats = "SELECT Online_Devices as online, Down_Devices as down, All_Devices as 'all', Archived_Devices as archived, (select count(*) from Devices a where dev_NewDevice = 1 ) as new, (select count(*) from Devices a where dev_Name = '(unknown)' or dev_Name = '(name not found)' ) as unknown from Online_History order by Scan_Date desc limit 1"
 sql_nmap_scan_all = "SELECT * FROM Nmap_Scan"
 sql_pholus_scan_all = "SELECT * FROM Pholus_Scan"
@@ -36,12 +40,12 @@ sql_plugins_objects = "SELECT * FROM Plugins_Objects"
 sql_language_strings = "SELECT * FROM Plugins_Language_Strings"
 sql_plugins_events = "SELECT * FROM Plugins_Events"
 sql_plugins_history = "SELECT * FROM Plugins_History ORDER BY 'Index' DESC"
-sql_new_devices = """SELECT * FROM ( SELECT eve_IP as dev_LastIP, eve_MAC as dev_MAC FROM Events_Devices
-                    WHERE eve_PendingAlertEmail = 1
-                    AND eve_EventType = 'New Device'
-                    ORDER BY eve_DateTime ) t1
-                    LEFT JOIN
-                    (
-                        SELECT dev_Name, dev_MAC as dev_MAC_t2 FROM Devices
-                    ) t2
-                    ON t1.dev_MAC = t2.dev_MAC_t2"""
+sql_new_devices = """SELECT * FROM (
+                        SELECT eve_IP as dev_LastIP, eve_MAC as dev_MAC
+                        FROM Events_Devices
+                        WHERE eve_PendingAlertEmail = 1
+                        AND eve_EventType = 'New Device'
+                        ORDER BY eve_DateTime ) t1
+                    LEFT JOIN
+                        ( SELECT dev_Name, dev_MAC as dev_MAC_t2 FROM Devices) t2
+                    ON t1.dev_MAC = t2.dev_MAC_t2"""
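Note: For context, a sketch of how a constant like sql_new_devices is typically consumed; the database file name and the direct import are assumptions for illustration, not part of this commit:

    import sqlite3
    from const import sql_new_devices  # the query defined above

    con = sqlite3.connect('pialert.db')  # assumed database file
    # Columns: dev_LastIP, dev_MAC from t1, then dev_Name, dev_MAC_t2 from t2
    for dev_LastIP, dev_MAC, dev_Name, dev_MAC_t2 in con.execute(sql_new_devices):
        # dev_Name is None when the LEFT JOIN finds no matching device
        print(dev_MAC, dev_LastIP, dev_Name or '(unknown)')
    con.close()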

View File: database.py

@@ -15,13 +15,17 @@ from helper import json_struc, initOrSetParam, row_to_json, timeNow #, updateSta
 class DB():
+    """
+    DB Class to provide the basic database interactions.
+    Open / Commit / Close / read / write
+    """

     def __init__(self):
         self.sql = None
         self.sql_connection = None

     #-------------------------------------------------------------------------------
-    def openDB (self):
+    def open (self):
         # Check if DB is open
         if self.sql_connection != None :
             mylog('debug','openDB: databse already open')
@@ -44,13 +48,13 @@
         # Commit changes to DB
         self.sql_connection.commit()
         return True

     #-------------------------------------------------------------------------------
     def get_sql_array(self, query):
         if self.sql_connection == None :
             mylog('debug','getQueryArray: databse is not open')
             return

         self.sql.execute(query)
         rows = self.sql.fetchall()
         #self.commitDB()
@@ -69,81 +73,88 @@
     # Cleanup / upkeep database
     #===============================================================================
     def cleanup_database (self, startTime, DAYS_TO_KEEP_EVENTS, PHOLUS_DAYS_DATA):
+        """
+        Cleaning out old records from the tables that don't need to keep all data.
+        """
         # Header
         #updateState(self,"Upkeep: Clean DB")
-        mylog('verbose', ['[', startTime, '] Upkeep Database:' ])
+        mylog('verbose', ['[DB Cleanup] Upkeep Database:' ])

         # Cleanup Online History
-        mylog('verbose', [' Online_History: Delete all but keep latest 150 entries'])
-        self.sql.execute ("""DELETE from Online_History where "Index" not in ( SELECT "Index" from Online_History order by Scan_Date desc limit 150)""")
+        mylog('verbose', ['[DB Cleanup] Online_History: Delete all but keep latest 150 entries'])
+        self.sql.execute ("""DELETE from Online_History where "Index" not in (
+                                SELECT "Index" from Online_History
+                                order by Scan_Date desc limit 150)""")

-        mylog('verbose', [' Optimize Database'])
+        mylog('verbose', ['[DB Cleanup] Optimize Database'])
         # Cleanup Events
-        mylog('verbose', [' Events: Delete all older than '+str(DAYS_TO_KEEP_EVENTS)+' days'])
-        self.sql.execute ("DELETE FROM Events WHERE eve_DateTime <= date('now', '-"+str(DAYS_TO_KEEP_EVENTS)+" day')")
+        mylog('verbose', ['[DB Cleanup] Events: Delete all older than '+str(DAYS_TO_KEEP_EVENTS)+' days'])
+        self.sql.execute ("""DELETE FROM Events
+                             WHERE eve_DateTime <= date('now', '-"+str(DAYS_TO_KEEP_EVENTS)+" day')""")

         # Cleanup Plugin Events History
-        mylog('verbose', [' Plugin Events History: Delete all older than '+str(DAYS_TO_KEEP_EVENTS)+' days'])
-        self.sql.execute ("DELETE FROM Plugins_History WHERE DateTimeChanged <= date('now', '-"+str(DAYS_TO_KEEP_EVENTS)+" day')")
+        mylog('verbose', ['[DB Cleanup] Plugin Events History: Delete all older than '+str(DAYS_TO_KEEP_EVENTS)+' days'])
+        self.sql.execute ("""DELETE FROM Plugins_History
+                             WHERE DateTimeChanged <= date('now', '-"+str(DAYS_TO_KEEP_EVENTS)+" day')""")

         # Cleanup Pholus_Scan
         if PHOLUS_DAYS_DATA != 0:
-            mylog('verbose', [' Pholus_Scan: Delete all older than ' + str(PHOLUS_DAYS_DATA) + ' days'])
-            self.sql.execute ("DELETE FROM Pholus_Scan WHERE Time <= date('now', '-"+ str(PHOLUS_DAYS_DATA) +" day')") # improvement possibility: keep at least N per mac
+            mylog('verbose', ['[DB Cleanup] Pholus_Scan: Delete all older than ' + str(PHOLUS_DAYS_DATA) + ' days'])
+            # improvement possibility: keep at least N per mac
+            self.sql.execute ("""DELETE FROM Pholus_Scan
+                                 WHERE Time <= date('now', '-"+ str(PHOLUS_DAYS_DATA) +" day')""")

         # De-Dupe (de-duplicate - remove duplicate entries) from the Pholus_Scan table
-        mylog('verbose', [' Pholus_Scan: Delete all duplicates'])
+        mylog('verbose', ['[DB Cleanup] Pholus_Scan: Delete all duplicates'])
         self.sql.execute ("""DELETE FROM Pholus_Scan
                              WHERE rowid > (
                                  SELECT MIN(rowid) FROM Pholus_Scan p2
                                  WHERE Pholus_Scan.MAC = p2.MAC
                                  AND Pholus_Scan.Value = p2.Value
                                  AND Pholus_Scan.Record_Type = p2.Record_Type
                              );""")

         # De-Dupe (de-duplicate - remove duplicate entries) from the Nmap_Scan table
         mylog('verbose', [' Nmap_Scan: Delete all duplicates'])
         self.sql.execute ("""DELETE FROM Nmap_Scan
                              WHERE rowid > (
                                  SELECT MIN(rowid) FROM Nmap_Scan p2
                                  WHERE Nmap_Scan.MAC = p2.MAC
                                  AND Nmap_Scan.Port = p2.Port
                                  AND Nmap_Scan.State = p2.State
                                  AND Nmap_Scan.Service = p2.Service
                              );""")

         # Shrink DB
         mylog('verbose', [' Shrink Database'])
         self.sql.execute ("VACUUM;")

         self.commitDB()

     #-------------------------------------------------------------------------------
     def upgradeDB(self):
-        sql = self.sql #TO-DO
+        """
+        Check the current tables in the DB and upgrade them if neccessary
+        """
+        sql = self.sql #TO-DO

         # indicates, if Online_History table is available
         onlineHistoryAvailable = sql.execute("""
             SELECT name FROM sqlite_master WHERE type='table'
             AND name='Online_History';
             """).fetchall() != []

         # Check if it is incompatible (Check if table has all required columns)
         isIncompatible = False

         if onlineHistoryAvailable :
             isIncompatible = sql.execute ("""
             SELECT COUNT(*) AS CNTREC FROM pragma_table_info('Online_History') WHERE name='Archived_Devices'
             """).fetchone()[0] == 0

         # Drop table if available, but incompatible
         if onlineHistoryAvailable and isIncompatible:
             mylog('none','[upgradeDB] Table is incompatible, Dropping the Online_History table')
             sql.execute("DROP TABLE Online_History;")
             onlineHistoryAvailable = False

         if onlineHistoryAvailable == False :
             sql.execute("""
             CREATE TABLE "Online_History" (
                 "Index" INTEGER,
                 "Scan_Date" TEXT,
@@ -152,7 +163,7 @@
                 "All_Devices" INTEGER,
                 "Archived_Devices" INTEGER,
                 PRIMARY KEY("Index" AUTOINCREMENT)
             );
             """)

         # Alter Devices table
@@ -162,9 +173,9 @@ class DB():
             """).fetchone()[0] == 0
         if dev_Network_Node_MAC_ADDR_missing :
             mylog('verbose', ["[upgradeDB] Adding dev_Network_Node_MAC_ADDR to the Devices table"])
             sql.execute("""
             ALTER TABLE "Devices" ADD "dev_Network_Node_MAC_ADDR" TEXT
             """)

         # dev_Network_Node_port column
@@ -173,9 +184,9 @@ class DB():
             """).fetchone()[0] == 0
         if dev_Network_Node_port_missing :
             mylog('verbose', ["[upgradeDB] Adding dev_Network_Node_port to the Devices table"])
             sql.execute("""
             ALTER TABLE "Devices" ADD "dev_Network_Node_port" INTEGER
             """)

         # dev_Icon column
@@ -184,52 +195,52 @@ class DB():
             """).fetchone()[0] == 0
         if dev_Icon_missing :
             mylog('verbose', ["[upgradeDB] Adding dev_Icon to the Devices table"])
             sql.execute("""
             ALTER TABLE "Devices" ADD "dev_Icon" TEXT
             """)

         # indicates, if Settings table is available
         settingsMissing = sql.execute("""
             SELECT name FROM sqlite_master WHERE type='table'
             AND name='Settings';
             """).fetchone() == None

         # Re-creating Settings table
         mylog('verbose', ["[upgradeDB] Re-creating Settings table"])
         if settingsMissing == False:
             sql.execute("DROP TABLE Settings;")
         sql.execute("""
             CREATE TABLE "Settings" (
                 "Code_Name" TEXT,
                 "Display_Name" TEXT,
                 "Description" TEXT,
                 "Type" TEXT,
                 "Options" TEXT,
                 "RegEx" TEXT,
                 "Value" TEXT,
                 "Group" TEXT,
                 "Events" TEXT
             );
             """)

         # indicates, if Pholus_Scan table is available
         pholusScanMissing = sql.execute("""
             SELECT name FROM sqlite_master WHERE type='table'
             AND name='Pholus_Scan';
             """).fetchone() == None

         # if pholusScanMissing == False:
         #     # Re-creating Pholus_Scan table
         #     sql.execute("DROP TABLE Pholus_Scan;")
         #     pholusScanMissing = True

         if pholusScanMissing:
             mylog('verbose', ["[upgradeDB] Re-creating Pholus_Scan table"])
             sql.execute("""
             CREATE TABLE "Pholus_Scan" (
                 "Index" INTEGER,
                 "Info" TEXT,
                 "Time" TEXT,
@@ -239,47 +250,47 @@
                 "Value" TEXT,
                 "Extra" TEXT,
                 PRIMARY KEY("Index" AUTOINCREMENT)
             );
             """)

         # indicates, if Nmap_Scan table is available
         nmapScanMissing = sql.execute("""
             SELECT name FROM sqlite_master WHERE type='table'
             AND name='Nmap_Scan';
             """).fetchone() == None

         # Re-creating Parameters table
         mylog('verbose', ["[upgradeDB] Re-creating Parameters table"])
         sql.execute("DROP TABLE Parameters;")
         sql.execute("""
             CREATE TABLE "Parameters" (
                 "par_ID" TEXT PRIMARY KEY,
                 "par_Value" TEXT
             );
             """)

         # Initialize Parameters if unavailable
         initOrSetParam(self, 'Back_App_State','Initializing')

         # if nmapScanMissing == False:
         #     # Re-creating Nmap_Scan table
         #     sql.execute("DROP TABLE Nmap_Scan;")
         #     nmapScanMissing = True

         if nmapScanMissing:
             mylog('verbose', ["[upgradeDB] Re-creating Nmap_Scan table"])
             sql.execute("""
             CREATE TABLE "Nmap_Scan" (
                 "Index" INTEGER,
                 "MAC" TEXT,
                 "Port" TEXT,
                 "Time" TEXT,
                 "State" TEXT,
                 "Service" TEXT,
                 "Extra" TEXT,
                 PRIMARY KEY("Index" AUTOINCREMENT)
             );
             """)

         # Plugin state
@@ -288,13 +299,13 @@ class DB():
             Plugin TEXT NOT NULL,
             Object_PrimaryID TEXT NOT NULL,
             Object_SecondaryID TEXT NOT NULL,
             DateTimeCreated TEXT NOT NULL,
             DateTimeChanged TEXT NOT NULL,
             Watched_Value1 TEXT NOT NULL,
             Watched_Value2 TEXT NOT NULL,
             Watched_Value3 TEXT NOT NULL,
             Watched_Value4 TEXT NOT NULL,
             Status TEXT NOT NULL,
             Extra TEXT NOT NULL,
             UserData TEXT NOT NULL,
             ForeignKey TEXT NOT NULL,
@@ -308,13 +319,13 @@ class DB():
             Plugin TEXT NOT NULL,
             Object_PrimaryID TEXT NOT NULL,
             Object_SecondaryID TEXT NOT NULL,
             DateTimeCreated TEXT NOT NULL,
             DateTimeChanged TEXT NOT NULL,
             Watched_Value1 TEXT NOT NULL,
             Watched_Value2 TEXT NOT NULL,
             Watched_Value3 TEXT NOT NULL,
             Watched_Value4 TEXT NOT NULL,
             Status TEXT NOT NULL,
             Extra TEXT NOT NULL,
             UserData TEXT NOT NULL,
             ForeignKey TEXT NOT NULL,
@@ -328,40 +339,40 @@ class DB():
             Plugin TEXT NOT NULL,
             Object_PrimaryID TEXT NOT NULL,
             Object_SecondaryID TEXT NOT NULL,
             DateTimeCreated TEXT NOT NULL,
             DateTimeChanged TEXT NOT NULL,
             Watched_Value1 TEXT NOT NULL,
             Watched_Value2 TEXT NOT NULL,
             Watched_Value3 TEXT NOT NULL,
             Watched_Value4 TEXT NOT NULL,
             Status TEXT NOT NULL,
             Extra TEXT NOT NULL,
             UserData TEXT NOT NULL,
             ForeignKey TEXT NOT NULL,
             PRIMARY KEY("Index" AUTOINCREMENT)
         ); """
         sql.execute(sql_Plugins_History)

         # Dynamically generated language strings
         # indicates, if Language_Strings table is available
         languageStringsMissing = sql.execute("""
             SELECT name FROM sqlite_master WHERE type='table'
             AND name='Plugins_Language_Strings';
             """).fetchone() == None

         if languageStringsMissing == False:
             sql.execute("DROP TABLE Plugins_Language_Strings;")

         sql.execute(""" CREATE TABLE IF NOT EXISTS Plugins_Language_Strings(
             "Index" INTEGER,
             Language_Code TEXT NOT NULL,
             String_Key TEXT NOT NULL,
             String_Value TEXT NOT NULL,
             Extra TEXT NOT NULL,
             PRIMARY KEY("Index" AUTOINCREMENT)
         ); """)

         self.commitDB()

     #-------------------------------------------------------------------------------
@@ -369,15 +380,15 @@ class DB():
         mylog('debug',[ '[Database] - get_table_as_json - Query: ', sqlQuery])
         try:
             self.sql.execute(sqlQuery)
             columnNames = list(map(lambda x: x[0], self.sql.description))
             rows = self.sql.fetchall()
         except sqlite3.Error as e:
             mylog('none',[ '[Database] - SQL ERROR: ', e])
             return None

         result = {"data":[]}
         for row in rows:
             tmp = row_to_json(columnNames, row)
             result["data"].append(tmp)
@@ -386,7 +397,7 @@ class DB():
     #-------------------------------------------------------------------------------
     # referece from here: https://codereview.stackexchange.com/questions/241043/interface-class-for-sqlite-databases
     #-------------------------------------------------------------------------------
     def read(self, query, *args):
         """check the query and arguments are aligned and are read only"""
         mylog('debug',[ '[Database] - SELECT Query: ', query, " params: ", args])
@@ -408,7 +419,7 @@ def get_device_stats(db):
     # columns = ["online","down","all","archived","new","unknown"]
     return db.read(sql_devices_stats)

 #-------------------------------------------------------------------------------
 def get_all_devices(db):
     return db.read(sql_devices_all)

 #-------------------------------------------------------------------------------
@@ -418,7 +429,7 @@ def insertOnlineHistory(db, cycle):
     sql = db.sql #TO-DO
     startTime = timeNow()

     # Add to History
     History_All = db.read("SELECT * FROM Devices")
     History_All_Devices = len(History_All)
@@ -428,7 +439,7 @@ def insertOnlineHistory(db, cycle):
     History_Online = db.read("SELECT * FROM CurrentScan WHERE cur_ScanCycle = ? ", cycle)
     History_Online_Devices = len(History_Online)
     History_Offline_Devices = History_All_Devices - History_Archived_Devices - History_Online_Devices

     sql.execute ("INSERT INTO Online_History (Scan_Date, Online_Devices, Down_Devices, All_Devices, Archived_Devices) "+
                  "VALUES ( ?, ?, ?, ?, ?)", (startTime, History_Online_Devices, History_Offline_Devices, History_All_Devices, History_Archived_Devices ) )
     db.commitDB()
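Note: The de-duplication statements in cleanup_database above keep the lowest rowid per duplicate group via a correlated subquery. A self-contained toy illustration (table and data invented for the example):

    import sqlite3

    con = sqlite3.connect(':memory:')
    con.execute("CREATE TABLE Scan (MAC TEXT, Port TEXT)")
    con.executemany("INSERT INTO Scan VALUES (?, ?)",
                    [('aa:bb', '80'), ('aa:bb', '80'), ('aa:bb', '443')])
    # Delete every row whose rowid exceeds the smallest rowid sharing the
    # same key columns - exactly one copy per group survives.
    con.execute("""DELETE FROM Scan
                   WHERE rowid > (
                       SELECT MIN(rowid) FROM Scan p2
                       WHERE Scan.MAC = p2.MAC
                       AND Scan.Port = p2.Port
                   )""")
    print(con.execute("SELECT MAC, Port FROM Scan").fetchall())
    # [('aa:bb', '80'), ('aa:bb', '443')]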

View File: initialise.py

@@ -7,8 +7,8 @@ from pathlib import Path
 import datetime

 import conf
-from const import *
-from helper import collect_lang_strings, timeNow, updateSubnets, initOrSetParam
+from const import fullConfPath
+from helper import collect_lang_strings, updateSubnets, initOrSetParam
 from logger import mylog
 from api import update_api
 from scheduler import schedule_class
@@ -43,15 +43,26 @@ def importConfigs (db):

     sql = db.sql

-    lastTimeImported = 0
-    # get config file
+    # get config file name
     config_file = Path(fullConfPath)

-    # Skip import if last time of import is NEWER than file age
-    if (os.path.getmtime(config_file) < lastTimeImported) :
+    # Only import file if the file was modifed since last import.
+    # this avoids time zone issues as we just compare the previous timestamp to the current time stamp
+    mylog('debug', ['[Import Config] checking config file '])
+    mylog('debug', ['[Import Config] lastImportedConfFile :', conf.lastImportedConfFile])
+    mylog('debug', ['[Import Config] file modified time :', os.path.getmtime(config_file)])
+    if (os.path.getmtime(config_file) == conf.lastImportedConfFile) :
+        mylog('debug', ['[Import Config] skipping config file import'])
         return

+    conf.lastImportedConfFile = os.path.getmtime(config_file)
+    mylog('debug', ['[Import Config] importing config file'])
+
     conf.mySettings = [] # reset settings
     conf.mySettingsSQLsafe = [] # same as above but safe to be passed into a SQL query
@@ -237,6 +248,7 @@ def read_config_file(filename):
     """
     retuns dict on the config file key:value pairs
     """
+    mylog('info', '[Config] reading config file')
     # load the variables from pialert.conf
     code = compile(filename.read_text(), filename.name, "exec")
     confDict = {} # config dictionary
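Note: read_config_file treats pialert.conf as Python source: it compiles the file and executes it so that every top-level assignment lands in a dict. A minimal standalone sketch of the technique (the empty __builtins__ is a hardening choice in this sketch, not necessarily what Pi.Alert does; the example key is illustrative):

    from pathlib import Path

    def read_config_file(filename: Path) -> dict:
        # Each top-level assignment in the file becomes a key in the dict.
        code = compile(filename.read_text(), filename.name, "exec")
        confDict = {}
        exec(code, {"__builtins__": {}}, confDict)
        return confDict

    # A file containing  SCAN_SUBNETS=['192.168.1.0/24 --interface=eth0']
    # yields             {'SCAN_SUBNETS': ['192.168.1.0/24 --interface=eth0']}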