Merge pull request #246 from Data-Monkey/split_it_up

Split it up
Authored by jokob-sk on 2023-06-05 18:23:03 +10:00; committed by GitHub.
7 changed files with 44 additions and 16 deletions

View File

@@ -300,7 +300,7 @@ def main ():
         else:
             # do something
             conf.cycle = ""
-        mylog('verbose', ['[MAIN] waiting to start next loop'])
+            mylog('verbose', ['[MAIN] waiting to start next loop'])

         #loop
         time.sleep(5) # wait for N seconds

View File

@@ -31,7 +31,10 @@ sql_devices_all = """select dev_MAC, dev_Name, dev_DeviceType, dev_Vendor, dev_G
                         dev_PresentLastScan, dev_LastNotification, dev_NewDevice,
                         dev_Network_Node_MAC_ADDR, dev_Network_Node_port,
                         dev_Icon from Devices"""
-sql_devices_stats = "SELECT Online_Devices as online, Down_Devices as down, All_Devices as 'all', Archived_Devices as archived, (select count(*) from Devices a where dev_NewDevice = 1 ) as new, (select count(*) from Devices a where dev_Name = '(unknown)' or dev_Name = '(name not found)' ) as unknown from Online_History order by Scan_Date desc limit 1"
+sql_devices_stats = """SELECT Online_Devices as online, Down_Devices as down, All_Devices as 'all', Archived_Devices as archived,
+                       (select count(*) from Devices a where dev_NewDevice = 1 ) as new,
+                       (select count(*) from Devices a where dev_Name = '(unknown)' or dev_Name = '(name not found)' ) as unknown
+                       from Online_History order by Scan_Date desc limit 1"""
 sql_nmap_scan_all = "SELECT * FROM Nmap_Scan"
 sql_pholus_scan_all = "SELECT * FROM Pholus_Scan"
 sql_events_pending_alert = "SELECT * FROM Events where eve_PendingAlertEmail is not 0"
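
The reformatting of sql_devices_stats is behaviour-identical: the triple-quoted string only introduces whitespace, which SQLite ignores. A minimal usage sketch (not part of this commit; the database path and the direct sqlite3 calls are assumptions) showing the aliased columns coming back by name:

    import sqlite3

    conn = sqlite3.connect("pialert.db")   # path is an assumption
    conn.row_factory = sqlite3.Row         # rows become addressable by column name
    row = conn.execute(sql_devices_stats).fetchone()
    if row is not None:
        print(row["online"], row["down"], row["all"], row["archived"], row["new"], row["unknown"])
    conn.close()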

View File

@@ -33,11 +33,15 @@ class DB():
         mylog('none', '[Database] Opening DB' )
         # Open DB and Cursor
-        self.sql_connection = sqlite3.connect (fullDbPath, isolation_level=None)
-        self.sql_connection.execute('pragma journal_mode=wal') #
-        self.sql_connection.text_factory = str
-        self.sql_connection.row_factory = sqlite3.Row
-        self.sql = self.sql_connection.cursor()
+        try:
+            self.sql_connection = sqlite3.connect (fullDbPath, isolation_level=None)
+            self.sql_connection.execute('pragma journal_mode=wal') #
+            self.sql_connection.text_factory = str
+            self.sql_connection.row_factory = sqlite3.Row
+            self.sql = self.sql_connection.cursor()
+        except sqlite3.Error as e:
+            mylog('none',[ '[Database] - Open DB Error: ', e])

     #-------------------------------------------------------------------------------
     def commitDB (self):
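
Review note: with this change a bad path or unreadable database file is logged instead of crashing the constructor, and WAL journaling lets readers proceed while a write is in flight. One caveat worth flagging: if connect() fails, self.sql is never assigned, so the next query still raises AttributeError. A self-contained sketch of the same open pattern (the open_db helper is hypothetical, with print standing in for mylog):

    import sqlite3

    def open_db(fullDbPath):
        # Autocommit connection, WAL journaling, name-addressable rows;
        # failures are logged rather than propagated (mirrors the hunk above).
        try:
            conn = sqlite3.connect(fullDbPath, isolation_level=None)  # autocommit
            conn.execute('pragma journal_mode=wal')  # readers don't block the writer
            conn.text_factory = str
            conn.row_factory = sqlite3.Row
            return conn, conn.cursor()
        except sqlite3.Error as e:
            print('[Database] - Open DB Error: ', e)
            return None, None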
@@ -421,9 +425,13 @@ class DB():
         mylog('debug',[ '[Database] - Read One: ', query, " params: ", args])
         rows = self.read(query, *args)

         if len(rows) == 1:
             return rows[0]

+        if len(rows) > 1:
+            mylog('none',[ '[Database] - Warning!: query returns multiple rows, only first row is passed on!', query, " params: ", args])
+            return rows[0]
+
+        # empty result set
+        return None
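
The read_one contract after this hunk: exactly one row returns that row, several rows log a warning and return the first, and an empty result set returns None, which callers now have to check for. A hypothetical call site:

    row = db.read_one("SELECT * FROM Devices WHERE dev_MAC = ?", mac)
    if row is None:
        ...  # no matching device: empty result sets come back as None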
@@ -439,18 +447,24 @@ def get_all_devices(db):
 #-------------------------------------------------------------------------------
 #-------------------------------------------------------------------------------
-def insertOnlineHistory(db, cycle):
+def insertOnlineHistory(db):
     sql = db.sql #TO-DO
     startTime = timeNow()

     # Add to History
+    # only run this if the scans have run
+    scanCount = db.read_one("SELECT count(*) FROM CurrentScan")
+    if scanCount[0] == 0 :
+        mylog('debug',[ '[insertOnlineHistory] - nothing to do, currentScan empty'])
+        return 0
+
     History_All = db.read("SELECT * FROM Devices")
     History_All_Devices = len(History_All)
     History_Archived = db.read("SELECT * FROM Devices WHERE dev_Archived = 1")
     History_Archived_Devices = len(History_Archived)
-    History_Online = db.read("SELECT * FROM CurrentScan WHERE cur_ScanCycle = ? ", cycle)
+    History_Online = db.read("SELECT * FROM CurrentScan")
     History_Online_Devices = len(History_Online)
     History_Offline_Devices = History_All_Devices - History_Archived_Devices - History_Online_Devices
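
Side note on the counting here (a suggestion, not something this commit changes): the totals are computed by pulling entire tables into Python and calling len() on the result. Letting SQLite do the counting would avoid materialising every row, e.g.:

    all_devices      = db.read_one("SELECT count(*) FROM Devices")[0]
    archived_devices = db.read_one("SELECT count(*) FROM Devices WHERE dev_Archived = 1")[0]
    online_devices   = db.read_one("SELECT count(*) FROM CurrentScan")[0]
    offline_devices  = all_devices - archived_devices - online_devices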

View File

@@ -88,7 +88,7 @@ def query_MAC_vendor (pMAC):
             grep_output = subprocess.check_output (grep_args)
         except subprocess.CalledProcessError as e:
             # An error occurred, handle it
-            mylog('none', [e.output])
+            mylog('none', ["[Mac Vendor Check] Error: ", e.output])
             grep_output = " There was an error, check logs for details"

     # Return Vendor
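
Tagging the log entry makes grep failures attributable in a busy log. Worth remembering that grep exits non-zero when it simply finds no match, so check_output raises CalledProcessError for "no result" as well as for real errors. A self-contained sketch of the same pattern (lookup_vendor and the OUI file path are assumptions, print stands in for mylog):

    import subprocess

    def lookup_vendor(mac_prefix, oui_file="/usr/share/ieee-data/oui.txt"):
        try:
            # grep exits 1 on "no match", which also lands in the except branch
            return subprocess.check_output(["grep", "-i", mac_prefix, oui_file], text=True)
        except subprocess.CalledProcessError as e:
            print("[Mac Vendor Check] Error: ", e.output)
            return " There was an error, check logs for details"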

View File

@@ -36,8 +36,6 @@ def scan_network (db):
     db.commitDB()

     # arp-scan command
     conf.arpscan_devices = []
     if conf.ENABLE_ARPSCAN:
@@ -117,7 +115,7 @@ def process_scan (db, arpscan_devices = conf.arpscan_devices ):
     # Sessions snapshot
     mylog('verbose','[Process Scan] Inserting scan results into Online_History')
-    insertOnlineHistory(db,conf.cycle)
+    insertOnlineHistory(db)

     # Skip repeated notifications
     mylog('verbose','[Process Scan] Skipping repeated notifications')

View File

@@ -12,14 +12,14 @@ from helper import timeNow, updateState, get_file_content, write_file
 from api import update_api

 #-------------------------------------------------------------------------------
-def run_plugin_scripts(db, runType, plugins = conf.plugins):
+def run_plugin_scripts(db, runType):

     # Header
     updateState(db,"Run: Plugins")

     mylog('debug', ['[Plugins] Check if any plugins need to be executed on run type: ', runType])

-    for plugin in plugins:
+    for plugin in conf.plugins:
         shouldRun = False
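
This signature change matters beyond tidiness: a Python default value is evaluated once, when the def statement runs, so plugins = conf.plugins froze whatever list conf.plugins pointed at during import; reading conf.plugins inside the body sees later reassignments (for example after a settings reload). A minimal illustration with a stand-in conf object:

    from types import SimpleNamespace

    conf = SimpleNamespace(plugins=["a"])

    def stale(plugins=conf.plugins):   # binds the list object that exists right now
        return plugins

    def fresh():
        return conf.plugins            # looked up again on every call

    conf.plugins = ["a", "b"]          # e.g. settings reloaded at runtime
    print(stale())                     # ['a']  - still the old list
    print(fresh())                     # ['a', 'b']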

View File

@@ -27,6 +27,19 @@ def copy_pihole_network (db):
     try:
         sql.execute ("DELETE FROM PiHole_Network")

+        # just for reporting
+        new_devices = []
+        sql.execute ( """SELECT hwaddr, macVendor, lastQuery,
+                            (SELECT name FROM PH.network_addresses
+                             WHERE network_id = id ORDER BY lastseen DESC, ip),
+                            (SELECT ip FROM PH.network_addresses
+                             WHERE network_id = id ORDER BY lastseen DESC, ip)
+                         FROM PH.network
+                         WHERE hwaddr NOT LIKE 'ip-%'
+                           AND hwaddr <> '00:00:00:00:00:00' """)
+        new_devices = sql.fetchall()
+
+        # insert into PiAlert DB
         sql.execute ("""INSERT INTO PiHole_Network (PH_MAC, PH_Vendor, PH_LastQuery,
                             PH_Name, PH_IP)
                         SELECT hwaddr, macVendor, lastQuery,
@@ -47,7 +60,7 @@ def copy_pihole_network (db):
         db.commitDB()

-        mylog('debug',[ '[PiHole Network] - completed - found ',sql.rowcount, ' devices'])
+        mylog('debug',[ '[PiHole Network] - completed - found ', len(new_devices), ' devices'])

     return str(sql.rowcount) != "0"
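
Counting the fetched rows is the right fix: in Python's sqlite3 module, cursor.rowcount only tracks the last INSERT/UPDATE/DELETE and is -1 after a SELECT, so it never told us how many devices the query found. The trailing return still reads sql.rowcount, which at that point refers to the preceding INSERT and so remains meaningful. A quick demonstration of the rowcount behaviour:

    import sqlite3

    cur = sqlite3.connect(":memory:").cursor()
    cur.execute("CREATE TABLE t (x)")
    cur.executemany("INSERT INTO t VALUES (?)", [(1,), (2,), (3,)])
    cur.execute("SELECT * FROM t")
    print(cur.rowcount)         # -1: rowcount is undefined for SELECT statements
    print(len(cur.fetchall()))  # 3: counting the fetched rows is reliable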