PIHOLE to plugin rewrite

This commit is contained in:
Jokob-sk
2023-08-09 07:52:56 +10:00
parent bae7b0400c
commit 0e23c6ac61
17 changed files with 130 additions and 241 deletions

View File

@@ -200,39 +200,9 @@ def main ():
if run:
nmapSchedule.last_run = timeNowTZ()
performNmapScan(db, get_all_devices(db))
# todo replace the scans with plugins
# Perform a network scan via arp-scan or pihole
# if last_network_scan + datetime.timedelta(minutes=conf.SCAN_CYCLE_MINUTES) < loop_start_time:
# last_network_scan = loop_start_time
# conf.cycle = 1 # network scan
# mylog('verbose', ['[MAIN] cycle:',conf.cycle])
# updateState(db,"Scan: Network")
# # scan_network()
# # DEBUG start ++++++++++++++++++++++++++++++++++++++++++++++++++++++
# # Start scan_network as a process
# p = multiprocessing.Process(target=scan_network(db))
# p.start()
# # Wait for a maximum of 3600 seconds (1h) or until process finishes
# p.join(3600)
# # If thread is still active
# if p.is_alive():
# mylog('none', "[MAIN] scan_network running too long - let\'s kill it")
# # Terminate - may not work if process is stuck for good
# p.terminate()
# # OR Kill - will work for sure, no chance for process to finish nicely however
# # p.kill()
# p.join()
# # DEBUG end ++++++++++++++++++++++++++++++++++++++++++++++++++++++
# # Run plugin scripts which are set to run every time after a scan finished
# Run plugin scripts which are set to run every time after a scan finished
if conf.ENABLE_PLUGINS:
run_plugin_scripts(db,'always_after_scan')

View File

@@ -47,7 +47,6 @@ ENABLE_PLUGINS = True
PIALERT_WEB_PROTECTION = False
PIALERT_WEB_PASSWORD = '8d969eef6ecad3c29a3a629280e686cf0c3f5d5a86aff3ca12020c923adc6c92'
INCLUDED_SECTIONS = ['internet', 'new_devices', 'down_devices', 'events', 'ports']
SCAN_CYCLE_MINUTES = 5
DAYS_TO_KEEP_EVENTS = 90
REPORT_DASHBOARD_URL = 'http://pi.alert/'
DIG_GET_IP_ARG = '-4 myip.opendns.com @resolver1.opendns.com'
@@ -107,10 +106,6 @@ DDNS_USER = 'dynu_user'
DDNS_PASSWORD = 'A0000000B0000000C0000000D0000000'
DDNS_UPDATE_URL = 'https://api.dynu.com/nic/update?'
# PiHole
PIHOLE_ACTIVE = False
DHCP_ACTIVE = False
# PHOLUS
PHOLUS_ACTIVE = False
PHOLUS_TIMEOUT = 20

View File

@@ -62,10 +62,12 @@ def save_scanned_devices (db):
if check_IP_format(local_ip) == '':
local_ip = '0.0.0.0'
# Check if local mac has been detected with other methods
sql.execute (f"SELECT COUNT(*) FROM CurrentScan WHERE cur_MAC = '{local_mac}'")
if sql.fetchone()[0] == 0 :
sql.execute (f"""INSERT INTO CurrentScan (cur_ScanCycle, cur_MAC, cur_IP, cur_Vendor, cur_ScanMethod) VALUES ( 1, '{local_mac}', '{local_ip}', Null, 'local_MAC') """)
# Proceed if variable contains valid MAC
if check_mac_or_internet(local_mac):
# Check if local mac has been detected with other methods
sql.execute (f"SELECT COUNT(*) FROM CurrentScan WHERE cur_MAC = '{local_mac}'")
if sql.fetchone()[0] == 0 :
sql.execute (f"""INSERT INTO CurrentScan (cur_ScanCycle, cur_MAC, cur_IP, cur_Vendor, cur_ScanMethod) VALUES ( 1, '{local_mac}', '{local_ip}', Null, 'local_MAC') """)
#-------------------------------------------------------------------------------
def print_scan_stats (db):
@@ -415,3 +417,16 @@ def update_devices_names (db):
# update names of devices which we were able to resolve
sql.executemany ("UPDATE Devices SET dev_Name = ? WHERE dev_MAC = ? ", recordsToUpdate )
db.commitDB()
#-------------------------------------------------------------------------------
# Check if the variable contains a valid MAC address or "Internet"
def check_mac_or_internet(input_str):
    """Return True when input_str is the literal 'Internet' (case-insensitive)
    or a valid MAC address (six hex pairs separated by ':' or '-')."""
    if input_str.lower() == 'internet':
        return True
    # Six hex octets, ':' or '-' separated, e.g. aa:bb:cc:dd:ee:ff
    return re.match(r'^([0-9A-Fa-f]{2}[:-]){5}([0-9A-Fa-f]{2})$', input_str) is not None

View File

@@ -90,8 +90,7 @@ def importConfigs (db):
conf.PLUGINS_KEEP_HIST = ccd('PLUGINS_KEEP_HIST', 10000 , c_d, 'Keep history entries', 'integer', '', 'General')
conf.PIALERT_WEB_PROTECTION = ccd('PIALERT_WEB_PROTECTION', False , c_d, 'Enable logon', 'boolean', '', 'General')
conf.PIALERT_WEB_PASSWORD = ccd('PIALERT_WEB_PASSWORD', '8d969eef6ecad3c29a3a629280e686cf0c3f5d5a86aff3ca12020c923adc6c92' , c_d, 'Logon password', 'readonly', '', 'General')
conf.INCLUDED_SECTIONS = ccd('INCLUDED_SECTIONS', ['internet', 'new_devices', 'down_devices', 'events', 'ports'] , c_d, 'Notify on', 'text.multiselect', "['internet', 'new_devices', 'down_devices', 'events', 'ports', 'plugins']", 'General')
conf.SCAN_CYCLE_MINUTES = ccd('SCAN_CYCLE_MINUTES', 5 , c_d, 'Scan cycle delay (m)', 'integer', '', 'General')
conf.INCLUDED_SECTIONS = ccd('INCLUDED_SECTIONS', ['internet', 'new_devices', 'down_devices', 'events', 'ports'] , c_d, 'Notify on', 'text.multiselect', "['internet', 'new_devices', 'down_devices', 'events', 'ports', 'plugins']", 'General')
conf.REPORT_DASHBOARD_URL = ccd('REPORT_DASHBOARD_URL', 'http://pi.alert/' , c_d, 'PiAlert URL', 'text', '', 'General')
conf.DIG_GET_IP_ARG = ccd('DIG_GET_IP_ARG', '-4 myip.opendns.com @resolver1.opendns.com' , c_d, 'DIG arguments', 'text', '', 'General')
conf.UI_LANG = ccd('UI_LANG', 'English' , c_d, 'Language Interface', 'text.select', "['English', 'German', 'Spanish']", 'General')
@@ -155,10 +154,6 @@ def importConfigs (db):
conf.DDNS_PASSWORD = ccd('DDNS_PASSWORD', 'A0000000B0000000C0000000D0000000' , c_d, 'DynDNS password', 'password', '', 'DynDNS')
conf.DDNS_UPDATE_URL = ccd('DDNS_UPDATE_URL', 'https://api.dynu.com/nic/update?' , c_d, 'DynDNS update URL', 'text', '', 'DynDNS')
# PiHole
conf.PIHOLE_ACTIVE = ccd('PIHOLE_ACTIVE', False, c_d, 'Enable PiHole mapping', 'boolean', '', 'PIHOLE')
conf.DHCP_ACTIVE = ccd('DHCP_ACTIVE', False , c_d, 'Enable PiHole DHCP', 'boolean', '', 'PIHOLE')
# PHOLUS
conf.PHOLUS_ACTIVE = ccd('PHOLUS_ACTIVE', False , c_d, 'Enable Pholus scans', 'boolean', '', 'Pholus')
conf.PHOLUS_TIMEOUT = ccd('PHOLUS_TIMEOUT', 20 , c_d, 'Pholus timeout', 'integer', '', 'Pholus')

View File

@@ -2,7 +2,6 @@
import conf
from scanners.pihole import copy_pihole_network, read_DHCP_leases
from database import insertOnlineHistory
from device import create_new_devices, print_scan_stats, save_scanned_devices, update_devices_data_from_scan, update_devices_names
from helper import timeNowTZ
@@ -15,32 +14,6 @@ from reporting import skip_repeated_notifications
# SCAN NETWORK
#===============================================================================
def scan_network (db):
    """
    Run the enabled legacy network scanners (Pi-hole network-table copy and
    Pi-hole DHCP lease import), committing the DB after each one.
    """
    sql = db.sql #TO-DO (kept for parity; unused below)
    # Header
    # updateState(db,"Scan: Network") was moved to the main loop
    mylog('verbose', ['[Network Scan] Scan Devices:' ])
    db.commitDB()

    # Each entry: (enabled flag, start-log message, scanner callable) — order matters
    methods = (
        (conf.PIHOLE_ACTIVE, '[Network Scan] Pi-hole start',     copy_pihole_network),
        (conf.DHCP_ACTIVE,   '[Network Scan] DHCP Leases start', read_DHCP_leases),
    )
    for enabled, start_msg, scanner in methods:
        if enabled:
            mylog('verbose', start_msg)
            scanner(db)
            db.commitDB()
def process_scan (db):
# Load current scan data

View File

@@ -253,7 +253,7 @@ def execute_plugin(db, plugin):
# try attaching the sqlite DB
try:
sql.execute ("ATTACH DATABASE '"+ fullSqlitePath +"' AS PH")
sql.execute ("ATTACH DATABASE '"+ fullSqlitePath +"' AS EXTERNAL")
except sqlite3.Error as e:
mylog('none',[ '[Plugin] - ATTACH DATABASE failed with SQL ERROR: ', e])

View File

@@ -11,7 +11,6 @@ from logger import append_line_to_file, mylog
from const import logPath
# need to find a better way to deal with settings !
#global DDNS_ACTIVE, DDNS_DOMAIN, DDNS_UPDATE_URL, DDNS_USER, DDNS_PASSWORD

View File

@@ -1,97 +0,0 @@
""" module to import db and leases from PiHole """
# TODO remove this file in the future
import sqlite3
import conf
from logger import mylog
# Default path to the Pi-hole dnsmasq DHCP leases file
piholeDhcpleases = '/etc/pihole/dhcp.leases'
# Default path to the Pi-hole FTL SQLite database
piholeDB = '/etc/pihole/pihole-FTL.db'
#-------------------------------------------------------------------------------
def copy_pihole_network (db):
    """
    Attach the Pi-hole FTL database and copy its network table into the
    PiAlert PiHole_Network table, defaulting missing names to '(unknown)'.

    Returns True when the last executed statement affected at least one row.
    """
    cursor = db.sql # TO-DO
    # Attach the Pi-hole DB under the schema name PH
    mylog('debug', '[PiHole Network] - attach PiHole DB')
    try:
        cursor.execute ("ATTACH DATABASE '"+ piholeDB +"' AS PH")
    except sqlite3.Error as e:
        mylog('none',[ '[PiHole Network] - SQL ERROR: ', e])

    # Mirror Pi-hole's network table, skipping placeholder ('ip-%') and all-zero MACs
    try:
        cursor.execute ("DELETE FROM PiHole_Network")
        # fetched only so the device count can be logged at the end
        found_devices = []
        cursor.execute ( """SELECT hwaddr, macVendor, lastQuery,
                            (SELECT name FROM PH.network_addresses
                             WHERE network_id = id ORDER BY lastseen DESC, ip),
                            (SELECT ip FROM PH.network_addresses
                             WHERE network_id = id ORDER BY lastseen DESC, ip)
                        FROM PH.network
                        WHERE hwaddr NOT LIKE 'ip-%'
                          AND hwaddr <> '00:00:00:00:00:00' """)
        found_devices = cursor.fetchall()

        # Insert into the PiAlert DB
        cursor.execute ("""INSERT INTO PiHole_Network (PH_MAC, PH_Vendor, PH_LastQuery,
                            PH_Name, PH_IP)
                        SELECT hwaddr, macVendor, lastQuery,
                            (SELECT name FROM PH.network_addresses
                             WHERE network_id = id ORDER BY lastseen DESC, ip),
                            (SELECT ip FROM PH.network_addresses
                             WHERE network_id = id ORDER BY lastseen DESC, ip)
                        FROM PH.network
                        WHERE hwaddr NOT LIKE 'ip-%'
                          AND hwaddr <> '00:00:00:00:00:00' """)
        cursor.execute ("""UPDATE PiHole_Network SET PH_Name = '(unknown)'
                        WHERE PH_Name IS NULL OR PH_Name = '' """)
        # Detach the Pi-hole DB again
        cursor.execute ("DETACH PH")
    except sqlite3.Error as e:
        mylog('none',[ '[PiHole Network] - SQL ERROR: ', e])

    db.commitDB()
    mylog('debug',[ '[PiHole Network] - completed - found ', len(found_devices), ' devices'])
    return str(cursor.rowcount) != "0"
#-------------------------------------------------------------------------------
#-------------------------------------------------------------------------------
def read_DHCP_leases (db):
    """
    Parse the Pi-hole dhcp.leases file and bulk-insert every well-formed
    (5-column) record into the DHCP_Leases table.

    Returns True when the leases file contained at least one line.
    """
    mylog('debug', '[PiHole DHCP] - read DHCP_Leases file')
    # Bugfix #1 - dhcp.leases: lines with different number of columns (5 col)
    rows = []
    found_any = False
    with open(piholeDhcpleases, 'r') as leases_file:
        for raw_line in leases_file:
            found_any = True
            fields = raw_line.rstrip().split()
            # keep only complete 5-field lease entries
            if len(fields) == 5:
                rows.append(fields)

    # Insert into the PiAlert table in one batch
    db.sql.executemany ("""INSERT INTO DHCP_Leases (DHCP_DateTime, DHCP_MAC,
                            DHCP_IP, DHCP_Name, DHCP_MAC2)
                        VALUES (?, ?, ?, ?, ?)
                        """, rows)
    db.commitDB()
    mylog('debug', ['[PiHole DHCP] - completed - added ',len(rows), ' devices.'])
    return found_any