more cleanup and removed files.py module again

Data-Monkey
2023-05-28 16:10:58 +10:00
parent a71f16ee37
commit 7177cdd51d
15 changed files with 151 additions and 129 deletions

View File

@@ -27,10 +27,10 @@ from const import *
 from logger import mylog
 from helper import filePermissions, isNewVersion, timeNow, timeNowTZ, updateState
 from api import update_api
-from networkscan import scan_network
+from networkscan import process_scan, scan_network
 from initialise import importConfigs
 from mac_vendor import update_devices_MAC_vendors
-from database import DB, get_all_devices, sql_new_devices
+from database import DB, get_all_devices
 from reporting import check_and_run_event, send_notifications
 from plugin import run_plugin_scripts
@@ -145,7 +145,7 @@ def main ():
         check_and_run_event(db)

         # Update API endpoints
-        update_api()
+        update_api(db)

         # proceed if 1 minute passed
         if last_scan_run + datetime.timedelta(minutes=1) < loop_start_time :
@@ -252,6 +252,10 @@ def main ():
         if conf.ENABLE_PLUGINS:
             run_plugin_scripts(db,'always_after_scan')

+        # --------------------------------------------------
+        # process all the scanned data into new devices
+        mylog('debug', "[MAIN] start processing scan results")
+        process_scan(db, conf.arpscan_devices)

         # Reporting
         if conf.cycle in conf.check_report:
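The main loop now separates collection from processing: scan_network() stores the arp-scan results in conf.arpscan_devices, and the new process_scan() turns them into devices, events and sessions before the API files are rewritten. A minimal sketch of one cycle under that split, using only names that appear in this commit (the real main() wraps this in scheduling and plugin hooks):

import conf
from networkscan import scan_network, process_scan
from api import update_api

def one_cycle(db):
    scan_network(db)                        # collect: fills conf.arpscan_devices as a side effect
    process_scan(db, conf.arpscan_devices)  # process: create devices, events, sessions
    update_api(db)                          # publish: rewrite the JSON files under front/api/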

View File

@@ -3,33 +3,27 @@ import json

 # pialert modules
 import conf
-from const import pialertPath
+from const import (apiPath, sql_devices_all, sql_nmap_scan_all, sql_pholus_scan_all, sql_events_pending_alert,
+                   sql_settings, sql_plugins_events, sql_plugins_history, sql_plugins_objects, sql_language_strings)
 from logger import mylog
-from files import write_file
-from database import *
+from helper import write_file

 apiEndpoints = []

 #===============================================================================
 # API
 #===============================================================================
-def update_api(isNotification = False, updateOnlyDataSources = []):
-    mylog('verbose', [' [API] Update API not doing anything for now !'])
-    return
-    folder = pialertPath + '/front/api/'
-
-    if isNotification:
-        # Update last notification alert in all formats
-        mylog('verbose', [' [API] Updating notification_* files in /front/api'])
-        write_file(folder + 'notification_text.txt' , mail_text)
-        write_file(folder + 'notification_text.html' , mail_html)
-        write_file(folder + 'notification_json_final.json' , json.dumps(json_final))
+def update_api(db, isNotification = False, updateOnlyDataSources = []):
+    mylog('verbose', ['[API] Update API starting'])
+    # return
+    folder = apiPath
+
+    # update notifications moved to reporting send_api()

     # Save plugins
     if conf.ENABLE_PLUGINS:
-        write_file(folder + 'plugins.json' , json.dumps({"data" : plugins}))
+        write_file(folder + 'plugins.json' , json.dumps({"data" : conf.plugins}))

     # prepare database tables we want to expose
     dataSourcesSQLs = [
@@ -50,19 +44,19 @@ def update_api(isNotification = False, updateOnlyDataSources = []):
         if updateOnlyDataSources == [] or dsSQL[0] in updateOnlyDataSources:
-            api_endpoint_class(dsSQL[1], folder + 'table_' + dsSQL[0] + '.json')
+            api_endpoint_class(db, dsSQL[1], folder + 'table_' + dsSQL[0] + '.json')

 #-------------------------------------------------------------------------------
 class api_endpoint_class:
-    def __init__(self, db, path):
+    def __init__(self, db, query, path):
         global apiEndpoints
         self.db = db
-        self.sql = db.sql
-        self.jsonData = db.get_table_as_json(self.sql).json
+        self.query = query
+        self.jsonData = db.get_table_as_json(self.query).json
         self.path = path
         self.fileName = path.split('/')[-1]
         self.hash = hash(json.dumps(self.jsonData))
@@ -76,7 +70,7 @@ class api_endpoint_class:
         # search previous endpoint states to check if API needs updating
         for endpoint in apiEndpoints:
             # match sql and API endpoint path
-            if endpoint.sql == self.sql and endpoint.path == self.path:
+            if endpoint.query == self.query and endpoint.path == self.path:
                 found = True
                 if endpoint.hash != self.hash:
                     changed = True
@@ -87,7 +81,7 @@ class api_endpoint_class:
         # check if API endpoints have changed or if it's a new one
         if not found or changed:
-            mylog('verbose', [f' [API] Updating {self.fileName} file in /front/api'])
+            mylog('verbose', [f'[API] Updating {self.fileName} file in /front/api'])
             write_file(self.path, json.dumps(self.jsonData))
@@ -98,5 +92,5 @@ class api_endpoint_class:
                 # update hash
                 apiEndpoints[changedIndex].hash = self.hash
         else:
-            mylog('info', [f' [API] ERROR Updating {self.fileName}'])
+            mylog('info', [f'[API] ERROR Updating {self.fileName}'])
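api_endpoint_class is at heart a hash-based write cache: serialize the query result, hash it, and touch the file on disk only when the payload changed since the last pass. A standalone reduction of the same idea (sort_keys is added here for a stable serialization; like the class above, Python's hash() is only stable within one process):

import json

_endpoint_hashes = {}  # path -> hash of the last payload written

def update_endpoint(path, data):
    digest = hash(json.dumps(data, sort_keys=True))
    if _endpoint_hashes.get(path) == digest:
        return False  # unchanged - skip the disk write
    with open(path, 'w', encoding='utf-8') as f:
        json.dump(data, f)
    _endpoint_hashes[path] = digest
    return True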

View File

@@ -16,7 +16,8 @@ newVersionAvailable = False
 time_started = ''
 check_report = []
 log_timestamp = 0
+arpscan_devices = []

 # ACTUAL CONFIGURATION ITEMS set to defaults

View File

@@ -12,10 +12,36 @@ dbPath = '/db/pialert.db'
 pluginsPath = pialertPath + '/front/plugins'
 logPath = pialertPath + '/front/log'
+apiPath = pialertPath + '/front/api/'
 fullConfPath = pialertPath + confPath
 fullDbPath = pialertPath + dbPath
 fullPholusPath = pialertPath + '/pholus/pholus3.py'
 vendorsDB = '/usr/share/arp-scan/ieee-oui.txt'
 piholeDB = '/etc/pihole/pihole-FTL.db'
 piholeDhcpleases = '/etc/pihole/dhcp.leases'
+
+#===============================================================================
+# SQL queries
+#===============================================================================
+sql_devices_all = "select dev_MAC, dev_Name, dev_DeviceType, dev_Vendor, dev_Group, dev_FirstConnection, dev_LastConnection, dev_LastIP, dev_StaticIP, dev_PresentLastScan, dev_LastNotification, dev_NewDevice, dev_Network_Node_MAC_ADDR, dev_Network_Node_port, dev_Icon from Devices"
+sql_devices_stats = "SELECT Online_Devices as online, Down_Devices as down, All_Devices as 'all', Archived_Devices as archived, (select count(*) from Devices a where dev_NewDevice = 1 ) as new, (select count(*) from Devices a where dev_Name = '(unknown)' or dev_Name = '(name not found)' ) as unknown from Online_History order by Scan_Date desc limit 1"
+sql_nmap_scan_all = "SELECT * FROM Nmap_Scan"
+sql_pholus_scan_all = "SELECT * FROM Pholus_Scan"
+sql_events_pending_alert = "SELECT * FROM Events where eve_PendingAlertEmail is not 0"
+sql_settings = "SELECT * FROM Settings"
+sql_plugins_objects = "SELECT * FROM Plugins_Objects"
+sql_language_strings = "SELECT * FROM Plugins_Language_Strings"
+sql_plugins_events = "SELECT * FROM Plugins_Events"
+sql_plugins_history = "SELECT * FROM Plugins_History ORDER BY 'Index' DESC"
+sql_new_devices = """SELECT * FROM ( SELECT eve_IP as dev_LastIP, eve_MAC as dev_MAC FROM Events_Devices
+                        WHERE eve_PendingAlertEmail = 1
+                        AND eve_EventType = 'New Device'
+                        ORDER BY eve_DateTime ) t1
+                        LEFT JOIN
+                        (
+                            SELECT dev_Name, dev_MAC as dev_MAC_t2 FROM Devices
+                        ) t2
+                        ON t1.dev_MAC = t2.dev_MAC_t2"""
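With the query strings living in const.py, any module can build an export without importing the DB layer. Roughly how the API code consumes them (a sketch; db is an opened database.DB instance as constructed in main(), and get_table_as_json is its existing method):

import json
from const import apiPath, sql_devices_all
from helper import write_file

def export_devices(db):
    data = db.get_table_as_json(sql_devices_all).json
    write_file(apiPath + 'table_devices.json', json.dumps(data))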

View File

@@ -3,7 +3,7 @@
 import sqlite3

 # pialert modules
-from const import fullDbPath
+from const import fullDbPath, sql_devices_stats, sql_devices_all
 from logger import mylog
 from helper import json_struc, initOrSetParam, row_to_json, timeNow #, updateState
@@ -11,28 +11,7 @@ from helper import json_struc, initOrSetParam, row_to_json, timeNow #, updateState

-#===============================================================================
-# SQL queries
-#===============================================================================
-sql_devices_all = "select dev_MAC, dev_Name, dev_DeviceType, dev_Vendor, dev_Group, dev_FirstConnection, dev_LastConnection, dev_LastIP, dev_StaticIP, dev_PresentLastScan, dev_LastNotification, dev_NewDevice, dev_Network_Node_MAC_ADDR, dev_Network_Node_port, dev_Icon from Devices"
-sql_devices_stats = "SELECT Online_Devices as online, Down_Devices as down, All_Devices as 'all', Archived_Devices as archived, (select count(*) from Devices a where dev_NewDevice = 1 ) as new, (select count(*) from Devices a where dev_Name = '(unknown)' or dev_Name = '(name not found)' ) as unknown from Online_History order by Scan_Date desc limit 1"
-sql_nmap_scan_all = "SELECT * FROM Nmap_Scan"
-sql_pholus_scan_all = "SELECT * FROM Pholus_Scan"
-sql_events_pending_alert = "SELECT * FROM Events where eve_PendingAlertEmail is not 0"
-sql_settings = "SELECT * FROM Settings"
-sql_plugins_objects = "SELECT * FROM Plugins_Objects"
-sql_language_strings = "SELECT * FROM Plugins_Language_Strings"
-sql_plugins_events = "SELECT * FROM Plugins_Events"
-sql_plugins_history = "SELECT * FROM Plugins_History ORDER BY 'Index' DESC"
-sql_new_devices = """SELECT * FROM ( SELECT eve_IP as dev_LastIP, eve_MAC as dev_MAC FROM Events_Devices
-                        WHERE eve_PendingAlertEmail = 1
-                        AND eve_EventType = 'New Device'
-                        ORDER BY eve_DateTime ) t1
-                        LEFT JOIN
-                        (
-                            SELECT dev_Name, dev_MAC as dev_MAC_t2 FROM Devices
-                        ) t2
-                        ON t1.dev_MAC = t2.dev_MAC_t2"""

 class DB():
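The point of the move: const.py imports nothing from the rest of pialert, so it is a safe leaf that both database.py and api.py can depend on, which is what lets api.py drop `from database import *`. The dependency direction after this commit, as a comment sketch:

# const.py     <- leaf: no pialert imports, holds paths + SQL strings
# database.py  <- imports const (fullDbPath, sql_devices_all, ...)
# api.py       <- imports const, is handed an open DB instance instead of importing database
# helper.py    <- now hosts write_file/get_file_content, replacing files.py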

View File

@@ -1,37 +0,0 @@
-import io
-import sys
-
-#-------------------------------------------------------------------------------
-def write_file (pPath, pText):
-    # Write the text depending using the correct python version
-    if sys.version_info < (3, 0):
-        file = io.open (pPath , mode='w', encoding='utf-8')
-        file.write ( pText.decode('unicode_escape') )
-        file.close()
-    else:
-        file = open (pPath, 'w', encoding='utf-8')
-        if pText is None:
-            pText = ""
-        file.write (pText)
-        file.close()
-
-#-------------------------------------------------------------------------------
-def get_file_content(path):
-    f = open(path, 'r')
-    content = f.read()
-    f.close()
-    return content
-
-#-------------------------------------------------------------------------------
-def read_config_file(filename):
-    """
-    retuns dict on the config file key:value pairs
-    """
-    # load the variables from pialert.conf
-    code = compile(filename.read_text(), filename.name, "exec")
-    confDict = {}  # config dictionary
-    exec(code, {"__builtins__": {}}, confDict)
-    return confDict

View File

@@ -1,5 +1,7 @@
 """ Collection of generic functions to support Pi.Alert """
+import io
+import sys
 import datetime
 import os
 import re
@@ -14,7 +16,6 @@ import requests
 import conf
 from const import *
 from logger import mylog, logResult
-# from api import update_api # to avoid circular reference
@@ -297,3 +298,25 @@ class json_struc:

+#-------------------------------------------------------------------------------
+def get_file_content(path):
+    f = open(path, 'r')
+    content = f.read()
+    f.close()
+    return content
+
+#-------------------------------------------------------------------------------
+def write_file (pPath, pText):
+    # Write the text using the correct python version
+    if sys.version_info < (3, 0):
+        file = io.open (pPath , mode='w', encoding='utf-8')
+        file.write ( pText.decode('unicode_escape') )
+        file.close()
+    else:
+        file = open (pPath, 'w', encoding='utf-8')
+        if pText is None:
+            pText = ""
+        file.write (pText)
+        file.close()
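A quick round-trip of the relocated helpers; runnable from inside the pialert source tree, with an illustrative path:

from helper import write_file, get_file_content

write_file('/tmp/pialert_demo.txt', 'hello')   # a None payload would be written as ''
assert get_file_content('/tmp/pialert_demo.txt') == 'hello'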

View File

@@ -10,7 +10,7 @@ import conf
 from const import *
 from helper import collect_lang_strings, timeNow, updateSubnets, initOrSetParam
 from logger import mylog
-from files import read_config_file
+from api import update_api
 from scheduler import schedule_class
 from plugin import get_plugins_configs, print_plugin_info
@@ -148,9 +148,6 @@ def importConfigs (db):
     # API
     conf.API_CUSTOM_SQL = ccd('API_CUSTOM_SQL', 'SELECT * FROM Devices WHERE dev_PresentLastScan = 0' , c_d, 'Custom endpoint', 'text', '', 'API')

-    # Prepare scheduler
-    #global tz, mySchedules, plugins
-
     # Init timezone in case it changed
     conf.tz = timezone(conf.TIMEZONE)
@@ -228,7 +225,20 @@ def importConfigs (db):
     db.commitDB()

     # update only the settings datasource
-    # update_api(False, ["settings"])
-    # TO DO this creates a circular reference between API and HELPER !
+    update_api(db, False, ["settings"])
+    # TO DO this creates a circular reference between API and HELPER !

     mylog('info', '[Config] Imported new config')

+#-------------------------------------------------------------------------------
+def read_config_file(filename):
+    """
+    returns a dict of the config file's key:value pairs
+    """
+    # load the variables from pialert.conf
+    code = compile(filename.read_text(), filename.name, "exec")
+    confDict = {}  # config dictionary
+    exec(code, {"__builtins__": {}}, confDict)
+    return confDict
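read_config_file treats pialert.conf as plain Python assignments executed with builtins stripped out. A self-contained illustration of the same mechanism (file name and keys are made up for the demo):

from pathlib import Path

conf_file = Path('/tmp/pialert_demo.conf')
conf_file.write_text("ENABLE_ARPSCAN = True\nSCAN_SUBNETS = ['192.168.1.0/24 --interface=eth0']\n")

code = compile(conf_file.read_text(), conf_file.name, "exec")
confDict = {}
exec(code, {"__builtins__": {}}, confDict)   # assignments land in confDict
print(confDict["ENABLE_ARPSCAN"])            # True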

View File

@@ -9,11 +9,9 @@ from const import *
 #-------------------------------------------------------------------------------
 # duplication from helper to avoid a circular import
 #-------------------------------------------------------------------------------
-def timeNowTZ():
-    if conf.tz == '':
-        return datetime.datetime.now().replace(microsecond=0)
-    else:
-        return datetime.datetime.now(conf.tz).replace(microsecond=0)
+def timeNow():
+    return datetime.datetime.now().replace(microsecond=0)

 #-------------------------------------------------------------------------------
 debugLevels = [
@@ -38,7 +36,7 @@ def mylog(requestedDebugLevel, n):

 #-------------------------------------------------------------------------------
 def file_print (*args):
-    result = timeNowTZ().strftime ('%H:%M:%S') + ' '
+    result = timeNow().strftime ('%H:%M:%S') + ' '
     for arg in args:
         result += str(arg)

View File

@@ -36,14 +36,13 @@ def scan_network (db):
     db.commitDB()

-    # ScanCycle data
-    cycle_interval = scanCycle_data['cic_EveryXmin']
-
     # arp-scan command
-    arpscan_devices = []
+    conf.arpscan_devices = []
     if conf.ENABLE_ARPSCAN:
         mylog('verbose','[Network Scan] arp-scan start')
-        arpscan_devices = execute_arpscan (conf.userSubnets)
+        conf.arpscan_devices = execute_arpscan (conf.userSubnets)
         mylog('verbose','[Network Scan] arp-scan ends')

     # Pi-hole method
@@ -59,51 +58,69 @@
     db.commitDB()

+
+def process_scan (db, arpscan_devices = conf.arpscan_devices):
+    # Query ScanCycle properties
+    scanCycle_data = query_ScanCycle_Data (db, True)
+    if scanCycle_data is None:
+        mylog('none', ['\n'])
+        mylog('none', ['[Process Scan] *************** ERROR ***************'])
+        mylog('none', ['[Process Scan] ScanCycle %s not found' % conf.cycle ])
+        mylog('none', ['[Process Scan] Exiting...\n'])
+        return False
+
+    db.commitDB()
+
+    # ScanCycle data
+    cycle_interval = scanCycle_data['cic_EveryXmin']
+
     # Load current scan data
-    mylog('verbose','[Network Scan] Processing scan results')
+    mylog('verbose','[Process Scan] Processing scan results')
     save_scanned_devices (db, arpscan_devices, cycle_interval)

     # Print stats
-    mylog('none','[Network Scan] Print Stats')
+    mylog('none','[Process Scan] Print Stats')
     print_scan_stats(db)
-    mylog('none','[Network Scan] Stats end')
+    mylog('none','[Process Scan] Stats end')

     # Create Events
-    mylog('verbose','[Network Scan] Updating DB Info')
-    mylog('verbose','[Network Scan] Sessions Events (connect / discconnect)')
+    mylog('verbose','[Process Scan] Updating DB Info')
+    mylog('verbose','[Process Scan] Sessions Events (connect / disconnect)')
     insert_events(db)

     # Create New Devices
     # after create events -> avoid 'connection' event
-    mylog('verbose','[Network Scan] Creating new devices')
+    mylog('verbose','[Process Scan] Creating new devices')
     create_new_devices (db)

     # Update devices info
-    mylog('verbose','[Network Scan] Updating Devices Info')
+    mylog('verbose','[Process Scan] Updating Devices Info')
     update_devices_data_from_scan (db)

     # Resolve devices names
-    mylog('verbose','[Network Scan] Resolve devices names')
+    mylog('verbose','[Process Scan] Resolve devices names')
     update_devices_names(db)

     # Void false connection - disconnections
-    mylog('verbose','[Network Scan] Voiding false (ghost) disconnections')
+    mylog('verbose','[Process Scan] Voiding false (ghost) disconnections')
     void_ghost_disconnections (db)

     # Pair session events (Connection / Disconnection)
-    mylog('verbose','[Network Scan] Pairing session events (connection / disconnection) ')
+    mylog('verbose','[Process Scan] Pairing session events (connection / disconnection) ')
     pair_sessions_events(db)

     # Sessions snapshot
-    mylog('verbose','[Network Scan] Creating sessions snapshot')
+    mylog('verbose','[Process Scan] Creating sessions snapshot')
     create_sessions_snapshot (db)

     # Online history
-    mylog('verbose','[Network Scan] Inserting scan results into Online_History')
+    mylog('verbose','[Process Scan] Inserting scan results into Online_History')
     insertOnlineHistory(db,conf.cycle)

     # Skip repeated notifications
-    mylog('verbose','[Network Scan] Skipping repeated notifications')
+    mylog('verbose','[Process Scan] Skipping repeated notifications')
     skip_repeated_notifications (db)

     # Commit changes
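One subtlety in the new signature: the default arpscan_devices = conf.arpscan_devices is evaluated once at import time, so it stays bound to the initial empty list even after scan_network() rebinds conf.arpscan_devices. main() sidesteps this by passing the list explicitly. A standalone demonstration of the pitfall:

import types

conf = types.SimpleNamespace(arpscan_devices=[])

def process_scan(devices=conf.arpscan_devices):    # default bound at def time
    return devices

conf.arpscan_devices = ['aa:bb:cc:dd:ee:ff']       # rebinding, as scan_network() now does
print(process_scan())                              # [] - stale import-time default
print(process_scan(conf.arpscan_devices))          # ['aa:bb:cc:dd:ee:ff']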

View File

@@ -7,9 +7,9 @@ from collections import namedtuple
 # pialert modules
 import conf
 from const import pluginsPath, logPath
-from files import get_file_content, write_file
 from logger import mylog
-from helper import timeNowTZ, updateState
+from helper import timeNowTZ, updateState, get_file_content, write_file
+from api import update_api
@@ -269,7 +269,7 @@ def execute_plugin(db, plugin):
     process_plugin_events(db, plugin)

     # update API endpoints
-    # update_api(False, ["plugins_events","plugins_objects"]) # TO-DO - remove circular reference
+    update_api(db, False, ["plugins_events","plugins_objects"])

 #-------------------------------------------------------------------------------
 def custom_plugin_decoder(pluginDict):

View File

@@ -12,10 +12,8 @@ from json2table import convert
 # pialert modules
 import conf
-from const import pialertPath, logPath
-#from database import get_table_as_json
-from files import get_file_content, write_file
-from helper import generate_mac_links, isNewVersion, removeDuplicateNewLines, timeNow, hide_email, json_struc, updateState
+from const import pialertPath, logPath, apiPath
+from helper import generate_mac_links, removeDuplicateNewLines, timeNow, hide_email, json_struc, updateState, get_file_content, write_file
 from logger import logResult, mylog, print_log
 from mqtt import mqtt_start
@@ -252,10 +250,11 @@ def send_notifications (db):
     # Send Mail
     if json_internet != [] or json_new_devices != [] or json_down_devices != [] or json_events != [] or json_ports != [] or conf.debug_force_notification or plugins_report:

-        # update_api(True) # TO-DO
         mylog('none', ['[Notification] Changes detected, sending reports'])
+        mylog('info', ['[Notification] Updating API files'])
+        send_api()

         if conf.REPORT_MAIL and check_config('email'):
             updateState(db,"Send: Email")
             mylog('info', ['[Notification] Sending report by Email'])
@@ -613,6 +612,13 @@ def to_text(_json):
     return payloadData

+#-------------------------------------------------------------------------------
+def send_api():
+    mylog('verbose', ['[Send API] Updating notification_* files in ', apiPath])
+
+    write_file(apiPath + 'notification_text.txt' , mail_text)
+    write_file(apiPath + 'notification_text.html' , mail_html)
+    write_file(apiPath + 'notification_json_final.json' , json.dumps(json_final))
+
 #-------------------------------------------------------------------------------

View File

@@ -2,8 +2,7 @@
 import subprocess

 import conf
-from const import logPath
-from database import sql_nmap_scan_all
+from const import logPath, sql_nmap_scan_all
 from helper import json_struc, timeNow, updateState
 from logger import append_line_to_file, mylog

 #-------------------------------------------------------------------------------

View File

@@ -40,7 +40,7 @@ def performPholusScan (db, timeoutSec, userSubnets):
         output = subprocess.check_output (pholus_args, universal_newlines=True, stderr=subprocess.STDOUT, timeout=(timeoutSec + 30))
     except subprocess.CalledProcessError as e:
         # An error occurred, handle it
-        mylog('none', [[PholusScan], e.output])
+        mylog('none', ['[PholusScan]', e.output])
         mylog('none', ["[PholusScan] Error - Pholus Scan - check logs"])
     except subprocess.TimeoutExpired as timeErr:
         mylog('none', ['[PholusScan] Pholus TIMEOUT - the process was forcefully terminated when the timeout was reached'])

View File

@@ -50,6 +50,9 @@ def copy_pihole_network (db):
     mylog('debug',[ '[PiHole Network] - completed - found ',sql.rowcount, ' devices'])
     return str(sql.rowcount) != "0"

+
+#-------------------------------------------------------------------------------
+
 #-------------------------------------------------------------------------------
 def read_DHCP_leases (db):
     """