mirror of
https://github.com/jokob-sk/NetAlertX.git
synced 2025-12-07 09:36:05 -08:00
changed to __main__.py and scanners folder
This commit is contained in:
57
pialert/scanners/arpscan.py
Normal file
57
pialert/scanners/arpscan.py
Normal file
@@ -0,0 +1,57 @@
|
||||
import re
|
||||
import subprocess
|
||||
|
||||
from logger import mylog
|
||||
|
||||
#-------------------------------------------------------------------------------
|
||||
def execute_arpscan (userSubnets):
    """Run arp-scan on every configured interface and parse the combined output.

    Parameters:
        userSubnets: iterable of str, one entry per interface, e.g.
                     '192.168.1.0/24 --interface=eth0'.

    Returns:
        list of dict: one dict per unique MAC address with the named-group
        keys 'ip', 'mac' and 'hw' (vendor string).
    """

    # output of possible multiple interfaces
    arpscan_output = ""

    # scan each interface
    for interface in userSubnets :
        arpscan_output += execute_arpscan_on_interface (interface)

    # Search IP + MAC + Vendor as regular expression.
    # BUGFIX: the separator fragments are raw strings now - '\s' inside a
    # normal string is an invalid escape sequence (DeprecationWarning and a
    # SyntaxWarning on newer Python versions).
    re_ip = r'(?P<ip>((2[0-5]|1[0-9]|[0-9])?[0-9]\.){3}((2[0-5]|1[0-9]|[0-9])?[0-9]))'
    re_mac = r'(?P<mac>([0-9a-fA-F]{2}[:-]){5}([0-9a-fA-F]{2}))'
    re_hw = r'(?P<hw>.*)'
    re_pattern = re.compile (re_ip + r'\s+' + re_mac + r'\s' + re_hw)

    # Create a dict per matched device line
    devices_list = [device.groupdict()
                    for device in re.finditer (re_pattern, arpscan_output)]

    # Keep only the first occurrence of each MAC address
    unique_mac = []
    unique_devices = []

    for device in devices_list :
        if device['mac'] not in unique_mac:
            unique_mac.append(device['mac'])
            unique_devices.append(device)

    mylog('debug', ['[ARP Scan] Completed found ', len(unique_devices) ,' devices ' ])
    return unique_devices
|
||||
|
||||
#-------------------------------------------------------------------------------
|
||||
def execute_arpscan_on_interface (interface):
    """Run 'sudo arp-scan' for one interface entry and return its raw stdout.

    The entry may contain several space-separated tokens (subnet plus extra
    arp-scan options); they are passed through to arp-scan unchanged.
    Returns "" when the command fails.
    """
    scan_tokens = interface.strip().split()

    # --retry=6 keeps falsely-offline devices to a minimum
    mylog('debug', ['[ARP Scan] - arpscan command: sudo arp-scan --ignoredups --retry=6 ', str(scan_tokens)])
    command = ['sudo', 'arp-scan', '--ignoredups', '--retry=6'] + scan_tokens

    try:
        scan_result = subprocess.check_output (command, universal_newlines=True)
    except subprocess.CalledProcessError as e:
        # log the failure and fall back to an empty result
        mylog('none', ['[ARP Scan]', e.output])
        scan_result = ""

    mylog('debug', ['[ARP Scan] on Interface Completed with results: ', scan_result])
    return scan_result
|
||||
195
pialert/scanners/internet.py
Normal file
195
pialert/scanners/internet.py
Normal file
@@ -0,0 +1,195 @@
|
||||
""" internet related functions to support Pi.Alert """
|
||||
|
||||
import subprocess
|
||||
import re
|
||||
|
||||
# pialert modules
|
||||
|
||||
import conf
|
||||
from helper import timeNow, updateState
|
||||
from logger import append_line_to_file, mylog
|
||||
from const import logPath
|
||||
|
||||
|
||||
|
||||
# need to find a better way to deal with settings !
|
||||
#global DDNS_ACTIVE, DDNS_DOMAIN, DDNS_UPDATE_URL, DDNS_USER, DDNS_PASSWORD
|
||||
|
||||
|
||||
#===============================================================================
|
||||
# INTERNET IP CHANGE
|
||||
#===============================================================================
|
||||
def check_internet_IP ( db ):
    """Detect public-IP changes and optionally sync the Dynamic DNS entry.

    Parameters:
        db: PiAlert database wrapper (provides .sql and .commitDB()).

    Returns:
        False when the Internet IP could not be retrieved; otherwise None
        (implicit) after processing.
    """

    # Header - update the app state shown in the UI
    updateState(db,"Scan: Internet IP")
    mylog('verbose', ['[Internet IP] Check Internet IP started'])

    # Get Internet IP via dig
    mylog('verbose', ['[Internet IP] - Retrieving Internet IP'])
    internet_IP = get_internet_IP(conf.DIG_GET_IP_ARG)
    # TESTING - Force IP
    # internet_IP = "1.2.3.4"

    # Abort when no IP was determined.
    # NOTE(review): get_internet_IP returns '0.0.0.0' (never '') on failure,
    # so this guard looks unreachable - confirm intended behavior.
    if internet_IP == "" :
        mylog('none', ['[Internet IP] Error retrieving Internet IP'])
        mylog('none', ['[Internet IP] Exiting...'])
        return False
    mylog('verbose', ['[Internet IP] IP: ', internet_IP])

    # Get previous stored IP from the DB
    mylog('verbose', ['[Internet IP] Retrieving previous IP:'])
    previous_IP = get_previous_internet_IP (db)
    mylog('verbose', ['[Internet IP] ', previous_IP])

    # Persist the new IP only when it actually changed
    if internet_IP != previous_IP :
        mylog('info', ['[Internet IP] New internet IP: ', internet_IP])
        save_new_internet_IP (db, internet_IP)

    else :
        mylog('verbose', ['[Internet IP] No changes to perform'])

    # Optionally keep the Dynamic DNS entry in sync with the real IP
    if conf.DDNS_ACTIVE :
        mylog('verbose', ['[DDNS] Retrieving Dynamic DNS IP'])
        dns_IP = get_dynamic_DNS_IP()

        # Check Dynamic DNS IP ('0.0.0.0' is the resolver's failure fallback)
        if dns_IP == "" or dns_IP == "0.0.0.0" :
            mylog('none', ['[DDNS] Error retrieving Dynamic DNS IP'])
            mylog('none', ['[DDNS] ', dns_IP])

        # Update the DDNS provider when its record lags behind the real IP
        if dns_IP != internet_IP :
            mylog('none', ['[DDNS] Updating Dynamic DNS IP'])
            message = set_dynamic_DNS_IP ()
            mylog('none', ['[DDNS] ', message])
        else :
            mylog('verbose', ['[DDNS] No changes to perform'])
    else :
        mylog('verbose', ['[DDNS] Skipping Dynamic DNS update'])
|
||||
|
||||
|
||||
|
||||
#-------------------------------------------------------------------------------
|
||||
def get_internet_IP (DIG_GET_IP_ARG):
    """Resolve the current public IP with 'dig'.

    Returns the IPv4 address as a string, or '0.0.0.0' when it could not
    be determined.
    """
    # BUGFIX #46 - curl http://ipv4.icanhazip.com repeatedly is very slow,
    # so 'dig' is used instead
    command = ['dig', '+short'] + DIG_GET_IP_ARG.strip().split()
    try:
        response = subprocess.check_output (command, universal_newlines=True)
    except subprocess.CalledProcessError as e:
        mylog('none', [e.output])
        response = '' # no internet

    # keep only a well-formed IPv4 address, '0.0.0.0' as the fallback
    public_ip = check_IP_format (response)
    return public_ip if public_ip != '' else '0.0.0.0'
|
||||
|
||||
#-------------------------------------------------------------------------------
|
||||
def get_previous_internet_IP (db):
    """Return the Internet IP stored in the Devices table.

    Falls back to '0.0.0.0' when no row (or an empty row) is found.
    """
    # get previous internet IP stored in DB
    db.sql.execute ("SELECT dev_LastIP FROM Devices WHERE dev_MAC = 'Internet' ")
    row = db.sql.fetchone()

    db.commitDB()

    if row is not None and len(row) > 0 :
        return row[0]

    return '0.0.0.0'
|
||||
|
||||
|
||||
|
||||
#-------------------------------------------------------------------------------
|
||||
def save_new_internet_IP (db, pNewIP):
    """Persist a changed Internet IP.

    Appends the change to the plain-text history log, records an
    'Internet IP Changed' event (flagged for alert e-mails) and updates
    the 'Internet' pseudo-device row.
    """
    # Log new IP into logfile
    append_line_to_file (logPath + '/IP_changes.log',
                '['+str(timeNow()) +']\t'+ pNewIP +'\n')

    previous_ip = get_previous_internet_IP(db)

    # Record the change as an event for notification purposes
    db.sql.execute ("""INSERT INTO Events (eve_MAC, eve_IP, eve_DateTime,
                        eve_EventType, eve_AdditionalInfo,
                        eve_PendingAlertEmail)
                    VALUES ('Internet', ?, ?, 'Internet IP Changed',
                        'Previous Internet IP: '|| ?, 1) """,
                    (pNewIP, timeNow(), previous_ip) )

    # Store the new IP on the 'Internet' device row
    db.sql.execute ("""UPDATE Devices SET dev_LastIP = ?
                    WHERE dev_MAC = 'Internet' """,
                    (pNewIP,) )

    # commit changes
    db.commitDB()
|
||||
|
||||
#-------------------------------------------------------------------------------
|
||||
def check_IP_format (pIP):
    """Extract the first IPv4 address contained in *pIP*.

    Returns the matched address as a string, or "" when the input contains
    no well-formed IPv4 address.
    """
    # one octet: 0-255 (leading zeros tolerated)
    octet = r'(?:25[0-5]|(?:2[0-4]|1{0,1}[0-9]){0,1}[0-9])'
    ipv4_pattern = r'(?:(?:' + octet + r'\.){3,3}' + octet + r')'

    match = re.search(ipv4_pattern, pIP)
    return "" if match is None else match.group(0)
|
||||
|
||||
|
||||
|
||||
#-------------------------------------------------------------------------------
|
||||
def get_dynamic_DNS_IP ():
    """Resolve the configured DDNS domain to its current IP.

    Uses the system's default DNS server (an OpenDNS resolver could be
    forced by appending '@resolver1.opendns.com' to the dig arguments).
    Returns the IPv4 address, or '0.0.0.0' when resolution failed.
    """
    command = ['dig', '+short', conf.DDNS_DOMAIN]

    try:
        response = subprocess.check_output (command, universal_newlines=True)
    except subprocess.CalledProcessError as e:
        # dig failed - log and treat as unresolved
        mylog('none', ['[DDNS] ERROR - ', e.output])
        response = '' # probably no internet

    # keep only a well-formed IPv4 address, '0.0.0.0' as the fallback
    resolved_ip = check_IP_format (response)
    return resolved_ip if resolved_ip != '' else '0.0.0.0'
|
||||
|
||||
#-------------------------------------------------------------------------------
|
||||
def set_dynamic_DNS_IP ():
    """Call the DDNS provider's update URL via curl.

    Returns curl's stdout (the provider's response), or "" on error.

    NOTE(review): credentials are passed as URL query parameters - confirm
    the provider requires this; such URLs may end up in process lists/logs.
    """
    update_url = (conf.DDNS_UPDATE_URL +
                  'username=' + conf.DDNS_USER +
                  '&password=' + conf.DDNS_PASSWORD +
                  '&hostname=' + conf.DDNS_DOMAIN)

    try:
        response = subprocess.check_output (['curl', '-s', update_url],
                                            universal_newlines=True)
    except subprocess.CalledProcessError as e:
        # curl failed - log and return an empty response
        mylog('none', ['[DDNS] ERROR - ',e.output])
        response = ""

    return response
|
||||
204
pialert/scanners/nmapscan.py
Normal file
204
pialert/scanners/nmapscan.py
Normal file
@@ -0,0 +1,204 @@
|
||||
|
||||
import subprocess
|
||||
|
||||
import conf
|
||||
from const import logPath
|
||||
from database import sql_nmap_scan_all
|
||||
from helper import json_struc, timeNow, updateState
|
||||
from logger import append_line_to_file, mylog
|
||||
#-------------------------------------------------------------------------------
|
||||
|
||||
|
||||
|
||||
class nmap_entry:
    """One PORT/STATE/SERVICE row produced by an Nmap scan of a device.

    The 'hash' attribute is the identity used for change detection; it is
    built from mac + port + state + service only (time, name, extra and the
    DB index are deliberately excluded).
    """
    def __init__(self, mac, time, port, state, service, name = '', extra = '', index = 0):
        self.mac = mac
        self.port = port
        self.state = state
        self.service = service
        self.time = time
        self.name = name
        self.extra = extra
        self.index = index
        # identity for duplicate/change detection
        self.hash = ''.join(str(part) for part in (mac, port, state, service))
|
||||
|
||||
|
||||
#-------------------------------------------------------------------------------
|
||||
def performNmapScan(db, devicesToScan):
    """Run an Nmap port scan for every device and sync results into the DB.

    For each device, the configured Nmap arguments are run against its last
    known IP, the reported PORT/STATE/SERVICE table is parsed, compared with
    the stored Nmap_Scan rows, and new/changed ports are written to the DB.
    Changes are also exposed via the module-global changedPorts_json_struc
    for the API and notifications.

    Parameters:
        db:            PiAlert database wrapper (provides .sql / .commitDB()).
        devicesToScan: list of dict-like rows with at least the keys
                       "dev_LastIP", "dev_MAC" and "dev_Name".
    """
    sql = db.sql # TO-DO

    # JSON structure consumed by the API / notification layer
    global changedPorts_json_struc

    changedPortsTmp = []

    if len(devicesToScan) > 0:

        # NOTE(review): NMAP_TIMEOUT is used arithmetically (timeoutSec + 30)
        # as well as via int(timeoutSec) - it must already be an int here.
        timeoutSec = conf.NMAP_TIMEOUT

        devTotal = len(devicesToScan)

        updateState(db,"Scan: Nmap")

        mylog('verbose', ['[', timeNow(), '] Scan: Nmap for max ', str(timeoutSec), 's ('+ str(round(int(timeoutSec) / 60, 1)) +'min) per device'])
        mylog('verbose', [" Estimated max delay: ", (devTotal * int(timeoutSec)), 's ', '(', round((devTotal * int(timeoutSec))/60,1) , 'min)' ])

        devIndex = 0
        for device in devicesToScan:
            # Execute command
            output = ""
            # prepare arguments from user supplied ones
            nmapArgs = ['nmap'] + conf.NMAP_ARGS.split() + [device["dev_LastIP"]]

            progress = ' (' + str(devIndex+1) + '/' + str(devTotal) + ')'

            try:
                # force (timeout + 30 seconds) in case the subprocess hangs
                output = subprocess.check_output (nmapArgs, universal_newlines=True, stderr=subprocess.STDOUT, timeout=(timeoutSec + 30))
            except subprocess.CalledProcessError as e:
                # nmap exited with a non-zero code
                mylog('none', [e.output])
                mylog('none', [" Error - Nmap Scan - check logs", progress])
            except subprocess.TimeoutExpired as timeErr:
                mylog('verbose', [' Nmap TIMEOUT - the process forcefully terminated as timeout reached for ', device["dev_LastIP"], progress])

            if output == "": # check if the subprocess failed
                mylog('info', ['[', timeNow(), '] Scan: Nmap FAIL for ', device["dev_LastIP"], progress ,' check logs for details'])
            else:
                mylog('verbose', ['[', timeNow(), '] Scan: Nmap SUCCESS for ', device["dev_LastIP"], progress])

            devIndex += 1

            # check the last run output
            newLines = output.split('\n')

            # regular logging
            for line in newLines:
                append_line_to_file (logPath + '/pialert_nmap.log', line +'\n')

            # collect ports / new Nmap entries from the PORT/STATE/SERVICE table
            newEntriesTmp = []

            index = 0
            startCollecting = False
            for line in newLines:
                if 'Starting Nmap' in line:
                    if len(newLines) > index+1 and 'Note: Host seems down' in newLines[index+1]:
                        break # this entry is empty
                elif 'PORT' in line and 'STATE' in line and 'SERVICE' in line:
                    # table header reached - start collecting port rows.
                    # BUGFIX: a second elif with this exact condition (meant to
                    # detect the end of the table) was unreachable and has been
                    # removed; collection now simply relies on the 3-column check.
                    startCollecting = True
                elif startCollecting and len(line.split()) == 3:
                    port, state, service = line.split()
                    newEntriesTmp.append(nmap_entry(device["dev_MAC"], timeNow(), port, state, service, device["dev_Name"]))
                index += 1

            # previously stored Nmap entries for this device
            oldEntries = []

            mylog('verbose', ['[', timeNow(), '] Scan: Ports found by NMAP: ', len(newEntriesTmp)])

            if len(newEntriesTmp) > 0:

                # get all current NMAP ports from the DB
                sql.execute(sql_nmap_scan_all)
                rows = sql.fetchall()

                for row in rows:
                    # only collect entries matching the current MAC address
                    if row["MAC"] == device["dev_MAC"]:
                        oldEntries.append(nmap_entry(row["MAC"], row["Time"], row["Port"], row["State"], row["Service"], device["dev_Name"], row["Extra"], row["Index"]))

                # Collect all entries whose hash is not in the DB yet
                newEntries = []
                for newTmpEntry in newEntriesTmp:
                    found = False
                    # Check whether the new entry is already in oldEntries
                    for oldEntry in oldEntries:
                        if newTmpEntry.hash == oldEntry.hash:
                            found = True
                    if not found:
                        newEntries.append(newTmpEntry)

                mylog('verbose', ['[', timeNow(), '] Scan: Nmap newly discovered or changed ports: ', len(newEntries)])

                # collect new ports, find the corresponding old entry and return for notification purposes
                # also update the DB with the new values after deleting the old ones
                if len(newEntries) > 0:

                    # params to build the SQL query
                    params = []
                    indexesToDelete = ""

                    # columns of the JSON structure (loop-invariant, hoisted)
                    columnNames = ["Name", "MAC", "Port", "State", "Service", "Extra", "NewOrOld" ]

                    # Find old entry matching the new entry hash.
                    # NOTE(review): newEntries were selected precisely because no
                    # old entry shares their hash, so this match can only succeed
                    # if oldEntries contains duplicate hashes - confirm whether
                    # the match was meant to use MAC+port only.
                    for newEntry in newEntries:

                        foundEntry = None

                        for oldEntry in oldEntries:
                            if oldEntry.hash == newEntry.hash:
                                indexesToDelete = indexesToDelete + str(oldEntry.index) + ','
                                foundEntry = oldEntry

                        # Old entry found
                        if foundEntry is not None:
                            # BUGFIX: was oldEntry.extra (the last-iterated entry);
                            # now uses the actually matched entry's extra value.
                            params.append((newEntry.mac, newEntry.time, newEntry.port, newEntry.state, newEntry.service, foundEntry.extra))
                            # Build JSON for API and notifications
                            changedPortsTmp.append({
                                "Name"      : foundEntry.name,
                                "MAC"       : newEntry.mac,
                                "Port"      : newEntry.port,
                                "State"     : newEntry.state,
                                "Service"   : newEntry.service,
                                "Extra"     : foundEntry.extra,
                                "NewOrOld"  : "New values"
                            })
                            changedPortsTmp.append({
                                "Name"      : foundEntry.name,
                                "MAC"       : foundEntry.mac,
                                "Port"      : foundEntry.port,
                                "State"     : foundEntry.state,
                                "Service"   : foundEntry.service,
                                "Extra"     : foundEntry.extra,
                                "NewOrOld"  : "Old values"
                            })
                        # New entry - no matching Old entry found
                        else:
                            params.append((newEntry.mac, newEntry.time, newEntry.port, newEntry.state, newEntry.service, ''))
                            changedPortsTmp.append({
                                "Name"      : "New device",
                                "MAC"       : newEntry.mac,
                                "Port"      : newEntry.port,
                                "State"     : newEntry.state,
                                "Service"   : newEntry.service,
                                "Extra"     : "",
                                "NewOrOld"  : "New device"
                            })

                    changedPorts_json_struc = json_struc({ "data" : changedPortsTmp}, columnNames)

                    # Delete old entries if available
                    if len(indexesToDelete) > 0:
                        sql.execute ("DELETE FROM Nmap_Scan where \"Index\" in (" + indexesToDelete[:-1] +")")
                        db.commitDB()

                    # Insert new values into the DB
                    sql.executemany ("""INSERT INTO Nmap_Scan ("MAC", "Time", "Port", "State", "Service", "Extra") VALUES (?, ?, ?, ?, ?, ?)""", params)
                    db.commitDB()
|
||||
|
||||
|
||||
200
pialert/scanners/pholusscan.py
Normal file
200
pialert/scanners/pholusscan.py
Normal file
@@ -0,0 +1,200 @@
|
||||
import subprocess
|
||||
import re
|
||||
|
||||
from const import fullPholusPath, logPath
|
||||
from helper import checkIPV4, timeNow, updateState
|
||||
from logger import mylog
|
||||
|
||||
#-------------------------------------------------------------------------------
|
||||
|
||||
def performPholusScan (db, timeoutSec, userSubnets):
    """Run a Pholus (mDNS/DNS discovery) scan on every configured subnet.

    Parses the pholus last-run log and inserts the '|'-separated result rows
    into the Pholus_Scan table.

    Parameters:
        db:          PiAlert database wrapper (provides .sql / .commitDB()).
        timeoutSec:  requested scan duration (int, or numeric string).
        userSubnets: iterable of entries like '192.168.1.0/24 --interface=eth0'.
    """
    sql = db.sql # TO-DO

    # scan every interface
    for subnet in userSubnets:

        # each entry must contain exactly one '--interface=' separator
        temp = subnet.split("--interface=")

        if len(temp) != 2:
            # BUGFIX: the message said '--inteface=eth0' while the parser above
            # splits on '--interface=' - typo corrected
            mylog('none', ["[PholusScan] Skip scan (need subnet in format '192.168.1.0/24 --interface=eth0'), got: ", subnet])
            return

        mask = temp[0].strip()
        interface = temp[1].strip()

        # logging & updating app state
        updateState(db,"Scan: Pholus")
        mylog('none', ['[PholusScan] Scan: Pholus for ', str(timeoutSec), 's ('+ str(round(int(timeoutSec) / 60, 1)) +'min)'])
        mylog('verbose', ["[PholusScan] Pholus scan on [interface] ", interface, " [mask] " , mask])

        # the scan always lasts 2x as long, so the desired user time from settings needs to be halved
        adjustedTimeout = str(round(int(timeoutSec) / 2, 0))

        # python3 -m trace --trace /home/pi/pialert/pholus/pholus3.py eth1 -rdns_scanning 192.168.1.0/24 -stimeout 600
        pholus_args = ['python3', fullPholusPath, interface, "-rdns_scanning", mask, "-stimeout", adjustedTimeout]

        # Execute command
        output = ""

        try:
            # force (timeout + 30 seconds) in case the subprocess hangs.
            # BUGFIX: int() added - a numeric-string setting would make
            # (timeoutSec + 30) raise a TypeError, matching the int() use above.
            output = subprocess.check_output (pholus_args, universal_newlines=True, stderr=subprocess.STDOUT, timeout=(int(timeoutSec) + 30))
        except subprocess.CalledProcessError as e:
            # BUGFIX: the prefix was an unquoted list literal [[PholusScan], ...]
            # which raised NameError; it is now the intended string tag
            mylog('none', ['[PholusScan]', e.output])
            mylog('none', ["[PholusScan] Error - Pholus Scan - check logs"])
        except subprocess.TimeoutExpired as timeErr:
            mylog('none', ['[PholusScan] Pholus TIMEOUT - the process forcefully terminated as timeout reached'])

        if output == "": # check if the subprocess failed
            mylog('none', ['[PholusScan] Scan: Pholus FAIL - check logs'])
        else:
            mylog('verbose', ['[PholusScan] Scan: Pholus SUCCESS'])

        # Pholus writes its results to the last-run log - parse that file
        # (context manager guarantees the handle is closed)
        with open(logPath + '/pialert_pholus_lastrun.log', 'r+') as f:
            newLines = f.read().split('\n')

        # cleanup - select only lines containing a separator to filter out unnecessary data
        newLines = list(filter(lambda x: '|' in x, newLines))

        # build SQL query parameters to insert into the DB
        params = []

        for line in newLines:
            columns = line.split("|")
            if len(columns) == 4:
                params.append(( interface + " " + mask, timeNow() , columns[0].replace(" ", ""), columns[1].replace(" ", ""), columns[2].replace(" ", ""), columns[3], ''))

        if len(params) > 0:
            sql.executemany ("""INSERT INTO Pholus_Scan ("Info", "Time", "MAC", "IP_v4_or_v6", "Record_Type", "Value", "Extra") VALUES (?, ?, ?, ?, ?, ?, ?)""", params)
            db.commitDB()
|
||||
|
||||
#-------------------------------------------------------------------------------
|
||||
def cleanResult(pResult):
    """Strip mDNS/DNS service suffixes and decorations from a resolved name.

    BUGFIX: the parameter was named 'str', shadowing the builtin; renamed
    (callers pass it positionally).

    Parameters:
        pResult: raw name string, e.g. 'MyPc.local.'.

    Returns:
        The cleaned name, e.g. 'MyPc'.
    """
    # remove well-known service / domain suffixes (order preserved from the
    # original chained .replace() calls; each removes every occurrence)
    # alternative: pResult.split('.')[0]
    for suffix in ("._airplay", "._tcp", ".local", "._esphomelib",
                   "._googlecast", ".lan", ".home"):
        pResult = pResult.replace(suffix, "")

    # removing last part of e.g. Nest-Audio-ff77ff77ff77ff77ff77ff77ff77ff77
    pResult = re.sub(r'-[a-fA-F0-9]{32}', '', pResult)

    # remove a single trailing dot
    if pResult.endswith('.'):
        pResult = pResult[:-1]

    return pResult
|
||||
|
||||
|
||||
# Disclaimer - I'm interfacing with a script I didn't write (pholus3.py) so it's possible I'm missing types of answers
# it's also possible the pholus3.py script can be adjusted to provide a better output to interface with it
# Hit me with a PR if you know how! :)
def resolve_device_name_pholus (pMAC, pIP, allRes):
    """Try to derive a human-readable device name from Pholus scan results.

    Applies a sequence of heuristic passes (ordered by how informative the
    record type is) over the collected answers for this device.

    Parameters:
        pMAC:   MAC address of the device to resolve.
        pIP:    IPv4 address of the device (only IPv4 rows are used).
        allRes: list of Pholus result rows (dict-like with the keys
                "MAC", "Record_Type", "IP_v4_or_v6" and "Value").

    Returns:
        The resolved name as a string, or -1 when nothing usable was found.
    """

    # indexes of answer rows belonging to this MAC + IP
    pholusMatchesIndexes = []

    index = 0
    for result in allRes:
        # limiting entries used for name resolution to the ones containing the current IP (v4 only)
        # ._googlezone rows are excluded (they do not carry a usable name)
        if result["MAC"] == pMAC and result["Record_Type"] == "Answer" and result["IP_v4_or_v6"] == pIP and '._googlezone' not in result["Value"]:
            # found entries with a matching MAC address, let's collect indexes
            pholusMatchesIndexes.append(index)

        index += 1

    # return if nothing found
    if len(pholusMatchesIndexes) == 0:
        return -1

    # we have some entries let's try to select the most useful one
    # (the order of the passes below defines their priority)

    # airplay matches contain a lot of information
    # Matches for example:
    # Brand Tv (50)._airplay._tcp.local. TXT Class:32769 "acl=0 deviceid=66:66:66:66:66:66 features=0x77777,0x38BCB46 rsf=0x3 fv=p20.T-FFFFFF-03.1 flags=0x204 model=XXXX manufacturer=Brand serialNumber=XXXXXXXXXXX protovers=1.1 srcvers=777.77.77 pi=FF:FF:FF:FF:FF:FF psi=00000000-0000-0000-0000-FFFFFFFFFF gid=00000000-0000-0000-0000-FFFFFFFFFF gcgl=0 pk=AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
    for i in pholusMatchesIndexes:
        if checkIPV4(allRes[i]['IP_v4_or_v6']) and '._airplay._tcp.local. TXT Class:32769' in str(allRes[i]["Value"]) :
            # the name is everything before the airplay service suffix
            return allRes[i]["Value"].split('._airplay._tcp.local. TXT Class:32769')[0]

    # second best - contains airplay
    # Matches for example:
    # _airplay._tcp.local. PTR Class:IN "Brand Tv (50)._airplay._tcp.local."
    for i in pholusMatchesIndexes:
        if checkIPV4(allRes[i]['IP_v4_or_v6']) and '_airplay._tcp.local. PTR Class:IN' in allRes[i]["Value"] and ('._googlecast') not in allRes[i]["Value"]:
            # the quoted payload holds the name
            return cleanResult(allRes[i]["Value"].split('"')[1])

    # Contains PTR Class:32769
    # Matches for example:
    # 3.1.168.192.in-addr.arpa. PTR Class:32769 "MyPc.local."
    for i in pholusMatchesIndexes:
        if checkIPV4(allRes[i]['IP_v4_or_v6']) and 'PTR Class:32769' in allRes[i]["Value"]:
            return cleanResult(allRes[i]["Value"].split('"')[1])

    # Contains AAAA Class:IN
    # Matches for example:
    # DESKTOP-SOMEID.local. AAAA Class:IN "fe80::fe80:fe80:fe80:fe80"
    for i in pholusMatchesIndexes:
        if checkIPV4(allRes[i]['IP_v4_or_v6']) and 'AAAA Class:IN' in allRes[i]["Value"]:
            # the host name precedes '.local.'
            return cleanResult(allRes[i]["Value"].split('.local.')[0])

    # Contains _googlecast._tcp.local. PTR Class:IN
    # Matches for example:
    # _googlecast._tcp.local. PTR Class:IN "Nest-Audio-ff77ff77ff77ff77ff77ff77ff77ff77._googlecast._tcp.local."
    for i in pholusMatchesIndexes:
        if checkIPV4(allRes[i]['IP_v4_or_v6']) and '_googlecast._tcp.local. PTR Class:IN' in allRes[i]["Value"] and ('Google-Cast-Group') not in allRes[i]["Value"]:
            return cleanResult(allRes[i]["Value"].split('"')[1])

    # Contains A Class:32769
    # Matches for example:
    # Android.local. A Class:32769 "192.168.1.6"
    for i in pholusMatchesIndexes:
        if checkIPV4(allRes[i]['IP_v4_or_v6']) and ' A Class:32769' in allRes[i]["Value"]:
            return cleanResult(allRes[i]["Value"].split(' A Class:32769')[0])

    # Contains PTR Class:IN
    # Matches for example:
    # _esphomelib._tcp.local. PTR Class:IN "ceiling-light-1._esphomelib._tcp.local."
    for i in pholusMatchesIndexes:
        if checkIPV4(allRes[i]['IP_v4_or_v6']) and 'PTR Class:IN' in allRes[i]["Value"]:
            return cleanResult(allRes[i]["Value"].split('"')[1])

    # no pass produced a usable name
    return -1
|
||||
|
||||
#-------------------------------------------------------------------------------
|
||||
|
||||
def resolve_device_name_dig (pMAC, pIP):
    """Reverse-resolve *pIP* to a host name via 'dig +short -x'.

    Parameters:
        pMAC: MAC address of the device (kept for interface compatibility
              with the other resolvers; not used here).
        pIP:  IP address to reverse-resolve.

    Returns:
        The cleaned host name as a string, or -1 when resolution failed or
        produced an empty name (the resolvers' shared failure convention).
    """
    dig_args = ['dig', '+short', '-x', pIP]

    # BUGFIX: the original wrapped this whole body in a second
    # try/except subprocess.CalledProcessError that could never trigger
    # (the inner except already catches it) - removed.
    try:
        newName = subprocess.check_output (dig_args, universal_newlines=True)
    except subprocess.CalledProcessError as e:
        mylog('none', ['[device_name_dig] ', e.output])
        # newName = "Error - check logs"
        return -1 # not found

    # dig output ends with a newline; an empty answer means no PTR record
    newName = newName.strip()

    if len(newName) == 0 :
        return -1

    # strip service/domain suffixes
    newName = cleanResult(newName)

    if len(newName) == 0:
        return -1

    return newName
|
||||
78
pialert/scanners/pihole.py
Normal file
78
pialert/scanners/pihole.py
Normal file
@@ -0,0 +1,78 @@
|
||||
""" module to import db and leases from PiHole """
|
||||
|
||||
import sqlite3
|
||||
|
||||
import conf
|
||||
from const import piholeDB, piholeDhcpleases
|
||||
from logger import mylog
|
||||
|
||||
#-------------------------------------------------------------------------------
|
||||
def copy_pihole_network (db):
    """
    Attach the Pi-hole database and copy its network table across into the
    PiAlert PiHole_Network table.

    Parameters:
        db: PiAlert database wrapper (provides .sql and .commitDB()).

    Returns:
        True when the most recent statement reported a non-zero rowcount,
        False otherwise.
    """

    sql = db.sql # TO-DO
    # Open Pi-hole DB
    mylog('debug', '[PiHole Network] - attach PiHole DB')

    try:
        sql.execute ("ATTACH DATABASE '"+ piholeDB +"' AS PH")
    except sqlite3.Error as e:
        # attach failed (file missing / locked); logged, and the copy below
        # will then fail and be logged as well
        mylog('none',[ '[PiHole Network] - SQL ERROR: ', e])


    # Copy Pi-hole Network table

    try:
        # full refresh: drop all previously copied rows first
        sql.execute ("DELETE FROM PiHole_Network")

        # name and IP are taken per device from PH.network_addresses,
        # ordered by lastseen DESC so the most recent entry wins;
        # placeholder ('ip-%') and all-zero MACs are skipped
        sql.execute ("""INSERT INTO PiHole_Network (PH_MAC, PH_Vendor, PH_LastQuery,
                            PH_Name, PH_IP)
                        SELECT hwaddr, macVendor, lastQuery,
                            (SELECT name FROM PH.network_addresses
                             WHERE network_id = id ORDER BY lastseen DESC, ip),
                            (SELECT ip FROM PH.network_addresses
                             WHERE network_id = id ORDER BY lastseen DESC, ip)
                        FROM PH.network
                        WHERE hwaddr NOT LIKE 'ip-%'
                          AND hwaddr <> '00:00:00:00:00:00' """)
        # make sure every row has a displayable name
        sql.execute ("""UPDATE PiHole_Network SET PH_Name = '(unknown)'
                        WHERE PH_Name IS NULL OR PH_Name = '' """)
        # Close Pi-hole DB
        sql.execute ("DETACH PH")

    except sqlite3.Error as e:
        mylog('none',[ '[PiHole Network] - SQL ERROR: ', e])

    db.commitDB()

    # NOTE(review): sql.rowcount reflects the most recently executed
    # statement (DETACH on success), not the INSERT - confirm the logged
    # device count and the return value behave as callers expect.
    mylog('debug',[ '[PiHole Network] - completed - found ',sql.rowcount, ' devices'])
    return str(sql.rowcount) != "0"
|
||||
|
||||
#-------------------------------------------------------------------------------
|
||||
def read_DHCP_leases (db):
    """
    Import Pi-hole's DHCP lease file into the DHCP_Leases table.

    Returns True when the lease file contained at least one line
    (regardless of how many rows were importable).
    """
    mylog('debug', '[PiHole DHCP] - read DHCP_Leases file')

    # Bugfix #1 - dhcp.leases: lines with different number of columns (5 col)
    rows = []
    has_lines = False
    with open(piholeDhcpleases, 'r') as leases_file:
        for lease_line in leases_file:
            has_lines = True
            fields = lease_line.rstrip().split()
            # keep only well-formed 5-column lease rows
            if len(fields) == 5 :
                rows.append (fields)

    # Insert the collected rows into the PiAlert table
    db.sql.executemany ("""INSERT INTO DHCP_Leases (DHCP_DateTime, DHCP_MAC,
                            DHCP_IP, DHCP_Name, DHCP_MAC2)
                        VALUES (?, ?, ?, ?, ?)
                    """, rows)
    db.commitDB()

    mylog('debug', ['[PiHole DHCP] - completed - added ',len(rows), ' devices.'])
    return has_lines
|
||||
Reference in New Issue
Block a user