Plugins code cleanup 0.1

Jokob-sk
2023-09-01 08:04:14 +10:00
parent 015ce23fe2
commit 9afed6e43b
6 changed files with 85 additions and 199 deletions

View File

@@ -37,9 +37,7 @@ def main():
# Print a message to indicate that the script is starting.
mylog('verbose', ['[ARP Scan] In script '])
# Assuming 'values' is a dictionary or object that contains a key 'userSubnets'
-# which holds a list of user-submitted subnets.
-# Printing the userSubnets list to check its content.
+# holds a list of user-submitted subnets.
mylog('verbose', ['[ARP Scan] values.userSubnets: ', values.userSubnets])
@@ -87,7 +85,7 @@ def main():
def execute_arpscan(userSubnets):
-# output of possible multiple interfaces
+# output of possible multiple interfaces
arpscan_output = ""
devices_list = []
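
Note: the comments in this hunk describe values.userSubnets as a list of user-submitted subnets that execute_arpscan walks, collecting output across possibly multiple interfaces. A minimal sketch of that loop, assuming each entry is a string such as '192.168.1.0/24 --interface=eth0' (the entry format and the function body are assumptions, not the plugin's exact code):

import subprocess

def execute_arpscan_sketch(userSubnets):
    arpscan_output = ""
    for subnet in userSubnets:
        # arp-scan is assumed to be on PATH; --ignoredups drops duplicate replies
        cmd = ['arp-scan', '--ignoredups'] + subnet.split()
        result = subprocess.run(cmd, capture_output=True, text=True)
        arpscan_output += result.stdout
    return arpscan_output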

View File

@@ -16,17 +16,24 @@ import pwd
import os
from dhcp_leases import DhcpLeases
-curPath = str(pathlib.Path(__file__).parent.resolve())
-log_file = curPath + '/script.log'
-last_run = curPath + '/last_result.log'
sys.path.append("/home/pi/pialert/front/plugins")
sys.path.append('/home/pi/pialert/pialert')
-print(last_run)
from plugin_helper import Plugin_Object, Plugin_Objects
from logger import mylog, append_line_to_file
from helper import timeNowTZ
from const import logPath, pialertPath
+CUR_PATH = str(pathlib.Path(__file__).parent.resolve())
+LOG_FILE = os.path.join(CUR_PATH, 'script.log')
+RESULT_FILE = os.path.join(CUR_PATH, 'last_result.log')
# Workflow
def main():
-last_run_logfile = open(last_run, 'a')
+mylog('verbose',['[DHCPLSS] In script'])
+last_run_logfile = open(RESULT_FILE, 'a')
# empty file
last_run_logfile.write("")
@@ -35,48 +42,26 @@ def main():
parser.add_argument('paths', action="store", help="absolute dhcp.leases file paths to check separated by ','")
values = parser.parse_args()
+# Init the file
+plug_objects = Plugin_Objects( RESULT_FILE )
# parse output
-newEntries = []
if values.paths:
for path in values.paths.split('=')[1].split(','):
-newEntries = get_entries(newEntries, path)
+plug_objects_tmp = get_entries(path, plug_objects)
+mylog('verbose',[f'[DHCPLSS] {len(plug_objects_tmp)} Entries found in "{path}"'])
-for e in newEntries:
-# Insert list into the log
-service_monitoring_log(e.primaryId, e.secondaryId, e.created, e.watched1, e.watched2, e.watched3, e.watched4, e.extra, e.foreignKey )
+plug_objects = plug_objects + plug_objects_tmp
+plug_objects.write_result_file()
# -----------------------------------------------------------------------------
-def service_monitoring_log(primaryId, secondaryId, created, watched1, watched2 = '', watched3 = '', watched4 = '', extra ='', foreignKey ='' ):
-if watched1 == '':
-watched1 = 'null'
-if watched2 == '':
-watched2 = 'null'
-if watched3 == '':
-watched3 = 'null'
-if watched4 == '':
-watched4 = 'null'
-with open(last_run, 'a') as last_run_logfile:
-# https://www.duckduckgo.com|192.168.0.1|2023-01-02 15:56:30|200|0.9898|null|null|Best search engine|null
-last_run_logfile.write("{}|{}|{}|{}|{}|{}|{}|{}|{}\n".format(
-primaryId,
-secondaryId,
-created,
-watched1,
-watched2,
-watched3,
-watched4,
-extra,
-foreignKey
-)
-)
# -----------------------------------------------------------------------------
-def get_entries(newEntries, path):
+def get_entries(path, plug_objects):
# PiHole dhcp.leases format
if 'pihole' in path:
@@ -88,8 +73,17 @@ def get_entries(newEntries, path):
row = line.rstrip().split()
# rows: DHCP_DateTime, DHCP_MAC, DHCP_IP, DHCP_Name, DHCP_MAC2
if len(row) == 5 :
-tmpPlugObj = plugin_object_class(row[1], row[2], 'True', row[3], row[4], 'True', path)
-newEntries.append(tmpPlugObj)
+plug_objects.add_object(
+primaryId = row[1],
+secondaryId = row[2],
+watched1 = 'True',
+watched2 = row[3],
+watched3 = row[4],
+watched4 = 'True',
+extra = path,
+foreignKey = row[1]
+)
# Generic dhcp.leases format
else:
@@ -97,28 +91,19 @@ def get_entries(newEntries, path):
leasesList = leases.get()
for lease in leasesList:
-tmpPlugObj = plugin_object_class(lease.ethernet, lease.ip, lease.active, lease.hostname, lease.hardware, lease.binding_state, path)
-newEntries.append(tmpPlugObj)
+plug_objects.add_object(
+primaryId = lease.ethernet,
+secondaryId = lease.ip,
+watched1 = lease.active,
+watched2 = lease.hostname,
+watched3 = lease.hardware,
+watched4 = lease.binding_state,
+extra = path,
+foreignKey = lease.ethernet
+)
-return newEntries
+return plug_objects
# -------------------------------------------------------------------
-class plugin_object_class:
-def __init__(self, primaryId = '',secondaryId = '', watched1 = '',watched2 = '',watched3 = '',watched4 = '',extra = '',foreignKey = ''):
-self.pluginPref = ''
-self.primaryId = primaryId
-self.secondaryId = secondaryId
-self.created = strftime("%Y-%m-%d %H:%M:%S")
-self.changed = ''
-self.watched1 = watched1
-self.watched2 = watched2
-self.watched3 = watched3
-self.watched4 = watched4
-self.status = ''
-self.extra = extra
-self.userData = ''
-self.foreignKey = foreignKey
#===============================================================================
# BEGIN
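
Note: this file shows the core of the cleanup: the per-plugin plugin_object_class and service_monitoring_log helpers are replaced by plugin_helper's Plugin_Objects. A minimal sketch of the new pattern, using only the calls visible in the diff (the example values and the pipe-delimited output line are illustrative; the helper's internals are assumed):

from plugin_helper import Plugin_Objects

plug_objects = Plugin_Objects(RESULT_FILE)

# one call per discovered lease, mirroring the removed service_monitoring_log arguments
plug_objects.add_object(
    primaryId   = 'aa:bb:cc:dd:ee:ff',        # MAC
    secondaryId = '192.168.1.2',              # IP
    watched1    = 'True',
    watched2    = 'hostname',
    watched3    = 'ethernet',
    watched4    = 'active',
    extra       = '/etc/pihole/dhcp.leases',  # source leases file
    foreignKey  = 'aa:bb:cc:dd:ee:ff'
)

# writes pipe-delimited rows to last_result.log, e.g.:
# aa:bb:cc:dd:ee:ff|192.168.1.2|2023-09-01 08:04:14|True|hostname|ethernet|active|/etc/pihole/dhcp.leases|aa:bb:cc:dd:ee:ff
plug_objects.write_result_file()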

View File

@@ -173,115 +173,5 @@ def performNmapScan(deviceIPs, deviceMACs, timeoutSec, args):
if __name__ == '__main__':
main()
-# def process_discovered_ports(db, device, discoveredPorts):
-# """
-# process ports discovered by nmap
-# compare to previosu ports
-# update DB
-# raise notifications
-# """
-# sql = db.sql # TO-DO
-# # previous Nmap Entries
-# oldEntries = []
-# changedPortsTmp = []
-# mylog('verbose', ['[NMAP Scan] Process ports found by NMAP: ', len(discoveredPorts)])
-# if len(discoveredPorts) > 0:
-# # get all current NMAP ports from the DB
-# rows = db.read(sql_nmap_scan_all)
-# for row in rows:
-# # only collect entries matching the current MAC address
-# if row["MAC"] == device["dev_MAC"]:
-# oldEntries.append(nmap_entry(row["MAC"], row["Time"], row["Port"], row["State"], row["Service"], device["dev_Name"], row["Extra"], row["Index"]))
-# newEntries = []
-# # Collect all entries that don't match the ones in the DB
-# for discoveredPort in discoveredPorts:
-# found = False
-# # Check the new entry is already available in oldEntries and remove from processing if yes
-# for oldEntry in oldEntries:
-# if discoveredPort.hash == oldEntry.hash:
-# found = True
-# if not found:
-# newEntries.append(discoveredPort)
-# mylog('verbose', ['[NMAP Scan] Nmap newly discovered or changed ports: ', len(newEntries)])
-# # collect new ports, find the corresponding old entry and return for notification purposes
-# # also update the DB with the new values after deleting the old ones
-# if len(newEntries) > 0:
-# # params to build the SQL query
-# params = []
-# indexesToDelete = ""
-# # Find old entry matching the new entry hash
-# for newEntry in newEntries:
-# foundEntry = None
-# for oldEntry in oldEntries:
-# if oldEntry.hash == newEntry.hash:
-# indexesToDelete = indexesToDelete + str(oldEntry.index) + ','
-# foundEntry = oldEntry
-# columnNames = ["Name", "MAC", "Port", "State", "Service", "Extra", "NewOrOld" ]
-# # Old entry found
-# if foundEntry is not None:
-# # Build params for sql query
-# params.append((newEntry.mac, newEntry.time, newEntry.port, newEntry.state, newEntry.service, oldEntry.extra))
-# # Build JSON for API and notifications
-# changedPortsTmp.append({
-# "Name" : foundEntry.name,
-# "MAC" : newEntry.mac,
-# "Port" : newEntry.port,
-# "State" : newEntry.state,
-# "Service" : newEntry.service,
-# "Extra" : foundEntry.extra,
-# "NewOrOld" : "New values"
-# })
-# changedPortsTmp.append({
-# "Name" : foundEntry.name,
-# "MAC" : foundEntry.mac,
-# "Port" : foundEntry.port,
-# "State" : foundEntry.state,
-# "Service" : foundEntry.service,
-# "Extra" : foundEntry.extra,
-# "NewOrOld" : "Old values"
-# })
-# # New entry - no matching Old entry found
-# else:
-# # Build params for sql query
-# params.append((newEntry.mac, newEntry.time, newEntry.port, newEntry.state, newEntry.service, ''))
-# # Build JSON for API and notifications
-# changedPortsTmp.append({
-# "Name" : "New device",
-# "MAC" : newEntry.mac,
-# "Port" : newEntry.port,
-# "State" : newEntry.state,
-# "Service" : newEntry.service,
-# "Extra" : "",
-# "NewOrOld" : "New device"
-# })
-# conf.changedPorts_json_struc = json_struc({ "data" : changedPortsTmp}, columnNames)
-# # Delete old entries if available
-# if len(indexesToDelete) > 0:
-# sql.execute ("DELETE FROM Nmap_Scan where \"Index\" in (" + indexesToDelete[:-1] +")")
-# db.commitDB()
-# # Insert new values into the DB
-# sql.executemany ("""INSERT INTO Nmap_Scan ("MAC", "Time", "Port", "State", "Service", "Extra") VALUES (?, ?, ?, ?, ?, ?)""", params)
-# db.commitDB()

View File

@@ -40,13 +40,13 @@ def main():
plug_objects = Plugin_Objects(RESULT_FILE)
# Print a message to indicate that the script is starting.
-mylog('verbose',['[Pholus] In script'])
+mylog('verbose',['[PHOLUS] In script'])
# Assuming 'values' is a dictionary or object that contains a key 'userSubnets'
# which holds a list of user-submitted subnets.
# Printing the userSubnets list to check its content.
-mylog('verbose',['[Pholus] Subnets: ', values.userSubnets])
-mylog('verbose',['[Pholus] len Subnets: ', len(values.userSubnets)])
+mylog('verbose',['[PHOLUS] Subnets: ', values.userSubnets])
+mylog('verbose',['[PHOLUS] len Subnets: ', len(values.userSubnets)])
# Extract the base64-encoded subnet information from the first element of the userSubnets list.
# The format of the element is assumed to be like 'userSubnets=b<base64-encoded-data>'.
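
Note: the comment above describes the subnet argument as 'userSubnets=b<base64-encoded-data>'. A sketch of decoding such a value with the standard library (the plugins import a decodeBase64 helper from plugin_helper for this; its internals, and the sample value below, are assumptions):

import base64

raw = 'userSubnets=bMTkyLjE2OC4xLjAvMjQ='           # hypothetical input
encoded = raw.split('=b', 1)[1]                     # 'MTkyLjE2OC4xLjAvMjQ='
subnet = base64.b64decode(encoded).decode('utf-8')  # '192.168.1.0/24'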

View File

@@ -19,28 +19,35 @@ from requests.packages.urllib3.exceptions import InsecureRequestWarning
import pwd
import os
sys.path.append("/home/pi/pialert/front/plugins")
sys.path.append('/home/pi/pialert/pialert')
from plugin_helper import Plugin_Object, Plugin_Objects, decodeBase64
from logger import mylog, append_line_to_file
from helper import timeNowTZ
from const import logPath, pialertPath
-curPath = str(pathlib.Path(__file__).parent.resolve())
-log_file = curPath + '/script.log'
-last_run = curPath + '/last_result.log'
+CUR_PATH = str(pathlib.Path(__file__).parent.resolve())
+LOG_FILE = os.path.join(CUR_PATH, 'script.log')
+RESULT_FILE = os.path.join(CUR_PATH, 'last_result.log')
# Workflow
def main():
mylog('verbose', ['[SNMPDSC] In script '])
# init global variables
global ROUTERS
# empty file
-open(last_run , 'w').close()
-last_run_logfile = open(last_run, 'a')
+open(RESULT_FILE , 'w').close()
+last_run_logfile = open(RESULT_FILE, 'a')
parser = argparse.ArgumentParser(description='This plugin is used to discover devices via the arp table(s) of a RFC1213 compliant router or switch.')
parser.add_argument('routers', action="store", help="IP(s) of routers, separated by comma (,) if passing multiple")
values = parser.parse_args()
# parse output
@@ -49,11 +56,13 @@ def main():
if values.routers:
ROUTERS = values.routers.split('=')[1].replace('\'','')
newEntries = get_entries(newEntries)
+mylog('verbose', ['[SNMPDSC] Entries found: ', len(newEntries)])
for e in newEntries:
# Insert list into the log
service_monitoring_log(e.primaryId, e.secondaryId, e.created, e.watched1, e.watched2, e.watched3, e.watched4, e.extra, e.foreignKey )
# -----------------------------------------------------------------------------
@@ -72,7 +81,7 @@ def get_entries(newEntries):
for router in routers:
# snmpwalk -v 2c -c public -OXsq 192.168.1.1 .1.3.6.1.2.1.3.1.1.2
-print(router)
+mylog('verbose', ['[SNMPDSC] Router snmpwalk command: ', router])
timeoutSec = 10
@@ -88,7 +97,7 @@ def get_entries(newEntries):
# Process outputs
# Sample: iso.3.6.1.2.1.3.1.1.2.3.1.192.168.1.2 "6C 6C 6C 6C 6C 6C "
-with open(log_file, 'a') as run_logfile:
+with open(LOG_FILE, 'a') as run_logfile:
for line in newLines:
# debug
@@ -149,7 +158,7 @@ def service_monitoring_log(primaryId, secondaryId, created, watched1, watched2 =
if foreignKey == '':
foreignKey = 'null'
-with open(last_run, 'a') as last_run_logfile:
+with open(RESULT_FILE, 'a') as last_run_logfile:
last_run_logfile.write("{}|{}|{}|{}|{}|{}|{}|{}|{}\n".format(
primaryId,
secondaryId,
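
Note: per the sample line above, each snmpwalk row encodes the device IP as the last four components of the OID and the MAC as the quoted hex bytes. A sketch of parsing one such line (the regex is an assumption; the plugin's actual parsing may differ):

import re

line = 'iso.3.6.1.2.1.3.1.1.2.3.1.192.168.1.2 "6C 6C 6C 6C 6C 6C "'
match = re.match(r'\S+\.(\d+\.\d+\.\d+\.\d+)\s+"([0-9A-Fa-f ]+)"', line)
if match:
    ip  = match.group(1)                    # '192.168.1.2'
    mac = ':'.join(match.group(2).split())  # '6C:6C:6C:6C:6C:6C'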

View File

@@ -7,14 +7,16 @@ import argparse
import sys
sys.path.append("/home/pi/pialert/front/plugins")
sys.path.append('/home/pi/pialert/pialert')
-from plugin_helper import Plugin_Objects
+from plugin_helper import Plugin_Object, Plugin_Objects, decodeBase64
from logger import mylog, append_line_to_file
from helper import timeNowTZ
from const import logPath, pialertPath
CUR_PATH = str(pathlib.Path(__file__).parent.resolve())
-LOG_FILE = os.path.join(CUR_PATH , 'script.log')
-RESULT_FILE = os.path.join(CUR_PATH , 'last_result.log')
+LOG_FILE = os.path.join(CUR_PATH, 'script.log')
+RESULT_FILE = os.path.join(CUR_PATH, 'last_result.log')
def main():
@@ -23,6 +25,8 @@ def main():
parser.add_argument('devices', action="store", help="list of device names separated by ','")
values = parser.parse_args()
+mylog('verbose', ['[UNDIS] In script'])
+UNDIS_devices = Plugin_Objects( RESULT_FILE )
if values.devices:
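
Note: the new lines initialize UNDIS_devices as a Plugin_Objects collection before the devices argument is processed. A hypothetical continuation, following the add_object pattern from the dhcp.leases plugin above (the field mapping is an assumption, not shown in this diff):

for device in values.devices.split('=')[1].split(','):
    UNDIS_devices.add_object(
        primaryId   = device,  # assumed: device name as primary ID
        secondaryId = 'null',
        watched1    = 'null',
        watched2    = 'null',
        watched3    = 'null',
        watched4    = 'null',
        extra       = 'null',
        foreignKey  = device
    )
UNDIS_devices.write_result_file()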