Merge branch 'jokob-sk:main' into Pi.Alert-fixes-improvements-systeminfo-v2

This commit is contained in:
Carlos V
2023-08-09 00:27:17 +02:00
committed by GitHub
17 changed files with 130 additions and 241 deletions

View File

@@ -47,7 +47,7 @@ docker run -d --rm --network=host \
| **Required** | `:/home/pi/pialert/db` | Folder which will contain the `pialert.db` file |
|Optional| `:/home/pi/pialert/front/log` | Logs folder useful for debugging if you have issues setting up the container |
|Optional| `:/etc/pihole/pihole-FTL.db` | PiHole's `pihole-FTL.db` database file. Required if you want to use PiHole |
|Optional| `:/etc/pihole/dhcp.leases` | PiHole's `dhcp.leases` file. Required if you want to use PiHole |
|Optional| `:/etc/pihole/dhcp.leases` | PiHole's `dhcp.leases` file. Required if you want to import devices from PiHole's `dhcp.leases` file. This has to be matched with a corresponding `DHCPLSS_paths_to_check` setting entry (the path in the container must contain `pihole`). |
|Optional| `:/home/pi/pialert/front/api` | A simple [API endpoint](https://github.com/jokob-sk/Pi.Alert/blob/main/docs/API.md) containing static (but regularly updated) json and other files. |
@@ -65,10 +65,12 @@ These are the most important settings to get at least some output in your Device
- ❗ To use the arp-scan method, you need to set the `SCAN_SUBNETS` variable. See the documentation on how [to setup SUBNETS, VLANs & limitations](https://github.com/jokob-sk/Pi.Alert/blob/main/docs/SUBNETS.md)
##### For pihole: PIHOLE_ACTIVE, DHCP_ACTIVE
##### For pihole: PIHOLE_RUN, DHCPLSS_RUN
* `PIHOLE_ACTIVE`: You need to map `:/etc/pihole/pihole-FTL.db in the docker-compose.yml` file if you enable this setting.
* `DHCP_ACTIVE` : You need to map `:/etc/pihole/dhcp.leases in the docker-compose.yml` file if you enable this setting.
* `PIHOLE_RUN`: You need to map `:/etc/pihole/pihole-FTL.db` in the `docker-compose.yml` file if you enable this setting.
* `DHCPLSS_RUN`: You need to map `:/etc/pihole/dhcp.leases` in the `docker-compose.yml` file if you enable this setting. This has to be matched with a corresponding `DHCPLSS_paths_to_check` setting entry (the path in the container must contain `pihole`). A mapping sketch is shown below.
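
As an illustration, this is roughly how the PiHole-related mappings above could look when starting the container (the host-side db path and the image name are placeholders in this sketch; adjust them to your setup):

```bash
# sketch only: PiAlert db folder plus the two optional PiHole files from the table above
docker run -d --rm --network=host \
  -v /path/to/pialert/db:/home/pi/pialert/db \
  -v /etc/pihole/pihole-FTL.db:/etc/pihole/pihole-FTL.db \
  -v /etc/pihole/dhcp.leases:/etc/pihole/dhcp.leases \
  jokob/pi.alert
```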
> It's recommended to use the same schedule interval for all plugins scanning your network.
### **Common issues**

View File

@@ -427,7 +427,7 @@
"General_display_name" : "General",
"General_icon" : "<i class=\"fa fa-gears\"></i>",
"ENABLE_ARPSCAN_name" : "Enable ARP scan",
"ENABLE_ARPSCAN_description" : "Arp-scan is a command-line tool that uses the ARP protocol to discover and fingerprint IP hosts on the local network. An alternative to ARP scan is to enable the <a onclick=\"toggleAllSettings()\" href=\"#PIHOLE_ACTIVE\"><code>PIHOLE_ACTIVE</code>PiHole integration settings</a>.",
"ENABLE_ARPSCAN_description" : "Arp-scan is a command-line tool that uses the ARP protocol to discover and fingerprint IP hosts on the local network. An alternative to ARP scan is to enable the <a onclick=\"toggleAllSettings()\" href=\"#PIHOLE_RUN\"><code>PIHOLE_RUN</code>PiHole integration settings</a>.",
"SCAN_SUBNETS_name" : "Subnets to scan",
"SCAN_SUBNETS_description" : "The arp-scan time itself depends on the number of IP addresses to check so set this up carefully with the appropriate network mask and interface. Check the <a href=\"https://github.com/jokob-sk/Pi.Alert/blob/main/docs/SUBNETS.md\" target=\"_blank\">subnets documentation</a> for help on setting up VLANs, what VLANs are supported, or how to figure out the network mask and your interface.",
"LOG_LEVEL_name" : "Print additional logging",
@@ -443,9 +443,7 @@
"PIALERT_WEB_PASSWORD_name" : "Login password",
"PIALERT_WEB_PASSWORD_description" : "The default password is <code>123456</code>. To change the password run <code>/home/pi/pialert/back/pialert-cli</code> in the container",
"INCLUDED_SECTIONS_name" : "Notify on",
"INCLUDED_SECTIONS_description" : "Specifies which events trigger notifications. Remove the event type(s) you don not want to get notified on. This setting overrides device-specific settings in the UI. (<code>CTRL + Click</code> to select/deselect).",
"SCAN_CYCLE_MINUTES_name" : "Scan cycle delay",
"SCAN_CYCLE_MINUTES_description" : "The delay between scans in minutes. Only related to arp-scan, PiHole, DHCP_ACTIVE scans. If using arp-scan, the scan time itself depends on the number of IP addresses to check. This is influenced by the network mask set in the <a href=\"#SCAN_SUBNETS\"><code>SCAN_SUBNETS</code> setting</a> at the top. Every IP takes a couple seconds to scan.",
"INCLUDED_SECTIONS_description" : "Specifies which events trigger notifications. Remove the event type(s) you don not want to get notified on. This setting overrides device-specific settings in the UI. (<code>CTRL + Click</code> to select/deselect).",
"DAYS_TO_KEEP_EVENTS_name" : "Delete events older than",
"DAYS_TO_KEEP_EVENTS_description" : "This is a maintenance setting. This specifies the number of days worth of event entries that will be kept. All older events will be deleted periodically. Also applies on Plugin Events History.",
"HRS_TO_KEEP_NEWDEV_name" : "Keep new devices for",
@@ -550,11 +548,7 @@
"DDNS_UPDATE_URL_name" : "DynDNS update URL",
"DDNS_UPDATE_URL_description" : "Update URL starting with <code>http://</code> or <code>https://</code>.",
"PiHole_display_name" : "PiHole",
"PiHole_icon" : "<i class=\"fa fa-seedling\"></i>",
"PIHOLE_ACTIVE_name" : "Enable PiHole mapping",
"PIHOLE_ACTIVE_description" : "You need to map<code>:/etc/pihole/pihole-FTL.db</code> in the <code>docker-compose.yml</code> file if you enable this setting.",
"DHCP_ACTIVE_name" : "Enable PiHole DHCP",
"DHCP_ACTIVE_description" : "You need to map <code>:/etc/pihole/dhcp.leases</code> in the <code>docker-compose.yml</code> file if you enable this setting.",
"PiHole_icon" : "<i class=\"fa fa-seedling\"></i>",
"Pholus_display_name" : "Pholus",
"Pholus_icon" : "<i class=\"fa fa-search\"></i>",
"PHOLUS_ACTIVE_name" : "Cycle run",

View File

@@ -414,7 +414,7 @@
"General_display_name" : "General",
"General_icon" : "<i class=\"fa fa-gears\"></i>",
"ENABLE_ARPSCAN_name" : "Activar escaneo ARP",
"ENABLE_ARPSCAN_description" : "El escaneo Arp es una herramienta de la línea de comandos que usa el protocolo ARP para encontrar e identificar la ip de los dispositivos. Una alternativa a este escaneo sería activar los ajustes de la <a onclick=\"toggleAllSettings()\" href=\"#PIHOLE_ACTIVE\"><code>PIHOLE_ACTIVE</code>integración con PiHole</a>.",
"ENABLE_ARPSCAN_description" : "El escaneo Arp es una herramienta de la línea de comandos que usa el protocolo ARP para encontrar e identificar la ip de los dispositivos. Una alternativa a este escaneo sería activar los ajustes de la <a onclick=\"toggleAllSettings()\" href=\"#PIHOLE_RUN\"><code>PIHOLE_RUN</code>integración con PiHole</a>.",
"SCAN_SUBNETS_name" : "Subredes para escanear",
"SCAN_SUBNETS_description" : "El tiempo del escaneo ARP depende del número de ips a comprobar, así que es importante establecer correctamente la máscara y la interfaz de red. Comprueba la <a href=\"https://github.com/jokob-sk/Pi.Alert/blob/main/docs/SUBNETS.md\" target=\"_blank\">documentación sobre sudredes</a> para obtener ayuda para establecer VLANs, cuáles son soportadas o como averiguar la máscara y la interfaz de red.",
"LOG_LEVEL_name" : "Imprimir registros adicionales",
@@ -428,9 +428,7 @@
"PIALERT_WEB_PASSWORD_name" : "Contraseña de inicio de sesión",
"PIALERT_WEB_PASSWORD_description" : "La contraseña predeterminada es <code>123456</code>. Para cambiar la contraseña, ejecute <code>/home/pi/pialert/back/pialert-cli</code> en el contenedor",
"INCLUDED_SECTIONS_name" : "Notificar en",
"INCLUDED_SECTIONS_description" : "Especifica que eventos envían notificaciones. Elimina los tipos de eventos de los que no quieras recibir notificaciones. Este ajuste sobreescribe los ajustes específicos de los dispositivos en la interfaz. (<code>CTRL + Clic</code> para seleccionar / deseleccionar).",
"SCAN_CYCLE_MINUTES_name" : "Retraso del ciclo de escaneo",
"SCAN_CYCLE_MINUTES_description" : "El retraso entre escaneos. Si usa arp-scan, el tiempo de escaneo en sí depende de la cantidad de direcciones IP para verificar. Esto está influenciado por la máscara de red configurada en la configuración <a href=\"#SCAN_SUBNETS\"><code>SCAN_SUBNETS</code></a> en la parte superior. Cada IP toma un par de segundos para escanear.",
"INCLUDED_SECTIONS_description" : "Especifica que eventos envían notificaciones. Elimina los tipos de eventos de los que no quieras recibir notificaciones. Este ajuste sobreescribe los ajustes específicos de los dispositivos en la interfaz. (<code>CTRL + Clic</code> para seleccionar / deseleccionar).",
"DAYS_TO_KEEP_EVENTS_name" : "Eliminar eventos anteriores a",
"DAYS_TO_KEEP_EVENTS_description" : "Esta es una configuración de mantenimiento. Esto especifica el número de días de entradas de eventos que se guardarán. Todos los eventos anteriores se eliminarán periódicamente.",
"REPORT_DASHBOARD_URL_name" : "URL de Pi.Alert",
@@ -532,11 +530,7 @@
"DDNS_UPDATE_URL_name" : "URL de actualización de DynDNS",
"DDNS_UPDATE_URL_description" : "Actualice la URL que comienza con <code>http://</code> o <code>https://</code>.",
"PiHole_display_name" : "PiHole",
"PiHole_icon" : "<i class=\"fa fa-seedling\"></i>",
"PIHOLE_ACTIVE_name" : "Habilitar el mapeo de PiHole",
"PIHOLE_ACTIVE_description" : "Debe mapear <code>:/etc/pihole/pihole-FTL.db</code> en el archivo <code>docker-compose.yml</code> si habilita esta configuración.",
"DHCP_ACTIVE_name" : "Habilitar PiHole DHCP",
"DHCP_ACTIVE_description" : "Debe asignar <code>:/etc/pihole/dhcp.leases</code> en el archivo <code>docker-compose.yml</code> si habilita esta configuración.",
"PiHole_icon" : "<i class=\"fa fa-seedling\"></i>",
"Pholus_display_name" : "Pholus",
"Pholus_icon" : "<i class=\"fa fa-search\"></i>",
"PHOLUS_ACTIVE_name" : "Ejecución del ciclo",

View File

@@ -95,11 +95,12 @@ More on specifics below.
## Supported data sources
Currently, only 3 data sources are supported (valid `data_source` value).
Currently, these data sources are supported (valid `data_source` values):
- Script (`script`)
- SQL query on the PiAlert database (`pialert-db-query`)
- Template (`template`)
- External SQLite database (`sqlite-db-query`)
> 🔎Example
>```json
@@ -200,6 +201,44 @@ This SQL query is executed on the `pialert.db` SQLite database file.
Used to initialize internal settings. Check the `newdev_template` plugin for details.
### "data_source": "sqlite-db-query"
You can execute a SQL query on an external database connected to the current PiAlert database via a temporary `EXTERNAL.` prefix. The external SQLite database file has to be mapped in the container to the path specified in the `db_path` property:
```json
...
"data_source": "sqlite-db-query",
"data_source_settings":
{
"db_path":"/etc/pihole/pihole-FTL.db"
},
...
```
The actual SQL query you want to execute is then stored as a `CMD` setting, similar to the `pialert-db-query` plugin type.
> 🔎Example
>
> Notice the `EXTERNAL.` prefix.
>
>```json
>{
> "function": "CMD",
> "type": "text",
> "default_value":"SELECT hwaddr as Object_PrimaryID, cast('http://' || (SELECT ip FROM EXTERNAL.network_addresses WHERE network_id = id ORDER BY lastseen DESC, ip LIMIT 1) as VARCHAR(100)) || ':' || cast( SUBSTR((SELECT name FROM EXTERNAL.network_addresses WHERE network_id = id ORDER BY lastseen DESC, ip LIMIT 1), 0, INSTR((SELECT name FROM EXTERNAL.network_addresses WHERE network_id = id ORDER BY lastseen DESC, ip LIMIT 1), '/')) as VARCHAR(100)) as Object_SecondaryID, datetime() as DateTime, macVendor as Watched_Value1, lastQuery as Watched_Value2, (SELECT name FROM EXTERNAL.network_addresses WHERE network_id = id ORDER BY lastseen DESC, ip LIMIT 1) as Watched_Value3, 'null' as Watched_Value4, '' as Extra, hwaddr as ForeignKey FROM EXTERNAL.network WHERE hwaddr NOT LIKE 'ip-%' AND hwaddr <> '00:00:00:00:00:00'; ",
> "options": [],
> "localized": ["name", "description"],
> "name" : [{
> "language_code":"en_us",
> "string" : "SQL to run"
> }],
> "description": [{
> "language_code":"en_us",
> "string" : "This SQL query is used to populate the coresponding UI tables under the Plugins section. This particular one selects data from a mapped PiHole SQLite database and maps it to the corresponding Plugin columns."
> }]
> }
> ```
## 🕳 Filters
Plugin entries can be filtered based on values entered into filter fields. The `txtMacFilter` textbox/field contains the MAC address of the currently viewed device or simply a MAC address that's available in the `mac` query string.
@@ -355,7 +394,7 @@ Below are some general additional notes, when defining `params`:
- `"name":"name_value"` - is used as a wildcard replacement in the `CMD` setting value by using curly brackets `{name_value}`. The wildcard is replaced by the result of the `"value" : "param_value"` and `"type":"type_value"` combo configuration below.
- `"type":"<sql|setting>"` - is used to specify the type of the params, currently only 2 supported (`sql`,`setting`).
- `"type":"sql"` - will execute the SQL query specified in the `value` property. The sql query needs to return only one column. The column is flattened and separated by commas (`,`), e.g: `SELECT dev_MAC from DEVICES` -> `Internet,74:ac:74:ac:74:ac,44:44:74:ac:74:ac`. This is then used to replace the wildcards in the `CMD` setting.
- `"type":"setting"` - The setting code name. A combination of the value from `unique_prefix` + `_` + `function` value, or otherwise the code name you can find in the Settings page under the Setting display name, e.g. `SCAN_CYCLE_MINUTES`.
- `"type":"setting"` - The setting code name. A combination of the value from `unique_prefix` + `_` + `function` value, or otherwise the code name you can find in the Settings page under the Setting display name, e.g. `PIHOLE_RUN`.
- `"value" : "param_value"` - Needs to contain a setting code name or SQL query without wildcards.

View File

@@ -107,7 +107,7 @@
}],
"description": [{
"language_code":"en_us",
"string" : "Only enabled if you select <code>schedule</code> in the <a href=\"#ARPSCAN_RUN\"><code>ARPSCAN_RUN</code> setting</a>. Make sure you enter the schedule in the correct cron-like format (e.g. validate at <a href=\"https://crontab.guru/\" target=\"_blank\">crontab.guru</a>). For example entering <code>*/3 * * * *</code> will run the scan every 3 minutes. Will be run NEXT time the time passes."
"string" : "Only enabled if you select <code>schedule</code> in the <a href=\"#ARPSCAN_RUN\"><code>ARPSCAN_RUN</code> setting</a>. Make sure you enter the schedule in the correct cron-like format (e.g. validate at <a href=\"https://crontab.guru/\" target=\"_blank\">crontab.guru</a>). For example entering <code>*/3 * * * *</code> will run the scan every 3 minutes. Will be run NEXT time the time passes. <br/> It's recommended to use the same schedule interval for all plugins scanning your network."
}]
},
{

View File

@@ -264,7 +264,7 @@
}],
"description": [{
"language_code":"en_us",
"string" : "Add all dhcp.leases mapped paths to watch. Enter full path within the container, e.g. <code>/mnt/dhcp2.leases</code>. You must map these files accordingly in your <code>docker-compose.yml</code> file."
"string" : "Add all dhcp.leases mapped paths to watch. Enter full path within the container, e.g. <code>/mnt/dhcp2.leases</code>. You must map these files accordingly in your <code>docker-compose.yml</code> file. (If you are mapping a PiHole dhcp.leases file the path in the container must contain <code>pihole</code>, e.g.: <code>:/etc/pihole/dhcp.leases</code>)"
}]
},
{

View File

@@ -39,8 +39,7 @@ def main():
    newEntries = []

    if values.paths:
        for path in values.paths.split('=')[1].split(','):
            newEntries = get_entries(newEntries, path)
@@ -79,15 +78,28 @@ def service_monitoring_log(primaryId, secondaryId, created, watched1, watched2 =
# -----------------------------------------------------------------------------
def get_entries(newEntries, path):

    # PiHole dhcp.leases format
    if 'pihole' in path:
        # read the mapped PiHole leases file directly
        with open(path, 'r') as f:
            for line in f:
                row = line.rstrip().split()
                # rows: DHCP_DateTime, DHCP_MAC, DHCP_IP, DHCP_Name, DHCP_MAC2
                if len(row) == 5 :
                    tmpPlugObj = plugin_object_class(row[1], row[2], 'True', row[3], row[4], 'True', path)
                    newEntries.append(tmpPlugObj)

    # Generic dhcp.leases format
    else:
        leases = DhcpLeases(path)
        leasesList = leases.get()

        for lease in leasesList:
            tmpPlugObj = plugin_object_class(lease.ethernet, lease.ip, lease.active, lease.hostname, lease.hardware, lease.binding_state, path)
            newEntries.append(tmpPlugObj)

    return newEntries

View File

@@ -45,38 +45,36 @@
}],
"settings": [
{
"function": "RUN",
"type": "text.select",
"default_value":"disabled",
"options": ["disabled", "once", "schedule", "always_after_scan", "on_new_device"],
"localized": ["name", "description"],
"name" :[{
"language_code":"en_us",
"string" : "When to run"
}],
"description": [{
"language_code":"en_us",
"string" : "Specify when your PiHole device import from the PiHole databse will run. The typical setting would be <code>schedule</code> and then you specify a cron-like schedule in the <a href=\"#PIHOLE_RUN_SCHD\"><code>PIHOLE_RUN_SCHD</code>setting</a>. If enabled, you must map the pihole db into your container to the <code>:/etc/pihole/pihole-FTL.db</code> mount path."
}]
},
{
"function": "RUN",
"type": "text.select",
"default_value":"disabled",
"options": ["disabled", "once", "schedule", "always_after_scan", "on_new_device"],
"localized": ["name", "description"],
"name" :[{
"language_code":"en_us",
"string" : "When to run"
}],
"description": [{
"language_code":"en_us",
"string" : "Specify when your PiHole device import from the PiHole databse will run. The typical setting would be <code>schedule</code> and then you specify a cron-like schedule in the <a href=\"#PIHOLE_RUN_SCHD\"><code>PIHOLE_RUN_SCHD</code>setting</a>. If enabled, you must map the pihole db into your container to the <code>/etc/pihole/pihole-FTL.db</code> mount path."
}]
},
{
"function": "CMD",
"type": "text",
"default_value":"SELECT hwaddr as Object_PrimaryID, cast('http://' || (SELECT ip FROM PH.network_addresses WHERE network_id = id ORDER BY lastseen DESC, ip LIMIT 1) as VARCHAR(100)) || ':' || cast( SUBSTR((SELECT name FROM PH.network_addresses WHERE network_id = id ORDER BY lastseen DESC, ip LIMIT 1), 0, INSTR((SELECT name FROM PH.network_addresses WHERE network_id = id ORDER BY lastseen DESC, ip LIMIT 1), '/')) as VARCHAR(100)) as Object_SecondaryID, datetime() as DateTime, macVendor as Watched_Value1, lastQuery as Watched_Value2, (SELECT name FROM PH.network_addresses WHERE network_id = id ORDER BY lastseen DESC, ip LIMIT 1) as Watched_Value3, 'null' as Watched_Value4, '' as Extra, hwaddr as ForeignKey FROM PH.network WHERE hwaddr NOT LIKE 'ip-%' AND hwaddr <> '00:00:00:00:00:00'; ",
"options": [],
"localized": ["name", "description"],
"name" : [{
"language_code":"en_us",
"string" : "SQL to run"
}],
"description": [{
"language_code":"en_us",
"string" : "This SQL query is used to populate the coresponding UI tables under the Plugins section."
}]
},
"function": "CMD",
"type": "text",
"default_value":"SELECT hwaddr as Object_PrimaryID, cast('http://' || (SELECT ip FROM EXTERNAL.network_addresses WHERE network_id = id ORDER BY lastseen DESC, ip LIMIT 1) as VARCHAR(100)) || ':' || cast( SUBSTR((SELECT name FROM EXTERNAL.network_addresses WHERE network_id = id ORDER BY lastseen DESC, ip LIMIT 1), 0, INSTR((SELECT name FROM EXTERNAL.network_addresses WHERE network_id = id ORDER BY lastseen DESC, ip LIMIT 1), '/')) as VARCHAR(100)) as Object_SecondaryID, datetime() as DateTime, macVendor as Watched_Value1, lastQuery as Watched_Value2, (SELECT name FROM EXTERNAL.network_addresses WHERE network_id = id ORDER BY lastseen DESC, ip LIMIT 1) as Watched_Value3, 'null' as Watched_Value4, '' as Extra, hwaddr as ForeignKey FROM EXTERNAL.network WHERE hwaddr NOT LIKE 'ip-%' AND hwaddr <> '00:00:00:00:00:00'; ",
"options": [],
"localized": ["name", "description"],
"name" : [{
"language_code":"en_us",
"string" : "SQL to run"
}],
"description": [{
"language_code":"en_us",
"string" : "This SQL query is used to populate the coresponding UI tables under the Plugins section. This particular one selects data from a mapped PiHole SQLite database and maps it to the corresponding Plugin columns."
}]
},
{
"function": "RUN_TIMEOUT",
"type": "integer",
@@ -108,7 +106,7 @@
}],
"description": [{
"language_code":"en_us",
"string" : "Only enabled if you select <code>schedule</code> in the <a href=\"#ARPSCAN_RUN\"><code>ARPSCAN_RUN</code> setting</a>. Make sure you enter the schedule in the correct cron-like format (e.g. validate at <a href=\"https://crontab.guru/\" target=\"_blank\">crontab.guru</a>). For example entering <code>*/30 * * * *</code> will run the scan every 30 minutes. Will be run NEXT time the time passes."
"string" : "Only enabled if you select <code>schedule</code> in the <a href=\"#ARPSCAN_RUN\"><code>ARPSCAN_RUN</code> setting</a>. Make sure you enter the schedule in the correct cron-like format (e.g. validate at <a href=\"https://crontab.guru/\" target=\"_blank\">crontab.guru</a>). For example entering <code>*/30 * * * *</code> will run the scan every 30 minutes. Will be run NEXT time the time passes. <br/> It's recommended to use the same schedule interval for all plugins scanning your network."
}]
},
{

View File

@@ -111,7 +111,7 @@ while ($row = $result -> fetchArray (SQLITE3_ASSOC)) {
const settingGroups = [];
const settingKeyOfLists = [];
// core groups are the ones not generated by plugins
const settingCoreGroups = ['General', 'NewDeviceDefaults', 'Email', 'Webhooks', 'Apprise', 'NTFY', 'PUSHSAFER', 'MQTT', 'DynDNS', 'PiHole', 'Pholus', 'Nmap', 'API'];
const settingCoreGroups = ['General', 'NewDeviceDefaults', 'Email', 'Webhooks', 'Apprise', 'NTFY', 'PUSHSAFER', 'MQTT', 'DynDNS', 'Pholus', 'Nmap', 'API'];
// Loop through the settingsArray and collect unique settingGroups

View File

@@ -200,39 +200,9 @@ def main ():
if run:
nmapSchedule.last_run = timeNowTZ()
performNmapScan(db, get_all_devices(db))
# todo replace the scans with plugins
# Perform a network scan via arp-scan or pihole
# if last_network_scan + datetime.timedelta(minutes=conf.SCAN_CYCLE_MINUTES) < loop_start_time:
# last_network_scan = loop_start_time
# conf.cycle = 1 # network scan
# mylog('verbose', ['[MAIN] cycle:',conf.cycle])
# updateState(db,"Scan: Network")
# # scan_network()
# # DEBUG start ++++++++++++++++++++++++++++++++++++++++++++++++++++++
# # Start scan_network as a process
# p = multiprocessing.Process(target=scan_network(db))
# p.start()
# # Wait for a maximum of 3600 seconds (1h) or until process finishes
# p.join(3600)
# # If thread is still active
# if p.is_alive():
# mylog('none', "[MAIN] scan_network running too long - let\'s kill it")
# # Terminate - may not work if process is stuck for good
# p.terminate()
# # OR Kill - will work for sure, no chance for process to finish nicely however
# # p.kill()
# p.join()
# # DEBUG end ++++++++++++++++++++++++++++++++++++++++++++++++++++++
# # Run splugin scripts which are set to run every timne after a scan finished
# Run plugin scripts which are set to run every time after a scan finishes
if conf.ENABLE_PLUGINS:
run_plugin_scripts(db,'always_after_scan')

View File

@@ -47,7 +47,6 @@ ENABLE_PLUGINS = True
PIALERT_WEB_PROTECTION = False
PIALERT_WEB_PASSWORD = '8d969eef6ecad3c29a3a629280e686cf0c3f5d5a86aff3ca12020c923adc6c92'
INCLUDED_SECTIONS = ['internet', 'new_devices', 'down_devices', 'events', 'ports']
SCAN_CYCLE_MINUTES = 5
DAYS_TO_KEEP_EVENTS = 90
REPORT_DASHBOARD_URL = 'http://pi.alert/'
DIG_GET_IP_ARG = '-4 myip.opendns.com @resolver1.opendns.com'
@@ -107,10 +106,6 @@ DDNS_USER = 'dynu_user'
DDNS_PASSWORD = 'A0000000B0000000C0000000D0000000'
DDNS_UPDATE_URL = 'https://api.dynu.com/nic/update?'
# PiHole
PIHOLE_ACTIVE = False
DHCP_ACTIVE = False
# PHOLUS
PHOLUS_ACTIVE = False
PHOLUS_TIMEOUT = 20

View File

@@ -62,10 +62,12 @@ def save_scanned_devices (db):
    if check_IP_format(local_ip) == '':
        local_ip = '0.0.0.0'

    # Proceed if variable contains valid MAC
    if check_mac_or_internet(local_mac):
        # Check if local mac has been detected with other methods
        sql.execute (f"SELECT COUNT(*) FROM CurrentScan WHERE cur_MAC = '{local_mac}'")
        if sql.fetchone()[0] == 0 :
            sql.execute (f"""INSERT INTO CurrentScan (cur_ScanCycle, cur_MAC, cur_IP, cur_Vendor, cur_ScanMethod) VALUES ( 1, '{local_mac}', '{local_ip}', Null, 'local_MAC') """)
#-------------------------------------------------------------------------------
def print_scan_stats (db):
@@ -415,3 +417,16 @@ def update_devices_names (db):
# update names of devices which we were able to resolve
sql.executemany ("UPDATE Devices SET dev_Name = ? WHERE dev_MAC = ? ", recordsToUpdate )
db.commitDB()
#-------------------------------------------------------------------------------
# Check if the variable contains a valid MAC address or "Internet"
def check_mac_or_internet(input_str):
# Regular expression pattern for matching a MAC address
mac_pattern = r'^([0-9A-Fa-f]{2}[:-]){5}([0-9A-Fa-f]{2})$'
if input_str.lower() == 'internet':
return True
elif re.match(mac_pattern, input_str):
return True
else:
return False

View File

@@ -90,8 +90,7 @@ def importConfigs (db):
conf.PLUGINS_KEEP_HIST = ccd('PLUGINS_KEEP_HIST', 10000 , c_d, 'Keep history entries', 'integer', '', 'General')
conf.PIALERT_WEB_PROTECTION = ccd('PIALERT_WEB_PROTECTION', False , c_d, 'Enable logon', 'boolean', '', 'General')
conf.PIALERT_WEB_PASSWORD = ccd('PIALERT_WEB_PASSWORD', '8d969eef6ecad3c29a3a629280e686cf0c3f5d5a86aff3ca12020c923adc6c92' , c_d, 'Logon password', 'readonly', '', 'General')
conf.INCLUDED_SECTIONS = ccd('INCLUDED_SECTIONS', ['internet', 'new_devices', 'down_devices', 'events', 'ports'] , c_d, 'Notify on', 'text.multiselect', "['internet', 'new_devices', 'down_devices', 'events', 'ports', 'plugins']", 'General')
conf.SCAN_CYCLE_MINUTES = ccd('SCAN_CYCLE_MINUTES', 5 , c_d, 'Scan cycle delay (m)', 'integer', '', 'General')
conf.INCLUDED_SECTIONS = ccd('INCLUDED_SECTIONS', ['internet', 'new_devices', 'down_devices', 'events', 'ports'] , c_d, 'Notify on', 'text.multiselect', "['internet', 'new_devices', 'down_devices', 'events', 'ports', 'plugins']", 'General')
conf.REPORT_DASHBOARD_URL = ccd('REPORT_DASHBOARD_URL', 'http://pi.alert/' , c_d, 'PiAlert URL', 'text', '', 'General')
conf.DIG_GET_IP_ARG = ccd('DIG_GET_IP_ARG', '-4 myip.opendns.com @resolver1.opendns.com' , c_d, 'DIG arguments', 'text', '', 'General')
conf.UI_LANG = ccd('UI_LANG', 'English' , c_d, 'Language Interface', 'text.select', "['English', 'German', 'Spanish']", 'General')
@@ -155,10 +154,6 @@ def importConfigs (db):
conf.DDNS_PASSWORD = ccd('DDNS_PASSWORD', 'A0000000B0000000C0000000D0000000' , c_d, 'DynDNS password', 'password', '', 'DynDNS')
conf.DDNS_UPDATE_URL = ccd('DDNS_UPDATE_URL', 'https://api.dynu.com/nic/update?' , c_d, 'DynDNS update URL', 'text', '', 'DynDNS')
# PiHole
conf.PIHOLE_ACTIVE = ccd('PIHOLE_ACTIVE', False, c_d, 'Enable PiHole mapping', 'boolean', '', 'PIHOLE')
conf.DHCP_ACTIVE = ccd('DHCP_ACTIVE', False , c_d, 'Enable PiHole DHCP', 'boolean', '', 'PIHOLE')
# PHOLUS
conf.PHOLUS_ACTIVE = ccd('PHOLUS_ACTIVE', False , c_d, 'Enable Pholus scans', 'boolean', '', 'Pholus')
conf.PHOLUS_TIMEOUT = ccd('PHOLUS_TIMEOUT', 20 , c_d, 'Pholus timeout', 'integer', '', 'Pholus')

View File

@@ -2,7 +2,6 @@
import conf
from scanners.pihole import copy_pihole_network, read_DHCP_leases
from database import insertOnlineHistory
from device import create_new_devices, print_scan_stats, save_scanned_devices, update_devices_data_from_scan, update_devices_names
from helper import timeNowTZ
@@ -15,32 +14,6 @@ from reporting import skip_repeated_notifications
# SCAN NETWORK
#===============================================================================
def scan_network (db):
sql = db.sql #TO-DO
# Header
# moved updateState to main loop
# updateState(db,"Scan: Network")
mylog('verbose', ['[Network Scan] Scan Devices:' ])
db.commitDB()
# Pi-hole method
if conf.PIHOLE_ACTIVE :
mylog('verbose','[Network Scan] Pi-hole start')
copy_pihole_network(db)
db.commitDB()
# DHCP Leases method
if conf.DHCP_ACTIVE :
mylog('verbose','[Network Scan] DHCP Leases start')
read_DHCP_leases (db)
db.commitDB()
def process_scan (db):
# Load current scan data

View File

@@ -253,7 +253,7 @@ def execute_plugin(db, plugin):
# try attaching the sqlite DB
try:
sql.execute ("ATTACH DATABASE '"+ fullSqlitePath +"' AS PH")
sql.execute ("ATTACH DATABASE '"+ fullSqlitePath +"' AS EXTERNAL")
except sqlite3.Error as e:
mylog('none',[ '[Plugin] - ATTACH DATABASE failed with SQL ERROR: ', e])
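
For context, a minimal standalone sketch of the attach-then-query flow this `EXTERNAL` alias enables (the paths and the query are illustrative only, not the plugin's actual `CMD`):

```python
import sqlite3

# open the main PiAlert database (container path used in the docs)
con = sqlite3.connect('/home/pi/pialert/db/pialert.db')
sql = con.cursor()

# attach the mapped external SQLite file under the EXTERNAL alias,
# mirroring what execute_plugin() does above
sql.execute("ATTACH DATABASE '/etc/pihole/pihole-FTL.db' AS EXTERNAL")

# queries from a plugin's CMD setting can now reference the attached
# tables with the EXTERNAL. prefix
rows = sql.execute("SELECT hwaddr FROM EXTERNAL.network LIMIT 5").fetchall()
print(rows)

sql.execute("DETACH DATABASE EXTERNAL")
con.close()
```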

View File

@@ -11,7 +11,6 @@ from logger import append_line_to_file, mylog
from const import logPath
# need to find a better way to deal with settings !
#global DDNS_ACTIVE, DDNS_DOMAIN, DDNS_UPDATE_URL, DDNS_USER, DDNS_PASSWORD

View File

@@ -1,97 +0,0 @@
""" module to import db and leases from PiHole """
# TODO remove this file in the future
import sqlite3
import conf
from logger import mylog
piholeDhcpleases = '/etc/pihole/dhcp.leases'
piholeDB = '/etc/pihole/pihole-FTL.db'
#-------------------------------------------------------------------------------
def copy_pihole_network (db):
"""
attach the PiHole Database and copy the PiHole_Network table across into the PiAlert DB
"""
sql = db.sql # TO-DO
# Open Pi-hole DB
mylog('debug', '[PiHole Network] - attach PiHole DB')
try:
sql.execute ("ATTACH DATABASE '"+ piholeDB +"' AS PH")
except sqlite3.Error as e:
mylog('none',[ '[PiHole Network] - SQL ERROR: ', e])
# Copy Pi-hole Network table
try:
sql.execute ("DELETE FROM PiHole_Network")
# just for reporting
new_devices = []
sql.execute ( """SELECT hwaddr, macVendor, lastQuery,
(SELECT name FROM PH.network_addresses
WHERE network_id = id ORDER BY lastseen DESC, ip),
(SELECT ip FROM PH.network_addresses
WHERE network_id = id ORDER BY lastseen DESC, ip)
FROM PH.network
WHERE hwaddr NOT LIKE 'ip-%'
AND hwaddr <> '00:00:00:00:00:00' """)
new_devices = sql.fetchall()
# insert into PiAlert DB
sql.execute ("""INSERT INTO PiHole_Network (PH_MAC, PH_Vendor, PH_LastQuery,
PH_Name, PH_IP)
SELECT hwaddr, macVendor, lastQuery,
(SELECT name FROM PH.network_addresses
WHERE network_id = id ORDER BY lastseen DESC, ip),
(SELECT ip FROM PH.network_addresses
WHERE network_id = id ORDER BY lastseen DESC, ip)
FROM PH.network
WHERE hwaddr NOT LIKE 'ip-%'
AND hwaddr <> '00:00:00:00:00:00' """)
sql.execute ("""UPDATE PiHole_Network SET PH_Name = '(unknown)'
WHERE PH_Name IS NULL OR PH_Name = '' """)
# Close Pi-hole DB
sql.execute ("DETACH PH")
except sqlite3.Error as e:
mylog('none',[ '[PiHole Network] - SQL ERROR: ', e])
db.commitDB()
mylog('debug',[ '[PiHole Network] - completed - found ', len(new_devices), ' devices'])
return str(sql.rowcount) != "0"
#-------------------------------------------------------------------------------
#-------------------------------------------------------------------------------
def read_DHCP_leases (db):
"""
read the PiHole DHCP file and insert all records into the DHCP_Leases table.
"""
mylog('debug', '[PiHole DHCP] - read DHCP_Leases file')
# Read DHCP Leases
# Bugfix #1 - dhcp.leases: lines with different number of columns (5 col)
data = []
reporting = False
with open(piholeDhcpleases, 'r') as f:
for line in f:
reporting = True
row = line.rstrip().split()
if len(row) == 5 :
data.append (row)
# Insert into PiAlert table
db.sql.executemany ("""INSERT INTO DHCP_Leases (DHCP_DateTime, DHCP_MAC,
DHCP_IP, DHCP_Name, DHCP_MAC2)
VALUES (?, ?, ?, ?, ?)
""", data)
db.commitDB()
mylog('debug', ['[PiHole DHCP] - completed - added ',len(data), ' devices.'])
return reporting