diff --git a/dockerfiles/README.md b/dockerfiles/README.md
index 36d59b38..57b50d50 100755
--- a/dockerfiles/README.md
+++ b/dockerfiles/README.md
@@ -4,9 +4,10 @@
 [![Docker Pulls](https://img.shields.io/docker/pulls/jokobsk/pi.alert?label=Pulls&logo=docker&color=0aa8d2&logoColor=fff)](https://hub.docker.com/r/jokobsk/pi.alert)
 [![Docker Pushed](https://img.shields.io/badge/dynamic/json?color=0aa8d2&logoColor=fff&label=Pushed&query=last_updated&url=https%3A%2F%2Fhub.docker.com%2Fv2%2Frepositories%2Fjokobsk%2Fpi.alert%2F&logo=docker&link=http://left&link=https://hub.docker.com/repository/docker/jokobsk/pi.alert)](https://hub.docker.com/r/jokobsk/pi.alert)
 
-# 🐳 A docker image for Pi.Alert
+# PiAlert 💻🔍 Network security scanner
 
-🐳 [Docker hub](https://registry.hub.docker.com/r/jokobsk/pi.alert) | 📑 [Docker instructions](https://github.com/jokob-sk/Pi.Alert/blob/main/dockerfiles/README.md) | 🆕 [Release notes](https://github.com/jokob-sk/Pi.Alert/releases) | 📚 [All Docs](https://github.com/jokob-sk/Pi.Alert/tree/main/docs)
+ | 🐳 [Docker hub](https://registry.hub.docker.com/r/jokobsk/pi.alert) | 📑 [Docker guide](https://github.com/jokob-sk/Pi.Alert/blob/main/dockerfiles/README.md) | 🆕 [Release notes](https://github.com/jokob-sk/Pi.Alert/releases) | 📚 [All Docs](https://github.com/jokob-sk/Pi.Alert/tree/main/docs) |
+ |----------------------|----------------------|----------------------|----------------------|
@@ -15,7 +16,6 @@
-
 ## 📕 Basic Usage
-
 You will have to run the container on the host network, e.g:
@@ -79,8 +79,9 @@ There are 2 approaches how to get PiHole devices imported. Via the PiHole import
 * `DHCPLSS_RUN`: You need to map `:/etc/pihole/dhcp.leases` in the `docker-compose.yml` file if you enable this setting.
 * The above setting has to be matched with a corresponding `DHCPLSS_paths_to_check` setting entry (the path in the container must contain `pihole` as PiHole uses a different format of the `dhcp.leases` file).
 
+> [!NOTE]
 > It's recommended to use the same schedule interval for all plugins responsible for discovering new devices.
-
+
 ### **Common issues** 💡
 
 Before creating a new issue, please check if a similar issue was [already resolved](https://github.com/jokob-sk/Pi.Alert/issues?q=is%3Aissue+is%3Aclosed).
@@ -217,10 +218,19 @@ Big thanks to @Macleykun for help and
 
-## ☕ Support me
+## ❤ Support me
 
-Sponsor Me on GitHub
-Buy Me A Coffee
-Support me on patreon
+Get:
+- Regular updates to keep your data and family safe 🔄
+- More and better functionality ➕
+- A maintainer who doesn't burn out, so the app survives longer 🔥🤯
+- Quicker and better support with issues 🆘
+- A less grumpy me 😄
 
-BTC: 1N8tupjeCK12qRVU2XrV17WvKK7LCawyZM
+| [![GitHub](https://i.imgur.com/emsRCPh.png)](https://github.com/sponsors/jokob-sk) | [![Buy Me A Coffee](https://i.imgur.com/pIM6YXL.png)](https://www.buymeacoffee.com/jokobsk) | [![Patreon](https://i.imgur.com/MuYsrq1.png)](https://www.patreon.com/user?u=84385063) |
+| --- | --- | --- |
+
+- Bitcoin: `1N8tupjeCK12qRVU2XrV17WvKK7LCawyZM`
+- Ethereum: `0x6e2749Cb42F4411bc98501406BdcD82244e3f9C7`
+
+> 📧 Email me at [jokob@duck.com](mailto:jokob@duck.com?subject=PiAlert) if you want to get in touch or if I should add other sponsorship platforms.
\ No newline at end of file
diff --git a/front/php/server/util.php b/front/php/server/util.php
index 4b6d9daf..0561c1f2 100755
--- a/front/php/server/util.php
+++ b/front/php/server/util.php
@@ -332,9 +332,7 @@ function saveSettings()
         fwrite($newConfig, $txt);
         fclose($newConfig);
 
-        displayMessage("
Settings saved to the ".$config_file." file. -

Backup of the previous ".$config_file." created here:

".$new_name."

- Note: Wait at least 5s for the changes to reflect in the UI. (longer if for example a Scan is running)", + displayMessage("
Settings saved to the pialert.conf file.

A time-stamped backup of the previous file created.

Reloading...
", FALSE, TRUE, TRUE, TRUE); } diff --git a/front/php/templates/language/en_us.json b/front/php/templates/language/en_us.json index fda88d57..d346b361 100755 --- a/front/php/templates/language/en_us.json +++ b/front/php/templates/language/en_us.json @@ -461,6 +461,7 @@ "settings_missing" : "Not all settings loaded, refresh the page! This is probably caused by a high load on the database.", "settings_missing_block" : "You can not save your settings without specifying all setting keys. Refresh the page. This is probably caused by a high load on the database.", "settings_old" : "Importing settings and re-initializing...", + "settings_saved" : "
Settings saved to the pialert.conf file. <br> <br> A time-stamped backup of the previous file was created. <br> <br> Reloading... <br>
", "settings_imported" : "Last time settings were imported from the pialert.conf file:", "settings_expand_all" : "Expand all", "Setting_Override" : "Override value", diff --git a/front/plugins/README.md b/front/plugins/README.md index 4364cc65..44178642 100755 --- a/front/plugins/README.md +++ b/front/plugins/README.md @@ -18,6 +18,7 @@ |-------------|-------------|-----------------------|------------------------|----------------------------------------------------------| | | Yes | ARPSCAN | Script | 📚[arp_scan](/front/plugins/arp_scan/) | | | | CSVBCKP | Script | 📚[csv_backup](/front/plugins/csv_backup/) | +| Yes* | | DBCLNP | Script | 📚[db_cleanup](/front/plugins/db_cleanup/) | | | Yes | DHCPLSS | Script | 📚[dhcp_leases](/front/plugins/dhcp_leases/) | | | | DHCPSRVS | Script | 📚[dhcp_servers](/front/plugins/dhcp_servers/) | | Yes | | NEWDEV | Template | 📚[newdev_template](/front/plugins/newdev_template/) | @@ -30,7 +31,8 @@ | | | WEBMON | Script | 📚[website_monitor](/front/plugins/website_monitor/) | | N/A | | N/A | SQL query | No example available, but the External SQLite based plugins work very similar | ->* The Undiscoverables plugin (`UNDIS`) inserts only user-specified dummy devices. +> \* The Undiscoverables plugin (`UNDIS`) inserts only user-specified dummy devices. +> \* The dabase cleanup plugin is not _required_ but the app will become unusable after a while if not executed. > [!NOTE] > You soft-disable plugins via Settings or completely ignore plugins by placing a `ignore_plugin` file into the plugin directory. The difference is that ignored plugins don't show up anywhere in the UI (Settings, Device details, Plugins pages). The app skips ignored plugins completely. Device-detecting plugins insert values into the `CurrentScan` database table. The plugins that are not required are safe to ignore, however it makes sense to have a least some device-detecting plugins (that insert entries into the `CurrentScan` table) enabled, such as ARPSCAN or PIHOLE. diff --git a/front/plugins/csv_backup/config.json b/front/plugins/csv_backup/config.json index e5f26de3..af2651d9 100755 --- a/front/plugins/csv_backup/config.json +++ b/front/plugins/csv_backup/config.json @@ -109,7 +109,7 @@ }, { "language_code": "de_de", - "string": "Kommando" + "string": "Befehl" } ], "description": [ diff --git a/front/plugins/db_cleanup/README.md b/front/plugins/db_cleanup/README.md new file mode 100755 index 00000000..03fa70f4 --- /dev/null +++ b/front/plugins/db_cleanup/README.md @@ -0,0 +1,7 @@ +## Overview + +Plugin to run regular database cleanup tasks. It is strongly recommended to have an hourly or at least daily schedule running. + +### Usage + +- Check the Settings page for details. diff --git a/front/plugins/db_cleanup/config.json b/front/plugins/db_cleanup/config.json new file mode 100755 index 00000000..9dc23e20 --- /dev/null +++ b/front/plugins/db_cleanup/config.json @@ -0,0 +1,180 @@ +{ + "code_name": "db_cleanup", + "unique_prefix": "DBCLNP", + "enabled": true, + "data_source": "script", + "show_ui": false, + "localized": ["display_name", "description", "icon"], + + "display_name": [ + { + "language_code": "en_us", + "string": "DB cleanup" + } + ], + "icon": [ + { + "language_code": "en_us", + "string": "" + } + ], + "description": [ + { + "language_code": "en_us", + "string": "A plugin to schedule database cleanup & upkeep tasks." 
+    }
+  ],
+  "params" : [{
+      "name" : "pluginskeephistory",
+      "type" : "setting",
+      "value" : "PLUGINS_KEEP_HIST"
+    },
+    {
+      "name" : "daystokeepevents",
+      "type" : "setting",
+      "value" : "DAYS_TO_KEEP_EVENTS"
+    },
+    {
+      "name" : "hourstokeepnewdevice",
+      "type" : "setting",
+      "value" : "HRS_TO_KEEP_NEWDEV"
+    },
+    {
+      "name" : "pholuskeepdays",
+      "type" : "setting",
+      "value" : "PHOLUS_DAYS_DATA"
+    }
+  ],
+
+  "settings": [
+    {
+      "function": "RUN",
+      "type": "text.select",
+      "default_value":"schedule",
+      "options": ["disabled", "once", "schedule", "always_after_scan"],
+      "localized": ["name", "description"],
+      "name" :[{
+          "language_code":"en_us",
+          "string" : "When to run"
+        },
+        {
+          "language_code":"es_es",
+          "string" : "Cuándo ejecutar"
+        },
+        {
+          "language_code":"de_de",
+          "string" : "Wann laufen"
+        }],
+      "description": [{
+          "language_code":"en_us",
+          "string" : "When the cleanup should be performed. An hourly or daily SCHEDULE is a good option."
+        }]
+    },
+    {
+      "function": "CMD",
+      "type": "readonly",
+      "default_value": "python3 /home/pi/pialert/front/plugins/db_cleanup/script.py pluginskeephistory={pluginskeephistory} hourstokeepnewdevice={hourstokeepnewdevice} daystokeepevents={daystokeepevents} pholuskeepdays={pholuskeepdays}",
+      "options": [],
+      "localized": ["name", "description"],
+      "name": [
+        {
+          "language_code": "en_us",
+          "string": "Command"
+        },
+        {
+          "language_code": "es_es",
+          "string": "Comando"
+        },
+        {
+          "language_code": "de_de",
+          "string": "Befehl"
+        }
+      ],
+      "description": [
+        {
+          "language_code": "en_us",
+          "string": "Command to run. This cannot be changed."
+        },
+        {
+          "language_code": "es_es",
+          "string": "Comando a ejecutar. Esto no se puede cambiar."
+        },
+        {
+          "language_code": "de_de",
+          "string": "Befehl zum Ausführen. Dies kann nicht geändert werden."
+        }
+      ]
+    },
+    {
+      "function": "RUN_SCHD",
+      "type": "text",
+      "default_value":"*/30 * * * *",
+      "options": [],
+      "localized": ["name", "description"],
+      "name" : [{
+          "language_code":"en_us",
+          "string" : "Schedule"
+        },
+        {
+          "language_code":"es_es",
+          "string" : "Schedule"
+        },
+        {
+          "language_code":"de_de",
+          "string" : "Schedule"
+        }],
+      "description": [{
+          "language_code":"en_us",
+          "string" : "Only enabled if you select schedule in the DBCLNP_RUN setting. Make sure you enter the schedule in the correct cron-like format (e.g. validate at crontab.guru). For example, entering 0 4 * * * will run the scan after 4 am in the TIMEZONE you set above. Will be run the NEXT time the time passes."
+        },
+        {
+          "language_code":"es_es",
+          "string" : "Solo está habilitado si selecciona schedule en la configuración DBCLNP_RUN. Asegúrese de ingresar la programación en el formato similar a cron correcto (por ejemplo, valide en crontab.guru). Por ejemplo, ingresar 0 4 * * * ejecutará el escaneo después de las 4 a.m. en el TIMEZONE que configuró arriba. Se ejecutará la PRÓXIMA vez que pase el tiempo."
+        },
+        {
+          "language_code":"de_de",
+          "string" : "Nur aktiviert, wenn Sie schedule in der DBCLNP_RUN-Einstellung auswählen. Stellen Sie sicher, dass Sie den Zeitplan im richtigen Cron-ähnlichen Format eingeben (z. B. validieren unter crontab.guru). Wenn Sie beispielsweise 0 4 * * * eingeben, wird der Scan nach 4 Uhr morgens in der TIMEZONE ausgeführt, die Sie oben festgelegt haben. Wird das NÄCHSTE Mal ausgeführt, wenn die Zeit vergeht."
+        }]
+    },
+    {
+      "function": "RUN_TIMEOUT",
+      "type": "integer",
+      "default_value": 30,
+      "options": [],
+      "localized": ["name", "description"],
+      "name": [
+        {
+          "language_code": "en_us",
+          "string": "Run timeout"
+        },
+        {
+          "language_code": "es_es",
+          "string": "Tiempo límite de ejecución"
+        },
+        {
+          "language_code": "de_de",
+          "string": "Zeitüberschreitung"
+        }
+      ],
+      "description": [
+        {
+          "language_code": "en_us",
+          "string": "Maximum time in seconds to wait for the script to finish. If this time is exceeded, the script is aborted."
+        },
+        {
+          "language_code": "es_es",
+          "string": "Tiempo máximo en segundos para esperar a que finalice el script. Si se supera este tiempo, el script se cancela."
+        },
+        {
+          "language_code": "de_de",
+          "string": "Maximale Zeit in Sekunden, die auf den Abschluss des Skripts gewartet werden soll. Bei Überschreitung dieser Zeit wird das Skript abgebrochen."
+        }
+      ]
+    }
+  ],
+
+  "database_column_definitions":
+  [
+
+  ]
+}
diff --git a/front/plugins/db_cleanup/script.py b/front/plugins/db_cleanup/script.py
new file mode 100755
index 00000000..5d747a06
--- /dev/null
+++ b/front/plugins/db_cleanup/script.py
@@ -0,0 +1,147 @@
+#!/usr/bin/env python
+# test script by running:
+# /home/pi/pialert/front/plugins/db_cleanup/script.py pluginskeephistory=250 hourstokeepnewdevice=48 daystokeepevents=90 pholuskeepdays=0
+
+import os
+import pathlib
+import argparse
+import sys
+import hashlib
+import csv
+import sqlite3
+from io import StringIO
+from datetime import datetime
+
+sys.path.append("/home/pi/pialert/front/plugins")
+sys.path.append('/home/pi/pialert/pialert')
+
+from plugin_helper import Plugin_Object, Plugin_Objects, decodeBase64
+from logger import mylog, append_line_to_file
+from helper import timeNowTZ
+from const import logPath, pialertPath
+
+
+CUR_PATH = str(pathlib.Path(__file__).parent.resolve())
+LOG_FILE = os.path.join(CUR_PATH, 'script.log')
+RESULT_FILE = os.path.join(CUR_PATH, 'last_result.log')
+
+def main():
+
+    parser = argparse.ArgumentParser(description='DB cleanup tasks')
+    parser.add_argument('pluginskeephistory', action="store", help="Number of Plugins_History entries to keep per plugin (PLUGINS_KEEP_HIST setting)")
+    parser.add_argument('hourstokeepnewdevice', action="store", help="Hours to keep devices flagged as new (HRS_TO_KEEP_NEWDEV setting)")
+    parser.add_argument('daystokeepevents', action="store", help="Days to keep Events entries (DAYS_TO_KEEP_EVENTS setting)")
+    parser.add_argument('pholuskeepdays', action="store", help="Days to keep Pholus_Scan entries (PHOLUS_DAYS_DATA setting)")
+
+    values = parser.parse_args()
+
+    # Arguments arrive as name=value pairs; cast to int so the numeric checks below work
+    PLUGINS_KEEP_HIST   = int(values.pluginskeephistory.split('=')[1])
+    HRS_TO_KEEP_NEWDEV  = int(values.hourstokeepnewdevice.split('=')[1])
+    DAYS_TO_KEEP_EVENTS = int(values.daystokeepevents.split('=')[1])
+    PHOLUS_DAYS_DATA    = int(values.pholuskeepdays.split('=')[1])
+
+    mylog('verbose', ['[DBCLNP] In script'])
+
+    # Execute cleanup/upkeep
+    cleanup_database('/home/pi/pialert/db/pialert.db', DAYS_TO_KEEP_EVENTS, PHOLUS_DAYS_DATA, HRS_TO_KEEP_NEWDEV, PLUGINS_KEEP_HIST)
+
+    mylog('verbose', ['[DBCLNP] Cleanup complete'])
+
+    return 0
+
+#===============================================================================
+# Cleanup / upkeep database
+#===============================================================================
+def cleanup_database (dbPath, DAYS_TO_KEEP_EVENTS, PHOLUS_DAYS_DATA, HRS_TO_KEEP_NEWDEV, PLUGINS_KEEP_HIST):
+    """
+    Cleaning out old records from the tables that don't need to keep all data.
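+
+    Thresholds arrive from the plugin settings (cast to int in main() above):
+      DAYS_TO_KEEP_EVENTS - Events rows older than this many days are deleted
+      PHOLUS_DAYS_DATA    - Pholus_Scan rows older than this many days are deleted (0 = keep everything)
+      HRS_TO_KEEP_NEWDEV  - cutoff in hours for deleting devices still flagged as new (0 = keep everything)
+      PLUGINS_KEEP_HIST   - maximum number of Plugins_History rows kept per plugin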
+    """
+
+    mylog('verbose', ['[DBCLNP] Upkeep Database:' ])
+
+    # Connect to the PiAlert SQLite database
+    conn = sqlite3.connect(dbPath)
+    cursor = conn.cursor()
+
+    # Cleanup Online History
+    mylog('verbose', ['[DBCLNP] Online_History: Delete all but keep latest 150 entries'])
+    cursor.execute ("""DELETE from Online_History where "Index" not in (
+                          SELECT "Index" from Online_History
+                          order by Scan_Date desc limit 150)""")
+
+    # Cleanup Events
+    mylog('verbose', [f'[DBCLNP] Events: Delete all older than {str(DAYS_TO_KEEP_EVENTS)} days (DAYS_TO_KEEP_EVENTS setting)'])
+    cursor.execute (f"""DELETE FROM Events
+                          WHERE eve_DateTime <= date('now', '-{str(DAYS_TO_KEEP_EVENTS)} day')""")
+
+    # Trim Plugins_History entries to less than PLUGINS_KEEP_HIST setting per unique "Plugin" column entry
+    mylog('verbose', [f'[DBCLNP] Plugins_History: Trim Plugins_History entries to less than {str(PLUGINS_KEEP_HIST)} per Plugin (PLUGINS_KEEP_HIST setting)'])
+
+    # Build the SQL query to delete entries that exceed the limit per unique "Plugin" column entry
+    delete_query = f"""DELETE FROM Plugins_History
+                        WHERE "Index" NOT IN (
+                            SELECT "Index"
+                            FROM (
+                                SELECT "Index",
+                                    ROW_NUMBER() OVER(PARTITION BY "Plugin" ORDER BY DateTimeChanged DESC) AS row_num
+                                FROM Plugins_History
+                            ) AS ranked_objects
+                            WHERE row_num <= {str(PLUGINS_KEEP_HIST)}
+                        );"""
+
+    cursor.execute(delete_query)
+
+    # Cleanup Pholus_Scan
+    if PHOLUS_DAYS_DATA != 0:
+        mylog('verbose', ['[DBCLNP] Pholus_Scan: Delete all older than ' + str(PHOLUS_DAYS_DATA) + ' days (PHOLUS_DAYS_DATA setting)'])
+        # todo: improvement possibility: keep at least N per mac
+        cursor.execute (f"""DELETE FROM Pholus_Scan
+                              WHERE Time <= date('now', '-{str(PHOLUS_DAYS_DATA)} day')""")
+
+    # Cleanup New Devices
+    if HRS_TO_KEEP_NEWDEV != 0:
+        mylog('verbose', [f'[DBCLNP] Devices: Delete all New Devices older than {str(HRS_TO_KEEP_NEWDEV)} hours (HRS_TO_KEEP_NEWDEV setting)'])
+        cursor.execute (f"""DELETE FROM Devices
+                              WHERE dev_NewDevice = 1 AND dev_FirstConnection < date('now', '+{str(HRS_TO_KEEP_NEWDEV)} hour')""")
+
+    # De-dupe (de-duplicate) from the Plugins_Objects table
+    # TODO This shouldn't be necessary - probably a concurrency bug somewhere in the code :(
+    mylog('verbose', ['[DBCLNP] Plugins_Objects: Delete all duplicates'])
+    cursor.execute("""
+        DELETE FROM Plugins_Objects
+        WHERE rowid > (
+            SELECT MIN(rowid) FROM Plugins_Objects p2
+            WHERE Plugins_Objects.Plugin = p2.Plugin
+              AND Plugins_Objects.Object_PrimaryID = p2.Object_PrimaryID
+              AND Plugins_Objects.Object_SecondaryID = p2.Object_SecondaryID
+              AND Plugins_Objects.UserData = p2.UserData
+        )
+    """)
+
+    # De-dupe (remove duplicate entries) from the Pholus_Scan table
+    mylog('verbose', ['[DBCLNP] Pholus_Scan: Delete all duplicates'])
+    cursor.execute ("""DELETE FROM Pholus_Scan
+                        WHERE rowid > (
+                            SELECT MIN(rowid) FROM Pholus_Scan p2
+                            WHERE Pholus_Scan.MAC = p2.MAC
+                              AND Pholus_Scan.Value = p2.Value
+                              AND Pholus_Scan.Record_Type = p2.Record_Type
+                        );""")
+
+    conn.commit()
+
+    # Shrink DB
+    mylog('verbose', ['[DBCLNP] Shrink Database'])
+    cursor.execute ("VACUUM;")
+
+    # Close the database connection
+    conn.close()
+
+
+
+#===============================================================================
+# BEGIN
+#===============================================================================
+if __name__ == '__main__':
+    main()
\ No newline at end of file
diff --git a/pialert/__main__.py b/pialert/__main__.py
index 9be38e63..e96bef15 100755
--- a/pialert/__main__.py
+++ b/pialert/__main__.py
@@ -53,16 +53,13 @@ main structure of Pi Alert
     (re)import plugin config
     run plugins (once)
     run frontend events
-        update API
-        run scans
-        run plugins (scheduled)
-        check internet IP
-        check vendor
-        run "scan_network()"
-        processing scan results
-        run plugins (after Scan)
-        reporting
-        cleanup
+        update API
+        run plugins (scheduled)
+        check internet IP
+        check vendor
+        processing scan results
+        run plugins (after Scan)
+        reporting
     end loop
 """
@@ -103,10 +100,8 @@ def main ():
     # TODO fix these
     loop_start_time = conf.loop_start_time # TODO fix
-    last_update_vendors = conf.last_update_vendors
-    last_cleanup = conf.last_cleanup
-    last_version_check = conf.last_version_check
-
+    last_update_vendors = conf.last_update_vendors
+    last_version_check = conf.last_version_check
 
     # check if new version is available / only check once an hour
     if conf.last_version_check + datetime.timedelta(hours=1) < conf.loop_start_time :
@@ -186,13 +181,6 @@ def main ():
         # send all configured notifications
         send_notifications(db)
 
-        # clean up the DB once an hour
-        if conf.last_cleanup + datetime.timedelta(hours = 1) < loop_start_time:
-            conf.last_cleanup = loop_start_time
-            conf.cycle = 'cleanup'
-            mylog('verbose', ['[MAIN] cycle:',conf.cycle])
-            db.cleanup_database(startTime, conf.DAYS_TO_KEEP_EVENTS, get_setting_value('PHOLUS_DAYS_DATA'), conf.HRS_TO_KEEP_NEWDEV, conf.PLUGINS_KEEP_HIST)
-
         # Commit SQL
         db.commitDB()
diff --git a/pialert/api.py b/pialert/api.py
index a3cd34b9..13f21da1 100755
--- a/pialert/api.py
+++ b/pialert/api.py
@@ -75,7 +75,7 @@ class api_endpoint_class:
 
             index = index + 1
 
-            # cehck if API endpoints have changed or if it's a new one
+            # check if API endpoints have changed or if it's a new one
             if not found or changed:
                 mylog('verbose', [f'[API] Updating {self.fileName} file in /front/api'])
diff --git a/pialert/conf.py b/pialert/conf.py
index 36b3b11b..baf9fa13 100755
--- a/pialert/conf.py
+++ b/pialert/conf.py
@@ -23,7 +23,6 @@ time_started = ''
 startTime = ''
 last_internet_IP_scan = ''
 last_scan_run = ''
-last_cleanup = ''
 last_update_vendors = ''
 last_version_check = ''
 arpscan_devices = []
diff --git a/pialert/database.py b/pialert/database.py
index 349f1878..088811b8 100755
--- a/pialert/database.py
+++ b/pialert/database.py
@@ -74,86 +74,6 @@ class DB():
         return arr
 
-    #===============================================================================
-    # Cleanup / upkeep database
-    #===============================================================================
-    def cleanup_database (self, startTime, DAYS_TO_KEEP_EVENTS, PHOLUS_DAYS_DATA, HRS_TO_KEEP_NEWDEV, PLUGINS_KEEP_HIST):
-        """
-        Cleaning out old records from the tables that don't need to keep all data.
-        """
-        # Header
-        #updateState(self,"Upkeep: Clean DB")
-        mylog('verbose', ['[DB Cleanup] Upkeep Database:' ])
-
-        # Cleanup Online History
-        mylog('verbose', ['[DB Cleanup] Online_History: Delete all but keep latest 150 entries'])
-        self.sql.execute ("""DELETE from Online_History where "Index" not in (
-                               SELECT "Index" from Online_History
-                               order by Scan_Date desc limit 150)""")
-        mylog('verbose', ['[DB Cleanup] Optimize Database'])
-        # Cleanup Events
-        mylog('verbose', [f'[DB Cleanup] Events: Delete all older than {str(DAYS_TO_KEEP_EVENTS)} days (DAYS_TO_KEEP_EVENTS setting)'])
-        self.sql.execute (f"""DELETE FROM Events
-                                WHERE eve_DateTime <= date('now', '-{str(DAYS_TO_KEEP_EVENTS)} day')""")
-
-        # Trim Plugins_History entries to less than PLUGINS_KEEP_HIST setting per unique "Plugin" column entry
-        mylog('verbose', [f'[DB Cleanup] Plugins_History: Trim Plugins_History entries to less than {str(PLUGINS_KEEP_HIST)} per Plugin (PLUGINS_KEEP_HIST setting)'])
-
-        # Build the SQL query to delete entries that exceed the limit per unique "Plugin" column entry
-        delete_query = f"""DELETE FROM Plugins_History
-                            WHERE "Index" NOT IN (
-                                SELECT "Index"
-                                FROM (
-                                    SELECT "Index",
-                                        ROW_NUMBER() OVER(PARTITION BY "Plugin" ORDER BY DateTimeChanged DESC) AS row_num
-                                    FROM Plugins_History
-                                ) AS ranked_objects
-                                WHERE row_num <= {str(PLUGINS_KEEP_HIST)}
-                            );"""
-
-        self.sql.execute(delete_query)
-
-        # Cleanup Pholus_Scan
-        if PHOLUS_DAYS_DATA != 0:
-            mylog('verbose', ['[DB Cleanup] Pholus_Scan: Delete all older than ' + str(PHOLUS_DAYS_DATA) + ' days (PHOLUS_DAYS_DATA setting)'])
-            # todo: improvement possibility: keep at least N per mac
-            self.sql.execute (f"""DELETE FROM Pholus_Scan
-                                    WHERE Time <= date('now', '-{str(PHOLUS_DAYS_DATA)} day')""")
-        # Cleanup New Devices
-        if HRS_TO_KEEP_NEWDEV != 0:
-            mylog('verbose', [f'[DB Cleanup] Devices: Delete all New Devices older than {str(HRS_TO_KEEP_NEWDEV)} hours (HRS_TO_KEEP_NEWDEV setting)'])
-            self.sql.execute (f"""DELETE FROM Devices
-                                    WHERE dev_NewDevice = 1 AND dev_FirstConnection < date('now', '+{str(HRS_TO_KEEP_NEWDEV)} hour')""")
-
-
-        # De-dupe (de-duplicate) from the Plugins_Objects table
-        # TODO This shouldn't be necessary - probably a concurrency bug somewhere in the code :(
-        mylog('verbose', ['[DB Cleanup] Plugins_Objects: Delete all duplicates'])
-        self.sql.execute("""
-            DELETE FROM Plugins_Objects
-            WHERE rowid > (
-                SELECT MIN(rowid) FROM Plugins_Objects p2
-                WHERE Plugins_Objects.Plugin = p2.Plugin
-                  AND Plugins_Objects.Object_PrimaryID = p2.Object_PrimaryID
-                  AND Plugins_Objects.Object_SecondaryID = p2.Object_SecondaryID
-                  AND Plugins_Objects.UserData = p2.UserData
-            )
-        """)
-
-        # De-Dupe (de-duplicate - remove duplicate entries) from the Pholus_Scan table
-        mylog('verbose', ['[DB Cleanup] Pholus_Scan: Delete all duplicates'])
-        self.sql.execute ("""DELETE FROM Pholus_Scan
-                              WHERE rowid > (
-                                  SELECT MIN(rowid) FROM Pholus_Scan p2
-                                  WHERE Pholus_Scan.MAC = p2.MAC
-                                    AND Pholus_Scan.Value = p2.Value
-                                    AND Pholus_Scan.Record_Type = p2.Record_Type
-                              );""")
-
-        # Shrink DB
-        mylog('verbose', ['[DB Cleanup] Shrink Database'])
-        self.sql.execute ("VACUUM;")
-        self.commitDB()
 
     #-------------------------------------------------------------------------------
     def upgradeDB(self):
diff --git a/pialert/initialise.py b/pialert/initialise.py
index 3e71db3f..257dc4bc 100755
--- a/pialert/initialise.py
+++ b/pialert/initialise.py
@@ -175,8 +175,7 @@ def importConfigs (db):
 
     # set these times to the past to force the first run
     conf.last_internet_IP_scan = now_minus_24h
-    conf.last_scan_run = now_minus_24h
-    conf.last_cleanup = now_minus_24h
+    conf.last_scan_run = now_minus_24h
     conf.last_update_vendors = conf.time_started - datetime.timedelta(days = 6) # update vendors 24h after first run and then once a week
     conf.last_version_check = now_minus_24h
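
To verify the relocated cleanup end to end, trigger the DBCLNP plugin (or run `script.py` by hand as shown in its header comment) and check that the affected tables were trimmed. The snippet below is a minimal sketch, not part of the diff above: it assumes the default in-container database path hard-coded in `script.py`, and it only counts rows in the tables that `cleanup_database()` touches.

```python
#!/usr/bin/env python3
# Minimal sanity check after a DBCLNP run (assumes the default PiAlert DB path).
import sqlite3

DB_PATH = '/home/pi/pialert/db/pialert.db'  # path used by script.py

conn = sqlite3.connect(DB_PATH)
cur = conn.cursor()
for table in ('Online_History', 'Events', 'Plugins_History', 'Pholus_Scan'):
    rows = cur.execute(f'SELECT COUNT(*) FROM "{table}"').fetchone()[0]
    print(f'{table}: {rows} rows')  # Online_History should hold at most 150 rows
conn.close()
```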