Jokob-sk
2023-01-29 12:32:48 +11:00
parent 51e865c98d
commit 2e532abf6b
6 changed files with 144 additions and 19 deletions

View File

@@ -31,11 +31,12 @@ The system continuously scans the network for, **New devices**, **New connection
that were not discovered by the other methods.
## 🧩 Notification Integrations
## 🧩 Integrations
- [Apprise](https://hub.docker.com/r/caronc/apprise), [Pushsafer](https://www.pushsafer.com/), [NTFY](https://ntfy.sh/)
- [Webhooks](https://github.com/jokob-sk/Pi.Alert/blob/main/docs/WEBHOOK_N8N.md) ([sample JSON](docs/webhook_json_sample.json))
- Home Assistant via [MQTT](https://www.home-assistant.io/integrations/mqtt/)
- discovery ~10s per device, deleting not supported, use [MQTT Explorer](https://mqtt-explorer.com/) for now
- A simple [API endpoint](https://github.com/jokob-sk/Pi.Alert/blob/main/docs/API.md)
## 🔐 Security

View File

@@ -288,6 +288,8 @@ def importConfig ():
global PHOLUS_ACTIVE, PHOLUS_TIMEOUT, PHOLUS_FORCE, PHOLUS_DAYS_DATA, PHOLUS_RUN, PHOLUS_RUN_SCHD, PHOLUS_RUN_TIMEOUT
# Nmap
global NMAP_ACTIVE, NMAP_TIMEOUT, NMAP_RUN, NMAP_RUN_SCHD, NMAP_ARGS
# API
global ENABLE_API, API_RUN, API_RUN_SCHD, API_RUN_INTERVAL
mySettings = [] # reset settings
# get config file
@@ -385,8 +387,14 @@ def importConfig ():
NMAP_RUN = ccd('NMAP_RUN', 'none' , c_d, 'Nmap enable schedule', 'selecttext', "['none', 'once', 'schedule']", 'Nmap')
NMAP_RUN_SCHD = ccd('NMAP_RUN_SCHD', '0 2 * * *' , c_d, 'Nmap schedule', 'text', '', 'Nmap')
NMAP_ARGS = ccd('NMAP_ARGS', '-p -10000' , c_d, 'Nmap custom arguments', 'text', '', 'Nmap')
# API
ENABLE_API = ccd('ENABLE_API', True , c_d, 'Enable API', 'boolean', '', 'API')
API_RUN = ccd('API_RUN', 'schedule' , c_d, 'API execution', 'selecttext', "['none', 'interval', 'schedule']", 'API')
API_RUN_SCHD = ccd('API_RUN_SCHD', '*/3 * * * *' , c_d, 'API schedule', 'text', '', 'API')
API_RUN_INTERVAL = ccd('API_RUN_INTERVAL', 10 , c_d, 'API update interval', 'integer', '', 'API')
# Insert into DB
# Insert settings into the DB
sql.execute ("DELETE FROM Settings")
sql.executemany ("""INSERT INTO Settings ("Code_Name", "Display_Name", "Description", "Type", "Options",
"RegEx", "Value", "Group", "Events" ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)""", mySettings)
@@ -416,6 +424,10 @@ def importConfig ():
nmapSchedule = Cron(NMAP_RUN_SCHD).schedule(start_date=datetime.datetime.now(tz))
mySchedules.append(serviceSchedule("nmap", nmapSchedule, nmapSchedule.next(), False))
# init API schedule
apiSchedule = Cron(API_RUN_SCHD).schedule(start_date=datetime.datetime.now(tz))
mySchedules.append(serviceSchedule("api", apiSchedule, apiSchedule.next(), False))
# Format and prepare the list of subnets
updateSubnets()
@@ -433,6 +445,7 @@ now_minus_24h = time_started - datetime.timedelta(hours = 24)
last_network_scan = now_minus_24h
last_internet_IP_scan = now_minus_24h
last_API_update = now_minus_24h
last_run = now_minus_24h
last_cleanup = now_minus_24h
last_update_vendors = time_started - datetime.timedelta(days = 6) # update vendors 24h after first run and then once a week
@@ -442,7 +455,7 @@ newVersionAvailable = False
def main ():
# Initialize global variables
global time_started, cycle, last_network_scan, last_internet_IP_scan, last_run, last_cleanup, last_update_vendors
global time_started, cycle, last_network_scan, last_internet_IP_scan, last_run, last_cleanup, last_update_vendors, last_API_update
# second set of global variables
global startTime, log_timestamp, sql_connection, sql
@@ -468,6 +481,12 @@ def main ():
# check if there is a front end initiated event which needs to be executed
check_and_run_event()
# Execute API update if enabled via the interval schedule settings and if enough time passed
if API_RUN == "interval" and last_API_update + datetime.timedelta(seconds = API_RUN_INTERVAL) < time_started:
last_API_update = time_started
update_api()
# proceed if 1 minute passed
if last_run + datetime.timedelta(minutes=1) < time_started :
@@ -496,7 +515,7 @@ def main ():
cycle = 'update_vendors'
update_devices_MAC_vendors()
# Execute Pholus scheduled or one-off scan if enabled and run conditions fulfilled
# Execute scheduled or one-off Pholus scan if enabled and run conditions fulfilled
if PHOLUS_RUN == "schedule" or PHOLUS_RUN == "once":
pholusSchedule = [sch for sch in mySchedules if sch.service == "pholus"][0]
@@ -514,7 +533,7 @@ def main ():
pholusSchedule.last_run = datetime.datetime.now(tz).replace(microsecond=0)
performPholusScan(PHOLUS_RUN_TIMEOUT)
# Execute Nmap scheduled or one-off scan if enabled and run conditions fulfilled
# Execute scheduled or one-off Nmap scan if enabled and run conditions fulfilled
if NMAP_RUN == "schedule" or NMAP_RUN == "once":
nmapSchedule = [sch for sch in mySchedules if sch.service == "nmap"][0]
@@ -532,6 +551,19 @@ def main ():
nmapSchedule.last_run = datetime.datetime.now(tz).replace(microsecond=0)
performNmapScan(get_all_devices())
# Execute scheduled API update if enabled
if API_RUN == "schedule":
apiSchedule = [sch for sch in mySchedules if sch.service == "api"][0]
run = False
# run if overdue scheduled time
run = apiSchedule.runScheduleCheck()
if run:
apiSchedule.last_run = datetime.datetime.now(tz).replace(microsecond=0)
update_api()
# Perform a network scan via arp-scan or pihole
if last_network_scan + datetime.timedelta(minutes=SCAN_CYCLE_MINUTES) < time_started:
last_network_scan = time_started
@@ -2189,7 +2221,7 @@ def send_notifications ():
# Send Mail
if json_internet != [] or json_new_devices != [] or json_down_devices != [] or json_events != [] or debug_force_notification:
update_api()
update_api(True)
file_print(' Changes detected, sending reports')
@@ -2913,19 +2945,25 @@ def to_binary_sensor(input):
return result
#===============================================================================
# API
#===============================================================================
def update_api():
def update_api(isNotification = False):
# Proceed only if enabled in settings
if ENABLE_API == False:
return
file_print(' [API] Updating files in /front/api')
folder = pialertPath + '/front/api/'
write_file(folder + 'notification_text.txt' , mail_text)
write_file(folder + 'notification_text.html' , mail_html)
write_file(folder + 'notification_json_final.json' , json.dumps(json_final))
if isNotification:
# Update last notification alert in all formats
write_file(folder + 'notification_text.txt' , mail_text)
write_file(folder + 'notification_text.html' , mail_html)
write_file(folder + 'notification_json_final.json' , json.dumps(json_final))
# prepare database tables we want to expose
dataSourcesSQLs = [
["devices", sql_devices_all],
["nmap_scan", sql_nmap_scan_all],

View File

@@ -48,6 +48,7 @@ docker run -d --rm --network=host \
|Optional| `:/home/pi/pialert/front/log` | Logs folder useful for debugging if you have issues setting up the container |
|Optional| `:/etc/pihole/pihole-FTL.db` | PiHole's `pihole-FTL.db` database file. Required if you want to use PiHole |
|Optional| `:/etc/pihole/dhcp.leases` | PiHole's `dhcp.leases` file. Required if you want to use PiHole |
|Optional| `:/home/pi/pialert/front/api` | A simple [API endpoint](https://github.com/jokob-sk/Pi.Alert/blob/main/docs/API.md) containing static (but regularly updated) JSON and other files. |
### Config (`pialert.conf`)
@@ -56,7 +57,7 @@ docker run -d --rm --network=host \
- ❗ Set the `SCAN_SUBNETS` variable.
* The adapter will probably be `eth0` or `eth1`. (Run `iwconfig` to find your interface name(s))
* Specify the network filter (which **significantly** speeds up the scan process). For example, the filter `192.168.1.0/24` covers the IP range 192.168.1.0 to 192.168.1.255.
* Examples for one and two subnets (❗ Note the `['...', '...']` format for two or more subnets):
* Examples for one and two subnets (❗ Note the `['...', '...']` format):
* One subnet: `SCAN_SUBNETS = ['192.168.1.0/24 --interface=eth0']`
* Two subnets: `SCAN_SUBNETS = ['192.168.1.0/24 --interface=eth0', '192.168.1.0/24 --interface=eth1']`

View File

@@ -1,12 +1,23 @@
## Where are API endpoints located
## API endpoints
PiAlert comes with a simple API. These API endpoints are static files, which are updated during:
PiAlert comes with a simple API. These API endpoints are static files that are periodically updated based on your settings.
1) A notification event
2) TBD
### When are the endpoints updated
Once you enable the API (`ENABLE_API` setting), the endpoints are updated at the following times:
1) Always during a notification event.
2) (optional) If `API_RUN` is set to `schedule`, on the cron-like schedule specified by the `API_RUN_SCHD` setting.
3) (optional) If `API_RUN` is set to `interval`, every N seconds as specified by the `API_RUN_INTERVAL` setting (minimum 5) - see the configuration sketch below.
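For reference, a minimal `pialert.conf` sketch that enables the API, using the setting names introduced in this commit (the values shown are illustrative, not prescriptive):

```python
# pialert.conf (excerpt) - illustrative values
ENABLE_API       = True             # publish and update the static endpoints under front/api
API_RUN          = 'interval'       # one of 'none', 'interval', 'schedule'
API_RUN_INTERVAL = 10               # seconds between updates when API_RUN = 'interval' (minimum 5)
API_RUN_SCHD     = '*/3 * * * *'    # cron-like schedule, used when API_RUN = 'schedule'
```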
### Location of the endpoints
In the container, these files are located under the `/home/pi/pialert/front/api/` folder and are therefore served at the `<pialert_url>/api/<File name>` URL.
### Available endpoints
You can access the following files:
| File name | Description |
@@ -16,8 +27,72 @@ You can access the following files:
| `notification_json_final.json` | The json version of the last notification (e.g. used for webhooks - [sample JSON](https://github.com/jokob-sk/Pi.Alert/blob/main/back/webhook_json_sample.json)). |
| `table_devices.json` | The current state (as of the last update, see above) of all Devices detected by the app. |
| `table_nmap_scan.json` | The current state of the ports discovered by the regular Nmap scans. |
| `pholus_scan.json` | The latest state of the [pholus](https://github.com/jokob-sk/Pi.Alert/tree/main/pholus) (A multicast DNS and DNS Service Discovery Security Assessment Tool) scan results. |
| `table_pholus_scan.json` | The latest state of the [pholus](https://github.com/jokob-sk/Pi.Alert/tree/main/pholus) (A multicast DNS and DNS Service Discovery Security Assessment Tool) scan results. |
| `table_events_pending_alert.json` | The list of the unprocessed (pending) notification events. |
How current these files are depends on the update settings described above.
### JSON Data format
The endpoints with the `table_` prefix contain most, if not all, of the data in the corresponding database table. The common format is:
```JSON
{
"data": [
{
"db_column_name": "data",
"db_column_name2": "data2"
},
{
"db_column_name": "data3",
"db_column_name2": "data4"
}
]
}
```
Example JSON of the `table_devices.json` endpoint with two Devices (database rows):
```JSON
{
"data": [
{
"dev_MAC": "Internet",
"dev_Name": "Net - Huawei",
"dev_DeviceType": "Router",
"dev_Vendor": null,
"dev_Group": "Always on",
"dev_FirstConnection": "2021-01-01 00:00:00",
"dev_LastConnection": "2021-01-28 22:22:11",
"dev_LastIP": "192.168.1.24",
"dev_StaticIP": 0,
"dev_PresentLastScan": 1,
"dev_LastNotification": "2023-01-28 22:22:28.998715",
"dev_NewDevice": 0,
"dev_Network_Node_MAC_ADDR": "",
"dev_Network_Node_port": "",
"dev_Icon": "globe"
},
{
"dev_MAC": "a4:8f:ff:aa:ba:1f",
"dev_Name": "Net - USG",
"dev_DeviceType": "Firewall",
"dev_Vendor": "Ubiquiti Inc",
"dev_Group": "",
"dev_FirstConnection": "2021-02-12 22:05:00",
"dev_LastConnection": "2021-07-17 15:40:00",
"dev_LastIP": "192.168.1.1",
"dev_StaticIP": 1,
"dev_PresentLastScan": 1,
"dev_LastNotification": "2021-07-17 15:40:10.667717",
"dev_NewDevice": 0,
"dev_Network_Node_MAC_ADDR": "Internet",
"dev_Network_Node_port": 1,
"dev_Icon": "shield-halved"
}
]
}
```
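As a quick consumption sketch (the host name below is a hypothetical placeholder for your own PiAlert instance), the endpoint can be read with the Python standard library and filtered on any of the columns shown above:

```python
# Minimal sketch: list devices that were present in the last scan.
# Replace the placeholder URL with your own PiAlert address.
import json
import urllib.request

PIALERT_URL = "http://pialert.local"  # hypothetical host name

with urllib.request.urlopen(f"{PIALERT_URL}/api/table_devices.json") as response:
    devices = json.load(response)["data"]

for device in devices:
    if device["dev_PresentLastScan"] == 1:
        print(device["dev_Name"], device["dev_LastIP"])
```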

View File

@@ -646,6 +646,16 @@ the arp-scan will take hours to complete instead of seconds.
'NMAP_ARGS_name' => 'Arguments',
'NMAP_ARGS_description' => 'Arguments used to run the Nmap scan. Be careful to specify <a href="https://linux.die.net/man/1/nmap" target="_blank">the arguments</a> correctly. For example <code>-p -10000</code> scans ports from 1 to 10000.',
// Nmap
'API_settings_group' => '<i class="fa fa-arrow-down-up-across-line"></i> API',
'ENABLE_API_name' => 'Enable API',
'ENABLE_API_description' => 'If enabled, the app will start publishing and updating <a href="https://github.com/jokob-sk/Pi.Alert/blob/main/docs/API.md" target="_blank">simple API endpoints</a> under the <code>/home/pi/pialert/front/api/</code> folder and therefore at the <code>pialert_url/api/File_name</code> URL.',
'API_RUN_name' => 'Scheduling updates',
'API_RUN_description' => 'Scheduling settings that specify when the API endpoints should be updated. If set to <code>schedule</code>, the endpoints are updated on the cron-like schedule specified by the <code>API_RUN_SCHD</code> setting. If set to <code>interval</code>, the endpoints are updated every N seconds as specified by the <code>API_RUN_INTERVAL</code> setting.',
'API_RUN_SCHD_name' => 'Schedule',
'API_RUN_SCHD_description' => 'Requires the <code>API_RUN</code> setting to be set to <code>schedule</code>. Make sure you enter the schedule in the correct cron-like format (e.g. validate it at <a href="https://crontab.guru/" target="_blank">crontab.guru</a>).',
'API_RUN_INTERVAL_name' => 'API update interval',
'API_RUN_INTERVAL_description' => 'Requires the <code>API_RUN</code> setting to be set to <code>interval</code>. The minimum cycle is <code>5</code> seconds.',
);
?>

View File

@@ -285,7 +285,7 @@ while ($row = $result -> fetchArray (SQLITE3_ASSOC)) {
<script>
// number of settings has to be equal to
var settingsNumber = 61;
var settingsNumber = 65;
// Wrong number of settings processing
if(<?php echo count($settings)?> != settingsNumber)