Merge pull request #1029 from jokob-sk/next_release

Next release - Workflow work
This commit is contained in:
Jokob @NetAlertX
2025-03-29 12:47:56 +11:00
committed by GitHub
73 changed files with 2561 additions and 1440 deletions

View File

@@ -1,6 +1,6 @@
name: Feature Request
description: 'Suggest an idea for NetAlertX'
labels: ['Feature request']
labels: ['Feature request ']
body:
- type: checkboxes
attributes:

View File

@@ -61,6 +61,8 @@ services:
- ${DEV_LOCATION}/front/cloud_services.php:/app/front/cloud_services.php
- ${DEV_LOCATION}/front/report.php:/app/front/report.php
- ${DEV_LOCATION}/front/workflows.php:/app/front/workflows.php
- ${DEV_LOCATION}/front/workflowsCore.php:/app/front/workflowsCore.php
- ${DEV_LOCATION}/front/appEvents.php:/app/front/appEvents.php
- ${DEV_LOCATION}/front/appEventsCore.php:/app/front/appEventsCore.php
- ${DEV_LOCATION}/front/multiEditCore.php:/app/front/multiEditCore.php
- ${DEV_LOCATION}/front/plugins:/app/front/plugins

View File

@@ -54,8 +54,6 @@ if [[ $EUID -ne 0 ]]; then
exit 1
fi
echo "[INSTALL] Copy starter ${DB_FILE} and ${CONF_FILE} if they don't exist"
# DANGER ZONE: ALWAYS_FRESH_INSTALL
if [ "$ALWAYS_FRESH_INSTALL" = true ]; then
echo "[INSTALL] ❗ ALERT /db and /config folders are cleared because the ALWAYS_FRESH_INSTALL is set to: $ALWAYS_FRESH_INSTALL"
@@ -96,7 +94,9 @@ if [ -f "${INSTALL_DIR_OLD}/config/${OLD_APP_NAME}.conf" ]; then
fi
# 🔺 FOR BACKWARD COMPATIBILITY - REMOVE AFTER 12/12/2025
# Copy starter .db and .conf if they don't exist
echo "[INSTALL] Copy starter ${DB_FILE} and ${CONF_FILE} if they don't exist"
# Copy starter app.db, app.conf if they don't exist
cp -na "${INSTALL_DIR}/back/${CONF_FILE}" "${INSTALL_DIR}/config/${CONF_FILE}"
cp -na "${INSTALL_DIR}/back/${DB_FILE}" "${FULL_FILEDB_PATH}"
@@ -143,6 +143,7 @@ fi
# Create the execution_queue.log and app_front.log files if they don't exist
touch "${INSTALL_DIR}"/log/{app.log,execution_queue.log,app_front.log,app.php_errors.log,stderr.log,stdout.log,db_is_locked.log}
touch "${INSTALL_DIR}"/api/user_notifications.json
# Create plugins sub-directory if it doesn't exist in case a custom log folder is used
mkdir -p "${INSTALL_DIR}"/log/plugins

View File

@@ -4,7 +4,7 @@
## Overview
This functionality allows you to define **custom properties** for devices, which can store and display additional information on the device listing page. By marking properties as visible, you can enhance the user interface with quick actions, notes, or external links.
This functionality allows you to define **custom properties** for devices, which can store and display additional information on the device listing page. By marking properties as "Show", you can enhance the user interface with quick actions, notes, or external links.
### Key Features:
- **Customizable Properties**: Define specific properties for each device.
@@ -63,10 +63,10 @@ Visible properties (`CUSTPROP_show: true`) are displayed as interactive icons in
---
## Example Scenarios
## Example Use Cases
1. **Device Documentation Link**:
- Add a custom property with `CUSTPROP_type` set to `link` or `link_new_tab` to allow quick navigation to the documentation.
- Add a custom property with `CUSTPROP_type` set to `link` or `link_new_tab` to allow quick navigation to the external documentation of the device.
2. **Firmware Details**:
- Use `CUSTPROP_type: show_notes` to display firmware versions or upgrade instructions in a modal.

View File

@@ -1,35 +1,38 @@
## Development environment set up
# Development environment set up
>[!NOTE]
> Replace `/development` with the path where your code files will be stored. The default container name is `netalertx` so there might be a conflict with your running containers.
### Development Guidelines
## Development Guidelines
**Priority Order (Highest to Lowest):**
Before starting development, please scan the below development guidelines.
### Priority Order (Highest to Lowest)
1. 🔼 Fixing core bugs that lack workarounds.
2. 🔵 Adding core functionality that unlocks other features (e.g., plugins).
3. 🔵 Refactoring to enable faster development.
4. 🔽 UI improvements (PRs welcome).
💡 **Design Philosophy:**
### Design Philosophy
Focus on core functionality and integrate with existing tools rather than reinventing the wheel.
Examples:
- Using **Apprise** for notifications instead of implementing multiple separate gateways.
- Implementing **regex-based validation** instead of one-off validation for each setting.
📌 **Note on UI requests:**
> [!NOTE]
> UI changes have lower priority, however, PRs are welcome, but **keep them small & focused**.
- UI changes have lower priority due to framework limitations and mobile support constraints.
- PRs are welcome, but **keep them small & focused**.
## Development Environment Set Up
## 1. Download the code:
### 1. Download the code:
- `mkdir /development`
- `cd /development && git clone https://github.com/jokob-sk/NetAlertX.git`
## 2. Create a DEV .env_dev file
### 2. Create a DEV .env_dev file
`touch /development/.env_dev && sudo nano /development/.env_dev`
@@ -43,10 +46,12 @@ TZ=Europe/Berlin
PORT=22222 # make sure this port is unique on your whole network
DEV_LOCATION=/development/NetAlertX
APP_DATA_LOCATION=/volume/docker_appdata
# Make sure your GRAPHQL_PORT setting has a port that is unique on your whole host network
APP_CONF_OVERRIDE={"GRAPHQL_PORT":"22223"}
# ALWAYS_FRESH_INSTALL=true # uncommenting this will always delete the content of /config and /db dirs on boot to simulate a fresh install
```
## 3. Create /db and /config dirs
### 3. Create /db and /config dirs
Create a folder `netalertx` in the `APP_DATA_LOCATION` (in this example in `/volume/docker_appdata`) with 2 subfolders `db` and `config`.
@@ -54,7 +59,7 @@ Create a folder `netalertx` in the `APP_DATA_LOCATION` (in this example in `/vol
- `mkdir /volume/docker_appdata/netalertx/db`
- `mkdir /volume/docker_appdata/netalertx/config`
## 4. Run the container
### 4. Run the container
- `cd /development/NetAlertX && sudo docker-compose --env-file ../.env_dev `
@@ -63,7 +68,7 @@ You can then modify the python script without restarting/rebuilding the containe
![image](https://github.com/jokob-sk/NetAlertX/assets/96159884/3cbf2748-03c8-49e7-b801-f38c7755246b)
## 💡 Tips
## Tips
A quick cheat sheet of useful commands.
@@ -75,9 +80,9 @@ A command to stop, remove the container and the image (replace `netalertx` and `
### Restart the server backend
Most code changes can be tetsed without rebuilding the container. When working on the python server backend, you only need to restart the server.
Most code changes can be tested without rebuilding the container. When working on the python server backend, you only need to restart the server.
1. You can usually restart the backend via Maintenance > Logs > Restart server
1. You can usually restart the backend via _Maintenance > Logs > Restart_ server
![image](./img/DEV_ENV_SETUP/Maintenance_Logs_Restart_server.png)
@@ -86,11 +91,13 @@ Most code changes can be tetsed without rebuilding the container. When working o
- `sudo docker exec -it netalertx /bin/bash`
- `pkill -f "python /app/server" && python /app/server & `
3. If none of the above work, restart the docker image. This is usually the last resort as sometimes the Docker engine becomes unresponsive and the whole engine needs to be restarted.
3. If none of the above work, restart the docker container.
## Contributing & Pull Requests
- This is usually the last resort as sometimes the Docker engine becomes unresponsive and the whole engine needs to be restarted.
**Before submitting a PR, please ensure:**
## Contributing & Pull Requests
### Before submitting a PR, please ensure:
✔ Changes are **backward-compatible** with existing installs.
✔ No unnecessary changes are made.

View File

@@ -1,5 +1,8 @@
# `docker-compose.yaml` Examples
> [!NOTE]
> The container needs to run in `network_mode:"host"`.
### Example 1
```yaml

View File

@@ -49,3 +49,44 @@ NetAlertX comes with MQTT support, allowing you to show all detected devices as
[list]: ./img/HOME_ASISSTANT/HomeAssistant-Devices-List.png "list"
[overview]: ./img/HOME_ASISSTANT/HomeAssistant-Overview-Card.png "overview"
## Troubleshooting
If you can't see all devices detected, run `sudo arp-scan --interface=eth0 192.168.1.0/24` (change these based on your setup, read [Subnets](./SUBNETS.md) docs for details). This command has to be executed in the NetAlertX container, not in the Home Assistant container.
You can access the NetAlertX container via Portainer on your host or via ssh. The container name will be something like `addon_db21ed7f_netalertx` (you can copy the `db21ed7f_netalertx` part from the browser when accessing the UI of NetAlertX).
## Accessing the NetAlertX container via SSH
1. Log into your Home Assistant host via SSH
```bash
local@local:~ $ ssh pi@192.168.1.9
```
2. Find the NetAlertX container name, in this case `addon_db21ed7f_netalertx`
```bash
pi@raspberrypi:~ $ sudo docker container ls | grep netalertx
06c540d97f67 ghcr.io/alexbelgium/netalertx-armv7:25.3.1 "/init" 6 days ago Up 6 days (healthy) addon_db21ed7f_netalertx
```
3. SSH into the NetAlertX container
```bash
pi@raspberrypi:~ $ sudo docker exec -it addon_db21ed7f_netalertx /bin/sh
/ #
```
4. Execute a test `arp-scan` scan
```bash
/ # sudo arp-scan --ignoredups --retry=6 192.168.1.0/24 --interface=eth0
Interface: eth0, type: EN10MB, MAC: dc:a6:32:73:8a:b1, IPv4: 192.168.1.9
Starting arp-scan 1.10.0 with 256 hosts (https://github.com/royhills/arp-scan)
192.168.1.1 74:ac:b9:54:09:fb Ubiquiti Networks Inc.
192.168.1.21 74:ac:b9:ad:c3:30 Ubiquiti Networks Inc.
192.168.1.58 1c:69:7a:a2:34:7b EliteGroup Computer Systems Co., LTD
192.168.1.57 f4:92:bf:a3:f3:56 Ubiquiti Networks Inc.
...
```
If your result doesn't contain results similar to the above, double check your subnet, interface and if you are dealing with an inaccessible network segment, read the [Remote networks documentation](./REMOTE_NETWORKS.md).

View File

@@ -43,14 +43,13 @@ The application installation folder in the docker container has changed from `/h
# Examples
Exmaples of docker files with the new mount points.
Examples of docker files with the new mount points.
## Example 1: Mapping folders
### Old docker-compose.yml
```yaml
version: "3"
services:
pialert:
container_name: pialert
@@ -72,7 +71,6 @@ services:
### New docker-compose.yml
```yaml
version: "3"
services:
netalertx: # ⚠ This has changed (🟡optional)
container_name: netalertx # ⚠ This has changed (🟡optional)
@@ -100,7 +98,6 @@ services:
### Old docker-compose.yml
```yaml
version: "3"
services:
pialert:
container_name: pialert
@@ -122,7 +119,6 @@ services:
### New docker-compose.yml
```yaml
version: "3"
services:
netalertx: # ⚠ This has changed (🟡optional)
container_name: netalertx # ⚠ This has changed (🟡optional)

View File

@@ -2,17 +2,14 @@
You need to specify the network interface and the network mask. You can also configure multiple subnets and specify VLANs (see VLAN exceptions below).
`ARPSCAN` can scan multiple networks if the network allows it. To scan networks directly, the subnets must be accessible from the network where NetAlertX is running. This means NetAlertX needs to have access to the interface attached to that subnet. You can verify this by running the following command in the container (replace the interface and ip mask):
`ARPSCAN` can scan multiple networks if the network allows it. To scan networks directly, the subnets must be accessible from the network where NetAlertX is running. This means NetAlertX needs to have access to the interface attached to that subnet.
`sudo arp-scan --interface=eth0 192.168.1.0/24`
> [!WARNING]
> If you don't see all expected devices run the following command in the NetAlertX container (replace the interface and ip mask):
> `sudo arp-scan --interface=eth0 192.168.1.0/24`
>
> If this command returns no results, the network is not accessible due to your network or firewall restrictions (Wi-Fi Extenders, VPNs and inaccessible networks). If direct scans are not possible, check the [remote networks documentation](./REMOTE_NETWORKS.md) for workarounds.
In this example, `--interface=eth0 192.168.1.0/24` represents a neighboring subnet. If this command returns no results, the network is not accessible due to your network or firewall restrictions.
If direct scans are not possible (Wi-Fi Extenders, VPNs and inaccessible networks), check the [remote networks documentation](./REMOTE_NETWORKS.md).
> [!TIP]
> You may need to increase the time between scans `ARPSCAN_RUN_SCHD` and the timeout `ARPSCAN_RUN_TIMEOUT` (and similar settings for related plugins) when adding more subnets. If the timeout setting is exceeded, the scan is canceled to prevent the application from hanging due to rogue plugins.
> Check [debugging plugins](./DEBUG_PLUGINS.md) for more tips.
## Example Values
@@ -24,7 +21,17 @@ If direct scans are not possible (Wi-Fi Extenders, VPNs and inaccessible network
* One subnet: `SCAN_SUBNETS = ['192.168.1.0/24 --interface=eth0']`
* Two subnets: `SCAN_SUBNETS = ['192.168.1.0/24 --interface=eth0','192.168.1.0/24 --interface=eth1 --vlan=107']`
If you get timeout messages, decrease the network mask (e.g.: from `/16` to `/24`) or increase the `TIMEOUT` setting (e.g.: `ARPSCAN_RUN_TIMEOUT` to `300` (5-minute timeout)) for the plugin and the interval between scans (e.g.: `ARPSCAN_RUN_SCHD` to `*/10 * * * *` (scans every 10 minutes)).
> [!TIP]
> When adding more subnets, you may need to increase both the scan interval (`ARPSCAN_RUN_SCHD`) and the timeout (`ARPSCAN_RUN_TIMEOUT`)—as well as similar settings for related plugins.
>
> If the timeout is too short, you may see timeout errors in the log. To prevent the application from hanging due to unresponsive plugins, scans are canceled when they exceed the timeout limit.
>
> To fix this:
> - Reduce the subnet size (e.g., change `/16` to `/24`).
> - Increase the timeout (e.g., set `ARPSCAN_RUN_TIMEOUT` to `300` for a 5-minute timeout).
> - Extend the scan interval (e.g., set `ARPSCAN_RUN_SCHD` to `*/10 * * * *` to scan every 10 minutes).
>
> For more troubleshooting tips, see [Debugging Plugins](./DEBUG_PLUGINS.md).
---

View File

@@ -8,7 +8,6 @@ NetAlertX provides contextual help within the application:
- **Hover over settings, fields, or labels** to see additional tooltips and guidance.
- **Click ❔ (question-mark) icons** next to various elements to view detailed information.
- Access the in-app **Help / FAQ** section for frequently asked questions and quick answers.
---

21
front/appEvents.php Executable file
View File

@@ -0,0 +1,21 @@
<?php
// App Events page: thin wrapper that renders the shared header/notification
// templates, embeds the events table (appEventsCore.php), and closes with
// the shared footer.
require 'php/templates/header.php';
require 'php/templates/notification.php';
?>
<!-- ----------------------------------------------------------------------- -->
<!-- Page ------------------------------------------------------------------ -->
<div class="content-wrapper">
<?php
// Core content: renders the AppEvents DataTable (markup + page script)
require 'appEventsCore.php';
?>
</div>
<?php
require 'php/templates/footer.php';
?>

View File

@@ -1,12 +1,12 @@
<section class="content">
<div class="nav-tabs-custom app-event-content" style="margin-bottom: 0px;">
<ul id="tabs-location" class="nav nav-tabs col-sm-2">
<li class="left-nav"><a class="col-sm-12" href="#" id="" data-toggle="tab">Events</a></li>
</ul>
<div id="tabs-content-location" class="tab-content col-sm-10">
<table class="table table-striped" id="appevents-table" data-my-dbtable="AppEvents"></table>
</div>
<div class="nav-tabs-custom app-event-content" style="margin-bottom: 0px;">
<ul id="tabs-location" class="nav nav-tabs col-sm-2 hidden">
<li class="left-nav"><a class="col-sm-12" href="#" id="" data-toggle="tab">Events</a></li>
</ul>
<div id="tabs-content-location" class="tab-content col-sm-12">
<table class="table table-striped" id="appevents-table" data-my-dbtable="AppEvents"></table>
</div>
</div>
</section>
@@ -18,75 +18,111 @@ showSpinner()
$(document).ready(function() {
// Load JSON data from the provided URL
$.getJSON('/php/server/query_json.php?file=table_appevents.json', function(data) {
// Process the JSON data and generate UI dynamically
processData(data)
// Load JSON data from the provided URL
$.getJSON('/php/server/query_json.php?file=table_appevents.json', function(data) {
// Process the JSON data and generate UI dynamically
processData(data)
// hide loading dialog
hideSpinner()
});
// hide loading dialog
hideSpinner()
});
});
function processData(data) {
// Create an object to store unique ObjectType values as app event identifiers
var appEventIdentifiers = {};
// Create an object to store unique ObjectType values as app event identifiers
var appEventIdentifiers = {};
// Array to accumulate data for DataTable
var allData = [];
// Array to accumulate data for DataTable
var allData = [];
// Iterate through the data and generate tabs and content dynamically
$.each(data.data, function(index, item) {
// Accumulate data for DataTable
allData.push(item);
});
// Initialize DataTable for all app events
// Iterate through the data and generate tabs and content dynamically
$.each(data.data, function(index, item) {
$('#appevents-table').DataTable({
data: allData,
paging: true,
lengthChange: true,
lengthMenu: [[10, 25, 50, 100, 500, -1], [10, 25, 50, 100, 500, 'All']],
searching: true,
ordering: true,
info: true,
autoWidth: false,
pageLength: 25, // Set the default paging to 25
columns: [
{ data: 'DateTimeCreated', title: getString('AppEvents_DateTimeCreated') },
{ data: 'AppEventType', title: getString('AppEvents_Type') },
{ data: 'ObjectType', title: getString('AppEvents_ObjectType') },
{ data: 'ObjectPrimaryID', title: getString('AppEvents_ObjectPrimaryID') },
{ data: 'ObjectSecondaryID', title: getString('AppEvents_ObjectSecondaryID') },
{ data: 'ObjectStatus', title: getString('AppEvents_ObjectStatus') },
{ data: 'Extra', title: getString('AppEvents_Extra') },
{ data: 'ObjectPlugin', title: getString('AppEvents_Plugin') },
// Add other columns as needed
],
// Add column-specific configurations if needed
columnDefs: [
{ className: 'text-center', targets: [3] },
{ width: '80px', targets: [6] },
// ... Add other columnDefs as needed
// Full MAC
{targets: [3, 4],
'createdCell': function (td, cellData, rowData, row, col) {
if (!emptyArr.includes(cellData)){
$(td).html (createDeviceLink(cellData));
} else {
$(td).html ('');
}
} },
]
});
// Accumulate data for DataTable
allData.push(item);
});
console.log(allData);
// Initialize DataTable for all app events
$('#appevents-table').DataTable({
data: allData,
paging: true,
lengthChange: true,
lengthMenu: [[10, 25, 50, 100, 500, -1], [10, 25, 50, 100, 500, 'All']],
searching: true,
ordering: true,
info: true,
autoWidth: false,
pageLength: 25, // Set the default paging to 25
columns: [
{ data: 'DateTimeCreated', title: getString('AppEvents_DateTimeCreated') },
{ data: 'AppEventProcessed', title: getString('AppEvents_AppEventProcessed') },
{ data: 'AppEventType', title: getString('AppEvents_Type') },
{ data: 'ObjectType', title: getString('AppEvents_ObjectType') },
{ data: 'ObjectPrimaryID', title: getString('AppEvents_ObjectPrimaryID') },
{ data: 'ObjectSecondaryID', title: getString('AppEvents_ObjectSecondaryID') },
{ data: 'ObjectStatus', title: getString('AppEvents_ObjectStatus') },
{ data: 'ObjectPlugin', title: getString('AppEvents_Plugin') },
{ data: 'ObjectGUID', title: "Object GUID" },
{ data: 'GUID', title: "Event GUID" },
// Add other columns as needed
],
// Add column-specific configurations if needed
columnDefs: [
{ className: 'text-center', targets: [4] },
{ width: '80px', targets: [7] },
// ... Add other columnDefs as needed
// Full MAC
{targets: [4, 5],
'createdCell': function (td, cellData, rowData, row, col) {
if (!emptyArr.includes(cellData)){
$(td).html (createDeviceLink(cellData));
} else {
$(td).html ('');
}
} },
// Processed
{targets: [1],
'createdCell': function (td, cellData, rowData, row, col) {
// console.log(cellData);
$(td).html (cellData);
}
},
// Datetime
{targets: [0],
'createdCell': function (td, cellData, rowData, row, col) {
let timezone = $("#NAX_TZ").html(); // e.g., 'Europe/Berlin'
let utcDate = new Date(cellData + ' UTC'); // Adding ' UTC' makes it interpreted as UTC time
// Format the date in the desired timezone
let options = {
year: 'numeric',
month: 'short',
day: '2-digit',
hour: '2-digit',
minute: '2-digit',
second: '2-digit',
hour12: false, // Use 24-hour format
timeZone: timezone // Use the specified timezone
};
let localDate = new Intl.DateTimeFormat('en-GB', options).format(utcDate);
// Update the table cell
$(td).html(localDate);
}
},
]
});
// Activate the first tab
$('#tabs-location li:first-child').addClass('active');
$('#tabs-content-location .tab-pane:first-child').addClass('active');
// Activate the first tab
$('#tabs-location li:first-child').addClass('active');
$('#tabs-content-location .tab-pane:first-child').addClass('active');
}
</script>

View File

@@ -1524,10 +1524,11 @@ input[readonly] {
margin:0px;
align-items:center;
border-radius:20px;
width:180px;
display:flex;
/* width:190px; Don't change, smaller causes line break in network view */
/* display:flex; */
flex-direction:column;
justify-content:center;
display: inline-grid;
}
.networkHelpIcon
{
@@ -1597,7 +1598,7 @@ input[readonly] {
.spanNetworkTree {
display: inline-block;
width: 135px;
/* width: 135px; */
white-space: nowrap;
overflow: hidden !important;
text-overflow: ellipsis;
@@ -1840,6 +1841,132 @@ input[readonly] {
height:50px;
}
/* -----------------------------------------------------------------------------
Workflows
----------------------------------------------------------------------------- */
#wf-content-wrapper
{
display: grid;
}
#workflowContainer
{
display: grid;
}
#workflowContainerWrap {
display: grid;
}
#workflowContainerWrap .panel-collapse
{
padding: 5px;
}
.workflows .col-sm-12, .workflows .col-sx-12
{
padding-right: 5px;
padding-left: 5px;
}
.workflows .add-button-wrap .button-container
{
padding-bottom: 5px;
text-align: center;
}
.workflows .actions-list {
display: block;
}
.workflows .form-group {
margin-bottom: 7px;
z-index: 1;
display: flex;
}
.workflows .section-title
{
padding: 10px;
font-weight: bolder;
font-size: large;
}
.workflows .panel, .workflows .box {
padding-top: 5px;
padding-bottom: 5px;
}
.workflows .btn-secondary{
color: #000;
}
.workflows .button-container
{
display: contents;
text-align: center;
width: 100%;
}
.workflows .panel:hover{
opacity: 1;
}
.workflows .panel{
opacity: 0.8;
}
.workflows .bottom-buttons button
{
margin: 5px;
}
.workflows .button-container
{
padding-right: 0px !important;
padding-left: 0px !important;
}
/* .workflows .condition-list button
{
margin: 2px;
} */
/* .button-container button
{
width:100%;
} */
.workflows .workflow-card
{
display: block;
}
.workflow-card .panel-title
{
padding: 10px;
}
.workflow-card, .actions-list
{
display: contents;
padding: 5px;
padding-left: 10px;
}
.condition-list
{
z-index:1;
}
.condition
{
padding: 5px;
padding-left: 10px;
}
/* -----------------------------------------------------------------------------
Floating edit button
----------------------------------------------------------------------------- */

View File

@@ -744,4 +744,5 @@
.thresholdFormControl
{
color:#000;
}
}

View File

@@ -123,7 +123,7 @@
<!-- page script ----------------------------------------------------------- -->
<script>
var deviceStatus = 'all';
var tableRows = getCache ("nax_parTableRows") == "" ? 10 : getCache ("nax_parTableRows") ;
var tableRows = getCache ("nax_parTableRows") == "" ? 20 : getCache ("nax_parTableRows") ;
var tableOrder = getCache ("nax_parTableOrder") == "" ? [[3,'desc'], [0,'asc']] : JSON.parse(getCache ("nax_parTableOrder")) ;
var tableColumnHide = [];
@@ -737,12 +737,13 @@ function initializeDatatable (status) {
},
'paging' : true,
'lengthChange' : true,
'lengthMenu' : [[10, 25, 50, 100, 500, 100000], [10, 25, 50, 100, 500, getString('Device_Tablelenght_all')]],
'lengthMenu' : [[10, 20, 25, 50, 100, 500, 100000], [10, 20, 25, 50, 100, 500, getString('Device_Tablelenght_all')]],
'searching' : true,
'ordering' : true,
'info' : true,
'autoWidth' : false,
'dom': '<"top"f>rtl<"bottom"ip><"clear">',
// Parameters
'pageLength' : tableRows,

View File

@@ -1303,6 +1303,38 @@ $(document).ready(function() {
}
});
// -----------------------------------------------------------
// Restart Backend Python Server
function askRestartBackend() {
// Ask
showModalWarning(getString('Maint_RestartServer'), getString('Maint_Restart_Server_noti_text'),
getString('Gen_Cancel'), getString('Maint_RestartServer'), 'restartBackend');
}
// -----------------------------------------------------------
function restartBackend() {
modalEventStatusId = 'modal-message-front-event'
// Execute
$.ajax({
method: "POST",
url: "php/server/util.php",
data: { function: "addToExecutionQueue", action: `${getGuid()}|cron_restart_backend` },
success: function(data, textStatus) {
// showModalOk ('Result', data );
// show message
showModalOk(getString("general_event_title"), `${getString("general_event_description")} <br/> <br/> <code id='${modalEventStatusId}'></code>`);
updateModalState()
write_notification('[Maintenance] App manually restarted', 'info')
}
})
}
// -----------------------------------------------------------------------------
// initialize
// -----------------------------------------------------------------------------

File diff suppressed because one or more lines are too long

View File

@@ -395,38 +395,6 @@ function deleteActHistory()
});
}
// -----------------------------------------------------------
// Restart Backend Python Server
function askRestartBackend() {
// Ask
showModalWarning('<?= lang('Maint_RestartServer');?>', '<?= lang('Maint_Restart_Server_noti_text');?>',
'<?= lang('Gen_Cancel');?>', '<?= lang('Maint_RestartServer');?>', 'restartBackend');
}
// -----------------------------------------------------------
function restartBackend() {
modalEventStatusId = 'modal-message-front-event'
// Execute
$.ajax({
method: "POST",
url: "php/server/util.php",
data: { function: "addToExecutionQueue", action: `${getGuid()}|cron_restart_backend` },
success: function(data, textStatus) {
// showModalOk ('Result', data );
// show message
showModalOk(getString("general_event_title"), `${getString("general_event_description")} <br/> <br/> <code id='${modalEventStatusId}'></code>`);
updateModalState()
write_notification('[Maintenance] App manually restarted', 'info')
}
})
}
// -----------------------------------------------------------
// Import pasted Config ASK
function askImportPastedConfig() {
@@ -440,17 +408,15 @@ function askImportPastedConfig() {
// Upload Settings Config
function UploadConfig()
{
// alert("aaa")
appConf = $('#modal-input-textarea').val()
// encode for import
appConfBase64 = btoa(appConf)
// import
$.post('php/server/query_replace_config.php', { config: appConfBase64 }, function(msg) {
$.post('php/server/query_replace_config.php', { base64data: appConfBase64, fileName: "app.conf" }, function(msg) {
console.log(msg);
// showMessage(msg);
write_notification(`[Maintenance] Settings imported from backup: ${msg}`, 'interrupt');
write_notification(`[Maintenance]: ${msg}`, 'interrupt');
});
}

View File

@@ -465,7 +465,7 @@
?>
<script src="lib/treeviz/bundle.js"></script>
<script src="lib/treeviz/bundle.js.map"></script>
<script defer>

View File

@@ -1,6 +1,6 @@
<?php
ini_set('error_log', '../../log/app.php_errors.log'); // initializing the app.php_errors.log file for the maintenance section
require dirname(__FILE__).'/../templates/timezone.php';
require dirname(__FILE__).'/../templates/globals.php';
require dirname(__FILE__).'/db.php';
require dirname(__FILE__).'/util.php';
require dirname(__FILE__).'/../templates/language/lang.php';

View File

@@ -13,21 +13,23 @@ require dirname(__FILE__).'/../server/init.php';
if ($_SERVER['REQUEST_METHOD'] === 'GET') {
// Get query string parameters ?file=settings_table.json&download=true
$file = isset($_GET['file']) ? $_GET['file'] : null;
$download = isset($_GET['download']) ? $_GET['download'] === 'true' : false;
$download = isset($_GET['download']) && $_GET['download'] === 'true';
// Check if file parameter is provided
if ($file) {
// Define the folder where files are located
$filePath = "/app/config/" . basename($file);
// Check if the file exists
if (file_exists($filePath)) {
// Handle download behavior
// Check if the file exists and is readable
if (file_exists($filePath) && is_readable($filePath)) {
// Determine file extension
$extension = pathinfo($filePath, PATHINFO_EXTENSION);
if ($download) {
// Force file download
header('Content-Description: File Transfer');
header('Content-Type: application/octet-stream');
header('Content-Disposition: attachment; filename="' . basename($filePath) . '"');
header('Content-Disposition: attachment; filename="' . basename($file) . '"');
header('Expires: 0');
header('Cache-Control: must-revalidate');
header('Pragma: public');
@@ -35,19 +37,29 @@ if ($_SERVER['REQUEST_METHOD'] === 'GET') {
readfile($filePath);
exit;
} else {
// Display file content
header('Content-Type: text/plain');
echo file_get_contents($filePath);
// Serve file based on type
if ($extension === 'json') {
header('Content-Type: application/json');
echo json_encode(json_decode(file_get_contents($filePath), true), JSON_PRETTY_PRINT);
} else {
header('Content-Type: text/plain');
echo file_get_contents($filePath);
}
exit;
}
} else {
// File not found response
http_response_code(404);
header('Content-Type: application/json');
echo json_encode(["error" => "File not found"]);
exit;
}
} else {
// Missing file parameter response
http_response_code(400);
header('Content-Type: application/json');
echo json_encode(["error" => "Missing 'file' parameter"]);
exit;
}
}
?>

View File

@@ -17,7 +17,13 @@ if ($_SERVER['REQUEST_METHOD'] === 'GET') {
// Check if file parameter is provided
if ($file) {
// Define the folder where files are located
$filePath = "/app/api/" . basename($file);
if ($file == "workflows.json")
{
$filePath = "/app/config/" . basename($file);
} else
{
$filePath = "/app/api/" . basename($file);
}
// Check if the file exists
if (file_exists($filePath)) {
@@ -34,5 +40,38 @@ if ($_SERVER['REQUEST_METHOD'] === 'GET') {
http_response_code(400);
echo json_encode(["error" => "Missing 'file' parameter"]);
}
} elseif ($_SERVER['REQUEST_METHOD'] === 'POST') {
// Read the input JSON data
$inputData = file_get_contents("php://input");
$decodedData = json_decode($inputData, true);
if (json_last_error() !== JSON_ERROR_NONE) {
http_response_code(400);
echo json_encode(["error" => "Invalid JSON data"]);
exit;
}
// Check if file parameter is provided and is workflows.json
if (!isset($_GET['file']) || $_GET['file'] !== "workflows.json") {
http_response_code(400);
echo json_encode(["error" => "Invalid or missing file parameter"]);
exit;
}
$file = $_GET['file'];
$filePath = "/app/config/" . basename($file);
// Save new workflows.json (replace existing content)
if (file_put_contents($filePath, json_encode($decodedData, JSON_PRETTY_PRINT))) {
http_response_code(200);
echo json_encode(["success" => "Workflows replaced successfully"]);
} else {
http_response_code(500);
echo json_encode(["error" => "Failed to update workflows.json"]);
}
}
else {
http_response_code(405);
echo json_encode(["error" => "Method Not Allowed"]);
}
?>

View File

@@ -8,16 +8,24 @@ require_once $_SERVER['DOCUMENT_ROOT'] . '/php/templates/security.php';
require dirname(__FILE__).'/../server/init.php';
// ---- IMPORTS ----
global $fullConfPath;
global $configFolderPath;
//------------------------------------------------------------------------------
// Handle incoming requests
if ($_SERVER['REQUEST_METHOD'] === 'POST') {
// Access the 'config' parameter from the POST request
$base64Data = $_POST['config'] ?? null;
$base64Data = $_POST['base64data'] ?? null;
if (!$base64Data) {
$msg = "Missing 'config' parameter.";
$msg = "Missing 'base64data' parameter.";
echo $msg;
http_response_code(400); // Bad request
die($msg);
}
$fileName = $_POST['fileName'] ?? null;
if (!$fileName) {
$msg = "Missing 'fileName' parameter.";
echo $msg;
http_response_code(400); // Bad request
die($msg);
@@ -33,15 +41,17 @@ if ($_SERVER['REQUEST_METHOD'] === 'POST') {
die($msg);
}
$fullPath = $configFolderPath.$fileName;
// Backup the original file
if (file_exists($fullConfPath)) {
copy($fullConfPath, $fullConfPath . ".bak");
if (file_exists($fullPath)) {
copy($fullPath, $fullPath . ".bak");
}
// Write the new configuration
$file = fopen($fullConfPath, "w");
$file = fopen($fullPath, "w");
if (!$file) {
$msg = "Unable to open file!";
$msg = "Unable to open file: ". $fullPath;
echo $msg;
http_response_code(500); // Server error
die($msg);
@@ -50,6 +60,6 @@ if ($_SERVER['REQUEST_METHOD'] === 'POST') {
fwrite($file, $input);
fclose($file);
echo "Configuration saved successfully.";
echo "Configuration file saved successfully: " .$fileName ;
}
?>

View File

@@ -8,7 +8,7 @@
# Puche 2021 / 2022+ jokob jokob@duck.com GNU GPLv3
//------------------------------------------------------------------------------
require dirname(__FILE__).'/../templates/timezone.php';
require dirname(__FILE__).'/../templates/globals.php';
require dirname(__FILE__).'/../templates/skinUI.php';
//------------------------------------------------------------------------------

View File

@@ -1,6 +1,6 @@
<?php
require dirname(__FILE__).'/../templates/timezone.php';
require dirname(__FILE__).'/../templates/globals.php';
//------------------------------------------------------------------------------
// check if authenticated

View File

@@ -1,17 +1,21 @@
<?php
// ###################################
// ## TimeZone processing start
// ###################################
// ######################################################################
// ## Global constants and TimeZone processing
// ######################################################################
$configFolderPath = dirname(__FILE__)."/../../../config/";
$config_file = "app.conf";
$configFolderPath = "/app/config/";
$logFolderPath = "/app/log/";
$config_file = "app.conf";
$workflows_file = "workflows.json";
$log_file = "app_front.log";
$default_tz = "Europe/Berlin";
$fullConfPath = $configFolderPath.$config_file;
$fullWorkflowsPath = $configFolderPath.$workflows_file;
$config_file_lines = file($fullConfPath);
$config_file_lines_timezone = array_values(preg_grep('/^TIMEZONE\s.*/', $config_file_lines));
@@ -44,7 +48,7 @@ date_default_timezone_set($timeZone);
$date = new DateTime("now", new DateTimeZone($timeZone) );
$timestamp = $date->format('Y-m-d_H-i-s');
// ###################################
// ## TimeZone processing end
// ###################################
// ######################################################################
// ## Global constants and TimeZone processing
// ######################################################################

View File

@@ -150,7 +150,8 @@
let formattedDateTime = `${day}-${month}-${year} ${hour}:${minute}:${second}`;
if (document.getElementById) {
document.getElementById("PIA_Servertime_place").innerHTML = '(' + formattedDateTime + ')';
document.getElementById("NAX_Servertime_plc").innerHTML = '(' + formattedDateTime + ')';
document.getElementById("NAX_TZ").innerHTML = timeZone;
}
setTimeout(update_servertime, 1000); // Call recursively every second
@@ -234,7 +235,13 @@
<!-- Server Name -->
<li>
<div class="header-server-time small">
<div><?php echo gethostname();?></div> <div><span id="PIA_Servertime_place"></span></div>
<div>
<?php echo gethostname();?>
</div>
<div>
<span id="NAX_Servertime_plc"></span>
<span id="NAX_TZ" class="hidden"></span>
</div>
</div>
</li>
@@ -414,18 +421,22 @@
</li>
<!-- Integrations menu item -->
<li class=" treeview <?php if (in_array (basename($_SERVER['SCRIPT_NAME']), array('plugins.php', 'workflows.php' ) ) ){ echo 'active menu-open'; } ?>">
<li class=" treeview <?php if (in_array (basename($_SERVER['SCRIPT_NAME']), array('plugins.php', 'workflows.php', 'appEvents.php' ) ) ){ echo 'active menu-open'; } ?>">
<a href="#">
<i class="fa fa-fw fa-plug"></i> <span><?= lang('Navigation_Integrations');?></span>
<span class="pull-right-container">
<i class="fa fa-angle-left pull-right"></i>
</span>
</a>
<ul class="treeview-menu " style="display: <?php if (in_array (basename($_SERVER['SCRIPT_NAME']), array('plugins.php', 'workflows.php' ) ) ){ echo 'block'; } else {echo 'none';} ?>;">
<ul class="treeview-menu " style="display: <?php if (in_array (basename($_SERVER['SCRIPT_NAME']), array('plugins.php', 'workflows.php', 'appEvents.php' ) ) ){ echo 'block'; } else {echo 'none';} ?>;">
<li>
<div class="info-icon-nav"> </div>
<a href="workflows.php"><?= lang('Navigation_Workflows');?></a>
</li>
<li>
<div class="info-icon-nav"> </div>
<a href="appEvents.php"><?= lang('Navigation_AppEvents');?></a>
</li>
<li>
<a href="plugins.php"><?= lang("Navigation_Plugins");?> </a>
</li>

View File

@@ -8,6 +8,7 @@
"About_Design": "",
"About_Exit": "",
"About_Title": "",
"AppEvents_AppEventProcessed": "",
"AppEvents_DateTimeCreated": "",
"AppEvents_Extra": "",
"AppEvents_GUID": "",
@@ -464,6 +465,7 @@
"NETWORK_DEVICE_TYPES_description": "",
"NETWORK_DEVICE_TYPES_name": "",
"Navigation_About": "",
"Navigation_AppEvents": "",
"Navigation_Devices": "",
"Navigation_Donations": "",
"Navigation_Events": "",

View File

@@ -8,6 +8,7 @@
"About_Design": "Dissenyat per:",
"About_Exit": "Sortir",
"About_Title": "Escàner de seguretat de xarxa i marc de notificacions",
"AppEvents_AppEventProcessed": "",
"AppEvents_DateTimeCreated": "Logged",
"AppEvents_Extra": "Extra",
"AppEvents_GUID": "GUID d'esdeveniments d'Aplicació",
@@ -464,6 +465,7 @@
"NETWORK_DEVICE_TYPES_description": "Quins tipus de dispositius es poden utilitzar com a dispositius de xarxa a la vista \"xarxa\". El tipus de dispositiu ha de coincidir exactament amb la configuració <code>Tipus</code> dels detalls de dispositiu. Afegir-ho al dispositiu fent servir el botó <code>+</code>. No elimini els tipus existents, només afegir-ne nous.",
"NETWORK_DEVICE_TYPES_name": "Tipus de dispositiu de xarxa",
"Navigation_About": "Sobre",
"Navigation_AppEvents": "",
"Navigation_Devices": "Dispositius",
"Navigation_Donations": "Donacions",
"Navigation_Events": "Esdeveniments",
@@ -716,4 +718,4 @@
"settings_update_item_warning": "Actualitza el valor sota. Sigues curós de seguir el format anterior. <b>No hi ha validació.</b>",
"test_event_icon": "fa-vial-circle-check",
"test_event_tooltip": "Deseu els canvis primer abans de comprovar la configuració."
}
}

View File

@@ -8,6 +8,7 @@
"About_Design": "",
"About_Exit": "",
"About_Title": "",
"AppEvents_AppEventProcessed": "",
"AppEvents_DateTimeCreated": "",
"AppEvents_Extra": "",
"AppEvents_GUID": "",
@@ -464,6 +465,7 @@
"NETWORK_DEVICE_TYPES_description": "",
"NETWORK_DEVICE_TYPES_name": "",
"Navigation_About": "",
"Navigation_AppEvents": "",
"Navigation_Devices": "",
"Navigation_Donations": "",
"Navigation_Events": "",

View File

@@ -16,6 +16,7 @@
"About_Design": "Entworfen für:",
"About_Exit": "Abmelden",
"About_Title": "Netzwerksicherheitsscanner und Benachrichtigungsframework",
"AppEvents_AppEventProcessed": "",
"AppEvents_DateTimeCreated": "protokolliert",
"AppEvents_Extra": "Extra",
"AppEvents_GUID": "Anwendungsereignis-GUID",
@@ -500,6 +501,7 @@
"NTFY_display_name": "NTFY",
"NTFY_icon": "<i class=\"fa fa-terminal\"></i>",
"Navigation_About": "Über",
"Navigation_AppEvents": "",
"Navigation_Devices": "Geräte",
"Navigation_Donations": "Spenden",
"Navigation_Events": "Ereignisse",
@@ -797,4 +799,4 @@
"settings_update_item_warning": "",
"test_event_icon": "",
"test_event_tooltip": "Speichere die Änderungen, bevor Sie die Einstellungen testen."
}
}

View File

@@ -8,6 +8,7 @@
"About_Design": "Designed for:",
"About_Exit": "Sign out",
"About_Title": "Network security scanner & notification framework",
"AppEvents_AppEventProcessed": "Processed",
"AppEvents_DateTimeCreated": "Logged",
"AppEvents_Extra": "Extra",
"AppEvents_GUID": "Application Event GUID",
@@ -224,7 +225,7 @@
"Device_TableHead_Name": "Name",
"Device_TableHead_NetworkSite": "Network Site",
"Device_TableHead_Owner": "Owner",
"Device_TableHead_Parent_MAC": "Parent node MAC",
"Device_TableHead_Parent_MAC": "Parent network node",
"Device_TableHead_Port": "Port",
"Device_TableHead_PresentLastScan": "Presence",
"Device_TableHead_RowID": "Row ID",
@@ -464,6 +465,7 @@
"NETWORK_DEVICE_TYPES_description": "Which device types are allowed to be used as network devices in the Network view. The device type has to match exactly the <code>Type</code> setting on a specific device in Device details. Add it on the Device via the <code>+</code> button. Do not remove existing types, only add new ones.",
"NETWORK_DEVICE_TYPES_name": "Network device types",
"Navigation_About": "About",
"Navigation_AppEvents": "App Events",
"Navigation_Devices": "Devices",
"Navigation_Donations": "Donations",
"Navigation_Events": "Events",

View File

@@ -16,6 +16,7 @@
"About_Design": "Diseñado para:",
"About_Exit": "Salir",
"About_Title": "Escáner de seguridad de la red y marco de notificaciones",
"AppEvents_AppEventProcessed": "",
"AppEvents_DateTimeCreated": "Registrado",
"AppEvents_Extra": "Extra",
"AppEvents_GUID": "GUID del evento de aplicación",
@@ -498,6 +499,7 @@
"NTFY_display_name": "NTFY",
"NTFY_icon": "<i class=\"fa fa-terminal\"></i>",
"Navigation_About": "Acerca de",
"Navigation_AppEvents": "",
"Navigation_Devices": "Dispositivos",
"Navigation_Donations": "Donaciones",
"Navigation_Events": "Eventos",

View File

@@ -8,6 +8,7 @@
"About_Design": "Conçu pour:",
"About_Exit": "Se déconnecter",
"About_Title": "Analyse de la sécurité du réseau et cadre de notification",
"AppEvents_AppEventProcessed": "",
"AppEvents_DateTimeCreated": "Connecté",
"AppEvents_Extra": "Extra",
"AppEvents_GUID": "GUID dévénements de l'application",
@@ -464,6 +465,7 @@
"NETWORK_DEVICE_TYPES_description": "Les types d'appareils autorisés à être utilisés comme appareils réseau dans la vue Réseau. Le type d'appareils doit être identique au paramètre <code>Type</code> d'un appareil dans le détail des appareils. Ajouter le sur l'appareil grâce au bouton <code>+</code>. Ne pas supprimer de valeurs, seulement en ajouter de nouvelles.",
"NETWORK_DEVICE_TYPES_name": "Type d'appareil réseau",
"Navigation_About": "À propos",
"Navigation_AppEvents": "",
"Navigation_Devices": "Appareils",
"Navigation_Donations": "Dons",
"Navigation_Events": "Évènements",
@@ -716,4 +718,4 @@
"settings_update_item_warning": "Mettre à jour la valeur ci-dessous. Veillez à bien suivre le même format qu'auparavant. <b>Il n'y a pas de pas de contrôle.</b>",
"test_event_icon": "fa-vial-circle-check",
"test_event_tooltip": "Enregistrer d'abord vos modifications avant de tester vôtre paramétrage."
}
}

View File

@@ -8,6 +8,7 @@
"About_Design": "Progettato per:",
"About_Exit": "Esci",
"About_Title": "Scanner di sicurezza di rete e framework di notifica",
"AppEvents_AppEventProcessed": "",
"AppEvents_DateTimeCreated": "Loggato",
"AppEvents_Extra": "Extra",
"AppEvents_GUID": "GUID evento applicazione",
@@ -464,6 +465,7 @@
"NETWORK_DEVICE_TYPES_description": "Quali tipi di dispositivo possono essere utilizzati come dispositivi di rete nella vista Rete. Il tipo di dispositivo deve corrispondere esattamente all'impostazione <code>Tipo</code> su un dispositivo specifico nei Dettagli dispositivo. Aggiungilo sul Dispositivo tramite il pulsante <code>+</code>. Non rimuovere i tipi esistenti, aggiungine solo di nuovi.",
"NETWORK_DEVICE_TYPES_name": "Tipi di dispositivi di rete",
"Navigation_About": "Informazioni su",
"Navigation_AppEvents": "",
"Navigation_Devices": "Dispositivi",
"Navigation_Donations": "Donazioni",
"Navigation_Events": "Eventi",
@@ -716,4 +718,4 @@
"settings_update_item_warning": "Aggiorna il valore qui sotto. Fai attenzione a seguire il formato precedente. <b>La convalida non viene eseguita.</b>",
"test_event_icon": "fa-vial-circle-check",
"test_event_tooltip": "Salva le modifiche prima di provare le nuove impostazioni."
}
}

View File

@@ -8,6 +8,7 @@
"About_Design": "Designet for:",
"About_Exit": "Logg ut",
"About_Title": "Nettverkssikkerhetsskanner og varslingsrammeverk",
"AppEvents_AppEventProcessed": "",
"AppEvents_DateTimeCreated": "Logget",
"AppEvents_Extra": "Ekstra",
"AppEvents_GUID": "Applikasjon Hendelse GUID",
@@ -464,6 +465,7 @@
"NETWORK_DEVICE_TYPES_description": "Hvilke enhetstyper som tillates å brukes som nettverksenheter i nettverksvisningen. Enhetstypen må samsvare med nøyaktig <code>Type</code> Innstillingen på en bestemt enhet i enhetsdetaljer. Ikke fjern eksisterende typer, legg bare til nye.",
"NETWORK_DEVICE_TYPES_name": "Nettverksenhetstyper",
"Navigation_About": "Om",
"Navigation_AppEvents": "",
"Navigation_Devices": "Enheter",
"Navigation_Donations": "Donasjoner",
"Navigation_Events": "Hendelser",

View File

@@ -8,6 +8,7 @@
"About_Design": "Zaprojektowany dla:",
"About_Exit": "Wyloguj",
"About_Title": "Skaner bezpieczeństwa sieciowego i framwork powiadomień",
"AppEvents_AppEventProcessed": "",
"AppEvents_DateTimeCreated": "Zalogowany",
"AppEvents_Extra": "Ekstra",
"AppEvents_GUID": "Aplikacja GUID wydarzeń",
@@ -464,6 +465,7 @@
"NETWORK_DEVICE_TYPES_description": "Które typy urządzeń mają zezwolenie na bycie użytym jako urządzenia sieciowe w Widoku Sieci. Typ urządzenia musi dokładnie odpowiadać ustawieniu <code>Typ</code> na konkretnym urządzeniu w Szczegółach urządzenia. Nie usuwaj istniejących typów, tylko dodawaj nowe.",
"NETWORK_DEVICE_TYPES_name": "Typy urządzeń sieciowych",
"Navigation_About": "Informacje o",
"Navigation_AppEvents": "",
"Navigation_Devices": "Urządzenia",
"Navigation_Donations": "Dotacje",
"Navigation_Events": "Wydarzenia",
@@ -716,4 +718,4 @@
"settings_update_item_warning": "Zaktualizuj poniższą wartość. Zachowaj ostrożność i postępuj zgodnie z poprzednim formatem. <b>Walidacja nie jest wykonywana.</b>",
"test_event_icon": "fa-vial-circle-check",
"test_event_tooltip": "Zapisz zmiany zanim będziesz testować swoje ustawienia."
}
}

View File

@@ -8,6 +8,7 @@
"About_Design": "Desenvolvido por:",
"About_Exit": "Sair",
"About_Title": "Analisador de segurança de rede & framework de notificação",
"AppEvents_AppEventProcessed": "",
"AppEvents_DateTimeCreated": "Registrado em",
"AppEvents_Extra": "Adicional",
"AppEvents_GUID": "Evento de aplicação GUID",

View File

@@ -8,6 +8,7 @@
"About_Design": "Разработан:",
"About_Exit": "Зарегистрироваться",
"About_Title": "Сетевой сканер и система уведомлений",
"AppEvents_AppEventProcessed": "",
"AppEvents_DateTimeCreated": "Журнал",
"AppEvents_Extra": "Дополнительно",
"AppEvents_GUID": "GUID события приложения",
@@ -464,6 +465,7 @@
"NETWORK_DEVICE_TYPES_description": "Какие типы устройств разрешено использовать в качестве сетевых устройств в представлении Сеть. Тип устройства должен точно соответствовать настройке <code>Type</code> для конкретного устройства в сведениях об устройстве. Добавьте его на устройство с помощью кнопки <code>+</code>. Не удаляйте существующие типы, а только добавляйте новые.",
"NETWORK_DEVICE_TYPES_name": "Типы сетевых устройств",
"Navigation_About": "О NetAlertX",
"Navigation_AppEvents": "",
"Navigation_Devices": "Устройства",
"Navigation_Donations": "Пожертвования",
"Navigation_Events": "События",
@@ -716,4 +718,4 @@
"settings_update_item_warning": "Обновить значение ниже. Будьте осторожны, следуя предыдущему формату. <b>Проверка не выполняется.</b>",
"test_event_icon": "fa-vial-circle-check",
"test_event_tooltip": "Сначала сохраните изменения, прежде чем проверять настройки."
}
}

View File

@@ -8,6 +8,7 @@
"About_Design": "",
"About_Exit": "Oturum kapat",
"About_Title": "",
"AppEvents_AppEventProcessed": "",
"AppEvents_DateTimeCreated": "",
"AppEvents_Extra": "Ekstra",
"AppEvents_GUID": "",
@@ -464,6 +465,7 @@
"NETWORK_DEVICE_TYPES_description": "",
"NETWORK_DEVICE_TYPES_name": "",
"Navigation_About": "Hakkında",
"Navigation_AppEvents": "",
"Navigation_Devices": "Cihazlar",
"Navigation_Donations": "",
"Navigation_Events": "",

View File

@@ -8,6 +8,7 @@
"About_Design": "Призначений для:",
"About_Exit": "Вийти",
"About_Title": "Сканер безпеки мережі та структура сповіщень",
"AppEvents_AppEventProcessed": "",
"AppEvents_DateTimeCreated": "Зареєстровано",
"AppEvents_Extra": "Екстра",
"AppEvents_GUID": "GUID події програми",
@@ -464,6 +465,7 @@
"NETWORK_DEVICE_TYPES_description": "Які типи пристроїв дозволено використовувати як мережеві пристрої в поданні мережі. Тип пристрою має точно відповідати налаштуванню <code>Тип</code> на певному пристрої в Деталях пристрою. Додайте його на пристрій за допомогою кнопки <code>+</code>. Не видаляйте існуючі типи, лише додайте нові.",
"NETWORK_DEVICE_TYPES_name": "Типи мережевих пристроїв",
"Navigation_About": "про",
"Navigation_AppEvents": "",
"Navigation_Devices": "Пристрої",
"Navigation_Donations": "Пожертви",
"Navigation_Events": "Події",
@@ -716,4 +718,4 @@
"settings_update_item_warning": "Оновіть значення нижче. Слідкуйте за попереднім форматом. <b>Перевірка не виконана.</b>",
"test_event_icon": "fa-vial-circle- check",
"test_event_tooltip": "Перш ніж перевіряти налаштування, збережіть зміни."
}
}

View File

@@ -8,6 +8,7 @@
"About_Design": "设计用于:",
"About_Exit": "登出",
"About_Title": "网络安全扫描器和通知框架",
"AppEvents_AppEventProcessed": "",
"AppEvents_DateTimeCreated": "已记录",
"AppEvents_Extra": "额外的",
"AppEvents_GUID": "应用程序事件 GUID",
@@ -464,6 +465,7 @@
"NETWORK_DEVICE_TYPES_description": "哪些设备类型允许在网络视图中用作网络设备。设备类型必须与设备详细信息中特定设备上的 <code>Type</code> 设置完全匹配。请勿删除现有类型,仅添加新类型。",
"NETWORK_DEVICE_TYPES_name": "网络设备类型",
"Navigation_About": "关于",
"Navigation_AppEvents": "",
"Navigation_Devices": "设备",
"Navigation_Donations": "捐款",
"Navigation_Events": "事件",

View File

@@ -293,9 +293,6 @@ def create_sensor(mqtt_client, deviceId, deviceName, sensorType, sensorName, ico
# check previous configs
sensorConfig = sensor_config(deviceId, deviceName, sensorType, sensorName, icon, mac)
mylog('verbose', [f"[{pluginName}] Publishing sensor number {len(mqtt_sensors)}"])
# send if new
if sensorConfig.isNew:

View File

@@ -18,7 +18,7 @@ from const import pluginsPath, fullDbPath, logPath
from helper import timeNowTZ, get_setting_value
from notification import write_notification
from database import DB
from device import Device_obj
from models.device_instance import DeviceInstance
import conf
from pytz import timezone
@@ -53,8 +53,8 @@ def main():
# Initialize the Plugin obj output file
plugin_objects = Plugin_Objects(RESULT_FILE)
# Create a Device_obj instance
device_handler = Device_obj(db)
# Create a DeviceInstance instance
device_handler = DeviceInstance(db)
# Retrieve devices
unknown_devices = device_handler.getUnknown()

View File

@@ -139,6 +139,7 @@ def cleanup_database (dbPath, DAYS_TO_KEEP_EVENTS, HRS_TO_KEEP_NEWDEV, HRS_TO_KE
);"""
cursor.execute(delete_query)
conn.commit()
# -----------------------------------------------------

View File

@@ -23,7 +23,7 @@ from logger import mylog, Logger, append_line_to_file
from helper import timeNowTZ, get_setting_value
from const import logPath, applicationPath, fullDbPath
from database import DB
from device import Device_obj
from models.device_instance import DeviceInstance
import conf
from pytz import timezone
@@ -57,8 +57,8 @@ def main():
# Initialize the Plugin obj output file
plugin_objects = Plugin_Objects(RESULT_FILE)
# Create a Device_obj instance
device_handler = Device_obj(db)
# Create a DeviceInstance instance
device_handler = DeviceInstance(db)
# Retrieve devices
all_devices = device_handler.getAll()

View File

@@ -18,7 +18,7 @@ from const import pluginsPath, fullDbPath, logPath
from helper import timeNowTZ, get_setting_value
from notification import write_notification
from database import DB
from device import Device_obj
from models.device_instance import DeviceInstance
import conf
from pytz import timezone
@@ -53,8 +53,8 @@ def main():
# Initialize the Plugin obj output file
plugin_objects = Plugin_Objects(RESULT_FILE)
# Create a Device_obj instance
device_handler = Device_obj(db)
# Create a DeviceInstance instance
device_handler = DeviceInstance(db)
# Retrieve devices
unknown_devices = device_handler.getUnknown()

View File

@@ -24,7 +24,7 @@ from logger import mylog, Logger, append_line_to_file
from helper import timeNowTZ, get_setting_value, extract_between_strings, extract_ip_addresses, extract_mac_addresses
from const import logPath, applicationPath, fullDbPath
from database import DB
from device import Device_obj
from models.device_instance import DeviceInstance
import conf
from pytz import timezone

View File

@@ -23,7 +23,7 @@ from logger import mylog, Logger, append_line_to_file
from helper import timeNowTZ, get_setting_value
from const import logPath, applicationPath, fullDbPath
from database import DB
from device import Device_obj
from models.device_instance import DeviceInstance
import conf
from pytz import timezone
@@ -55,8 +55,8 @@ def main():
# Initialize the Plugin obj output file
plugin_objects = Plugin_Objects(RESULT_FILE)
# Create a Device_obj instance
device_handler = Device_obj(db)
# Create a DeviceInstance instance
device_handler = DeviceInstance(db)
# Retrieve devices
unknown_devices = device_handler.getUnknown()

View File

@@ -251,12 +251,12 @@ def main():
mylog("verbose", [f"[{pluginName}] starting execution"])
from database import DB
from device import Device_obj
from models.device_instance import DeviceInstance
db = DB() # instance of class DB
db.open()
# Create a Device_obj instance
device_handler = Device_obj(db)
# Create a DeviceInstance instance
device_handler = DeviceInstance(db)
# Retrieve configuration settings
# these should be self-explanatory
omada_sites = []

View File

@@ -19,7 +19,7 @@ from const import pluginsPath, fullDbPath, logPath
from helper import timeNowTZ, get_setting_value
from notification import write_notification
from database import DB
from device import Device_obj
from models.device_instance import DeviceInstance
import conf
# Make sure the TIMEZONE for logging is correct
@@ -54,8 +54,8 @@ def main():
db = DB() # instance of class DB
db.open()
# Create a Device_obj instance
device_handler = Device_obj(db)
# Create a DeviceInstance instance
device_handler = DeviceInstance(db)
# Retrieve devices
if 'offline' in devices_to_wake:

View File

@@ -40,7 +40,7 @@
}
]
},
"default_value": 5000,
"default_value": 100,
"options": [],
"localized": ["name", "description"],
"name": [

View File

@@ -7,10 +7,10 @@
<!-- Page ------------------------------------------------------------------ -->
<div class="content-wrapper">
<div class="content-wrapper" id="wf-content-wrapper">
<?php
require 'appEventsCore.php';
require 'workflowsCore.php';
?>

1035
front/workflowsCore.php Executable file

File diff suppressed because it is too large Load Diff

View File

@@ -28,13 +28,14 @@ from logger import mylog
from helper import filePermissions, timeNowTZ, get_setting_value
from app_state import updateState
from api import update_api
from networkscan import process_scan
from scan.session_events import process_scan
from initialise import importConfigs
from database import DB
from reporting import get_notifications
from notification import Notification_obj
from plugin import run_plugin_scripts, check_and_run_user_event
from device import update_devices_names
from scan.device_handling import update_devices_names
from workflows.manager import WorkflowManager
#===============================================================================
#===============================================================================
@@ -79,6 +80,9 @@ def main ():
# Upgrade DB if needed
db.upgradeDB()
# Initialize the WorkflowManager
workflow_manager = WorkflowManager(db)
#===============================================================================
# This is the main loop of NetAlertX
#===============================================================================
@@ -180,15 +184,39 @@ def main ():
# Commit SQL
db.commitDB()
# Footer
mylog('verbose', ['[MAIN] Process: Idle'])
else:
# do something
# mylog('verbose', ['[MAIN] Waiting to start next loop'])
updateState("Process: Idle")
updateState("Process: Idle")
# WORKFLOWS handling
# ----------------------------------------
# Fetch new unprocessed events
new_events = workflow_manager.get_new_app_events()
mylog('debug', [f'[MAIN] Processing WORKFLOW new_events from get_new_app_events: {len(new_events)}'])
# Process each new event and check triggers
if len(new_events) > 0:
updateState("Workflows: Start")
update_api_flag = False
for event in new_events:
mylog('debug', [f'[MAIN] Processing WORKFLOW app event with GUID {event["GUID"]}'])
# proceed to process events
workflow_manager.process_event(event)
if workflow_manager.update_api:
# Update API endpoints if needed
update_api_flag = True
if update_api_flag:
update_api(db, all_plugins, True)
updateState("Workflows: End")
#loop
time.sleep(5) # wait for N seconds

View File

@@ -1,380 +0,0 @@
import datetime
import json
import uuid
# Register NetAlertX modules
import conf
from const import applicationPath, logPath, apiPath, confFileName, sql_generateGuid
from logger import logResult, mylog, print_log
from helper import timeNowTZ
#-------------------------------------------------------------------------------
# Execution object handling
#-------------------------------------------------------------------------------
class AppEvent_obj:
def __init__(self, db):
self.db = db
# drop table
self.db.sql.execute("""DROP TABLE IF EXISTS "AppEvents" """)
# Drop all triggers
self.db.sql.execute('DROP TRIGGER IF EXISTS trg_create_device;')
self.db.sql.execute('DROP TRIGGER IF EXISTS trg_read_device;')
self.db.sql.execute('DROP TRIGGER IF EXISTS trg_update_device;')
self.db.sql.execute('DROP TRIGGER IF EXISTS trg_delete_device;')
self.db.sql.execute('DROP TRIGGER IF EXISTS trg_delete_plugin_object;')
self.db.sql.execute('DROP TRIGGER IF EXISTS trg_create_plugin_object;')
self.db.sql.execute('DROP TRIGGER IF EXISTS trg_update_plugin_object;')
# Create AppEvent table if missing
self.db.sql.execute("""CREATE TABLE IF NOT EXISTS "AppEvents" (
"Index" INTEGER,
"GUID" TEXT UNIQUE,
"DateTimeCreated" TEXT,
"ObjectType" TEXT, -- ObjectType (Plugins, Notifications, Events)
"ObjectGUID" TEXT,
"ObjectPlugin" TEXT,
"ObjectPrimaryID" TEXT,
"ObjectSecondaryID" TEXT,
"ObjectForeignKey" TEXT,
"ObjectIndex" TEXT,
"ObjectIsNew" BOOLEAN,
"ObjectIsArchived" BOOLEAN,
"ObjectStatusColumn" TEXT, -- Status (Notifications, Plugins), eve_EventType (Events)
"ObjectStatus" TEXT, -- new_devices, down_devices, events, new, watched-changed, watched-not-changed, missing-in-last-scan, Device down, New Device, IP Changed, Connected, Disconnected, VOIDED - Disconnected, VOIDED - Connected, <missing event>
"AppEventType" TEXT, -- "create", "update", "delete" (+TBD)
"Helper1" TEXT,
"Helper2" TEXT,
"Helper3" TEXT,
"Extra" TEXT,
PRIMARY KEY("Index" AUTOINCREMENT)
);
""")
# -------------
# Device events
sql_devices_mappedColumns = '''
"GUID",
"DateTimeCreated",
"ObjectType",
"ObjectPrimaryID",
"ObjectSecondaryID",
"ObjectStatus",
"ObjectStatusColumn",
"ObjectIsNew",
"ObjectIsArchived",
"ObjectForeignKey",
"AppEventType"
'''
# Trigger for create event
self.db.sql.execute(f'''
CREATE TRIGGER IF NOT EXISTS "trg_create_device"
AFTER INSERT ON "Devices"
BEGIN
INSERT INTO "AppEvents" (
{sql_devices_mappedColumns}
)
VALUES (
{sql_generateGuid},
DATETIME('now'),
'Devices',
NEW.devMac,
NEW.devLastIP,
CASE WHEN NEW.devPresentLastScan = 1 THEN 'online' ELSE 'offline' END,
'devPresentLastScan',
NEW.devIsNew,
NEW.devIsArchived,
NEW.devMac,
'create'
);
END;
''')
# 🔴 This would generate too many events, disabled for now
# # Trigger for read event
# self.db.sql.execute('''
# TODO
# ''')
# Trigger for update event
self.db.sql.execute(f'''
CREATE TRIGGER IF NOT EXISTS "trg_update_device"
AFTER UPDATE ON "Devices"
BEGIN
INSERT INTO "AppEvents" (
{sql_devices_mappedColumns}
)
VALUES (
{sql_generateGuid},
DATETIME('now'),
'Devices',
NEW.devMac,
NEW.devLastIP,
CASE WHEN NEW.devPresentLastScan = 1 THEN 'online' ELSE 'offline' END,
'devPresentLastScan',
NEW.devIsNew,
NEW.devIsArchived,
NEW.devMac,
'update'
);
END;
''')
# Trigger for delete event
self.db.sql.execute(f'''
CREATE TRIGGER IF NOT EXISTS "trg_delete_device"
AFTER DELETE ON "Devices"
BEGIN
INSERT INTO "AppEvents" (
{sql_devices_mappedColumns}
)
VALUES (
{sql_generateGuid},
DATETIME('now'),
'Devices',
OLD.devMac,
OLD.devLastIP,
CASE WHEN OLD.devPresentLastScan = 1 THEN 'online' ELSE 'offline' END,
'devPresentLastScan',
OLD.devIsNew,
OLD.devIsArchived,
OLD.devMac,
'delete'
);
END;
''')
# -------------
# Plugins_Objects events
sql_plugins_objects_mappedColumns = '''
"GUID",
"DateTimeCreated",
"ObjectType",
"ObjectPlugin",
"ObjectPrimaryID",
"ObjectSecondaryID",
"ObjectForeignKey",
"ObjectStatusColumn",
"ObjectStatus",
"AppEventType"
'''
# Create trigger for update event on Plugins_Objects
self.db.sql.execute(f'''
CREATE TRIGGER IF NOT EXISTS trg_update_plugin_object
AFTER UPDATE ON Plugins_Objects
BEGIN
INSERT INTO AppEvents (
{sql_plugins_objects_mappedColumns}
)
VALUES (
{sql_generateGuid},
DATETIME('now'),
'Plugins_Objects',
NEW.Plugin,
NEW.Object_PrimaryID,
NEW.Object_SecondaryID,
NEW.ForeignKey,
'Status',
NEW.Status,
'update'
);
END;
''')
# Create trigger for CREATE event on Plugins_Objects
self.db.sql.execute(f'''
CREATE TRIGGER IF NOT EXISTS trg_create_plugin_object
AFTER INSERT ON Plugins_Objects
BEGIN
INSERT INTO AppEvents (
{sql_plugins_objects_mappedColumns}
)
VALUES (
{sql_generateGuid},
DATETIME('now'),
'Plugins_Objects',
NEW.Plugin,
NEW.Object_PrimaryID,
NEW.Object_SecondaryID,
NEW.ForeignKey,
'Status',
NEW.Status,
'create'
);
END;
''')
# Create trigger for DELETE event on Plugins_Objects
self.db.sql.execute(f'''
CREATE TRIGGER IF NOT EXISTS trg_delete_plugin_object
AFTER DELETE ON Plugins_Objects
BEGIN
INSERT INTO AppEvents (
{sql_plugins_objects_mappedColumns}
)
VALUES (
{sql_generateGuid},
DATETIME('now'),
'Plugins_Objects',
OLD.Plugin,
OLD.Object_PrimaryID,
OLD.Object_SecondaryID,
OLD.ForeignKey,
'Status',
OLD.Status,
'delete'
);
END;
''')
self.save()
# -------------------------------------------------------------------------------
# -------------------------------------------------------------------------------
# below code is unused
# -------------------------------------------------------------------------------
# Create a new DB entry if new notifications are available, otherwise skip
def create(self, Extra="", **kwargs):
# Check if nothing to report, end
if not any(kwargs.values()):
return False
# Continue and save into DB if notifications are available
self.GUID = str(uuid.uuid4())
self.DateTimeCreated = timeNowTZ()
self.ObjectType = "Plugins" # Modify ObjectType as needed
# Optional parameters
self.ObjectGUID = kwargs.get("ObjectGUID", "")
self.ObjectPlugin = kwargs.get("ObjectPlugin", "")
self.ObjectMAC = kwargs.get("ObjectMAC", "")
self.ObjectIP = kwargs.get("ObjectIP", "")
self.ObjectPrimaryID = kwargs.get("ObjectPrimaryID", "")
self.ObjectSecondaryID = kwargs.get("ObjectSecondaryID", "")
self.ObjectForeignKey = kwargs.get("ObjectForeignKey", "")
self.ObjectIndex = kwargs.get("ObjectIndex", "")
self.ObjectRowID = kwargs.get("ObjectRowID", "")
self.ObjectStatusColumn = kwargs.get("ObjectStatusColumn", "")
self.ObjectStatus = kwargs.get("ObjectStatus", "")
self.AppEventStatus = "new" # Modify AppEventStatus as needed
self.Extra = Extra
self.upsert()
return True
def upsert(self):
self.db.sql.execute("""
INSERT OR REPLACE INTO AppEvents (
"GUID",
"DateTimeCreated",
"ObjectType",
"ObjectGUID",
"ObjectPlugin",
"ObjectMAC",
"ObjectIP",
"ObjectPrimaryID",
"ObjectSecondaryID",
"ObjectForeignKey",
"ObjectIndex",
"ObjectRowID",
"ObjectStatusColumn",
"ObjectStatus",
"AppEventStatus",
"Extra"
)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
""", (
self.GUID,
self.DateTimeCreated,
self.ObjectType,
self.ObjectGUID,
self.ObjectPlugin,
self.ObjectMAC,
self.ObjectIP,
self.ObjectPrimaryID,
self.ObjectSecondaryID,
self.ObjectForeignKey,
self.ObjectIndex,
self.ObjectRowID,
self.ObjectStatusColumn,
self.ObjectStatus,
self.AppEventStatus,
self.Extra
))
self.save()
def save(self):
    """Persist pending changes by committing the shared DB connection."""
    # Commit changes
    self.db.commitDB()
def getPluginObject(**kwargs):
    """Find a plugin object in the cached table_plugins_objects.json snapshot.

    Accepts any of GUID/Plugin/MAC/IP/PrimaryID/SecondaryID/ForeignKey/
    Index/RowID as keyword arguments and tries several match passes in
    decreasing specificity. Returns the matching dict, or None when
    nothing matches, no kwargs were given, Plugin is missing, or the
    snapshot file cannot be read.

    Fix: the original ran the MAC/IP pass twice (the 5th pass duplicated
    the 3rd and could never match anything new); the duplicate is removed
    and the caught exception detail is now included in the error log.
    """
    # Check if nothing was supplied, end
    if not any(kwargs.values()):
        return None

    # Optional parameters
    GUID = kwargs.get("GUID", "")
    Plugin = kwargs.get("Plugin", "")
    MAC = kwargs.get("MAC", "")
    IP = kwargs.get("IP", "")
    PrimaryID = kwargs.get("PrimaryID", "")
    SecondaryID = kwargs.get("SecondaryID", "")
    ForeignKey = kwargs.get("ForeignKey", "")
    Index = kwargs.get("Index", "")
    RowID = kwargs.get("RowID", "")

    # we need the plugin
    if Plugin == "":
        return None

    plugins_objects = apiPath + 'table_plugins_objects.json'

    try:
        with open(plugins_objects, 'r') as json_file:
            data = json.load(json_file)

        items = data.get("data", [])

        # Match passes in decreasing specificity; the first hit wins.
        # 1. Exact Index match
        for item in items:
            if item.get("Index") == Index:
                return item
        # 2. Primary + Secondary ID match
        for item in items:
            if item.get("ObjectPrimaryID") == PrimaryID and item.get("ObjectSecondaryID") == SecondaryID:
                return item
        # 3. MAC + IP match
        for item in items:
            if item.get("ObjectPrimaryID") == MAC and item.get("ObjectSecondaryID") == IP:
                return item
        # 4. Primary ID + IP match
        for item in items:
            if item.get("ObjectPrimaryID") == PrimaryID and item.get("ObjectSecondaryID") == IP:
                return item

        mylog('debug', [f'[{module_name}] ⚠ ERROR - Object not found - GUID:{GUID} | Plugin:{Plugin} | MAC:{MAC} | IP:{IP} | PrimaryID:{PrimaryID} | SecondaryID:{SecondaryID} | ForeignKey:{ForeignKey} | Index:{Index} | RowID:{RowID} '])
        return None

    except (FileNotFoundError, json.JSONDecodeError, ValueError) as e:
        # File missing, JSON malformed, or data not in the expected format
        mylog('none', [f'[{module_name}] ⚠ ERROR - JSONDecodeError or FileNotFoundError for file {plugins_objects} ({e})'])
        return None

View File

@@ -71,7 +71,7 @@ sql_devices_all = """
FROM Devices
"""
sql_appevents = """select * from AppEvents"""
sql_appevents = """select * from AppEvents order by DateTimeCreated desc"""
# The below query calculates counts of devices in various categories:
# (connected/online, offline, down, new, archived),
# as well as a combined count for devices that match any status listed in the UI_MY_DEVICES setting

View File

@@ -3,6 +3,7 @@ from Crypto.Util.Padding import pad, unpad
import base64
import os
import hashlib
import uuid
# SIMPLE CRYPT - requeres C compiler -------------------------------------------------------------------------
@@ -56,4 +57,10 @@ def get_random_bytes(length):
# Format hexadecimal string with hyphens
formatted_hex = '-'.join(hex_string[i:i+2] for i in range(0, len(hex_string), 2))
return formatted_hex
return formatted_hex
#-------------------------------------------------------------------------------
def generate_deterministic_guid(plugin, primary_id, secondary_id):
    """Derive a stable GUID string from plugin + primary + secondary IDs.

    The same three inputs always yield the same GUID (MD5 of the joined
    key, rendered as a UUID), which lets callers re-identify objects
    across runs without storing the GUID anywhere.
    """
    fingerprint = hashlib.md5(f"{plugin}-{primary_id}-{secondary_id}".encode("utf-8"))
    return str(uuid.UUID(fingerprint.hexdigest()))

View File

@@ -9,7 +9,7 @@ from const import fullDbPath, sql_devices_stats, sql_devices_all, sql_generateGu
from logger import mylog
from helper import json_obj, initOrSetParam, row_to_json, timeNowTZ
from appevent import AppEvent_obj
from workflows.app_events import AppEvent_obj
class DB():
"""
@@ -543,6 +543,7 @@ class DB():
sql_Plugins_Objects = """ CREATE TABLE IF NOT EXISTS Plugins_Objects(
"Index" INTEGER,
Plugin TEXT NOT NULL,
ObjectGUID TEXT,
Object_PrimaryID TEXT NOT NULL,
Object_SecondaryID TEXT NOT NULL,
DateTimeCreated TEXT NOT NULL,
@@ -589,6 +590,18 @@ class DB():
self.sql.execute('ALTER TABLE "Plugins_Objects" ADD COLUMN "HelpVal2" TEXT')
self.sql.execute('ALTER TABLE "Plugins_Objects" ADD COLUMN "HelpVal3" TEXT')
self.sql.execute('ALTER TABLE "Plugins_Objects" ADD COLUMN "HelpVal4" TEXT')
# plug_ObjectGUID_missing column
plug_ObjectGUID_missing = self.sql.execute ("""
SELECT COUNT(*) AS CNTREC FROM pragma_table_info('Plugins_Objects') WHERE name='ObjectGUID'
""").fetchone()[0] == 0
if plug_ObjectGUID_missing :
mylog('verbose', ["[upgradeDB] Adding ObjectGUID to the Plugins_Objects table"])
self.sql.execute("""
ALTER TABLE "Plugins_Objects" ADD "ObjectGUID" TEXT
""")
# -----------------------------------------
# REMOVE after 6/6/2025 - END
@@ -645,6 +658,17 @@ class DB():
self.sql.execute('ALTER TABLE "Plugins_Events" ADD COLUMN "HelpVal2" TEXT')
self.sql.execute('ALTER TABLE "Plugins_Events" ADD COLUMN "HelpVal3" TEXT')
self.sql.execute('ALTER TABLE "Plugins_Events" ADD COLUMN "HelpVal4" TEXT')
# plug_ObjectGUID_missing column
plug_ObjectGUID_missing = self.sql.execute ("""
SELECT COUNT(*) AS CNTREC FROM pragma_table_info('Plugins_Events') WHERE name='ObjectGUID'
""").fetchone()[0] == 0
if plug_ObjectGUID_missing :
mylog('verbose', ["[upgradeDB] Adding ObjectGUID to the Plugins_Events table"])
self.sql.execute("""
ALTER TABLE "Plugins_Events" ADD "ObjectGUID" TEXT
""")
# -----------------------------------------
# REMOVE after 6/6/2025 - END
@@ -703,6 +727,18 @@ class DB():
self.sql.execute('ALTER TABLE "Plugins_History" ADD COLUMN "HelpVal3" TEXT')
self.sql.execute('ALTER TABLE "Plugins_History" ADD COLUMN "HelpVal4" TEXT')
# plug_ObjectGUID_missing column
plug_ObjectGUID_missing = self.sql.execute ("""
SELECT COUNT(*) AS CNTREC FROM pragma_table_info('Plugins_History') WHERE name='ObjectGUID'
""").fetchone()[0] == 0
if plug_ObjectGUID_missing :
mylog('verbose', ["[upgradeDB] Adding ObjectGUID to the Plugins_History table"])
self.sql.execute("""
ALTER TABLE "Plugins_History" ADD "ObjectGUID" TEXT
""")
# -----------------------------------------
# REMOVE after 6/6/2025 - END
# -----------------------------------------

View File

@@ -1,783 +0,0 @@
import subprocess
import conf
import os
import re
from helper import timeNowTZ, get_setting, get_setting_value, list_to_where, resolve_device_name_dig, get_device_name_nbtlookup, get_device_name_nslookup, get_device_name_mdns, check_IP_format, sanitize_SQL_input
from logger import mylog, print_log
from const import vendorsPath, vendorsPathNewest, sql_generateGuid
#-------------------------------------------------------------------------------
# Device object handling (WIP)
#-------------------------------------------------------------------------------
class Device_obj:
    """Thin read-only data-access wrapper around the Devices table.

    Holds a reference to the shared DB handle and exposes the
    convenience queries used by the device-refresh pipeline.
    """
    def __init__(self, db):
        # Shared database handle; queries go through db.sql
        self.db = db

    # Get all devices
    def getAll(self):
        self.db.sql.execute("""
            SELECT * FROM Devices
        """)
        return self.db.sql.fetchall()

    # Get all devices whose name has not been resolved yet
    def getUnknown(self):
        self.db.sql.execute("""
            SELECT * FROM Devices WHERE devName in ("(unknown)", "(name not found)", "" )
        """)
        return self.db.sql.fetchall()

    # Get specific column value based on devMac
    # NOTE(review): column_name is interpolated into the SQL string — callers
    # must only pass trusted, hard-coded column names (devMac is parameterized).
    def getValueWithMac(self, column_name, devMac):
        query = f"SELECT {column_name} FROM Devices WHERE devMac = ?"
        self.db.sql.execute(query, (devMac,))
        result = self.db.sql.fetchone()
        return result[column_name] if result else None

    # Get all devices that are down (down-alerting AND absent from last scan)
    def getDown(self):
        self.db.sql.execute("""
            SELECT * FROM Devices WHERE devAlertDown = 1 and devPresentLastScan = 0
        """)
        return self.db.sql.fetchall()

    # Get all offline devices (absent from the last scan)
    def getOffline(self):
        self.db.sql.execute("""
            SELECT * FROM Devices WHERE devPresentLastScan = 0
        """)
        return self.db.sql.fetchall()
#-------------------------------------------------------------------------------
# Removing devices from the CurrentScan DB table which the user chose to ignore by MAC or IP
def exclude_ignored_devices(db):
    """Delete CurrentScan rows matching the user's ignored MAC/IP lists."""
    sql = db.sql  # Database interface for executing queries

    # Build a WHERE fragment per ignore list; drop the empty ones
    filters = [
        fragment
        for fragment in (
            list_to_where('OR', 'cur_MAC', 'LIKE', get_setting_value('NEWDEV_ignored_MACs')),
            list_to_where('OR', 'cur_IP', 'LIKE', get_setting_value('NEWDEV_ignored_IPs')),
        )
        if fragment
    ]

    conditions_str = " OR ".join(filters)

    if conditions_str:
        # Only delete rows whose MAC or IP matches an ignored condition
        query = f"""DELETE FROM CurrentScan WHERE
                        1=1
                        AND (
                            {conditions_str}
                        )
                """
    else:
        # No valid conditions — make the DELETE a no-op rather than wiping the table
        query = "DELETE FROM CurrentScan WHERE 1=1 AND 1=0"

    mylog('debug', f'[New Devices] Excluding Ignored Devices Query: {query}')

    sql.execute(query)
#-------------------------------------------------------------------------------
def save_scanned_devices (db):
    """Insert the scanning host itself into the CurrentScan table.

    Shells out to determine the MAC and IP of the default-route interface
    and records them with scan method 'local_MAC' so the host appears as
    a discovered device.
    """
    sql = db.sql #TO-DO

    # Add Local MAC of default local interface (ifconfig on the default-route device)
    local_mac_cmd = ["/sbin/ifconfig `ip -o route get 1 | sed 's/^.*dev \\([^ ]*\\).*$/\\1/;q'` | grep ether | awk '{print $2}'"]
    local_mac = subprocess.Popen (local_mac_cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT).communicate()[0].decode().strip()

    # Source IP used for the default route
    local_ip_cmd = ["ip -o route get 1 | sed 's/^.*src \\([^ ]*\\).*$/\\1/;q'"]
    local_ip = subprocess.Popen (local_ip_cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT).communicate()[0].decode().strip()

    mylog('debug', ['[Save Devices] Saving this IP into the CurrentScan table:', local_ip])

    # Fall back to a placeholder when the shell pipeline produced no valid IP
    if check_IP_format(local_ip) == '':
        local_ip = '0.0.0.0'

    # Proceed if variable contains valid MAC
    # NOTE(review): local_mac/local_ip are interpolated into the SQL string.
    # They come from local shell output (not user input), but a parameterized
    # query would be safer — verify before changing.
    if check_mac_or_internet(local_mac):
        sql.execute (f"""INSERT OR IGNORE INTO CurrentScan (cur_MAC, cur_IP, cur_Vendor, cur_ScanMethod) VALUES ( '{local_mac}', '{local_ip}', Null, 'local_MAC') """)
#-------------------------------------------------------------------------------
def print_scan_stats(db):
    """Log summary statistics comparing CurrentScan with Devices/Events.

    Purely informational: emits verbose-level counters (new devices, down
    alerts, IP changes, per-scan-method counts) and trace-level dumps of
    the relevant tables. Does not modify the database.
    """
    sql = db.sql # TO-DO

    # One pass of correlated sub-selects; each aliased column is an
    # independent counter, grouped by scan method for the per-method stats.
    query = """
        SELECT
            (SELECT COUNT(*) FROM CurrentScan) AS devices_detected,
            (SELECT COUNT(*) FROM CurrentScan WHERE NOT EXISTS (SELECT 1 FROM Devices WHERE devMac = cur_MAC)) AS new_devices,
            (SELECT COUNT(*) FROM Devices WHERE devAlertDown != 0 AND NOT EXISTS (SELECT 1 FROM CurrentScan WHERE devMac = cur_MAC)) AS down_alerts,
            (SELECT COUNT(*) FROM Devices WHERE devAlertDown != 0 AND devPresentLastScan = 1 AND NOT EXISTS (SELECT 1 FROM CurrentScan WHERE devMac = cur_MAC)) AS new_down_alerts,
            (SELECT COUNT(*) FROM Devices WHERE devPresentLastScan = 0) AS new_connections,
            (SELECT COUNT(*) FROM Devices WHERE devPresentLastScan = 1 AND NOT EXISTS (SELECT 1 FROM CurrentScan WHERE devMac = cur_MAC)) AS disconnections,
            (SELECT COUNT(*) FROM Devices, CurrentScan WHERE devMac = cur_MAC AND devLastIP <> cur_IP) AS ip_changes,
            cur_ScanMethod,
            COUNT(*) AS scan_method_count
        FROM CurrentScan
        GROUP BY cur_ScanMethod
    """
    sql.execute(query)
    stats = sql.fetchall()

    # NOTE(review): stats[0] assumes CurrentScan is non-empty (GROUP BY on an
    # empty table yields no rows and this would raise IndexError) — confirm
    # callers only invoke this after a scan populated CurrentScan.
    mylog('verbose', f'[Scan Stats] Devices Detected.......: {stats[0]["devices_detected"]}')
    mylog('verbose', f'[Scan Stats] New Devices............: {stats[0]["new_devices"]}')
    mylog('verbose', f'[Scan Stats] Down Alerts............: {stats[0]["down_alerts"]}')
    mylog('verbose', f'[Scan Stats] New Down Alerts........: {stats[0]["new_down_alerts"]}')
    mylog('verbose', f'[Scan Stats] New Connections........: {stats[0]["new_connections"]}')
    mylog('verbose', f'[Scan Stats] Disconnections.........: {stats[0]["disconnections"]}')
    mylog('verbose', f'[Scan Stats] IP Changes.............: {stats[0]["ip_changes"]}')

    # Trace-level dumps of the tables involved, for debugging scan issues
    # if str(stats[0]["new_devices"]) != '0':
    mylog('trace', f'    ================ DEVICES table content  ================')
    sql.execute('select * from Devices')
    rows = sql.fetchall()
    for row in rows:
        row_dict = dict(row)
        mylog('trace', f'    {row_dict}')
    mylog('trace', f'    ================ CurrentScan table content  ================')
    sql.execute('select * from CurrentScan')
    rows = sql.fetchall()
    for row in rows:
        row_dict = dict(row)
        mylog('trace', f'    {row_dict}')
    mylog('trace', f'    ================ Events table content where eve_PendingAlertEmail = 1  ================')
    sql.execute('select * from Events where eve_PendingAlertEmail = 1')
    rows = sql.fetchall()
    for row in rows:
        row_dict = dict(row)
        mylog('trace', f'    {row_dict}')
    mylog('trace', f'    ================ Events table COUNT  ================')
    sql.execute('select count(*) from Events')
    rows = sql.fetchall()
    for row in rows:
        row_dict = dict(row)
        mylog('trace', f'    {row_dict}')

    # Per-scan-method counts from the grouped query above
    mylog('verbose', '[Scan Stats] Scan Method Statistics:')
    for row in stats:
        if row["cur_ScanMethod"] is not None:
            mylog('verbose', f'    {row["cur_ScanMethod"]}: {row["scan_method_count"]}')
#-------------------------------------------------------------------------------
def create_new_devices (db):
    """Create Devices rows (plus Events and Sessions entries) for every
    CurrentScan row that has no matching device yet.

    New devices get their defaults from the NEWDEV_* settings. The whole
    batch shares one timestamp so events, sessions and devices line up.
    """
    sql = db.sql # TO-DO
    startTime = timeNowTZ()

    # Insert events for new devices from CurrentScan
    mylog('debug','[New Devices] New devices - 1 Events')

    query = f"""INSERT INTO Events (eve_MAC, eve_IP, eve_DateTime,
                    eve_EventType, eve_AdditionalInfo,
                    eve_PendingAlertEmail)
                SELECT cur_MAC, cur_IP, '{startTime}', 'New Device', cur_Vendor, 1
                FROM CurrentScan
                WHERE NOT EXISTS (SELECT 1 FROM Devices
                                  WHERE devMac = cur_MAC)
             """

    mylog('debug',f'[New Devices] Log Events Query: {query}')
    sql.execute(query)

    # Open a session for devices that have none yet
    mylog('debug',f'[New Devices] Insert Connection into session table')
    sql.execute (f"""INSERT INTO Sessions (ses_MAC, ses_IP, ses_EventTypeConnection, ses_DateTimeConnection,
                         ses_EventTypeDisconnection, ses_DateTimeDisconnection, ses_StillConnected, ses_AdditionalInfo)
                     SELECT cur_MAC, cur_IP,'Connected','{startTime}', NULL , NULL ,1, cur_Vendor
                     FROM CurrentScan
                     WHERE NOT EXISTS (SELECT 1 FROM Sessions
                                       WHERE ses_MAC = cur_MAC)
                 """)

    # Create new devices from CurrentScan
    mylog('debug','[New Devices] 2 Create devices')

    # default New Device values preparation (NEWDEV_* settings);
    # string settings go through sanitize_SQL_input before interpolation
    newDevColumns = """devAlertEvents,
                       devAlertDown,
                       devPresentLastScan,
                       devIsArchived,
                       devIsNew,
                       devSkipRepeated,
                       devScan,
                       devOwner,
                       devFavorite,
                       devGroup,
                       devComments,
                       devLogEvents,
                       devLocation,
                       devCustomProps"""

    newDevDefaults = f"""{get_setting_value('NEWDEV_devAlertEvents')},
                         {get_setting_value('NEWDEV_devAlertDown')},
                         {get_setting_value('NEWDEV_devPresentLastScan')},
                         {get_setting_value('NEWDEV_devIsArchived')},
                         {get_setting_value('NEWDEV_devIsNew')},
                         {get_setting_value('NEWDEV_devSkipRepeated')},
                         {get_setting_value('NEWDEV_devScan')},
                         '{sanitize_SQL_input(get_setting_value('NEWDEV_devOwner'))}',
                         {get_setting_value('NEWDEV_devFavorite')},
                         '{sanitize_SQL_input(get_setting_value('NEWDEV_devGroup'))}',
                         '{sanitize_SQL_input(get_setting_value('NEWDEV_devComments'))}',
                         {get_setting_value('NEWDEV_devLogEvents')},
                         '{sanitize_SQL_input(get_setting_value('NEWDEV_devLocation'))}',
                         '{sanitize_SQL_input(get_setting_value('NEWDEV_devCustomProps'))}'
                     """

    # Fetch data from CurrentScan skipping ignored devices by IP and MAC
    query = f"""SELECT cur_MAC, cur_Name, cur_Vendor, cur_ScanMethod, cur_IP, cur_SyncHubNodeName, cur_NetworkNodeMAC, cur_PORT, cur_NetworkSite, cur_SSID, cur_Type
                FROM CurrentScan """

    mylog('debug',f'[New Devices] Collecting New Devices Query: {query}')

    current_scan_data = sql.execute(query).fetchall()

    for row in current_scan_data:
        cur_MAC, cur_Name, cur_Vendor, cur_ScanMethod, cur_IP, cur_SyncHubNodeName, cur_NetworkNodeMAC, cur_PORT, cur_NetworkSite, cur_SSID, cur_Type = row

        # Handle NoneType — normalize empty/missing scan values to defaults
        cur_Name = cur_Name.strip() if cur_Name else '(unknown)'
        cur_Type = cur_Type.strip() if cur_Type else get_setting_value("NEWDEV_devType")
        cur_NetworkNodeMAC = cur_NetworkNodeMAC.strip() if cur_NetworkNodeMAC else ''
        # The 'Internet' pseudo-device never gets a parent node
        cur_NetworkNodeMAC = cur_NetworkNodeMAC if cur_NetworkNodeMAC and cur_MAC != "Internet" else (get_setting_value("NEWDEV_devParentMAC") if cur_MAC != "Internet" else "null")
        cur_SyncHubNodeName = cur_SyncHubNodeName if cur_SyncHubNodeName and cur_SyncHubNodeName != "null" else (get_setting_value("SYNC_node_name"))

        # Preparing the individual insert statement
        # (INSERT OR IGNORE: devices already present keep their data;
        # timestamps are bound as parameters, other values interpolated
        # via sanitize_SQL_input)
        sqlQuery = f"""INSERT OR IGNORE INTO Devices
                        (
                            devMac,
                            devName,
                            devVendor,
                            devLastIP,
                            devFirstConnection,
                            devLastConnection,
                            devSyncHubNode,
                            devGUID,
                            devParentMAC,
                            devParentPort,
                            devSite,
                            devSSID,
                            devType,
                            devSourcePlugin,
                            {newDevColumns}
                        )
                        VALUES
                        (
                            '{sanitize_SQL_input(cur_MAC)}',
                            '{sanitize_SQL_input(cur_Name)}',
                            '{sanitize_SQL_input(cur_Vendor)}',
                            '{sanitize_SQL_input(cur_IP)}',
                            ?,
                            ?,
                            '{sanitize_SQL_input(cur_SyncHubNodeName)}',
                            {sql_generateGuid},
                            '{sanitize_SQL_input(cur_NetworkNodeMAC)}',
                            '{sanitize_SQL_input(cur_PORT)}',
                            '{sanitize_SQL_input(cur_NetworkSite)}',
                            '{sanitize_SQL_input(cur_SSID)}',
                            '{sanitize_SQL_input(cur_Type)}',
                            '{sanitize_SQL_input(cur_ScanMethod)}',
                            {newDevDefaults}
                        )"""

        mylog('trace', f'[New Devices] Create device SQL: {sqlQuery}')

        sql.execute(sqlQuery, (startTime, startTime))

    mylog('debug','[New Devices] New Devices end')
    db.commitDB()
#-------------------------------------------------------------------------------
def update_devices_data_from_scan (db):
    """Refresh existing Devices rows from the latest CurrentScan results.

    Runs an ordered sequence of UPDATE statements: presence flags first,
    then always-updated fields (IP), then fill-if-empty fields (vendor,
    parent, site, SSID, type, name), and finally vendor/icon/type guessing
    for rows still lacking values. Statement order matters — presence
    flags are consumed by later queries.
    """
    sql = db.sql #TO-DO
    startTime = timeNowTZ().strftime('%Y-%m-%d %H:%M:%S')

    # Update Last Connection — mark re-appearing devices present
    mylog('debug', '[Update Devices] 1 Last Connection')
    sql.execute(f"""UPDATE Devices SET devLastConnection = '{startTime}',
                        devPresentLastScan = 1
                    WHERE devPresentLastScan = 0
                      AND EXISTS (SELECT 1 FROM CurrentScan
                                  WHERE devMac = cur_MAC) """)

    # Clean no active devices — mark devices absent from this scan
    mylog('debug', '[Update Devices] 2 Clean no active devices')
    sql.execute("""UPDATE Devices SET devPresentLastScan = 0
                   WHERE NOT EXISTS (SELECT 1 FROM CurrentScan
                                     WHERE devMac = cur_MAC) """)

    # Update IP
    mylog('debug', '[Update Devices] - cur_IP -> devLastIP (always updated)')
    sql.execute("""UPDATE Devices
                   SET devLastIP = (SELECT cur_IP FROM CurrentScan
                                    WHERE devMac = cur_MAC)
                   WHERE EXISTS (SELECT 1 FROM CurrentScan
                                 WHERE devMac = cur_MAC) """)

    # Update only devices with empty, NULL or (u(U)nknown) vendors
    mylog('debug', '[Update Devices] - cur_Vendor -> (if empty) devVendor')
    sql.execute("""UPDATE Devices
                   SET devVendor = (
                       SELECT cur_Vendor
                       FROM CurrentScan
                       WHERE Devices.devMac = CurrentScan.cur_MAC
                   )
                   WHERE
                       (devVendor IS NULL OR devVendor IN ("", "null", "(unknown)", "(Unknown)"))
                       AND EXISTS (
                           SELECT 1
                           FROM CurrentScan
                           WHERE Devices.devMac = CurrentScan.cur_MAC
                       )""")

    # Update only devices with empty or NULL devParentPort
    mylog('debug', '[Update Devices] - (if not empty) cur_Port -> devParentPort')
    sql.execute("""UPDATE Devices
                   SET devParentPort = (
                       SELECT cur_Port
                       FROM CurrentScan
                       WHERE Devices.devMac = CurrentScan.cur_MAC
                   )
                   WHERE EXISTS (
                       SELECT 1
                       FROM CurrentScan
                       WHERE Devices.devMac = CurrentScan.cur_MAC
                         AND CurrentScan.cur_Port IS NOT NULL AND CurrentScan.cur_Port NOT IN ("", "null")
                   )""")

    # Update only devices with empty or NULL devParentMAC
    mylog('debug', '[Update Devices] - (if not empty) cur_NetworkNodeMAC -> devParentMAC')
    sql.execute("""UPDATE Devices
                   SET devParentMAC = (
                       SELECT cur_NetworkNodeMAC
                       FROM CurrentScan
                       WHERE Devices.devMac = CurrentScan.cur_MAC
                   )
                   WHERE
                       (devParentMAC IS NULL OR devParentMAC IN ("", "null", "(unknown)", "(Unknown)"))
                       AND
                       EXISTS (
                           SELECT 1
                           FROM CurrentScan
                           WHERE Devices.devMac = CurrentScan.cur_MAC
                             AND CurrentScan.cur_NetworkNodeMAC IS NOT NULL AND CurrentScan.cur_NetworkNodeMAC NOT IN ("", "null")
                       )""")

    # Update only devices with empty or NULL devSite
    mylog('debug', '[Update Devices] - (if not empty) cur_NetworkSite -> (if empty) devSite')
    sql.execute("""UPDATE Devices
                   SET devSite = (
                       SELECT cur_NetworkSite
                       FROM CurrentScan
                       WHERE Devices.devMac = CurrentScan.cur_MAC
                   )
                   WHERE
                       (devSite IS NULL OR devSite IN ("", "null"))
                       AND EXISTS (
                           SELECT 1
                           FROM CurrentScan
                           WHERE Devices.devMac = CurrentScan.cur_MAC
                             AND CurrentScan.cur_NetworkSite IS NOT NULL AND CurrentScan.cur_NetworkSite NOT IN ("", "null")
                       )""")

    # Update only devices with empty or NULL devSSID
    mylog('debug', '[Update Devices] - (if not empty) cur_SSID -> (if empty) devSSID')
    sql.execute("""UPDATE Devices
                   SET devSSID = (
                       SELECT cur_SSID
                       FROM CurrentScan
                       WHERE Devices.devMac = CurrentScan.cur_MAC
                   )
                   WHERE
                       (devSSID IS NULL OR devSSID IN ("", "null"))
                       AND EXISTS (
                           SELECT 1
                           FROM CurrentScan
                           WHERE Devices.devMac = CurrentScan.cur_MAC
                             AND CurrentScan.cur_SSID IS NOT NULL AND CurrentScan.cur_SSID NOT IN ("", "null")
                       )""")

    # Update only devices with empty or NULL devType
    mylog('debug', '[Update Devices] - (if not empty) cur_Type -> (if empty) devType')
    sql.execute("""UPDATE Devices
                   SET devType = (
                       SELECT cur_Type
                       FROM CurrentScan
                       WHERE Devices.devMac = CurrentScan.cur_MAC
                   )
                   WHERE
                       (devType IS NULL OR devType IN ("", "null"))
                       AND EXISTS (
                           SELECT 1
                           FROM CurrentScan
                           WHERE Devices.devMac = CurrentScan.cur_MAC
                             AND CurrentScan.cur_Type IS NOT NULL AND CurrentScan.cur_Type NOT IN ("", "null")
                       )""")

    # Update (unknown) or (name not found) Names if available
    mylog('debug','[Update Devices] - (if not empty) cur_Name -> (if empty) devName')
    sql.execute (""" UPDATE Devices
                     SET devName = COALESCE((
                         SELECT cur_Name
                         FROM CurrentScan
                         WHERE cur_MAC = devMac
                           AND cur_Name IS NOT NULL
                           AND cur_Name <> 'null'
                           AND cur_Name <> ''
                     ), devName)
                     WHERE (devName IN ('(unknown)', '(name not found)', '')
                            OR devName IS NULL)
                       AND EXISTS (
                           SELECT 1
                           FROM CurrentScan
                           WHERE cur_MAC = devMac
                             AND cur_Name IS NOT NULL
                             AND cur_Name <> 'null'
                             AND cur_Name <> ''
                       ) """)

    # Update VENDORS — resolve still-unknown vendors via the local OUI file
    recordsToUpdate = []
    query = """SELECT * FROM Devices
               WHERE devVendor IS NULL OR devVendor IN ("", "null", "(unknown)", "(Unknown)")
            """

    for device in sql.execute (query) :
        # query_MAC_vendor returns -1 (not found) / -2 (malformed MAC)
        vendor = query_MAC_vendor (device['devMac'])
        if vendor != -1 and vendor != -2 :
            recordsToUpdate.append ([vendor, device['devMac']])

    if len(recordsToUpdate) > 0:
        sql.executemany ("UPDATE Devices SET devVendor = ? WHERE devMac = ? ", recordsToUpdate )

    # Guess ICONS for devices without one (optionally replacing the preset default)
    recordsToUpdate = []
    default_icon = get_setting_value('NEWDEV_devIcon')

    if get_setting_value('NEWDEV_replace_preset_icon'):
        query = f"""SELECT * FROM Devices
                    WHERE devIcon in ('', 'null', '{default_icon}')
                       OR devIcon IS NULL"""
    else:
        query = """SELECT * FROM Devices
                   WHERE devIcon in ('', 'null')
                      OR devIcon IS NULL"""

    for device in sql.execute (query) :
        # Conditional logic for devIcon guessing
        devIcon = guess_icon(device['devVendor'], device['devMac'], device['devLastIP'], device['devName'], default_icon)
        recordsToUpdate.append ([devIcon, device['devMac']])

    mylog('debug',f'[Update Devices] recordsToUpdate: {recordsToUpdate}')

    if len(recordsToUpdate) > 0:
        sql.executemany ("UPDATE Devices SET devIcon = ? WHERE devMac = ? ", recordsToUpdate )

    # Guess Type for devices without one
    recordsToUpdate = []
    query = """SELECT * FROM Devices
               WHERE devType in ('', 'null')
                  OR devType IS NULL"""

    default_type = get_setting_value('NEWDEV_devType')

    for device in sql.execute (query) :
        # Conditional logic for devIcon guessing
        devType = guess_type(device['devVendor'], device['devMac'], device['devLastIP'], device['devName'], default_type)
        recordsToUpdate.append ([devType, device['devMac']])

    if len(recordsToUpdate) > 0:
        sql.executemany ("UPDATE Devices SET devType = ? WHERE devMac = ? ", recordsToUpdate )

    mylog('debug','[Update Devices] Update devices end')
#-------------------------------------------------------------------------------
def update_devices_names (db):
    """Try to resolve names for devices still named '(unknown)' or similar.

    Tries resolvers in order — DiG, mDNS (AVAHISCAN), NSLOOKUP, NBTLOOKUP —
    and writes either the resolved name or '(name not found)' back to the
    Devices table, logging per-resolver hit counts.
    """
    sql = db.sql #TO-DO

    # Initialize variables
    recordsToUpdate = []
    recordsNotFound = []

    # Sentinel value written for devices we could not resolve
    nameNotFound = "(name not found)"

    # Per-resolver counters for the summary log
    ignored = 0
    notFound = 0
    foundDig = 0
    foundmDNSLookup = 0
    foundNsLookup = 0
    foundNbtLookup = 0

    # Gen unknown devices
    device_handler = Device_obj(db)

    # Retrieve devices
    unknownDevices = device_handler.getUnknown()

    # skip checks if no unknown devices
    if len(unknownDevices) == 0:
        return

    # Devices without name
    mylog('verbose', f'[Update Device Name] Trying to resolve devices without name. Unknown devices count: {len(unknownDevices)}')

    for device in unknownDevices:
        newName = nameNotFound

        # Resolve device name with DiG
        newName = resolve_device_name_dig (device['devMac'], device['devLastIP'])
        # count
        if newName != nameNotFound:
            foundDig += 1

        # Resolve device name with AVAHISCAN plugin data
        if newName == nameNotFound:
            newName = get_device_name_mdns(db, device['devMac'], device['devLastIP'])
            if newName != nameNotFound:
                foundmDNSLookup += 1

        # Resolve device name with NSLOOKUP plugin data
        if newName == nameNotFound:
            newName = get_device_name_nslookup(db, device['devMac'], device['devLastIP'])
            if newName != nameNotFound:
                foundNsLookup += 1

        # Resolve device name with NBTLOOKUP plugin data
        if newName == nameNotFound:
            newName = get_device_name_nbtlookup(db, device['devMac'], device['devLastIP'])
            if newName != nameNotFound:
                foundNbtLookup += 1

        # if still not found update name so we can distinguish the devices where we tried already
        if newName == nameNotFound :
            notFound += 1
            # if devName is the same as what we will change it to, take no action
            # this mitigates a race condition which would overwrite a users edits that occured since the select earlier
            if device['devName'] != nameNotFound:
                recordsNotFound.append (["(name not found)", device['devMac']])
        else:
            # name was found
            recordsToUpdate.append ([newName, device['devMac']])

    # Print log
    mylog('verbose', [f'[Update Device Name] Names Found (DiG/mDNS/NSLOOKUP/NBTSCAN): {len(recordsToUpdate)} ({foundDig}/{foundmDNSLookup}/{foundNsLookup}/{foundNbtLookup})'] )
    mylog('verbose', [f'[Update Device Name] Names Not Found         : {notFound}'] )

    # update not found devices with (name not found)
    sql.executemany ("UPDATE Devices SET devName = ? WHERE devMac = ? ", recordsNotFound )
    # update names of devices which we were able to resolve
    sql.executemany ("UPDATE Devices SET devName = ? WHERE devMac = ? ", recordsToUpdate )
    db.commitDB()
#-------------------------------------------------------------------------------
# Check if the variable contains a valid MAC address or "Internet"
def check_mac_or_internet(input_str):
    """Return True when input_str is 'internet' (any case) or begins with
    a colon/hyphen-separated MAC address, False otherwise."""
    # Regular expression pattern for matching a MAC address
    mac_pattern = r'([0-9A-Fa-f]{2}[:-][0-9A-Fa-f]{2}[:-][0-9A-Fa-f]{2}[:-][0-9A-Fa-f]{2}[:-][0-9A-Fa-f]{2}[:-][0-9A-Fa-f]{2})'

    # The 'Internet' pseudo-device is accepted alongside real MACs
    if input_str.lower() == 'internet':
        return True
    # re.match anchors at the start only, mirroring the original behavior
    return re.match(mac_pattern, input_str) is not None
#===============================================================================
# Lookup unknown vendors on devices
#===============================================================================
#-------------------------------------------------------------------------------
def query_MAC_vendor (pMAC):
    """Look up the hardware vendor for a MAC address in the local vendors file.

    Args:
        pMAC: MAC address in separator form (17 chars, e.g. 'aa:bb:cc:dd:ee:ff').

    Returns:
        The vendor name string when found; -1 when not found or the vendors
        file is missing; -2 when the MAC is malformed/ignored.

    Fixes: removed the unused mac_start_string9 local; log messages now name
    the file actually searched (filePath) instead of always vendorsPath.
    """
    pMACstr = str(pMAC)

    # Prefer the freshly downloaded vendors DB when one exists
    filePath = vendorsPath
    if os.path.isfile(vendorsPathNewest):
        filePath = vendorsPathNewest

    # Check MAC parameter
    mac = pMACstr.replace (':','').lower()
    if len(pMACstr) != 17 or len(mac) != 12 :
        return -2 # return -2 if ignored MAC

    # The vendors file keys on the first 6 hex digits (the OUI prefix)
    mac_start_string6 = mac[0:6]

    try:
        with open(filePath, 'r') as f:
            for line in f:
                line_lower = line.lower() # Convert line to lowercase for case-insensitive matching
                if line_lower.startswith(mac_start_string6):
                    # Vendors file format: <prefix>\t<vendor name>
                    parts = line.split('\t', 1)
                    if len(parts) > 1:
                        vendor = parts[1].strip()
                        mylog('debug', [f"[Vendor Check] Found '{vendor}' for '{pMAC}' in {filePath}"])
                        return vendor
                    else:
                        mylog('debug', [f'[Vendor Check] ⚠ ERROR: Match found, but line could not be processed: "{line_lower}"'])
                        return -1
        return -1 # MAC address not found in the database
    except FileNotFoundError:
        mylog('none', [f"[Vendor Check] ⚠ ERROR: Vendors file {filePath} not found."])
        return -1
#===============================================================================
# Icons
#===============================================================================
#-------------------------------------------------------------------------------
# Base64 encoded HTML string for FontAwesome icons
# Base64 encoded HTML string for FontAwesome icons.
# Each value decodes to an '<i class="...">' tag; devIcon stores them encoded.
# NOTE(review): a few encoded class names look like typos against FontAwesome's
# catalogue (e.g. the "watch" entry decodes to 'fa-wanch', "printer" to
# 'fa-printer' rather than 'fa-print') — decode and verify before relying on them.
icons = {
    "globe": "PGkgY2xhc3M9ImZhcyBmYS1nbG9iZSI+PC9pPg==",  # globe icon
    "phone": "PGkgY2xhc3M9ImZhcyBmYS1tb2JpbGUtYWx0Ij48L2k+",
    "laptop": "PGkgY2xhc3M9ImZhIGZhLWxhcHRvcCI+PC9pPg==",
    "printer": "PGkgY2xhc3M9ImZhIGZhLXByaW50ZXIiPjwvaT4=",
    "router": "PGkgY2xhc3M9ImZhcyBmYS1yYW5kb20iPjwvaT4=",
    "tv": "PGkgY2xhc3M9ImZhIGZhLXR2Ij48L2k+",
    "desktop": "PGkgY2xhc3M9ImZhIGZhLWRlc2t0b3AiPjwvaT4=",
    "tablet": "PGkgY2xhc3M9ImZhIGZhLXRhYmxldCI+PC9pPg==",
    "watch": "PGkgY2xhc3M9ImZhIGZhLXdhbmNoIj48L2k+",
    "camera": "PGkgY2xhc3M9ImZhIGZhLWNhbWVyYSI+PC9pPg==",
    "home": "PGkgY2xhc3M9ImZhIGZhLWhvbWUiPjwvaT4=",
    "apple": "PGkgY2xhc3M9ImZhYiBmYS1hcHBsZSI+PC9pPg==",
    "ethernet": "PGkgY2xhc3M9ImZhcyBmYS1ldGhlcm5ldCI+PC9pPg==",
    "google": "PGkgY2xhc3M9ImZhYiBmYS1nb29nbGUiPjwvaT4=",
    "raspberry": "PGkgY2xhc3M9ImZhYiBmYS1yYXNwYmVycnktcGkiPjwvaT4=",
    "microchip": "PGkgY2xhc3M9ImZhcyBmYS1taWNyb2NoaXAiPjwvaT4="
}
#-------------------------------------------------------------------------------
# Guess device icon
def guess_icon(vendor, mac, ip, name, default):
    """Best-effort guess of a device icon (base64 '<i>' tag from `icons`).

    Heuristics are checked in priority order — vendor substrings first,
    then MAC prefixes, then name substrings, then IP range — and the
    first match wins; *default* is returned when nothing matches.
    The if/elif ordering is significant: do not reorder.
    """
    mylog('debug', [f"[guess_icon] Guessing icon for (vendor|mac|ip|name): ('{vendor}'|'{mac}'|{ip}|{name})"])

    result = default
    # Normalize inputs; None vendor/name fall back to sentinel strings
    mac = mac.upper()
    vendor = vendor.lower() if vendor else "unknown"
    name = name.lower() if name else "(unknown)"

    # Guess icon based on vendor
    if any(brand in vendor for brand in {"samsung", "motorola"}):
        result = icons.get("phone")
    elif "dell" in vendor:
        result = icons.get("laptop")
    elif "hp" in vendor:
        result = icons.get("printer")
    elif "cisco" in vendor:
        result = icons.get("router")
    elif "lg" in vendor:
        result = icons.get("tv")
    elif "raspberry" in vendor:
        result = icons.get("raspberry")
    elif "apple" in vendor:
        result = icons.get("apple")
    elif "google" in vendor:
        result = icons.get("google")
    elif "ubiquiti" in vendor:
        result = icons.get("router")
    elif any(brand in vendor for brand in {"espressif"}):
        result = icons.get("microchip")

    # Guess icon based on MAC address patterns
    elif mac == "INTERNET":
        result = icons.get("globe")
    elif mac.startswith("00:1A:79"):  # Apple
        result = icons.get("apple")
    elif mac.startswith("B0:BE:83"):  # Apple
        result = icons.get("apple")
    elif mac.startswith("00:1B:63"):  # Sony
        result = icons.get("tablet")
    elif mac.startswith("74:AC:B9"):  # Unifi
        result = icons.get("ethernet")

    # Guess icon based on name
    elif 'google' in name:
        result = icons.get("google")
    elif 'desktop' in name:
        result = icons.get("desktop")
    elif 'raspberry' in name:
        result = icons.get("raspberry")

    # Guess icon based on IP address ranges
    elif ip.startswith("192.168.1."):
        result = icons.get("laptop")

    return result
#-------------------------------------------------------------------------------
# Guess device type
def guess_type(vendor, mac, ip, name, default):
    """Best-effort guess of a device type string.

    Applies ordered heuristics — vendor substrings, then the MAC
    'INTERNET' sentinel, then name substrings, then a fixed router IP —
    and returns the first match, falling back to *default*.
    """
    # Normalize inputs; None vendor/name fall back to sentinel strings
    mac_uc = mac.upper()
    vendor_lc = vendor.lower() if vendor else "unknown"
    name_lc = str(name).lower() if name else "(unknown)"

    # Ordered rule table — the first matching rule wins
    rules = (
        (any(brand in vendor_lc for brand in ("samsung", "motorola")), "Phone"),
        ("cisco" in vendor_lc, "Router"),
        ("lg" in vendor_lc, "TV"),
        ("google" in vendor_lc, "Phone"),
        ("ubiquiti" in vendor_lc, "Router"),
        (mac_uc == "INTERNET", "Internet"),
        ('google' in name_lc, "Phone"),
        (ip == ("192.168.1.1"), "Router"),
    )
    for matched, guess in rules:
        if matched:
            return guess
    return default

View File

@@ -1,31 +0,0 @@
import json
def update_value(json_data, object_path, key, value, target_property, desired_value):
    """Conditionally update one property of an object nested in json_data.

    Walks the dot-separated *object_path* (numeric segments index into
    lists) and, on the object reached at the final segment, checks whether
    its *key* equals *value*; when it does, sets *target_property* to
    *desired_value* in place (json_data is mutated). Returns the object
    reached at the end of the walk.

    Fixes: the traversal loop reused the name ``key`` for path segments,
    shadowing the *key* parameter, so the match test read
    ``obj[segment][segment]`` instead of ``obj[segment][key]``; also
    removed the dead ``traverse`` helper whose result was never used.
    """
    # Helper function to walk the path and apply the conditional update
    def update(obj, path, key, value, target_property, desired_value):
        segments = path.split(".")
        for i, segment in enumerate(segments):
            # Numeric segments index into lists
            if isinstance(obj, list):
                segment = int(segment)
            # Check if we have reached the desired object and it matches
            if i == len(segments) - 1 and obj[segment][key] == value:
                # Update the target property with the desired value
                obj[segment][target_property] = desired_value
            else:
                obj = obj[segment]
        return obj

    # Update the value in the target object (mutates json_data in place)
    updated_obj = update(json_data, object_path, key, value, target_property, desired_value)
    return updated_obj

View File

@@ -0,0 +1,84 @@
import sys
# Register NetAlertX directories
INSTALL_PATH="/app"
sys.path.extend([f"{INSTALL_PATH}/server"])
from logger import mylog, print_log
#-------------------------------------------------------------------------------
# Device object handling (WIP)
#-------------------------------------------------------------------------------
class DeviceInstance:
    """Data-access wrapper around the Devices table keyed by devMac/devGUID.

    Read helpers return sqlite rows (or dicts); mutating helpers commit
    immediately and raise ValueError when the target GUID does not exist.
    """
    def __init__(self, db):
        # Shared database handle; queries go through db.sql
        self.db = db

    # Get all devices
    def getAll(self):
        self.db.sql.execute("""
            SELECT * FROM Devices
        """)
        return self.db.sql.fetchall()

    # Get all devices whose name has not been resolved yet
    def getUnknown(self):
        self.db.sql.execute("""
            SELECT * FROM Devices WHERE devName in ("(unknown)", "(name not found)", "" )
        """)
        return self.db.sql.fetchall()

    # Get specific column value based on devMac
    # NOTE(review): column_name is interpolated into the SQL string — callers
    # must only pass trusted, hard-coded column names (devMac is parameterized).
    def getValueWithMac(self, column_name, devMac):
        query = f"SELECT {column_name} FROM Devices WHERE devMac = ?"
        self.db.sql.execute(query, (devMac,))
        result = self.db.sql.fetchone()
        return result[column_name] if result else None

    # Get all devices that are down (down-alerting AND absent from last scan)
    def getDown(self):
        self.db.sql.execute("""
            SELECT * FROM Devices WHERE devAlertDown = 1 and devPresentLastScan = 0
        """)
        return self.db.sql.fetchall()

    # Get all offline devices (absent from the last scan)
    def getOffline(self):
        self.db.sql.execute("""
            SELECT * FROM Devices WHERE devPresentLastScan = 0
        """)
        return self.db.sql.fetchall()

    # Get a device by devGUID; returns a dict or None when not found
    def getByGUID(self, devGUID):
        self.db.sql.execute("SELECT * FROM Devices WHERE devGUID = ?", (devGUID,))
        result = self.db.sql.fetchone()
        return dict(result) if result else None

    # Check if a device exists by devGUID
    def exists(self, devGUID):
        self.db.sql.execute("SELECT COUNT(*) AS count FROM Devices WHERE devGUID = ?", (devGUID,))
        result = self.db.sql.fetchone()
        return result["count"] > 0

    # Update a specific field for a device; raises ValueError on unknown GUID
    # NOTE(review): field is interpolated into the SQL — only pass trusted,
    # hard-coded column names (the value itself is parameterized).
    def updateField(self, devGUID, field, value):
        if not self.exists(devGUID):
            m = f"[Device] In 'updateField': GUID {devGUID} not found."
            mylog('none', m)
            raise ValueError(m)
        self.db.sql.execute(f"""
            UPDATE Devices SET {field} = ? WHERE devGUID = ?
        """, (value, devGUID))
        self.db.commitDB()

    # Delete a device by devGUID; raises ValueError on unknown GUID
    def delete(self, devGUID):
        if not self.exists(devGUID):
            m = f"[Device] In 'delete': GUID {devGUID} not found."
            mylog('none', m)
            raise ValueError(m)
        self.db.sql.execute("DELETE FROM Devices WHERE devGUID = ?", (devGUID,))
        self.db.commitDB()

View File

@@ -0,0 +1,65 @@
import sys
# Register NetAlertX directories
INSTALL_PATH="/app"
sys.path.extend([f"{INSTALL_PATH}/server"])
from logger import mylog, print_log
#-------------------------------------------------------------------------------
# Plugin object handling (WIP)
#-------------------------------------------------------------------------------
class PluginObjectInstance:
    """Thin data-access wrapper around the Plugins_Objects table."""

    def __init__(self, db):
        # db: NetAlertX DB wrapper exposing .sql (cursor) and .commitDB()
        self.db = db

    def getAll(self):
        """Return every row from the Plugins_Objects table."""
        self.db.sql.execute("""
            SELECT * FROM Plugins_Objects
        """)
        return self.db.sql.fetchall()

    def getByGUID(self, ObjectGUID):
        """Return one plugin object as a plain dict, or None when the GUID is unknown."""
        self.db.sql.execute("SELECT * FROM Plugins_Objects WHERE ObjectGUID = ?", (ObjectGUID,))
        row = self.db.sql.fetchone()
        return dict(row) if row else None

    def exists(self, ObjectGUID):
        """Return True when a plugin object with the given GUID is present."""
        self.db.sql.execute("SELECT COUNT(*) AS count FROM Plugins_Objects WHERE ObjectGUID = ?", (ObjectGUID,))
        row = self.db.sql.fetchone()
        return row["count"] > 0

    def getByPlugin(self, plugin):
        """Return all objects belonging to the given plugin."""
        self.db.sql.execute("SELECT * FROM Plugins_Objects WHERE Plugin = ?", (plugin,))
        return self.db.sql.fetchall()

    def getByStatus(self, status):
        """Return all objects currently in the given status."""
        self.db.sql.execute("SELECT * FROM Plugins_Objects WHERE Status = ?", (status,))
        return self.db.sql.fetchall()

    def updateField(self, ObjectGUID, field, value):
        """Set a single column on a plugin object; raises ValueError for unknown GUIDs.

        NOTE: *field* is interpolated into the SQL - trusted identifiers only.
        """
        if not self.exists(ObjectGUID):
            m = f"[PluginObject] In 'updateField': GUID {ObjectGUID} not found."
            mylog('none', m)
            raise ValueError(m)
        self.db.sql.execute(f"""
            UPDATE Plugins_Objects SET {field} = ? WHERE ObjectGUID = ?
        """, (value, ObjectGUID))
        self.db.commitDB()

    def delete(self, ObjectGUID):
        """Remove a plugin object; raises ValueError for unknown GUIDs."""
        if not self.exists(ObjectGUID):
            m = f"[PluginObject] In 'delete': GUID {ObjectGUID} not found."
            mylog('none', m)
            raise ValueError(m)
        self.db.sql.execute("DELETE FROM Plugins_Objects WHERE ObjectGUID = ?", (ObjectGUID,))
        self.db.commitDB()

View File

@@ -18,6 +18,7 @@ from api import update_api
from plugin_utils import logEventStatusCounts, get_plugin_string, get_plugin_setting_obj, print_plugin_info, list_to_csv, combine_plugin_objects, resolve_wildcards_arr, handle_empty, custom_plugin_decoder, decode_and_rename_files
from notification import Notification_obj, write_notification
from user_events_queue import UserEventsQueue
from crypto_utils import generate_deterministic_guid
# Make sure log level is initialized correctly
Logger(get_setting_value('LOG_LEVEL'))
@@ -582,13 +583,14 @@ def process_plugin_events(db, plugin, plugEventsArr):
for plugObj in pluginObjects:
# keep old createdTime time if the plugObj already was created before
createdTime = plugObj.changed if plugObj.status == 'new' else plugObj.created
# 18 values without Index
# 19 values without Index
values = (
plugObj.pluginPref, plugObj.primaryId, plugObj.secondaryId, createdTime,
plugObj.changed, plugObj.watched1, plugObj.watched2, plugObj.watched3,
plugObj.watched4, plugObj.status, plugObj.extra, plugObj.userData,
plugObj.foreignKey, plugObj.syncHubNodeName,
plugObj.helpVal1, plugObj.helpVal2, plugObj.helpVal3, plugObj.helpVal4
plugObj.helpVal1, plugObj.helpVal2, plugObj.helpVal3, plugObj.helpVal4,
plugObj.objectGUID
)
if plugObj.status == 'new':
@@ -625,8 +627,9 @@ def process_plugin_events(db, plugin, plugEventsArr):
("Plugin", "Object_PrimaryID", "Object_SecondaryID", "DateTimeCreated",
"DateTimeChanged", "Watched_Value1", "Watched_Value2", "Watched_Value3",
"Watched_Value4", "Status", "Extra", "UserData", "ForeignKey", "SyncHubNodeName",
"HelpVal1", "HelpVal2", "HelpVal3", "HelpVal4")
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
"HelpVal1", "HelpVal2", "HelpVal3", "HelpVal4",
"ObjectGUID")
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
""", objects_to_insert
)
@@ -637,7 +640,9 @@ def process_plugin_events(db, plugin, plugEventsArr):
UPDATE Plugins_Objects
SET "Plugin" = ?, "Object_PrimaryID" = ?, "Object_SecondaryID" = ?, "DateTimeCreated" = ?,
"DateTimeChanged" = ?, "Watched_Value1" = ?, "Watched_Value2" = ?, "Watched_Value3" = ?,
"Watched_Value4" = ?, "Status" = ?, "Extra" = ?, "UserData" = ?, "ForeignKey" = ?, "SyncHubNodeName" = ?, "HelpVal1" = ?, "HelpVal2" = ?, "HelpVal3" = ?, "HelpVal4" = ?
"Watched_Value4" = ?, "Status" = ?, "Extra" = ?, "UserData" = ?, "ForeignKey" = ?, "SyncHubNodeName" = ?,
"HelpVal1" = ?, "HelpVal2" = ?, "HelpVal3" = ?, "HelpVal4" = ?,
"ObjectGUID" = ?
WHERE "Index" = ?
""", objects_to_update
)
@@ -651,8 +656,9 @@ def process_plugin_events(db, plugin, plugEventsArr):
("Plugin", "Object_PrimaryID", "Object_SecondaryID", "DateTimeCreated",
"DateTimeChanged", "Watched_Value1", "Watched_Value2", "Watched_Value3",
"Watched_Value4", "Status", "Extra", "UserData", "ForeignKey", "SyncHubNodeName",
"HelpVal1", "HelpVal2", "HelpVal3", "HelpVal4")
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
"HelpVal1", "HelpVal2", "HelpVal3", "HelpVal4",
"ObjectGUID")
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
""", events_to_insert
)
@@ -665,8 +671,9 @@ def process_plugin_events(db, plugin, plugEventsArr):
("Plugin", "Object_PrimaryID", "Object_SecondaryID", "DateTimeCreated",
"DateTimeChanged", "Watched_Value1", "Watched_Value2", "Watched_Value3",
"Watched_Value4", "Status", "Extra", "UserData", "ForeignKey", "SyncHubNodeName",
"HelpVal1", "HelpVal2", "HelpVal3", "HelpVal4")
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
"HelpVal1", "HelpVal2", "HelpVal3", "HelpVal4",
"ObjectGUID")
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
""", history_to_insert
)
@@ -807,6 +814,7 @@ class plugin_object_class:
self.helpVal2 = objDbRow[16]
self.helpVal3 = objDbRow[17]
self.helpVal4 = objDbRow[18]
self.objectGUID = generate_deterministic_guid(self.pluginPref, self.primaryId, self.secondaryId)
# Check if self.status is valid

View File

@@ -6,7 +6,7 @@ from logger import mylog
from const import pluginsPath, logPath, apiPath
from helper import timeNowTZ, get_file_content, write_file, get_setting, get_setting_value, setting_value_to_python_type
from app_state import updateState
from crypto_utils import decrypt_data
from crypto_utils import decrypt_data, generate_deterministic_guid
module_name = 'Plugin utils'

View File

@@ -317,16 +317,14 @@ def update_devices_data_from_scan (db):
WHERE Devices.devMac = CurrentScan.cur_MAC
)
WHERE
(devParentPort IS NULL OR devParentPort = "" OR devParentPort = "null")
AND
(devParentPort IS NULL OR devParentPort IN ("", "null", "(unknown)", "(Unknown)"))
AND
EXISTS (
SELECT 1
FROM CurrentScan
WHERE Devices.devMac = CurrentScan.cur_MAC
AND CurrentScan.cur_Port IS NOT NULL
AND CurrentScan.cur_Port NOT IN ("", "null")
)
""")
AND CurrentScan.cur_Port IS NOT NULL AND CurrentScan.cur_Port NOT IN ("", "null")
)""")
# Update only devices with empty or NULL devParentMAC
mylog('debug', '[Update Devices] - (if not empty) cur_NetworkNodeMAC -> devParentMAC')
@@ -336,16 +334,34 @@ def update_devices_data_from_scan (db):
FROM CurrentScan
WHERE Devices.devMac = CurrentScan.cur_MAC
)
<<<<<<< HEAD
<<<<<<<< HEAD:server/scan/device_handling.py
WHERE
========
WHERE
>>>>>>>> main:server/device.py
=======
WHERE
>>>>>>> main
(devParentMAC IS NULL OR devParentMAC IN ("", "null", "(unknown)", "(Unknown)"))
AND
EXISTS (
SELECT 1
FROM CurrentScan
WHERE Devices.devMac = CurrentScan.cur_MAC
<<<<<<< HEAD
<<<<<<<< HEAD:server/scan/device_handling.py
AND CurrentScan.cur_NetworkNodeMAC IS NOT NULL AND CurrentScan.cur_NetworkNodeMAC NOT IN ("", "null")
========
AND CurrentScan.cur_NetworkNodeMAC IS NOT NULL AND CurrentScan.cur_NetworkNodeMAC NOT IN ("", "null")
>>>>>>>> main:server/device.py
)""")
=======
AND CurrentScan.cur_NetworkNodeMAC IS NOT NULL AND CurrentScan.cur_NetworkNodeMAC NOT IN ("", "null")
)
""")
>>>>>>> main
# Update only devices with empty or NULL devSite
mylog('debug', '[Update Devices] - (if not empty) cur_NetworkSite -> (if empty) devSite')

View File

@@ -1,15 +1,16 @@
import sys
# Register NetAlertX directories
INSTALL_PATH="/app"
sys.path.extend([f"{INSTALL_PATH}/server"])
import conf
from device import create_new_devices, print_scan_stats, save_scanned_devices, update_devices_data_from_scan, exclude_ignored_devices
from scan.device_handling import create_new_devices, print_scan_stats, save_scanned_devices, update_devices_data_from_scan, exclude_ignored_devices
from helper import timeNowTZ
from logger import mylog
from reporting import skip_repeated_notifications
#===============================================================================
# SCAN NETWORK
#===============================================================================

147
server/workflows/actions.py Executable file
View File

@@ -0,0 +1,147 @@
import sys
import sqlite3
# Register NetAlertX directories
INSTALL_PATH="/app"
sys.path.extend([f"{INSTALL_PATH}/server"])
import conf
from logger import mylog, Logger
from helper import get_setting_value, timeNowTZ
from models.device_instance import DeviceInstance
from models.plugin_object_instance import PluginObjectInstance
# Make sure log level is initialized correctly
Logger(get_setting_value('LOG_LEVEL'))
from workflows.triggers import Trigger
class Action:
    """Common interface for all workflow actions."""

    def __init__(self, trigger):
        # The trigger that fired the workflow; carries the event context
        # and the affected object for subclasses to act on.
        self.trigger = trigger

    def execute(self, obj):
        """Run the action against *obj*; concrete subclasses must override."""
        raise NotImplementedError("Subclasses must implement execute()")
class UpdateFieldAction(Action):
    """Action that writes a single field/value pair onto the triggering object."""

    def __init__(self, db, field, value, trigger):
        super().__init__(trigger)  # store the trigger on the base class
        self.field = field
        self.value = value
        self.db = db

    def execute(self):
        """Persist the field update on the device or plugin object behind the trigger."""
        mylog('verbose', f"[WF] Updating field '{self.field}' to '{self.value}' for event object {self.trigger.object_type}")

        target = self.trigger.object
        # Normalize sqlite3.Row into a plain dict for uniform key access
        if isinstance(target, sqlite3.Row):
            target = dict(target)

        handled = False
        if isinstance(target, dict) and "ObjectGUID" in target:
            # Plugin-object path - currently unused by shipped workflows
            mylog('debug', f"[WF] Updating Object '{target}' ")
            PluginObjectInstance(self.db).updateField(target["ObjectGUID"], self.field, self.value)
            handled = True
        elif isinstance(target, dict) and "devGUID" in target:
            mylog('debug', f"[WF] Updating Device '{target}' ")
            DeviceInstance(self.db).updateField(target["devGUID"], self.field, self.value)
            handled = True

        if not handled:
            mylog('none', f"[WF] Could not process action for object: {target}")

        return target
class DeleteObjectAction(Action):
    """Action to delete the device or plugin object the trigger points at."""

    def __init__(self, db, trigger):
        super().__init__(trigger)  # store the trigger on the base class
        self.db = db

    def execute(self):
        """Delete the triggering object; returns the (now deleted) object data."""
        mylog('verbose', f"[WF] Deleting event object {self.trigger.object_type}")

        obj = self.trigger.object
        # Normalize sqlite3.Row into a plain dict for easier handling
        if isinstance(obj, sqlite3.Row):
            obj = dict(obj)

        processed = False
        if isinstance(obj, dict) and "ObjectGUID" in obj:
            # Plugin-object path - currently unused by shipped workflows
            # BUG FIX: debug messages previously said "Updating" in this delete action
            mylog('debug', f"[WF] Deleting Object '{obj}' ")
            plugin_instance = PluginObjectInstance(self.db)
            plugin_instance.delete(obj["ObjectGUID"])
            processed = True
        elif isinstance(obj, dict) and "devGUID" in obj:
            mylog('debug', f"[WF] Deleting Device '{obj}' ")
            device_instance = DeviceInstance(self.db)
            device_instance.delete(obj["devGUID"])
            processed = True

        if not processed:
            mylog('none', f"[WF] Could not process action for object: {obj}")

        return obj
class RunPluginAction(Action):
    """Action to run a specific plugin."""

    def __init__(self, plugin_name, params, trigger):
        super().__init__(trigger)  # store the trigger on the base class
        self.plugin_name = plugin_name
        self.params = params

    def execute(self):
        """Log the plugin-run intent; returns the triggering object.

        NOTE(review): actual plugin dispatch is not wired up yet - this
        currently only logs what would be executed.
        """
        target = self.trigger.object
        mylog('verbose', [f"Executing plugin '{self.plugin_name}' with parameters {self.params} for object {target}"])
        # PluginManager.run(self.plugin_name, self.parameters)
        return target
class SendNotificationAction(Action):
    """Action to send a notification."""

    def __init__(self, method, message, trigger):
        super().__init__(trigger)  # store the trigger on the base class
        self.method = method
        self.message = message

    def execute(self):
        """Log the notification intent; returns the triggering object.

        NOTE(review): actual delivery is not wired up yet - this currently
        only logs what would be sent.
        """
        target = self.trigger.object
        mylog('verbose', [f"Sending notification via '{self.method}': {self.message} for object {target}"])
        # NotificationManager.send(self.method, self.message)
        return target
class ActionGroup:
    """Applies a sequence of actions to a single object, in order."""

    def __init__(self, actions):
        # actions: iterable of objects exposing execute(obj)
        self.actions = actions

    def execute(self, obj):
        """Run every action against *obj* and return *obj* unchanged."""
        for current in self.actions:
            current.execute(obj)
        return obj

186
server/workflows/app_events.py Executable file
View File

@@ -0,0 +1,186 @@
import datetime
import json
import uuid
import sys
import pytz
# Register NetAlertX directories
INSTALL_PATH="/app"
sys.path.extend([f"{INSTALL_PATH}/server"])
# Register NetAlertX modules
import conf
from helper import get_setting_value, timeNowTZ
# Make sure the TIMEZONE for logging is correct
# conf.tz = pytz.timezone(get_setting_value('TIMEZONE'))
from logger import mylog, Logger, print_log, logResult
# Make sure log level is initialized correctly
Logger(get_setting_value('LOG_LEVEL'))
from const import applicationPath, logPath, apiPath, confFileName, sql_generateGuid
from helper import timeNowTZ
class AppEvent_obj:
    """Owns the AppEvents queue table and the SQLite triggers that populate it.

    Every INSERT/UPDATE/DELETE on a mapped table (currently only Devices)
    writes a row into AppEvents, which the workflow engine later consumes.

    NOTE(review): the constructor drops and rebuilds the AppEvents table on
    every instantiation, so pending (unprocessed) events do not survive a
    restart - confirm this is intended before relying on event history.
    """

    def __init__(self, db):
        # db: NetAlertX DB wrapper exposing .sql (cursor) and .commitDB()
        self.db = db

        # Drop existing table
        self.db.sql.execute("""DROP TABLE IF EXISTS "AppEvents" """)

        # Drop all triggers
        self.drop_all_triggers()

        # Create the AppEvents table if missing
        self.create_app_events_table()

        # Define object mapping for different table structures, including fields, expressions, and constants.
        # Each value is a SQL expression pasted verbatim into the trigger body
        # (NEW.<column> references, CASE expressions, or quoted constants).
        self.object_mapping = {
            "Devices": {
                "fields": {
                    "ObjectGUID": "NEW.devGUID",
                    "ObjectPrimaryID": "NEW.devMac",
                    "ObjectSecondaryID": "NEW.devLastIP",
                    "ObjectForeignKey": "NEW.devGUID",
                    "ObjectStatus": "CASE WHEN NEW.devPresentLastScan = 1 THEN 'online' ELSE 'offline' END",
                    "ObjectStatusColumn": "'devPresentLastScan'",
                    "ObjectIsNew": "NEW.devIsNew",
                    "ObjectIsArchived": "NEW.devIsArchived",
                    "ObjectPlugin": "'DEVICES'"
                }
            }
            # ,
            # "Plugins_Objects": {
            #     "fields": {
            #         "ObjectGUID": "NEW.ObjectGUID",
            #         "ObjectPrimaryID": "NEW.Plugin",
            #         "ObjectSecondaryID": "NEW.Object_PrimaryID",
            #         "ObjectForeignKey": "NEW.ForeignKey",
            #         "ObjectStatus": "NEW.Status",
            #         "ObjectStatusColumn": "'Status'",
            #         "ObjectIsNew": "CASE WHEN NEW.Status = 'new' THEN 1 ELSE 0 END",
            #         "ObjectIsArchived": "0", # Default value
            #         "ObjectPlugin": "NEW.Plugin"
            #     }
            # }
        }

        # Re-Create triggers dynamically: one trigger per event type per mapped table
        for table, config in self.object_mapping.items():
            self.create_trigger(table, "insert", config)
            self.create_trigger(table, "update", config)
            self.create_trigger(table, "delete", config)

        self.save()

    def drop_all_triggers(self):
        """Drops all relevant triggers to ensure a clean start.

        NOTE(review): this drops EVERY trigger in the database, not only the
        trg_* ones created by this class - confirm no other component
        installs its own triggers.
        """
        self.db.sql.execute("""
            SELECT 'DROP TRIGGER IF EXISTS ' || name || ';'
            FROM sqlite_master
            WHERE type = 'trigger';
        """)

        # Fetch all drop statements
        drop_statements = self.db.sql.fetchall()

        # Execute each drop statement
        for statement in drop_statements:
            self.db.sql.execute(statement[0])

        self.save()

    def create_app_events_table(self):
        """Creates the AppEvents table if it doesn't exist."""
        self.db.sql.execute("""
            CREATE TABLE IF NOT EXISTS "AppEvents" (
                "Index" INTEGER PRIMARY KEY AUTOINCREMENT,
                "GUID" TEXT UNIQUE,
                "AppEventProcessed" BOOLEAN,
                "DateTimeCreated" TEXT,
                "ObjectType" TEXT,
                "ObjectGUID" TEXT,
                "ObjectPlugin" TEXT,
                "ObjectPrimaryID" TEXT,
                "ObjectSecondaryID" TEXT,
                "ObjectForeignKey" TEXT,
                "ObjectIndex" TEXT,
                "ObjectIsNew" BOOLEAN,
                "ObjectIsArchived" BOOLEAN,
                "ObjectStatusColumn" TEXT,
                "ObjectStatus" TEXT,
                "AppEventType" TEXT,
                "Helper1" TEXT,
                "Helper2" TEXT,
                "Helper3" TEXT,
                "Extra" TEXT
            );
        """)

    def create_trigger(self, table_name, event, config):
        """Generic function to create triggers dynamically.

        Builds an AFTER <event> trigger that inserts a row into AppEvents,
        unless an identical unprocessed event is already queued (the WHEN
        clause de-duplicates). manage_prefix() rewrites NEW. column
        references to OLD. for delete triggers, where only the OLD row
        is available.
        """
        trigger_name = f"trg_{event}_{table_name.lower()}"

        # NOTE(review): the final "-- ObjectForeignKey" SQL comment inside the
        # VALUES list actually annotates the ObjectPlugin expression (copy-paste).
        query = f"""
            CREATE TRIGGER IF NOT EXISTS "{trigger_name}"
            AFTER {event.upper()} ON "{table_name}"
            WHEN NOT EXISTS (
                SELECT 1 FROM AppEvents
                WHERE AppEventProcessed = 0
                AND ObjectType = '{table_name}'
                AND ObjectGUID = {manage_prefix(config['fields']['ObjectGUID'], event)}
                AND ObjectStatus = {manage_prefix(config['fields']['ObjectStatus'], event)}
                AND AppEventType = '{event.lower()}'
            )
            BEGIN
                INSERT INTO "AppEvents" (
                    "GUID",
                    "DateTimeCreated",
                    "AppEventProcessed",
                    "ObjectType",
                    "ObjectGUID",
                    "ObjectPrimaryID",
                    "ObjectSecondaryID",
                    "ObjectStatus",
                    "ObjectStatusColumn",
                    "ObjectIsNew",
                    "ObjectIsArchived",
                    "ObjectForeignKey",
                    "ObjectPlugin",
                    "AppEventType"
                )
                VALUES (
                    {sql_generateGuid},
                    DATETIME('now'),
                    FALSE,
                    '{table_name}',
                    {manage_prefix(config['fields']['ObjectGUID'], event)}, -- ObjectGUID
                    {manage_prefix(config['fields']['ObjectPrimaryID'], event)}, -- ObjectPrimaryID
                    {manage_prefix(config['fields']['ObjectSecondaryID'], event)}, -- ObjectSecondaryID
                    {manage_prefix(config['fields']['ObjectStatus'], event)}, -- ObjectStatus
                    {manage_prefix(config['fields']['ObjectStatusColumn'], event)}, -- ObjectStatusColumn
                    {manage_prefix(config['fields']['ObjectIsNew'], event)}, -- ObjectIsNew
                    {manage_prefix(config['fields']['ObjectIsArchived'], event)}, -- ObjectIsArchived
                    {manage_prefix(config['fields']['ObjectForeignKey'], event)}, -- ObjectForeignKey
                    {manage_prefix(config['fields']['ObjectPlugin'], event)}, -- ObjectForeignKey
                    '{event.lower()}'
                );
            END;
        """

        mylog("verbose", [query])
        self.db.sql.execute(query)

    def save(self):
        """Commit pending changes to the database."""
        # Commit changes
        self.db.commitDB()
# Manage prefixes of column names
def manage_prefix(field, event):
    """Return *field* with SQLite NEW. references rewritten to OLD. for deletes.

    DELETE triggers only expose the OLD row, so column expressions written
    against NEW must be rewritten; all other event types keep the field as-is.
    """
    return field.replace("NEW.", "OLD.") if event == "delete" else field

83
server/workflows/conditions.py Executable file
View File

@@ -0,0 +1,83 @@
import re
import sys
import json
# Register NetAlertX directories
INSTALL_PATH="/app"
sys.path.extend([f"{INSTALL_PATH}/server"])
import conf
from logger import mylog, Logger
from helper import get_setting_value, timeNowTZ
# Make sure log level is initialized correctly
Logger(get_setting_value('LOG_LEVEL'))
class Condition:
    """Evaluates a single condition."""

    def __init__(self, condition_json):
        self.field = condition_json["field"]
        self.operator = condition_json["operator"]
        self.value = condition_json["value"]
        self.negate = condition_json.get("negate", False)

    def evaluate(self, trigger):
        """Return True when the field on the trigger's event/object matches.

        Looks the field up on the app event first, then on the object that
        raised the event; a field found on the event takes precedence.
        """
        event_val = trigger.event[self.field] if self.field in trigger.event.keys() else None
        object_val = trigger.object[self.field] if self.field in trigger.object.keys() else None

        # Field absent from both sources -> condition cannot match
        if event_val is None and object_val is None:
            return False
        candidate = event_val if event_val is not None else object_val

        # Dispatch on the operator; comparisons are string-based by design
        if self.operator == "equals":
            outcome = str(candidate) == str(self.value)
        elif self.operator == "contains":
            outcome = str(self.value) in str(candidate)
        elif self.operator == "regex":
            outcome = bool(re.match(self.value, str(candidate)))
        else:
            m = f"[WF] Unsupported operator: {self.operator}"
            mylog('none', [m])
            raise ValueError(m)

        return not outcome if self.negate else outcome
class ConditionGroup:
    """Handles condition groups with AND, OR logic, supporting nested groups.

    Any entry in ``group_json["conditions"]`` that carries a "field" key is a
    simple Condition; anything else is treated as a nested ConditionGroup.
    """

    def __init__(self, group_json):
        # BUG FIX: removed leftover debug mylog('none', ...) calls that dumped
        # the raw workflow JSON into the log at the highest-visibility level
        # on every instantiation.
        self.logic = group_json.get("logic", "AND").upper()
        self.conditions = []
        for condition in group_json["conditions"]:
            if "field" in condition:  # Simple condition
                self.conditions.append(Condition(condition))
            else:  # Nested condition group
                self.conditions.append(ConditionGroup(condition))

    def evaluate(self, event):
        """Return the combined truth value of all child conditions.

        *event* is the Trigger passed through to each child's evaluate().
        AND requires every child to pass (vacuously True when empty);
        OR requires at least one (vacuously False when empty).
        Raises ValueError for an unsupported logic keyword.
        """
        results = [condition.evaluate(event) for condition in self.conditions]

        if self.logic == "AND":
            return all(results)
        elif self.logic == "OR":
            return any(results)
        else:
            m = f"[WF] Unsupported logic: {self.logic}"
            mylog('none', [m])
            raise ValueError(m)

160
server/workflows/manager.py Executable file
View File

@@ -0,0 +1,160 @@
import sys
import json
# Register NetAlertX directories
INSTALL_PATH="/app"
sys.path.extend([f"{INSTALL_PATH}/server"])
import conf
from const import fullConfFolder
import workflows.actions
from logger import mylog, Logger
from helper import get_setting_value, timeNowTZ
# Make sure log level is initialized correctly
Logger(get_setting_value('LOG_LEVEL'))
from workflows.triggers import Trigger
from workflows.conditions import ConditionGroup
from workflows.actions import *
class WorkflowManager:
    """Loads workflow definitions and runs them against queued app events."""

    def __init__(self, db):
        self.db = db
        self.workflows = self.load_workflows()
        # Set by data-modifying actions so callers know to refresh the API
        self.update_api = False

    def load_workflows(self):
        """Load workflows from workflows.json; returns [] when missing or invalid."""
        try:
            workflows_json_path = fullConfFolder + '/workflows.json'
            with open(workflows_json_path, 'r') as f:
                workflows = json.load(f)
            return workflows
        except (FileNotFoundError, json.JSONDecodeError):
            mylog('none', ['[WF] Failed to load workflows.json'])
            return []

    def get_new_app_events(self):
        """Get new unprocessed events from the AppEvents table."""
        result = self.db.sql.execute("""
            SELECT * FROM AppEvents
            WHERE AppEventProcessed = 0
            ORDER BY DateTimeCreated ASC
        """).fetchall()

        mylog('none', [f'[WF] get_new_app_events - new events count: {len(result)}'])
        return result

    def process_event(self, event):
        """Run every workflow whose trigger matches *event*, then mark it processed."""
        # BUG FIX: double quotes inside a double-quoted f-string are a
        # SyntaxError before Python 3.12 - use single-quoted subscripts.
        mylog('verbose', [f"[WF] Processing event with GUID {event['GUID']}"])

        # Check if the trigger conditions match
        for workflow in self.workflows:
            # The Trigger constructor also evaluates whether this event fires it
            trigger = Trigger(workflow["trigger"], event, self.db)

            if trigger.triggered:
                mylog('verbose', [f"[WF] Event with GUID '{event['GUID']}' triggered the workflow '{workflow['name']}'"])
                self.execute_workflow(workflow, trigger)

        # After processing, mark the event as handled so it is not picked up again
        self.db.sql.execute("""
            UPDATE AppEvents
            SET AppEventProcessed = 1
            WHERE "Index" = ?
        """, (event['Index'],))
        self.db.commitDB()

    def execute_workflow(self, workflow, trigger):
        """Execute the actions in the given workflow if conditions are met.

        Condition groups are evaluated independently; the first group that
        passes triggers the actions and stops further evaluation.
        """
        if not isinstance(workflow.get("conditions"), list):
            m = "[WF] workflow['conditions'] must be a list"
            mylog('none', [m])
            raise ValueError(m)

        for condition_group in workflow["conditions"]:
            evaluator = ConditionGroup(condition_group)
            if evaluator.evaluate(trigger):
                mylog('none', [f"[WF] Workflow {workflow['name']} will be executed - conditions were evaluated as TRUE"])
                mylog('debug', [f"[WF] Workflow condition_group: {condition_group}"])
                self.execute_actions(workflow["actions"], trigger)
                return  # Stop if a condition group succeeds

        mylog('none', ["[WF] No condition group matched. Actions not executed."])

    def execute_actions(self, actions, trigger):
        """Execute the actions defined in a workflow.

        Raises ValueError for unsupported action types.
        """
        for action in actions:
            if action["type"] == "update_field":
                action_instance = UpdateFieldAction(self.db, action["field"], action["value"], trigger)
                # indicate that the api has to be updated
                self.update_api = True
            elif action["type"] == "run_plugin":
                # BUG FIX: RunPluginAction takes (plugin_name, params, trigger) -
                # passing self.db as a fourth argument raised a TypeError.
                action_instance = RunPluginAction(action["plugin"], action["params"], trigger)
            elif action["type"] == "delete_device":
                action_instance = DeleteObjectAction(self.db, trigger)
            # "send_notification" is not wired up yet (see SendNotificationAction)
            else:
                m = f"[WF] Unsupported action type: {action['type']}"
                mylog('none', [m])
                raise ValueError(m)

            action_instance.execute()

62
server/workflows/triggers.py Executable file
View File

@@ -0,0 +1,62 @@
import sys
# Register NetAlertX directories
INSTALL_PATH="/app"
sys.path.extend([f"{INSTALL_PATH}/server"])
import conf
from logger import mylog, Logger
from helper import get_setting_value, timeNowTZ
# Make sure log level is initialized correctly
Logger(get_setting_value('LOG_LEVEL'))
class Trigger:
    """Represents a trigger definition"""

    def __init__(self, triggerJson, event, db):
        """
        :param triggerJson: JSON trigger object {"object_type":"Devices","event_type":"update"}
        :param event: The AppEvents row the trigger is evaluated against
        :param db: DB connection used to fetch the underlying object on a match
        """
        self.object_type = triggerJson["object_type"]
        self.event_type = triggerJson["event_type"]
        self.event = event  # Store the triggered event context, if provided

        # A trigger fires only when both the object type and event type match
        self.triggered = self.object_type == event["ObjectType"] and self.event_type == event["AppEventType"]

        mylog('verbose', [f"[WF] self.triggered '{self.triggered}'"])

        self.object = None
        if self.triggered:
            # object type corresponds with the DB table name
            db_table = self.object_type

            if db_table == "Devices":
                refField = "devGUID"
            elif db_table == "Plugins_Objects":
                refField = "ObjectGUID"
            else:
                m = f"[WF] Unsupported object_type: {self.object_type}"
                mylog('none', [m])
                raise ValueError(m)

            # Table and column names are whitelisted above; the GUID comes from
            # event data, so bind it as a parameter instead of interpolating it
            # into the SQL string (BUG FIX: injection-prone f-string before).
            query = f"""
                SELECT * FROM
                {db_table}
                WHERE {refField} = ?
            """

            mylog('debug', [query])

            result = db.sql.execute(query, (event["ObjectGUID"],)).fetchall()
            # BUG FIX: guard against the row having been deleted between the
            # event being queued and the workflow running (was result[0],
            # which raised IndexError on an empty result set).
            self.object = result[0] if result else None

    def set_event(self, event):
        """Set or update the event context for this trigger"""
        self.event = event

55
test/workflows.json Executable file
View File

@@ -0,0 +1,55 @@
[
{
"name": "Sample Device Update Workflow",
"trigger": {
"object_type": "Devices",
"event_type": "update"
},
"conditions": [
{
"logic": "AND",
"conditions": [
{
"field": "devVendor",
"operator": "contains",
"value": "Google"
},
{
"field": "devIsNew",
"operator": "equals",
"value": "1"
},
{
"logic": "OR",
"conditions": [
{
"field": "devIsNew",
"operator": "equals",
"value": "1"
},
{
"field": "devName",
"operator": "contains",
"value": "Google"
}
]
}
]
}
],
"actions": [
{
"type": "update_field",
"field": "devIsNew",
"value": "0"
},
{
"type": "run_plugin",
"plugin": "SMTP",
"params": {
"message": "New device from Google detected."
}
}
]
}
]