Compare commits

...

20 Commits

Author SHA1 Message Date
jokob-sk
2f70e2e8d8 Merge branch 'main' of https://github.com/jokob-sk/NetAlertX
Some checks are pending
docker / docker_dev (push) Waiting to run
2024-09-19 15:45:16 +10:00
jokob-sk
4a20b66c92 Clear NEW flag setting 2024-09-19 15:43:16 +10:00
github-actions[bot]
36cec0ab38 [🤖Automation] Update README with sponsors information
Some checks are pending
docker / docker_dev (push) Waiting to run
2024-09-18 11:53:41 +00:00
jokob-sk
6bde0f9084 🔑 Set PWD not working #793
Some checks are pending
docker / docker_dev (push) Waiting to run
2024-09-18 12:56:09 +10:00
jokob-sk
f64ef5b881 🔑 Set PWD not working #793
Some checks are pending
docker / docker_dev (push) Waiting to run
2024-09-18 09:57:46 +10:00
jokob-sk
1895f68233 🔄 Sync hub 2.0.1 - init Node field #788 2024-09-18 09:27:32 +10:00
jokob-sk
d2fe53bc81 🔃 Sync hub 2.0.1 #788 2024-09-18 08:48:08 +10:00
github-actions[bot]
e9e45c34ae [🤖Automation] Update README with sponsors information
Some checks are pending
docker / docker_dev (push) Waiting to run
2024-09-17 11:53:54 +00:00
jokob-sk
064a51acee 🩹 Handle vendor NoneType #791
Some checks are pending
docker / docker_dev (push) Waiting to run
2024-09-17 08:05:17 +10:00
github-actions[bot]
7340ce6da2 [🤖Automation] Update README with sponsors information
Some checks are pending
docker / docker_dev (push) Waiting to run
2024-09-16 11:53:54 +00:00
github-actions[bot]
703885308a [🤖Automation] Update README with sponsors information
Some checks are pending
docker / docker_dev (push) Waiting to run
2024-09-15 11:53:52 +00:00
jokob-sk
71856b49a4 🩹 Handle vendor NoneType #791
Some checks are pending
docker / docker_dev (push) Waiting to run
2024-09-15 08:32:35 +10:00
github-actions[bot]
86c7d26107 [🤖Automation] Update README with sponsors information
Some checks are pending
docker / docker_dev (push) Waiting to run
2024-09-14 11:53:34 +00:00
jokob-sk
d858f4f9d0 🔄Sync hub 2.0
Some checks are pending
docker / docker_dev (push) Waiting to run
2024-09-14 10:37:30 +10:00
jokob-sk
aefe470d31 🔄Sync hub 2.0 2024-09-14 09:37:27 +10:00
github-actions[bot]
99fb60c1b5 [🤖Automation] Update README with sponsors information
Some checks are pending
docker / docker_dev (push) Waiting to run
2024-09-13 11:53:38 +00:00
Hosted Weblate
ec37e4d71b Merge branch 'origin/main' into Weblate. 2024-09-13 11:09:18 +00:00
gallegonovato
e240821d6c Translated using Weblate (Spanish)
Currently translated at 100.0% (694 of 694 strings)

Translation: NetAlertX/core
Translate-URL: https://hosted.weblate.org/projects/pialert/core/es/
2024-09-13 11:09:15 +00:00
github-actions[bot]
632e441dda [🤖Automation] Update README with sponsors information
Some checks are pending
docker / docker_dev (push) Waiting to run
2024-09-12 11:53:59 +00:00
jokob-sk
24f7935891 📚Docs
Some checks are pending
docker / docker_dev (push) Waiting to run
2024-09-12 07:55:20 +10:00
26 changed files with 669 additions and 160 deletions

View File

@@ -41,7 +41,8 @@ docker run -d --rm --network=host \
| `PORT` |Port of the web interface | `20211` | | `PORT` |Port of the web interface | `20211` |
| `LISTEN_ADDR` |Set the specific IP Address for the listener address for the nginx webserver (web interface). This could be useful when using multiple subnets to hide the web interface from all untrusted networks. | `0.0.0.0` | | `LISTEN_ADDR` |Set the specific IP Address for the listener address for the nginx webserver (web interface). This could be useful when using multiple subnets to hide the web interface from all untrusted networks. | `0.0.0.0` |
|`TZ` |Time zone to display stats correctly. Find your time zone [here](https://en.wikipedia.org/wiki/List_of_tz_database_time_zones) | `Europe/Berlin` | |`TZ` |Time zone to display stats correctly. Find your time zone [here](https://en.wikipedia.org/wiki/List_of_tz_database_time_zones) | `Europe/Berlin` |
|`ALWAYS_FRESH_INSTALL` | Setting to `true` will delete the content of the `/db` & `/config` folders. For testing purposes. Can be coupled with [watchtower](https://github.com/containrrr/watchtower) to have an always freshly installed `netalertx`/`-dev` image. | `N/A` | |`APP_CONF_OVERRIDE` | JSON override for settings, e.g. `{"SCAN_SUBNETS":"['192.168.1.0/24 --interface=eth1']","UI_dark_mode":"True"}` (Experimental 🧪) | `N/A` |
|`ALWAYS_FRESH_INSTALL` | If `true` will delete the content of the `/db` & `/config` folders. For testing purposes. Can be coupled with [watchtower](https://github.com/containrrr/watchtower) to have an always freshly installed `netalertx`/`netalertx-dev` image. | `N/A` |
### Docker paths ### Docker paths

275
docs/AUTHELIA.md Executable file
View File

@@ -0,0 +1,275 @@
(DRAFT) Authelia support
```yaml
theme: dark
default_2fa_method: "totp"
server:
address: 0.0.0.0:9091
endpoints:
enable_expvars: false
enable_pprof: false
authz:
forward-auth:
implementation: 'ForwardAuth'
authn_strategies:
- name: 'HeaderAuthorization'
schemes:
- 'Basic'
- name: 'CookieSession'
ext-authz:
implementation: 'ExtAuthz'
authn_strategies:
- name: 'HeaderAuthorization'
schemes:
- 'Basic'
- name: 'CookieSession'
auth-request:
implementation: 'AuthRequest'
authn_strategies:
- name: 'HeaderAuthRequestProxyAuthorization'
schemes:
- 'Basic'
- name: 'CookieSession'
legacy:
implementation: 'Legacy'
authn_strategies:
- name: 'HeaderLegacy'
- name: 'CookieSession'
disable_healthcheck: false
tls:
key: ""
certificate: ""
client_certificates: []
headers:
csp_template: ""
log:
## Level of verbosity for logs: info, debug, trace.
level: info
###############################################################
# The most important section
###############################################################
access_control:
## Default policy can either be 'bypass', 'one_factor', 'two_factor' or 'deny'.
default_policy: deny
networks:
- name: internal
networks:
- '192.168.0.0/18'
- '10.10.10.0/8' # Zerotier
- name: private
networks:
- '172.16.0.0/12'
rules:
- networks:
- private
domain:
- '*'
policy: bypass
- networks:
- internal
domain:
- '*'
policy: bypass
- domain:
      # exclude itself from auth, should not happen as we use Traefik middleware on a case-by-case scenario
- 'auth.MYDOMAIN1.TLD'
- 'authelia.MYDOMAIN1.TLD'
- 'auth.MYDOMAIN2.TLD'
- 'authelia.MYDOMAIN2.TLD'
policy: bypass
- domain:
#All subdomains match
- 'MYDOMAIN1.TLD'
- '*.MYDOMAIN1.TLD'
policy: two_factor
- domain:
      # This will not work yet as Authelia does not support multi-domain authentication
- 'MYDOMAIN2.TLD'
- '*.MYDOMAIN2.TLD'
policy: two_factor
############################################################
identity_validation:
reset_password:
jwt_secret: "[REDACTED]"
identity_providers:
oidc:
enable_client_debug_messages: true
enforce_pkce: public_clients_only
hmac_secret: [REDACTED]
lifespans:
authorize_code: 1m
id_token: 1h
refresh_token: 90m
access_token: 1h
cors:
endpoints:
- authorization
- token
- revocation
- introspection
- userinfo
allowed_origins:
- "*"
allowed_origins_from_client_redirect_uris: false
jwks:
- key: [REDACTED]
certificate_chain:
clients:
- client_id: portainer
client_name: Portainer
# generate secret with "authelia crypto hash generate pbkdf2 --random --random.length 32 --random.charset alphanumeric"
# Random Password: [REDACTED]
# Digest: [REDACTED]
client_secret: [REDACTED]
token_endpoint_auth_method: 'client_secret_post'
public: false
authorization_policy: two_factor
consent_mode: pre-configured #explicit
pre_configured_consent_duration: '6M' #Must be re-authorised every 6 Months
scopes:
- openid
        #- groups #Currently not supported in this Authelia version
- email
- profile
redirect_uris:
- https://portainer.MYDOMAIN1.LTD
userinfo_signed_response_alg: none
- client_id: openproject
client_name: OpenProject
# generate secret with "authelia crypto hash generate pbkdf2 --random --random.length 32 --random.charset alphanumeric"
# Random Password: [REDACTED]
# Digest: [REDACTED]
client_secret: [REDACTED]
token_endpoint_auth_method: 'client_secret_basic'
public: false
authorization_policy: two_factor
consent_mode: pre-configured #explicit
pre_configured_consent_duration: '6M' #Must be re-authorised every 6 Months
scopes:
- openid
        #- groups #Currently not supported in this Authelia version
- email
- profile
redirect_uris:
- https://op.MYDOMAIN.TLD
#grant_types:
# - refresh_token
# - authorization_code
#response_types:
# - code
#response_modes:
# - form_post
# - query
# - fragment
userinfo_signed_response_alg: none
##################################################################
telemetry:
metrics:
enabled: false
address: tcp://0.0.0.0:9959
totp:
disable: false
issuer: authelia.com
algorithm: sha1
digits: 6
period: 30 ## The period in seconds a one-time password is valid for.
skew: 1
secret_size: 32
webauthn:
disable: false
timeout: 60s ## Adjust the interaction timeout for Webauthn dialogues.
display_name: Authelia
attestation_conveyance_preference: indirect
user_verification: preferred
ntp:
address: "pool.ntp.org"
version: 4
max_desync: 5s
disable_startup_check: false
disable_failure: false
authentication_backend:
password_reset:
disable: false
custom_url: ""
refresh_interval: 5m
file:
path: /config/users_database.yml
watch: true
password:
algorithm: argon2
argon2:
variant: argon2id
iterations: 3
memory: 65536
parallelism: 4
key_length: 32
salt_length: 16
password_policy:
standard:
enabled: false
min_length: 8
max_length: 0
require_uppercase: true
require_lowercase: true
require_number: true
require_special: true
## zxcvbn is a well known and used password strength algorithm. It does not have tunable settings.
zxcvbn:
enabled: false
min_score: 3
regulation:
max_retries: 3
find_time: 2m
ban_time: 5m
session:
name: authelia_session
secret: [REDACTED]
expiration: 60m
inactivity: 15m
cookies:
- domain: 'MYDOMAIN1.LTD'
authelia_url: 'https://auth.MYDOMAIN1.LTD'
name: 'authelia_session'
default_redirection_url: 'https://MYDOMAIN1.LTD'
- domain: 'MYDOMAIN2.LTD'
authelia_url: 'https://auth.MYDOMAIN2.LTD'
name: 'authelia_session_other'
default_redirection_url: 'https://MYDOMAIN2.LTD'
storage:
encryption_key: [REDACTED]
local:
path: /config/db.sqlite3
notifier:
disable_startup_check: true
smtp:
address: MYOTHERDOMAIN.LTD:465
timeout: 5s
username: "USER@DOMAIN"
password: "[REDACTED]"
sender: "Authelia <postmaster@MYOTHERDOMAIN.LTD>"
identifier: NAME@MYOTHERDOMAIN.LTD
subject: "[Authelia] {title}"
startup_check_address: postmaster@MYOTHERDOMAIN.LTD
```

View File

@@ -63,6 +63,7 @@ There is also an in-app Help / FAQ section that should be answering frequently a
- [Version history (legacy)](/docs/VERSIONS_HISTORY.md) - [Version history (legacy)](/docs/VERSIONS_HISTORY.md)
- [Reverse proxy (Nginx, Apache, SWAG)](/docs/REVERSE_PROXY.md) - [Reverse proxy (Nginx, Apache, SWAG)](/docs/REVERSE_PROXY.md)
- [Setting up Authelia](/docs/AUTHELIA.md) (DRAFT)
#### 👩💻For Developers👨💻 #### 👩💻For Developers👨💻

View File

@@ -630,17 +630,11 @@ function debugTimer () {
// ----------------------------------------------------------------------------- // -----------------------------------------------------------------------------
function secondsSincePageLoad() { function secondsSincePageLoad() {
// Get the current time // Get the current time since the page was loaded
var currentTime = Date.now(); var timeSincePageLoad = performance.now();
// Get the time when the page was loaded
var pageLoadTime = performance.timeOrigin;
// Calculate the difference in milliseconds
var timeDifference = currentTime - pageLoadTime;
// Convert milliseconds to seconds // Convert milliseconds to seconds
var secondsAgo = Math.floor(timeDifference / 1000); var secondsAgo = Math.floor(timeSincePageLoad / 1000);
return secondsAgo; return secondsAgo;
} }

View File

@@ -295,6 +295,7 @@ function checkNotification() {
console.log(response); console.log(response);
// After marking the notification as read, check for the next one // After marking the notification as read, check for the next one
checkNotification(); checkNotification();
hideSpinner();
}, },
error: function(xhr, status, error) { error: function(xhr, status, error) {
console.error("Error marking notification as read:", status, error); console.error("Error marking notification as read:", status, error);

View File

@@ -56,6 +56,8 @@
"BackDevices_Restore_okay": "", "BackDevices_Restore_okay": "",
"BackDevices_darkmode_disabled": "", "BackDevices_darkmode_disabled": "",
"BackDevices_darkmode_enabled": "", "BackDevices_darkmode_enabled": "",
"CLEAR_NEW_FLAG_description": "",
"CLEAR_NEW_FLAG_name": "",
"DAYS_TO_KEEP_EVENTS_description": "", "DAYS_TO_KEEP_EVENTS_description": "",
"DAYS_TO_KEEP_EVENTS_name": "", "DAYS_TO_KEEP_EVENTS_name": "",
"DevDetail_Copy_Device_Title": "", "DevDetail_Copy_Device_Title": "",

View File

@@ -68,6 +68,8 @@
"BackDevices_Restore_okay": "Die Wiederherstellung wurde erfolgreich ausgeführt.", "BackDevices_Restore_okay": "Die Wiederherstellung wurde erfolgreich ausgeführt.",
"BackDevices_darkmode_disabled": "Heller Modus aktiviert.", "BackDevices_darkmode_disabled": "Heller Modus aktiviert.",
"BackDevices_darkmode_enabled": "Dunkler Modus aktiviert.", "BackDevices_darkmode_enabled": "Dunkler Modus aktiviert.",
"CLEAR_NEW_FLAG_description": "",
"CLEAR_NEW_FLAG_name": "",
"DAYS_TO_KEEP_EVENTS_description": "Dies ist eine Wartungseinstellung. Spezifiziert wie viele Tage Events gespeichert bleiben. Alle älteren Events werden periodisch gelöscht. Wird auch auf die Plugins History angewendet.", "DAYS_TO_KEEP_EVENTS_description": "Dies ist eine Wartungseinstellung. Spezifiziert wie viele Tage Events gespeichert bleiben. Alle älteren Events werden periodisch gelöscht. Wird auch auf die Plugins History angewendet.",
"DAYS_TO_KEEP_EVENTS_name": "Lösche Events älter als", "DAYS_TO_KEEP_EVENTS_name": "Lösche Events älter als",
"DevDetail_Copy_Device_Title": "<i class=\"fa fa-copy\"></i> Details von Gerät kopieren", "DevDetail_Copy_Device_Title": "<i class=\"fa fa-copy\"></i> Details von Gerät kopieren",

View File

@@ -56,6 +56,8 @@
"BackDevices_Restore_okay": "Restore executed successfully.", "BackDevices_Restore_okay": "Restore executed successfully.",
"BackDevices_darkmode_disabled": "Darkmode Disabled", "BackDevices_darkmode_disabled": "Darkmode Disabled",
"BackDevices_darkmode_enabled": "Darkmode Enabled", "BackDevices_darkmode_enabled": "Darkmode Enabled",
"CLEAR_NEW_FLAG_description": "If enabled (<code>0</code> is disabled), devices flagged as <b>New Device</b> will be unflagged if the time limit (specified in hours) exceeds their <b>First Session</b> time.",
"CLEAR_NEW_FLAG_name": "Clear new flag",
"DAYS_TO_KEEP_EVENTS_description": "This is a maintenance setting. This specifies the number of days worth of event entries that will be kept. All older events will be deleted periodically. Also applies on Plugin Events History.", "DAYS_TO_KEEP_EVENTS_description": "This is a maintenance setting. This specifies the number of days worth of event entries that will be kept. All older events will be deleted periodically. Also applies on Plugin Events History.",
"DAYS_TO_KEEP_EVENTS_name": "Delete events older than", "DAYS_TO_KEEP_EVENTS_name": "Delete events older than",
"DevDetail_Copy_Device_Title": "<i class=\"fa fa-copy\"></i> Copy details from device", "DevDetail_Copy_Device_Title": "<i class=\"fa fa-copy\"></i> Copy details from device",
@@ -303,8 +305,8 @@
"Gen_Work_In_Progress": "Work in progress, good time to feedback on https://github.com/jokob-sk/NetAlertX/issues", "Gen_Work_In_Progress": "Work in progress, good time to feedback on https://github.com/jokob-sk/NetAlertX/issues",
"General_display_name": "General", "General_display_name": "General",
"General_icon": "<i class=\"fa fa-gears\"></i>", "General_icon": "<i class=\"fa fa-gears\"></i>",
"HRS_TO_KEEP_NEWDEV_description": "This is a maintenance setting. If enabled (<code>0</code> is disabled), devices marked as <b>New Device</b> will be deleted if their <b>First Session</b> time was older than the specified hours in this setting. Use this setting if you want to auto-delete <b>New Devices</b> after <code>X</code> hours.", "HRS_TO_KEEP_NEWDEV_description": "This is a maintenance setting <b>DELETING devices</b>. If enabled (<code>0</code> is disabled), devices marked as <b>New Device</b> will be deleted if their <b>First Session</b> time was older than the specified hours in this setting. Use this setting if you want to auto-delete <b>New Devices</b> after <code>X</code> hours.",
"HRS_TO_KEEP_NEWDEV_name": "Keep new devices for", "HRS_TO_KEEP_NEWDEV_name": "Delete new devices after",
"HelpFAQ_Cat_Detail": "Details", "HelpFAQ_Cat_Detail": "Details",
"HelpFAQ_Cat_Detail_300_head": "What means ", "HelpFAQ_Cat_Detail_300_head": "What means ",
"HelpFAQ_Cat_Detail_300_text_a": "means a network device (a device of the type AP, Gateway, Firewall, Hypervisor, Powerline, Switch, WLAN, PLC, Router,USB LAN Adapter, USB WIFI Adapter, or Internet). Custom types can be added via the <code>NETWORK_DEVICE_TYPES</code> setting.", "HelpFAQ_Cat_Detail_300_text_a": "means a network device (a device of the type AP, Gateway, Firewall, Hypervisor, Powerline, Switch, WLAN, PLC, Router,USB LAN Adapter, USB WIFI Adapter, or Internet). Custom types can be added via the <code>NETWORK_DEVICE_TYPES</code> setting.",

View File

@@ -66,6 +66,8 @@
"BackDevices_Restore_okay": "Restauración ejecutado con éxito.", "BackDevices_Restore_okay": "Restauración ejecutado con éxito.",
"BackDevices_darkmode_disabled": "Darkmode Desactivado", "BackDevices_darkmode_disabled": "Darkmode Desactivado",
"BackDevices_darkmode_enabled": "Darkmode Activado", "BackDevices_darkmode_enabled": "Darkmode Activado",
"CLEAR_NEW_FLAG_description": "",
"CLEAR_NEW_FLAG_name": "",
"DAYS_TO_KEEP_EVENTS_description": "Esta es una configuración de mantenimiento. Esto especifica el número de días de entradas de eventos que se guardarán. Todos los eventos anteriores se eliminarán periódicamente.", "DAYS_TO_KEEP_EVENTS_description": "Esta es una configuración de mantenimiento. Esto especifica el número de días de entradas de eventos que se guardarán. Todos los eventos anteriores se eliminarán periódicamente.",
"DAYS_TO_KEEP_EVENTS_name": "Eliminar eventos anteriores a", "DAYS_TO_KEEP_EVENTS_name": "Eliminar eventos anteriores a",
"DevDetail_Copy_Device_Title": "<i class=\"fa fa-copy\"></i> Copiar detalles del dispositivo", "DevDetail_Copy_Device_Title": "<i class=\"fa fa-copy\"></i> Copiar detalles del dispositivo",
@@ -284,7 +286,7 @@
"Gen_AreYouSure": "¿Estás seguro?", "Gen_AreYouSure": "¿Estás seguro?",
"Gen_Backup": "Ejecutar copia de seguridad", "Gen_Backup": "Ejecutar copia de seguridad",
"Gen_Cancel": "Cancelar", "Gen_Cancel": "Cancelar",
"Gen_Change": "", "Gen_Change": "Cambiar",
"Gen_Copy": "Ejecutar", "Gen_Copy": "Ejecutar",
"Gen_DataUpdatedUITakesTime": "Correcto - La interfaz puede tardar en actualizarse si se está ejecutando un escaneo.", "Gen_DataUpdatedUITakesTime": "Correcto - La interfaz puede tardar en actualizarse si se está ejecutando un escaneo.",
"Gen_Delete": "Eliminar", "Gen_Delete": "Eliminar",
@@ -724,8 +726,8 @@
"UI_PRESENCE_name": "Mostrar en el gráfico de presencia", "UI_PRESENCE_name": "Mostrar en el gráfico de presencia",
"UI_REFRESH_description": "Ingrese el número de segundos después de los cuales se recarga la interfaz de usuario. Ajustado a <code> 0 </code> para desactivar.", "UI_REFRESH_description": "Ingrese el número de segundos después de los cuales se recarga la interfaz de usuario. Ajustado a <code> 0 </code> para desactivar.",
"UI_REFRESH_name": "Actualización automática de la interfaz de usuario", "UI_REFRESH_name": "Actualización automática de la interfaz de usuario",
"VERSION_description": "", "VERSION_description": "Valor de ayuda de versión o marca de tiempo para comprobar si la aplicación se ha actualizado.",
"VERSION_name": "", "VERSION_name": "Versión o marca de tiempo",
"WEBHOOK_PAYLOAD_description": "El formato de datos de carga de Webhook para el atributo <code>body</code> > <code>attachments</code> > <code>text</code> en el json de carga. Vea un ejemplo de la carga <a target=\"_blank\" href=\"https://github.com/jokob-sk/NetAlertX/blob/main/front/report_templates/webhook_json_sample.json\">aquí</a>. (por ejemplo: para discord use <code>text</code>)", "WEBHOOK_PAYLOAD_description": "El formato de datos de carga de Webhook para el atributo <code>body</code> > <code>attachments</code> > <code>text</code> en el json de carga. Vea un ejemplo de la carga <a target=\"_blank\" href=\"https://github.com/jokob-sk/NetAlertX/blob/main/front/report_templates/webhook_json_sample.json\">aquí</a>. (por ejemplo: para discord use <code>text</code>)",
"WEBHOOK_PAYLOAD_name": "Tipo de carga", "WEBHOOK_PAYLOAD_name": "Tipo de carga",
"WEBHOOK_REQUEST_METHOD_description": "El método de solicitud HTTP que se utilizará para la llamada de webhook.", "WEBHOOK_REQUEST_METHOD_description": "El método de solicitud HTTP que se utilizará para la llamada de webhook.",

View File

@@ -56,6 +56,8 @@
"BackDevices_Restore_okay": "Restauration exécutée avec succès.", "BackDevices_Restore_okay": "Restauration exécutée avec succès.",
"BackDevices_darkmode_disabled": "Mode sombre désactivé", "BackDevices_darkmode_disabled": "Mode sombre désactivé",
"BackDevices_darkmode_enabled": "Mode sombre activé", "BackDevices_darkmode_enabled": "Mode sombre activé",
"CLEAR_NEW_FLAG_description": "",
"CLEAR_NEW_FLAG_name": "",
"DAYS_TO_KEEP_EVENTS_description": "Il s'agit d'un paramètre de maintenance. Il indique le nombre de jours pendant lesquels les entrées d'événements seront conservées. Tous les événements plus anciens seront supprimés périodiquement. S'applique également à l'historique des événements du plugin.", "DAYS_TO_KEEP_EVENTS_description": "Il s'agit d'un paramètre de maintenance. Il indique le nombre de jours pendant lesquels les entrées d'événements seront conservées. Tous les événements plus anciens seront supprimés périodiquement. S'applique également à l'historique des événements du plugin.",
"DAYS_TO_KEEP_EVENTS_name": "Supprimer les événements plus anciens que", "DAYS_TO_KEEP_EVENTS_name": "Supprimer les événements plus anciens que",
"DevDetail_Copy_Device_Title": "<i class=\"fa fa-copy\"></i> Copier les détails de l'appareil", "DevDetail_Copy_Device_Title": "<i class=\"fa fa-copy\"></i> Copier les détails de l'appareil",

View File

@@ -56,6 +56,8 @@
"BackDevices_Restore_okay": "Ripristino eseguito correttamente.", "BackDevices_Restore_okay": "Ripristino eseguito correttamente.",
"BackDevices_darkmode_disabled": "Modalità scura disabilitata", "BackDevices_darkmode_disabled": "Modalità scura disabilitata",
"BackDevices_darkmode_enabled": "Modalità scura abilitata", "BackDevices_darkmode_enabled": "Modalità scura abilitata",
"CLEAR_NEW_FLAG_description": "",
"CLEAR_NEW_FLAG_name": "",
"DAYS_TO_KEEP_EVENTS_description": "Questa è un'impostazione di manutenzione. Specifica il numero di giorni delle voci degli eventi che verranno conservati. Tutti gli eventi più vecchi verranno eliminati periodicamente. Si applica anche alla cronologia degli eventi del plugin (Plugin Events History).", "DAYS_TO_KEEP_EVENTS_description": "Questa è un'impostazione di manutenzione. Specifica il numero di giorni delle voci degli eventi che verranno conservati. Tutti gli eventi più vecchi verranno eliminati periodicamente. Si applica anche alla cronologia degli eventi del plugin (Plugin Events History).",
"DAYS_TO_KEEP_EVENTS_name": "Elimina eventi più vecchi di", "DAYS_TO_KEEP_EVENTS_name": "Elimina eventi più vecchi di",
"DevDetail_Copy_Device_Title": "<i class=\"fa fa-copy\"></i> Copia dettagli dal dispositivo", "DevDetail_Copy_Device_Title": "<i class=\"fa fa-copy\"></i> Copia dettagli dal dispositivo",

View File

@@ -56,6 +56,8 @@
"BackDevices_Restore_okay": "Gjenoppretting utført.", "BackDevices_Restore_okay": "Gjenoppretting utført.",
"BackDevices_darkmode_disabled": "Mørk modus Deaktivert", "BackDevices_darkmode_disabled": "Mørk modus Deaktivert",
"BackDevices_darkmode_enabled": "Mørk modus Aktivert", "BackDevices_darkmode_enabled": "Mørk modus Aktivert",
"CLEAR_NEW_FLAG_description": "",
"CLEAR_NEW_FLAG_name": "",
"DAYS_TO_KEEP_EVENTS_description": "Dette er en vedlikeholdsinnstilling. Dette spesifiserer antall dager verdt med hendelsesoppføringer som vil beholdes. Alle eldre hendelser vil bli slettet med jevne mellomrom. Gjelder også for plugin-hendelseshistorikk.", "DAYS_TO_KEEP_EVENTS_description": "Dette er en vedlikeholdsinnstilling. Dette spesifiserer antall dager verdt med hendelsesoppføringer som vil beholdes. Alle eldre hendelser vil bli slettet med jevne mellomrom. Gjelder også for plugin-hendelseshistorikk.",
"DAYS_TO_KEEP_EVENTS_name": "Slett hendelser eldre enn", "DAYS_TO_KEEP_EVENTS_name": "Slett hendelser eldre enn",
"DevDetail_Copy_Device_Title": "<i class=\"fa fa-copy\"></i> Kopier detaljer fra enhet", "DevDetail_Copy_Device_Title": "<i class=\"fa fa-copy\"></i> Kopier detaljer fra enhet",

View File

@@ -56,6 +56,8 @@
"BackDevices_Restore_okay": "Przywracanie wykonane z sukcesem.", "BackDevices_Restore_okay": "Przywracanie wykonane z sukcesem.",
"BackDevices_darkmode_disabled": "Tryb ciemny Wyłączony", "BackDevices_darkmode_disabled": "Tryb ciemny Wyłączony",
"BackDevices_darkmode_enabled": "Tryb ciemny Włączony", "BackDevices_darkmode_enabled": "Tryb ciemny Włączony",
"CLEAR_NEW_FLAG_description": "",
"CLEAR_NEW_FLAG_name": "",
"DAYS_TO_KEEP_EVENTS_description": "To jest ustawienie konserwacji. Określa ile dni mają być utrzymywane wpisy wydarzeń. Wszystkie starsze wpisy wydarzeń zostaną usunięte okresowo. Dotyczy także Historii Wydarzeń Pluginów.", "DAYS_TO_KEEP_EVENTS_description": "To jest ustawienie konserwacji. Określa ile dni mają być utrzymywane wpisy wydarzeń. Wszystkie starsze wpisy wydarzeń zostaną usunięte okresowo. Dotyczy także Historii Wydarzeń Pluginów.",
"DAYS_TO_KEEP_EVENTS_name": "Usuń wydarzenia starsze niż", "DAYS_TO_KEEP_EVENTS_name": "Usuń wydarzenia starsze niż",
"DevDetail_Copy_Device_Title": "<i class=\"fa fa-copy\"></i>Kopiuj opis z urządzenia", "DevDetail_Copy_Device_Title": "<i class=\"fa fa-copy\"></i>Kopiuj opis z urządzenia",

View File

@@ -56,6 +56,8 @@
"BackDevices_Restore_okay": "Restauração executada com sucesso.", "BackDevices_Restore_okay": "Restauração executada com sucesso.",
"BackDevices_darkmode_disabled": "Modo Noturno Desabilitado", "BackDevices_darkmode_disabled": "Modo Noturno Desabilitado",
"BackDevices_darkmode_enabled": "Modo Noturno Habilitado", "BackDevices_darkmode_enabled": "Modo Noturno Habilitado",
"CLEAR_NEW_FLAG_description": "",
"CLEAR_NEW_FLAG_name": "",
"DAYS_TO_KEEP_EVENTS_description": "Esta é uma definição de manutenção. Especifica o número de dias de entradas de eventos que serão mantidas. Todos os eventos mais antigos serão eliminados periodicamente. Também se aplica ao Histórico de eventos do plug-in.", "DAYS_TO_KEEP_EVENTS_description": "Esta é uma definição de manutenção. Especifica o número de dias de entradas de eventos que serão mantidas. Todos os eventos mais antigos serão eliminados periodicamente. Também se aplica ao Histórico de eventos do plug-in.",
"DAYS_TO_KEEP_EVENTS_name": "Excluir eventos mais antigos que", "DAYS_TO_KEEP_EVENTS_name": "Excluir eventos mais antigos que",
"DevDetail_Copy_Device_Title": "<i class=\"fa fa-copy\"></i> Copiar detalhes do dispositivo", "DevDetail_Copy_Device_Title": "<i class=\"fa fa-copy\"></i> Copiar detalhes do dispositivo",

View File

@@ -56,6 +56,8 @@
"BackDevices_Restore_okay": "Восстановление выполнено успешно.", "BackDevices_Restore_okay": "Восстановление выполнено успешно.",
"BackDevices_darkmode_disabled": "Темный режим отключен", "BackDevices_darkmode_disabled": "Темный режим отключен",
"BackDevices_darkmode_enabled": "Темный режим включен", "BackDevices_darkmode_enabled": "Темный режим включен",
"CLEAR_NEW_FLAG_description": "",
"CLEAR_NEW_FLAG_name": "",
"DAYS_TO_KEEP_EVENTS_description": "Это настройка обслуживания. Здесь указывается количество дней, в течение которых будут храниться записи о событиях. Все старые события будут периодически удаляться. Также применимо к истории событий плагина.", "DAYS_TO_KEEP_EVENTS_description": "Это настройка обслуживания. Здесь указывается количество дней, в течение которых будут храниться записи о событиях. Все старые события будут периодически удаляться. Также применимо к истории событий плагина.",
"DAYS_TO_KEEP_EVENTS_name": "Удалить события старше", "DAYS_TO_KEEP_EVENTS_name": "Удалить события старше",
"DevDetail_Copy_Device_Title": "<i class=\"fa fa-copy\"></i> Скопировать данные с устройства", "DevDetail_Copy_Device_Title": "<i class=\"fa fa-copy\"></i> Скопировать данные с устройства",

View File

@@ -56,6 +56,8 @@
"BackDevices_Restore_okay": "", "BackDevices_Restore_okay": "",
"BackDevices_darkmode_disabled": "", "BackDevices_darkmode_disabled": "",
"BackDevices_darkmode_enabled": "", "BackDevices_darkmode_enabled": "",
"CLEAR_NEW_FLAG_description": "",
"CLEAR_NEW_FLAG_name": "",
"DAYS_TO_KEEP_EVENTS_description": "", "DAYS_TO_KEEP_EVENTS_description": "",
"DAYS_TO_KEEP_EVENTS_name": "", "DAYS_TO_KEEP_EVENTS_name": "",
"DevDetail_Copy_Device_Title": "", "DevDetail_Copy_Device_Title": "",

View File

@@ -56,6 +56,8 @@
"BackDevices_Restore_okay": "已成功恢复。", "BackDevices_Restore_okay": "已成功恢复。",
"BackDevices_darkmode_disabled": "暗黑模式已禁用", "BackDevices_darkmode_disabled": "暗黑模式已禁用",
"BackDevices_darkmode_enabled": "已启用暗黑模式", "BackDevices_darkmode_enabled": "已启用暗黑模式",
"CLEAR_NEW_FLAG_description": "",
"CLEAR_NEW_FLAG_name": "",
"DAYS_TO_KEEP_EVENTS_description": "这是维护设置。它指定将保留的事件条目的天数。所有较旧的事件将被定期删除。也适用于插件事件历史记录。", "DAYS_TO_KEEP_EVENTS_description": "这是维护设置。它指定将保留的事件条目的天数。所有较旧的事件将被定期删除。也适用于插件事件历史记录。",
"DAYS_TO_KEEP_EVENTS_name": "删除早于", "DAYS_TO_KEEP_EVENTS_name": "删除早于",
"DevDetail_Copy_Device_Title": "<i class=\"fa fa-copy\"></i> 从设备复制详细信息", "DevDetail_Copy_Device_Title": "<i class=\"fa fa-copy\"></i> 从设备复制详细信息",

View File

@@ -44,12 +44,13 @@ def main():
HRS_TO_KEEP_NEWDEV = int(values.hourstokeepnewdevice.split('=')[1]) HRS_TO_KEEP_NEWDEV = int(values.hourstokeepnewdevice.split('=')[1])
DAYS_TO_KEEP_EVENTS = int(values.daystokeepevents.split('=')[1]) DAYS_TO_KEEP_EVENTS = int(values.daystokeepevents.split('=')[1])
PHOLUS_DAYS_DATA = get_setting_value("PHOLUS_DAYS_DATA") PHOLUS_DAYS_DATA = get_setting_value("PHOLUS_DAYS_DATA")
CLEAR_NEW_FLAG = get_setting_value("CLEAR_NEW_FLAG")
mylog('verbose', [f'[{pluginName}] In script']) mylog('verbose', [f'[{pluginName}] In script'])
# Execute cleanup/upkeep # Execute cleanup/upkeep
cleanup_database(fullDbPath, DAYS_TO_KEEP_EVENTS, PHOLUS_DAYS_DATA, HRS_TO_KEEP_NEWDEV, PLUGINS_KEEP_HIST) cleanup_database(fullDbPath, DAYS_TO_KEEP_EVENTS, PHOLUS_DAYS_DATA, HRS_TO_KEEP_NEWDEV, PLUGINS_KEEP_HIST, CLEAR_NEW_FLAG)
mylog('verbose', [f'[{pluginName}] Cleanup complete']) mylog('verbose', [f'[{pluginName}] Cleanup complete'])
@@ -58,7 +59,7 @@ def main():
#=============================================================================== #===============================================================================
# Cleanup / upkeep database # Cleanup / upkeep database
#=============================================================================== #===============================================================================
def cleanup_database (dbPath, DAYS_TO_KEEP_EVENTS, PHOLUS_DAYS_DATA, HRS_TO_KEEP_NEWDEV, PLUGINS_KEEP_HIST): def cleanup_database (dbPath, DAYS_TO_KEEP_EVENTS, PHOLUS_DAYS_DATA, HRS_TO_KEEP_NEWDEV, PLUGINS_KEEP_HIST, CLEAR_NEW_FLAG):
""" """
Cleaning out old records from the tables that don't need to keep all data. Cleaning out old records from the tables that don't need to keep all data.
""" """
@@ -151,6 +152,17 @@ def cleanup_database (dbPath, DAYS_TO_KEEP_EVENTS, PHOLUS_DAYS_DATA, HRS_TO_KEEP
mylog('verbose', [f'[{pluginName}] Query: {query} ']) mylog('verbose', [f'[{pluginName}] Query: {query} '])
cursor.execute (query) cursor.execute (query)
# -----------------------------------------------------
# Clear New Flag
if CLEAR_NEW_FLAG != 0:
mylog('verbose', [f'[{pluginName}] Devices: Clear "New Device" flag for all devices older than {str(CLEAR_NEW_FLAG)} hours (CLEAR_NEW_FLAG setting)'])
query = f"""UPDATE Devices SET dev_NewDevice = 0 WHERE dev_NewDevice = 1 AND date(dev_FirstConnection, '+{str(CLEAR_NEW_FLAG)} hour') < date('now')"""
# select * from Devices where dev_NewDevice = 1 AND date(dev_FirstConnection, '+3 hour' ) < date('now')
mylog('verbose', [f'[{pluginName}] Query: {query} '])
cursor.execute(query)
# ----------------------------------------------------- # -----------------------------------------------------
# Cleanup Pholus_Scan # Cleanup Pholus_Scan
if PHOLUS_DAYS_DATA != "" and PHOLUS_DAYS_DATA != 0: if PHOLUS_DAYS_DATA != "" and PHOLUS_DAYS_DATA != 0:

View File

@@ -1057,7 +1057,7 @@ def main():
elif values.rdns_scanning: elif values.rdns_scanning:
file_print_pr("[DEBUG] Timestamp 45: ", timeNow()) file_print_pr("[DEBUG] Timestamp 45: ", timeNow())
dns_query=None dns_query=None
ipn = ipaddress.ip_network(values.rdns_scanning) ipn = ipaddress.ip_network(values.rdns_scanning, strict=False)
for ip in ipn.hosts(): for ip in ipn.hosts():
the_query = ip.reverse_pointer the_query = ip.reverse_pointer
if not dns_query: if not dns_query:

View File

@@ -159,6 +159,81 @@
"string": "Encryption key used to encrypt the data before sending and for decryption on the hub. The key needs to be the same on the hub and on the nodes." "string": "Encryption key used to encrypt the data before sending and for decryption on the hub. The key needs to be the same on the hub and on the nodes."
} }
] ]
},{
"function": "nodes",
"type": {
"dataType": "array",
"elements": [
{
"elementType": "input",
"elementOptions": [
{ "placeholder": "Enter full url" },
{ "suffix": "_in" },
{ "cssClasses": "col-sm-10" },
{ "prefillValue": "null" }
],
"transformers": []
},
{
"elementType": "button",
"elementOptions": [
{ "sourceSuffixes": ["_in"] },
{ "separator": "" },
{ "cssClasses": "col-xs-12" },
{ "onClick": "addList(this, false)" },
{ "getStringKey": "Gen_Add" }
],
"transformers": []
},
{
"elementType": "select",
"elementHasInputValue": 1,
"elementOptions": [
{ "multiple": "true" },
{ "readonly": "true" },
{ "editable": "true" }
],
"transformers": []
},
{
"elementType": "button",
"elementOptions": [
{ "sourceSuffixes": [] },
{ "separator": "" },
{ "cssClasses": "col-xs-6" },
{ "onClick": "removeAllOptions(this)" },
{ "getStringKey": "Gen_Remove_All" }
],
"transformers": []
},
{
"elementType": "button",
"elementOptions": [
{ "sourceSuffixes": [] },
{ "separator": "" },
{ "cssClasses": "col-xs-6" },
{ "onClick": "removeFromList(this)" },
{ "getStringKey": "Gen_Remove_Last" }
],
"transformers": []
}
]
},
"default_value": [],
"options": [],
"localized": ["name", "description"],
"name": [
{
"language_code": "en_us",
"string": "Nodes [h]"
}
],
"description": [
{
"language_code": "en_us",
"string": "If specified, the hub will pull Devices data from the listed nodes."
}
]
}, },
{ {
"function": "hub_url", "function": "hub_url",

View File

@@ -3,8 +3,11 @@
// External files // External files
require '/app/front/php/server/init.php'; require '/app/front/php/server/init.php';
$method = $_SERVER['REQUEST_METHOD'];
if ($_SERVER['REQUEST_METHOD'] === 'POST') { // ----------------------------------------------
// Method to check authorization
function checkAuthorization($method) {
// Retrieve the authorization header // Retrieve the authorization header
$headers = apache_request_headers(); $headers = apache_request_headers();
$auth_header = $headers['Authorization'] ?? ''; $auth_header = $headers['Authorization'] ?? '';
@@ -14,16 +17,56 @@ if ($_SERVER['REQUEST_METHOD'] === 'POST') {
if ($auth_header !== $expected_token) { if ($auth_header !== $expected_token) {
http_response_code(403); http_response_code(403);
echo 'Forbidden'; echo 'Forbidden';
write_notification("[Plugin: SYNC] Incoming data: Incorrect API Token", "alert"); write_notification("[Plugin: SYNC] Incoming data: Incorrect API Token (".$method.")", "alert");
exit; exit;
} }
}
// ----------------------------------------------
// Function to return JSON response
function jsonResponse($status, $data = '', $message = '') {
http_response_code($status);
header('Content-Type: application/json');
echo json_encode([
'node_name' => getSettingValue('SYNC_node_name'),
'status' => $status,
'message' => $message,
'data_base64' => $data,
'timestamp' => date('Y-m-d H:i:s')
]);
}
// ----------------------------------------------
// MAIN
// ----------------------------------------------
// requesting data (this is a NODE)
if ($method === 'GET') {
checkAuthorization($method);
$file_path = "/app/front/api/table_devices.json";
$data = file_get_contents($file_path);
// Prepare the data to return as a JSON response
$response_data = base64_encode($data);
// Return JSON response
jsonResponse(200, $response_data, 'OK');
write_notification("[Plugin: SYNC] Data sent", "info");
}
// receiving data (this is a HUB)
else if ($method === 'POST') {
checkAuthorization($method);
// Retrieve and decode the data from the POST request // Retrieve and decode the data from the POST request
$data = $_POST['data'] ?? ''; $data = $_POST['data'] ?? '';
$plugin_folder = $_POST['plugin_folder'] ?? ''; $plugin_folder = $_POST['plugin_folder'] ?? '';
$node_name = $_POST['node_name'] ?? ''; $node_name = $_POST['node_name'] ?? '';
$storage_path = "/app/front/plugins/{$plugin_folder}"; $storage_path = "/app/front/plugins/{$plugin_folder}";
// Create the storage directory if it doesn't exist // Create the storage directory if it doesn't exist
@@ -43,12 +86,12 @@ if ($_SERVER['REQUEST_METHOD'] === 'POST') {
$file_path = "{$storage_path}/last_result.encoded.{$node_name}.{$file_count}.log"; $file_path = "{$storage_path}/last_result.encoded.{$node_name}.{$file_count}.log";
// Save the decoded data to the file // Save the decoded data to the file
file_put_contents($file_path, $data); file_put_contents($file_path, $data);
http_response_code(200); http_response_code(200);
echo 'Data received and stored successfully'; echo 'Data received and stored successfully';
write_notification("[Plugin: SYNC] Data received ({$plugin_folder})", "info"); write_notification("[Plugin: SYNC] Data received ({$plugin_folder})", "info");
} else { } else {
http_response_code(405); http_response_code(405);
echo 'Method Not Allowed'; echo 'Method Not Allowed';

View File

@@ -7,6 +7,7 @@ import hashlib
import requests import requests
import json import json
import sqlite3 import sqlite3
import base64
# Define the installation path and extend the system path for plugin imports # Define the installation path and extend the system path for plugin imports
@@ -46,24 +47,66 @@ def main():
hub_url = get_setting_value('SYNC_hub_url') hub_url = get_setting_value('SYNC_hub_url')
node_name = get_setting_value('SYNC_node_name') node_name = get_setting_value('SYNC_node_name')
send_devices = get_setting_value('SYNC_devices') send_devices = get_setting_value('SYNC_devices')
pull_nodes = get_setting_value('SYNC_nodes')
# variables to determine operation mode
is_hub = False
is_node = False
# Check if api_token set
if not api_token:
mylog('verbose', [f'[{pluginName}] ⚠ ERROR api_token not defined - quitting.'])
return -1
# Get all plugin configurations # check if this is a hub or a node
all_plugins = get_plugins_configs() if len(hub_url) > 0 and (send_devices or plugins_to_sync):
is_node = True
mylog('verbose', [f'[{pluginName}] Mode 1: PUSH (NODE) - This is a NODE as SYNC_hub_url, SYNC_devices or SYNC_plugins are set'])
if len(pull_nodes) > 0:
is_hub = True
mylog('verbose', [f'[{pluginName}] Mode 2: PULL (HUB) - This is a HUB as SYNC_nodes is set'])
mylog('verbose', [f'[{pluginName}] plugins_to_sync {plugins_to_sync}']) # Mode 1: PUSH/SEND (NODE)
if is_node:
# PUSHING/SENDING Plugins
# Get all plugin configurations
all_plugins = get_plugins_configs()
# Plugins processing mylog('verbose', [f'[{pluginName}] plugins_to_sync {plugins_to_sync}'])
index = 0
for plugin in all_plugins: for plugin in all_plugins:
pref = plugin["unique_prefix"] pref = plugin["unique_prefix"]
if pref in plugins_to_sync: index = 0
index += 1 if pref in plugins_to_sync:
mylog('verbose', [f'[{pluginName}] synching "{pref}" ({index}/{len(plugins_to_sync)})']) index += 1
mylog('verbose', [f'[{pluginName}] synching "{pref}" ({index}/{len(plugins_to_sync)})'])
# Construct the file path for the plugin's last_result.log file # Construct the file path for the plugin's last_result.log file
plugin_folder = plugin["code_name"] plugin_folder = plugin["code_name"]
file_path = f"{INSTALL_PATH}/front/plugins/{plugin_folder}/last_result.log" file_path = f"{INSTALL_PATH}/front/plugins/{plugin_folder}/last_result.log"
if os.path.exists(file_path):
# Read the content of the log file
with open(file_path, 'r') as f:
file_content = f.read()
mylog('verbose', [f'[{pluginName}] Sending file_content: "{file_content}"'])
# encrypt and send data to the hub
send_data(api_token, file_content, encryption_key, plugin_folder, node_name, pref, hub_url)
else:
mylog('verbose', [f'[{pluginName}] {plugin_folder}/last_result.log not found'])
# PUSHING/SENDING devices
if send_devices:
file_path = f"{INSTALL_PATH}/front/api/table_devices.json"
plugin_folder = 'sync'
pref = 'SYNC'
if os.path.exists(file_path): if os.path.exists(file_path):
# Read the content of the log file # Read the content of the log file
@@ -71,131 +114,147 @@ def main():
file_content = f.read() file_content = f.read()
mylog('verbose', [f'[{pluginName}] Sending file_content: "{file_content}"']) mylog('verbose', [f'[{pluginName}] Sending file_content: "{file_content}"'])
# encrypt and send data to the hub
send_data(api_token, file_content, encryption_key, plugin_folder, node_name, pref, hub_url) send_data(api_token, file_content, encryption_key, plugin_folder, node_name, pref, hub_url)
else:
mylog('verbose', [f'[{pluginName}] SYNC_hub_url not defined, skipping posting "Devices" data'])
else:
mylog('verbose', [f'[{pluginName}] SYNC_hub_url not defined, skipping posting "Plugins" and "Devices" data'])
else: # Mode 2: PULL/GET (HUB)
mylog('verbose', [f'[{pluginName}] {plugin_folder}/last_result.log not found'])
# PULLING DEVICES
# Devices procesing
if send_devices:
file_path = f"{INSTALL_PATH}/front/api/table_devices.json"
plugin_folder = 'sync'
pref = 'SYNC'
if os.path.exists(file_path):
# Read the content of the log file
with open(file_path, 'r') as f:
file_content = f.read()
mylog('verbose', [f'[{pluginName}] Sending file_content: "{file_content}"'])
send_data(api_token, file_content, encryption_key, plugin_folder, node_name, pref, hub_url)
# process any received data for the Device DB table
# Create the file path
file_dir = os.path.join(pluginsPath, 'sync') file_dir = os.path.join(pluginsPath, 'sync')
file_prefix = 'last_result' file_prefix = 'last_result'
# pull data from nodes if specified
if is_hub:
for node_url in pull_nodes:
response_json = get_data(api_token, node_url)
# Extract node_name and base64 data
node_name = response_json.get('node_name', 'unknown_node')
data_base64 = response_json.get('data_base64', '')
# Decode base64 data
decoded_data = base64.b64decode(data_base64)
# Create log file name using node name
log_file_name = f'{file_prefix}.{node_name}.log'
# Write decoded data to log file
with open(os.path.join(file_dir, log_file_name), 'wb') as log_file:
log_file.write(decoded_data)
message = f'[{pluginName}] Device data from node "{node_name}" written to {log_file_name}'
mylog('verbose', [message])
write_notification(message, 'info', timeNowTZ())
# Process any received data for the Device DB table
# Create the file path
# Decode files, rename them, and get the list of files # Decode files, rename them, and get the list of files
files_to_process = decode_and_rename_files(file_dir, file_prefix) files_to_process = decode_and_rename_files(file_dir, file_prefix)
# Connect to the App database
conn = sqlite3.connect(fullDbPath)
cursor = conn.cursor()
# Collect all unique dev_MAC values from the JSON files
unique_mac_addresses = set()
device_data = []
mylog('verbose', [f'[{pluginName}] Devices files to process: "{files_to_process}"'])
for file_name in files_to_process:
# only process received .log files, skipping the one logging the progress of this plugin
if file_name != 'last_result.log':
mylog('verbose', [f'[{pluginName}] Processing: "{file_name}"'])
# Store e.g. Node_1 from last_result.encoded.Node_1.1.log
tmp_SyncHubNodeName = ''
if len(file_name.split('.')) > 3:
tmp_SyncHubNodeName = file_name.split('.')[2]
file_path = f"{INSTALL_PATH}/front/plugins/sync/{file_name}"
with open(file_path, 'r') as f:
data = json.load(f)
for device in data['data']:
if device['dev_MAC'] not in unique_mac_addresses:
device['dev_SyncHubNodeName'] = tmp_SyncHubNodeName
unique_mac_addresses.add(device['dev_MAC'])
device_data.append(device)
if len(files_to_process) > 0:
mylog('verbose', [f'[{pluginName}] Mode 3: RECEIVE (HUB) - This is a HUB as received data found'])
if len(device_data) > 0: # Connect to the App database
# Retrieve existing dev_MAC values from the Devices table conn = sqlite3.connect(fullDbPath)
placeholders = ', '.join('?' for _ in unique_mac_addresses) cursor = conn.cursor()
cursor.execute(f'SELECT dev_MAC FROM Devices WHERE dev_MAC IN ({placeholders})', tuple(unique_mac_addresses))
existing_mac_addresses = set(row[0] for row in cursor.fetchall())
# insert devices into the lats_result.log to manage state # Collect all unique dev_MAC values from the JSON files
for device in device_data: unique_mac_addresses = set()
if device['dev_PresentLastScan'] == 1: device_data = []
plugin_objects.add_object(
primaryId = device['dev_MAC'],
secondaryId = device['dev_LastIP'],
watched1 = device['dev_Name'],
watched2 = device['dev_Vendor'],
watched3 = device['dev_SyncHubNodeName'],
watched4 = device['dev_GUID'],
extra = '',
foreignKey = device['dev_GUID'])
# Filter out existing devices mylog('verbose', [f'[{pluginName}] Devices files to process: "{files_to_process}"'])
new_devices = [device for device in device_data if device['dev_MAC'] not in existing_mac_addresses]
# Remove 'rowid' key if it exists for file_name in files_to_process:
for device in new_devices:
device.pop('rowid', None)
mylog('verbose', [f'[{pluginName}] All devices: "{len(device_data)}"']) # only process received .log files, skipping the one logging the progress of this plugin
mylog('verbose', [f'[{pluginName}] New devices: "{len(new_devices)}"']) if file_name != 'last_result.log':
mylog('verbose', [f'[{pluginName}] Processing: "{file_name}"'])
# Prepare the insert statement # Store e.g. Node_1 from last_result.encoded.Node_1.1.log
if new_devices: tmp_SyncHubNodeName = ''
if len(file_name.split('.')) > 3:
tmp_SyncHubNodeName = file_name.split('.')[2]
columns = ', '.join(k for k in new_devices[0].keys() if k != 'rowid')
placeholders = ', '.join('?' for k in new_devices[0] if k != 'rowid')
sql = f'INSERT INTO Devices ({columns}) VALUES ({placeholders})'
# Extract values for the new devices file_path = f"{INSTALL_PATH}/front/plugins/sync/{file_name}"
values = [tuple(device.values()) for device in new_devices]
with open(file_path, 'r') as f:
data = json.load(f)
for device in data['data']:
if device['dev_MAC'] not in unique_mac_addresses:
device['dev_SyncHubNodeName'] = tmp_SyncHubNodeName
unique_mac_addresses.add(device['dev_MAC'])
device_data.append(device)
mylog('verbose', [f'[{pluginName}] Inserting Devices SQL : "{sql}"']) if len(device_data) > 0:
mylog('verbose', [f'[{pluginName}] Inserting Devices VALUES: "{values}"']) # Retrieve existing dev_MAC values from the Devices table
placeholders = ', '.join('?' for _ in unique_mac_addresses)
cursor.execute(f'SELECT dev_MAC FROM Devices WHERE dev_MAC IN ({placeholders})', tuple(unique_mac_addresses))
existing_mac_addresses = set(row[0] for row in cursor.fetchall())
# Use executemany for batch insertion # insert devices into the lats_result.log to manage state
cursor.executemany(sql, values) for device in device_data:
if device['dev_PresentLastScan'] == 1:
plugin_objects.add_object(
primaryId = device['dev_MAC'],
secondaryId = device['dev_LastIP'],
watched1 = device['dev_Name'],
watched2 = device['dev_Vendor'],
watched3 = device['dev_SyncHubNodeName'],
watched4 = device['dev_GUID'],
extra = '',
foreignKey = device['dev_GUID'])
message = f'[{pluginName}] Inserted "{len(new_devices)}" new devices' # Filter out existing devices
new_devices = [device for device in device_data if device['dev_MAC'] not in existing_mac_addresses]
mylog('verbose', [message]) # Remove 'rowid' key if it exists
write_notification(message, 'info', timeNowTZ()) for device in new_devices:
device.pop('rowid', None)
# Commit and close the connection mylog('verbose', [f'[{pluginName}] All devices: "{len(device_data)}"'])
conn.commit() mylog('verbose', [f'[{pluginName}] New devices: "{len(new_devices)}"'])
conn.close()
# log result # Prepare the insert statement
plugin_objects.write_result_file() if new_devices:
columns = ', '.join(k for k in new_devices[0].keys() if k != 'rowid')
placeholders = ', '.join('?' for k in new_devices[0] if k != 'rowid')
sql = f'INSERT INTO Devices ({columns}) VALUES ({placeholders})'
# Extract values for the new devices
values = [tuple(device.values()) for device in new_devices]
mylog('verbose', [f'[{pluginName}] Inserting Devices SQL : "{sql}"'])
mylog('verbose', [f'[{pluginName}] Inserting Devices VALUES: "{values}"'])
# Use executemany for batch insertion
cursor.executemany(sql, values)
message = f'[{pluginName}] Inserted "{len(new_devices)}" new devices'
mylog('verbose', [message])
write_notification(message, 'info', timeNowTZ())
# Commit and close the connection
conn.commit()
conn.close()
# log result
plugin_objects.write_result_file()
return 0 return 0
# send data to the HUB
def send_data(api_token, file_content, encryption_key, plugin_folder, node_name, pref, hub_url): def send_data(api_token, file_content, encryption_key, plugin_folder, node_name, pref, hub_url):
# Encrypt the log data using the encryption_key # Encrypt the log data using the encryption_key
encrypted_data = encrypt_data(file_content, encryption_key) encrypted_data = encrypt_data(file_content, encryption_key)
@@ -223,6 +282,36 @@ def send_data(api_token, file_content, encryption_key, plugin_folder, node_name,
message = f'[{pluginName}] Failed to send data for "{plugin_folder}" (Status code: {response.status_code})' message = f'[{pluginName}] Failed to send data for "{plugin_folder}" (Status code: {response.status_code})'
mylog('verbose', [message]) mylog('verbose', [message])
write_notification(message, 'alert', timeNowTZ()) write_notification(message, 'alert', timeNowTZ())
# get data from the nodes to the HUB
def get_data(api_token, node_url):
mylog('verbose', [f'[{pluginName}] Getting data from node: "{node_url}"'])
# Set the authorization header with the API token
headers = {'Authorization': f'Bearer {api_token}'}
api_endpoint = f"{node_url}/plugins/sync/hub.php"
response = requests.get(api_endpoint, headers=headers)
# mylog('verbose', [f'[{pluginName}] response: "{response}"'])
if response.status_code == 200:
try:
# Parse JSON response
response_json = response.json()
return response_json
except json.JSONDecodeError:
message = f'[{pluginName}] Failed to parse JSON response from "{node_url}"'
mylog('verbose', [message])
write_notification(message, 'alert', timeNowTZ())
return ""
else:
message = f'[{pluginName}] Failed to send data for "{node_url}" (Status code: {response.status_code})'
mylog('verbose', [message])
write_notification(message, 'alert', timeNowTZ())
return ""

View File

@@ -25,28 +25,12 @@
{ {
"language_code": "en_us", "language_code": "en_us",
"string": "<i class=\"fa-solid fa-binoculars\"></i>" "string": "<i class=\"fa-solid fa-binoculars\"></i>"
},
{
"language_code": "es_es",
"string": "<i class=\"fa-solid fa-binoculars\"></i>"
},
{
"language_code": "de_de",
"string": "<i class=\"fa-solid fa-binoculars\"></i>"
} }
], ],
"description": [ "description": [
{ {
"language_code": "en_us", "language_code": "en_us",
"string": "This plugin is to import undiscoverable devices from a file." "string": "This plugin is to import undiscoverable devices from a file. Only ASCII characters are supported."
},
{
"language_code": "es_es",
"string": "Este complemento es para importar dispositivos no detectables desde un archivo."
},
{
"language_code": "de_de",
"string": "Ein Plugin zum Importieren von nicht erkennbaren Geräten aus einer Datei."
} }
], ],
"params": [ "params": [

View File

@@ -640,10 +640,15 @@ $settingsJSON_DB = json_encode($settings, JSON_HEX_TAG | JSON_HEX_AMP | JSON_HEX
// console.log(setTypeObject); // console.log(setTypeObject);
const dataType = setTypeObject.dataType; const dataType = setTypeObject.dataType;
// const lastElementObj = setTypeObject.elements[setTypeObject.elements.length - 1]; //🔽
// get the element with the input value(s) // get the element with the input value(s)
const elementsWithInputValue = setTypeObject.elements.filter(element => element.elementHasInputValue === 1); let elementsWithInputValue = setTypeObject.elements.filter(element => element.elementHasInputValue === 1);
// if none found, take last
if(elementsWithInputValue.length == 0)
{
elementsWithInputValue = setTypeObject.elements[setTypeObject.elements.length - 1]
}
const { elementType, elementOptions = [], transformers = [] } = elementsWithInputValue; const { elementType, elementOptions = [], transformers = [] } = elementsWithInputValue;
const { const {
@@ -666,15 +671,17 @@ $settingsJSON_DB = json_encode($settings, JSON_HEX_TAG | JSON_HEX_AMP | JSON_HEX
if (dataType === "string" || if (dataType === "string" ||
(dataType === "integer" && (inputType === "number" || inputType === "text"))) { (dataType === "integer" && (inputType === "number" || inputType === "text"))) {
value = $('#' + setCodeName).val(); value = $('#' + setCodeName).val();
value = applyTransformers(value, transformers); value = applyTransformers(value, transformers);
settingsArray.push([prefix, setCodeName, dataType, value]); settingsArray.push([prefix, setCodeName, dataType, value]);
} else if (dataType === 'boolean') { } else if (dataType === 'boolean') {
value = $(`#${setCodeName}`).is(':checked') ? 1 : 0; value = $(`#${setCodeName}`).is(':checked') ? 1 : 0;
value = applyTransformers(value, transformers); value = applyTransformers(value, transformers);
settingsArray.push([prefix, setCodeName, dataType, value]); settingsArray.push([prefix, setCodeName, dataType, value]);
} else if (dataType === "array" ) { } else if (dataType === "array" ) {
@@ -813,7 +820,8 @@ $settingsJSON_DB = json_encode($settings, JSON_HEX_TAG | JSON_HEX_AMP | JSON_HEX
// Reload page if outdated information might be displayed // Reload page if outdated information might be displayed
if (secondsSincePageLoad() > 10) { if (secondsSincePageLoad() > 10) {
clearCache(); console.log("App outdated, reloading...");
clearCache();
} }
} else { } else {
console.log("App not initialized, checking again in 1s..."); console.log("App not initialized, checking again in 1s...");

View File

@@ -210,6 +210,7 @@ def create_new_devices (db):
cur_Type = cur_Type.strip() if cur_Type else get_setting_value("NEWDEV_dev_DeviceType") cur_Type = cur_Type.strip() if cur_Type else get_setting_value("NEWDEV_dev_DeviceType")
cur_NetworkNodeMAC = cur_NetworkNodeMAC.strip() if cur_NetworkNodeMAC else '' cur_NetworkNodeMAC = cur_NetworkNodeMAC.strip() if cur_NetworkNodeMAC else ''
cur_NetworkNodeMAC = cur_NetworkNodeMAC if cur_NetworkNodeMAC and cur_MAC != "Internet" else (get_setting_value("NEWDEV_dev_Network_Node_MAC_ADDR") if cur_MAC != "Internet" else "null") cur_NetworkNodeMAC = cur_NetworkNodeMAC if cur_NetworkNodeMAC and cur_MAC != "Internet" else (get_setting_value("NEWDEV_dev_Network_Node_MAC_ADDR") if cur_MAC != "Internet" else "null")
cur_SyncHubNodeName = cur_SyncHubNodeName if cur_SyncHubNodeName and cur_SyncHubNodeName != "null" else (get_setting_value("SYNC_node_name"))
# Preparing the individual insert statement # Preparing the individual insert statement
sqlQuery = f"""INSERT OR IGNORE INTO Devices sqlQuery = f"""INSERT OR IGNORE INTO Devices
@@ -637,8 +638,8 @@ icons = {
def guess_icon(vendor, mac, ip, name, default): def guess_icon(vendor, mac, ip, name, default):
result = default result = default
mac = mac.upper() mac = mac.upper()
vendor = vendor.lower() vendor = vendor.lower() if vendor else "unknown"
name = name.lower() name = name.lower() if name else "(unknown)"
# Guess icon based on vendor # Guess icon based on vendor
if any(brand in vendor for brand in {"samsung", "motorola"}): if any(brand in vendor for brand in {"samsung", "motorola"}):
@@ -693,8 +694,8 @@ def guess_icon(vendor, mac, ip, name, default):
def guess_type(vendor, mac, ip, name, default): def guess_type(vendor, mac, ip, name, default):
result = default result = default
mac = mac.upper() mac = mac.upper()
vendor = vendor.lower() vendor = vendor.lower() if vendor else "unknown"
name = name.lower() name = name.lower() if name else "(unknown)"
# Guess icon based on vendor # Guess icon based on vendor
if any(brand in vendor for brand in {"samsung", "motorola"}): if any(brand in vendor for brand in {"samsung", "motorola"}):

View File

@@ -145,6 +145,7 @@ def importConfigs (db, all_plugins):
conf.REPORT_DASHBOARD_URL = ccd('REPORT_DASHBOARD_URL', 'http://netalertx/' , c_d, 'NetAlertX URL', '{"dataType":"string", "elements": [{"elementType" : "input", "elementOptions" : [] ,"transformers": []}]}', '[]', 'General') conf.REPORT_DASHBOARD_URL = ccd('REPORT_DASHBOARD_URL', 'http://netalertx/' , c_d, 'NetAlertX URL', '{"dataType":"string", "elements": [{"elementType" : "input", "elementOptions" : [] ,"transformers": []}]}', '[]', 'General')
conf.DAYS_TO_KEEP_EVENTS = ccd('DAYS_TO_KEEP_EVENTS', 90 , c_d, 'Delete events days', '{"dataType":"integer", "elements": [{"elementType" : "input", "elementOptions" : [{"type": "number"}] ,"transformers": []}]}', '[]', 'General') conf.DAYS_TO_KEEP_EVENTS = ccd('DAYS_TO_KEEP_EVENTS', 90 , c_d, 'Delete events days', '{"dataType":"integer", "elements": [{"elementType" : "input", "elementOptions" : [{"type": "number"}] ,"transformers": []}]}', '[]', 'General')
conf.HRS_TO_KEEP_NEWDEV = ccd('HRS_TO_KEEP_NEWDEV', 0 , c_d, 'Keep new devices for', '{"dataType":"integer", "elements": [{"elementType" : "input", "elementOptions" : [{"type": "number"}] ,"transformers": []}]}', "[]", 'General') conf.HRS_TO_KEEP_NEWDEV = ccd('HRS_TO_KEEP_NEWDEV', 0 , c_d, 'Keep new devices for', '{"dataType":"integer", "elements": [{"elementType" : "input", "elementOptions" : [{"type": "number"}] ,"transformers": []}]}', "[]", 'General')
conf.CLEAR_NEW_FLAG = ccd('CLEAR_NEW_FLAG', 0 , c_d, 'Clear new flag', '{"dataType":"integer", "elements": [{"elementType" : "input", "elementOptions" : [{"type": "number"}] ,"transformers": []}]}', "[]", 'General')
conf.API_CUSTOM_SQL = ccd('API_CUSTOM_SQL', 'SELECT * FROM Devices WHERE dev_PresentLastScan = 0' , c_d, 'Custom endpoint', '{"dataType":"string", "elements": [{"elementType" : "input", "elementOptions" : [] ,"transformers": []}]}', '[]', 'General') conf.API_CUSTOM_SQL = ccd('API_CUSTOM_SQL', 'SELECT * FROM Devices WHERE dev_PresentLastScan = 0' , c_d, 'Custom endpoint', '{"dataType":"string", "elements": [{"elementType" : "input", "elementOptions" : [] ,"transformers": []}]}', '[]', 'General')
conf.VERSION = ccd('VERSION', '' , c_d, 'Version', '{"dataType":"string", "elements": [{"elementType" : "input", "elementOptions" : [{ "readonly": "true" }] ,"transformers": []}]}', '', 'General') conf.VERSION = ccd('VERSION', '' , c_d, 'Version', '{"dataType":"string", "elements": [{"elementType" : "input", "elementOptions" : [{ "readonly": "true" }] ,"transformers": []}]}', '', 'General')
conf.NETWORK_DEVICE_TYPES = ccd('NETWORK_DEVICE_TYPES', ['AP', 'Gateway', 'Firewall', 'Hypervisor', 'Powerline', 'Switch', 'WLAN', 'PLC', 'Router','USB LAN Adapter', 'USB WIFI Adapter', 'Internet'] , c_d, 'Network device types', '{"dataType":"array","elements":[{"elementType":"input","elementOptions":[{"placeholder":"Enter value"},{"suffix":"_in"},{"cssClasses":"col-sm-10"},{"prefillValue":"null"}],"transformers":[]},{"elementType":"button","elementOptions":[{"sourceSuffixes":["_in"]},{"separator":""},{"cssClasses":"col-xs-12"},{"onClick":"addList(this,false)"},{"getStringKey":"Gen_Add"}],"transformers":[]},{"elementType":"select", "elementHasInputValue":1,"elementOptions":[{"multiple":"true"},{"readonly":"true"},{"editable":"true"}],"transformers":[]},{"elementType":"button","elementOptions":[{"sourceSuffixes":[]},{"separator":""},{"cssClasses":"col-xs-6"},{"onClick":"removeAllOptions(this)"},{"getStringKey":"Gen_Remove_All"}],"transformers":[]},{"elementType":"button","elementOptions":[{"sourceSuffixes":[]},{"separator":""},{"cssClasses":"col-xs-6"},{"onClick":"removeFromList(this)"},{"getStringKey":"Gen_Remove_Last"}],"transformers":[]}]}', '[]', 'General') conf.NETWORK_DEVICE_TYPES = ccd('NETWORK_DEVICE_TYPES', ['AP', 'Gateway', 'Firewall', 'Hypervisor', 'Powerline', 'Switch', 'WLAN', 'PLC', 'Router','USB LAN Adapter', 'USB WIFI Adapter', 'Internet'] , c_d, 'Network device types', '{"dataType":"array","elements":[{"elementType":"input","elementOptions":[{"placeholder":"Enter value"},{"suffix":"_in"},{"cssClasses":"col-sm-10"},{"prefillValue":"null"}],"transformers":[]},{"elementType":"button","elementOptions":[{"sourceSuffixes":["_in"]},{"separator":""},{"cssClasses":"col-xs-12"},{"onClick":"addList(this,false)"},{"getStringKey":"Gen_Add"}],"transformers":[]},{"elementType":"select", 
"elementHasInputValue":1,"elementOptions":[{"multiple":"true"},{"readonly":"true"},{"editable":"true"}],"transformers":[]},{"elementType":"button","elementOptions":[{"sourceSuffixes":[]},{"separator":""},{"cssClasses":"col-xs-6"},{"onClick":"removeAllOptions(this)"},{"getStringKey":"Gen_Remove_All"}],"transformers":[]},{"elementType":"button","elementOptions":[{"sourceSuffixes":[]},{"separator":""},{"cssClasses":"col-xs-6"},{"onClick":"removeFromList(this)"},{"getStringKey":"Gen_Remove_Last"}],"transformers":[]}]}', '[]', 'General')
@@ -342,7 +343,7 @@ def importConfigs (db, all_plugins):
# ccd(key, default, config_dir, name, inputtype, options, group, events=None, desc="", regex="", setJsonMetadata=None, overrideTemplate=None, forceDefault=False) # ccd(key, default, config_dir, name, inputtype, options, group, events=None, desc="", regex="", setJsonMetadata=None, overrideTemplate=None, forceDefault=False)
ccd('VERSION', buildTimestamp , c_d, '_KEEP_', '_KEEP_', '_KEEP_', '_KEEP_', None, "_KEEP_", "", None, None, True) ccd('VERSION', buildTimestamp , c_d, '_KEEP_', '_KEEP_', '_KEEP_', '_KEEP_', None, "_KEEP_", "", None, None, True)
write_notification(f'[Upgrade] : App upgraded 🚀 Please clear the cache: <ol> <li>Clear the browser cache (shift + browser refresh button)</li> <li> Clear app cache with the 🔄 (reload) button in the header</li></ol> Check out new features and what has changed in the <a href="https://github.com/jokob-sk/NetAlertX/releases" target="_blank">📓 release notes</a>.', 'interrupt', timeNowTZ()) write_notification(f'[Upgrade] : App upgraded 🚀 Please clear the cache: <ol> <li>Click OK below</li> <li>Clear the browser cache (shift + browser refresh button)</li> <li> Clear app cache with the 🔄 (reload) button in the header</li><li>Go to Settings and click Save</li> </ol> Check out new features and what has changed in the <a href="https://github.com/jokob-sk/NetAlertX/releases" target="_blank">📓 release notes</a>.', 'interrupt', timeNowTZ())
# Insert settings into the DB # Insert settings into the DB