mirror of
https://github.com/jokob-sk/NetAlertX.git
synced 2026-03-30 23:03:03 -07:00
Compare commits
39 Commits
chore_time
...
copilot/su
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
258a11bf21 | ||
|
|
1b317838fd | ||
|
|
39f617be5f | ||
|
|
c4c966ffa7 | ||
|
|
f88aefe022 | ||
|
|
54db347b94 | ||
|
|
2ae87fca38 | ||
|
|
8224363c45 | ||
|
|
eb399ec193 | ||
|
|
70645e7ef3 | ||
|
|
0e94dcb091 | ||
|
|
a26137800d | ||
|
|
63810bc536 | ||
|
|
57d451fcf4 | ||
|
|
bf6218e836 | ||
|
|
e9efabd562 | ||
|
|
eb0f705587 | ||
|
|
2559702a6a | ||
|
|
6bbfc0637c | ||
|
|
688d49b5ae | ||
|
|
ab7df4384e | ||
|
|
2018636bf8 | ||
|
|
50f341e84f | ||
|
|
05c332867b | ||
|
|
12b0d911ff | ||
|
|
04884a264b | ||
|
|
2742414123 | ||
|
|
876cd4bbe1 | ||
|
|
91775deaa3 | ||
|
|
7075091569 | ||
|
|
f63658af7d | ||
|
|
774c123804 | ||
|
|
32e2d571a0 | ||
|
|
249d12ded4 | ||
|
|
3036cd04fc | ||
|
|
3d3abe7e53 | ||
|
|
a088f4580a | ||
|
|
75c7d6c015 | ||
|
|
d434cc5315 |
@@ -35,6 +35,7 @@ RUN apk add --no-cache \
|
||||
shadow \
|
||||
python3 \
|
||||
python3-dev \
|
||||
py3-psutil \
|
||||
gcc \
|
||||
musl-dev \
|
||||
libffi-dev \
|
||||
@@ -136,7 +137,7 @@ ENV LANG=C.UTF-8
|
||||
|
||||
RUN apk add --no-cache bash mtr libbsd zip lsblk tzdata curl arp-scan iproute2 iproute2-ss nmap fping \
|
||||
nmap-scripts traceroute nbtscan net-tools net-snmp-tools bind-tools awake ca-certificates \
|
||||
sqlite php83 php83-fpm php83-cgi php83-curl php83-sqlite3 php83-session python3 envsubst \
|
||||
sqlite php83 php83-fpm php83-cgi php83-curl php83-sqlite3 php83-session python3 py3-psutil envsubst \
|
||||
nginx supercronic shadow su-exec jq && \
|
||||
rm -Rf /var/cache/apk/* && \
|
||||
rm -Rf /etc/nginx && \
|
||||
|
||||
@@ -32,6 +32,7 @@ RUN apk add --no-cache \
|
||||
shadow \
|
||||
python3 \
|
||||
python3-dev \
|
||||
py3-psutil \
|
||||
gcc \
|
||||
musl-dev \
|
||||
libffi-dev \
|
||||
@@ -133,7 +134,7 @@ ENV LANG=C.UTF-8
|
||||
|
||||
RUN apk add --no-cache bash mtr libbsd zip lsblk tzdata curl arp-scan iproute2 iproute2-ss nmap fping \
|
||||
nmap-scripts traceroute nbtscan net-tools net-snmp-tools bind-tools awake ca-certificates \
|
||||
sqlite php83 php83-fpm php83-cgi php83-curl php83-sqlite3 php83-session python3 envsubst \
|
||||
sqlite php83 php83-fpm php83-cgi php83-curl php83-sqlite3 php83-session python3 py3-psutil envsubst \
|
||||
nginx supercronic shadow su-exec jq && \
|
||||
rm -Rf /var/cache/apk/* && \
|
||||
rm -Rf /etc/nginx && \
|
||||
|
||||
@@ -10,6 +10,7 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
|
||||
python3 \
|
||||
python3-dev \
|
||||
python3-pip \
|
||||
python3-psutil \
|
||||
python3-venv \
|
||||
gcc \
|
||||
git \
|
||||
@@ -193,7 +194,7 @@ RUN for vfile in .VERSION .VERSION_PREV; do \
|
||||
# setcap cap_net_raw,cap_net_admin+eip $(readlink -f ${VIRTUAL_ENV_BIN}/python) && \
|
||||
/bin/bash /build/init-nginx.sh && \
|
||||
/bin/bash /build/init-php-fpm.sh && \
|
||||
# /bin/bash /build/init-cron.sh && \
|
||||
# /bin/bash /build/init-cron.sh && \
|
||||
# Debian cron init might differ, skipping for now or need to check init-cron.sh content
|
||||
# Checking init-backend.sh
|
||||
/bin/bash /build/init-backend.sh && \
|
||||
|
||||
434
back/app.sql
434
back/app.sql
@@ -1,434 +0,0 @@
|
||||
CREATE TABLE sqlite_stat1(tbl,idx,stat);
|
||||
CREATE TABLE Events (eve_MAC STRING (50) NOT NULL COLLATE NOCASE, eve_IP STRING (50) NOT NULL COLLATE NOCASE, eve_DateTime DATETIME NOT NULL, eve_EventType STRING (30) NOT NULL COLLATE NOCASE, eve_AdditionalInfo STRING (250) DEFAULT (''), eve_PendingAlertEmail BOOLEAN NOT NULL CHECK (eve_PendingAlertEmail IN (0, 1)) DEFAULT (1), eve_PairEventRowid INTEGER);
|
||||
CREATE TABLE Sessions (ses_MAC STRING (50) COLLATE NOCASE, ses_IP STRING (50) COLLATE NOCASE, ses_EventTypeConnection STRING (30) COLLATE NOCASE, ses_DateTimeConnection DATETIME, ses_EventTypeDisconnection STRING (30) COLLATE NOCASE, ses_DateTimeDisconnection DATETIME, ses_StillConnected BOOLEAN, ses_AdditionalInfo STRING (250));
|
||||
CREATE TABLE IF NOT EXISTS "Online_History" (
|
||||
"Index" INTEGER,
|
||||
"Scan_Date" TEXT,
|
||||
"Online_Devices" INTEGER,
|
||||
"Down_Devices" INTEGER,
|
||||
"All_Devices" INTEGER,
|
||||
"Archived_Devices" INTEGER,
|
||||
"Offline_Devices" INTEGER,
|
||||
PRIMARY KEY("Index" AUTOINCREMENT)
|
||||
);
|
||||
CREATE TABLE sqlite_sequence(name,seq);
|
||||
CREATE TABLE Devices (
|
||||
devMac STRING (50) PRIMARY KEY NOT NULL COLLATE NOCASE,
|
||||
devName STRING (50) NOT NULL DEFAULT "(unknown)",
|
||||
devOwner STRING (30) DEFAULT "(unknown)" NOT NULL,
|
||||
devType STRING (30),
|
||||
devVendor STRING (250),
|
||||
devFavorite BOOLEAN CHECK (devFavorite IN (0, 1)) DEFAULT (0) NOT NULL,
|
||||
devGroup STRING (10),
|
||||
devComments TEXT,
|
||||
devFirstConnection DATETIME NOT NULL,
|
||||
devLastConnection DATETIME NOT NULL,
|
||||
devLastIP STRING (50) NOT NULL COLLATE NOCASE,
|
||||
devPrimaryIPv4 TEXT,
|
||||
devPrimaryIPv6 TEXT,
|
||||
devVlan TEXT,
|
||||
devForceStatus TEXT,
|
||||
devStaticIP BOOLEAN DEFAULT (0) NOT NULL CHECK (devStaticIP IN (0, 1)),
|
||||
devScan INTEGER DEFAULT (1) NOT NULL,
|
||||
devLogEvents BOOLEAN NOT NULL DEFAULT (1) CHECK (devLogEvents IN (0, 1)),
|
||||
devAlertEvents BOOLEAN NOT NULL DEFAULT (1) CHECK (devAlertEvents IN (0, 1)),
|
||||
devAlertDown BOOLEAN NOT NULL DEFAULT (0) CHECK (devAlertDown IN (0, 1)),
|
||||
devSkipRepeated INTEGER DEFAULT 0 NOT NULL,
|
||||
devLastNotification DATETIME,
|
||||
devPresentLastScan BOOLEAN NOT NULL DEFAULT (0) CHECK (devPresentLastScan IN (0, 1)),
|
||||
devIsNew BOOLEAN NOT NULL DEFAULT (1) CHECK (devIsNew IN (0, 1)),
|
||||
devLocation STRING (250) COLLATE NOCASE,
|
||||
devIsArchived BOOLEAN NOT NULL DEFAULT (0) CHECK (devIsArchived IN (0, 1)),
|
||||
devParentMAC TEXT,
|
||||
devParentPort INTEGER,
|
||||
devParentRelType TEXT,
|
||||
devIcon TEXT,
|
||||
devGUID TEXT,
|
||||
devSite TEXT,
|
||||
devSSID TEXT,
|
||||
devSyncHubNode TEXT,
|
||||
devSourcePlugin TEXT,
|
||||
devMacSource TEXT,
|
||||
devNameSource TEXT,
|
||||
devFQDNSource TEXT,
|
||||
devLastIPSource TEXT,
|
||||
devVendorSource TEXT,
|
||||
devSSIDSource TEXT,
|
||||
devParentMACSource TEXT,
|
||||
devParentPortSource TEXT,
|
||||
devParentRelTypeSource TEXT,
|
||||
devVlanSource TEXT,
|
||||
"devCustomProps" TEXT);
|
||||
CREATE TABLE IF NOT EXISTS "Settings" (
|
||||
"setKey" TEXT,
|
||||
"setName" TEXT,
|
||||
"setDescription" TEXT,
|
||||
"setType" TEXT,
|
||||
"setOptions" TEXT,
|
||||
"setGroup" TEXT,
|
||||
"setValue" TEXT,
|
||||
"setEvents" TEXT,
|
||||
"setOverriddenByEnv" INTEGER
|
||||
);
|
||||
CREATE TABLE IF NOT EXISTS "Parameters" (
|
||||
"par_ID" TEXT PRIMARY KEY,
|
||||
"par_Value" TEXT
|
||||
);
|
||||
CREATE TABLE Plugins_Objects(
|
||||
"Index" INTEGER,
|
||||
Plugin TEXT NOT NULL,
|
||||
Object_PrimaryID TEXT NOT NULL,
|
||||
Object_SecondaryID TEXT NOT NULL,
|
||||
DateTimeCreated TEXT NOT NULL,
|
||||
DateTimeChanged TEXT NOT NULL,
|
||||
Watched_Value1 TEXT NOT NULL,
|
||||
Watched_Value2 TEXT NOT NULL,
|
||||
Watched_Value3 TEXT NOT NULL,
|
||||
Watched_Value4 TEXT NOT NULL,
|
||||
Status TEXT NOT NULL,
|
||||
Extra TEXT NOT NULL,
|
||||
UserData TEXT NOT NULL,
|
||||
ForeignKey TEXT NOT NULL,
|
||||
SyncHubNodeName TEXT,
|
||||
"HelpVal1" TEXT,
|
||||
"HelpVal2" TEXT,
|
||||
"HelpVal3" TEXT,
|
||||
"HelpVal4" TEXT,
|
||||
ObjectGUID TEXT,
|
||||
PRIMARY KEY("Index" AUTOINCREMENT)
|
||||
);
|
||||
CREATE TABLE Plugins_Events(
|
||||
"Index" INTEGER,
|
||||
Plugin TEXT NOT NULL,
|
||||
Object_PrimaryID TEXT NOT NULL,
|
||||
Object_SecondaryID TEXT NOT NULL,
|
||||
DateTimeCreated TEXT NOT NULL,
|
||||
DateTimeChanged TEXT NOT NULL,
|
||||
Watched_Value1 TEXT NOT NULL,
|
||||
Watched_Value2 TEXT NOT NULL,
|
||||
Watched_Value3 TEXT NOT NULL,
|
||||
Watched_Value4 TEXT NOT NULL,
|
||||
Status TEXT NOT NULL,
|
||||
Extra TEXT NOT NULL,
|
||||
UserData TEXT NOT NULL,
|
||||
ForeignKey TEXT NOT NULL,
|
||||
SyncHubNodeName TEXT,
|
||||
"HelpVal1" TEXT,
|
||||
"HelpVal2" TEXT,
|
||||
"HelpVal3" TEXT,
|
||||
"HelpVal4" TEXT, "ObjectGUID" TEXT,
|
||||
PRIMARY KEY("Index" AUTOINCREMENT)
|
||||
);
|
||||
CREATE TABLE Plugins_History(
|
||||
"Index" INTEGER,
|
||||
Plugin TEXT NOT NULL,
|
||||
Object_PrimaryID TEXT NOT NULL,
|
||||
Object_SecondaryID TEXT NOT NULL,
|
||||
DateTimeCreated TEXT NOT NULL,
|
||||
DateTimeChanged TEXT NOT NULL,
|
||||
Watched_Value1 TEXT NOT NULL,
|
||||
Watched_Value2 TEXT NOT NULL,
|
||||
Watched_Value3 TEXT NOT NULL,
|
||||
Watched_Value4 TEXT NOT NULL,
|
||||
Status TEXT NOT NULL,
|
||||
Extra TEXT NOT NULL,
|
||||
UserData TEXT NOT NULL,
|
||||
ForeignKey TEXT NOT NULL,
|
||||
SyncHubNodeName TEXT,
|
||||
"HelpVal1" TEXT,
|
||||
"HelpVal2" TEXT,
|
||||
"HelpVal3" TEXT,
|
||||
"HelpVal4" TEXT, "ObjectGUID" TEXT,
|
||||
PRIMARY KEY("Index" AUTOINCREMENT)
|
||||
);
|
||||
CREATE TABLE Plugins_Language_Strings(
|
||||
"Index" INTEGER,
|
||||
Language_Code TEXT NOT NULL,
|
||||
String_Key TEXT NOT NULL,
|
||||
String_Value TEXT NOT NULL,
|
||||
Extra TEXT NOT NULL,
|
||||
PRIMARY KEY("Index" AUTOINCREMENT)
|
||||
);
|
||||
CREATE TABLE CurrentScan (
|
||||
scanMac STRING(50) NOT NULL COLLATE NOCASE,
|
||||
scanLastIP STRING(50) NOT NULL COLLATE NOCASE,
|
||||
scanVendor STRING(250),
|
||||
scanSourcePlugin STRING(10),
|
||||
scanName STRING(250),
|
||||
scanLastQuery STRING(250),
|
||||
scanLastConnection STRING(250),
|
||||
scanSyncHubNode STRING(50),
|
||||
scanSite STRING(250),
|
||||
scanSSID STRING(250),
|
||||
scanVlan STRING(250),
|
||||
scanParentMAC STRING(250),
|
||||
scanParentPort STRING(250),
|
||||
scanType STRING(250),
|
||||
UNIQUE(scanMac)
|
||||
);
|
||||
CREATE TABLE IF NOT EXISTS "AppEvents" (
|
||||
"Index" INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
"GUID" TEXT UNIQUE,
|
||||
"AppEventProcessed" BOOLEAN,
|
||||
"DateTimeCreated" TEXT,
|
||||
"ObjectType" TEXT,
|
||||
"ObjectGUID" TEXT,
|
||||
"ObjectPlugin" TEXT,
|
||||
"ObjectPrimaryID" TEXT,
|
||||
"ObjectSecondaryID" TEXT,
|
||||
"ObjectForeignKey" TEXT,
|
||||
"ObjectIndex" TEXT,
|
||||
"ObjectIsNew" BOOLEAN,
|
||||
"ObjectIsArchived" BOOLEAN,
|
||||
"ObjectStatusColumn" TEXT,
|
||||
"ObjectStatus" TEXT,
|
||||
"AppEventType" TEXT,
|
||||
"Helper1" TEXT,
|
||||
"Helper2" TEXT,
|
||||
"Helper3" TEXT,
|
||||
"Extra" TEXT
|
||||
);
|
||||
CREATE TABLE IF NOT EXISTS "Notifications" (
|
||||
"Index" INTEGER,
|
||||
"GUID" TEXT UNIQUE,
|
||||
"DateTimeCreated" TEXT,
|
||||
"DateTimePushed" TEXT,
|
||||
"Status" TEXT,
|
||||
"JSON" TEXT,
|
||||
"Text" TEXT,
|
||||
"HTML" TEXT,
|
||||
"PublishedVia" TEXT,
|
||||
"Extra" TEXT,
|
||||
PRIMARY KEY("Index" AUTOINCREMENT)
|
||||
);
|
||||
CREATE INDEX IDX_eve_DateTime ON Events (eve_DateTime);
|
||||
CREATE INDEX IDX_eve_EventType ON Events (eve_EventType COLLATE NOCASE);
|
||||
CREATE INDEX IDX_eve_MAC ON Events (eve_MAC COLLATE NOCASE);
|
||||
CREATE INDEX IDX_eve_PairEventRowid ON Events (eve_PairEventRowid);
|
||||
CREATE INDEX IDX_ses_EventTypeDisconnection ON Sessions (ses_EventTypeDisconnection COLLATE NOCASE);
|
||||
CREATE INDEX IDX_ses_EventTypeConnection ON Sessions (ses_EventTypeConnection COLLATE NOCASE);
|
||||
CREATE INDEX IDX_ses_DateTimeDisconnection ON Sessions (ses_DateTimeDisconnection);
|
||||
CREATE INDEX IDX_ses_MAC ON Sessions (ses_MAC COLLATE NOCASE);
|
||||
CREATE INDEX IDX_ses_DateTimeConnection ON Sessions (ses_DateTimeConnection);
|
||||
CREATE INDEX IDX_dev_PresentLastScan ON Devices (devPresentLastScan);
|
||||
CREATE INDEX IDX_dev_FirstConnection ON Devices (devFirstConnection);
|
||||
CREATE INDEX IDX_dev_AlertDeviceDown ON Devices (devAlertDown);
|
||||
CREATE INDEX IDX_dev_StaticIP ON Devices (devStaticIP);
|
||||
CREATE INDEX IDX_dev_ScanCycle ON Devices (devScan);
|
||||
CREATE INDEX IDX_dev_Favorite ON Devices (devFavorite);
|
||||
CREATE INDEX IDX_dev_LastIP ON Devices (devLastIP);
|
||||
CREATE INDEX IDX_dev_NewDevice ON Devices (devIsNew);
|
||||
CREATE INDEX IDX_dev_Archived ON Devices (devIsArchived);
|
||||
CREATE UNIQUE INDEX IF NOT EXISTS idx_events_unique
|
||||
ON Events (
|
||||
eve_MAC,
|
||||
eve_IP,
|
||||
eve_EventType,
|
||||
eve_DateTime
|
||||
);
|
||||
CREATE VIEW Events_Devices AS
|
||||
SELECT *
|
||||
FROM Events
|
||||
LEFT JOIN Devices ON eve_MAC = devMac
|
||||
/* Events_Devices(eve_MAC,eve_IP,eve_DateTime,eve_EventType,eve_AdditionalInfo,eve_PendingAlertEmail,eve_PairEventRowid,devMac,devName,devOwner,devType,devVendor,devFavorite,devGroup,devComments,devFirstConnection,devLastConnection,devLastIP,devStaticIP,devScan,devLogEvents,devAlertEvents,devAlertDown,devSkipRepeated,devLastNotification,devPresentLastScan,devIsNew,devLocation,devIsArchived,devParentMAC,devParentPort,devIcon,devGUID,devSite,devSSID,devSyncHubNode,devSourcePlugin,devCustomProps) */;
|
||||
CREATE VIEW LatestEventsPerMAC AS
|
||||
WITH RankedEvents AS (
|
||||
SELECT
|
||||
e.*,
|
||||
ROW_NUMBER() OVER (PARTITION BY e.eve_MAC ORDER BY e.eve_DateTime DESC) AS row_num
|
||||
FROM Events AS e
|
||||
)
|
||||
SELECT
|
||||
e.*,
|
||||
d.*,
|
||||
c.*
|
||||
FROM RankedEvents AS e
|
||||
LEFT JOIN Devices AS d ON e.eve_MAC = d.devMac
|
||||
INNER JOIN CurrentScan AS c ON e.eve_MAC = c.scanMac
|
||||
WHERE e.row_num = 1
|
||||
/* LatestEventsPerMAC(eve_MAC,eve_IP,eve_DateTime,eve_EventType,eve_AdditionalInfo,eve_PendingAlertEmail,eve_PairEventRowid,row_num,devMac,devName,devOwner,devType,devVendor,devFavorite,devGroup,devComments,devFirstConnection,devLastConnection,devLastIP,devStaticIP,devScan,devLogEvents,devAlertEvents,devAlertDown,devSkipRepeated,devLastNotification,devPresentLastScan,devIsNew,devLocation,devIsArchived,devParentMAC,devParentPort,devIcon,devGUID,devSite,devSSID,devSyncHubNode,devSourcePlugin,devCustomProps,scanMac,scanLastIP,scanVendor,scanSourcePlugin,scanName,scanLastQuery,scanLastConnection,scanSyncHubNode,scanSite,scanSSID,scanParentMAC,scanParentPort,scanType) */;
|
||||
CREATE VIEW Sessions_Devices AS SELECT * FROM Sessions LEFT JOIN "Devices" ON ses_MAC = devMac
|
||||
/* Sessions_Devices(ses_MAC,ses_IP,ses_EventTypeConnection,ses_DateTimeConnection,ses_EventTypeDisconnection,ses_DateTimeDisconnection,ses_StillConnected,ses_AdditionalInfo,devMac,devName,devOwner,devType,devVendor,devFavorite,devGroup,devComments,devFirstConnection,devLastConnection,devLastIP,devStaticIP,devScan,devLogEvents,devAlertEvents,devAlertDown,devSkipRepeated,devLastNotification,devPresentLastScan,devIsNew,devLocation,devIsArchived,devParentMAC,devParentPort,devIcon,devGUID,devSite,devSSID,devSyncHubNode,devSourcePlugin,devCustomProps) */;
|
||||
CREATE VIEW Convert_Events_to_Sessions AS SELECT EVE1.eve_MAC,
|
||||
EVE1.eve_IP,
|
||||
EVE1.eve_EventType AS eve_EventTypeConnection,
|
||||
EVE1.eve_DateTime AS eve_DateTimeConnection,
|
||||
CASE WHEN EVE2.eve_EventType IN ('Disconnected', 'Device Down') OR
|
||||
EVE2.eve_EventType IS NULL THEN EVE2.eve_EventType ELSE '<missing event>' END AS eve_EventTypeDisconnection,
|
||||
CASE WHEN EVE2.eve_EventType IN ('Disconnected', 'Device Down') THEN EVE2.eve_DateTime ELSE NULL END AS eve_DateTimeDisconnection,
|
||||
CASE WHEN EVE2.eve_EventType IS NULL THEN 1 ELSE 0 END AS eve_StillConnected,
|
||||
EVE1.eve_AdditionalInfo
|
||||
FROM Events AS EVE1
|
||||
LEFT JOIN
|
||||
Events AS EVE2 ON EVE1.eve_PairEventRowID = EVE2.RowID
|
||||
WHERE EVE1.eve_EventType IN ('New Device', 'Connected','Down Reconnected')
|
||||
UNION
|
||||
SELECT eve_MAC,
|
||||
eve_IP,
|
||||
'<missing event>' AS eve_EventTypeConnection,
|
||||
NULL AS eve_DateTimeConnection,
|
||||
eve_EventType AS eve_EventTypeDisconnection,
|
||||
eve_DateTime AS eve_DateTimeDisconnection,
|
||||
0 AS eve_StillConnected,
|
||||
eve_AdditionalInfo
|
||||
FROM Events AS EVE1
|
||||
WHERE (eve_EventType = 'Device Down' OR
|
||||
eve_EventType = 'Disconnected') AND
|
||||
EVE1.eve_PairEventRowID IS NULL
|
||||
/* Convert_Events_to_Sessions(eve_MAC,eve_IP,eve_EventTypeConnection,eve_DateTimeConnection,eve_EventTypeDisconnection,eve_DateTimeDisconnection,eve_StillConnected,eve_AdditionalInfo) */;
|
||||
CREATE TRIGGER "trg_insert_devices"
|
||||
AFTER INSERT ON "Devices"
|
||||
WHEN NOT EXISTS (
|
||||
SELECT 1 FROM AppEvents
|
||||
WHERE AppEventProcessed = 0
|
||||
AND ObjectType = 'Devices'
|
||||
AND ObjectGUID = NEW.devGUID
|
||||
AND ObjectStatus = CASE WHEN NEW.devPresentLastScan = 1 THEN 'online' ELSE 'offline' END
|
||||
AND AppEventType = 'insert'
|
||||
)
|
||||
BEGIN
|
||||
INSERT INTO "AppEvents" (
|
||||
"GUID",
|
||||
"DateTimeCreated",
|
||||
"AppEventProcessed",
|
||||
"ObjectType",
|
||||
"ObjectGUID",
|
||||
"ObjectPrimaryID",
|
||||
"ObjectSecondaryID",
|
||||
"ObjectStatus",
|
||||
"ObjectStatusColumn",
|
||||
"ObjectIsNew",
|
||||
"ObjectIsArchived",
|
||||
"ObjectForeignKey",
|
||||
"ObjectPlugin",
|
||||
"AppEventType"
|
||||
)
|
||||
VALUES (
|
||||
|
||||
lower(
|
||||
hex(randomblob(4)) || '-' || hex(randomblob(2)) || '-' || '4' ||
|
||||
substr(hex( randomblob(2)), 2) || '-' ||
|
||||
substr('AB89', 1 + (abs(random()) % 4) , 1) ||
|
||||
substr(hex(randomblob(2)), 2) || '-' ||
|
||||
hex(randomblob(6))
|
||||
)
|
||||
,
|
||||
DATETIME('now'),
|
||||
FALSE,
|
||||
'Devices',
|
||||
NEW.devGUID, -- ObjectGUID
|
||||
NEW.devMac, -- ObjectPrimaryID
|
||||
NEW.devLastIP, -- ObjectSecondaryID
|
||||
CASE WHEN NEW.devPresentLastScan = 1 THEN 'online' ELSE 'offline' END, -- ObjectStatus
|
||||
'devPresentLastScan', -- ObjectStatusColumn
|
||||
NEW.devIsNew, -- ObjectIsNew
|
||||
NEW.devIsArchived, -- ObjectIsArchived
|
||||
NEW.devGUID, -- ObjectForeignKey
|
||||
'DEVICES', -- ObjectForeignKey
|
||||
'insert'
|
||||
);
|
||||
END;
|
||||
CREATE TRIGGER "trg_update_devices"
|
||||
AFTER UPDATE ON "Devices"
|
||||
WHEN NOT EXISTS (
|
||||
SELECT 1 FROM AppEvents
|
||||
WHERE AppEventProcessed = 0
|
||||
AND ObjectType = 'Devices'
|
||||
AND ObjectGUID = NEW.devGUID
|
||||
AND ObjectStatus = CASE WHEN NEW.devPresentLastScan = 1 THEN 'online' ELSE 'offline' END
|
||||
AND AppEventType = 'update'
|
||||
)
|
||||
BEGIN
|
||||
INSERT INTO "AppEvents" (
|
||||
"GUID",
|
||||
"DateTimeCreated",
|
||||
"AppEventProcessed",
|
||||
"ObjectType",
|
||||
"ObjectGUID",
|
||||
"ObjectPrimaryID",
|
||||
"ObjectSecondaryID",
|
||||
"ObjectStatus",
|
||||
"ObjectStatusColumn",
|
||||
"ObjectIsNew",
|
||||
"ObjectIsArchived",
|
||||
"ObjectForeignKey",
|
||||
"ObjectPlugin",
|
||||
"AppEventType"
|
||||
)
|
||||
VALUES (
|
||||
|
||||
lower(
|
||||
hex(randomblob(4)) || '-' || hex(randomblob(2)) || '-' || '4' ||
|
||||
substr(hex( randomblob(2)), 2) || '-' ||
|
||||
substr('AB89', 1 + (abs(random()) % 4) , 1) ||
|
||||
substr(hex(randomblob(2)), 2) || '-' ||
|
||||
hex(randomblob(6))
|
||||
)
|
||||
,
|
||||
DATETIME('now'),
|
||||
FALSE,
|
||||
'Devices',
|
||||
NEW.devGUID, -- ObjectGUID
|
||||
NEW.devMac, -- ObjectPrimaryID
|
||||
NEW.devLastIP, -- ObjectSecondaryID
|
||||
CASE WHEN NEW.devPresentLastScan = 1 THEN 'online' ELSE 'offline' END, -- ObjectStatus
|
||||
'devPresentLastScan', -- ObjectStatusColumn
|
||||
NEW.devIsNew, -- ObjectIsNew
|
||||
NEW.devIsArchived, -- ObjectIsArchived
|
||||
NEW.devGUID, -- ObjectForeignKey
|
||||
'DEVICES', -- ObjectForeignKey
|
||||
'update'
|
||||
);
|
||||
END;
|
||||
CREATE TRIGGER "trg_delete_devices"
|
||||
AFTER DELETE ON "Devices"
|
||||
WHEN NOT EXISTS (
|
||||
SELECT 1 FROM AppEvents
|
||||
WHERE AppEventProcessed = 0
|
||||
AND ObjectType = 'Devices'
|
||||
AND ObjectGUID = OLD.devGUID
|
||||
AND ObjectStatus = CASE WHEN OLD.devPresentLastScan = 1 THEN 'online' ELSE 'offline' END
|
||||
AND AppEventType = 'delete'
|
||||
)
|
||||
BEGIN
|
||||
INSERT INTO "AppEvents" (
|
||||
"GUID",
|
||||
"DateTimeCreated",
|
||||
"AppEventProcessed",
|
||||
"ObjectType",
|
||||
"ObjectGUID",
|
||||
"ObjectPrimaryID",
|
||||
"ObjectSecondaryID",
|
||||
"ObjectStatus",
|
||||
"ObjectStatusColumn",
|
||||
"ObjectIsNew",
|
||||
"ObjectIsArchived",
|
||||
"ObjectForeignKey",
|
||||
"ObjectPlugin",
|
||||
"AppEventType"
|
||||
)
|
||||
VALUES (
|
||||
|
||||
lower(
|
||||
hex(randomblob(4)) || '-' || hex(randomblob(2)) || '-' || '4' ||
|
||||
substr(hex( randomblob(2)), 2) || '-' ||
|
||||
substr('AB89', 1 + (abs(random()) % 4) , 1) ||
|
||||
substr(hex(randomblob(2)), 2) || '-' ||
|
||||
hex(randomblob(6))
|
||||
)
|
||||
,
|
||||
DATETIME('now'),
|
||||
FALSE,
|
||||
'Devices',
|
||||
OLD.devGUID, -- ObjectGUID
|
||||
OLD.devMac, -- ObjectPrimaryID
|
||||
OLD.devLastIP, -- ObjectSecondaryID
|
||||
CASE WHEN OLD.devPresentLastScan = 1 THEN 'online' ELSE 'offline' END, -- ObjectStatus
|
||||
'devPresentLastScan', -- ObjectStatusColumn
|
||||
OLD.devIsNew, -- ObjectIsNew
|
||||
OLD.devIsArchived, -- ObjectIsArchived
|
||||
OLD.devGUID, -- ObjectForeignKey
|
||||
'DEVICES', -- ObjectForeignKey
|
||||
'delete'
|
||||
);
|
||||
END;
|
||||
Binary file not shown.
|
Before Width: | Height: | Size: 170 KiB After Width: | Height: | Size: 201 KiB |
204
front/index.php
204
front/index.php
@@ -3,76 +3,149 @@
|
||||
|
||||
<?php
|
||||
|
||||
//------------------------------------------------------------------------------
|
||||
// check if authenticated
|
||||
// Be CAREFUL WHEN INCLUDING NEW PHP FILES
|
||||
require_once $_SERVER['DOCUMENT_ROOT'] . '/php/server/db.php';
|
||||
require_once $_SERVER['DOCUMENT_ROOT'] . '/php/templates/language/lang.php';
|
||||
require_once $_SERVER['DOCUMENT_ROOT'] . '/php/templates/security.php';
|
||||
require_once $_SERVER['DOCUMENT_ROOT'].'/php/server/db.php';
|
||||
require_once $_SERVER['DOCUMENT_ROOT'].'/php/templates/language/lang.php';
|
||||
require_once $_SERVER['DOCUMENT_ROOT'].'/php/templates/security.php';
|
||||
|
||||
$CookieSaveLoginName = 'NetAlertX_SaveLogin';
|
||||
const DEFAULT_REDIRECT = '/devices.php';
|
||||
|
||||
if ($nax_WebProtection != 'true')
|
||||
{
|
||||
header('Location: devices.php');
|
||||
$_SESSION["login"] = 1;
|
||||
/* =====================================================
|
||||
Helper Functions
|
||||
===================================================== */
|
||||
|
||||
function safe_redirect(string $path): void {
|
||||
header("Location: {$path}", true, 302);
|
||||
exit;
|
||||
}
|
||||
|
||||
// Logout
|
||||
if (isset ($_GET["action"]) && $_GET["action"] == 'logout')
|
||||
{
|
||||
setcookie($CookieSaveLoginName, '', time()+1); // reset cookie
|
||||
$_SESSION["login"] = 0;
|
||||
header('Location: index.php');
|
||||
exit;
|
||||
function validate_local_path(?string $encoded): string {
|
||||
if (!$encoded) return DEFAULT_REDIRECT;
|
||||
|
||||
$decoded = base64_decode($encoded, true);
|
||||
if ($decoded === false) {
|
||||
return DEFAULT_REDIRECT;
|
||||
}
|
||||
|
||||
// strict local path check (allow safe query strings + fragments)
|
||||
// Using ~ as the delimiter instead of #
|
||||
if (!preg_match('~^(?!//)(?!.*://)/[a-zA-Z0-9_\-./?=&:%#]*$~', $decoded)) {
|
||||
return DEFAULT_REDIRECT;
|
||||
}
|
||||
|
||||
return $decoded;
|
||||
}
|
||||
|
||||
// Password without Cookie check -> pass and set initial cookie
|
||||
if (isset ($_POST["loginpassword"]) && $nax_Password === hash('sha256',$_POST["loginpassword"]))
|
||||
{
|
||||
header('Location: devices.php');
|
||||
$_SESSION["login"] = 1;
|
||||
if (isset($_POST['PWRemember'])) {setcookie($CookieSaveLoginName, hash('sha256',$_POST["loginpassword"]), time()+604800);}
|
||||
function extract_hash_from_path(string $path): array {
|
||||
/*
|
||||
Split a path into path and hash components.
|
||||
|
||||
For deep links encoded in the 'next' parameter like /devices.php#device-123,
|
||||
extract the hash fragment so it can be properly included in the redirect.
|
||||
|
||||
Args:
|
||||
path: Full path potentially with hash (e.g., "/devices.php#device-123")
|
||||
|
||||
Returns:
|
||||
Array with keys 'path' (without hash) and 'hash' (with # prefix, or empty string)
|
||||
*/
|
||||
$parts = explode('#', $path, 2);
|
||||
return [
|
||||
'path' => $parts[0],
|
||||
'hash' => !empty($parts[1]) ? '#' . $parts[1] : ''
|
||||
];
|
||||
}
|
||||
|
||||
// active Session or valid cookie (cookie not extends)
|
||||
if (( isset ($_SESSION["login"]) && ($_SESSION["login"] == 1)) || (isset ($_COOKIE[$CookieSaveLoginName]) && $nax_Password === $_COOKIE[$CookieSaveLoginName]))
|
||||
{
|
||||
header('Location: devices.php');
|
||||
$_SESSION["login"] = 1;
|
||||
if (isset($_POST['PWRemember'])) {setcookie($CookieSaveLoginName, hash('sha256',$_POST["loginpassword"]), time()+604800);}
|
||||
function append_hash(string $url): string {
|
||||
// First check if the URL already has a hash from the deep link
|
||||
$parts = extract_hash_from_path($url);
|
||||
if (!empty($parts['hash'])) {
|
||||
return $parts['path'] . $parts['hash'];
|
||||
}
|
||||
|
||||
// Fall back to POST url_hash (for browser-captured hashes)
|
||||
if (!empty($_POST['url_hash'])) {
|
||||
$sanitized = preg_replace('/[^#a-zA-Z0-9_\-]/', '', $_POST['url_hash']);
|
||||
if (str_starts_with($sanitized, '#')) {
|
||||
return $url . $sanitized;
|
||||
}
|
||||
}
|
||||
return $url;
|
||||
}
|
||||
|
||||
function is_authenticated(): bool {
|
||||
return isset($_SESSION['login']) && $_SESSION['login'] === 1;
|
||||
}
|
||||
|
||||
function login_user(): void {
|
||||
$_SESSION['login'] = 1;
|
||||
session_regenerate_id(true);
|
||||
}
|
||||
|
||||
|
||||
function logout_user(): void {
|
||||
$_SESSION = [];
|
||||
session_destroy();
|
||||
}
|
||||
|
||||
/* =====================================================
|
||||
Redirect Handling
|
||||
===================================================== */
|
||||
|
||||
$redirectTo = validate_local_path($_GET['next'] ?? null);
|
||||
|
||||
/* =====================================================
|
||||
Web Protection Disabled
|
||||
===================================================== */
|
||||
|
||||
if ($nax_WebProtection !== 'true') {
|
||||
if (!is_authenticated()) {
|
||||
login_user();
|
||||
}
|
||||
safe_redirect(append_hash($redirectTo));
|
||||
}
|
||||
|
||||
/* =====================================================
|
||||
Login Attempt
|
||||
===================================================== */
|
||||
|
||||
if (!empty($_POST['loginpassword'])) {
|
||||
|
||||
$incomingHash = hash('sha256', $_POST['loginpassword']);
|
||||
|
||||
if (hash_equals($nax_Password, $incomingHash)) {
|
||||
|
||||
login_user();
|
||||
|
||||
// Redirect to target page, preserving deep link hash if present
|
||||
safe_redirect(append_hash($redirectTo));
|
||||
}
|
||||
}
|
||||
|
||||
/* =====================================================
|
||||
Already Logged In
|
||||
===================================================== */
|
||||
|
||||
if (is_authenticated()) {
|
||||
safe_redirect(append_hash($redirectTo));
|
||||
}
|
||||
|
||||
/* =====================================================
|
||||
Login UI Variables
|
||||
===================================================== */
|
||||
|
||||
$login_headline = lang('Login_Toggle_Info_headline');
|
||||
$login_info = lang('Login_Info');
|
||||
$login_mode = 'danger';
|
||||
$login_display_mode = 'display: block;';
|
||||
$login_icon = 'fa-info';
|
||||
$login_info = lang('Login_Info');
|
||||
$login_mode = 'info';
|
||||
$login_display_mode = 'display:none;';
|
||||
$login_icon = 'fa-info';
|
||||
|
||||
// no active session, cookie not checked
|
||||
if (isset ($_SESSION["login"]) == FALSE || $_SESSION["login"] != 1)
|
||||
{
|
||||
if ($nax_Password === '8d969eef6ecad3c29a3a629280e686cf0c3f5d5a86aff3ca12020c923adc6c92')
|
||||
{
|
||||
if ($nax_Password === '8d969eef6ecad3c29a3a629280e686cf0c3f5d5a86aff3ca12020c923adc6c92') {
|
||||
$login_info = lang('Login_Default_PWD');
|
||||
$login_mode = 'danger';
|
||||
$login_display_mode = 'display: block;';
|
||||
$login_display_mode = 'display:block;';
|
||||
$login_headline = lang('Login_Toggle_Alert_headline');
|
||||
$login_icon = 'fa-ban';
|
||||
}
|
||||
else
|
||||
{
|
||||
$login_mode = 'info';
|
||||
$login_display_mode = 'display: none;';
|
||||
$login_headline = lang('Login_Toggle_Info_headline');
|
||||
$login_icon = 'fa-info';
|
||||
}
|
||||
}
|
||||
|
||||
// ##################################################
|
||||
// ## Login Processing end
|
||||
// ##################################################
|
||||
?>
|
||||
|
||||
<!DOCTYPE html>
|
||||
@@ -109,27 +182,21 @@ if (isset ($_SESSION["login"]) == FALSE || $_SESSION["login"] != 1)
|
||||
<!-- /.login-logo -->
|
||||
<div class="login-box-body">
|
||||
<p class="login-box-msg"><?= lang('Login_Box');?></p>
|
||||
<form action="index.php" method="post">
|
||||
<form action="index.php<?php
|
||||
echo !empty($_GET['next'])
|
||||
? '?next=' . htmlspecialchars($_GET['next'], ENT_QUOTES, 'UTF-8')
|
||||
: '';
|
||||
?>" method="post">
|
||||
<div class="form-group has-feedback">
|
||||
<input type="hidden" name="url_hash" id="url_hash">
|
||||
<input type="password" class="form-control" placeholder="<?= lang('Login_Psw-box');?>" name="loginpassword">
|
||||
<span class="glyphicon glyphicon-lock form-control-feedback"></span>
|
||||
</div>
|
||||
<div class="row">
|
||||
<div class="col-xs-8">
|
||||
<div class="checkbox icheck">
|
||||
<label>
|
||||
<input type="checkbox" name="PWRemember">
|
||||
<div style="margin-left: 10px; display: inline-block; vertical-align: top;">
|
||||
<?= lang('Login_Remember');?><br><span style="font-size: smaller"><?= lang('Login_Remember_small');?></span>
|
||||
</div>
|
||||
</label>
|
||||
</div>
|
||||
</div>
|
||||
<!-- /.col -->
|
||||
<div class="col-xs-4" style="padding-top: 10px;">
|
||||
<div class="col-xs-12">
|
||||
<button type="submit" class="btn btn-primary btn-block btn-flat"><?= lang('Login_Submit');?></button>
|
||||
</div>
|
||||
<!-- /.col -->
|
||||
<!-- /.col -->
|
||||
</div>
|
||||
</form>
|
||||
|
||||
@@ -159,6 +226,9 @@ if (isset ($_SESSION["login"]) == FALSE || $_SESSION["login"] != 1)
|
||||
<!-- iCheck -->
|
||||
<script src="lib/iCheck/icheck.min.js"></script>
|
||||
<script>
|
||||
if (window.location.hash) {
|
||||
document.getElementById('url_hash').value = window.location.hash;
|
||||
}
|
||||
$(function () {
|
||||
$('input').iCheck({
|
||||
checkboxClass: 'icheckbox_square-blue',
|
||||
@@ -174,7 +244,7 @@ function Passwordhinfo() {
|
||||
} else {
|
||||
x.style.display = "none";
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
</script>
|
||||
</body>
|
||||
|
||||
23
front/lib/treeviz/treeviz.iife.js
Normal file
23
front/lib/treeviz/treeviz.iife.js
Normal file
File diff suppressed because one or more lines are too long
0
front/lib/treeviz/bundle.js → front/lib/treeviz/treeviz.iife.old.js
Executable file → Normal file
0
front/lib/treeviz/bundle.js → front/lib/treeviz/treeviz.iife.old.js
Executable file → Normal file
@@ -69,7 +69,8 @@
|
||||
require 'php/templates/footer.php';
|
||||
?>
|
||||
|
||||
<script src="lib/treeviz/bundle.js"></script>
|
||||
<!-- <script src="lib/treeviz/bundle.js"></script> -->
|
||||
<script src="lib/treeviz/treeviz.iife.js"></script>
|
||||
|
||||
<script defer>
|
||||
|
||||
@@ -388,7 +389,7 @@
|
||||
const normalized_mac = node_mac.toLowerCase();
|
||||
|
||||
const sql = `
|
||||
SELECT devName, devMac, devLastIP, devVendor, devPresentLastScan, devAlertDown, devParentPort,
|
||||
SELECT devName, devMac, devLastIP, devVendor, devPresentLastScan, devAlertDown, devParentPort, devVlan,
|
||||
CASE
|
||||
WHEN devIsNew = 1 THEN 'New'
|
||||
WHEN devPresentLastScan = 1 THEN 'On-line'
|
||||
@@ -569,10 +570,10 @@ function getChildren(node, list, path, visited = [])
|
||||
// Loop through all items to find children of the current node
|
||||
for (var i in list) {
|
||||
const item = list[i];
|
||||
const parentMac = item.devParentMAC || ""; // null-safe
|
||||
const nodeMac = node.devMac || ""; // null-safe
|
||||
const parentMac = item.devParentMAC?.toLowerCase() || ""; // null-safe
|
||||
const nodeMac = node.devMac?.toLowerCase() || ""; // null-safe
|
||||
|
||||
if (parentMac != "" && parentMac.toLowerCase() == nodeMac.toLowerCase() && !hiddenMacs.includes(parentMac)) {
|
||||
if (parentMac != "" && parentMac == nodeMac && !hiddenMacs.includes(parentMac)) {
|
||||
|
||||
visibleNodesCount++;
|
||||
|
||||
@@ -588,6 +589,8 @@ function getChildren(node, list, path, visited = [])
|
||||
parentNodesCount++;
|
||||
}
|
||||
|
||||
// console.log(node);
|
||||
|
||||
return {
|
||||
name: node.devName,
|
||||
path: path,
|
||||
@@ -607,6 +610,8 @@ function getChildren(node, list, path, visited = [])
|
||||
alertDown: node.devAlertDown,
|
||||
hasChildren: children.length > 0 || hiddenMacs.includes(node.devMac),
|
||||
relType: node.devParentRelType,
|
||||
devVlan: node.devVlan,
|
||||
devSSID: node.devSSID,
|
||||
hiddenChildren: hiddenMacs.includes(node.devMac),
|
||||
qty: children.length,
|
||||
children: children
|
||||
@@ -649,6 +654,8 @@ function toggleSubTree(parentMac, treePath)
|
||||
{
|
||||
treePath = treePath.split('|')
|
||||
|
||||
parentMac = parentMac.toLowerCase()
|
||||
|
||||
if(!hiddenMacs.includes(parentMac))
|
||||
{
|
||||
hiddenMacs.push(parentMac)
|
||||
@@ -883,6 +890,22 @@ function initTree(myHierarchy)
|
||||
idKey: "mac",
|
||||
hasFlatData: false,
|
||||
relationnalField: "children",
|
||||
linkLabel: {
|
||||
render: (parent, child) => {
|
||||
// Return text or HTML to display on the connection line
|
||||
connectionLabel = (child?.data.devVlan ?? "") + "/" + (child?.data.devSSID ?? "");
|
||||
if(connectionLabel == "/")
|
||||
{
|
||||
connectionLabel = "";
|
||||
}
|
||||
|
||||
return connectionLabel;
|
||||
// or with HTML:
|
||||
// return "<tspan><strong>reports to</strong></tspan>";
|
||||
},
|
||||
color: "#336c87ff", // Label text color (optional)
|
||||
fontSize: nodeHeightPx - 5 // Label font size in px (optional)
|
||||
},
|
||||
linkWidth: (nodeData) => 2,
|
||||
linkColor: (nodeData) => {
|
||||
relConf = getRelationshipConf(nodeData.data.relType)
|
||||
|
||||
@@ -27,7 +27,7 @@
|
||||
"AppEvents_ObjectType": "Тип объекта",
|
||||
"AppEvents_Plugin": "Плагин",
|
||||
"AppEvents_Type": "Тип",
|
||||
"BACKEND_API_URL_description": "Используется для создания URL-адресов серверного API. Укажите, используете ли вы обратный прокси-сервер для сопоставления с вашим <code>GRAPHQL_PORT</code>. Введите полный URL-адрес, начинающийся с <code>http://</code>, включая номер порта (без косой черты <code>/</code>).",
|
||||
"BACKEND_API_URL_description": "Используется для обеспечения связи между фронтендом и бэкендом. По умолчанию это значение установлено на <code>/server</code> и, как правило, не должно изменяться.",
|
||||
"BACKEND_API_URL_name": "URL-адрес серверного API",
|
||||
"BackDevDetail_Actions_Ask_Run": "Вы хотите выполнить действие?",
|
||||
"BackDevDetail_Actions_Not_Registered": "Действие не зарегистрировано:· ",
|
||||
|
||||
@@ -1,5 +1,10 @@
|
||||
<?php
|
||||
|
||||
// Start session if not already started
|
||||
if (session_status() == PHP_SESSION_NONE) {
|
||||
session_start();
|
||||
}
|
||||
|
||||
// Constants
|
||||
$configFolderPath = rtrim(getenv('NETALERTX_CONFIG') ?: '/data/config', '/');
|
||||
$legacyConfigPath = $_SERVER['DOCUMENT_ROOT'] . "/../config/app.conf";
|
||||
@@ -45,10 +50,6 @@ $isLogonPage = ($parsedUrl === '/' || $parsedUrl === '/index.php');
|
||||
$authHeader = apache_request_headers()['Authorization'] ?? '';
|
||||
$sessionLogin = isset($_SESSION['login']) ? $_SESSION['login'] : 0;
|
||||
|
||||
// Start session if not already started
|
||||
if (session_status() == PHP_SESSION_NONE) {
|
||||
session_start();
|
||||
}
|
||||
|
||||
// Handle logout
|
||||
if (!empty($_REQUEST['action']) && $_REQUEST['action'] == 'logout') {
|
||||
@@ -86,7 +87,8 @@ if ($nax_WebProtection == 'true') {
|
||||
// Logged in or stay on this page if we are on the index.php already
|
||||
} else {
|
||||
// We need to redirect
|
||||
redirect('/index.php');
|
||||
$returnUrl = rawurlencode(base64_encode($_SERVER['REQUEST_URI']));
|
||||
redirect("/index.php?next=" . $returnUrl);
|
||||
exit; // exit is needed to prevent authentication bypass
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1516,18 +1516,30 @@
|
||||
},
|
||||
{
|
||||
"function": "devSSID",
|
||||
"events": [
|
||||
"add_option"
|
||||
],
|
||||
"type": {
|
||||
"dataType": "string",
|
||||
"elements": [
|
||||
{
|
||||
"elementType": "input",
|
||||
"elementType": "select",
|
||||
"elementOptions": [],
|
||||
"transformers": []
|
||||
}
|
||||
]
|
||||
},
|
||||
"default_value": "",
|
||||
"options": [],
|
||||
"options": [
|
||||
"{value}"
|
||||
],
|
||||
"options_params": [
|
||||
{
|
||||
"name": "value",
|
||||
"type": "sql",
|
||||
"value": "SELECT DISTINCT '' as id, '❌None' as name UNION SELECT devSSID as id, devSSID as name FROM (SELECT devSSID FROM Devices) AS all_devices ORDER BY id;"
|
||||
}
|
||||
],
|
||||
"localized": [
|
||||
"name",
|
||||
"description"
|
||||
@@ -1590,11 +1602,14 @@
|
||||
},
|
||||
{
|
||||
"function": "devVlan",
|
||||
"events": [
|
||||
"add_option"
|
||||
],
|
||||
"type": {
|
||||
"dataType": "string",
|
||||
"elements": [
|
||||
{
|
||||
"elementType": "input",
|
||||
"elementType": "select",
|
||||
"elementOptions": [],
|
||||
"transformers": []
|
||||
}
|
||||
@@ -1602,7 +1617,16 @@
|
||||
},
|
||||
"maxLength": 50,
|
||||
"default_value": "",
|
||||
"options": [],
|
||||
"options": [
|
||||
"{value}"
|
||||
],
|
||||
"options_params": [
|
||||
{
|
||||
"name": "value",
|
||||
"type": "sql",
|
||||
"value": "SELECT DISTINCT '' as id, '❌None' as name UNION SELECT devVlan as id, devVlan as name FROM (SELECT devVlan FROM Devices) AS all_devices ORDER BY id;"
|
||||
}
|
||||
],
|
||||
"localized": [
|
||||
"name",
|
||||
"description"
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
## Overview
|
||||
|
||||
A plugin allowing for importing devices from a UniFi controller. The plugin also tries to import the network map.
|
||||
A plugin allowing for importing devices from a UniFi controller. The plugin also tries to import the network map.
|
||||
|
||||
### Usage
|
||||
|
||||
@@ -9,19 +9,19 @@ Specify the following settings in the Settings section of NetAlertX:
|
||||
- `UNFIMP_username` - Username used to log in the UNIFI controller.
|
||||
- `UNFIMP_password` - Password used to log in the UNIFI controller.
|
||||
- `UNFIMP_host` - Host URL or IP address where the UNIFI controller is hosted (excluding `http://`)
|
||||
- `UNFIMP_sites` - Name of the sites (usually 'default', check the URL in your UniFi controller UI if unsure. The site id is in the following part of the URL: `https://192.168.1.1:8443/manage/site/this-is-the-site-id/settings/`).
|
||||
- `UNFIMP_sites` - Name of the sites (usually 'default', check the URL in your UniFi controller UI if unsure. The site id is in the following part of the URL: `https://192.168.1.1:8443/manage/site/this-is-the-site-id/settings/`).
|
||||
- `UNFIMP_protocol` - https:// or http://
|
||||
- `UNFIMP_port` - Usually `8443`, `8843`, or `443`
|
||||
- `UNFIMP_port` - Usually `8443`, `8843`, or `443` (UPSTREAM BUG: Setting `UNFIMP_version='UDMP-unifiOS'` will force `443` see [#1524](https://github.com/netalertx/NetAlertX/issues/1524) or switch to the `UNIFIAPI` plugin)
|
||||
- `UNFIMP_version` - see below table for details
|
||||
|
||||
|
||||
#### Config overview
|
||||
|
||||
| Controller | `UNFIMP_version` | `UNFIMP_port` |
|
||||
| ------------------------------------------------------ | ------------------------- | ---------------- |
|
||||
| Cloud Gateway Ultra / UCK cloudkey V2 plus (v4.0.18) | `UDMP-unifiOS` | `443` |
|
||||
| Docker hosted | `v5` | `8443` (usually) |
|
||||
| Controller | `UNFIMP_version` | `UNFIMP_port` |
|
||||
| ------------------------------------------------------ | ------------------------- | ------------------------------------ |
|
||||
| Cloud Gateway Ultra / UCK cloudkey V2 plus (v4.0.18) | `UDMP-unifiOS` | `443` (BUG: always forced) |
|
||||
| Docker hosted | `v5` | `8443` (usually) |
|
||||
|
||||
### Notes
|
||||
|
||||
- It is recommended to create a read-only user in your UniFi controller
|
||||
- It is recommended to create a read-only user in your UniFi controller
|
||||
@@ -25,7 +25,7 @@ fi
|
||||
apt-get install -y \
|
||||
tini snmp ca-certificates curl libwww-perl arp-scan perl apt-utils cron sudo gettext-base \
|
||||
nginx-light php php-cgi php-fpm php-sqlite3 php-curl sqlite3 dnsutils net-tools \
|
||||
python3 python3-dev iproute2 nmap fping python3-pip zip usbutils traceroute nbtscan avahi-daemon avahi-utils openrc build-essential git
|
||||
python3 python3-dev python3-psutil iproute2 nmap fping python3-pip zip usbutils traceroute nbtscan avahi-daemon avahi-utils openrc build-essential git
|
||||
|
||||
# alternate dependencies
|
||||
sudo apt-get install nginx nginx-core mtr php-fpm php8.2-fpm php-cli php8.2 php8.2-sqlite3 -y
|
||||
|
||||
@@ -0,0 +1,36 @@
|
||||
#!/bin/bash
|
||||
# 36-override-loaded-plugins.sh - Applies environment variable overrides to app.conf
|
||||
|
||||
set -eu
|
||||
|
||||
# Ensure config exists
|
||||
if [ ! -f "${NETALERTX_CONFIG}/app.conf" ]; then
|
||||
echo "[ENV] No config file found at ${NETALERTX_CONFIG}/app.conf — skipping overrides"
|
||||
exit 0
|
||||
fi
|
||||
|
||||
# Helper: set or append config key safely
|
||||
set_config_value() {
|
||||
_key="$1"
|
||||
_value="$2"
|
||||
|
||||
# Remove newlines just in case
|
||||
_value=$(printf '%s' "$_value" | tr -d '\n\r')
|
||||
|
||||
# Escape sed-sensitive chars
|
||||
_escaped=$(printf '%s\n' "$_value" | sed 's/[\/&]/\\&/g')
|
||||
|
||||
if grep -q "^${_key}=" "${NETALERTX_CONFIG}/app.conf"; then
|
||||
sed -i "s|^${_key}=.*|${_key}=${_escaped}|" "${NETALERTX_CONFIG}/app.conf"
|
||||
else
|
||||
echo "${_key}=${_value}" >> "${NETALERTX_CONFIG}/app.conf"
|
||||
fi
|
||||
}
|
||||
|
||||
# ------------------------------------------------------------
|
||||
# LOADED_PLUGINS override
|
||||
# ------------------------------------------------------------
|
||||
if [ -n "${LOADED_PLUGINS:-}" ]; then
|
||||
echo "[ENV] Applying LOADED_PLUGINS override"
|
||||
set_config_value "LOADED_PLUGINS" "$LOADED_PLUGINS"
|
||||
fi
|
||||
@@ -156,7 +156,7 @@ fi
|
||||
apt-get install -y --no-install-recommends \
|
||||
tini snmp ca-certificates curl libwww-perl arp-scan perl apt-utils cron sudo \
|
||||
php8.4 php8.4-cgi php8.4-fpm php8.4-sqlite3 php8.4-curl sqlite3 dnsutils net-tools mtr \
|
||||
python3 python3-dev iproute2 nmap fping python3-pip zip usbutils traceroute nbtscan \
|
||||
python3 python3-dev python3-psutil iproute2 nmap fping python3-pip zip usbutils traceroute nbtscan \
|
||||
avahi-daemon avahi-utils build-essential git gnupg2 lsb-release \
|
||||
debian-archive-keyring python3-venv
|
||||
|
||||
|
||||
@@ -4,7 +4,7 @@ aiohttp
|
||||
graphene
|
||||
flask
|
||||
flask-cors
|
||||
unifi-sm-api
|
||||
unifi-sm-api>=0.2.3
|
||||
tplink-omada-client
|
||||
wakeonlan
|
||||
pycryptodome
|
||||
@@ -22,5 +22,6 @@ python-nmap
|
||||
dnspython
|
||||
librouteros
|
||||
yattag
|
||||
zeroconf
|
||||
zeroconf
|
||||
psutil
|
||||
git+https://github.com/foreign-sub/aiofreepybox.git
|
||||
|
||||
@@ -58,7 +58,7 @@ apt-get install -y --no-install-recommends \
|
||||
git \
|
||||
tini ca-certificates curl libwww-perl perl apt-utils cron build-essential \
|
||||
sqlite3 net-tools \
|
||||
python3 python3-venv python3-dev python3-pip
|
||||
python3 python3-venv python3-dev python3-psutil python3-pip
|
||||
|
||||
# Install plugin dependencies
|
||||
apt-get install -y --no-install-recommends \
|
||||
|
||||
@@ -4,7 +4,7 @@ aiohttp
|
||||
graphene
|
||||
flask
|
||||
flask-cors
|
||||
unifi-sm-api
|
||||
unifi-sm-api>=0.2.3
|
||||
tplink-omada-client
|
||||
wakeonlan
|
||||
pycryptodome
|
||||
@@ -22,5 +22,6 @@ python-nmap
|
||||
dnspython
|
||||
librouteros
|
||||
yattag
|
||||
zeroconf
|
||||
zeroconf
|
||||
psutil
|
||||
git+https://github.com/foreign-sub/aiofreepybox.git
|
||||
|
||||
@@ -5,7 +5,7 @@ aiohttp
|
||||
graphene
|
||||
flask
|
||||
flask-cors
|
||||
unifi-sm-api
|
||||
unifi-sm-api>=0.2.3
|
||||
tplink-omada-client
|
||||
wakeonlan
|
||||
pycryptodome
|
||||
@@ -32,4 +32,5 @@ httplib2
|
||||
gunicorn
|
||||
git+https://github.com/foreign-sub/aiofreepybox.git
|
||||
mcp
|
||||
psutil
|
||||
pydantic>=2.0,<3.0
|
||||
|
||||
@@ -75,7 +75,7 @@ from .openapi.schemas import ( # noqa: E402 [flake8 lint suppression]
|
||||
BaseResponse, DeviceTotalsResponse,
|
||||
DeviceTotalsNamedResponse,
|
||||
EventsTotalsNamedResponse,
|
||||
DeleteDevicesRequest, DeviceImportRequest,
|
||||
DeleteDevicesRequest,
|
||||
DeviceImportResponse, UpdateDeviceColumnRequest,
|
||||
LockDeviceFieldRequest, UnlockDeviceFieldsRequest,
|
||||
CopyDeviceRequest, TriggerScanRequest,
|
||||
@@ -94,7 +94,7 @@ from .openapi.schemas import ( # noqa: E402 [flake8 lint suppression]
|
||||
DbQueryRequest, DbQueryResponse,
|
||||
DbQueryUpdateRequest, DbQueryDeleteRequest,
|
||||
AddToQueueRequest, GetSettingResponse,
|
||||
RecentEventsRequest, SetDeviceAliasRequest
|
||||
RecentEventsRequest, SetDeviceAliasRequest,
|
||||
)
|
||||
|
||||
from .sse_endpoint import ( # noqa: E402 [flake8 lint suppression]
|
||||
@@ -728,7 +728,7 @@ def api_export_devices(format=None, payload=None):
|
||||
operation_id="import_devices",
|
||||
summary="Import Devices",
|
||||
description="Import devices from CSV or JSON content.",
|
||||
request_model=DeviceImportRequest,
|
||||
request_model=None,
|
||||
response_model=DeviceImportResponse,
|
||||
tags=["devices"],
|
||||
auth_callable=is_authorized,
|
||||
@@ -1933,6 +1933,9 @@ def check_auth(payload=None):
|
||||
return jsonify({"success": True, "message": "Authentication check successful"}), 200
|
||||
|
||||
|
||||
# Remember Me is now implemented via cookies only (no API endpoints required)
|
||||
|
||||
|
||||
# --------------------------
|
||||
# Health endpoint
|
||||
# --------------------------
|
||||
|
||||
@@ -54,6 +54,7 @@ def get_mem_usage_pct():
|
||||
mylog("verbose", [f"[health] Error calculating memory usage: {e}"])
|
||||
return None
|
||||
|
||||
|
||||
def get_load_avg_1m():
|
||||
"""
|
||||
Get 1-minute load average.
|
||||
@@ -90,6 +91,7 @@ def get_storage_pct():
|
||||
mylog("verbose", [f"[health] Error calculating storage usage: {e}"])
|
||||
return None
|
||||
|
||||
|
||||
def get_cpu_temp():
|
||||
"""
|
||||
Get CPU temperature from hardware sensors if available.
|
||||
@@ -117,6 +119,42 @@ def get_cpu_temp():
|
||||
return None
|
||||
|
||||
|
||||
def get_mem_mb():
|
||||
"""
|
||||
Get total system memory in MB.
|
||||
|
||||
Returns:
|
||||
int: Total memory in MB, or None on error.
|
||||
"""
|
||||
try:
|
||||
vm = psutil.virtual_memory()
|
||||
total_mb = int(vm.total / (1024 * 1024))
|
||||
return total_mb
|
||||
|
||||
except Exception as e:
|
||||
mylog("verbose", [f"[health] Error getting memory size: {e}"])
|
||||
return None
|
||||
|
||||
|
||||
def get_storage_gb():
|
||||
"""
|
||||
Get total storage size of /data in GB.
|
||||
|
||||
Returns:
|
||||
float: Total storage in GB, or None on error.
|
||||
"""
|
||||
try:
|
||||
stat = os.statvfs(dataPath)
|
||||
total = stat.f_blocks * stat.f_frsize
|
||||
|
||||
gb = round(total / (1024 ** 3), 2)
|
||||
return gb
|
||||
|
||||
except Exception as e:
|
||||
mylog("verbose", [f"[health] Error getting storage size: {e}"])
|
||||
return None
|
||||
|
||||
|
||||
# ===============================================================================
|
||||
# Aggregator
|
||||
# ===============================================================================
|
||||
@@ -134,4 +172,6 @@ def get_health_status():
|
||||
"load_1m": get_load_avg_1m(),
|
||||
"storage_pct": get_storage_pct(),
|
||||
"cpu_temp": get_cpu_temp(),
|
||||
"storage_gb": get_storage_gb(),
|
||||
"mem_mb": get_mem_mb(),
|
||||
}
|
||||
|
||||
@@ -667,7 +667,9 @@ class HealthCheckResponse(BaseResponse):
|
||||
"mem_usage_pct": 65,
|
||||
"load_1m": 2.15,
|
||||
"storage_pct": 42,
|
||||
"cpu_temp": 58
|
||||
"cpu_temp": 58,
|
||||
"storage_gb": 8,
|
||||
"mem_mb" : 8192
|
||||
}]
|
||||
}
|
||||
)
|
||||
@@ -677,6 +679,8 @@ class HealthCheckResponse(BaseResponse):
|
||||
load_1m: float = Field(..., description="1-minute load average")
|
||||
storage_pct: Optional[int] = Field(None, ge=0, le=100, description="Disk usage percentage of /data mount (0-100, nullable if unavailable)")
|
||||
cpu_temp: Optional[int] = Field(None, description="CPU temperature in Celsius (nullable if unavailable)")
|
||||
storage_gb: Optional[int] = Field(..., description="Storage size in GB")
|
||||
mem_mb: Optional[int] = Field(..., description="Installed memory size in MB")
|
||||
|
||||
|
||||
# =============================================================================
|
||||
@@ -1030,8 +1034,6 @@ class GetSettingResponse(BaseResponse):
|
||||
# =============================================================================
|
||||
# GRAPHQL SCHEMAS
|
||||
# =============================================================================
|
||||
|
||||
|
||||
class GraphQLRequest(BaseModel):
|
||||
"""Request payload for GraphQL queries."""
|
||||
query: str = Field(..., description="GraphQL query string", json_schema_extra={"examples": ["{ devices { devMac devName } }"]})
|
||||
|
||||
@@ -29,6 +29,7 @@ def get_device_conditions():
|
||||
"unknown": f"WHERE devIsArchived=0 AND devName in ({NULL_EQUIVALENTS_SQL})",
|
||||
"known": f"WHERE devIsArchived=0 AND devName not in ({NULL_EQUIVALENTS_SQL})",
|
||||
"favorites_offline": "WHERE devIsArchived=0 AND devFavorite=1 AND devPresentLastScan=0",
|
||||
"new_online": "WHERE devIsArchived=0 AND devIsNew=1 AND devPresentLastScan=0",
|
||||
}
|
||||
|
||||
return conditions
|
||||
|
||||
@@ -378,8 +378,8 @@ def ensure_Parameters(sql) -> bool:
|
||||
|
||||
sql.execute("""
|
||||
CREATE TABLE "Parameters" (
|
||||
"par_ID" TEXT PRIMARY KEY,
|
||||
"par_Value" TEXT
|
||||
"parID" TEXT PRIMARY KEY,
|
||||
"parValue" TEXT
|
||||
);
|
||||
""")
|
||||
|
||||
|
||||
@@ -22,6 +22,10 @@ CREATE TABLE Devices (
|
||||
devFirstConnection DATETIME NOT NULL,
|
||||
devLastConnection DATETIME NOT NULL,
|
||||
devLastIP STRING (50) NOT NULL COLLATE NOCASE,
|
||||
devPrimaryIPv4 TEXT,
|
||||
devPrimaryIPv6 TEXT,
|
||||
devVlan TEXT,
|
||||
devForceStatus TEXT,
|
||||
devStaticIP BOOLEAN DEFAULT (0) NOT NULL CHECK (devStaticIP IN (0, 1)),
|
||||
devScan INTEGER DEFAULT (1) NOT NULL,
|
||||
devLogEvents BOOLEAN NOT NULL DEFAULT (1) CHECK (devLogEvents IN (0, 1)),
|
||||
@@ -42,7 +46,17 @@ CREATE TABLE Devices (
|
||||
devSSID TEXT,
|
||||
devSyncHubNode TEXT,
|
||||
devSourcePlugin TEXT,
|
||||
devFQDN TEXT,
|
||||
devFQDN TEXT,
|
||||
devMacSource TEXT,
|
||||
devNameSource TEXT,
|
||||
devFQDNSource TEXT,
|
||||
devLastIPSource TEXT,
|
||||
devVendorSource TEXT,
|
||||
devSSIDSource TEXT,
|
||||
devParentMACSource TEXT,
|
||||
devParentPortSource TEXT,
|
||||
devParentRelTypeSource TEXT,
|
||||
devVlanSource TEXT,
|
||||
"devCustomProps" TEXT);
|
||||
CREATE TABLE IF NOT EXISTS "Settings" (
|
||||
"setKey" TEXT,
|
||||
@@ -56,8 +70,8 @@ CREATE TABLE IF NOT EXISTS "Settings" (
|
||||
"setOverriddenByEnv" INTEGER
|
||||
);
|
||||
CREATE TABLE IF NOT EXISTS "Parameters" (
|
||||
"par_ID" TEXT PRIMARY KEY,
|
||||
"par_Value" TEXT
|
||||
"parID" TEXT PRIMARY KEY,
|
||||
"parValue" TEXT
|
||||
);
|
||||
CREATE TABLE Plugins_Objects(
|
||||
"Index" INTEGER,
|
||||
@@ -145,6 +159,7 @@ CREATE TABLE CurrentScan (
|
||||
scanSyncHubNode STRING(50),
|
||||
scanSite STRING(250),
|
||||
scanSSID STRING(250),
|
||||
scanVlan STRING(250),
|
||||
scanParentMAC STRING(250),
|
||||
scanParentPort STRING(250),
|
||||
scanType STRING(250),
|
||||
@@ -203,6 +218,13 @@ CREATE INDEX IDX_dev_Favorite ON Devices (devFavorite);
|
||||
CREATE INDEX IDX_dev_LastIP ON Devices (devLastIP);
|
||||
CREATE INDEX IDX_dev_NewDevice ON Devices (devIsNew);
|
||||
CREATE INDEX IDX_dev_Archived ON Devices (devIsArchived);
|
||||
CREATE UNIQUE INDEX IF NOT EXISTS idx_events_unique
|
||||
ON Events (
|
||||
eve_MAC,
|
||||
eve_IP,
|
||||
eve_EventType,
|
||||
eve_DateTime
|
||||
);
|
||||
CREATE VIEW Events_Devices AS
|
||||
SELECT *
|
||||
FROM Events
|
||||
@@ -408,4 +430,4 @@ CREATE TRIGGER "trg_delete_devices"
|
||||
'DEVICES', -- ObjectForeignKey
|
||||
'delete'
|
||||
);
|
||||
END;
|
||||
END;
|
||||
|
||||
@@ -100,7 +100,7 @@ def apply_timezone(data, fields):
|
||||
# ===============================================================================
|
||||
def get_notifications(db):
|
||||
"""
|
||||
Fetch notifications for all configured sections, applying timezone conversions.
|
||||
Fetch notifications for all configured sections.
|
||||
|
||||
Args:
|
||||
db: Database object with `.sql` for executing queries.
|
||||
@@ -126,10 +126,38 @@ def get_notifications(db):
|
||||
AND eve_MAC IN (SELECT devMac FROM Devices WHERE devAlertDown = 0)
|
||||
""")
|
||||
|
||||
sections = get_setting_value("NTFPRCS_INCLUDED_SECTIONS")
|
||||
sections = get_setting_value("NTFPRCS_INCLUDED_SECTIONS") or []
|
||||
mylog("verbose", ["[Notification] Included sections: ", sections])
|
||||
|
||||
# Define SQL templates per section
|
||||
# -------------------------
|
||||
# Helper: condition mapping
|
||||
# -------------------------
|
||||
def get_section_condition(section):
|
||||
"""
|
||||
Resolve condition setting key with backward compatibility.
|
||||
"""
|
||||
# New format
|
||||
key = f"NTFPRCS_{section}_condition"
|
||||
value = get_setting_value(key)
|
||||
|
||||
if value:
|
||||
return value
|
||||
|
||||
# Legacy keys
|
||||
legacy_map = {
|
||||
"new_devices": "NTFPRCS_new_dev_condition",
|
||||
"events": "NTFPRCS_event_condition",
|
||||
}
|
||||
|
||||
legacy_key = legacy_map.get(section)
|
||||
if legacy_key:
|
||||
return get_setting_value(legacy_key)
|
||||
|
||||
return ""
|
||||
|
||||
# -------------------------
|
||||
# SQL templates
|
||||
# -------------------------
|
||||
sql_templates = {
|
||||
"new_devices": """
|
||||
SELECT
|
||||
@@ -140,10 +168,11 @@ def get_notifications(db):
|
||||
devName as "Device name",
|
||||
devComments as Comments
|
||||
FROM Events_Devices
|
||||
WHERE eve_PendingAlertEmail = 1 AND eve_EventType = 'New Device' {condition}
|
||||
WHERE eve_PendingAlertEmail = 1
|
||||
AND eve_EventType = 'New Device' {condition}
|
||||
ORDER BY eve_DateTime
|
||||
""",
|
||||
"down_devices": f"""
|
||||
"down_devices": """
|
||||
SELECT
|
||||
devName,
|
||||
eve_MAC,
|
||||
@@ -154,7 +183,7 @@ def get_notifications(db):
|
||||
FROM Events_Devices AS down_events
|
||||
WHERE eve_PendingAlertEmail = 1
|
||||
AND down_events.eve_EventType = 'Device Down'
|
||||
AND eve_DateTime < datetime('now', '-{int(get_setting_value("NTFPRCS_alert_down_time") or 0)} minutes')
|
||||
AND eve_DateTime < datetime('now', '-0 minutes')
|
||||
AND NOT EXISTS (
|
||||
SELECT 1
|
||||
FROM Events AS connected_events
|
||||
@@ -214,43 +243,72 @@ def get_notifications(db):
|
||||
"plugins": "🔌 Plugins"
|
||||
}
|
||||
|
||||
final_json = {}
|
||||
# Sections that support dynamic conditions
|
||||
sections_with_conditions = {"new_devices", "events"}
|
||||
|
||||
# Pre-initialize final_json with all expected keys
|
||||
# Initialize final structure
|
||||
final_json = {}
|
||||
for section in ["new_devices", "down_devices", "down_reconnected", "events", "plugins"]:
|
||||
final_json[section] = []
|
||||
final_json[f"{section}_meta"] = {"title": section_titles.get(section, section), "columnNames": []}
|
||||
final_json[f"{section}_meta"] = {
|
||||
"title": section_titles.get(section, section),
|
||||
"columnNames": []
|
||||
}
|
||||
|
||||
condition_builder = create_safe_condition_builder()
|
||||
|
||||
# -------------------------
|
||||
# Main loop
|
||||
# -------------------------
|
||||
condition_builder = create_safe_condition_builder()
|
||||
|
||||
SECTION_CONDITION_MAP = {
|
||||
"new_devices": "NTFPRCS_new_dev_condition",
|
||||
"events": "NTFPRCS_event_condition",
|
||||
}
|
||||
|
||||
sections_with_conditions = set(SECTION_CONDITION_MAP.keys())
|
||||
|
||||
# Loop through each included section
|
||||
for section in sections:
|
||||
template = sql_templates.get(section)
|
||||
|
||||
if not template:
|
||||
mylog("verbose", ["[Notification] Unknown section: ", section])
|
||||
continue
|
||||
|
||||
safe_condition = ""
|
||||
parameters = {}
|
||||
|
||||
try:
|
||||
# Build safe condition for sections that support it
|
||||
condition_builder = create_safe_condition_builder()
|
||||
condition_setting = get_setting_value(f"NTFPRCS_{section}_condition")
|
||||
safe_condition, parameters = condition_builder.get_safe_condition_legacy(condition_setting)
|
||||
sqlQuery = sql_templates.get(section, "").format(condition=safe_condition)
|
||||
except Exception:
|
||||
# Fallback if safe condition fails
|
||||
sqlQuery = sql_templates.get(section, "").format(condition="")
|
||||
if section in sections_with_conditions:
|
||||
condition_key = SECTION_CONDITION_MAP.get(section)
|
||||
condition_setting = get_setting_value(condition_key)
|
||||
|
||||
if condition_setting:
|
||||
safe_condition, parameters = condition_builder.get_safe_condition_legacy(
|
||||
condition_setting
|
||||
)
|
||||
|
||||
sqlQuery = template.format(condition=safe_condition)
|
||||
|
||||
except Exception as e:
|
||||
mylog("verbose", [f"[Notification] Error building condition for {section}: ", e])
|
||||
sqlQuery = template.format(condition="")
|
||||
parameters = {}
|
||||
|
||||
mylog("debug", [f"[Notification] {section} SQL query: ", sqlQuery])
|
||||
mylog("debug", [f"[Notification] {section} parameters: ", parameters])
|
||||
|
||||
# Fetch data as JSON
|
||||
json_obj = db.get_table_as_json(sqlQuery, parameters)
|
||||
try:
|
||||
json_obj = db.get_table_as_json(sqlQuery, parameters)
|
||||
except Exception as e:
|
||||
mylog("minimal", [f"[Notification] DB error in section {section}: ", e])
|
||||
continue
|
||||
|
||||
mylog("debug", [f"[Notification] json_obj.json: {json.dumps(json_obj.json)}"])
|
||||
|
||||
# Apply timezone conversion
|
||||
json_obj.json["data"] = apply_timezone_to_json(json_obj, section=section)
|
||||
|
||||
# Save data and metadata
|
||||
final_json[section] = json_obj.json["data"]
|
||||
final_json[section] = json_obj.json.get("data", [])
|
||||
final_json[f"{section}_meta"] = {
|
||||
"title": section_titles.get(section, section),
|
||||
"columnNames": json_obj.columnNames
|
||||
"columnNames": getattr(json_obj, "columnNames", [])
|
||||
}
|
||||
|
||||
mylog("debug", [f"[Notification] final_json: {json.dumps(final_json)}"])
|
||||
|
||||
204
server/models/parameters_instance.py
Normal file
204
server/models/parameters_instance.py
Normal file
@@ -0,0 +1,204 @@
|
||||
"""
|
||||
Parameters Instance - Handles Parameters table operations for Remember Me tokens and other system parameters.
|
||||
|
||||
The Parameters table is used for temporary, ephemeral settings like Remember Me tokens.
|
||||
Structure:
|
||||
parID: TEXT PRIMARY KEY (e.g., "remember_me_token_{uuid}")
|
||||
parValue: TEXT (e.g., hashed token value)
|
||||
"""
|
||||
|
||||
import hashlib
|
||||
import sqlite3
|
||||
from database import get_temp_db_connection
|
||||
from logger import mylog
|
||||
|
||||
|
||||
class ParametersInstance:
|
||||
"""Handler for Parameters table operations."""
|
||||
|
||||
# --- helper methods (DRY pattern from DeviceInstance) ----------------------
|
||||
def _fetchall(self, query, params=()):
|
||||
"""Fetch all rows and return as list of dicts."""
|
||||
conn = get_temp_db_connection()
|
||||
rows = conn.execute(query, params).fetchall()
|
||||
conn.close()
|
||||
return [dict(r) for r in rows]
|
||||
|
||||
def _fetchone(self, query, params=()):
|
||||
"""Fetch single row and return as dict or None."""
|
||||
conn = get_temp_db_connection()
|
||||
row = conn.execute(query, params).fetchone()
|
||||
conn.close()
|
||||
return dict(row) if row else None
|
||||
|
||||
def _execute(self, query, params=()):
|
||||
"""Execute write query (INSERT/UPDATE/DELETE)."""
|
||||
conn = get_temp_db_connection()
|
||||
cur = conn.cursor()
|
||||
cur.execute(query, params)
|
||||
conn.commit()
|
||||
conn.close()
|
||||
|
||||
# --- public API -----------------------------------------------------------
|
||||
|
||||
def get_parameter(self, par_id):
|
||||
"""
|
||||
Retrieve a parameter value by ID.
|
||||
|
||||
Args:
|
||||
par_id (str): The parameter ID to retrieve
|
||||
|
||||
Returns:
|
||||
str: The parameter value, or None if not found
|
||||
"""
|
||||
try:
|
||||
# Try with quoted column names in case they're reserved or have special chars
|
||||
row = self._fetchone(
|
||||
'SELECT "parValue" FROM "Parameters" WHERE "parID" = ?',
|
||||
(par_id,)
|
||||
)
|
||||
return row['parValue'] if row else None
|
||||
except Exception as e:
|
||||
mylog("verbose", [f"[ParametersInstance] Error retrieving parameter {par_id}: {e}"])
|
||||
return None
|
||||
|
||||
def set_parameter(self, par_id, par_value):
|
||||
"""
|
||||
Store or update a parameter (INSERT OR REPLACE).
|
||||
|
||||
Args:
|
||||
par_id (str): The parameter ID
|
||||
par_value (str): The parameter value
|
||||
|
||||
Returns:
|
||||
bool: True if successful, False otherwise
|
||||
"""
|
||||
try:
|
||||
# Try with quoted column names in case they're reserved or have special chars
|
||||
self._execute(
|
||||
'INSERT OR REPLACE INTO "Parameters" ("parID", "parValue") VALUES (?, ?)',
|
||||
(par_id, par_value)
|
||||
)
|
||||
mylog("verbose", [f"[ParametersInstance] Parameter {par_id} stored successfully"])
|
||||
return True
|
||||
except Exception as e:
|
||||
mylog("verbose", [f"[ParametersInstance] Error storing parameter {par_id}: {e}"])
|
||||
return False
|
||||
|
||||
def delete_parameter(self, par_id):
|
||||
"""
|
||||
Delete a parameter by ID.
|
||||
|
||||
Args:
|
||||
par_id (str): The parameter ID to delete
|
||||
|
||||
Returns:
|
||||
bool: True if successful, False otherwise
|
||||
"""
|
||||
try:
|
||||
# Try with quoted column names in case they're reserved or have special chars
|
||||
self._execute(
|
||||
'DELETE FROM "Parameters" WHERE "parID" = ?',
|
||||
(par_id,)
|
||||
)
|
||||
mylog("verbose", [f"[ParametersInstance] Parameter {par_id} deleted successfully"])
|
||||
return True
|
||||
except Exception as e:
|
||||
mylog("verbose", [f"[ParametersInstance] Error deleting parameter {par_id}: {e}"])
|
||||
return False
|
||||
|
||||
def delete_parameters_by_prefix(self, prefix):
|
||||
"""
|
||||
Delete all parameters matching a prefix pattern (for cleanup).
|
||||
|
||||
Args:
|
||||
prefix (str): The prefix pattern (e.g., "remember_me_token_")
|
||||
|
||||
Returns:
|
||||
int: Number of parameters deleted
|
||||
"""
|
||||
try:
|
||||
conn = get_temp_db_connection()
|
||||
cur = conn.cursor()
|
||||
cur.execute('DELETE FROM "Parameters" WHERE "parID" LIKE ?', (f"{prefix}%",))
|
||||
deleted_count = cur.rowcount
|
||||
conn.commit()
|
||||
conn.close()
|
||||
mylog("verbose", [f"[ParametersInstance] Deleted {deleted_count} parameters with prefix '{prefix}'"])
|
||||
return deleted_count
|
||||
except Exception as e:
|
||||
mylog("verbose", [f"[ParametersInstance] Error deleting parameters with prefix '{prefix}': {e}"])
|
||||
return 0
|
||||
|
||||
def validate_token(self, token):
    """
    Validate a Remember Me token against the hashed tokens stored in the
    Parameters table.

    Security: the SHA-256 digest of the supplied token is compared against
    every stored hash using _hash_equals (timing-safe comparison).

    Args:
        token (str): The unhashed token (from cookie)

    Returns:
        dict: {
            'valid': bool,
            'par_id': str or None  # The matching parameter ID if valid
        }

    Note:
        Returns immediately on first match. Use hash_equals() to prevent timing attacks.
    """
    if not token:
        return {'valid': False, 'par_id': None}

    try:
        # Digest of the cookie-supplied token, hex-encoded like stored values
        candidate_hash = hashlib.sha256(token.encode('utf-8')).hexdigest()

        # All remember-me entries currently stored in Parameters
        stored_tokens = self._fetchall(
            'SELECT "parID", "parValue" FROM "Parameters" WHERE "parID" LIKE ?',
            ("remember_me_token_%",)
        )

        # Timing-safe comparison against each stored hash
        for row in stored_tokens:
            if self._hash_equals(row['parValue'], candidate_hash):
                mylog("verbose", [f"[ParametersInstance] Token validation successful for {row['parID']}"])
                return {'valid': True, 'par_id': row['parID']}

        mylog("verbose", ["[ParametersInstance] Token validation failed: no matching token found"])
        return {'valid': False, 'par_id': None}

    except Exception as e:
        mylog("verbose", [f"[ParametersInstance] Error validating token: {e}"])
        return {'valid': False, 'par_id': None}
||||
@staticmethod
|
||||
def _hash_equals(known_string, user_string):
|
||||
"""
|
||||
Timing-safe string comparison to prevent timing attacks.
|
||||
|
||||
Args:
|
||||
known_string (str): The known value (stored hash)
|
||||
user_string (str): The user-supplied value (computed hash)
|
||||
|
||||
Returns:
|
||||
bool: True if strings match, False otherwise
|
||||
"""
|
||||
if not isinstance(known_string, str) or not isinstance(user_string, str):
|
||||
return False
|
||||
|
||||
if len(known_string) != len(user_string):
|
||||
return False
|
||||
|
||||
# Compare all characters regardless of match (timing-safe)
|
||||
result = 0
|
||||
for x, y in zip(known_string, user_string):
|
||||
result |= ord(x) ^ ord(y)
|
||||
|
||||
return result == 0
|
||||
@@ -82,6 +82,8 @@ def test_health_response_structure(client, api_token):
|
||||
assert "load_1m" in data
|
||||
assert "storage_pct" in data
|
||||
assert "cpu_temp" in data
|
||||
assert "storage_gb" in data
|
||||
assert "mem_mb" in data
|
||||
|
||||
|
||||
def test_health_db_size_type(client, api_token):
|
||||
@@ -204,6 +206,8 @@ def test_health_multiple_calls_consistency(client, api_token):
|
||||
assert "load_1m" in data
|
||||
assert "storage_pct" in data
|
||||
assert "cpu_temp" in data
|
||||
assert "storage_gb" in data
|
||||
assert "mem_mb" in data
|
||||
|
||||
|
||||
# ========================================================================
|
||||
|
||||
244
test/ui/test_ui_login.py
Normal file
244
test/ui/test_ui_login.py
Normal file
@@ -0,0 +1,244 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Login Page UI Tests
|
||||
Tests login functionality and deep link support after login
|
||||
"""
|
||||
|
||||
import sys
|
||||
import os
|
||||
import time
|
||||
|
||||
from selenium.webdriver.common.by import By
|
||||
from selenium.webdriver.support.ui import WebDriverWait
|
||||
from selenium.webdriver.support import expected_conditions as EC
|
||||
|
||||
# Add test directory to path
|
||||
sys.path.insert(0, os.path.dirname(__file__))
|
||||
|
||||
from .test_helpers import BASE_URL, wait_for_page_load, wait_for_element_by_css # noqa: E402
|
||||
|
||||
|
||||
def get_login_password():
    """Get login password from config file or environment.

    Resolution order:
      1. LOGIN_PASSWORD environment variable (for testing)
      2. SETPWD_password entry from a known config file location

    Returns the plaintext password that should be used for login, or None
    if it cannot be determined (callers skip their test in that case).
    """
    # Try environment variable first (for testing)
    env_password = os.getenv("LOGIN_PASSWORD")
    if env_password:
        return env_password

    # Bug fix (comment): this is the SHA256 hash of "123456" — the default
    # test password from index.php — not of "password" as previously claimed.
    DEFAULT_PASSWORD_HASH = '8d969eef6ecad3c29a3a629280e686cf0c3f5d5a86aff3ca12020c923adc6c92'

    # Try common config file locations
    config_paths = [
        "/data/config/app.conf",
        "/app/back/app.conf",
        os.path.expanduser("~/.netalertx/app.conf")
    ]

    for config_path in config_paths:
        try:
            if not os.path.exists(config_path):
                continue
            print(f"📋 Reading config from: {config_path}")
            with open(config_path, 'r') as f:
                for line in f:
                    # Only look for SETPWD_password lines (not other config like API keys)
                    if 'SETPWD_password' not in line or '=' not in line:
                        continue
                    # Extract the value after '=' and strip surrounding quotes
                    value = line.split('=', 1)[1].strip()
                    value = value.strip('"').strip("'")
                    print(f"✓ Found password config: {value[:32]}...")

                    # The default hash corresponds to the known default password
                    if value == DEFAULT_PASSWORD_HASH:
                        print("  Using default password: '123456'")
                        return "123456"
                    # If it's plaintext and looks reasonable, use it directly
                    elif len(value) < 100 and not value.startswith('{') and value.isalnum():
                        print(f"  Using plaintext password: '{value}'")
                        return value
                    # For any other hash the plaintext cannot be recovered
                    break  # Found SETPWD_password, stop looking
        except (FileNotFoundError, IOError, PermissionError) as e:
            print(f"⚠ Error reading {config_path}: {e}")
            continue

    # Could not determine the password from config; tests will skip on login failure.
    print("ℹ Password not determinable from config, trying default passwords...")
    return None
||||
def perform_login(driver, password=None):
    """Submit the login form, falling back to the default test password.

    Args:
        driver: Selenium WebDriver
        password: Password to try. If None, the default test password is used.
    """
    effective_password = "123456" if password is None else password  # default test password

    driver.find_element(By.NAME, "loginpassword").send_keys(effective_password)
    driver.find_element(By.CSS_SELECTOR, "button[type='submit']").click()

    # Give the server a moment to process the submission; it may either
    # redirect on success or re-render the login page with an error.
    time.sleep(1)
    wait_for_page_load(driver, timeout=5)
||||
def test_login_page_loads(driver):
    """Test: Login page loads successfully"""
    driver.get(f"{BASE_URL}/index.php")
    wait_for_page_load(driver)

    # Both form controls must exist for login to be possible at all
    assert driver.find_element(By.NAME, "loginpassword"), \
        "Password field should be present"
    assert driver.find_element(By.CSS_SELECTOR, "button[type='submit']"), \
        "Submit button should be present"
||||
def test_login_redirects_to_devices(driver):
    """Test: Successful login redirects to devices page"""
    import pytest

    # None → perform_login falls back to the default test password
    password = get_login_password()

    driver.get(f"{BASE_URL}/index.php")
    wait_for_page_load(driver)
    perform_login(driver, password)

    # Server-side redirect is usually instant; allow a moment anyway
    time.sleep(1)

    if '/devices.php' not in driver.current_url:
        pytest.skip(f"Login failed or not configured. URL: {driver.current_url}")

    assert '/devices.php' in driver.current_url, \
        f"Expected redirect to devices.php, got {driver.current_url}"
||||
def test_login_with_deep_link_preserves_hash(driver):
    """Test: Login with deep link (?next=...) preserves the URL fragment hash

    When a user logs in from a deep link URL (e.g., ?next=base64(devices.php%23device-123)),
    they should be redirected to the target page with the hash fragment intact.
    """
    import base64
    import pytest

    password = get_login_password()

    # Deep link target, base64-encoded for the ?next= parameter
    target_path = "/devices.php#device-123"
    next_param = base64.b64encode(target_path.encode()).decode()

    driver.get(f"{BASE_URL}/index.php?next={next_param}")
    wait_for_page_load(driver)
    perform_login(driver, password)

    # Allow time for the server-side redirect plus any JS hash handling
    time.sleep(2)

    landing_url = driver.current_url
    print(f"URL after login with deep link: {landing_url}")

    if '/devices.php' not in landing_url:
        pytest.skip(f"Login failed or redirect not configured. URL: {landing_url}")

    # The fragment must survive the round-trip through the login form
    assert '#device-123' in landing_url, f"Expected #device-123 hash in URL, got {landing_url}"
||||
def test_login_with_deep_link_to_network_page(driver):
    """Test: Login with deep link to network.php page preserves hash

    User can login with a deep link to the network page (e.g., network.php#settings-panel),
    and should be redirected to that page with the hash fragment intact.
    """
    import base64
    import pytest

    password = get_login_password()

    # Deep link target, base64-encoded for the ?next= parameter
    target_path = "/network.php#settings-panel"
    next_param = base64.b64encode(target_path.encode()).decode()

    driver.get(f"{BASE_URL}/index.php?next={next_param}")
    wait_for_page_load(driver)
    perform_login(driver, password)

    # Allow time for the server-side redirect to complete
    time.sleep(2)

    landing_url = driver.current_url
    print(f"URL after login with network.php deep link: {landing_url}")

    if '/network.php' not in landing_url:
        pytest.skip(f"Login failed or redirect not configured. URL: {landing_url}")

    # The fragment must survive the round-trip through the login form
    assert '#settings-panel' in landing_url, \
        f"Expected #settings-panel hash in URL, got {landing_url}"
||||
def test_login_without_next_parameter(driver):
    """Test: Login without ?next parameter defaults to devices.php"""
    import pytest

    password = get_login_password()

    driver.get(f"{BASE_URL}/index.php")
    wait_for_page_load(driver)
    perform_login(driver, password)

    # Allow the redirect to complete
    time.sleep(1)

    landing_url = driver.current_url
    if '/devices.php' not in landing_url:
        pytest.skip(f"Login failed or not configured. URL: {landing_url}")

    assert '/devices.php' in landing_url, \
        f"Expected default redirect to devices.php, got {landing_url}"
||||
def test_url_hash_hidden_input_present(driver):
    """Test: URL fragment hash field is present in login form

    The hidden url_hash input field is used to capture and preserve
    URL hash fragments during form submission and redirect.
    """
    driver.get(f"{BASE_URL}/index.php")
    wait_for_page_load(driver)

    hash_field = driver.find_element(By.ID, "url_hash")
    assert hash_field, "Hidden url_hash input field should be present"

    field_type = hash_field.get_attribute("type")
    assert field_type == "hidden", "url_hash should be a hidden input field"
||||
Reference in New Issue
Block a user