From 8a385a90d44b71e94fa73e7fb1f8515739a47db7 Mon Sep 17 00:00:00 2001 From: jokob-sk Date: Mon, 5 Aug 2024 09:58:18 +1000 Subject: [PATCH 1/2] =?UTF-8?q?=F0=9F=94=8CUNIFI=20work?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- docs/PLUGINS_DEV.md | 26 +-- front/plugins/__template/rename_me.py | 7 +- front/plugins/plugin_helper.py | 37 +++- front/plugins/unifi_import/config.json | 34 ++- front/plugins/unifi_import/script.py | 280 ++++++++++++------------- server/database.py | 36 ++++ server/helper.py | 15 +- server/plugin.py | 231 +++++++++++++------- 8 files changed, 408 insertions(+), 258 deletions(-) diff --git a/docs/PLUGINS_DEV.md b/docs/PLUGINS_DEV.md index e7cf38c7..3fe9b014 100755 --- a/docs/PLUGINS_DEV.md +++ b/docs/PLUGINS_DEV.md @@ -33,24 +33,9 @@ Example use cases for plugins could be: If you wish to develop a plugin, please check the existing plugin structure. Once the settings are saved by the user they need to be removed from the `app.conf` file manually if you want to re-initialize them from the `config.json` of the plugin. -Again, please read the below carefully if you'd like to contribute with a plugin yourself. This documentation file might be outdated, so double-check the sample plugins as well. - ## ⚠ Disclaimer -Follow the below very carefully and check example plugin(s) if you'd like to write one yourself. Plugin UI is not my priority right now, happy to approve PRs if you are interested in extending/improving the UI experience (See [Frontend guidelines](/docs/FRONTEND_DEVELOPMENT.md)). Example improvements for the taking: - -* Making the tables sortable/filterable -* Using the same approach to display table data as in the Devices section (solves above) -* Adding form controls supported to display the data (Currently supported ones are listed in the section "UI settings in database_column_definitions" below) -* ... 
-
-## ❗ Known limitations:
-
-These issues will be hopefully fixed with time, so please don't report them. Instead, if you know how, feel free to investigate and submit a PR to fix the below. Keep the PRs small as it's easier to approve them:
-
-* Existing plugin objects are sometimes not interpreted correctly and a new object is created instead, resulting in duplicate entries. (race condition?)
-* Occasional (experienced twice) hanging of processing plugin script file.
-* UI displays outdated values until the API endpoints get refreshed.
+Please read the below carefully if you'd like to contribute with a plugin yourself. This documentation file might be outdated, so double-check the sample plugins as well.
 
 ## Plugin file structure overview
 
 More on specifics below.
 
-### Column order and values
+### Column order and values (plugins interface contract)
 
 > [!IMPORTANT]
-> Spend some time reading and trying to understand the below table. This is the interface between the Plugins and the core application.
+> Spend some time reading and trying to understand the below table. This is the interface between the Plugins and the core application. The application expects 9 or 13 values. The first 9 values are mandatory. The next 4 values (`HelpVal1` to `HelpVal4`) are optional. However, if you use any of these optional values (e.g., `HelpVal1`), you need to supply all optional values (e.g., `HelpVal2`, `HelpVal3`, and `HelpVal4`). If a value is not used, it should be padded with `null`.
 
 | Order | Represented Column | Value Required | Description |
 |----------------------|----------------------|----------------------|----------------------|
| 6 | `Watched_Value4` | no | As above | | 7 | `Extra` | no | Any other data you want to pass and display in NetAlertX and the notifications | | 8 | `ForeignKey` | no | A foreign key that can be used to link to the parent object (usually a MAC address) | + | 9 | `HelpVal1` | no | (optional) A helper value | + | 10 | `HelpVal2` | no | (optional) A helper value | + | 11 | `HelpVal3` | no | (optional) A helper value | + | 12 | `HelpVal4` | no | (optional) A helper value | + > [!NOTE] > De-duplication is run once an hour on the `Plugins_Objects` database table and duplicate entries with the same value in columns `Object_PrimaryID`, `Object_SecondaryID`, `Plugin` (auto-filled based on `unique_prefix` of the plugin), `UserData` (can be populated with the `"type": "textbox_save"` column type) are removed. diff --git a/front/plugins/__template/rename_me.py b/front/plugins/__template/rename_me.py index e996402e..21e81982 100755 --- a/front/plugins/__template/rename_me.py +++ b/front/plugins/__template/rename_me.py @@ -57,7 +57,12 @@ def main(): watched3 = device['device_type'], watched4 = device['last_seen'], extra = '', - foreignKey = device['mac_address']) + foreignKey = device['mac_address'] + # helpVal1 = "Something1", # Optional Helper values to be passed for mapping into the app + # helpVal2 = "Something1", # If you need to use even only 1, add the remaining ones too + # helpVal3 = "Something1", # and set them to 'null'. 
Check the docs for details:
+        # helpVal4 = "Something1",   # https://github.com/jokob-sk/NetAlertX/blob/main/docs/PLUGINS_DEV.md
+        )
 
     mylog('verbose', [f'[{pluginName}] New entries: "{len(device_data)}"'])
 
diff --git a/front/plugins/plugin_helper.py b/front/plugins/plugin_helper.py
index 27f1f953..007bfc98 100755
--- a/front/plugins/plugin_helper.py
+++ b/front/plugins/plugin_helper.py
@@ -99,7 +99,7 @@ def normalize_mac(mac):
 # -------------------------------------------------------------------
 class Plugin_Object:
     """
-    Plugin_Object class to manage one object introduced by the plugin
+    Plugin_Object class to manage one object introduced by the plugin.
     An object typically is a device but could also be a website or
     something else that is monitored by the plugin.
     """
@@ -114,11 +114,15 @@ class Plugin_Object:
         watched4="",
         extra="",
         foreignKey="",
+        helpVal1="",
+        helpVal2="",
+        helpVal3="",
+        helpVal4="",
     ):
         self.pluginPref = ""
         self.primaryId = primaryId
         self.secondaryId = secondaryId
-        self.created = datetime.now(timeZone).strftime("%Y-%m-%d %H:%M:%S")
+        self.created = datetime.now().strftime("%Y-%m-%d %H:%M:%S")  # NOTE(review): timeZone argument dropped here, timestamp becomes naive/server-local — confirm intentional
         self.changed = ""
         self.watched1 = watched1
         self.watched2 = watched2
@@ -128,13 +132,17 @@ class Plugin_Object:
         self.extra = extra
         self.userData = ""
         self.foreignKey = foreignKey
+        self.helpVal1 = helpVal1 or ""
+        self.helpVal2 = helpVal2 or ""
+        self.helpVal3 = helpVal3 or ""
+        self.helpVal4 = helpVal4 or ""
 
     def write(self):
         """
-        write the object details as a string in the
-        format required to write the result file
+        Write the object details as a string in the
+        format required to write the result file.
""" - line = "{}|{}|{}|{}|{}|{}|{}|{}|{}\n".format( + line = "{}|{}|{}|{}|{}|{}|{}|{}|{}|{}|{}|{}|{}\n".format( self.primaryId, self.secondaryId, self.created, @@ -144,10 +152,12 @@ class Plugin_Object: self.watched4, self.extra, self.foreignKey, + self.helpVal1, + self.helpVal2, + self.helpVal3, + self.helpVal4 ) return line - - class Plugin_Objects: """ @@ -155,7 +165,7 @@ class Plugin_Objects: It contains a list of Plugin_Object instances. And can write the required result file. """ - + def __init__(self, result_file): self.result_file = result_file self.objects = [] @@ -170,6 +180,10 @@ class Plugin_Objects: watched4="", extra="", foreignKey="", + helpVal1="", + helpVal2="", + helpVal3="", + helpVal4="", ): self.objects.append( Plugin_Object( @@ -181,16 +195,17 @@ class Plugin_Objects: watched4, extra, foreignKey, + helpVal1, + helpVal2, + helpVal3, + helpVal4 ) ) def write_result_file(self): - # print ("writing file: "+self.result_file) with open(self.result_file, mode="w") as fp: for obj in self.objects: fp.write(obj.write()) - fp.close() - def __add__(self, other): if isinstance(other, Plugin_Objects): diff --git a/front/plugins/unifi_import/config.json b/front/plugins/unifi_import/config.json index 85d7763e..0c9f6d41 100755 --- a/front/plugins/unifi_import/config.json +++ b/front/plugins/unifi_import/config.json @@ -388,6 +388,38 @@ "show": true, "type": "label" }, + { + "column": "HelpVal1", + "mapped_to_column": "cur_NetworkNodeMAC", + "css_classes": "col-sm-2", + "default_value": "", + "localized": ["name"], + "name": [ + { + "language_code": "en_us", + "string": "Parent Network MAC" + } + ], + "options": [], + "show": true, + "type": "label" + }, + { + "column": "HelpVal2", + "mapped_to_column": "cur_PORT", + "css_classes": "col-sm-2", + "default_value": "", + "localized": ["name"], + "name": [ + { + "language_code": "en_us", + "string": "Port" + } + ], + "options": [], + "show": true, + "type": "label" + }, { "column": "Status", "css_classes": 
"col-sm-1", @@ -492,7 +524,7 @@ } }, { - "default_value": "python3 /app/front/plugins/unifi_import/script.py username={username} password={password} host={host} sites={sites} port={port} verifyssl={verifyssl} version={version} fullimport={fullimport}", + "default_value": "python3 /app/front/plugins/unifi_import/script.py", "description": [ { "language_code": "en_us", diff --git a/front/plugins/unifi_import/script.py b/front/plugins/unifi_import/script.py index 7729a3be..d1d9e6a5 100755 --- a/front/plugins/unifi_import/script.py +++ b/front/plugins/unifi_import/script.py @@ -35,52 +35,30 @@ pluginName = 'UNFIMP' def main(): - mylog('verbose', ['[UNFIMP] In script']) + mylog('verbose', [f'[{pluginName}] In script']) # init global variables global UNIFI_USERNAME, UNIFI_PASSWORD, UNIFI_HOST, UNIFI_SITES, PORT, VERIFYSSL, VERSION, FULL_IMPORT - - parser = argparse.ArgumentParser(description='Import devices from a UNIFI controller') - - parser.add_argument('username', action="store", help="Username used to login into the UNIFI controller") - parser.add_argument('password', action="store", help="Password used to login into the UNIFI controller") - parser.add_argument('host', action="store", help="Host url or IP address where the UNIFI controller is hosted (excluding http://)") - parser.add_argument('sites', action="store", help="Name of the sites (usually 'default', check the URL in your UniFi controller UI). 
Separated by comma (,) if passing multiple sites") - parser.add_argument('port', action="store", help="Usually 8443") - parser.add_argument('verifyssl', action="store", help="verify SSL certificate [true|false]") - parser.add_argument('version', action="store", help="The base version of the controller API [v4|v5|unifiOS|UDMP-unifiOS]") - parser.add_argument('fullimport', action="store", help="Defines if a full import or only online devices hould be imported [disabled|once|always]") - - values = parser.parse_args() - - - - # parse output - plugin_objects = Plugin_Objects(RESULT_FILE) - + plugin_objects = Plugin_Objects(RESULT_FILE) - mylog('verbose', [f'[UNFIMP] Check if all login information is available: {values}']) + UNIFI_USERNAME = get_setting_value("UNFIMP_username") + UNIFI_PASSWORD = get_setting_value("UNFIMP_password") + UNIFI_HOST = get_setting_value("UNFIMP_host") + UNIFI_SITES = get_setting_value("UNFIMP_sites") + PORT = get_setting_value("UNFIMP_port") + VERIFYSSL = get_setting_value("UNFIMP_verifyssl") + VERSION = get_setting_value("UNFIMP_version") + FULL_IMPORT = get_setting_value("UNFIMP_fullimport") - if values.username and values.password and values.host and values.sites: - - UNIFI_USERNAME = values.username.split('=')[1] - UNIFI_PASSWORD = values.password.split('=')[1] - UNIFI_HOST = values.host.split('=')[1] - UNIFI_SITES = values.sites.split('=')[1] - PORT = values.port.split('=')[1] - VERIFYSSL = values.verifyssl.split('=')[1] - VERSION = values.version.split('=')[1] - FULL_IMPORT = values.fullimport.split('=')[1] - - plugin_objects = get_entries(plugin_objects) + plugin_objects = get_entries(plugin_objects) plugin_objects.write_result_file() - mylog('verbose', [f'[UNFIMP] Scan finished, found {len(plugin_objects)} devices']) + mylog('verbose', [f'[{pluginName}] Scan finished, found {len(plugin_objects)} devices']) # ............................................. 
@@ -91,152 +69,166 @@ def get_entries(plugin_objects: Plugin_Objects) -> Plugin_Objects:
     lock_file_value = read_lock_file()
     perform_full_run = check_full_run_state(FULL_IMPORT, lock_file_value)
+    mylog('verbose', [f'[{pluginName}] sites: {UNIFI_SITES}'])
 
-    sites = []
-
-    if ',' in UNIFI_SITES:
-        sites = UNIFI_SITES.split(',')
-
-    else:
-        sites.append(UNIFI_SITES)
 
     if (VERIFYSSL.upper() == "TRUE"):
         VERIFYSSL = True
     else:
         VERIFYSSL = False
-
-    for site in sites:
+
+    # mylog('verbose', [f'[{pluginName}] sites: {sites}'])
+
+    for site in UNIFI_SITES:
+
+        mylog('verbose', [f'[{pluginName}] site: {site}'])
 
         c = Controller(UNIFI_HOST, UNIFI_USERNAME, UNIFI_PASSWORD, port=PORT, version=VERSION, ssl_verify=VERIFYSSL, site_id=site)
 
-        mylog('verbose', [f'[UNFIMP] Identify Unifi Devices'])
-        # get all Unifi devices
-        for ap in c.get_aps():
-
-            # mylog('verbose', [f'{json.dumps(ap)}'])
-
-            deviceType = ''
-            if (ap['type'] == 'udm'):
-                deviceType = 'Router'
-            elif (ap['type'] == 'usg'):
-                deviceType = 'Router'
-            elif (ap['type'] == 'usw'):
-                deviceType = 'Switch'
-            elif (ap['type'] == 'uap'):
-                deviceType = 'AP'
-
-            name = get_unifi_val(ap, 'name')
-            hostName = get_unifi_val(ap, 'hostname')
-
-            name = set_name(name, hostName)
-
-            ipTmp = get_unifi_val(ap, 'ip')
-
-            # if IP not found use a default value
-            if ipTmp == "null":
-                ipTmp = '0.0.0.0'
-
-            plugin_objects.add_object(
-                primaryId=ap['mac'],
-                secondaryId=ipTmp,
-                watched1=name,
-                watched2='Ubiquiti Networks Inc.',
-                watched3=deviceType,
-                watched4=ap['state'],
-                extra=get_unifi_val(ap, 'connection_network_name')
-            )
-
-
-        mylog('verbose', [f'[UNFIMP] Found {len(plugin_objects)} Unifi Devices'])
-
-        online_macs = set()
+        online_macs, processed_macs = set(), []  # FIXME(review): `online_macs = set()` was removed by this patch but `online_macs` is still passed to collect_details() below — defining it here avoids a NameError
 
-        # get_clients() returns all clients which are currently online.
- for cl in c.get_clients(): + mylog('verbose', [f'[{pluginName}] Get Online Devices']) - # mylog('verbose', [f'{json.dumps(cl)}']) - online_macs.add(cl['mac']) + # Collect details for online clients + collect_details( + device_type={'cl': ''}, + devices=c.get_clients(), + online_macs=online_macs, + processed_macs=processed_macs, + plugin_objects=plugin_objects, + device_label='client', + device_vendor="" + ) - - mylog('verbose', [f'[UNFIMP] Found {len(plugin_objects)} Online Devices']) + mylog('verbose', [f'[{pluginName}] Found {len(plugin_objects)} Online Devices']) - # get_users() returns all clients known by the controller - for user in c.get_users(): + mylog('verbose', [f'[{pluginName}] Identify Unifi Devices']) - #mylog('verbose', [f'{json.dumps(user)}']) + # Collect details for Unifi devices + collect_details( + device_type={ + 'udm': 'Router', + 'usg': 'Router', + 'usw': 'Switch', + 'uap': 'AP' + }, + devices=c.get_aps(), + online_macs=online_macs, + processed_macs=processed_macs, + plugin_objects=plugin_objects, + device_label='ap', + device_vendor="Ubiquiti Networks Inc." 
+ ) - name = get_unifi_val(user, 'name') - hostName = get_unifi_val(user, 'hostname') + mylog('verbose', [f'[{pluginName}] Found {len(plugin_objects)} Unifi Devices']) - name = set_name(name, hostName) + # Collect details for users + collect_details( + device_type={'user': ''}, + devices=c.get_users(), + online_macs=online_macs, + processed_macs=processed_macs, + plugin_objects=plugin_objects, + device_label='user', + device_vendor="" + ) - status = 1 if user['mac'] in online_macs else 0 + mylog('verbose', [f'[{pluginName}] Found {len(plugin_objects)} Users']) - if status == 1 or perform_full_run is True: - - ipTmp = get_unifi_val(user, 'last_ip') - - if ipTmp == 'null': - ipTmp = get_unifi_val(user, 'fixed_ip') - - # if IP not found use a default value - if ipTmp == "null": - ipTmp = '0.0.0.0' - - plugin_objects.add_object( - primaryId=user['mac'], - secondaryId=ipTmp, - watched1=name, - watched2=get_unifi_val(user, 'oui'), - watched3='Other', - watched4=status, - extra=get_unifi_val(user, 'last_connection_network_name') - ) - - # check if the lockfile needs to be adapted - - mylog('verbose', [f'[UNFIMP] check if Lock file needs to be modified']) + + mylog('verbose', [f'[{pluginName}] check if Lock file needs to be modified']) set_lock_file_value(FULL_IMPORT, lock_file_value) - mylog('verbose', [f'[UNFIMP] Found {len(plugin_objects)} Clients overall']) + mylog('verbose', [f'[{pluginName}] Found {len(plugin_objects)} Clients overall']) return plugin_objects # ----------------------------------------------------------------------------- -def get_unifi_val(obj, key): +def collect_details(device_type, devices, online_macs, processed_macs, plugin_objects, device_label, device_vendor): + for device in devices: + mylog('verbose', [f'{json.dumps(device)}']) - res = '' + name = get_name(get_unifi_val(device, 'name'), get_unifi_val(device, 'hostname')) + ipTmp = get_ip(get_unifi_val(device, 'last_ip'), get_unifi_val(device, 'fixed_ip'), get_unifi_val(device, 'ip')) + macTmp 
= device['mac'] + status = 1 if macTmp in online_macs else device.get('state', 0) + deviceType = device_type.get(device.get('type'), '') - res = obj.get(key, None) - - if res not in ['','None', None]: - return res - - mylog('debug', [f'[{pluginName}] Value not found for key "{key}" in obj "{json.dumps(obj)}"']) - - return 'null' + # Add object only if not processed + if macTmp not in processed_macs: + plugin_objects.add_object( + primaryId=macTmp, + secondaryId=ipTmp, + watched1=name, + watched2=get_unifi_val(device, 'oui', device_vendor), + watched3=deviceType, + watched4=status, + extra=get_unifi_val(device, 'connection_network_name', ''), + foreignKey="", + helpVal1=get_parent_mac(get_unifi_val(device, 'uplink_mac'), get_unifi_val(device, 'ap_mac'), get_unifi_val(device, 'sw_mac')), + helpVal2=get_port(get_unifi_val(device, 'sw_port'), get_unifi_val(device, 'uplink_remote_port')), + helpVal3=device_label, + helpVal4="", + ) + processed_macs.append(macTmp) +# ----------------------------------------------------------------------------- +def get_unifi_val(obj, key, default='null'): + if isinstance(obj, dict): + if key in obj and obj[key] not in ['', 'None', None]: + return obj[key] + for k, v in obj.items(): + if isinstance(v, dict): + result = get_unifi_val(v, key, default) + if result not in ['','None', None, 'null']: + return result + + mylog('debug', [f'[{pluginName}] Value not found for key "{key}" in obj "{json.dumps(obj)}"']) + return default # ----------------------------------------------------------------------------- -def set_name(name: str, hostName: str) -> str: +def get_name(*names: str) -> str: + for name in names: + if name and name != 'null': + return name + return 'null' - if name != 'null': - return name +# ----------------------------------------------------------------------------- +def get_parent_mac(*macs: str) -> str: + for mac in macs: + if mac and mac != 'null': + return mac + return 'null' - elif name == 'null' and hostName != 'null': - 
return hostName
+# -----------------------------------------------------------------------------
+def get_port(*ports: str) -> str:
+    for port in ports:
+        if port and port != 'null':
+            return port
+    return 'null'
 
-    else:
-        return 'null'
+# -----------------------------------------------------------------------------
+def get_port(*macs: str) -> str:  # FIXME(review): duplicate definition — shadows the get_port above (behavior is identical); one of the two should be removed
+    for mac in macs:
+        if mac and mac != 'null':
+            return mac
+    return 'null'
+
+# -----------------------------------------------------------------------------
+def get_ip(*ips: str) -> str:
+    for ip in ips:
+        if ip and ip != 'null':
+            return ip
+    return '0.0.0.0'
 
 # -----------------------------------------------------------------------------
 def set_lock_file_value(config_value: str, lock_file_value: bool) -> None:
-    mylog('verbose', [f'[UNFIMP] Lock Params: config_value={config_value}, lock_file_value={lock_file_value}'])
+    mylog('verbose', [f'[{pluginName}] Lock Params: config_value={config_value}, lock_file_value={lock_file_value}'])
     # set lock if 'once' is set and the lock is not set
     if config_value == 'once' and lock_file_value is False:
         out = 1
@@ -244,10 +236,10 @@ def set_lock_file_value(config_value: str, lock_file_value: bool) -> None:
     elif config_value != 'once' and lock_file_value is True:
         out = 0
     else:
be done: config_value: {config_value} and lock_file_value: {lock_file_value}']) + mylog('verbose', [f'[{pluginName}] Full import needs to be done: config_value: {config_value} and lock_file_value: {lock_file_value}']) return True else: - mylog('verbose', [f'[UNFIMP] Full import NOT needed: config_value: {config_value} and lock_file_value: {lock_file_value}']) + mylog('verbose', [f'[{pluginName}] Full import NOT needed: config_value: {config_value} and lock_file_value: {lock_file_value}']) return False #=============================================================================== diff --git a/server/database.py b/server/database.py index 345db587..2a8d7186 100755 --- a/server/database.py +++ b/server/database.py @@ -385,6 +385,18 @@ class DB(): self.sql.execute(""" ALTER TABLE "Plugins_Objects" ADD "SyncHubNodeName" TEXT """) + + # helper columns HelpVal1-4 + plug_HelpValues_missing = self.sql.execute (""" + SELECT COUNT(*) AS CNTREC FROM pragma_table_info('Plugins_Objects') WHERE name='HelpVal1' + """).fetchone()[0] == 0 + + if plug_HelpValues_missing : + mylog('verbose', ["[upgradeDB] Adding HelpVal1-4 to the Plugins_Objects table"]) + self.sql.execute('ALTER TABLE "Plugins_Objects" ADD COLUMN "HelpVal1" TEXT') + self.sql.execute('ALTER TABLE "Plugins_Objects" ADD COLUMN "HelpVal2" TEXT') + self.sql.execute('ALTER TABLE "Plugins_Objects" ADD COLUMN "HelpVal3" TEXT') + self.sql.execute('ALTER TABLE "Plugins_Objects" ADD COLUMN "HelpVal4" TEXT') # Plugin execution results sql_Plugins_Events = """ CREATE TABLE IF NOT EXISTS Plugins_Events( @@ -416,6 +428,18 @@ class DB(): self.sql.execute(""" ALTER TABLE "Plugins_Events" ADD "SyncHubNodeName" TEXT """) + + # helper columns HelpVal1-4 + plug_HelpValues_missing = self.sql.execute (""" + SELECT COUNT(*) AS CNTREC FROM pragma_table_info('Plugins_Events') WHERE name='HelpVal1' + """).fetchone()[0] == 0 + + if plug_HelpValues_missing : + mylog('verbose', ["[upgradeDB] Adding HelpVal1-4 to the Plugins_Events table"]) + 
self.sql.execute('ALTER TABLE "Plugins_Events" ADD COLUMN "HelpVal1" TEXT') + self.sql.execute('ALTER TABLE "Plugins_Events" ADD COLUMN "HelpVal2" TEXT') + self.sql.execute('ALTER TABLE "Plugins_Events" ADD COLUMN "HelpVal3" TEXT') + self.sql.execute('ALTER TABLE "Plugins_Events" ADD COLUMN "HelpVal4" TEXT') # Plugin execution history @@ -448,6 +472,18 @@ class DB(): self.sql.execute(""" ALTER TABLE "Plugins_History" ADD "SyncHubNodeName" TEXT """) + + # helper columns HelpVal1-4 + plug_HelpValues_missing = self.sql.execute (""" + SELECT COUNT(*) AS CNTREC FROM pragma_table_info('Plugins_History') WHERE name='HelpVal1' + """).fetchone()[0] == 0 + + if plug_HelpValues_missing : + mylog('verbose', ["[upgradeDB] Adding HelpVal1-4 to the Plugins_History table"]) + self.sql.execute('ALTER TABLE "Plugins_History" ADD COLUMN "HelpVal1" TEXT') + self.sql.execute('ALTER TABLE "Plugins_History" ADD COLUMN "HelpVal2" TEXT') + self.sql.execute('ALTER TABLE "Plugins_History" ADD COLUMN "HelpVal3" TEXT') + self.sql.execute('ALTER TABLE "Plugins_History" ADD COLUMN "HelpVal4" TEXT') # ------------------------------------------------------------------------- diff --git a/server/helper.py b/server/helper.py index 53412ad0..7b86f47c 100755 --- a/server/helper.py +++ b/server/helper.py @@ -352,11 +352,16 @@ def setting_value_to_python_type(set_type, set_value): mylog('none', [f'[HELPER] No elements provided in set_type: {set_type} ']) return value - # Use the last element in the list - last_element = elements[len(elements)-1] - elementType = last_element.get('elementType', '') - elementOptions = last_element.get('elementOptions', []) - transformers = last_element.get('transformers', []) + # Find the first element where elementHasInputValue is 1 + element_with_input_value = next((elem for elem in elements if elem.get("elementHasInputValue") == 1), None) + + # If no such element is found, use the last element + if element_with_input_value is None: + element_with_input_value = 
elements[-1] + + elementType = element_with_input_value.get('elementType', '') + elementOptions = element_with_input_value.get('elementOptions', []) + transformers = element_with_input_value.get('transformers', []) # Convert value based on dataType and elementType if dataType == 'string' and elementType in ['input', 'select']: diff --git a/server/plugin.py b/server/plugin.py index 61ed752e..aace1d6a 100755 --- a/server/plugin.py +++ b/server/plugin.py @@ -248,32 +248,51 @@ def execute_plugin(db, all_plugins, plugin, pluginsState = plugins_state() ): for line in newLines: columns = line.split("|") - # There have to be always 9 columns - if len(columns) == 9: - # Create a tuple containing values to be inserted into the database. - # Each value corresponds to a column in the table in the order of the columns. - # must match the Plugins_Objects and Plugins_Events database tables and can be used as input for the plugin_object_class. - sqlParams.append( - ( - 0, # "Index" placeholder - plugin["unique_prefix"], # "Plugin" column value from the plugin dictionary - columns[0], # "Object_PrimaryID" value from columns list - columns[1], # "Object_SecondaryID" value from columns list - 'null', # Placeholder for "DateTimeCreated" column - columns[2], # "DateTimeChanged" value from columns list - columns[3], # "Watched_Value1" value from columns list - columns[4], # "Watched_Value2" value from columns list - columns[5], # "Watched_Value3" value from columns list - columns[6], # "Watched_Value4" value from columns list - 'not-processed', # "Status" column (placeholder) - columns[7], # "Extra" value from columns list - 'null', # Placeholder for "UserData" column - columns[8], # "ForeignKey" value from columns list - tmp_SyncHubNodeName # Sync Hub Node name - ) - ) + # There have to be 9 or 13 columns + # Common part of the SQL parameters + base_params = [ + 0, # "Index" placeholder + plugin["unique_prefix"], # "Plugin" column value from the plugin dictionary + columns[0], # 
"Object_PrimaryID" value from columns list + columns[1], # "Object_SecondaryID" value from columns list + 'null', # Placeholder for "DateTimeCreated" column + columns[2], # "DateTimeChanged" value from columns list + columns[3], # "Watched_Value1" value from columns list + columns[4], # "Watched_Value2" value from columns list + columns[5], # "Watched_Value3" value from columns list + columns[6], # "Watched_Value4" value from columns list + 'not-processed', # "Status" column (placeholder) + columns[7], # "Extra" value from columns list + 'null', # Placeholder for "UserData" column + columns[8], # "ForeignKey" value from columns list + tmp_SyncHubNodeName # Sync Hub Node name + ] + + # Extend the common part with the additional values if there are 13 columns + if len(columns) == 13: + base_params.extend([ + columns[9], # "HelpVal1" value from columns list + columns[10], # "HelpVal2" value from columns list + columns[11], # "HelpVal3" value from columns list + columns[12] # "HelpVal4" value from columns list + ]) + elif len(columns) == 9: + # add padding + base_params.extend([ + 'null', # "HelpVal1" + 'null', # "HelpVal2" + 'null', # "HelpVal3" + 'null' # "HelpVal4" + ]) else: - mylog('none', ['[Plugins] Skipped invalid line in the output: ', line]) + mylog('none', [f'[Plugins] Wrong number of input values, must be 9 or 13, got {len(columns)} from: {line} ']) + + # Create a tuple containing values to be inserted into the database. + # Each value corresponds to a column in the table in the order of the columns. + # must match the Plugins_Objects and Plugins_Events database tables and can be used as input for the plugin_object_class. 
+ + # Append the final parameters to sqlParams + sqlParams.append(tuple(base_params)) # keep current instance log file, delete all from other nodes if filename != 'last_result.log' and os.path.exists(full_path): @@ -293,30 +312,48 @@ def execute_plugin(db, all_plugins, plugin, pluginsState = plugins_state() ): arr = db.get_sql_array (q) for row in arr: - # There has to be always 9 columns - if len(row) == 9 and (row[0] in ['','null']) == False : - # Create a tuple containing values to be inserted into the database. + # There has to be always 9 or 13 columns + if len(row) in [9, 13] and row[0] not in ['', 'null']: + # Create a base tuple containing values to be inserted into the database. # Each value corresponds to a column in the table in the order of the columns. - # must match the Plugins_Objects and Plugins_Events database tables and can be used as input for the plugin_object_class - sqlParams.append( - ( - 0, # "Index" placeholder - plugin["unique_prefix"], # "Plugin" plugin dictionary - row[0], # "Object_PrimaryID" row - handle_empty(row[1]), # "Object_SecondaryID" column after handling empty values - 'null', # Placeholder "DateTimeCreated" column - row[2], # "DateTimeChanged" row - row[3], # "Watched_Value1" row - row[4], # "Watched_Value2" row - handle_empty(row[5]), # "Watched_Value3" column after handling empty values - handle_empty(row[6]), # "Watched_Value4" column after handling empty values - 'not-processed', # "Status" column (placeholder) - row[7], # "Extra" row - 'null', # Placeholder "UserData" column - row[8], # "ForeignKey" row - 'null' # Sync Hub Node name - Only supported with scripts - ) - ) + # Must match the Plugins_Objects and Plugins_Events database tables and can be used as input for the plugin_object_class. 
+ base_params = [ + 0, # "Index" placeholder + plugin["unique_prefix"], # "Plugin" plugin dictionary + row[0], # "Object_PrimaryID" row + handle_empty(row[1]), # "Object_SecondaryID" column after handling empty values + 'null', # Placeholder "DateTimeCreated" column + row[2], # "DateTimeChanged" row + row[3], # "Watched_Value1" row + row[4], # "Watched_Value2" row + handle_empty(row[5]), # "Watched_Value3" column after handling empty values + handle_empty(row[6]), # "Watched_Value4" column after handling empty values + 'not-processed', # "Status" column (placeholder) + row[7], # "Extra" row + 'null', # Placeholder "UserData" column + row[8], # "ForeignKey" row + 'null' # Sync Hub Node name - Only supported with scripts + ] + + # Extend the base tuple with additional values if there are 13 columns + if len(row) == 13: + base_params.extend([ + row[9], # "HelpVal1" row + row[10], # "HelpVal2" row + row[11], # "HelpVal3" row + row[12] # "HelpVal4" row + ]) + else: + # add padding + base_params.extend([ + 'null', # "HelpVal1" + 'null', # "HelpVal2" + 'null', # "HelpVal3" + 'null' # "HelpVal4" + ]) + + # Append the final parameters to sqlParams + sqlParams.append(tuple(base_params)) else: mylog('none', ['[Plugins] Skipped invalid sql result']) @@ -352,28 +389,48 @@ def execute_plugin(db, all_plugins, plugin, pluginsState = plugins_state() ): return pluginsState for row in arr: - # There has to be always 9 columns - if len(row) == 9 and (row[0] in ['','null']) == False : - # Create a tuple containing values to be inserted into the database. + # There has to be always 9 or 13 columns + if len(row) in [9, 13] and row[0] not in ['', 'null']: + # Create a base tuple containing values to be inserted into the database. # Each value corresponds to a column in the table in the order of the columns. 
- # must match the Plugins_Objects and Plugins_Events database tables and can be used as input for the plugin_object_class - sqlParams.append(( - 0, # "Index" placeholder - plugin["unique_prefix"], # "Plugin" - row[0], # "Object_PrimaryID" - handle_empty(row[1]), # "Object_SecondaryID" - 'null', # "DateTimeCreated" column (null placeholder) - row[2], # "DateTimeChanged" - row[3], # "Watched_Value1" - row[4], # "Watched_Value2" - handle_empty(row[5]), # "Watched_Value3" - handle_empty(row[6]), # "Watched_Value4" - 'not-processed', # "Status" column (placeholder) - row[7], # "Extra" - 'null', # "UserData" column (null placeholder) - row[8], # "ForeignKey" - 'null' # Sync Hub Node name - Only supported with scripts - )) + # Must match the Plugins_Objects and Plugins_Events database tables and can be used as input for the plugin_object_class. + base_params = [ + 0, # "Index" placeholder + plugin["unique_prefix"], # "Plugin" + row[0], # "Object_PrimaryID" + handle_empty(row[1]), # "Object_SecondaryID" + 'null', # "DateTimeCreated" column (null placeholder) + row[2], # "DateTimeChanged" + row[3], # "Watched_Value1" + row[4], # "Watched_Value2" + handle_empty(row[5]), # "Watched_Value3" + handle_empty(row[6]), # "Watched_Value4" + 'not-processed', # "Status" column (placeholder) + row[7], # "Extra" + 'null', # "UserData" column (null placeholder) + row[8], # "ForeignKey" + 'null' # Sync Hub Node name - Only supported with scripts + ] + + # Extend the base tuple with additional values if there are 13 columns + if len(row) == 13: + base_params.extend([ + row[9], # "HelpVal1" + row[10], # "HelpVal2" + row[11], # "HelpVal3" + row[12] # "HelpVal4" + ]) + else: + # add padding + base_params.extend([ + 'null', # "HelpVal1" + 'null', # "HelpVal2" + 'null', # "HelpVal3" + 'null' # "HelpVal4" + ]) + + # Append the final parameters to sqlParams + sqlParams.append(tuple(base_params)) else: mylog('none', ['[Plugins] Skipped invalid sql result']) @@ -509,12 +566,13 @@ def 
process_plugin_events(db, plugin, pluginsState, plugEventsArr): for plugObj in pluginObjects: # keep old createdTime time if the plugObj already was created before createdTime = plugObj.changed if plugObj.status == 'new' else plugObj.created - # 14 values without Index + # 18 values without Index values = ( plugObj.pluginPref, plugObj.primaryId, plugObj.secondaryId, createdTime, plugObj.changed, plugObj.watched1, plugObj.watched2, plugObj.watched3, plugObj.watched4, plugObj.status, plugObj.extra, plugObj.userData, - plugObj.foreignKey, plugObj.syncHubNodeName + plugObj.foreignKey, plugObj.syncHubNodeName, + plugObj.helpVal1, plugObj.helpVal2, plugObj.helpVal3, plugObj.helpVal4 ) if plugObj.status == 'new': @@ -547,8 +605,9 @@ def process_plugin_events(db, plugin, pluginsState, plugEventsArr): INSERT INTO Plugins_Objects ("Plugin", "Object_PrimaryID", "Object_SecondaryID", "DateTimeCreated", "DateTimeChanged", "Watched_Value1", "Watched_Value2", "Watched_Value3", - "Watched_Value4", "Status", "Extra", "UserData", "ForeignKey", "SyncHubNodeName") - VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) + "Watched_Value4", "Status", "Extra", "UserData", "ForeignKey", "SyncHubNodeName", + "HelpVal1", "HelpVal2", "HelpVal3", "HelpVal4") + VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) """, objects_to_insert ) @@ -559,7 +618,7 @@ def process_plugin_events(db, plugin, pluginsState, plugEventsArr): UPDATE Plugins_Objects SET "Plugin" = ?, "Object_PrimaryID" = ?, "Object_SecondaryID" = ?, "DateTimeCreated" = ?, "DateTimeChanged" = ?, "Watched_Value1" = ?, "Watched_Value2" = ?, "Watched_Value3" = ?, - "Watched_Value4" = ?, "Status" = ?, "Extra" = ?, "UserData" = ?, "ForeignKey" = ?, "SyncHubNodeName" = ? + "Watched_Value4" = ?, "Status" = ?, "Extra" = ?, "UserData" = ?, "ForeignKey" = ?, "SyncHubNodeName" = ?, "HelpVal1" = ?, "HelpVal2" = ?, "HelpVal3" = ?, "HelpVal4" = ? WHERE "Index" = ? 
""", objects_to_update ) @@ -572,8 +631,9 @@ def process_plugin_events(db, plugin, pluginsState, plugEventsArr): INSERT INTO Plugins_Events ("Plugin", "Object_PrimaryID", "Object_SecondaryID", "DateTimeCreated", "DateTimeChanged", "Watched_Value1", "Watched_Value2", "Watched_Value3", - "Watched_Value4", "Status", "Extra", "UserData", "ForeignKey", "SyncHubNodeName") - VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) + "Watched_Value4", "Status", "Extra", "UserData", "ForeignKey", "SyncHubNodeName", + "HelpVal1", "HelpVal2", "HelpVal3", "HelpVal4") + VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) """, events_to_insert ) @@ -585,8 +645,9 @@ def process_plugin_events(db, plugin, pluginsState, plugEventsArr): INSERT INTO Plugins_History ("Plugin", "Object_PrimaryID", "Object_SecondaryID", "DateTimeCreated", "DateTimeChanged", "Watched_Value1", "Watched_Value2", "Watched_Value3", - "Watched_Value4", "Status", "Extra", "UserData", "ForeignKey", "SyncHubNodeName") - VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) + "Watched_Value4", "Status", "Extra", "UserData", "ForeignKey", "SyncHubNodeName", + "HelpVal1", "HelpVal2", "HelpVal3", "HelpVal4") + VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) """, history_to_insert ) @@ -665,6 +726,14 @@ def process_plugin_events(db, plugin, pluginsState, plugEventsArr): tmpList.append(plgEv.status) elif col['column'] == 'SyncHubNodeName': tmpList.append(plgEv.syncHubNodeName) + elif col['column'] == 'HelpVal1': + tmpList.append(plgEv.helpVal1) + elif col['column'] == 'HelpVal2': + tmpList.append(plgEv.helpVal2) + elif col['column'] == 'HelpVal3': + tmpList.append(plgEv.helpVal3) + elif col['column'] == 'HelpVal4': + tmpList.append(plgEv.helpVal4) # Check if there's a default value specified for this column in the JSON. 
if 'mapped_to_column_data' in col and 'value' in col['mapped_to_column_data']: @@ -714,6 +783,11 @@ class plugin_object_class: self.userData = objDbRow[12] self.foreignKey = objDbRow[13] self.syncHubNodeName = objDbRow[14] + self.helpVal1 = objDbRow[15] + self.helpVal2 = objDbRow[16] + self.helpVal3 = objDbRow[17] + self.helpVal4 = objDbRow[18] + # Check if self.status is valid if self.status not in ["exists", "watched-changed", "watched-not-changed", "new", "not-processed", "missing-in-last-scan"]: @@ -727,6 +801,7 @@ class plugin_object_class: setObj = get_plugin_setting_obj(plugin, 'WATCH') + # hash for comparing watched value changes indexNameColumnMapping = [(6, 'Watched_Value1' ), (7, 'Watched_Value2' ), (8, 'Watched_Value3' ), (9, 'Watched_Value4' )] if setObj is not None: From 8199bef55d2f2b3b3b16ba505c9705188ee786a4 Mon Sep 17 00:00:00 2001 From: lookflying Date: Wed, 7 Aug 2024 22:34:39 +0800 Subject: [PATCH 2/2] mtscan works --- .gitignore | 3 +- Dockerfile | 2 +- front/log/.gitignore | 0 front/plugins/mikrotik_scan/README.md | 7 + front/plugins/mikrotik_scan/config.json | 437 ++++++++++++++++++++++++ front/plugins/mikrotik_scan/mikrotik.py | 142 ++++++++ 6 files changed, 589 insertions(+), 2 deletions(-) mode change 100755 => 100644 front/log/.gitignore create mode 100755 front/plugins/mikrotik_scan/README.md create mode 100755 front/plugins/mikrotik_scan/config.json create mode 100755 front/plugins/mikrotik_scan/mikrotik.py diff --git a/.gitignore b/.gitignore index ff1ef480..933ebc9d 100755 --- a/.gitignore +++ b/.gitignore @@ -19,4 +19,5 @@ __pycache__/ **/last_result.log **/script.log **/pialert.conf_bak -**/pialert.db_bak \ No newline at end of file +**/pialert.db_bak +.*.swp diff --git a/Dockerfile b/Dockerfile index 9ce5e9e2..48eb5d6f 100755 --- a/Dockerfile +++ b/Dockerfile @@ -15,7 +15,7 @@ ENV PATH="/opt/venv/bin:$PATH" COPY . 
${INSTALL_DIR}/ -RUN pip install netifaces tplink-omada-client pycryptodome requests paho-mqtt scapy cron-converter pytz json2table dhcp-leases pyunifi speedtest-cli chardet python-nmap dnspython \ +RUN pip install netifaces tplink-omada-client pycryptodome requests paho-mqtt scapy cron-converter pytz json2table dhcp-leases pyunifi speedtest-cli chardet python-nmap dnspython librouteros \ && bash -c "find ${INSTALL_DIR} -type d -exec chmod 750 {} \;" \ && bash -c "find ${INSTALL_DIR} -type f -exec chmod 640 {} \;" \ && bash -c "find ${INSTALL_DIR} -type f \( -name '*.sh' -o -name '*.py' -o -name 'speedtest-cli' \) -exec chmod 750 {} \;" diff --git a/front/log/.gitignore b/front/log/.gitignore old mode 100755 new mode 100644 diff --git a/front/plugins/mikrotik_scan/README.md b/front/plugins/mikrotik_scan/README.md new file mode 100755 index 00000000..3d743790 --- /dev/null +++ b/front/plugins/mikrotik_scan/README.md @@ -0,0 +1,7 @@ +## Overview + +Plugin for device name discovery via the Mikrotik dhcp-server leases + +### Usage + +- Check the Settings page for details. diff --git a/front/plugins/mikrotik_scan/config.json b/front/plugins/mikrotik_scan/config.json new file mode 100755 index 00000000..baca3a15 --- /dev/null +++ b/front/plugins/mikrotik_scan/config.json @@ -0,0 +1,437 @@ +{ + "code_name": "mikrotik_scan", + "unique_prefix": "MTSCAN", + "plugin_type": "other", + "execution_order" : "Layer_4", + "enabled": true, + "data_source": "script", + "mapped_to_table": "CurrentScan", + "show_ui": true, + "localized": ["display_name", "description", "icon"], + "display_name": [ + { + "language_code": "en_us", + "string": "Mikrotik (Name discovery)" + } + ], + "icon": [ + { + "language_code": "en_us", + "string": "" + } + ], + "description": [ + { + "language_code": "en_us", + "string": "A plugin to discover device names." 
+ } + ], + "params": [ + { + "name": "ips", + "type": "sql", + "value": "SELECT dev_LastIP from DEVICES order by dev_MAC", + "timeoutMultiplier": true + }, + { + "name": "mt_host", + "type": "setting", + "value": "MTSCAN_MT_HOST" + }, + { + "name": "mt_port", + "type": "setting", + "value": "MTSCAN_MT_PORT" + }, + { + "name": "mt_user", + "type": "setting", + "value": "MTSCAN_MT_USER" + }, + { + "name": "mt_pass", + "type": "setting", + "value": "MTSCAN_MT_PASS" + } + ], + "settings": [ + { + "function": "RUN", + "events": ["run"], + "type": { + "dataType": "string", + "elements": [ + { "elementType": "select", "elementOptions": [], "transformers": [] } + ] + }, + "default_value": "disabled", + "options": [ + "disabled", + "before_name_updates", + "on_new_device", + "once", + "schedule", + "always_after_scan" + ], + "localized": ["name", "description"], + "name": [ + { + "language_code": "en_us", + "string": "When to run" + } + ], + "description": [ + { + "language_code": "en_us", + "string": "When the plugin should be executed. If enabled this will execute the scan until there are no (unknown) or (name not found) devices. Setting this to on_new_device or a daily schedule is recommended." + } + ] + }, + { + "function": "CMD", + "type": { + "dataType": "string", + "elements": [ + { + "elementType": "input", + "elementOptions": [{ "readonly": "true" }], + "transformers": [] + } + ] + }, + "default_value": "python3 /app/front/plugins/mikrotik_scan/mikrotik.py", + "options": [], + "localized": ["name", "description"], + "name": [ + { + "language_code": "en_us", + "string": "Command" + } + ], + "description": [ + { + "language_code": "en_us", + "string": "Command to run. 
This cannot be changed" + } + ] + }, + { + "function": "RUN_SCHD", + "type": { + "dataType": "string", + "elements": [ + { "elementType": "input", "elementOptions": [], "transformers": [] } + ] + }, + "default_value": "*/30 * * * *", + "options": [], + "localized": ["name", "description"], + "name": [ + { + "language_code": "en_us", + "string": "Schedule" + } + ], + "description": [ + { + "language_code": "en_us", + "string": "Only enabled if you select schedule in the MTSCAN_RUN setting. Make sure you enter the schedule in the correct cron-like format (e.g. validate at crontab.guru). For example entering 0 4 * * * will run the scan after 4 am in the TIMEZONE you set above. Will be run NEXT time the time passes." + } + ] + }, + { + "function": "MT_HOST", + "type": { + "dataType": "string", + "elements": [ + { + "elementType": "input", + "elementOptions": [], + "transformers": [] + } + ] + }, + "default_value": "192.168.88.1", + "options": [], + "localized": ["name", "description"], + "name": [ + { + "language_code": "en_us", + "string": "Mikrotik Host IP" + } + ], + "description": [ + { + "language_code": "en_us", + "string": "IP for Mikrotik Router" + } + ] + }, + { + "function": "MT_PORT", + "type": { + "dataType": "integer", + "elements": [ + { + "elementType": "input", + "elementOptions": [{ "type": "number" }], + "transformers": [] + } + ] + }, + "default_value": 8728, + "options": [], + "localized": ["name", "description"], + "name": [ + { + "language_code": "en_us", + "string": "Mikrotik API Port" + } + ], + "description": [ + { + "language_code": "en_us", + "string": "API Port for Mikrotik Router" + } + ] + }, + { + "function": "MT_USER", + "type": { + "dataType": "string", + "elements": [ + { + "elementType": "input", + "elementOptions": [], + "transformers": [] + } + ] + }, + "default_value": "admin", + "options": [], + "localized": ["name", "description"], + "name": [ + { + "language_code": "en_us", + "string": "Mikrotik User" + } + ], + "description": 
[ + { + "language_code": "en_us", + "string": "User for Mikrotik Router" + } + ] + }, + { + "function": "MT_PASS", + "type": { + "dataType": "string", + "elements": [ + { + "elementType": "input", + "elementOptions": [{ "type": "password" }], + "transformers": [] + } + ] + }, + "default_value": "", + "options": [], + "localized": ["name", "description"], + "name": [ + { + "language_code": "en_us", + "string": "Mikrotik Password" + } + ], + "description": [ + { + "language_code": "en_us", + "string": "Password for Mikrotik Router" + } + ] + } + ], + "database_column_definitions": [ + { + "column": "Object_PrimaryID", + "mapped_to_column": "cur_MAC", + "css_classes": "col-sm-2", + "show": true, + "type": "device_name_mac", + "default_value": "", + "options": [], + "localized": ["name"], + "name": [ + { + "language_code": "en_us", + "string": "Name" + } + ] + }, + { + "column": "ForeignKey", + "css_classes": "col-sm-2", + "show": true, + "type": "device_mac", + "default_value": "", + "options": [], + "localized": ["name"], + "name": [ + { + "language_code": "en_us", + "string": "MAC" + } + ] + }, + { + "column": "Object_SecondaryID", + "mapped_to_column": "cur_IP", + "css_classes": "col-sm-2", + "show": true, + "type": "device_ip", + "default_value": "", + "options": [], + "localized": ["name"], + "name": [ + { + "language_code": "en_us", + "string": "IP" + } + ] + }, + { + "column": "Watched_Value1", + "css_classes": "col-sm-2", + "show": true, + "type": "device_ip", + "default_value": "", + "options": [], + "localized": ["name"], + "name": [ + { + "language_code": "en_us", + "string": "Lease IP" + } + ] + }, + { + "column": "Watched_Value2", + "mapped_to_column": "cur_Name", + "css_classes": "col-sm-2", + "show": true, + "type": "label", + "default_value": "", + "options": [], + "localized": ["name"], + "name": [ + { + "language_code": "en_us", + "string": "Name" + } + ] + }, + { + "column": "Watched_Value3", + "css_classes": "col-sm-2", + "show": true, + "type": 
"label", + "default_value": "", + "options": [], + "localized": ["name"], + "name": [ + { + "language_code": "en_us", + "string": "Host Name" + } + ] + }, + { + "column": "Watched_Value4", + "css_classes": "col-sm-2", + "show": true, + "type": "label", + "default_value": "", + "options": [], + "localized": ["name"], + "name": [ + { + "language_code": "en_us", + "string": "Last Seen" + } + ] + }, + { + "column": "HelpVal1", + "css_classes": "col-sm-2", + "show": true, + "type": "label", + "default_value": "", + "options": [], + "localized": ["name"], + "name": [ + { + "language_code": "en_us", + "string": "Comment" + } + ] + }, + { + "column": "Dummy", + "mapped_to_column": "cur_ScanMethod", + "mapped_to_column_data": { + "value": "MTSCAN" + }, + "css_classes": "col-sm-2", + "show": false, + "type": "label", + "default_value": "", + "options": [], + "localized": ["name"], + "name": [ + { + "language_code": "en_us", + "string": "Scan method" + } + ] + }, + { + "column": "DateTimeCreated", + "css_classes": "col-sm-2", + "show": true, + "type": "label", + "default_value": "", + "options": [], + "localized": ["name"], + "name": [ + { + "language_code": "en_us", + "string": "Created" + }, + { + "language_code": "es_es", + "string": "Creado" + } + ] + }, + { + "column": "DateTimeChanged", + "css_classes": "col-sm-2", + "show": true, + "type": "label", + "default_value": "", + "options": [], + "localized": ["name"], + "name": [ + { + "language_code": "en_us", + "string": "Changed" + }, + { + "language_code": "es_es", + "string": "Cambiado" + } + ] + } + ] +} diff --git a/front/plugins/mikrotik_scan/mikrotik.py b/front/plugins/mikrotik_scan/mikrotik.py new file mode 100755 index 00000000..88c2f098 --- /dev/null +++ b/front/plugins/mikrotik_scan/mikrotik.py @@ -0,0 +1,142 @@ +#!/usr/bin/env python +# test script by running: +# tbc + +import os +import pathlib +import argparse +import subprocess +import sys +import hashlib +import csv +import sqlite3 +import re +from io 
import StringIO +from datetime import datetime + +# Register NetAlertX directories +INSTALL_PATH="/app" +sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"]) + +from plugin_helper import Plugin_Object, Plugin_Objects, decodeBase64 +from logger import mylog, append_line_to_file +from helper import timeNowTZ, get_setting_value +from const import logPath, applicationPath, fullDbPath +from database import DB +from device import Device_obj +import conf +from pytz import timezone +from librouteros import connect +from librouteros.exceptions import TrapError + +# Make sure the TIMEZONE for logging is correct +conf.tz = timezone(get_setting_value('TIMEZONE')) + +CUR_PATH = str(pathlib.Path(__file__).parent.resolve()) +LOG_FILE = os.path.join(CUR_PATH, 'script.log') +RESULT_FILE = os.path.join(CUR_PATH, 'last_result.log') + +pluginName = 'MTSCAN' + +def main(): + + mylog('verbose', [f'[{pluginName}] In script']) + + mt_host = get_setting_value('MTSCAN_MT_HOST') + mt_port = get_setting_value('MTSCAN_MT_PORT') + mt_user = get_setting_value('MTSCAN_MT_USER') + mt_password = get_setting_value('MTSCAN_MT_PASS') + + #mylog('verbose', [f'[{pluginName}] Router: {mt_host}:{mt_port} user: {mt_user}, pass: {mt_password}']) + # Create a database connection + db = DB() # instance of class DB + db.open() + + # Initialize the Plugin obj output file + plugin_objects = Plugin_Objects(RESULT_FILE) + + # Create a Device_obj instance + device_handler = Device_obj(db) + + # Retrieve devices + #unknown_devices = device_handler.getUnknown() + #mylog('verbose', [f'[{pluginName}] Unknown devices count: {len(unknown_devices)}']) + + all_devices = device_handler.getAll() + + mylog('verbose', [f'[{pluginName}] all devices count: {len(all_devices)}']) + + device_map = {d['dev_MAC']:d['dev_LastIP'] for d in all_devices} + + try: + # connect router + api = connect(username=mt_user, password=mt_password, host=mt_host, port=mt_port) + + # get dhcp leases + leases = 
api('/ip/dhcp-server/lease/print') + + + + for lease in leases: + lease_id = lease.get('.id') + address = lease.get('address') + mac_address = lease.get('mac-address').lower() + host_name = lease.get('host-name') + comment = lease.get('comment') + last_seen = lease.get('last-seen') + + mylog('verbose', [f"ID: {lease_id}, Address: {address}, MAC Address: {mac_address}, Host Name: {host_name}, Comment: {comment}, Last Seen: {last_seen}"]) + if mac_address in device_map.keys(): + device_name = host_name + if comment != '': + device_name = comment + + plugin_objects.add_object( + # "Name-MAC", "LastIP", "IP", "Name","Host","LastSeen","Comment" + primaryId = mac_address, + secondaryId = device_map[mac_address], + watched1 = address, + watched2 = device_name, + watched3 = host_name, + watched4 = last_seen, + extra = '', + helpVal1 = comment, + foreignKey = mac_address) + + plugin_objects.write_result_file() + except TrapError as e: + mylog('error', [f"An error occurred: {e}"]) + except Exception as e: + mylog('error', [f"Failed to connect to MikroTik API: {e}"]) + + + #for device in unknown_devices: + # domain_name, dns_server = execute_nslookup(device['dev_LastIP'], timeout) + + # if domain_name != '': + # plugin_objects.add_object( + # # "MAC", "IP", "Server", "Name" + # primaryId = device['dev_MAC'], + # secondaryId = device['dev_LastIP'], + # watched1 = dns_server, + # watched2 = domain_name, + # watched3 = '', + # watched4 = '', + # extra = '', + # foreignKey = device['dev_MAC']) + + #plugin_objects.write_result_file() + + + mylog('verbose', [f'[{pluginName}] Script finished']) + + return 0 + + + + +#=============================================================================== +# BEGIN +#=============================================================================== +if __name__ == '__main__': + main()