feat: implement languages endpoint and refactor language handling to use languages.json

This commit is contained in:
Jokob @NetAlertX
2026-02-28 01:51:12 +00:00
parent e57fd2e81e
commit 814ba02d1c
12 changed files with 377 additions and 115 deletions

View File

@@ -387,78 +387,15 @@ function getString(key) {
}
// -----------------------------------------------------------------------------
// Get current language ISO code.
// The UI_LANG setting value is always in the form "Name (code)", e.g. "English (en_us)".
// Extracting the code with a regex means this function never needs updating when a
// new language is added — the single source of truth is languages.json.
function getLangCode() {
  // UI_LANG is always a display value of the form "Name (code)", e.g. "English (en_us)".
  // The old hardcoded switch (one case per language) is dead merge residue: the regex
  // below extracts the code directly, so languages.json stays the single source of truth.
  UI_LANG = getSetting("UI_LANG");
  const match = (UI_LANG || '').match(/\(([a-z]{2}_[a-z]{2})\)\s*$/i);
  // Fall back to en_us when the setting is missing or malformed.
  return match ? match[1].toLowerCase() : 'en_us';
}
// -----------------------------------------------------------------------------

View File

@@ -12,11 +12,9 @@ var timerRefreshData = ''
// Values treated as "empty" throughout the UI.
var emptyArr = ['undefined', "", undefined, null, 'null'];
var UI_LANG = "English (en_us)";

// allLanguages is populated at init via fetchAllLanguages() from GET /languages.
// Do not hardcode this list — add new languages to languages.json instead.
// (The old hardcoded `const allLanguages = [...]` was a merge artifact; keeping
// both declarations is a redeclaration SyntaxError.)
let allLanguages = [];

var settingsJSON = {}
// NAX_CACHE_VERSION and CACHE_KEYS moved to cache.js
@@ -24,6 +22,25 @@ var settingsJSON = {}
// getCache, setCache, fetchJson, getAuthContext moved to cache.js
// -----------------------------------------------------------------------------
// Fetch the canonical language list from GET /languages and populate allLanguages.
// Must be called after the API token is available (e.g. alongside cacheStrings).
// -----------------------------------------------------------------------------
// Fetch the canonical language list from GET /languages and populate allLanguages.
// Must be called after the API token is available (e.g. alongside cacheStrings).
// Returns a Promise that always resolves (errors are logged, not rethrown), so
// callers can chain UI init without their own error handling.
function fetchAllLanguages(apiToken) {
  return fetch('/languages', {
    headers: { 'Authorization': 'Bearer ' + apiToken }
  })
    .then(function (resp) {
      // Surface HTTP-level failures (401/500/…) explicitly instead of letting
      // resp.json() throw an opaque parse error or parse an error payload.
      if (!resp.ok) {
        throw new Error('GET /languages failed with HTTP ' + resp.status);
      }
      return resp.json();
    })
    .then(function (data) {
      // Only overwrite the global list when the payload has the expected shape.
      if (data && data.success && Array.isArray(data.languages)) {
        allLanguages = data.languages.map(function (l) { return l.code; });
      }
    })
    .catch(function (err) {
      console.warn('[fetchAllLanguages] Failed to load language list:', err);
    });
}
// -----------------------------------------------------------------------------
function setCookie (cookie, value, expirationMinutes='') {

View File

@@ -5,43 +5,20 @@
// ###################################
$defaultLang = "en_us";

// Load the canonical language list from languages.json — do not hardcode here.
// (The old hardcoded $allLanguages array and the per-language switch were merge
// residue; they are fully superseded by the json-driven logic below.)
$_langJsonPath = dirname(__FILE__) . '/languages.json';
$_langJson = json_decode(file_get_contents($_langJsonPath), true);
$allLanguages = array_column($_langJson['languages'], 'code');

global $db;
$result = $db->querySingle("SELECT setValue FROM Settings WHERE setKey = 'UI_LANG'");

// Extract the language code from the display value, e.g. "English (en_us)" => "en_us".
// This regex means lang.php never needs updating when a new language is added.
preg_match('/\(([a-z]{2}_[a-z]{2})\)\s*$/i', (string) $result, $_langMatch);
$pia_lang_selected = isset($_langMatch[1]) ? strtolower($_langMatch[1]) : $defaultLang;

$result = $db->query("SELECT * FROM Plugins_Language_Strings");
$strings = array();

View File

@@ -0,0 +1,25 @@
{
"default": "en_us",
"languages": [
{ "code": "ar_ar", "display": "Arabic (ar_ar)" },
{ "code": "ca_ca", "display": "Catalan (ca_ca)" },
{ "code": "cs_cz", "display": "Czech (cs_cz)" },
{ "code": "de_de", "display": "German (de_de)" },
{ "code": "en_us", "display": "English (en_us)" },
{ "code": "es_es", "display": "Spanish (es_es)" },
{ "code": "fa_fa", "display": "Farsi (fa_fa)" },
{ "code": "fr_fr", "display": "French (fr_fr)" },
{ "code": "it_it", "display": "Italian (it_it)" },
{ "code": "ja_jp", "display": "Japanese (ja_jp)" },
{ "code": "nb_no", "display": "Norwegian (nb_no)" },
{ "code": "pl_pl", "display": "Polish (pl_pl)" },
{ "code": "pt_br", "display": "Portuguese (pt_br)" },
{ "code": "pt_pt", "display": "Portuguese (pt_pt)" },
{ "code": "ru_ru", "display": "Russian (ru_ru)" },
{ "code": "sv_sv", "display": "Swedish (sv_sv)" },
{ "code": "tr_tr", "display": "Turkish (tr_tr)" },
{ "code": "uk_ua", "display": "Ukrainian (uk_ua)" },
{ "code": "vi_vn", "display": "Vietnamese (vi_vn)" },
{ "code": "zh_cn", "display": "Chinese (zh_cn)" }
]
}

View File

@@ -31,13 +31,22 @@ def merge_translations(main_file, other_files):
f.truncate()
def load_language_codes(languages_json_path):
    """Read language codes from languages.json, guaranteeing en_us is first.

    Args:
        languages_json_path: path to the canonical languages.json registry.

    Returns:
        list[str]: all language codes, with "en_us" moved to index 0 when present.
    """
    with open(languages_json_path, "r", encoding="utf-8") as handle:
        registry = json.load(handle)
    codes = [lang["code"] for lang in registry["languages"]]
    # en_us is the master translation file and must always be merged first.
    try:
        codes.insert(0, codes.pop(codes.index("en_us")))
    except ValueError:
        pass  # en_us absent from the registry — keep the file order as-is
    return codes
if __name__ == "__main__":
    current_path = os.path.dirname(os.path.abspath(__file__))
    # Language codes are loaded from languages.json — add a new language there.
    # (The old hardcoded json_files list was merge residue; load_language_codes
    # already guarantees en_us — the master file — comes first.)
    languages_json = os.path.join(current_path, "languages.json")
    codes = load_language_codes(languages_json)
    file_paths = [os.path.join(current_path, f"{code}.json") for code in codes]
    # Merge every other translation file into the master (en_us) file.
    merge_translations(file_paths[0], file_paths[1:])

View File

@@ -42,6 +42,7 @@ from .dbquery_endpoint import read_query, write_query, update_query, delete_quer
from .sync_endpoint import handle_sync_post, handle_sync_get # noqa: E402 [flake8 lint suppression]
from .logs_endpoint import clean_log # noqa: E402 [flake8 lint suppression]
from .health_endpoint import get_health_status # noqa: E402 [flake8 lint suppression]
from .languages_endpoint import get_languages # noqa: E402 [flake8 lint suppression]
from models.user_events_queue_instance import UserEventsQueueInstance # noqa: E402 [flake8 lint suppression]
from models.event_instance import EventInstance # noqa: E402 [flake8 lint suppression]
@@ -95,6 +96,7 @@ from .openapi.schemas import ( # noqa: E402 [flake8 lint suppression]
DbQueryUpdateRequest, DbQueryDeleteRequest,
AddToQueueRequest, GetSettingResponse,
RecentEventsRequest, SetDeviceAliasRequest,
LanguagesResponse,
)
from .sse_endpoint import ( # noqa: E402 [flake8 lint suppression]
@@ -1962,6 +1964,34 @@ def check_health(payload=None):
}), 500
@app.route("/languages", methods=["GET"])
@validate_request(
    operation_id="get_languages",
    summary="Get Supported Languages",
    description="Returns the canonical list of supported UI languages loaded from languages.json.",
    response_model=LanguagesResponse,
    tags=["system", "languages"],
    auth_callable=is_authorized
)
def list_languages(payload=None):
    """Return the canonical language registry.

    200 with {success, default, count, languages} on success;
    500 with an error body when languages.json is missing or malformed.
    """
    try:
        registry = get_languages()
        return jsonify({"success": True, **registry}), 200
    except FileNotFoundError:
        # Registry file absent — a deployment problem, report server-side error.
        error_body = {
            "success": False,
            "error": "languages.json not found",
            "message": "Language registry file is missing"
        }
        return jsonify(error_body), 500
    except ValueError as e:
        # Registry file present but unparseable or missing required keys.
        error_body = {
            "success": False,
            "error": str(e),
            "message": "Language registry file is malformed"
        }
        return jsonify(error_body), 500
# --------------------------
# Background Server Start
# --------------------------

View File

@@ -545,7 +545,7 @@ class Query(ObjectType):
language_folder = '/app/front/php/templates/language/'
if os.path.exists(language_folder):
for filename in os.listdir(language_folder):
if filename.endswith('.json'):
if filename.endswith('.json') and filename != 'languages.json':
file_lang_code = filename.replace('.json', '')
# Filter by langCode if provided

View File

@@ -0,0 +1,43 @@
"""Languages endpoint — returns the canonical language registry from languages.json."""
import json
import os
from logger import mylog
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
LANGUAGES_JSON_PATH = os.path.join(
INSTALL_PATH, "front", "php", "templates", "language", "languages.json"
)
def get_languages():
    """
    Load and return the canonical language registry.

    Returns a dict with keys:
      - default (str): the fallback language code
      - languages (list[dict]): each entry has 'code' and 'display'
      - count (int): number of entries in 'languages'

    Raises:
      FileNotFoundError: if languages.json is missing
      ValueError: if the JSON is malformed or missing required fields
    """
    try:
        with open(LANGUAGES_JSON_PATH, "r", encoding="utf-8") as handle:
            registry = json.load(handle)
    except FileNotFoundError:
        mylog("none", [f"[languages] languages.json not found at {LANGUAGES_JSON_PATH}"])
        raise
    except json.JSONDecodeError as e:
        mylog("none", [f"[languages] Failed to parse languages.json: {e}"])
        raise ValueError(f"Malformed languages.json: {e}") from e

    # Both top-level keys are required by the /languages response schema.
    if not ("default" in registry and "languages" in registry):
        raise ValueError("languages.json must contain 'default' and 'languages' keys")

    supported = registry["languages"]
    return {
        "default": registry["default"],
        "languages": supported,
        "count": len(supported),
    }

View File

@@ -1031,6 +1031,41 @@ class GetSettingResponse(BaseResponse):
value: Any = Field(None, description="The setting value")
# =============================================================================
# LANGUAGES SCHEMAS
# =============================================================================
class LanguageEntry(BaseModel):
    """A single supported language entry."""
    # extra="allow": future keys added to languages.json entries pass through
    # without breaking validation.
    model_config = ConfigDict(extra="allow")
    code: str = Field(..., description="ISO language code (e.g. 'en_us')")
    display: str = Field(..., description="Human-readable display name (e.g. 'English (en_us)')")
class LanguagesResponse(BaseResponse):
    """Response for GET /languages — the canonical language registry."""
    # extra="allow" keeps the schema forward-compatible; json_schema_extra feeds
    # the example shown in the generated OpenAPI docs.
    model_config = ConfigDict(
        extra="allow",
        json_schema_extra={
            "examples": [{
                "success": True,
                "default": "en_us",
                "count": 20,
                "languages": [
                    {"code": "en_us", "display": "English (en_us)"},
                    {"code": "de_de", "display": "German (de_de)"}
                ]
            }]
        }
    )
    # Fields mirror the dict returned by languages_endpoint.get_languages().
    default: str = Field(..., description="Default/fallback language code")
    count: int = Field(..., description="Total number of supported languages")
    languages: List[LanguageEntry] = Field(..., description="All supported languages")
# =============================================================================
# GRAPHQL SCHEMAS
# =============================================================================

View File

@@ -10,7 +10,7 @@ import uuid
# Register NetAlertX libraries
import conf
from const import fullConfPath, fullConfFolder, default_tz
from const import fullConfPath, fullConfFolder, default_tz, applicationPath
from helper import getBuildTimeStampAndVersion, collect_lang_strings, updateSubnets, generate_random_string
from utils.datetime_utils import timeNowUTC
from app_state import updateState
@@ -21,6 +21,31 @@ from plugin import plugin_manager, print_plugin_info
from utils.plugin_utils import get_plugins_configs, get_set_value_for_init
from messaging.in_app import write_notification
# ===============================================================================
# Language helpers
# ===============================================================================
_LANGUAGES_JSON = os.path.join(
applicationPath, "front", "php", "templates", "language", "languages.json"
)
def _load_language_display_names():
    """Return a JSON-serialised list of display names from languages.json.

    Falls back to a hardcoded English-only list on any error so that
    the settings page is never broken by a missing/corrupt file.
    """
    try:
        with open(_LANGUAGES_JSON, "r", encoding="utf-8") as handle:
            registry = json.load(handle)
        # The UI_LANG select options are the human-readable display values.
        return json.dumps([lang["display"] for lang in registry["languages"]])
    except Exception as e:
        mylog("none", [f"[languages] Failed to load languages.json, using fallback: {e}"])
        return '["English (en_us)"]'
# ===============================================================================
# Initialise user defined values
# ===============================================================================
@@ -401,7 +426,7 @@ def importConfigs(pm, db, all_plugins):
c_d,
"Language Interface",
'{"dataType":"string", "elements": [{"elementType" : "select", "elementOptions" : [] ,"transformers": []}]}',
"['English (en_us)', 'Arabic (ar_ar)', 'Catalan (ca_ca)', 'Czech (cs_cz)', 'German (de_de)', 'Spanish (es_es)', 'Farsi (fa_fa)', 'French (fr_fr)', 'Italian (it_it)', 'Japanese (ja_jp)', 'Norwegian (nb_no)', 'Polish (pl_pl)', 'Portuguese (pt_br)', 'Portuguese (pt_pt)', 'Russian (ru_ru)', 'Swedish (sv_sv)', 'Turkish (tr_tr)', 'Ukrainian (uk_ua)', 'Vietnamese (vi_vn)', 'Chinese (zh_cn)']", # noqa: E501 - inline JSON
_load_language_display_names(), # derived from languages.json
"UI",
)

View File

@@ -169,3 +169,26 @@ def test_graphql_post_langstrings_all_languages(client, api_token):
assert data["deStrings"]["count"] >= 1
# Ensure langCode matches
assert all(e["langCode"] == "en_us" for e in data["enStrings"]["langStrings"])
def test_graphql_langstrings_excludes_languages_json(client, api_token):
    """languages.json must never appear as a language string entry (langCode='languages')"""
    query = {
        "query": """
        {
            langStrings {
                langStrings { langCode langStringKey langStringText }
                count
            }
        }
        """
    }
    resp = client.post("/graphql", json=query, headers=auth_headers(api_token))
    assert resp.status_code == 200
    all_strings = resp.json.get("data", {}).get("langStrings", {}).get("langStrings", [])
    # No entry should have langCode == "languages" (i.e. from languages.json)
    polluted = [entry for entry in all_strings if entry.get("langCode") == "languages"]
    assert polluted == [], (
        f"languages.json leaked into langStrings as {len(polluted)} entries; "
        "graphql_endpoint.py must exclude it from the directory scan"
    )

View File

@@ -0,0 +1,141 @@
"""Tests for GET /languages endpoint."""
import sys
import os
import pytest
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
sys.path.extend([f"{INSTALL_PATH}/front/plugins", f"{INSTALL_PATH}/server"])
from helper import get_setting_value # noqa: E402
from api_server.api_server_start import app # noqa: E402
@pytest.fixture(scope="session")
def api_token():
    """Load API token from system settings."""
    # Session scope: the token is read once and shared across all tests.
    token = get_setting_value("API_TOKEN")
    return token
@pytest.fixture
def client():
    """Flask test client."""
    # Context manager ensures request-context teardown after each test.
    with app.test_client() as test_client:
        yield test_client
def auth_headers(token):
    """Helper to construct Authorization header."""
    return {"Authorization": "Bearer " + str(token)}
# ========================================================================
# AUTHENTICATION TESTS
# ========================================================================
def test_languages_unauthorized(client):
    """Missing token should be forbidden."""
    response = client.get("/languages")
    payload = response.get_json()
    assert response.status_code == 403
    assert payload is not None
    assert payload.get("success") is False
def test_languages_invalid_token(client):
    """Invalid bearer token should be forbidden."""
    response = client.get("/languages", headers=auth_headers("INVALID-TOKEN"))
    payload = response.get_json()
    assert response.status_code == 403
    assert payload is not None
    assert payload.get("success") is False
def test_languages_valid_token(client, api_token):
    """Valid token should return 200 with success=True."""
    response = client.get("/languages", headers=auth_headers(api_token))
    payload = response.get_json()
    assert response.status_code == 200
    assert payload is not None
    assert payload.get("success") is True
# ========================================================================
# RESPONSE STRUCTURE TESTS
# ========================================================================
def test_languages_response_structure(client, api_token):
    """Response must contain required fields with correct types."""
    response = client.get("/languages", headers=auth_headers(api_token))
    assert response.status_code == 200
    payload = response.get_json()
    assert payload.get("success") is True
    # Type-check each required field in one pass.
    for field, expected_type in (("default", str), ("count", int), ("languages", list)):
        assert isinstance(payload.get(field), expected_type)
def test_languages_default_is_en_us(client, api_token):
    """Default language must always be en_us."""
    payload = client.get("/languages", headers=auth_headers(api_token)).get_json()
    assert payload["default"] == "en_us"
def test_languages_count_matches_list(client, api_token):
    """count must equal len(languages)."""
    payload = client.get("/languages", headers=auth_headers(api_token)).get_json()
    assert len(payload["languages"]) == payload["count"]
def test_languages_entry_shape(client, api_token):
    """Each language entry must have 'code' and 'display' string fields."""
    payload = client.get("/languages", headers=auth_headers(api_token)).get_json()
    for entry in payload["languages"]:
        for field in ("code", "display"):
            assert field in entry, f"Missing '{field}' in {entry}"
            assert isinstance(entry[field], str)
        # code must match pattern xx_xx
        code = entry["code"]
        assert len(code) == 5 and code[2] == "_", \
            f"Unexpected code format: {code}"
def test_languages_includes_en_us(client, api_token):
    """en_us must always be in the language list."""
    payload = client.get("/languages", headers=auth_headers(api_token)).get_json()
    all_codes = {entry["code"] for entry in payload["languages"]}
    assert "en_us" in all_codes
def test_languages_display_contains_code(client, api_token):
    """Each display name must embed its code in parentheses, e.g. 'English (en_us)'."""
    payload = client.get("/languages", headers=auth_headers(api_token)).get_json()
    for entry in payload["languages"]:
        embedded = f"({entry['code']})"
        assert embedded in entry["display"], \
            f"Display '{entry['display']}' does not contain '({entry['code']})'"
def test_languages_minimum_count(client, api_token):
    """Must have at least 20 languages (the original set)."""
    payload = client.get("/languages", headers=auth_headers(api_token)).get_json()
    total = payload["count"]
    assert total >= 20, f"Expected >=20 languages, got {total}"
def test_languages_no_duplicate_codes(client, api_token):
    """Language codes must be unique."""
    payload = client.get("/languages", headers=auth_headers(api_token)).get_json()
    codes = [entry["code"] for entry in payload["languages"]]
    assert len(set(codes)) == len(codes), "Duplicate language codes found"