Improve MCP spec: deduplicate tool names by original operationId, apply MCP_DISABLED_TOOLS at spec generation, and refine request-schema descriptions

This commit is contained in:
Adam Outler
2026-01-30 14:25:05 +00:00
parent ed4e0388cc
commit cc8a695943
9 changed files with 175 additions and 35 deletions

View File

@@ -59,7 +59,8 @@ from .mcp_endpoint import (
mcp_sse,
mcp_messages,
openapi_spec,
) # noqa: E402 [flake8 lint suppression]
get_openapi_spec,
)
# validation and schemas for MCP v2
from .openapi.validation import validate_request # noqa: E402 [flake8 lint suppression]
from .openapi.schemas import ( # noqa: E402 [flake8 lint suppression]
@@ -950,9 +951,13 @@ def api_devices_network_topology(payload=None):
auth_callable=is_authorized
)
def api_wakeonlan(payload=None):
data = request.get_json(silent=True) or {}
mac = data.get("devMac")
ip = data.get("devLastIP") or data.get('ip')
if payload:
mac = payload.mac
ip = payload.devLastIP
else:
data = request.get_json(silent=True) or {}
mac = data.get("mac") or data.get("devMac")
ip = data.get("devLastIP") or data.get('ip')
if not mac and ip:
@@ -1858,6 +1863,14 @@ def check_auth(payload=None):
# Mount SSE endpoints after is_authorized is defined (avoid circular import)
create_sse_endpoint(app, is_authorized)
# Apply environment-driven MCP disablement by regenerating the OpenAPI spec.
# This populates the registry and applies any operation IDs listed in MCP_DISABLED_TOOLS.
try:
get_openapi_spec(force_refresh=True, flask_app=app)
mylog("verbose", [f"[MCP] Applied MCP_DISABLED_TOOLS: {os.environ.get('MCP_DISABLED_TOOLS', '')}"])
except Exception as e:
mylog("none", [f"[MCP] Error applying MCP_DISABLED_TOOLS: {e}"])
def start_server(graphql_port, app_state):
"""Start the GraphQL server in a background thread."""

View File

@@ -309,6 +309,7 @@ def map_openapi_to_mcp_tools(spec: Dict[str, Any]) -> List[Dict[str, Any]]:
This function transforms OpenAPI operations into MCP-compatible tool schemas,
ensuring proper inputSchema derivation from request bodies and parameters.
It deduplicates tools by their original operationId, preferring /mcp/ routes.
Args:
spec: OpenAPI specification dictionary
@@ -316,10 +317,10 @@ def map_openapi_to_mcp_tools(spec: Dict[str, Any]) -> List[Dict[str, Any]]:
Returns:
List of MCP tool definitions with name, description, and inputSchema
"""
tools = []
tools_map = {}
if not spec or "paths" not in spec:
return tools
return []
for path, methods in spec["paths"].items():
for method, details in methods.items():
@@ -327,6 +328,9 @@ def map_openapi_to_mcp_tools(spec: Dict[str, Any]) -> List[Dict[str, Any]]:
continue
operation_id = details["operationId"]
# Deduplicate using the original operationId (before suffixing)
# or the unique operationId as fallback.
original_op_id = details.get("x-original-operationId", operation_id)
# Build inputSchema from requestBody and parameters
input_schema = {
@@ -382,31 +386,82 @@ def map_openapi_to_mcp_tools(spec: Dict[str, Any]) -> List[Dict[str, Any]]:
tool = {
"name": operation_id,
"description": details.get("description", details.get("summary", "")),
"inputSchema": input_schema
"inputSchema": input_schema,
"_original_op_id": original_op_id,
"_is_mcp": path.startswith("/mcp/"),
"_is_post": method.upper() == "POST"
}
tools.append(tool)
# Preference logic for deduplication:
# 1. Prefer /mcp/ routes over standard ones.
# 2. Prefer POST methods over GET for the same logic (usually more robust body validation).
existing = tools_map.get(original_op_id)
if not existing:
tools_map[original_op_id] = tool
else:
# Upgrade if current is MCP and existing is not
mcp_upgrade = tool["_is_mcp"] and not existing["_is_mcp"]
# Upgrade if same route type but current is POST and existing is GET
method_upgrade = (tool["_is_mcp"] == existing["_is_mcp"]) and tool["_is_post"] and not existing["_is_post"]
if mcp_upgrade or method_upgrade:
tools_map[original_op_id] = tool
return tools
# Final cleanup: strip the internal preference flags and publish each tool
# under its original (pre-suffix) operationId, eliminating duplicate tool
# names; the suffixed operationId is preserved in the name-to-operation map.
final_tools = []
_tool_name_to_operation_id: Dict[str, str] = {}
for tool in tools_map.values():
actual_operation_id = tool["name"] # Save before overwriting
tool["name"] = tool["_original_op_id"]
_tool_name_to_operation_id[tool["name"]] = actual_operation_id
del tool["_original_op_id"]
del tool["_is_mcp"]
del tool["_is_post"]
final_tools.append(tool)
return final_tools
def find_route_for_tool(tool_name: str) -> Optional[Dict[str, Any]]:
"""
Find the registered route for a given tool name (operationId).
Handles exact matches and deduplicated original IDs.
Args:
tool_name: The operationId to look up
tool_name: The operationId or original_operation_id to look up
Returns:
Route dictionary with path, method, and models, or None if not found
"""
registry = get_registry()
candidates = []
for entry in registry:
# Exact match (priority) - if the client passed the specific suffixed ID
if entry["operation_id"] == tool_name:
return entry
if entry.get("original_operation_id") == tool_name:
candidates.append(entry)
return None
if not candidates:
return None
# Apply same preference logic as map_openapi_to_mcp_tools to ensure we pick the
# same route definition that generated the tool schema.
# Priority 1: MCP routes (they have specialized paths/behavior)
mcp_candidates = [c for c in candidates if c["path"].startswith("/mcp/")]
pool = mcp_candidates if mcp_candidates else candidates
# Priority 2: POST methods (usually preferred for tools)
post_candidates = [c for c in pool if c["method"].upper() == "POST"]
if post_candidates:
return post_candidates[0]
# Fallback: return the first from the best pool available
return pool[0]
# =============================================================================

View File

@@ -97,6 +97,26 @@ def introspect_flask_app(app: Any):
# Ensure unique operationId
original_op_id = op_id
unique_op_id = op_id
# Semantic naming strategy for duplicates
if unique_op_id in _operation_ids:
# Construct a semantic suffix to replace numeric ones
# Priority: /mcp/ prefix and HTTP method
suffix = ""
if path.startswith("/mcp/"):
suffix = "_mcp"
if method.upper() == "POST":
suffix += "_post"
elif method.upper() == "GET":
suffix += "_get"
if suffix:
candidate = f"{op_id}{suffix}"
if candidate not in _operation_ids:
unique_op_id = candidate
# Fallback to numeric suffixes if semantic naming didn't ensure uniqueness
count = 1
while unique_op_id in _operation_ids:
unique_op_id = f"{op_id}_{count}"
@@ -115,24 +135,27 @@ def introspect_flask_app(app: Any):
if method == 'GET' and not query_params and metadata.get("request_model"):
try:
schema = pydantic_to_json_schema(metadata["request_model"])
defs = schema.get("$defs", {})
properties = schema.get("properties", {})
query_params = []
for name, prop in properties.items():
is_required = name in schema.get("required", [])
# Create param definition, preserving enum/schema
# Resolve references to inlined definitions (preserving Enums)
resolved_prop = resolve_schema_refs(prop, defs)
# Create param definition
param_def = {
"name": name,
"in": "query",
"required": is_required,
"description": prop.get("description", ""),
"schema": prop
"schema": resolved_prop
}
# Remove description from schema to avoid duplication
if "description" in param_def["schema"]:
del param_def["schema"]["description"]
query_params.append(param_def)
except Exception:
pass # Fallback to empty if schema generation fails
pass # Fallback to empty if schema generation fails
register_tool(
path=path,

View File

@@ -193,7 +193,7 @@ def resolve_schema_refs(schema: Dict[str, Any], definitions: Dict[str, Any]) ->
resolved[k] = [resolve_schema_refs(i, definitions) for i in v]
else:
resolved[k] = v
return resolved

View File

@@ -233,7 +233,17 @@ class DeviceListRequest(BaseModel):
"offline"
]] = Field(
None,
description="Filter devices by status (connected, down, favorites, new, archived, all, my, offline)"
description=(
"Filter devices by status:\n"
"- connected: Active devices present in the last scan\n"
"- down: Devices with active 'Device Down' alert\n"
"- favorites: Devices marked as favorite\n"
"- new: Devices flagged as new\n"
"- archived: Devices moved to archive\n"
"- all: All active (non-archived) devices\n"
"- my: All active devices (alias for 'all')\n"
"- offline: Devices not present in the last scan"
)
)
@@ -336,7 +346,7 @@ class UpdateDeviceColumnRequest(BaseModel):
columnName: ALLOWED_DEVICE_COLUMNS = Field(..., description="Database column name")
columnValue: Union[str, int, bool, None] = Field(
...,
description="New value for the column",
description="New value for the column. Must match the column's expected data type (e.g., string for devName, integer for devFavorite).",
json_schema_extra={
"oneOf": [
{"type": "string"},
@@ -350,7 +360,7 @@ class UpdateDeviceColumnRequest(BaseModel):
class LockDeviceFieldRequest(BaseModel):
"""Request to lock/unlock a device field."""
fieldName: Optional[str] = Field(None, description="Field name to lock/unlock (devMac, devName, devLastIP, etc.)")
fieldName: str = Field(..., description="Field name to lock/unlock (e.g., devName, devVendor). Required.")
lock: bool = Field(True, description="True to lock the field, False to unlock")
@@ -387,7 +397,7 @@ class DeviceUpdateRequest(BaseModel):
devGroup: Optional[str] = Field(None, description="Device group")
devLocation: Optional[str] = Field(None, description="Device location")
devComments: Optional[str] = Field(None, description="Comments")
createNew: bool = Field(False, description="Create new device if not exists")
createNew: bool = Field(False, description="If True, creates a new device. Recommended to provide at least devName and devVendor. If False, updates existing device.")
@field_validator("devName", "devOwner", "devType", "devVendor", "devGroup", "devLocation", "devComments")
@classmethod
@@ -461,8 +471,9 @@ class OpenPortsResponse(BaseResponse):
class WakeOnLanRequest(BaseModel):
"""Request to send Wake-on-LAN packet."""
devMac: Optional[str] = Field(
mac: Optional[str] = Field(
None,
alias="devMac",
description="Target device MAC address",
json_schema_extra={"examples": ["00:11:22:33:44:55"]}
)
@@ -476,7 +487,7 @@ class WakeOnLanRequest(BaseModel):
# But Pydantic V2 with populate_by_name=True allows both "devLastIP" and "ip".
model_config = ConfigDict(populate_by_name=True)
@field_validator("devMac")
@field_validator("mac")
@classmethod
def validate_mac_if_provided(cls, v: Optional[str]) -> Optional[str]:
if v is not None:
@@ -492,9 +503,9 @@ class WakeOnLanRequest(BaseModel):
@model_validator(mode="after")
def require_mac_or_ip(self) -> "WakeOnLanRequest":
"""Ensure at least one of devMac or devLastIP is provided."""
if self.devMac is None and self.devLastIP is None:
raise ValueError("Either 'devMac' or 'devLastIP' (alias 'ip') must be provided")
"""Ensure at least one of mac or devLastIP is provided."""
if self.mac is None and self.devLastIP is None:
raise ValueError("Either devMac (aka mac) or devLastIP (aka ip) must be provided")
return self
@@ -530,7 +541,7 @@ class NmapScanRequest(BaseModel):
"""Request to perform NMAP scan."""
scan: str = Field(
...,
description="Target IP address for NMAP scan"
description="Target IP address for NMAP scan (Single IP only, no CIDR/ranges/hostnames)."
)
mode: ALLOWED_NMAP_MODES = Field(
...,
@@ -826,7 +837,7 @@ class DbQueryUpdateRequest(BaseModel):
columnName: str = Field(..., description="Column to filter by")
id: List[Union[str, int]] = Field(
...,
description="List of IDs to update (strings for MACs, integers for row IDs)",
description="List of IDs to update. Use MAC address strings for 'Devices' table, and integer RowIDs for all other tables.",
json_schema_extra={
"items": {
"oneOf": [
@@ -865,7 +876,7 @@ class DbQueryDeleteRequest(BaseModel):
columnName: str = Field(..., description="Column to filter by")
id: List[Union[str, int]] = Field(
...,
description="List of IDs to delete (strings for MACs, integers for row IDs)",
description="List of IDs to delete. Use MAC address strings for 'Devices' table, and integer RowIDs for all other tables.",
json_schema_extra={
"items": {
"oneOf": [

View File

@@ -29,7 +29,7 @@ Usage:
"""
from __future__ import annotations
import os
import threading
from typing import Optional, List, Dict, Any
@@ -74,6 +74,37 @@ def generate_openapi_spec(
introspect_graphql_schema(devicesSchema)
introspect_flask_app(flask_app)
# Apply default disabled tools from setting `MCP_DISABLED_TOOLS`, env var, or hard-coded defaults
# Format: comma-separated operation IDs, e.g. "dbquery_read,dbquery_write"
try:
disabled_env = ""
# Prefer setting from app.conf/settings when available
try:
from helper import get_setting_value
setting_val = get_setting_value("MCP_DISABLED_TOOLS")
if setting_val:
disabled_env = str(setting_val).strip()
except Exception:
# Intended to fall back to the environment variable when the helper is
# unavailable. NOTE(review): disabled_env is initialized to "" (never
# None), so the `is None` check below can never match and the env-var
# fallback appears unreachable — confirm intent.
pass
if disabled_env is None:
env_val = os.getenv("MCP_DISABLED_TOOLS")
if env_val is not None:
disabled_env = env_val.strip()
# If still not set, apply safe hard-coded defaults
if not disabled_env:
disabled_env = "dbquery_read,dbquery_write"
if disabled_env:
from .registry import set_tool_disabled
for op in [p.strip() for p in disabled_env.split(",") if p.strip()]:
set_tool_disabled(op, True)
except Exception:
# Never fail spec generation due to disablement application issues
pass
spec = {
"openapi": "3.1.0",
"info": {

View File

@@ -1,6 +1,5 @@
import os
import sys
import _io
import json
import uuid
import time

View File

@@ -66,7 +66,7 @@ class TestPydanticSchemas:
"""WakeOnLanRequest should validate MAC format."""
# Valid MAC
req = WakeOnLanRequest(devMac="00:11:22:33:44:55")
assert req.devMac == "00:11:22:33:44:55"
assert req.mac == "00:11:22:33:44:55"
# Invalid MAC
# with pytest.raises(ValidationError):
@@ -76,7 +76,7 @@ class TestPydanticSchemas:
"""WakeOnLanRequest should accept either MAC or IP."""
req_mac = WakeOnLanRequest(devMac="00:11:22:33:44:55")
req_ip = WakeOnLanRequest(devLastIP="192.168.1.50")
assert req_mac.devMac is not None
assert req_mac.mac is not None
assert req_ip.devLastIP == "192.168.1.50"
def test_traceroute_request_ip_validation(self):
@@ -216,12 +216,19 @@ class TestMCPToolMapping:
"""Test MCP tool generation from OpenAPI spec."""
def test_tools_match_registry_count(self):
"""Number of MCP tools should match registered endpoints."""
"""Number of MCP tools should match unique original operation IDs in registry."""
spec = generate_openapi_spec()
tools = map_openapi_to_mcp_tools(spec)
registry = get_registry()
assert len(tools) == len(registry)
# Count unique operation IDs (accounting for our deduplication logic)
unique_ops = set()
for entry in registry:
# We used x-original-operationId for deduplication logic, or operation_id if not present
op_id = entry.get("original_operation_id") or entry["operation_id"]
unique_ops.add(op_id)
assert len(tools) == len(unique_ops)
def test_tools_have_input_schema(self):
"""All MCP tools should have inputSchema."""

View File

@@ -99,8 +99,9 @@ class TestDeviceFieldLock:
json=payload,
headers=auth_headers
)
assert resp.status_code == 400
assert "fieldName is required" in resp.json.get("error", "")
assert resp.status_code == 422
# Pydantic error message format for missing fields
assert "Missing required 'fieldName'" in resp.json.get("error", "")
def test_lock_field_invalid_field_name(self, client, test_mac, auth_headers):
"""Lock endpoint rejects untracked fields."""