fastmcp 2.12.5__py3-none-any.whl → 2.14.0__py3-none-any.whl
This diff shows the changes between two publicly released versions of the package, as published to a supported public registry, and is provided for informational purposes only.
- fastmcp/__init__.py +2 -23
- fastmcp/cli/__init__.py +0 -3
- fastmcp/cli/__main__.py +5 -0
- fastmcp/cli/cli.py +19 -33
- fastmcp/cli/install/claude_code.py +6 -6
- fastmcp/cli/install/claude_desktop.py +3 -3
- fastmcp/cli/install/cursor.py +18 -12
- fastmcp/cli/install/gemini_cli.py +3 -3
- fastmcp/cli/install/mcp_json.py +3 -3
- fastmcp/cli/install/shared.py +0 -15
- fastmcp/cli/run.py +13 -8
- fastmcp/cli/tasks.py +110 -0
- fastmcp/client/__init__.py +9 -9
- fastmcp/client/auth/oauth.py +123 -225
- fastmcp/client/client.py +697 -95
- fastmcp/client/elicitation.py +11 -5
- fastmcp/client/logging.py +18 -14
- fastmcp/client/messages.py +7 -5
- fastmcp/client/oauth_callback.py +85 -171
- fastmcp/client/roots.py +2 -1
- fastmcp/client/sampling.py +1 -1
- fastmcp/client/tasks.py +614 -0
- fastmcp/client/transports.py +117 -30
- fastmcp/contrib/component_manager/__init__.py +1 -1
- fastmcp/contrib/component_manager/component_manager.py +2 -2
- fastmcp/contrib/component_manager/component_service.py +10 -26
- fastmcp/contrib/mcp_mixin/README.md +32 -1
- fastmcp/contrib/mcp_mixin/__init__.py +2 -2
- fastmcp/contrib/mcp_mixin/mcp_mixin.py +14 -2
- fastmcp/dependencies.py +25 -0
- fastmcp/experimental/sampling/handlers/openai.py +3 -3
- fastmcp/experimental/server/openapi/__init__.py +20 -21
- fastmcp/experimental/utilities/openapi/__init__.py +16 -47
- fastmcp/mcp_config.py +3 -4
- fastmcp/prompts/__init__.py +1 -1
- fastmcp/prompts/prompt.py +54 -51
- fastmcp/prompts/prompt_manager.py +16 -101
- fastmcp/resources/__init__.py +5 -5
- fastmcp/resources/resource.py +43 -21
- fastmcp/resources/resource_manager.py +9 -168
- fastmcp/resources/template.py +161 -61
- fastmcp/resources/types.py +30 -24
- fastmcp/server/__init__.py +1 -1
- fastmcp/server/auth/__init__.py +9 -14
- fastmcp/server/auth/auth.py +197 -46
- fastmcp/server/auth/handlers/authorize.py +326 -0
- fastmcp/server/auth/jwt_issuer.py +236 -0
- fastmcp/server/auth/middleware.py +96 -0
- fastmcp/server/auth/oauth_proxy.py +1469 -298
- fastmcp/server/auth/oidc_proxy.py +91 -20
- fastmcp/server/auth/providers/auth0.py +40 -21
- fastmcp/server/auth/providers/aws.py +29 -3
- fastmcp/server/auth/providers/azure.py +312 -131
- fastmcp/server/auth/providers/debug.py +114 -0
- fastmcp/server/auth/providers/descope.py +86 -29
- fastmcp/server/auth/providers/discord.py +308 -0
- fastmcp/server/auth/providers/github.py +29 -8
- fastmcp/server/auth/providers/google.py +48 -9
- fastmcp/server/auth/providers/in_memory.py +29 -5
- fastmcp/server/auth/providers/introspection.py +281 -0
- fastmcp/server/auth/providers/jwt.py +48 -31
- fastmcp/server/auth/providers/oci.py +233 -0
- fastmcp/server/auth/providers/scalekit.py +238 -0
- fastmcp/server/auth/providers/supabase.py +188 -0
- fastmcp/server/auth/providers/workos.py +35 -17
- fastmcp/server/context.py +236 -116
- fastmcp/server/dependencies.py +503 -18
- fastmcp/server/elicitation.py +286 -48
- fastmcp/server/event_store.py +177 -0
- fastmcp/server/http.py +71 -20
- fastmcp/server/low_level.py +165 -2
- fastmcp/server/middleware/__init__.py +1 -1
- fastmcp/server/middleware/caching.py +476 -0
- fastmcp/server/middleware/error_handling.py +14 -10
- fastmcp/server/middleware/logging.py +50 -39
- fastmcp/server/middleware/middleware.py +29 -16
- fastmcp/server/middleware/rate_limiting.py +3 -3
- fastmcp/server/middleware/tool_injection.py +116 -0
- fastmcp/server/openapi/__init__.py +35 -0
- fastmcp/{experimental/server → server}/openapi/components.py +15 -10
- fastmcp/{experimental/server → server}/openapi/routing.py +3 -3
- fastmcp/{experimental/server → server}/openapi/server.py +6 -5
- fastmcp/server/proxy.py +72 -48
- fastmcp/server/server.py +1415 -733
- fastmcp/server/tasks/__init__.py +21 -0
- fastmcp/server/tasks/capabilities.py +22 -0
- fastmcp/server/tasks/config.py +89 -0
- fastmcp/server/tasks/converters.py +205 -0
- fastmcp/server/tasks/handlers.py +356 -0
- fastmcp/server/tasks/keys.py +93 -0
- fastmcp/server/tasks/protocol.py +355 -0
- fastmcp/server/tasks/subscriptions.py +205 -0
- fastmcp/settings.py +125 -113
- fastmcp/tools/__init__.py +1 -1
- fastmcp/tools/tool.py +138 -55
- fastmcp/tools/tool_manager.py +30 -112
- fastmcp/tools/tool_transform.py +12 -21
- fastmcp/utilities/cli.py +67 -28
- fastmcp/utilities/components.py +10 -5
- fastmcp/utilities/inspect.py +79 -23
- fastmcp/utilities/json_schema.py +4 -4
- fastmcp/utilities/json_schema_type.py +8 -8
- fastmcp/utilities/logging.py +118 -8
- fastmcp/utilities/mcp_config.py +1 -2
- fastmcp/utilities/mcp_server_config/__init__.py +3 -3
- fastmcp/utilities/mcp_server_config/v1/environments/base.py +1 -2
- fastmcp/utilities/mcp_server_config/v1/environments/uv.py +6 -6
- fastmcp/utilities/mcp_server_config/v1/mcp_server_config.py +5 -5
- fastmcp/utilities/mcp_server_config/v1/schema.json +3 -0
- fastmcp/utilities/mcp_server_config/v1/sources/base.py +0 -1
- fastmcp/{experimental/utilities → utilities}/openapi/README.md +7 -35
- fastmcp/utilities/openapi/__init__.py +63 -0
- fastmcp/{experimental/utilities → utilities}/openapi/director.py +14 -15
- fastmcp/{experimental/utilities → utilities}/openapi/formatters.py +5 -5
- fastmcp/{experimental/utilities → utilities}/openapi/json_schema_converter.py +7 -3
- fastmcp/{experimental/utilities → utilities}/openapi/parser.py +37 -16
- fastmcp/utilities/tests.py +92 -5
- fastmcp/utilities/types.py +86 -16
- fastmcp/utilities/ui.py +626 -0
- {fastmcp-2.12.5.dist-info → fastmcp-2.14.0.dist-info}/METADATA +24 -15
- fastmcp-2.14.0.dist-info/RECORD +156 -0
- {fastmcp-2.12.5.dist-info → fastmcp-2.14.0.dist-info}/WHEEL +1 -1
- fastmcp/cli/claude.py +0 -135
- fastmcp/server/auth/providers/bearer.py +0 -25
- fastmcp/server/openapi.py +0 -1083
- fastmcp/utilities/openapi.py +0 -1568
- fastmcp/utilities/storage.py +0 -204
- fastmcp-2.12.5.dist-info/RECORD +0 -134
- fastmcp/{experimental/server → server}/openapi/README.md +0 -0
- fastmcp/{experimental/utilities → utilities}/openapi/models.py +3 -3
- fastmcp/{experimental/utilities → utilities}/openapi/schemas.py +2 -2
- {fastmcp-2.12.5.dist-info → fastmcp-2.14.0.dist-info}/entry_points.txt +0 -0
- {fastmcp-2.12.5.dist-info → fastmcp-2.14.0.dist-info}/licenses/LICENSE +0 -0
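Note on the removal below: the monolithic `fastmcp/utilities/openapi.py` and `fastmcp/server/openapi.py` modules are deleted in this release, replaced by the package layouts promoted out of `experimental` (see the moved `parser.py`, `models.py`, and `schemas.py` entries and the new `fastmcp/utilities/openapi/__init__.py` above). As a minimal, hypothetical sketch of calling the parser after the move — it assumes the new `fastmcp.utilities.openapi` package re-exports `parse_openapi_to_http_routes`, which this listing does not confirm:

    # Hypothetical import path; assumes the new package re-exports the parser entry point.
    from fastmcp.utilities.openapi import parse_openapi_to_http_routes

    spec = {
        "openapi": "3.1.0",
        "info": {"title": "Example API", "version": "1.0.0"},
        "paths": {
            "/items": {
                "get": {
                    "operationId": "list_items",
                    "responses": {"200": {"description": "OK"}},
                }
            }
        },
    }
    routes = parse_openapi_to_http_routes(spec)  # -> list[HTTPRoute]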
fastmcp/utilities/openapi.py
DELETED
|
@@ -1,1568 +0,0 @@
|
|
|
1
|
-
import json
|
|
2
|
-
from typing import Any, Generic, Literal, TypeVar, cast
|
|
3
|
-
|
|
4
|
-
from openapi_pydantic import (
|
|
5
|
-
OpenAPI,
|
|
6
|
-
Operation,
|
|
7
|
-
Parameter,
|
|
8
|
-
PathItem,
|
|
9
|
-
Reference,
|
|
10
|
-
RequestBody,
|
|
11
|
-
Response,
|
|
12
|
-
Schema,
|
|
13
|
-
)
|
|
14
|
-
|
|
15
|
-
# Import OpenAPI 3.0 models as well
|
|
16
|
-
from openapi_pydantic.v3.v3_0 import OpenAPI as OpenAPI_30
|
|
17
|
-
from openapi_pydantic.v3.v3_0 import Operation as Operation_30
|
|
18
|
-
from openapi_pydantic.v3.v3_0 import Parameter as Parameter_30
|
|
19
|
-
from openapi_pydantic.v3.v3_0 import PathItem as PathItem_30
|
|
20
|
-
from openapi_pydantic.v3.v3_0 import Reference as Reference_30
|
|
21
|
-
from openapi_pydantic.v3.v3_0 import RequestBody as RequestBody_30
|
|
22
|
-
from openapi_pydantic.v3.v3_0 import Response as Response_30
|
|
23
|
-
from openapi_pydantic.v3.v3_0 import Schema as Schema_30
|
|
24
|
-
from pydantic import BaseModel, Field, ValidationError
|
|
25
|
-
|
|
26
|
-
from fastmcp.utilities.logging import get_logger
|
|
27
|
-
from fastmcp.utilities.types import FastMCPBaseModel
|
|
28
|
-
|
|
29
|
-
logger = get_logger(__name__)
|
|
30
|
-
|
|
31
|
-
# --- Intermediate Representation (IR) Definition ---
|
|
32
|
-
# (IR models remain the same)
|
|
33
|
-
|
|
34
|
-
HttpMethod = Literal[
|
|
35
|
-
"GET", "POST", "PUT", "DELETE", "PATCH", "OPTIONS", "HEAD", "TRACE"
|
|
36
|
-
]
|
|
37
|
-
ParameterLocation = Literal["path", "query", "header", "cookie"]
|
|
38
|
-
JsonSchema = dict[str, Any]
|
|
39
|
-
|
|
40
|
-
|
|
41
|
-
def format_array_parameter(
|
|
42
|
-
values: list, parameter_name: str, is_query_parameter: bool = False
|
|
43
|
-
) -> str | list:
|
|
44
|
-
"""
|
|
45
|
-
Format an array parameter according to OpenAPI specifications.
|
|
46
|
-
|
|
47
|
-
Args:
|
|
48
|
-
values: List of values to format
|
|
49
|
-
parameter_name: Name of the parameter (for error messages)
|
|
50
|
-
is_query_parameter: If True, can return list for explode=True behavior
|
|
51
|
-
|
|
52
|
-
Returns:
|
|
53
|
-
String (comma-separated) or list (for query params with explode=True)
|
|
54
|
-
"""
|
|
55
|
-
# For arrays of simple types (strings, numbers, etc.), join with commas
|
|
56
|
-
if all(isinstance(item, str | int | float | bool) for item in values):
|
|
57
|
-
return ",".join(str(v) for v in values)
|
|
58
|
-
|
|
59
|
-
# For complex types, try to create a simpler representation
|
|
60
|
-
try:
|
|
61
|
-
# Try to create a simple string representation
|
|
62
|
-
formatted_parts = []
|
|
63
|
-
for item in values:
|
|
64
|
-
if isinstance(item, dict):
|
|
65
|
-
# For objects, serialize key-value pairs
|
|
66
|
-
item_parts = []
|
|
67
|
-
for k, v in item.items():
|
|
68
|
-
item_parts.append(f"{k}:{v}")
|
|
69
|
-
formatted_parts.append(".".join(item_parts))
|
|
70
|
-
else:
|
|
71
|
-
formatted_parts.append(str(item))
|
|
72
|
-
|
|
73
|
-
return ",".join(formatted_parts)
|
|
74
|
-
except Exception as e:
|
|
75
|
-
param_type = "query" if is_query_parameter else "path"
|
|
76
|
-
logger.warning(
|
|
77
|
-
f"Failed to format complex array {param_type} parameter '{parameter_name}': {e}"
|
|
78
|
-
)
|
|
79
|
-
|
|
80
|
-
if is_query_parameter:
|
|
81
|
-
# For query parameters, fallback to original list
|
|
82
|
-
return values
|
|
83
|
-
else:
|
|
84
|
-
# For path parameters, fallback to string representation without Python syntax
|
|
85
|
-
# Use str.translate() for efficient character removal
|
|
86
|
-
translation_table = str.maketrans("", "", "[]'\"")
|
|
87
|
-
str_value = str(values).translate(translation_table)
|
|
88
|
-
return str_value
|
|
89
|
-
|
|
90
|
-
|
|
91
|
-
def format_deep_object_parameter(
|
|
92
|
-
param_value: dict, parameter_name: str
|
|
93
|
-
) -> dict[str, str]:
|
|
94
|
-
"""
|
|
95
|
-
Format a dictionary parameter for deepObject style serialization.
|
|
96
|
-
|
|
97
|
-
According to OpenAPI 3.0 spec, deepObject style with explode=true serializes
|
|
98
|
-
object properties as separate query parameters with bracket notation.
|
|
99
|
-
|
|
100
|
-
For example: `{"id": "123", "type": "user"}` becomes `param[id]=123¶m[type]=user`.
|
|
101
|
-
|
|
102
|
-
Args:
|
|
103
|
-
param_value: Dictionary value to format
|
|
104
|
-
parameter_name: Name of the parameter
|
|
105
|
-
|
|
106
|
-
Returns:
|
|
107
|
-
Dictionary with bracketed parameter names as keys
|
|
108
|
-
"""
|
|
109
|
-
if not isinstance(param_value, dict):
|
|
110
|
-
logger.warning(
|
|
111
|
-
f"deepObject style parameter '{parameter_name}' expected dict, got {type(param_value)}"
|
|
112
|
-
)
|
|
113
|
-
return {}
|
|
114
|
-
|
|
115
|
-
result = {}
|
|
116
|
-
for key, value in param_value.items():
|
|
117
|
-
# Format as param[key]=value
|
|
118
|
-
bracketed_key = f"{parameter_name}[{key}]"
|
|
119
|
-
result[bracketed_key] = str(value)
|
|
120
|
-
|
|
121
|
-
return result
|
|
122
|
-
|
|
123
|
-
|
|
124
|
-
class ParameterInfo(FastMCPBaseModel):
|
|
125
|
-
"""Represents a single parameter for an HTTP operation in our IR."""
|
|
126
|
-
|
|
127
|
-
name: str
|
|
128
|
-
location: ParameterLocation # Mapped from 'in' field of openapi-pydantic Parameter
|
|
129
|
-
required: bool = False
|
|
130
|
-
schema_: JsonSchema = Field(..., alias="schema") # Target name in IR
|
|
131
|
-
description: str | None = None
|
|
132
|
-
explode: bool | None = None # OpenAPI explode property for array parameters
|
|
133
|
-
style: str | None = None # OpenAPI style property for parameter serialization
|
|
134
|
-
|
|
135
|
-
|
|
136
|
-
class RequestBodyInfo(FastMCPBaseModel):
|
|
137
|
-
"""Represents the request body for an HTTP operation in our IR."""
|
|
138
|
-
|
|
139
|
-
required: bool = False
|
|
140
|
-
content_schema: dict[str, JsonSchema] = Field(
|
|
141
|
-
default_factory=dict
|
|
142
|
-
) # Key: media type
|
|
143
|
-
description: str | None = None
|
|
144
|
-
|
|
145
|
-
|
|
146
|
-
class ResponseInfo(FastMCPBaseModel):
|
|
147
|
-
"""Represents response information in our IR."""
|
|
148
|
-
|
|
149
|
-
description: str | None = None
|
|
150
|
-
# Store schema per media type, key is media type
|
|
151
|
-
content_schema: dict[str, JsonSchema] = Field(default_factory=dict)
|
|
152
|
-
|
|
153
|
-
|
|
154
|
-
class HTTPRoute(FastMCPBaseModel):
|
|
155
|
-
"""Intermediate Representation for a single OpenAPI operation."""
|
|
156
|
-
|
|
157
|
-
path: str
|
|
158
|
-
method: HttpMethod
|
|
159
|
-
operation_id: str | None = None
|
|
160
|
-
summary: str | None = None
|
|
161
|
-
description: str | None = None
|
|
162
|
-
tags: list[str] = Field(default_factory=list)
|
|
163
|
-
parameters: list[ParameterInfo] = Field(default_factory=list)
|
|
164
|
-
request_body: RequestBodyInfo | None = None
|
|
165
|
-
responses: dict[str, ResponseInfo] = Field(
|
|
166
|
-
default_factory=dict
|
|
167
|
-
) # Key: status code str
|
|
168
|
-
schema_definitions: dict[str, JsonSchema] = Field(
|
|
169
|
-
default_factory=dict
|
|
170
|
-
) # Store component schemas
|
|
171
|
-
extensions: dict[str, Any] = Field(default_factory=dict)
|
|
172
|
-
openapi_version: str | None = None
|
|
173
|
-
|
|
174
|
-
|
|
175
|
-
# Export public symbols
|
|
176
|
-
__all__ = [
|
|
177
|
-
"HTTPRoute",
|
|
178
|
-
"ParameterInfo",
|
|
179
|
-
"RequestBodyInfo",
|
|
180
|
-
"ResponseInfo",
|
|
181
|
-
"HttpMethod",
|
|
182
|
-
"ParameterLocation",
|
|
183
|
-
"JsonSchema",
|
|
184
|
-
"parse_openapi_to_http_routes",
|
|
185
|
-
"extract_output_schema_from_responses",
|
|
186
|
-
"format_deep_object_parameter",
|
|
187
|
-
"_handle_nullable_fields",
|
|
188
|
-
]
|
|
189
|
-
|
|
190
|
-
# Type variables for generic parser
|
|
191
|
-
TOpenAPI = TypeVar("TOpenAPI", OpenAPI, OpenAPI_30)
|
|
192
|
-
TSchema = TypeVar("TSchema", Schema, Schema_30)
|
|
193
|
-
TReference = TypeVar("TReference", Reference, Reference_30)
|
|
194
|
-
TParameter = TypeVar("TParameter", Parameter, Parameter_30)
|
|
195
|
-
TRequestBody = TypeVar("TRequestBody", RequestBody, RequestBody_30)
|
|
196
|
-
TResponse = TypeVar("TResponse", Response, Response_30)
|
|
197
|
-
TOperation = TypeVar("TOperation", Operation, Operation_30)
|
|
198
|
-
TPathItem = TypeVar("TPathItem", PathItem, PathItem_30)
|
|
199
|
-
|
|
200
|
-
|
|
201
|
-
def parse_openapi_to_http_routes(openapi_dict: dict[str, Any]) -> list[HTTPRoute]:
|
|
202
|
-
"""
|
|
203
|
-
Parses an OpenAPI schema dictionary into a list of HTTPRoute objects
|
|
204
|
-
using the openapi-pydantic library.
|
|
205
|
-
|
|
206
|
-
Supports both OpenAPI 3.0.x and 3.1.x versions.
|
|
207
|
-
"""
|
|
208
|
-
# Check OpenAPI version to use appropriate model
|
|
209
|
-
openapi_version = openapi_dict.get("openapi", "")
|
|
210
|
-
|
|
211
|
-
try:
|
|
212
|
-
if openapi_version.startswith("3.0"):
|
|
213
|
-
# Use OpenAPI 3.0 models
|
|
214
|
-
openapi_30 = OpenAPI_30.model_validate(openapi_dict)
|
|
215
|
-
logger.debug(
|
|
216
|
-
f"Successfully parsed OpenAPI 3.0 schema version: {openapi_30.openapi}"
|
|
217
|
-
)
|
|
218
|
-
parser = OpenAPIParser(
|
|
219
|
-
openapi_30,
|
|
220
|
-
Reference_30,
|
|
221
|
-
Schema_30,
|
|
222
|
-
Parameter_30,
|
|
223
|
-
RequestBody_30,
|
|
224
|
-
Response_30,
|
|
225
|
-
Operation_30,
|
|
226
|
-
PathItem_30,
|
|
227
|
-
openapi_version,
|
|
228
|
-
)
|
|
229
|
-
return parser.parse()
|
|
230
|
-
else:
|
|
231
|
-
# Default to OpenAPI 3.1 models
|
|
232
|
-
openapi_31 = OpenAPI.model_validate(openapi_dict)
|
|
233
|
-
logger.debug(
|
|
234
|
-
f"Successfully parsed OpenAPI 3.1 schema version: {openapi_31.openapi}"
|
|
235
|
-
)
|
|
236
|
-
parser = OpenAPIParser(
|
|
237
|
-
openapi_31,
|
|
238
|
-
Reference,
|
|
239
|
-
Schema,
|
|
240
|
-
Parameter,
|
|
241
|
-
RequestBody,
|
|
242
|
-
Response,
|
|
243
|
-
Operation,
|
|
244
|
-
PathItem,
|
|
245
|
-
openapi_version,
|
|
246
|
-
)
|
|
247
|
-
return parser.parse()
|
|
248
|
-
except ValidationError as e:
|
|
249
|
-
logger.error(f"OpenAPI schema validation failed: {e}")
|
|
250
|
-
error_details = e.errors()
|
|
251
|
-
logger.error(f"Validation errors: {error_details}")
|
|
252
|
-
raise ValueError(f"Invalid OpenAPI schema: {error_details}") from e
|
|
253
|
-
|
|
254
|
-
|
|
255
|
-
class OpenAPIParser(
|
|
256
|
-
Generic[
|
|
257
|
-
TOpenAPI,
|
|
258
|
-
TReference,
|
|
259
|
-
TSchema,
|
|
260
|
-
TParameter,
|
|
261
|
-
TRequestBody,
|
|
262
|
-
TResponse,
|
|
263
|
-
TOperation,
|
|
264
|
-
TPathItem,
|
|
265
|
-
]
|
|
266
|
-
):
|
|
267
|
-
"""Unified parser for OpenAPI schemas with generic type parameters to handle both 3.0 and 3.1."""
|
|
268
|
-
|
|
269
|
-
def __init__(
|
|
270
|
-
self,
|
|
271
|
-
openapi: TOpenAPI,
|
|
272
|
-
reference_cls: type[TReference],
|
|
273
|
-
schema_cls: type[TSchema],
|
|
274
|
-
parameter_cls: type[TParameter],
|
|
275
|
-
request_body_cls: type[TRequestBody],
|
|
276
|
-
response_cls: type[TResponse],
|
|
277
|
-
operation_cls: type[TOperation],
|
|
278
|
-
path_item_cls: type[TPathItem],
|
|
279
|
-
openapi_version: str,
|
|
280
|
-
):
|
|
281
|
-
"""Initialize the parser with the OpenAPI schema and type classes."""
|
|
282
|
-
self.openapi = openapi
|
|
283
|
-
self.reference_cls = reference_cls
|
|
284
|
-
self.schema_cls = schema_cls
|
|
285
|
-
self.parameter_cls = parameter_cls
|
|
286
|
-
self.request_body_cls = request_body_cls
|
|
287
|
-
self.response_cls = response_cls
|
|
288
|
-
self.operation_cls = operation_cls
|
|
289
|
-
self.path_item_cls = path_item_cls
|
|
290
|
-
self.openapi_version = openapi_version
|
|
291
|
-
|
|
292
|
-
def _convert_to_parameter_location(self, param_in: str) -> ParameterLocation:
|
|
293
|
-
"""Convert string parameter location to our ParameterLocation type."""
|
|
294
|
-
if param_in in ["path", "query", "header", "cookie"]:
|
|
295
|
-
return param_in # type: ignore[return-value] # Safe cast since we checked values
|
|
296
|
-
logger.warning(f"Unknown parameter location: {param_in}, defaulting to 'query'")
|
|
297
|
-
return "query" # type: ignore[return-value] # Safe cast to default value
|
|
298
|
-
|
|
299
|
-
def _resolve_ref(self, item: Any) -> Any:
|
|
300
|
-
"""Resolves a reference to its target definition."""
|
|
301
|
-
if isinstance(item, self.reference_cls):
|
|
302
|
-
ref_str = item.ref
|
|
303
|
-
try:
|
|
304
|
-
if not ref_str.startswith("#/"):
|
|
305
|
-
raise ValueError(
|
|
306
|
-
f"External or non-local reference not supported: {ref_str}"
|
|
307
|
-
)
|
|
308
|
-
|
|
309
|
-
parts = ref_str.strip("#/").split("/")
|
|
310
|
-
target = self.openapi
|
|
311
|
-
|
|
312
|
-
for part in parts:
|
|
313
|
-
if part.isdigit() and isinstance(target, list):
|
|
314
|
-
target = target[int(part)]
|
|
315
|
-
elif isinstance(target, BaseModel):
|
|
316
|
-
# Check class fields first, then model_extra
|
|
317
|
-
if part in target.__class__.model_fields:
|
|
318
|
-
target = getattr(target, part, None)
|
|
319
|
-
elif target.model_extra and part in target.model_extra:
|
|
320
|
-
target = target.model_extra[part]
|
|
321
|
-
else:
|
|
322
|
-
# Special handling for components
|
|
323
|
-
if part == "components" and hasattr(target, "components"):
|
|
324
|
-
target = getattr(target, "components")
|
|
325
|
-
elif hasattr(target, part): # Fallback check
|
|
326
|
-
target = getattr(target, part, None)
|
|
327
|
-
else:
|
|
328
|
-
target = None # Part not found
|
|
329
|
-
elif isinstance(target, dict):
|
|
330
|
-
target = target.get(part)
|
|
331
|
-
else:
|
|
332
|
-
raise ValueError(
|
|
333
|
-
f"Cannot traverse part '{part}' in reference '{ref_str}'"
|
|
334
|
-
)
|
|
335
|
-
|
|
336
|
-
if target is None:
|
|
337
|
-
raise ValueError(
|
|
338
|
-
f"Reference part '{part}' not found in path '{ref_str}'"
|
|
339
|
-
)
|
|
340
|
-
|
|
341
|
-
# Handle nested references
|
|
342
|
-
if isinstance(target, self.reference_cls):
|
|
343
|
-
return self._resolve_ref(target)
|
|
344
|
-
|
|
345
|
-
return target
|
|
346
|
-
except (AttributeError, KeyError, IndexError, TypeError, ValueError) as e:
|
|
347
|
-
raise ValueError(f"Failed to resolve reference '{ref_str}': {e}") from e
|
|
348
|
-
|
|
349
|
-
return item
|
|
350
|
-
|
|
351
|
-
def _extract_schema_as_dict(self, schema_obj: Any) -> JsonSchema:
|
|
352
|
-
"""Resolves a schema and returns it as a dictionary."""
|
|
353
|
-
try:
|
|
354
|
-
resolved_schema = self._resolve_ref(schema_obj)
|
|
355
|
-
|
|
356
|
-
if isinstance(resolved_schema, (self.schema_cls)):
|
|
357
|
-
# Convert schema to dictionary
|
|
358
|
-
result = resolved_schema.model_dump(
|
|
359
|
-
mode="json", by_alias=True, exclude_none=True
|
|
360
|
-
)
|
|
361
|
-
elif isinstance(resolved_schema, dict):
|
|
362
|
-
result = resolved_schema
|
|
363
|
-
else:
|
|
364
|
-
logger.warning(
|
|
365
|
-
f"Expected Schema after resolving, got {type(resolved_schema)}. Returning empty dict."
|
|
366
|
-
)
|
|
367
|
-
result = {}
|
|
368
|
-
|
|
369
|
-
return _replace_ref_with_defs(result)
|
|
370
|
-
except ValueError as e:
|
|
371
|
-
# Re-raise ValueError for external reference errors and other validation issues
|
|
372
|
-
if "External or non-local reference not supported" in str(e):
|
|
373
|
-
raise
|
|
374
|
-
logger.error(f"Failed to extract schema as dict: {e}", exc_info=False)
|
|
375
|
-
return {}
|
|
376
|
-
except Exception as e:
|
|
377
|
-
logger.error(f"Failed to extract schema as dict: {e}", exc_info=False)
|
|
378
|
-
return {}
|
|
379
|
-
|
|
380
|
-
def _extract_parameters(
|
|
381
|
-
self,
|
|
382
|
-
operation_params: list[Any] | None = None,
|
|
383
|
-
path_item_params: list[Any] | None = None,
|
|
384
|
-
) -> list[ParameterInfo]:
|
|
385
|
-
"""Extract and resolve parameters from operation and path item."""
|
|
386
|
-
extracted_params: list[ParameterInfo] = []
|
|
387
|
-
seen_params: dict[
|
|
388
|
-
tuple[str, str], bool
|
|
389
|
-
] = {} # Use tuple of (name, location) as key
|
|
390
|
-
all_params = (operation_params or []) + (path_item_params or [])
|
|
391
|
-
|
|
392
|
-
for param_or_ref in all_params:
|
|
393
|
-
try:
|
|
394
|
-
parameter = self._resolve_ref(param_or_ref)
|
|
395
|
-
|
|
396
|
-
if not isinstance(parameter, self.parameter_cls):
|
|
397
|
-
logger.warning(
|
|
398
|
-
f"Expected Parameter after resolving, got {type(parameter)}. Skipping."
|
|
399
|
-
)
|
|
400
|
-
continue
|
|
401
|
-
|
|
402
|
-
# Extract parameter info - handle both 3.0 and 3.1 parameter models
|
|
403
|
-
param_in = parameter.param_in # Both use param_in
|
|
404
|
-
# Handle enum or string parameter locations
|
|
405
|
-
from enum import Enum
|
|
406
|
-
|
|
407
|
-
param_in_str = (
|
|
408
|
-
param_in.value if isinstance(param_in, Enum) else param_in
|
|
409
|
-
)
|
|
410
|
-
param_location = self._convert_to_parameter_location(param_in_str)
|
|
411
|
-
param_schema_obj = parameter.param_schema # Both use param_schema
|
|
412
|
-
|
|
413
|
-
# Skip duplicate parameters (same name and location)
|
|
414
|
-
param_key = (parameter.name, param_in_str)
|
|
415
|
-
if param_key in seen_params:
|
|
416
|
-
continue
|
|
417
|
-
seen_params[param_key] = True
|
|
418
|
-
|
|
419
|
-
# Extract schema
|
|
420
|
-
param_schema_dict = {}
|
|
421
|
-
if param_schema_obj:
|
|
422
|
-
# Process schema object
|
|
423
|
-
param_schema_dict = self._extract_schema_as_dict(param_schema_obj)
|
|
424
|
-
|
|
425
|
-
# Handle default value
|
|
426
|
-
resolved_schema = self._resolve_ref(param_schema_obj)
|
|
427
|
-
if (
|
|
428
|
-
not isinstance(resolved_schema, self.reference_cls)
|
|
429
|
-
and hasattr(resolved_schema, "default")
|
|
430
|
-
and resolved_schema.default is not None
|
|
431
|
-
):
|
|
432
|
-
param_schema_dict["default"] = resolved_schema.default
|
|
433
|
-
|
|
434
|
-
elif hasattr(parameter, "content") and parameter.content:
|
|
435
|
-
# Handle content-based parameters
|
|
436
|
-
first_media_type = next(iter(parameter.content.values()), None)
|
|
437
|
-
if (
|
|
438
|
-
first_media_type
|
|
439
|
-
and hasattr(first_media_type, "media_type_schema")
|
|
440
|
-
and first_media_type.media_type_schema
|
|
441
|
-
):
|
|
442
|
-
media_schema = first_media_type.media_type_schema
|
|
443
|
-
param_schema_dict = self._extract_schema_as_dict(media_schema)
|
|
444
|
-
|
|
445
|
-
# Handle default value in content schema
|
|
446
|
-
resolved_media_schema = self._resolve_ref(media_schema)
|
|
447
|
-
if (
|
|
448
|
-
not isinstance(resolved_media_schema, self.reference_cls)
|
|
449
|
-
and hasattr(resolved_media_schema, "default")
|
|
450
|
-
and resolved_media_schema.default is not None
|
|
451
|
-
):
|
|
452
|
-
param_schema_dict["default"] = resolved_media_schema.default
|
|
453
|
-
|
|
454
|
-
# Extract explode and style properties if present
|
|
455
|
-
explode = getattr(parameter, "explode", None)
|
|
456
|
-
style = getattr(parameter, "style", None)
|
|
457
|
-
|
|
458
|
-
# Create parameter info object
|
|
459
|
-
param_info = ParameterInfo(
|
|
460
|
-
name=parameter.name,
|
|
461
|
-
location=param_location,
|
|
462
|
-
required=parameter.required,
|
|
463
|
-
schema=param_schema_dict,
|
|
464
|
-
description=parameter.description,
|
|
465
|
-
explode=explode,
|
|
466
|
-
style=style,
|
|
467
|
-
)
|
|
468
|
-
extracted_params.append(param_info)
|
|
469
|
-
except Exception as e:
|
|
470
|
-
param_name = getattr(
|
|
471
|
-
param_or_ref, "name", getattr(param_or_ref, "ref", "unknown")
|
|
472
|
-
)
|
|
473
|
-
logger.error(
|
|
474
|
-
f"Failed to extract parameter '{param_name}': {e}", exc_info=False
|
|
475
|
-
)
|
|
476
|
-
|
|
477
|
-
return extracted_params
|
|
478
|
-
|
|
479
|
-
def _extract_request_body(self, request_body_or_ref: Any) -> RequestBodyInfo | None:
|
|
480
|
-
"""Extract and resolve request body information."""
|
|
481
|
-
if not request_body_or_ref:
|
|
482
|
-
return None
|
|
483
|
-
|
|
484
|
-
try:
|
|
485
|
-
request_body = self._resolve_ref(request_body_or_ref)
|
|
486
|
-
|
|
487
|
-
if not isinstance(request_body, self.request_body_cls):
|
|
488
|
-
logger.warning(
|
|
489
|
-
f"Expected RequestBody after resolving, got {type(request_body)}. Returning None."
|
|
490
|
-
)
|
|
491
|
-
return None
|
|
492
|
-
|
|
493
|
-
# Create request body info
|
|
494
|
-
request_body_info = RequestBodyInfo(
|
|
495
|
-
required=request_body.required,
|
|
496
|
-
description=request_body.description,
|
|
497
|
-
)
|
|
498
|
-
|
|
499
|
-
# Extract content schemas
|
|
500
|
-
if hasattr(request_body, "content") and request_body.content:
|
|
501
|
-
for media_type_str, media_type_obj in request_body.content.items():
|
|
502
|
-
if (
|
|
503
|
-
media_type_obj
|
|
504
|
-
and hasattr(media_type_obj, "media_type_schema")
|
|
505
|
-
and media_type_obj.media_type_schema
|
|
506
|
-
):
|
|
507
|
-
try:
|
|
508
|
-
schema_dict = self._extract_schema_as_dict(
|
|
509
|
-
media_type_obj.media_type_schema
|
|
510
|
-
)
|
|
511
|
-
request_body_info.content_schema[media_type_str] = (
|
|
512
|
-
schema_dict
|
|
513
|
-
)
|
|
514
|
-
except ValueError as e:
|
|
515
|
-
# Re-raise ValueError for external reference errors
|
|
516
|
-
if "External or non-local reference not supported" in str(
|
|
517
|
-
e
|
|
518
|
-
):
|
|
519
|
-
raise
|
|
520
|
-
logger.error(
|
|
521
|
-
f"Failed to extract schema for media type '{media_type_str}': {e}"
|
|
522
|
-
)
|
|
523
|
-
except Exception as e:
|
|
524
|
-
logger.error(
|
|
525
|
-
f"Failed to extract schema for media type '{media_type_str}': {e}"
|
|
526
|
-
)
|
|
527
|
-
|
|
528
|
-
return request_body_info
|
|
529
|
-
except ValueError as e:
|
|
530
|
-
# Re-raise ValueError for external reference errors
|
|
531
|
-
if "External or non-local reference not supported" in str(e):
|
|
532
|
-
raise
|
|
533
|
-
ref_name = getattr(request_body_or_ref, "ref", "unknown")
|
|
534
|
-
logger.error(
|
|
535
|
-
f"Failed to extract request body '{ref_name}': {e}", exc_info=False
|
|
536
|
-
)
|
|
537
|
-
return None
|
|
538
|
-
except Exception as e:
|
|
539
|
-
ref_name = getattr(request_body_or_ref, "ref", "unknown")
|
|
540
|
-
logger.error(
|
|
541
|
-
f"Failed to extract request body '{ref_name}': {e}", exc_info=False
|
|
542
|
-
)
|
|
543
|
-
return None
|
|
544
|
-
|
|
545
|
-
def _extract_responses(
|
|
546
|
-
self, operation_responses: dict[str, Any] | None
|
|
547
|
-
) -> dict[str, ResponseInfo]:
|
|
548
|
-
"""Extract and resolve response information."""
|
|
549
|
-
extracted_responses: dict[str, ResponseInfo] = {}
|
|
550
|
-
|
|
551
|
-
if not operation_responses:
|
|
552
|
-
return extracted_responses
|
|
553
|
-
|
|
554
|
-
for status_code, resp_or_ref in operation_responses.items():
|
|
555
|
-
try:
|
|
556
|
-
response = self._resolve_ref(resp_or_ref)
|
|
557
|
-
|
|
558
|
-
if not isinstance(response, self.response_cls):
|
|
559
|
-
logger.warning(
|
|
560
|
-
f"Expected Response after resolving for status code {status_code}, "
|
|
561
|
-
f"got {type(response)}. Skipping."
|
|
562
|
-
)
|
|
563
|
-
continue
|
|
564
|
-
|
|
565
|
-
# Create response info
|
|
566
|
-
resp_info = ResponseInfo(description=response.description)
|
|
567
|
-
|
|
568
|
-
# Extract content schemas
|
|
569
|
-
if hasattr(response, "content") and response.content:
|
|
570
|
-
for media_type_str, media_type_obj in response.content.items():
|
|
571
|
-
if (
|
|
572
|
-
media_type_obj
|
|
573
|
-
and hasattr(media_type_obj, "media_type_schema")
|
|
574
|
-
and media_type_obj.media_type_schema
|
|
575
|
-
):
|
|
576
|
-
try:
|
|
577
|
-
schema_dict = self._extract_schema_as_dict(
|
|
578
|
-
media_type_obj.media_type_schema
|
|
579
|
-
)
|
|
580
|
-
resp_info.content_schema[media_type_str] = schema_dict
|
|
581
|
-
except ValueError as e:
|
|
582
|
-
# Re-raise ValueError for external reference errors
|
|
583
|
-
if (
|
|
584
|
-
"External or non-local reference not supported"
|
|
585
|
-
in str(e)
|
|
586
|
-
):
|
|
587
|
-
raise
|
|
588
|
-
logger.error(
|
|
589
|
-
f"Failed to extract schema for media type '{media_type_str}' "
|
|
590
|
-
f"in response {status_code}: {e}"
|
|
591
|
-
)
|
|
592
|
-
except Exception as e:
|
|
593
|
-
logger.error(
|
|
594
|
-
f"Failed to extract schema for media type '{media_type_str}' "
|
|
595
|
-
f"in response {status_code}: {e}"
|
|
596
|
-
)
|
|
597
|
-
|
|
598
|
-
extracted_responses[str(status_code)] = resp_info
|
|
599
|
-
except ValueError as e:
|
|
600
|
-
# Re-raise ValueError for external reference errors
|
|
601
|
-
if "External or non-local reference not supported" in str(e):
|
|
602
|
-
raise
|
|
603
|
-
ref_name = getattr(resp_or_ref, "ref", "unknown")
|
|
604
|
-
logger.error(
|
|
605
|
-
f"Failed to extract response for status code {status_code} "
|
|
606
|
-
f"from reference '{ref_name}': {e}",
|
|
607
|
-
exc_info=False,
|
|
608
|
-
)
|
|
609
|
-
except Exception as e:
|
|
610
|
-
ref_name = getattr(resp_or_ref, "ref", "unknown")
|
|
611
|
-
logger.error(
|
|
612
|
-
f"Failed to extract response for status code {status_code} "
|
|
613
|
-
f"from reference '{ref_name}': {e}",
|
|
614
|
-
exc_info=False,
|
|
615
|
-
)
|
|
616
|
-
|
|
617
|
-
return extracted_responses
|
|
618
|
-
|
|
619
|
-
def parse(self) -> list[HTTPRoute]:
|
|
620
|
-
"""Parse the OpenAPI schema into HTTP routes."""
|
|
621
|
-
routes: list[HTTPRoute] = []
|
|
622
|
-
|
|
623
|
-
if not hasattr(self.openapi, "paths") or not self.openapi.paths:
|
|
624
|
-
logger.warning("OpenAPI schema has no paths defined.")
|
|
625
|
-
return []
|
|
626
|
-
|
|
627
|
-
# Extract component schemas
|
|
628
|
-
schema_definitions = {}
|
|
629
|
-
if hasattr(self.openapi, "components") and self.openapi.components:
|
|
630
|
-
components = self.openapi.components
|
|
631
|
-
if hasattr(components, "schemas") and components.schemas:
|
|
632
|
-
for name, schema in components.schemas.items():
|
|
633
|
-
try:
|
|
634
|
-
if isinstance(schema, self.reference_cls):
|
|
635
|
-
resolved_schema = self._resolve_ref(schema)
|
|
636
|
-
schema_definitions[name] = self._extract_schema_as_dict(
|
|
637
|
-
resolved_schema
|
|
638
|
-
)
|
|
639
|
-
else:
|
|
640
|
-
schema_definitions[name] = self._extract_schema_as_dict(
|
|
641
|
-
schema
|
|
642
|
-
)
|
|
643
|
-
except Exception as e:
|
|
644
|
-
logger.warning(
|
|
645
|
-
f"Failed to extract schema definition '{name}': {e}"
|
|
646
|
-
)
|
|
647
|
-
|
|
648
|
-
# Process paths and operations
|
|
649
|
-
for path_str, path_item_obj in self.openapi.paths.items():
|
|
650
|
-
if not isinstance(path_item_obj, self.path_item_cls):
|
|
651
|
-
logger.warning(
|
|
652
|
-
f"Skipping invalid path item for path '{path_str}' (type: {type(path_item_obj)})"
|
|
653
|
-
)
|
|
654
|
-
continue
|
|
655
|
-
|
|
656
|
-
path_level_params = (
|
|
657
|
-
path_item_obj.parameters
|
|
658
|
-
if hasattr(path_item_obj, "parameters")
|
|
659
|
-
else None
|
|
660
|
-
)
|
|
661
|
-
|
|
662
|
-
# Get HTTP methods from the path item class fields
|
|
663
|
-
http_methods = [
|
|
664
|
-
"get",
|
|
665
|
-
"put",
|
|
666
|
-
"post",
|
|
667
|
-
"delete",
|
|
668
|
-
"options",
|
|
669
|
-
"head",
|
|
670
|
-
"patch",
|
|
671
|
-
"trace",
|
|
672
|
-
]
|
|
673
|
-
for method_lower in http_methods:
|
|
674
|
-
operation = getattr(path_item_obj, method_lower, None)
|
|
675
|
-
|
|
676
|
-
if operation and isinstance(operation, self.operation_cls):
|
|
677
|
-
# Cast method to HttpMethod - safe since we only use valid HTTP methods
|
|
678
|
-
method_upper = method_lower.upper()
|
|
679
|
-
|
|
680
|
-
try:
|
|
681
|
-
parameters = self._extract_parameters(
|
|
682
|
-
getattr(operation, "parameters", None), path_level_params
|
|
683
|
-
)
|
|
684
|
-
|
|
685
|
-
request_body_info = self._extract_request_body(
|
|
686
|
-
getattr(operation, "requestBody", None)
|
|
687
|
-
)
|
|
688
|
-
|
|
689
|
-
responses = self._extract_responses(
|
|
690
|
-
getattr(operation, "responses", None)
|
|
691
|
-
)
|
|
692
|
-
|
|
693
|
-
extensions = {}
|
|
694
|
-
if hasattr(operation, "model_extra") and operation.model_extra:
|
|
695
|
-
extensions = {
|
|
696
|
-
k: v
|
|
697
|
-
for k, v in operation.model_extra.items()
|
|
698
|
-
if k.startswith("x-")
|
|
699
|
-
}
|
|
700
|
-
|
|
701
|
-
route = HTTPRoute(
|
|
702
|
-
path=path_str,
|
|
703
|
-
method=method_upper, # type: ignore[arg-type] # Known valid HTTP method
|
|
704
|
-
operation_id=getattr(operation, "operationId", None),
|
|
705
|
-
summary=getattr(operation, "summary", None),
|
|
706
|
-
description=getattr(operation, "description", None),
|
|
707
|
-
tags=getattr(operation, "tags", []) or [],
|
|
708
|
-
parameters=parameters,
|
|
709
|
-
request_body=request_body_info,
|
|
710
|
-
responses=responses,
|
|
711
|
-
schema_definitions=schema_definitions,
|
|
712
|
-
extensions=extensions,
|
|
713
|
-
openapi_version=self.openapi_version,
|
|
714
|
-
)
|
|
715
|
-
routes.append(route)
|
|
716
|
-
logger.debug(
|
|
717
|
-
f"Successfully extracted route: {method_upper} {path_str}"
|
|
718
|
-
)
|
|
719
|
-
except ValueError as op_error:
|
|
720
|
-
# Re-raise ValueError for external reference errors
|
|
721
|
-
if "External or non-local reference not supported" in str(
|
|
722
|
-
op_error
|
|
723
|
-
):
|
|
724
|
-
raise
|
|
725
|
-
op_id = getattr(operation, "operationId", "unknown")
|
|
726
|
-
logger.error(
|
|
727
|
-
f"Failed to process operation {method_upper} {path_str} (ID: {op_id}): {op_error}",
|
|
728
|
-
exc_info=True,
|
|
729
|
-
)
|
|
730
|
-
except Exception as op_error:
|
|
731
|
-
op_id = getattr(operation, "operationId", "unknown")
|
|
732
|
-
logger.error(
|
|
733
|
-
f"Failed to process operation {method_upper} {path_str} (ID: {op_id}): {op_error}",
|
|
734
|
-
exc_info=True,
|
|
735
|
-
)
|
|
736
|
-
|
|
737
|
-
logger.debug(f"Finished parsing. Extracted {len(routes)} HTTP routes.")
|
|
738
|
-
return routes
|
|
739
|
-
|
|
740
|
-
|
|
741
|
-
def clean_schema_for_display(schema: JsonSchema | None) -> JsonSchema | None:
|
|
742
|
-
"""
|
|
743
|
-
Clean up a schema dictionary for display by removing internal/complex fields.
|
|
744
|
-
"""
|
|
745
|
-
if not schema or not isinstance(schema, dict):
|
|
746
|
-
return schema
|
|
747
|
-
|
|
748
|
-
# Make a copy to avoid modifying the input schema
|
|
749
|
-
cleaned = schema.copy()
|
|
750
|
-
|
|
751
|
-
# Fields commonly removed for simpler display to LLMs or users
|
|
752
|
-
fields_to_remove = [
|
|
753
|
-
"allOf",
|
|
754
|
-
"anyOf",
|
|
755
|
-
"oneOf",
|
|
756
|
-
"not", # Composition keywords
|
|
757
|
-
"nullable", # Handled by type unions usually
|
|
758
|
-
"discriminator",
|
|
759
|
-
"readOnly",
|
|
760
|
-
"writeOnly",
|
|
761
|
-
"deprecated",
|
|
762
|
-
"xml",
|
|
763
|
-
"externalDocs",
|
|
764
|
-
# Can be verbose, maybe remove based on flag?
|
|
765
|
-
# "pattern", "minLength", "maxLength",
|
|
766
|
-
# "minimum", "maximum", "exclusiveMinimum", "exclusiveMaximum",
|
|
767
|
-
# "multipleOf", "minItems", "maxItems", "uniqueItems",
|
|
768
|
-
# "minProperties", "maxProperties"
|
|
769
|
-
]
|
|
770
|
-
|
|
771
|
-
for field in fields_to_remove:
|
|
772
|
-
if field in cleaned:
|
|
773
|
-
cleaned.pop(field)
|
|
774
|
-
|
|
775
|
-
# Recursively clean properties and items
|
|
776
|
-
if "properties" in cleaned:
|
|
777
|
-
cleaned["properties"] = {
|
|
778
|
-
k: clean_schema_for_display(v) for k, v in cleaned["properties"].items()
|
|
779
|
-
}
|
|
780
|
-
# Remove properties section if empty after cleaning
|
|
781
|
-
if not cleaned["properties"]:
|
|
782
|
-
cleaned.pop("properties")
|
|
783
|
-
|
|
784
|
-
if "items" in cleaned:
|
|
785
|
-
cleaned["items"] = clean_schema_for_display(cleaned["items"])
|
|
786
|
-
# Remove items section if empty after cleaning
|
|
787
|
-
if not cleaned["items"]:
|
|
788
|
-
cleaned.pop("items")
|
|
789
|
-
|
|
790
|
-
if "additionalProperties" in cleaned:
|
|
791
|
-
# Often verbose, can be simplified
|
|
792
|
-
if isinstance(cleaned["additionalProperties"], dict):
|
|
793
|
-
cleaned["additionalProperties"] = clean_schema_for_display(
|
|
794
|
-
cleaned["additionalProperties"]
|
|
795
|
-
)
|
|
796
|
-
elif cleaned["additionalProperties"] is True:
|
|
797
|
-
# Maybe keep 'true' or represent as 'Allows additional properties' text?
|
|
798
|
-
pass # Keep simple boolean for now
|
|
799
|
-
|
|
800
|
-
|
|
801
|
-
def generate_example_from_schema(schema: JsonSchema | None) -> Any:
|
|
802
|
-
"""
|
|
803
|
-
Generate a simple example value from a JSON schema dictionary.
|
|
804
|
-
Very basic implementation focusing on types.
|
|
805
|
-
"""
|
|
806
|
-
if not schema or not isinstance(schema, dict):
|
|
807
|
-
return "unknown" # Or None?
|
|
808
|
-
|
|
809
|
-
# Use default value if provided
|
|
810
|
-
if "default" in schema:
|
|
811
|
-
return schema["default"]
|
|
812
|
-
# Use first enum value if provided
|
|
813
|
-
if "enum" in schema and isinstance(schema["enum"], list) and schema["enum"]:
|
|
814
|
-
return schema["enum"][0]
|
|
815
|
-
# Use first example if provided
|
|
816
|
-
if (
|
|
817
|
-
"examples" in schema
|
|
818
|
-
and isinstance(schema["examples"], list)
|
|
819
|
-
and schema["examples"]
|
|
820
|
-
):
|
|
821
|
-
return schema["examples"][0]
|
|
822
|
-
if "example" in schema:
|
|
823
|
-
return schema["example"]
|
|
824
|
-
|
|
825
|
-
schema_type = schema.get("type")
|
|
826
|
-
|
|
827
|
-
if schema_type == "object":
|
|
828
|
-
result = {}
|
|
829
|
-
properties = schema.get("properties", {})
|
|
830
|
-
if isinstance(properties, dict):
|
|
831
|
-
# Generate example for first few properties or required ones? Limit complexity.
|
|
832
|
-
required_props = set(schema.get("required", []))
|
|
833
|
-
props_to_include = list(properties.keys())[
|
|
834
|
-
:3
|
|
835
|
-
] # Limit to first 3 for brevity
|
|
836
|
-
for prop_name in props_to_include:
|
|
837
|
-
if prop_name in properties:
|
|
838
|
-
result[prop_name] = generate_example_from_schema(
|
|
839
|
-
properties[prop_name]
|
|
840
|
-
)
|
|
841
|
-
# Ensure required props are present if possible
|
|
842
|
-
for req_prop in required_props:
|
|
843
|
-
if req_prop not in result and req_prop in properties:
|
|
844
|
-
result[req_prop] = generate_example_from_schema(
|
|
845
|
-
properties[req_prop]
|
|
846
|
-
)
|
|
847
|
-
return result if result else {"key": "value"} # Basic object if no props
|
|
848
|
-
|
|
849
|
-
elif schema_type == "array":
|
|
850
|
-
items_schema = schema.get("items")
|
|
851
|
-
if isinstance(items_schema, dict):
|
|
852
|
-
# Generate one example item
|
|
853
|
-
item_example = generate_example_from_schema(items_schema)
|
|
854
|
-
return [item_example] if item_example is not None else []
|
|
855
|
-
return ["example_item"] # Fallback
|
|
856
|
-
|
|
857
|
-
elif schema_type == "string":
|
|
858
|
-
format_type = schema.get("format")
|
|
859
|
-
if format_type == "date-time":
|
|
860
|
-
return "2024-01-01T12:00:00Z"
|
|
861
|
-
if format_type == "date":
|
|
862
|
-
return "2024-01-01"
|
|
863
|
-
if format_type == "email":
|
|
864
|
-
return "user@example.com"
|
|
865
|
-
if format_type == "uuid":
|
|
866
|
-
return "123e4567-e89b-12d3-a456-426614174000"
|
|
867
|
-
if format_type == "byte":
|
|
868
|
-
return "ZXhhbXBsZQ==" # "example" base64
|
|
869
|
-
return "string"
|
|
870
|
-
|
|
871
|
-
elif schema_type == "integer":
|
|
872
|
-
return 1
|
|
873
|
-
elif schema_type == "number":
|
|
874
|
-
return 1.5
|
|
875
|
-
elif schema_type == "boolean":
|
|
876
|
-
return True
|
|
877
|
-
elif schema_type == "null":
|
|
878
|
-
return None
|
|
879
|
-
|
|
880
|
-
# Fallback if type is unknown or missing
|
|
881
|
-
return "unknown_type"
|
|
882
|
-
|
|
883
|
-
|
|
884
|
-
def format_json_for_description(data: Any, indent: int = 2) -> str:
|
|
885
|
-
"""Formats Python data as a JSON string block for markdown."""
|
|
886
|
-
try:
|
|
887
|
-
json_str = json.dumps(data, indent=indent)
|
|
888
|
-
return f"```json\n{json_str}\n```"
|
|
889
|
-
except TypeError:
|
|
890
|
-
return f"```\nCould not serialize to JSON: {data}\n```"
|
|
891
|
-
|
|
892
|
-
|
|
893
|
-
def format_description_with_responses(
|
|
894
|
-
base_description: str,
|
|
895
|
-
responses: dict[
|
|
896
|
-
str, Any
|
|
897
|
-
], # Changed from specific ResponseInfo type to avoid circular imports
|
|
898
|
-
parameters: list[ParameterInfo] | None = None, # Add parameters parameter
|
|
899
|
-
request_body: RequestBodyInfo | None = None, # Add request_body parameter
|
|
900
|
-
) -> str:
|
|
901
|
-
"""
|
|
902
|
-
Formats the base description string with response, parameter, and request body information.
|
|
903
|
-
|
|
904
|
-
Args:
|
|
905
|
-
base_description (str): The initial description to be formatted.
|
|
906
|
-
responses (dict[str, Any]): A dictionary of response information, keyed by status code.
|
|
907
|
-
parameters (list[ParameterInfo] | None, optional): A list of parameter information,
|
|
908
|
-
including path and query parameters. Each parameter includes details such as name,
|
|
909
|
-
location, whether it is required, and a description.
|
|
910
|
-
request_body (RequestBodyInfo | None, optional): Information about the request body,
|
|
911
|
-
including its description, whether it is required, and its content schema.
|
|
912
|
-
|
|
913
|
-
Returns:
|
|
914
|
-
str: The formatted description string with additional details about responses, parameters,
|
|
915
|
-
and the request body.
|
|
916
|
-
"""
|
|
917
|
-
desc_parts = [base_description]
|
|
918
|
-
|
|
919
|
-
# Add parameter information
|
|
920
|
-
if parameters:
|
|
921
|
-
# Process path parameters
|
|
922
|
-
path_params = [p for p in parameters if p.location == "path"]
|
|
923
|
-
if path_params:
|
|
924
|
-
param_section = "\n\n**Path Parameters:**"
|
|
925
|
-
desc_parts.append(param_section)
|
|
926
|
-
for param in path_params:
|
|
927
|
-
required_marker = " (Required)" if param.required else ""
|
|
928
|
-
param_desc = f"\n- **{param.name}**{required_marker}: {param.description or 'No description.'}"
|
|
929
|
-
desc_parts.append(param_desc)
|
|
930
|
-
|
|
931
|
-
# Process query parameters
|
|
932
|
-
query_params = [p for p in parameters if p.location == "query"]
|
|
933
|
-
if query_params:
|
|
934
|
-
param_section = "\n\n**Query Parameters:**"
|
|
935
|
-
desc_parts.append(param_section)
|
|
936
|
-
for param in query_params:
|
|
937
|
-
required_marker = " (Required)" if param.required else ""
|
|
938
|
-
param_desc = f"\n- **{param.name}**{required_marker}: {param.description or 'No description.'}"
|
|
939
|
-
desc_parts.append(param_desc)
|
|
940
|
-
|
|
941
|
-
# Add request body information if present
|
|
942
|
-
if request_body and request_body.description:
|
|
943
|
-
req_body_section = "\n\n**Request Body:**"
|
|
944
|
-
desc_parts.append(req_body_section)
|
|
945
|
-
required_marker = " (Required)" if request_body.required else ""
|
|
946
|
-
desc_parts.append(f"\n{request_body.description}{required_marker}")
|
|
947
|
-
|
|
948
|
-
# Add request body property descriptions if available
|
|
949
|
-
if request_body.content_schema:
|
|
950
|
-
media_type = (
|
|
951
|
-
"application/json"
|
|
952
|
-
if "application/json" in request_body.content_schema
|
|
953
|
-
else next(iter(request_body.content_schema), None)
|
|
954
|
-
)
|
|
955
|
-
if media_type:
|
|
956
|
-
schema = request_body.content_schema.get(media_type, {})
|
|
957
|
-
if isinstance(schema, dict) and "properties" in schema:
|
|
958
|
-
desc_parts.append("\n\n**Request Properties:**")
|
|
959
|
-
for prop_name, prop_schema in schema["properties"].items():
|
|
960
|
-
if (
|
|
961
|
-
isinstance(prop_schema, dict)
|
|
962
|
-
and "description" in prop_schema
|
|
963
|
-
):
|
|
964
|
-
required = prop_name in schema.get("required", [])
|
|
965
|
-
req_mark = " (Required)" if required else ""
|
|
966
|
-
desc_parts.append(
|
|
967
|
-
f"\n- **{prop_name}**{req_mark}: {prop_schema['description']}"
|
|
968
|
-
)
|
|
969
|
-
|
|
970
|
-
# Add response information
|
|
971
|
-
if responses:
|
|
972
|
-
response_section = "\n\n**Responses:**"
|
|
973
|
-
added_response_section = False
|
|
974
|
-
|
|
975
|
-
# Determine success codes (common ones)
|
|
976
|
-
success_codes = {"200", "201", "202", "204"} # As strings
|
|
977
|
-
success_status = next((s for s in success_codes if s in responses), None)
|
|
978
|
-
|
|
979
|
-
# Process all responses
|
|
980
|
-
responses_to_process = responses.items()
|
|
981
|
-
|
|
982
|
-
for status_code, resp_info in sorted(responses_to_process):
|
|
983
|
-
if not added_response_section:
|
|
984
|
-
desc_parts.append(response_section)
|
|
985
|
-
added_response_section = True
|
|
986
|
-
|
|
987
|
-
status_marker = " (Success)" if status_code == success_status else ""
|
|
988
|
-
desc_parts.append(
|
|
989
|
-
f"\n- **{status_code}**{status_marker}: {resp_info.description or 'No description.'}"
|
|
990
|
-
)
|
|
991
|
-
|
|
992
|
-
# Process content schemas for this response
|
|
993
|
-
if resp_info.content_schema:
|
|
994
|
-
# Prioritize json, then take first available
|
|
995
|
-
media_type = (
|
|
996
|
-
"application/json"
|
|
997
|
-
if "application/json" in resp_info.content_schema
|
|
998
|
-
else next(iter(resp_info.content_schema), None)
|
|
999
|
-
)
|
|
1000
|
-
|
|
1001
|
-
if media_type:
|
|
1002
|
-
schema = resp_info.content_schema.get(media_type)
|
|
1003
|
-
desc_parts.append(f" - Content-Type: `{media_type}`")
|
|
1004
|
-
|
|
1005
|
-
# Add response property descriptions
|
|
1006
|
-
if isinstance(schema, dict):
|
|
1007
|
-
# Handle array responses
|
|
1008
|
-
if schema.get("type") == "array" and "items" in schema:
|
|
1009
|
-
items_schema = schema["items"]
|
|
1010
|
-
if (
|
|
1011
|
-
isinstance(items_schema, dict)
|
|
1012
|
-
and "properties" in items_schema
|
|
1013
|
-
):
|
|
1014
|
-
desc_parts.append("\n - **Response Item Properties:**")
|
|
1015
|
-
for prop_name, prop_schema in items_schema[
|
|
1016
|
-
"properties"
|
|
1017
|
-
].items():
|
|
1018
|
-
if (
|
|
1019
|
-
isinstance(prop_schema, dict)
|
|
1020
|
-
and "description" in prop_schema
|
|
1021
|
-
):
|
|
1022
|
-
desc_parts.append(
|
|
1023
|
-
f"\n - **{prop_name}**: {prop_schema['description']}"
|
|
1024
|
-
)
|
|
1025
|
-
# Handle object responses
|
|
1026
|
-
elif "properties" in schema:
|
|
1027
|
-
desc_parts.append("\n - **Response Properties:**")
|
|
1028
|
-
for prop_name, prop_schema in schema["properties"].items():
|
|
1029
|
-
if (
|
|
1030
|
-
isinstance(prop_schema, dict)
|
|
1031
|
-
and "description" in prop_schema
|
|
1032
|
-
):
|
|
1033
|
-
desc_parts.append(
|
|
1034
|
-
f"\n - **{prop_name}**: {prop_schema['description']}"
|
|
1035
|
-
)
|
|
1036
|
-
|
|
1037
|
-
# Generate Example
|
|
1038
|
-
if schema:
|
|
1039
|
-
example = generate_example_from_schema(schema)
|
|
1040
|
-
if example != "unknown_type" and example is not None:
|
|
1041
|
-
desc_parts.append("\n - **Example:**")
|
|
1042
|
-
desc_parts.append(
|
|
1043
|
-
format_json_for_description(example, indent=2)
|
|
1044
|
-
)
|
|
1045
|
-
|
|
1046
|
-
return "\n".join(desc_parts)
|
|
1047
|
-
|
|
1048
|
-
|
|
1049
|
-
def _replace_ref_with_defs(
|
|
1050
|
-
info: dict[str, Any], description: str | None = None
|
|
1051
|
-
) -> dict[str, Any]:
|
|
1052
|
-
"""
|
|
1053
|
-
Replace openapi $ref with jsonschema $defs
|
|
1054
|
-
|
|
1055
|
-
Examples:
|
|
1056
|
-
- {"type": "object", "properties": {"$ref": "#/components/schemas/..."}}
|
|
1057
|
-
- {"$ref": "#/components/schemas/..."}
|
|
1058
|
-
- {"items": {"$ref": "#/components/schemas/..."}}
|
|
1059
|
-
- {"anyOf": [{"$ref": "#/components/schemas/..."}]}
|
|
1060
|
-
- {"allOf": [{"$ref": "#/components/schemas/..."}]}
|
|
1061
|
-
- {"oneOf": [{"$ref": "#/components/schemas/..."}]}
|
|
1062
|
-
|
|
1063
|
-
Args:
|
|
1064
|
-
info: dict[str, Any]
|
|
1065
|
-
description: str | None
|
|
1066
|
-
|
|
1067
|
-
Returns:
|
|
1068
|
-
dict[str, Any]
|
|
1069
|
-
"""
|
|
1070
|
-
schema = info.copy()
|
|
1071
|
-
if ref_path := schema.get("$ref"):
|
|
1072
|
-
if isinstance(ref_path, str):
|
|
1073
|
-
if ref_path.startswith("#/components/schemas/"):
|
|
1074
|
-
schema_name = ref_path.split("/")[-1]
|
|
1075
|
-
schema["$ref"] = f"#/$defs/{schema_name}"
|
|
1076
|
-
elif not ref_path.startswith("#/"):
|
|
1077
|
-
raise ValueError(
|
|
1078
|
-
f"External or non-local reference not supported: {ref_path}. "
|
|
1079
|
-
f"FastMCP only supports local schema references starting with '#/'. "
|
|
1080
|
-
f"Please include all schema definitions within the OpenAPI document."
|
|
1081
|
-
)
|
|
1082
|
-
elif properties := schema.get("properties"):
|
|
1083
|
-
if "$ref" in properties:
|
|
1084
|
-
schema["properties"] = _replace_ref_with_defs(properties)
|
|
1085
|
-
else:
|
|
1086
|
-
schema["properties"] = {
|
|
1087
|
-
prop_name: _replace_ref_with_defs(prop_schema)
|
|
1088
|
-
for prop_name, prop_schema in properties.items()
|
|
1089
|
-
}
|
|
1090
|
-
elif item_schema := schema.get("items"):
|
|
1091
|
-
schema["items"] = _replace_ref_with_defs(item_schema)
|
|
1092
|
-
for section in ["anyOf", "allOf", "oneOf"]:
|
|
1093
|
-
for i, item in enumerate(schema.get(section, [])):
|
|
1094
|
-
schema[section][i] = _replace_ref_with_defs(item)
|
|
1095
|
-
if info.get("description", description) and not schema.get("description"):
|
|
1096
|
-
schema["description"] = description
|
|
1097
|
-
return schema
|
|
1098
|
-
|
|
1099
|
-
|
|
1100
|
-
def _make_optional_parameter_nullable(schema: dict[str, Any]) -> dict[str, Any]:
|
|
1101
|
-
"""
|
|
1102
|
-
Make an optional parameter schema nullable to allow None values.
|
|
1103
|
-
|
|
1104
|
-
For optional parameters, we need to allow null values in addition to the
|
|
1105
|
-
specified type to handle cases where None is passed for optional parameters.
|
|
1106
|
-
"""
|
|
1107
|
-
# If schema already has multiple types or is already nullable, don't modify
|
|
1108
|
-
if "anyOf" in schema or "oneOf" in schema or "allOf" in schema:
|
|
1109
|
-
return schema
|
|
1110
|
-
|
|
1111
|
-
# If it's already nullable (type includes null), don't modify
|
|
1112
|
-
if isinstance(schema.get("type"), list) and "null" in schema["type"]:
|
|
1113
|
-
return schema
|
|
1114
|
-
|
|
1115
|
-
# Create a new schema that allows null in addition to the original type
|
|
1116
|
-
if "type" in schema:
|
|
1117
|
-
original_type = schema["type"]
|
|
1118
|
-
|
|
1119
|
-
if isinstance(original_type, str):
|
|
1120
|
-
# Single type - make it a union with null
|
|
1121
|
-
# Optimize: avoid full schema copy by building directly
|
|
1122
|
-
nested_non_nullable_schema = {
|
|
1123
|
-
"type": original_type,
|
|
1124
|
-
}
|
|
1125
|
-
nullable_schema = {}
|
|
1126
|
-
|
|
1127
|
-
# Define type-specific properties that should move to nested schema
|
|
1128
|
-
type_specific_properties = set()
|
|
1129
|
-
if original_type == "array":
|
|
1130
|
-
# https://json-schema.org/understanding-json-schema/reference/array
|
|
1131
|
-
type_specific_properties = {
|
|
1132
|
-
"items",
|
|
1133
|
-
"prefixItems",
|
|
1134
|
-
"unevaluatedItems",
|
|
1135
|
-
"contains",
|
|
1136
|
-
"minContains",
|
|
1137
|
-
"maxContains",
|
|
1138
|
-
"minItems",
|
|
1139
|
-
"maxItems",
|
|
1140
|
-
"uniqueItems",
|
|
1141
|
-
}
|
|
1142
|
-
elif original_type == "object":
|
|
1143
|
-
# https://json-schema.org/understanding-json-schema/reference/object
|
|
1144
|
-
type_specific_properties = {
|
|
1145
|
-
"properties",
|
|
1146
|
-
"patternProperties",
|
|
1147
|
-
"additionalProperties",
|
|
1148
|
-
"unevaluatedProperties",
|
|
1149
|
-
"required",
|
|
1150
|
-
"propertyNames",
|
|
1151
|
-
"minProperties",
|
|
1152
|
-
"maxProperties",
|
|
1153
|
-
}
|
|
1154
|
-
|
|
1155
|
-
# Efficiently distribute properties without copying the entire schema
|
|
1156
|
-
for key, value in schema.items():
|
|
1157
|
-
if key == "type":
|
|
1158
|
-
continue # Already handled
|
|
1159
|
-
elif key in type_specific_properties:
|
|
1160
|
-
nested_non_nullable_schema[key] = value
|
|
1161
|
-
else:
|
|
1162
|
-
nullable_schema[key] = value
|
|
1163
|
-
|
|
1164
|
-
nullable_schema["anyOf"] = [nested_non_nullable_schema, {"type": "null"}]
|
|
1165
|
-
return nullable_schema
|
|
1166
|
-
|
|
1167
|
-
return schema
|
|
1168
|
-
|
|
1169
|
-
|
|
1170
|
-
def _add_null_to_type(schema: dict[str, Any]) -> None:
|
|
1171
|
-
"""Add 'null' to the schema's type field or handle oneOf/anyOf/allOf constructs if not already present."""
|
|
1172
|
-
if "type" in schema:
|
|
1173
|
-
current_type = schema["type"]
|
|
1174
|
-
|
|
1175
|
-
if isinstance(current_type, str):
|
|
1176
|
-
# Convert string type to array with null
|
|
1177
|
-
schema["type"] = [current_type, "null"]
|
|
1178
|
-
elif isinstance(current_type, list):
|
|
1179
|
-
# Add null to array if not already present
|
|
1180
|
-
if "null" not in current_type:
|
|
1181
|
-
schema["type"] = current_type + ["null"]
|
|
1182
|
-
elif "oneOf" in schema:
|
|
1183
|
-
# Convert oneOf to anyOf with null type
|
|
1184
|
-
schema["anyOf"] = schema.pop("oneOf") + [{"type": "null"}]
|
|
1185
|
-
elif "anyOf" in schema:
|
|
1186
|
-
# Add null type to anyOf if not already present
|
|
1187
|
-
if not any(item.get("type") == "null" for item in schema["anyOf"]):
|
|
1188
|
-
schema["anyOf"].append({"type": "null"})
|
|
1189
|
-
elif "allOf" in schema:
|
|
1190
|
-
# For allOf, wrap in anyOf with null - this means (all conditions) OR null
|
|
1191
|
-
schema["anyOf"] = [{"allOf": schema.pop("allOf")}, {"type": "null"}]
|
|
1192
|
-
|
|
1193
|
-
|
|
1194
|
-
-def _handle_nullable_fields(schema: dict[str, Any] | Any) -> dict[str, Any] | Any:
-    """Convert OpenAPI nullable fields to JSON Schema format: {"type": "string",
-    "nullable": true} -> {"type": ["string", "null"]}"""
-
-    if not isinstance(schema, dict):
-        return schema
-
-    # Check if we need to modify anything first to avoid unnecessary copying
-    has_root_nullable_field = "nullable" in schema
-    has_root_nullable_true = (
-        has_root_nullable_field
-        and schema["nullable"]
-        and (
-            "type" in schema
-            or "oneOf" in schema
-            or "anyOf" in schema
-            or "allOf" in schema
-        )
-    )
-
-    has_property_nullable_field = False
-    if "properties" in schema:
-        for prop_schema in schema["properties"].values():
-            if isinstance(prop_schema, dict) and "nullable" in prop_schema:
-                has_property_nullable_field = True
-                break
-
-    # If no nullable fields at all, return original schema unchanged
-    if not has_root_nullable_field and not has_property_nullable_field:
-        return schema
-
-    # Only copy if we need to modify
-    result = schema.copy()
-
-    # Handle root level nullable - always remove the field, convert type if true
-    if has_root_nullable_field:
-        result.pop("nullable")
-        if has_root_nullable_true:
-            _add_null_to_type(result)
-
-    # Handle properties nullable fields
-    if has_property_nullable_field and "properties" in result:
-        for prop_name, prop_schema in result["properties"].items():
-            if isinstance(prop_schema, dict) and "nullable" in prop_schema:
-                nullable_value = prop_schema.pop("nullable")
-                if nullable_value and (
-                    "type" in prop_schema
-                    or "oneOf" in prop_schema
-                    or "anyOf" in prop_schema
-                    or "allOf" in prop_schema
-                ):
-                    _add_null_to_type(prop_schema)
-
-    return result
-
-
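A short illustration (not from the package) of the OpenAPI 3.0 → JSON Schema conversion the removed `_handle_nullable_fields` performed, both at the root and inside `properties`; `nullable` is always stripped, and the type is widened only when the flag was true.

```python
before = {
    "type": "object",
    "properties": {
        "name": {"type": "string", "nullable": True},
        "age": {"type": "integer", "nullable": False},
    },
}

after = {
    "type": "object",
    "properties": {
        "name": {"type": ["string", "null"]},  # nullable: true -> type union with null
        "age": {"type": "integer"},            # nullable: false -> flag simply removed
    },
}
print(after)
```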
-def _combine_schemas(route: HTTPRoute) -> dict[str, Any]:
-    """
-    Combines parameter and request body schemas into a single schema.
-    Handles parameter name collisions by adding location suffixes.
-
-    Args:
-        route: HTTPRoute object
-
-    Returns:
-        Combined schema dictionary
-    """
-    properties = {}
-    required = []
-
-    # First pass: collect parameter names by location and body properties
-    param_names_by_location = {
-        "path": set(),
-        "query": set(),
-        "header": set(),
-        "cookie": set(),
-    }
-    body_props = {}
-
-    for param in route.parameters:
-        param_names_by_location[param.location].add(param.name)
-
-    if route.request_body and route.request_body.content_schema:
-        content_type = next(iter(route.request_body.content_schema))
-        body_schema = _replace_ref_with_defs(
-            route.request_body.content_schema[content_type].copy(),
-            route.request_body.description,
-        )
-        body_props = body_schema.get("properties", {})
-
-    # Detect collisions: parameters that exist in both body and path/query/header
-    all_non_body_params = set()
-    for location_params in param_names_by_location.values():
-        all_non_body_params.update(location_params)
-
-    body_param_names = set(body_props.keys())
-    colliding_params = all_non_body_params & body_param_names
-
-    # Add parameters with suffixes for collisions
-    for param in route.parameters:
-        if param.name in colliding_params:
-            # Add suffix for non-body parameters when collision detected
-            suffixed_name = f"{param.name}__{param.location}"
-            if param.required:
-                required.append(suffixed_name)
-
-            # Add location info to description
-            param_schema = _replace_ref_with_defs(
-                param.schema_.copy(), param.description
-            )
-            original_desc = param_schema.get("description", "")
-            location_desc = f"({param.location.capitalize()} parameter)"
-            if original_desc:
-                param_schema["description"] = f"{original_desc} {location_desc}"
-            else:
-                param_schema["description"] = location_desc
-
-            # Don't make optional parameters nullable - they can simply be omitted
-            # The OpenAPI specification doesn't require optional parameters to accept null values
-
-            properties[suffixed_name] = param_schema
-        else:
-            # No collision, use original name
-            if param.required:
-                required.append(param.name)
-            param_schema = _replace_ref_with_defs(
-                param.schema_.copy(), param.description
-            )
-
-            # Don't make optional parameters nullable - they can simply be omitted
-            # The OpenAPI specification doesn't require optional parameters to accept null values
-
-            properties[param.name] = param_schema
-
-    # Add request body properties (no suffixes for body parameters)
-    if route.request_body and route.request_body.content_schema:
-        for prop_name, prop_schema in body_props.items():
-            properties[prop_name] = prop_schema
-
-        if route.request_body.required:
-            required.extend(body_schema.get("required", []))
-
-    result = {
-        "type": "object",
-        "properties": properties,
-        "required": required,
-    }
-    # Add schema definitions if available
-    if route.schema_definitions:
-        result["$defs"] = route.schema_definitions.copy()
-
-    # Use lightweight compression - prune additionalProperties and unused definitions
-    if result.get("additionalProperties") is False:
-        result.pop("additionalProperties")
-
-    # Remove unused definitions (lightweight approach - just check direct $ref usage)
-    if "$defs" in result:
-        used_refs = set()
-
-        def find_refs_in_value(value):
-            if isinstance(value, dict):
-                if "$ref" in value and isinstance(value["$ref"], str):
-                    ref = value["$ref"]
-                    if ref.startswith("#/$defs/"):
-                        used_refs.add(ref.split("/")[-1])
-                for v in value.values():
-                    find_refs_in_value(v)
-            elif isinstance(value, list):
-                for item in value:
-                    find_refs_in_value(item)
-
-        # Find refs in the main schema (excluding $defs section)
-        for key, value in result.items():
-            if key != "$defs":
-                find_refs_in_value(value)
-
-        # Remove unused definitions
-        if used_refs:
-            result["$defs"] = {
-                name: def_schema
-                for name, def_schema in result["$defs"].items()
-                if name in used_refs
-            }
-        else:
-            result.pop("$defs")
-
-    return result
-
-
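To make the collision handling concrete, here is the combined-schema shape the removed `_combine_schemas` would produce for a hypothetical `PUT /users/{id}` route whose JSON body also carries an `id` field. The suffix follows the `f"{param.name}__{param.location}"` pattern in the code above; the route, property names, and required flags are assumptions for illustration.

```python
combined = {
    "type": "object",
    "properties": {
        # Path parameter collides with the body's "id", so it gets a location suffix
        "id__path": {"type": "integer", "description": "(Path parameter)"},
        # Body properties keep their original names
        "id": {"type": "string"},
        "name": {"type": "string"},
    },
    # Assuming the path parameter is required and the body marks "name" as required
    "required": ["id__path", "name"],
}
print(combined)
```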
-def _adjust_union_types(
-    schema: dict[str, Any] | list[Any],
-) -> dict[str, Any] | list[Any]:
-    """Recursively replace 'oneOf' with 'anyOf' in schema to handle overlapping unions."""
-    if isinstance(schema, dict):
-        # Optimize: only copy if we need to modify something
-        has_one_of = "oneOf" in schema
-        needs_recursive_processing = False
-
-        # Check if we need recursive processing
-        for v in schema.values():
-            if isinstance(v, dict | list):
-                needs_recursive_processing = True
-                break
-
-        # If nothing to change, return original
-        if not has_one_of and not needs_recursive_processing:
-            return schema
-
-        # Work on a copy only when modification is needed
-        result = schema.copy()
-        if has_one_of:
-            result["anyOf"] = result.pop("oneOf")
-
-        # Only recurse where needed
-        if needs_recursive_processing:
-            for k, v in result.items():
-                if isinstance(v, dict | list):
-                    result[k] = _adjust_union_types(v)
-
-        return result
-    elif isinstance(schema, list):
-        return [_adjust_union_types(item) for item in schema]
-    return schema
-
-
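A brief illustration (not from the package) of the recursive `oneOf` → `anyOf` rewrite performed by the removed `_adjust_union_types`, shown on a nested schema where the union sits inside a property.

```python
before = {
    "type": "object",
    "properties": {
        "value": {"oneOf": [{"type": "string"}, {"type": "number"}]},
    },
}

after = {
    "type": "object",
    "properties": {
        # oneOf becomes anyOf so overlapping alternatives no longer fail validation
        "value": {"anyOf": [{"type": "string"}, {"type": "number"}]},
    },
}
print(after)
```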
-def extract_output_schema_from_responses(
-    responses: dict[str, ResponseInfo],
-    schema_definitions: dict[str, Any] | None = None,
-    openapi_version: str | None = None,
-) -> dict[str, Any] | None:
-    """
-    Extract output schema from OpenAPI responses for use as MCP tool output schema.
-
-    This function finds the first successful response (200, 201, 202, 204) with a
-    JSON-compatible content type and extracts its schema. If the schema is not an
-    object type, it wraps it to comply with MCP requirements.
-
-    Args:
-        responses: Dictionary of ResponseInfo objects keyed by status code
-        schema_definitions: Optional schema definitions to include in the output schema
-        openapi_version: OpenAPI version string, used to optimize nullable field handling
-
-    Returns:
-        dict: MCP-compliant output schema with potential wrapping, or None if no suitable schema found
-    """
-    if not responses:
-        return None
-
-    # Priority order for success status codes
-    success_codes = ["200", "201", "202", "204"]
-
-    # Find the first successful response
-    response_info = None
-    for status_code in success_codes:
-        if status_code in responses:
-            response_info = responses[status_code]
-            break
-
-    # If no explicit success codes, try any 2xx response
-    if response_info is None:
-        for status_code, resp_info in responses.items():
-            if status_code.startswith("2"):
-                response_info = resp_info
-                break
-
-    if response_info is None or not response_info.content_schema:
-        return None
-
-    # Prefer application/json, then fall back to other JSON-compatible types
-    json_compatible_types = [
-        "application/json",
-        "application/vnd.api+json",
-        "application/hal+json",
-        "application/ld+json",
-        "text/json",
-    ]
-
-    schema = None
-    for content_type in json_compatible_types:
-        if content_type in response_info.content_schema:
-            schema = response_info.content_schema[content_type]
-            break
-
-    # If no JSON-compatible type found, try the first available content type
-    if schema is None and response_info.content_schema:
-        first_content_type = next(iter(response_info.content_schema))
-        schema = response_info.content_schema[first_content_type]
-        logger.debug(
-            f"Using non-JSON content type for output schema: {first_content_type}"
-        )
-
-    if not schema or not isinstance(schema, dict):
-        return None
-
-    # Clean and copy the schema
-    output_schema = schema.copy()
-
-    # If schema has a $ref, resolve it first before processing nullable fields
-    if "$ref" in output_schema and schema_definitions:
-        ref_path = output_schema["$ref"]
-        if ref_path.startswith("#/components/schemas/"):
-            schema_name = ref_path.split("/")[-1]
-            if schema_name in schema_definitions:
-                # Replace $ref with the actual schema definition
-                output_schema = schema_definitions[schema_name].copy()
-
-    # Handle OpenAPI nullable fields by converting them to JSON Schema format
-    # This prevents "None is not of type 'string'" validation errors
-    # Only needed for OpenAPI 3.0 - 3.1 uses standard JSON Schema null types
-    if openapi_version and openapi_version.startswith("3.0"):
-        output_schema = _handle_nullable_fields(output_schema)
-
-    # MCP requires output schemas to be objects. If this schema is not an object,
-    # we need to wrap it similar to how ParsedFunction.from_function() does it
-    if output_schema.get("type") != "object":
-        # Create a wrapped schema that contains the original schema under a "result" key
-        wrapped_schema = {
-            "type": "object",
-            "properties": {"result": output_schema},
-            "required": ["result"],
-            "x-fastmcp-wrap-result": True,
-        }
-        output_schema = wrapped_schema
-
-    # Add schema definitions if available and handle nullable fields in them
-    # Only add $defs if we didn't resolve the $ref inline above
-    if schema_definitions and "$ref" not in schema.copy():
-        processed_defs = {}
-        for def_name, def_schema in schema_definitions.items():
-            # Only handle nullable fields for OpenAPI 3.0 - 3.1 uses standard JSON Schema null types
-            if openapi_version and openapi_version.startswith("3.0"):
-                processed_defs[def_name] = _handle_nullable_fields(def_schema)
-            else:
-                processed_defs[def_name] = def_schema
-        output_schema["$defs"] = processed_defs
-
-    # Use lightweight compression - prune additionalProperties and unused definitions
-    if output_schema.get("additionalProperties") is False:
-        output_schema.pop("additionalProperties")
-
-    # Remove unused definitions (lightweight approach - just check direct $ref usage)
-    if "$defs" in output_schema:
-        used_refs = set()
-
-        def find_refs_in_value(value):
-            if isinstance(value, dict):
-                if "$ref" in value and isinstance(value["$ref"], str):
-                    ref = value["$ref"]
-                    if ref.startswith("#/$defs/"):
-                        used_refs.add(ref.split("/")[-1])
-                for v in value.values():
-                    find_refs_in_value(v)
-            elif isinstance(value, list):
-                for item in value:
-                    find_refs_in_value(item)
-
-        # Find refs in the main schema (excluding $defs section)
-        for key, value in output_schema.items():
-            if key != "$defs":
-                find_refs_in_value(value)
-
-        # Remove unused definitions
-        if used_refs:
-            output_schema["$defs"] = {
-                name: def_schema
-                for name, def_schema in output_schema["$defs"].items()
-                if name in used_refs
-            }
-        else:
-            output_schema.pop("$defs")
-
-    # Adjust union types to handle overlapping unions
-    output_schema = cast(dict[str, Any], _adjust_union_types(output_schema))
-
-    return output_schema
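Finally, a sketch (not from the package) of the wrapping step in the removed `extract_output_schema_from_responses`: when the selected response schema is not an object, it is nested under a `result` key with the `x-fastmcp-wrap-result` marker, as the literal in the code above shows. The sample response schema is an assumption.

```python
# A 200 response declared as an array of strings...
response_schema = {"type": "array", "items": {"type": "string"}}

# ...is wrapped so the MCP tool output schema is always an object.
output_schema = {
    "type": "object",
    "properties": {"result": response_schema},
    "required": ["result"],
    "x-fastmcp-wrap-result": True,
}
print(output_schema)
```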