fastmcp 1.0-py3-none-any.whl → 2.0.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- fastmcp/__init__.py +15 -4
- fastmcp/cli/__init__.py +0 -1
- fastmcp/cli/claude.py +13 -11
- fastmcp/cli/cli.py +61 -41
- fastmcp/client/__init__.py +25 -0
- fastmcp/client/base.py +1 -0
- fastmcp/client/client.py +181 -0
- fastmcp/client/roots.py +75 -0
- fastmcp/client/sampling.py +50 -0
- fastmcp/client/transports.py +411 -0
- fastmcp/prompts/__init__.py +1 -1
- fastmcp/prompts/base.py +27 -26
- fastmcp/prompts/prompt_manager.py +50 -12
- fastmcp/resources/__init__.py +5 -5
- fastmcp/resources/base.py +2 -2
- fastmcp/resources/resource_manager.py +66 -9
- fastmcp/resources/templates.py +15 -10
- fastmcp/resources/types.py +16 -11
- fastmcp/server/__init__.py +5 -0
- fastmcp/server/context.py +222 -0
- fastmcp/server/openapi.py +625 -0
- fastmcp/server/proxy.py +219 -0
- fastmcp/{server.py → server/server.py} +251 -262
- fastmcp/settings.py +73 -0
- fastmcp/tools/base.py +28 -18
- fastmcp/tools/tool_manager.py +45 -10
- fastmcp/utilities/func_metadata.py +33 -19
- fastmcp/utilities/openapi.py +797 -0
- fastmcp/utilities/types.py +3 -4
- fastmcp-2.0.0.dist-info/METADATA +770 -0
- fastmcp-2.0.0.dist-info/RECORD +39 -0
- fastmcp-2.0.0.dist-info/licenses/LICENSE +201 -0
- fastmcp/prompts/manager.py +0 -50
- fastmcp-1.0.dist-info/METADATA +0 -604
- fastmcp-1.0.dist-info/RECORD +0 -28
- fastmcp-1.0.dist-info/licenses/LICENSE +0 -21
- {fastmcp-1.0.dist-info → fastmcp-2.0.0.dist-info}/WHEEL +0 -0
- {fastmcp-1.0.dist-info → fastmcp-2.0.0.dist-info}/entry_points.txt +0 -0
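
Two renames stand out in the listing: `fastmcp/prompts/manager.py` is removed in favor of `fastmcp/prompts/prompt_manager.py`, and `fastmcp/server.py` becomes the `fastmcp/server/` package. A hedged compatibility sketch for code that imported the prompt manager module directly follows; the class name `PromptManager` is assumed from the 1.0 layout and is not confirmed by this diff.

```python
# Hedged sketch: tolerate the module rename visible in the listing above.
# `PromptManager` as the class name is an assumption, not confirmed by this diff.
try:
    from fastmcp.prompts.prompt_manager import PromptManager  # fastmcp >= 2.0.0 layout
except ImportError:
    from fastmcp.prompts.manager import PromptManager  # fastmcp 1.0 layout
```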
@@ -0,0 +1,797 @@
import json
import logging
from typing import Any, Literal, cast

# Using the recommended library: openapi-pydantic
from openapi_pydantic import (
    MediaType,
    OpenAPI,
    Operation,
    Parameter,
    PathItem,
    Reference,
    RequestBody,
    Response,
    Schema,
)
from pydantic import BaseModel, Field, ValidationError

from fastmcp.utilities import openapi

logger = logging.getLogger(__name__)

# --- Intermediate Representation (IR) Definition ---
# (IR models remain the same)

HttpMethod = Literal[
    "GET", "POST", "PUT", "DELETE", "PATCH", "OPTIONS", "HEAD", "TRACE"
]
ParameterLocation = Literal["path", "query", "header", "cookie"]
JsonSchema = dict[str, Any]


class ParameterInfo(BaseModel):
    """Represents a single parameter for an HTTP operation in our IR."""

    name: str
    location: ParameterLocation  # Mapped from 'in' field of openapi-pydantic Parameter
    required: bool = False
    schema_: JsonSchema = Field(..., alias="schema")  # Target name in IR
    description: str | None = None

    # No model_config needed here if we populate manually after accessing 'in'


class RequestBodyInfo(BaseModel):
    """Represents the request body for an HTTP operation in our IR."""

    required: bool = False
    content_schema: dict[str, JsonSchema] = Field(
        default_factory=dict
    )  # Key: media type
    description: str | None = None


class ResponseInfo(BaseModel):
    """Represents response information in our IR."""

    description: str | None = None
    # Store schema per media type, key is media type
    content_schema: dict[str, JsonSchema] = Field(default_factory=dict)


class HTTPRoute(BaseModel):
    """Intermediate Representation for a single OpenAPI operation."""

    path: str
    method: HttpMethod
    operation_id: str | None = None
    summary: str | None = None
    description: str | None = None
    tags: list[str] = Field(default_factory=list)
    parameters: list[ParameterInfo] = Field(default_factory=list)
    request_body: RequestBodyInfo | None = None
    responses: dict[str, ResponseInfo] = Field(
        default_factory=dict
    )  # Key: status code str


# Export public symbols
__all__ = [
    "HTTPRoute",
    "ParameterInfo",
    "RequestBodyInfo",
    "ResponseInfo",
    "HttpMethod",
    "ParameterLocation",
    "JsonSchema",
    "parse_openapi_to_http_routes",
]

# --- Helper Functions ---


def _resolve_ref(
    item: Reference | Schema | Parameter | RequestBody | Any, openapi: OpenAPI
) -> Any:
    """Resolves a potential Reference object to its target definition (no changes needed here)."""
    if isinstance(item, Reference):
        ref_str = item.ref
        try:
            if not ref_str.startswith("#/"):
                raise ValueError(
                    f"External or non-local reference not supported: {ref_str}"
                )
            parts = ref_str.strip("#/").split("/")
            target = openapi
            for part in parts:
                if part.isdigit() and isinstance(target, list):
                    target = target[int(part)]
                elif isinstance(target, BaseModel):
                    # Use model_extra for fields not explicitly defined (like components types)
                    # Check class fields first, then model_extra
                    if part in target.__class__.model_fields:
                        target = getattr(target, part, None)
                    elif target.model_extra and part in target.model_extra:
                        target = target.model_extra[part]
                    else:
                        # Special handling for components sub-types common structure
                        if part == "components" and hasattr(target, "components"):
                            target = getattr(target, "components")
                        elif hasattr(target, part):  # Fallback check
                            target = getattr(target, part, None)
                        else:
                            target = None  # Part not found
                elif isinstance(target, dict):
                    target = target.get(part)
                else:
                    raise ValueError(
                        f"Cannot traverse part '{part}' in reference '{ref_str}' from type {type(target)}"
                    )
                if target is None:
                    raise ValueError(
                        f"Reference part '{part}' not found in path '{ref_str}'"
                    )
            if isinstance(target, Reference):
                return _resolve_ref(target, openapi)
            return target
        except (AttributeError, KeyError, IndexError, TypeError, ValueError) as e:
            raise ValueError(f"Failed to resolve reference '{ref_str}': {e}") from e
    return item


def _extract_schema_as_dict(
    schema_obj: Schema | Reference, openapi: OpenAPI
) -> JsonSchema:
    """Resolves a schema/reference and returns it as a dictionary."""
    resolved_schema = _resolve_ref(schema_obj, openapi)
    if isinstance(resolved_schema, Schema):
        # Using exclude_none=True might be better than exclude_unset sometimes
        return resolved_schema.model_dump(mode="json", by_alias=True, exclude_none=True)
    elif isinstance(resolved_schema, dict):
        logger.warning(
            "Resolved schema reference resulted in a dict, not a Schema model."
        )
        return resolved_schema
    else:
        ref_str = getattr(schema_obj, "ref", "unknown")
        logger.warning(
            f"Expected Schema after resolving ref '{ref_str}', got {type(resolved_schema)}. Returning empty dict."
        )
        return {}


def _convert_to_parameter_location(param_in: str) -> ParameterLocation:
    """Convert string parameter location to our ParameterLocation type."""
    if param_in == "path":
        return "path"
    elif param_in == "query":
        return "query"
    elif param_in == "header":
        return "header"
    elif param_in == "cookie":
        return "cookie"
    else:
        logger.warning(f"Unknown parameter location: {param_in}, defaulting to 'query'")
        return "query"


def _extract_parameters(
    operation_params: list[Parameter | Reference] | None,
    path_item_params: list[Parameter | Reference] | None,
    openapi: OpenAPI,
) -> list[ParameterInfo]:
    """Extracts and resolves parameters using corrected attribute names."""
    extracted_params: list[ParameterInfo] = []
    seen_params: dict[
        tuple[str, str], bool
    ] = {}  # Use string keys to avoid type issues
    all_params_refs = (operation_params or []) + (path_item_params or [])

    for param_or_ref in all_params_refs:
        try:
            parameter = cast(Parameter, _resolve_ref(param_or_ref, openapi))
            if not isinstance(parameter, Parameter):
                # ... (error logging remains the same)
                continue

            # --- *** CORRECTED ATTRIBUTE ACCESS HERE *** ---
            param_in = parameter.param_in  # CORRECTED: Use 'param_in'
            param_location = _convert_to_parameter_location(param_in)
            param_schema_obj = parameter.param_schema  # CORRECTED: Use 'param_schema'
            # --- *** ---

            param_key = (parameter.name, param_in)
            if param_key in seen_params:
                continue
            seen_params[param_key] = True

            param_schema_dict = {}
            if param_schema_obj:  # Check if schema exists
                param_schema_dict = _extract_schema_as_dict(param_schema_obj, openapi)
            elif parameter.content:
                # Handle complex parameters with 'content'
                first_media_type = next(iter(parameter.content.values()), None)
                if (
                    first_media_type and first_media_type.media_type_schema
                ):  # CORRECTED: Use 'media_type_schema'
                    param_schema_dict = _extract_schema_as_dict(
                        first_media_type.media_type_schema, openapi
                    )
                    logger.debug(
                        f"Parameter '{parameter.name}' using schema from 'content' field."
                    )

            # Manually create ParameterInfo instance using correct field names
            param_info = ParameterInfo(
                name=parameter.name,
                location=param_location,  # Use converted parameter location
                required=parameter.required,
                schema=param_schema_dict,  # Populate 'schema' field in IR
                description=parameter.description,
            )
            extracted_params.append(param_info)

        except (
            ValidationError,
            ValueError,
            AttributeError,
            TypeError,
        ) as e:  # Added TypeError
            param_name = getattr(
                param_or_ref, "name", getattr(param_or_ref, "ref", "unknown")
            )
            logger.error(
                f"Failed to extract parameter '{param_name}': {e}", exc_info=False
            )

    return extracted_params


def _extract_request_body(
    request_body_or_ref: RequestBody | Reference | None, openapi: OpenAPI
) -> RequestBodyInfo | None:
    """Extracts and resolves the request body using corrected attribute names."""
    if not request_body_or_ref:
        return None
    try:
        request_body = cast(RequestBody, _resolve_ref(request_body_or_ref, openapi))
        if not isinstance(request_body, RequestBody):
            # ... (error logging remains the same)
            return None

        content_schemas: dict[str, JsonSchema] = {}
        if request_body.content:
            for media_type_str, media_type_obj in request_body.content.items():
                # --- *** CORRECTED ATTRIBUTE ACCESS HERE *** ---
                if (
                    isinstance(media_type_obj, MediaType)
                    and media_type_obj.media_type_schema
                ):  # CORRECTED: Use 'media_type_schema'
                    # --- *** ---
                    try:
                        # Use the corrected attribute here as well
                        schema_dict = _extract_schema_as_dict(
                            media_type_obj.media_type_schema, openapi
                        )
                        content_schemas[media_type_str] = schema_dict
                    except ValueError as schema_err:
                        logger.error(
                            f"Failed to extract schema for media type '{media_type_str}' in request body: {schema_err}"
                        )
                elif not isinstance(media_type_obj, MediaType):
                    logger.warning(
                        f"Skipping invalid media type object for '{media_type_str}' (type: {type(media_type_obj)}) in request body."
                    )
                elif not media_type_obj.media_type_schema:  # Corrected check
                    logger.warning(
                        f"Skipping media type '{media_type_str}' in request body because it lacks a schema."
                    )

        return RequestBodyInfo(
            required=request_body.required,
            content_schema=content_schemas,
            description=request_body.description,
        )
    except (ValidationError, ValueError, AttributeError) as e:
        ref_name = getattr(request_body_or_ref, "ref", "unknown")
        logger.error(
            f"Failed to extract request body '{ref_name}': {e}", exc_info=False
        )
        return None


def _extract_responses(
    operation_responses: dict[str, Response | Reference] | None,
    openapi: OpenAPI,
) -> dict[str, ResponseInfo]:
    """Extracts and resolves response information for an operation."""
    extracted_responses: dict[str, ResponseInfo] = {}
    if not operation_responses:
        return extracted_responses

    for status_code, resp_or_ref in operation_responses.items():
        try:
            response = cast(Response, _resolve_ref(resp_or_ref, openapi))
            if not isinstance(response, Response):
                ref_str = getattr(resp_or_ref, "ref", "unknown")
                logger.warning(
                    f"Expected Response after resolving ref '{ref_str}' for status code {status_code}, got {type(response)}. Skipping."
                )
                continue

            content_schemas: dict[str, JsonSchema] = {}
            if response.content:
                for media_type_str, media_type_obj in response.content.items():
                    if (
                        isinstance(media_type_obj, MediaType)
                        and media_type_obj.media_type_schema
                    ):
                        try:
                            schema_dict = _extract_schema_as_dict(
                                media_type_obj.media_type_schema, openapi
                            )
                            content_schemas[media_type_str] = schema_dict
                        except ValueError as schema_err:
                            logger.error(
                                f"Failed to extract schema for media type '{media_type_str}' in response {status_code}: {schema_err}"
                            )

            resp_info = ResponseInfo(
                description=response.description, content_schema=content_schemas
            )
            extracted_responses[str(status_code)] = resp_info

        except (ValidationError, ValueError, AttributeError) as e:
            ref_name = getattr(resp_or_ref, "ref", "unknown")
            logger.error(
                f"Failed to extract response for status code {status_code} (ref: '{ref_name}'): {e}",
                exc_info=False,
            )

    return extracted_responses


# --- Main Parsing Function ---
# (No changes needed in the main loop logic, only in the helpers it calls)
def parse_openapi_to_http_routes(openapi_dict: dict[str, Any]) -> list[HTTPRoute]:
    """
    Parses an OpenAPI schema dictionary into a list of HTTPRoute objects
    using the openapi-pydantic library.
    """
    routes: list[HTTPRoute] = []
    try:
        openapi: OpenAPI = OpenAPI.model_validate(openapi_dict)
        logger.info(f"Successfully parsed OpenAPI schema version: {openapi.openapi}")
    except ValidationError as e:
        logger.error(f"OpenAPI schema validation failed: {e}")
        error_details = e.errors()
        logger.error(f"Validation errors: {error_details}")
        raise ValueError(f"Invalid OpenAPI schema: {error_details}") from e

    if not openapi.paths:
        logger.warning("OpenAPI schema has no paths defined.")
        return []

    for path_str, path_item_obj in openapi.paths.items():
        if not isinstance(path_item_obj, PathItem):
            logger.warning(
                f"Skipping invalid path item object for path '{path_str}' (type: {type(path_item_obj)})"
            )
            continue

        path_level_params = path_item_obj.parameters

        # Iterate through possible HTTP methods defined in the PathItem model fields
        # Use model_fields from the class, not the instance
        for method_lower in PathItem.model_fields.keys():
            if method_lower not in [
                "get",
                "put",
                "post",
                "delete",
                "options",
                "head",
                "patch",
                "trace",
            ]:
                continue

            operation: Operation | None = getattr(path_item_obj, method_lower, None)

            if operation and isinstance(operation, Operation):
                method_upper = cast(HttpMethod, method_lower.upper())
                logger.debug(f"Processing operation: {method_upper} {path_str}")
                try:
                    parameters = _extract_parameters(
                        operation.parameters, path_level_params, openapi
                    )
                    request_body_info = _extract_request_body(
                        operation.requestBody, openapi
                    )
                    responses = _extract_responses(operation.responses, openapi)

                    route = HTTPRoute(
                        path=path_str,
                        method=method_upper,
                        operation_id=operation.operationId,
                        summary=operation.summary,
                        description=operation.description,
                        tags=operation.tags or [],
                        parameters=parameters,
                        request_body=request_body_info,
                        responses=responses,
                    )
                    routes.append(route)
                    logger.info(
                        f"Successfully extracted route: {method_upper} {path_str}"
                    )
                except Exception as op_error:
                    op_id = operation.operationId or "unknown"
                    logger.error(
                        f"Failed to process operation {method_upper} {path_str} (ID: {op_id}): {op_error}",
                        exc_info=True,
                    )

    logger.info(f"Finished parsing. Extracted {len(routes)} HTTP routes.")
    return routes


# --- Example Usage (Optional) ---
if __name__ == "__main__":
    import json

    logging.basicConfig(
        level=logging.INFO, format="%(levelname)s:%(name)s:%(message)s"
    )  # Set to INFO

    petstore_schema = {
        "openapi": "3.1.0",  # Keep corrected version
        "info": {"title": "Simple Pet Store API", "version": "1.0.0"},
        "paths": {
            "/pets": {
                "get": {
                    "summary": "list all pets",
                    "operationId": "listPets",
                    "tags": ["pets"],
                    "parameters": [
                        {
                            "name": "limit",
                            "in": "query",
                            "description": "How many items to return",
                            "required": False,
                            "schema": {"type": "integer", "format": "int32"},
                        }
                    ],
                    "responses": {"200": {"description": "A paged array of pets"}},
                },
                "post": {
                    "summary": "Create a pet",
                    "operationId": "createPet",
                    "tags": ["pets"],
                    "requestBody": {"$ref": "#/components/requestBodies/PetBody"},
                    "responses": {"201": {"description": "Null response"}},
                },
            },
            "/pets/{petId}": {
                "get": {
                    "summary": "Info for a specific pet",
                    "operationId": "showPetById",
                    "tags": ["pets"],
                    "parameters": [
                        {
                            "name": "petId",
                            "in": "path",
                            "required": True,
                            "description": "The id of the pet",
                            "schema": {"type": "string"},
                        },
                        {
                            "name": "X-Request-ID",
                            "in": "header",
                            "required": False,
                            "schema": {"type": "string", "format": "uuid"},
                        },
                    ],
                    "responses": {"200": {"description": "Information about the pet"}},
                },
                "parameters": [  # Path level parameter example
                    {
                        "name": "traceId",
                        "in": "header",
                        "description": "Common trace ID",
                        "required": False,
                        "schema": {"type": "string"},
                    }
                ],
            },
        },
        "components": {
            "schemas": {
                "Pet": {
                    "type": "object",
                    "required": ["id", "name"],
                    "properties": {
                        "id": {"type": "integer", "format": "int64"},
                        "name": {"type": "string"},
                        "tag": {"type": "string"},
                    },
                }
            },
            "requestBodies": {
                "PetBody": {
                    "description": "Pet object",
                    "required": True,
                    "content": {
                        "application/json": {
                            "schema": {"$ref": "#/components/schemas/Pet"}
                        }
                    },
                }
            },
        },
    }

    print("--- Parsing Pet Store Schema using openapi-pydantic (Corrected) ---")
    try:
        http_routes = parse_openapi_to_http_routes(petstore_schema)
        print(f"\n--- Extracted {len(http_routes)} Routes ---")
        for i, route in enumerate(http_routes):
            print(f"\nRoute {i + 1}:")
            # Use model_dump for clean JSON-like output, show aliases from IR model
            print(
                json.dumps(route.model_dump(by_alias=True, exclude_none=True), indent=2)
            )  # exclude_none is often cleaner
    except ValueError as e:
        print(f"\nError parsing schema: {e}")
    except Exception as e:
        print(f"\nAn unexpected error occurred: {e}")


def clean_schema_for_display(schema: JsonSchema | None) -> JsonSchema | None:
    """
    Clean up a schema dictionary for display by removing internal/complex fields.
    """
    if not schema or not isinstance(schema, dict):
        return schema

    # Make a copy to avoid modifying the input schema
    cleaned = schema.copy()

    # Fields commonly removed for simpler display to LLMs or users
    fields_to_remove = [
        "allOf",
        "anyOf",
        "oneOf",
        "not",  # Composition keywords
        "nullable",  # Handled by type unions usually
        "discriminator",
        "readOnly",
        "writeOnly",
        "deprecated",
        "xml",
        "externalDocs",
        # Can be verbose, maybe remove based on flag?
        # "pattern", "minLength", "maxLength",
        # "minimum", "maximum", "exclusiveMinimum", "exclusiveMaximum",
        # "multipleOf", "minItems", "maxItems", "uniqueItems",
        # "minProperties", "maxProperties"
    ]
    for field in fields_to_remove:
        if field in cleaned:
            cleaned.pop(field)

    # Recursively clean properties and items
    if "properties" in cleaned:
        cleaned["properties"] = {
            k: clean_schema_for_display(v) for k, v in cleaned["properties"].items()
        }
        # Remove properties section if empty after cleaning
        if not cleaned["properties"]:
            cleaned.pop("properties")

    if "items" in cleaned:
        cleaned["items"] = clean_schema_for_display(cleaned["items"])
        # Remove items section if empty after cleaning
        if not cleaned["items"]:
            cleaned.pop("items")

    if "additionalProperties" in cleaned:
        # Often verbose, can be simplified
        if isinstance(cleaned["additionalProperties"], dict):
            cleaned["additionalProperties"] = clean_schema_for_display(
                cleaned["additionalProperties"]
            )
        elif cleaned["additionalProperties"] is True:
            # Maybe keep 'true' or represent as 'Allows additional properties' text?
            pass  # Keep simple boolean for now

    # Remove title if it just repeats the property name (heuristic)
    # This requires knowing the property name, so better done when formatting properties dict

    return cleaned


def generate_example_from_schema(schema: JsonSchema | None) -> Any:
    """
    Generate a simple example value from a JSON schema dictionary.
    Very basic implementation focusing on types.
    """
    if not schema or not isinstance(schema, dict):
        return "unknown"  # Or None?

    # Use default value if provided
    if "default" in schema:
        return schema["default"]
    # Use first enum value if provided
    if "enum" in schema and isinstance(schema["enum"], list) and schema["enum"]:
        return schema["enum"][0]
    # Use first example if provided
    if (
        "examples" in schema
        and isinstance(schema["examples"], list)
        and schema["examples"]
    ):
        return schema["examples"][0]
    if "example" in schema:
        return schema["example"]

    schema_type = schema.get("type")

    if schema_type == "object":
        result = {}
        properties = schema.get("properties", {})
        if isinstance(properties, dict):
            # Generate example for first few properties or required ones? Limit complexity.
            required_props = set(schema.get("required", []))
            props_to_include = list(properties.keys())[
                :3
            ]  # Limit to first 3 for brevity
            for prop_name in props_to_include:
                if prop_name in properties:
                    result[prop_name] = generate_example_from_schema(
                        properties[prop_name]
                    )
            # Ensure required props are present if possible
            for req_prop in required_props:
                if req_prop not in result and req_prop in properties:
                    result[req_prop] = generate_example_from_schema(
                        properties[req_prop]
                    )
        return result if result else {"key": "value"}  # Basic object if no props

    elif schema_type == "array":
        items_schema = schema.get("items")
        if isinstance(items_schema, dict):
            # Generate one example item
            item_example = generate_example_from_schema(items_schema)
            return [item_example] if item_example is not None else []
        return ["example_item"]  # Fallback

    elif schema_type == "string":
        format_type = schema.get("format")
        if format_type == "date-time":
            return "2024-01-01T12:00:00Z"
        if format_type == "date":
            return "2024-01-01"
        if format_type == "email":
            return "user@example.com"
        if format_type == "uuid":
            return "123e4567-e89b-12d3-a456-426614174000"
        if format_type == "byte":
            return "ZXhhbXBsZQ=="  # "example" base64
        return "string"

    elif schema_type == "integer":
        return 1
    elif schema_type == "number":
        return 1.5
    elif schema_type == "boolean":
        return True
    elif schema_type == "null":
        return None

    # Fallback if type is unknown or missing
    return "unknown_type"


def format_json_for_description(data: Any, indent: int = 2) -> str:
    """Formats Python data as a JSON string block for markdown."""
    try:
        json_str = json.dumps(data, indent=indent)
        return f"```json\n{json_str}\n```"
    except TypeError:
        return f"```\nCould not serialize to JSON: {data}\n```"


def format_description_with_responses(
    base_description: str,
    responses: dict[
        str, Any
    ],  # Changed from specific ResponseInfo type to avoid circular imports
) -> str:
    """Formats the base description string with response information."""
    if not responses:
        return base_description

    desc_parts = [base_description]
    response_section = "\n\n**Responses:**"
    added_response_section = False

    # Determine success codes (common ones)
    success_codes = {"200", "201", "202", "204"}  # As strings
    success_status = next((s for s in success_codes if s in responses), None)

    # Process all responses
    responses_to_process = responses.items()

    for status_code, resp_info in sorted(responses_to_process):
        if not added_response_section:
            desc_parts.append(response_section)
            added_response_section = True

        status_marker = " (Success)" if status_code == success_status else ""
        desc_parts.append(
            f"\n- **{status_code}**{status_marker}: {resp_info.description or 'No description.'}"
        )

        # Process content schemas for this response
        if resp_info.content_schema:
            # Prioritize json, then take first available
            media_type = (
                "application/json"
                if "application/json" in resp_info.content_schema
                else next(iter(resp_info.content_schema), None)
            )

            if media_type:
                schema = resp_info.content_schema.get(media_type)
                desc_parts.append(f" - Content-Type: `{media_type}`")

                if schema:
                    # Generate Example
                    example = generate_example_from_schema(schema)
                    if example != "unknown_type" and example is not None:
                        desc_parts.append("\n - **Example:**")
                        desc_parts.append(
                            format_json_for_description(example, indent=2)
                        )

    return "\n".join(desc_parts)


def _combine_schemas(route: openapi.HTTPRoute) -> dict[str, Any]:
    """
    Combines parameter and request body schemas into a single schema.

    Args:
        route: HTTPRoute object

    Returns:
        Combined schema dictionary
    """
    properties = {}
    required = []

    # Add path parameters
    for param in route.parameters:
        if param.required:
            required.append(param.name)
        properties[param.name] = param.schema_

    # Add request body if it exists
    if route.request_body and route.request_body.content_schema:
        # For now, just use the first content type's schema
        content_type = next(iter(route.request_body.content_schema))
        body_schema = route.request_body.content_schema[content_type]
        body_props = body_schema.get("properties", {})
        for prop_name, prop_schema in body_props.items():
            properties[prop_name] = prop_schema
        if route.request_body.required:
            required.extend(body_schema.get("required", []))

    return {
        "type": "object",
        "properties": properties,
        "required": required,
    }