fastmcp 2.2.9__py3-none-any.whl → 2.3.0rc1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- fastmcp/__init__.py +2 -1
- fastmcp/cli/cli.py +1 -1
- fastmcp/client/client.py +3 -2
- fastmcp/client/transports.py +45 -1
- fastmcp/prompts/prompt.py +10 -15
- fastmcp/prompts/prompt_manager.py +3 -10
- fastmcp/resources/resource.py +2 -7
- fastmcp/resources/resource_manager.py +2 -4
- fastmcp/resources/template.py +11 -24
- fastmcp/resources/types.py +15 -44
- fastmcp/server/__init__.py +1 -0
- fastmcp/server/context.py +50 -38
- fastmcp/server/dependencies.py +35 -0
- fastmcp/server/http.py +309 -0
- fastmcp/server/openapi.py +5 -16
- fastmcp/server/proxy.py +4 -13
- fastmcp/server/server.py +196 -271
- fastmcp/server/streamable_http_manager.py +241 -0
- fastmcp/settings.py +20 -0
- fastmcp/tools/tool.py +40 -33
- fastmcp/tools/tool_manager.py +3 -9
- fastmcp/utilities/cache.py +26 -0
- fastmcp/utilities/tests.py +113 -0
- fastmcp/utilities/types.py +4 -7
- {fastmcp-2.2.9.dist-info → fastmcp-2.3.0rc1.dist-info}/METADATA +6 -2
- fastmcp-2.3.0rc1.dist-info/RECORD +55 -0
- fastmcp/utilities/http.py +0 -44
- fastmcp-2.2.9.dist-info/RECORD +0 -51
- {fastmcp-2.2.9.dist-info → fastmcp-2.3.0rc1.dist-info}/WHEEL +0 -0
- {fastmcp-2.2.9.dist-info → fastmcp-2.3.0rc1.dist-info}/entry_points.txt +0 -0
- {fastmcp-2.2.9.dist-info → fastmcp-2.3.0rc1.dist-info}/licenses/LICENSE +0 -0
fastmcp/server/streamable_http_manager.py
ADDED
@@ -0,0 +1,241 @@
+"""StreamableHTTP Session Manager for MCP servers."""
+
+# follows https://github.com/modelcontextprotocol/python-sdk/blob/ihrpr/shttp/src/mcp/server/streamable_http_manager.py
+# and can be removed once that spec is finalized
+
+from __future__ import annotations
+
+import contextlib
+import logging
+from collections.abc import AsyncIterator
+from http import HTTPStatus
+from typing import Any
+from uuid import uuid4
+
+import anyio
+from anyio.abc import TaskStatus
+from mcp.server.lowlevel.server import Server as MCPServer
+from mcp.server.streamable_http import (
+    MCP_SESSION_ID_HEADER,
+    EventStore,
+    StreamableHTTPServerTransport,
+)
+from starlette.requests import Request
+from starlette.responses import Response
+from starlette.types import Receive, Scope, Send
+
+logger = logging.getLogger(__name__)
+
+
+class StreamableHTTPSessionManager:
+    """
+    Manages StreamableHTTP sessions with optional resumability via event store.
+
+    This class abstracts away the complexity of session management, event storage,
+    and request handling for StreamableHTTP transports. It handles:
+
+    1. Session tracking for clients
+    2. Resumability via an optional event store
+    3. Connection management and lifecycle
+    4. Request handling and transport setup
+
+    Args:
+        app: The MCP server instance
+        event_store: Optional event store for resumability support.
+            If provided, enables resumable connections where clients
+            can reconnect and receive missed events.
+            If None, sessions are still tracked but not resumable.
+        json_response: Whether to use JSON responses instead of SSE streams
+        stateless: If True, creates a completely fresh transport for each request
+            with no session tracking or state persistence between requests.
+    """
+
+    def __init__(
+        self,
+        app: MCPServer[Any],
+        event_store: EventStore | None = None,
+        json_response: bool = False,
+        stateless: bool = False,
+    ):
+        self.app = app
+        self.event_store = event_store
+        self.json_response = json_response
+        self.stateless = stateless
+
+        # Session tracking (only used if not stateless)
+        self._session_creation_lock = anyio.Lock()
+        self._server_instances: dict[str, StreamableHTTPServerTransport] = {}
+
+        # The task group will be set during lifespan
+        self._task_group = None
+
+    @contextlib.asynccontextmanager
+    async def run(self) -> AsyncIterator[None]:
+        """
+        Run the session manager with proper lifecycle management.
+
+        This creates and manages the task group for all session operations.
+
+        Use this in the lifespan context manager of your Starlette app:
+
+            @contextlib.asynccontextmanager
+            async def lifespan(app: Starlette) -> AsyncIterator[None]:
+                async with session_manager.run():
+                    yield
+        """
+        async with anyio.create_task_group() as tg:
+            # Store the task group for later use
+            self._task_group = tg
+            logger.info("StreamableHTTP session manager started")
+            try:
+                yield  # Let the application run
+            finally:
+                logger.info("StreamableHTTP session manager shutting down")
+                # Cancel task group to stop all spawned tasks
+                tg.cancel_scope.cancel()
+                self._task_group = None
+                # Clear any remaining server instances
+                self._server_instances.clear()
+
+    async def handle_request(
+        self,
+        scope: Scope,
+        receive: Receive,
+        send: Send,
+    ) -> None:
+        """
+        Process ASGI request with proper session handling and transport setup.
+
+        Dispatches to the appropriate handler based on stateless mode.
+
+        Args:
+            scope: ASGI scope
+            receive: ASGI receive function
+            send: ASGI send function
+        """
+        if self._task_group is None:
+            raise RuntimeError(
+                "Task group is not initialized. Make sure to use the run()."
+            )
+
+        # Dispatch to the appropriate handler
+        if self.stateless:
+            await self._handle_stateless_request(scope, receive, send)
+        else:
+            await self._handle_stateful_request(scope, receive, send)
+
+    async def _handle_stateless_request(
+        self,
+        scope: Scope,
+        receive: Receive,
+        send: Send,
+    ) -> None:
+        """
+        Process request in stateless mode - creating a new transport for each request.
+
+        Args:
+            scope: ASGI scope
+            receive: ASGI receive function
+            send: ASGI send function
+        """
+        logger.debug("Stateless mode: Creating new transport for this request")
+        # No session ID needed in stateless mode
+        http_transport = StreamableHTTPServerTransport(
+            mcp_session_id=None,  # No session tracking in stateless mode
+            is_json_response_enabled=self.json_response,
+            event_store=None,  # No event store in stateless mode
+        )
+
+        # Start server in a new task
+        async def run_stateless_server(
+            *, task_status: TaskStatus[None] = anyio.TASK_STATUS_IGNORED
+        ):
+            async with http_transport.connect() as streams:
+                read_stream, write_stream = streams
+                task_status.started()
+                await self.app.run(
+                    read_stream,
+                    write_stream,
+                    self.app.create_initialization_options(),
+                    stateless=True,
+                )
+
+        # Assert task group is not None for type checking
+        assert self._task_group is not None
+        # Start the server task
+        await self._task_group.start(run_stateless_server)
+
+        # Handle the HTTP request and return the response
+        await http_transport.handle_request(scope, receive, send)
+
+    async def _handle_stateful_request(
+        self,
+        scope: Scope,
+        receive: Receive,
+        send: Send,
+    ) -> None:
+        """
+        Process request in stateful mode - maintaining session state between requests.
+
+        Args:
+            scope: ASGI scope
+            receive: ASGI receive function
+            send: ASGI send function
+        """
+        request = Request(scope, receive)
+        request_mcp_session_id = request.headers.get(MCP_SESSION_ID_HEADER)
+
+        # Existing session case
+        if (
+            request_mcp_session_id is not None
+            and request_mcp_session_id in self._server_instances
+        ):
+            transport = self._server_instances[request_mcp_session_id]
+            logger.debug("Session already exists, handling request directly")
+            await transport.handle_request(scope, receive, send)
+            return
+
+        if request_mcp_session_id is None:
+            # New session case
+            logger.debug("Creating new transport")
+            async with self._session_creation_lock:
+                new_session_id = uuid4().hex
+                http_transport = StreamableHTTPServerTransport(
+                    mcp_session_id=new_session_id,
+                    is_json_response_enabled=self.json_response,
+                    event_store=self.event_store,  # May be None (no resumability)
+                )
+
+                assert http_transport.mcp_session_id is not None
+                self._server_instances[http_transport.mcp_session_id] = http_transport
+                logger.info(f"Created new transport with session ID: {new_session_id}")
+
+                # Define the server runner
+                async def run_server(
+                    *, task_status: TaskStatus[None] = anyio.TASK_STATUS_IGNORED
+                ) -> None:
+                    async with http_transport.connect() as streams:
+                        read_stream, write_stream = streams
+                        task_status.started()
+                        await self.app.run(
+                            read_stream,
+                            write_stream,
+                            self.app.create_initialization_options(),
+                            stateless=False,  # Stateful mode
+                        )
+
+                # Assert task group is not None for type checking
+                assert self._task_group is not None
+                # Start the server task
+                await self._task_group.start(run_server)
+
+                # Handle the HTTP request and return the response
+                await http_transport.handle_request(scope, receive, send)
+        else:
+            # Invalid session ID
+            response = Response(
+                "Bad Request: No valid session ID provided",
+                status_code=HTTPStatus.BAD_REQUEST,
+            )
+            await response(scope, receive, send)
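The docstring above shows the intended lifespan usage, but the actual Starlette glue lives in the new fastmcp/server/http.py, which is not expanded in this diff. The following is a minimal sketch (not fastmcp's own wiring) of how run() and handle_request() could be combined; the `mcp_server` name and the "/mcp" mount path are assumptions.

```python
# Minimal sketch (assumed names, not fastmcp's actual http.py): wire the session
# manager's lifespan and ASGI handler into a Starlette app.
import contextlib
from collections.abc import AsyncIterator

from starlette.applications import Starlette
from starlette.routing import Mount

# `mcp_server` is assumed to be an already-constructed low-level MCP server instance.
session_manager = StreamableHTTPSessionManager(app=mcp_server)


@contextlib.asynccontextmanager
async def lifespan(app: Starlette) -> AsyncIterator[None]:
    # The task group that runs per-session server tasks only exists inside run().
    async with session_manager.run():
        yield


# handle_request(scope, receive, send) is itself an ASGI callable, so it can be mounted.
app = Starlette(
    routes=[Mount("/mcp", app=session_manager.handle_request)],
    lifespan=lifespan,
)
```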
fastmcp/settings.py
CHANGED
@@ -27,6 +27,16 @@ class Settings(BaseSettings):

     test_mode: bool = False
     log_level: LOG_LEVEL = "INFO"
+    tool_attempt_parse_json_args: bool = Field(
+        default=False,
+        description="""
+        Note: this enables a legacy behavior. If True, will attempt to parse
+        stringified JSON lists and objects strings in tool arguments before
+        passing them to the tool. This is an old behavior that can create
+        unexpected type coercion issues, but may be helpful for less powerful
+        LLMs that stringify JSON instead of passing actual lists and objects.
+        Defaults to False.""",
+    )


 class ServerSettings(BaseSettings):
@@ -51,6 +61,7 @@
     port: int = 8000
     sse_path: str = "/sse"
     message_path: str = "/messages/"
+    streamable_http_path: str = "/mcp"
     debug: bool = False

     # resource settings
@@ -72,6 +83,12 @@

     auth: AuthSettings | None = None

+    # StreamableHTTP settings
+    json_response: bool = False
+    stateless_http: bool = (
+        False  # If True, uses true stateless mode (new transport per request)
+    )
+

 class ClientSettings(BaseSettings):
     """FastMCP client settings."""
@@ -83,3 +100,6 @@
     )

     log_level: LOG_LEVEL = Field(default_factory=lambda: Settings().log_level)
+
+
+settings = Settings()
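A short sketch of reading the new settings from Python. The values shown are simply the defaults visible in this diff; constructing ServerSettings directly assumes its remaining fields also carry defaults.

```python
# Sketch: inspecting the new settings added in 2.3.0rc1.
import fastmcp
from fastmcp.settings import ServerSettings

# module-level singleton added at the bottom of settings.py
print(fastmcp.settings.settings.tool_attempt_parse_json_args)  # False (legacy JSON parsing off)

server_settings = ServerSettings()            # assumes the other fields all have defaults
print(server_settings.streamable_http_path)   # "/mcp"
print(server_settings.json_response)          # False
print(server_settings.stateless_http)         # False
```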
fastmcp/tools/tool.py
CHANGED
@@ -10,7 +10,9 @@ from mcp.types import EmbeddedResource, ImageContent, TextContent, ToolAnnotatio
 from mcp.types import Tool as MCPTool
 from pydantic import BaseModel, BeforeValidator, Field

+import fastmcp
 from fastmcp.exceptions import ToolError
+from fastmcp.server.dependencies import get_context
 from fastmcp.utilities.json_schema import prune_params
 from fastmcp.utilities.logging import get_logger
 from fastmcp.utilities.types import (
@@ -21,10 +23,7 @@ from fastmcp.utilities.types import (
 )

 if TYPE_CHECKING:
-
-    from mcp.shared.context import LifespanContextT
-
-    from fastmcp.server import Context
+    pass

 logger = get_logger(__name__)

@@ -40,9 +39,6 @@ class Tool(BaseModel):
     name: str = Field(description="Name of the tool")
     description: str = Field(description="Description of what the tool does")
     parameters: dict[str, Any] = Field(description="JSON schema for tool parameters")
-    context_kwarg: str | None = Field(
-        None, description="Name of the kwarg that should receive context"
-    )
     tags: Annotated[set[str], BeforeValidator(_convert_set_defaults)] = Field(
         default_factory=set, description="Tags for the tool"
     )
@@ -59,13 +55,12 @@
         fn: Callable[..., Any],
         name: str | None = None,
         description: str | None = None,
-        context_kwarg: str | None = None,
         tags: set[str] | None = None,
         annotations: ToolAnnotations | None = None,
         serializer: Callable[[Any], str] | None = None,
     ) -> Tool:
         """Create a Tool from a function."""
-        from fastmcp import Context
+        from fastmcp.server.context import Context

         # Reject functions with *args or **kwargs
         sig = inspect.signature(fn)
@@ -85,8 +80,7 @@
         type_adapter = get_cached_typeadapter(fn)
         schema = type_adapter.json_schema()

-
-        context_kwarg = find_kwarg_by_type(fn, kwarg_type=Context)
+        context_kwarg = find_kwarg_by_type(fn, kwarg_type=Context)
         if context_kwarg:
             schema = prune_params(schema, params=[context_kwarg])

@@ -95,42 +89,55 @@
             name=func_name,
             description=func_doc,
             parameters=schema,
-            context_kwarg=context_kwarg,
             tags=tags or set(),
             annotations=annotations,
             serializer=serializer,
         )

     async def run(
-        self,
-        arguments: dict[str, Any],
-        context: Context[ServerSessionT, LifespanContextT] | None = None,
+        self, arguments: dict[str, Any]
     ) -> list[TextContent | ImageContent | EmbeddedResource]:
         """Run the tool with arguments."""
-
-        injected_args = (
-            {self.context_kwarg: context} if self.context_kwarg is not None else {}
-        )
+        from fastmcp.server.context import Context

-
+        arguments = arguments.copy()

-
-
-
-
-
-
-
-
+        try:
+            context_kwarg = find_kwarg_by_type(self.fn, kwarg_type=Context)
+            if context_kwarg and context_kwarg not in arguments:
+                arguments[context_kwarg] = get_context()
+
+            if fastmcp.settings.settings.tool_attempt_parse_json_args:
+                # Pre-parse data from JSON in order to handle cases like `["a", "b", "c"]`
+                # being passed in as JSON inside a string rather than an actual list.
+                #
+                # Claude desktop is prone to this - in fact it seems incapable of NOT doing
+                # this. For sub-models, it tends to pass dicts (JSON objects) as JSON strings,
+                # which can be pre-parsed here.
+                signature = inspect.signature(self.fn)
+                for param_name in self.parameters["properties"]:
+                    arg = arguments.get(param_name, None)
+                    # if not in signature, we won't have annotations, so skip logic
+                    if param_name not in signature.parameters:
+                        continue
+                    # if not a string, we won't have a JSON to parse, so skip logic
+                    if not isinstance(arg, str):
+                        continue
+                    # skip if the type is a simple type (int, float, bool)
+                    if signature.parameters[param_name].annotation in (
+                        int,
+                        float,
+                        bool,
+                    ):
+                        continue
                     try:
-
+                        arguments[param_name] = json.loads(arg)
+
                     except json.JSONDecodeError:
                         pass

-            type_adapter = get_cached_typeadapter(
-
-            )
-            result = type_adapter.validate_python(parsed_args | injected_args)
+            type_adapter = get_cached_typeadapter(self.fn)
+            result = type_adapter.validate_python(arguments)
             if inspect.isawaitable(result):
                 result = await result

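With context_kwarg gone from the Tool model, the Context-typed parameter is now discovered by type at call time and filled from get_context(). A small sketch of what this looks like from the decorator side (standard FastMCP usage; the server and tool names here are made up):

```python
# Sketch: the Context-typed parameter is injected via get_context() at call time,
# so nothing passes a context into Tool.run() explicitly anymore.
from fastmcp import Context, FastMCP

mcp = FastMCP("demo")


@mcp.tool()
async def greet(name: str, ctx: Context) -> str:
    """Greet someone and log it."""
    await ctx.info(f"greeting {name}")  # ctx comes from get_context(), not from the caller
    return f"Hello, {name}!"
```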
fastmcp/tools/tool_manager.py
CHANGED
@@ -3,7 +3,6 @@ from __future__ import annotations as _annotations
 from collections.abc import Callable
 from typing import TYPE_CHECKING, Any

-from mcp.shared.context import LifespanContextT
 from mcp.types import EmbeddedResource, ImageContent, TextContent, ToolAnnotations

 from fastmcp.exceptions import NotFoundError
@@ -12,9 +11,7 @@ from fastmcp.tools.tool import Tool
 from fastmcp.utilities.logging import get_logger

 if TYPE_CHECKING:
-
-
-    from fastmcp.server import Context
+    pass

 logger = get_logger(__name__)

@@ -98,14 +95,11 @@
         return tool

     async def call_tool(
-        self,
-        key: str,
-        arguments: dict[str, Any],
-        context: Context[ServerSessionT, LifespanContextT] | None = None,
+        self, key: str, arguments: dict[str, Any]
     ) -> list[TextContent | ImageContent | EmbeddedResource]:
         """Call a tool by name with arguments."""
         tool = self.get_tool(key)
         if not tool:
             raise NotFoundError(f"Unknown tool: {key}")

-        return await tool.run(arguments
+        return await tool.run(arguments)
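Seen from the caller's side, the manager now just forwards `tool.run(arguments)`. A runnable sketch using only APIs visible in this diff (Tool.from_function and the single-argument run); the example function and the output comment are illustrative:

```python
# Sketch: build a Tool and call it through the new single-argument run().
import anyio

from fastmcp.tools.tool import Tool


def add(a: int, b: int) -> int:
    """Add two integers."""
    return a + b


tool = Tool.from_function(add)
result = anyio.run(tool.run, {"a": 1, "b": 2})
print(result)  # roughly: [TextContent(type='text', text='3')]
```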
fastmcp/utilities/cache.py
ADDED
@@ -0,0 +1,26 @@
+import datetime
+from typing import Any
+
+UTC = datetime.timezone.utc
+
+
+class TimedCache:
+    NOT_FOUND = object()
+
+    def __init__(self, expiration: datetime.timedelta):
+        self.expiration = expiration
+        self.cache: dict[Any, tuple[Any, datetime.datetime]] = {}
+
+    def set(self, key: Any, value: Any) -> None:
+        expires = datetime.datetime.now(UTC) + self.expiration
+        self.cache[key] = (value, expires)
+
+    def get(self, key: Any) -> Any:
+        value = self.cache.get(key)
+        if value is not None and value[1] > datetime.datetime.now(UTC):
+            return value[0]
+        else:
+            return self.NOT_FOUND
+
+    def clear(self) -> None:
+        self.cache.clear()
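A brief usage sketch of the new TimedCache (the key and values are arbitrary). Note that get() returns the NOT_FOUND sentinel rather than None for missing or expired entries:

```python
import datetime

from fastmcp.utilities.cache import TimedCache

cache = TimedCache(expiration=datetime.timedelta(seconds=5))
cache.set("tool_list", ["greet", "add"])

value = cache.get("tool_list")
if value is TimedCache.NOT_FOUND:  # missing or expired
    value = ["greet", "add"]       # recompute...
    cache.set("tool_list", value)  # ...and repopulate
print(value)
```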
fastmcp/utilities/tests.py
ADDED
@@ -0,0 +1,113 @@
+from __future__ import annotations
+
+import copy
+import multiprocessing
+import socket
+import time
+from collections.abc import Callable, Generator
+from contextlib import contextmanager
+from typing import TYPE_CHECKING, Any, Literal
+
+import uvicorn
+
+from fastmcp.settings import settings
+
+if TYPE_CHECKING:
+    from fastmcp.server.server import FastMCP
+
+
+@contextmanager
+def temporary_settings(**kwargs: Any):
+    """
+    Temporarily override ControlFlow setting values.
+
+    Args:
+        **kwargs: The settings to override, including nested settings.
+
+    Example:
+        Temporarily override a setting:
+        ```python
+        import fastmcp
+        from fastmcp.utilities.tests import temporary_settings
+
+        with temporary_settings(log_level='DEBUG'):
+            assert fastmcp.settings.settings.log_level == 'DEBUG'
+        assert fastmcp.settings.settings.log_level == 'INFO'
+        ```
+    """
+    old_settings = copy.deepcopy(settings.model_dump())
+
+    try:
+        # apply the new settings
+        for attr, value in kwargs.items():
+            if not hasattr(settings, attr):
+                raise AttributeError(f"Setting {attr} does not exist.")
+            setattr(settings, attr, value)
+        yield
+
+    finally:
+        # restore the old settings
+        for attr in kwargs:
+            if hasattr(settings, attr):
+                setattr(settings, attr, old_settings[attr])
+
+
+def _run_server(mcp_server: FastMCP, transport: Literal["sse"], port: int) -> None:
+    # Some Starlette apps are not pickleable, so we need to create them here based on the indicated transport
+    if transport == "sse":
+        app = mcp_server.sse_app()
+    else:
+        raise ValueError(f"Invalid transport: {transport}")
+    uvicorn_server = uvicorn.Server(
+        config=uvicorn.Config(
+            app=app,
+            host="127.0.0.1",
+            port=port,
+            log_level="error",
+        )
+    )
+    uvicorn_server.run()
+
+
+@contextmanager
+def run_server_in_process(
+    server_fn: Callable[[str, int], None],
+) -> Generator[str, None, None]:
+    """
+    Context manager that runs a Starlette app in a separate process and returns the
+    server URL. When the context manager is exited, the server process is killed.
+
+    Args:
+        app: The Starlette app to run.
+
+    Returns:
+        The server URL.
+    """
+    host = "127.0.0.1"
+    with socket.socket() as s:
+        s.bind((host, 0))
+        port = s.getsockname()[1]
+
+    proc = multiprocessing.Process(target=server_fn, args=(host, port), daemon=True)
+    proc.start()
+
+    # Wait for server to be running
+    max_attempts = 100
+    attempt = 0
+    while attempt < max_attempts and proc.is_alive():
+        try:
+            with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
+                s.connect((host, port))
+                break
+        except ConnectionRefusedError:
+            time.sleep(0.01)
+            attempt += 1
+    else:
+        raise RuntimeError(f"Server failed to start after {max_attempts} attempts")
+
+    yield f"http://{host}:{port}"
+
+    proc.kill()
+    proc.join(timeout=2)
+    if proc.is_alive():
+        raise RuntimeError("Server process failed to terminate")
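A sketch of how run_server_in_process might be used from a test. The helper calls server_fn(host, port), so the wrapper below matches that signature; building the FastMCP server inside the child process sidesteps pickling. The server name and the mcp.run() keyword arguments are assumptions, not fastmcp's actual test fixtures:

```python
from fastmcp import FastMCP
from fastmcp.utilities.tests import run_server_in_process


def serve(host: str, port: int) -> None:
    # Construct the server inside the child process so nothing unpicklable crosses the boundary.
    mcp = FastMCP("test-server")
    mcp.run(transport="sse", host=host, port=port)  # assumed kwargs for the SSE transport


if __name__ == "__main__":
    with run_server_in_process(serve) as url:
        print(f"test server available at {url}/sse")
```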
fastmcp/utilities/types.py
CHANGED
@@ -6,26 +6,23 @@ from collections.abc import Callable
 from functools import lru_cache
 from pathlib import Path
 from types import UnionType
-from typing import Annotated,
+from typing import Annotated, TypeVar, Union, get_args, get_origin

 from mcp.types import ImageContent
-from pydantic import
+from pydantic import TypeAdapter

 T = TypeVar("T")


 @lru_cache(maxsize=5000)
-def get_cached_typeadapter(
-    cls: T, config: frozenset[tuple[str, Any]] | None = None
-) -> TypeAdapter[T]:
+def get_cached_typeadapter(cls: T) -> TypeAdapter[T]:
     """
     TypeAdapters are heavy objects, and in an application context we'd typically
     create them once in a global scope and reuse them as often as possible.
     However, this isn't feasible for user-generated functions. Instead, we use a
     cache to minimize the cost of creating them as much as possible.
     """
-
-    return TypeAdapter(cls, config=ConfigDict(**config_dict))
+    return TypeAdapter(cls)


 def issubclass_safe(cls: type, base: type) -> bool:
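The simplified helper now keys the lru_cache on the callable or type alone. A tiny sketch mirroring how tool.py uses it elsewhere in this diff (the example function is made up):

```python
from fastmcp.utilities.types import get_cached_typeadapter


def add(a: int, b: int) -> int:
    """Add two integers."""
    return a + b


adapter = get_cached_typeadapter(add)
print(adapter.validate_python({"a": 1, "b": 2}))  # validates the args, then calls add -> 3
assert get_cached_typeadapter(add) is adapter     # lru_cache hands back the same adapter
```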
{fastmcp-2.2.9.dist-info → fastmcp-2.3.0rc1.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: fastmcp
-Version: 2.2.9
+Version: 2.3.0rc1
 Summary: The fast, Pythonic way to build MCP servers.
 Project-URL: Homepage, https://gofastmcp.com
 Project-URL: Repository, https://github.com/jlowin/fastmcp
@@ -19,7 +19,7 @@ Classifier: Typing :: Typed
 Requires-Python: >=3.10
 Requires-Dist: exceptiongroup>=1.2.2
 Requires-Dist: httpx>=0.28.1
-Requires-Dist: mcp
+Requires-Dist: mcp
 Requires-Dist: openapi-pydantic>=0.5.1
 Requires-Dist: python-dotenv>=1.1.0
 Requires-Dist: rich>=13.9.4
@@ -379,6 +379,10 @@ Run tests using pytest:
 ```bash
 pytest
 ```
+or if you want an overview of the code coverage
+```bash
+uv run pytest --cov=src --cov=examples --cov-report=html
+```

 ### Static Checks
