fast-agent-mcp 0.0.7 (fast_agent_mcp-0.0.7-py3-none-any.whl)
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of fast-agent-mcp might be problematic.
- fast_agent_mcp-0.0.7.dist-info/METADATA +322 -0
- fast_agent_mcp-0.0.7.dist-info/RECORD +100 -0
- fast_agent_mcp-0.0.7.dist-info/WHEEL +4 -0
- fast_agent_mcp-0.0.7.dist-info/entry_points.txt +5 -0
- fast_agent_mcp-0.0.7.dist-info/licenses/LICENSE +201 -0
- mcp_agent/__init__.py +0 -0
- mcp_agent/agents/__init__.py +0 -0
- mcp_agent/agents/agent.py +277 -0
- mcp_agent/app.py +303 -0
- mcp_agent/cli/__init__.py +0 -0
- mcp_agent/cli/__main__.py +4 -0
- mcp_agent/cli/commands/bootstrap.py +221 -0
- mcp_agent/cli/commands/config.py +11 -0
- mcp_agent/cli/commands/setup.py +229 -0
- mcp_agent/cli/main.py +68 -0
- mcp_agent/cli/terminal.py +24 -0
- mcp_agent/config.py +334 -0
- mcp_agent/console.py +28 -0
- mcp_agent/context.py +251 -0
- mcp_agent/context_dependent.py +48 -0
- mcp_agent/core/fastagent.py +1013 -0
- mcp_agent/eval/__init__.py +0 -0
- mcp_agent/event_progress.py +88 -0
- mcp_agent/executor/__init__.py +0 -0
- mcp_agent/executor/decorator_registry.py +120 -0
- mcp_agent/executor/executor.py +293 -0
- mcp_agent/executor/task_registry.py +34 -0
- mcp_agent/executor/temporal.py +405 -0
- mcp_agent/executor/workflow.py +197 -0
- mcp_agent/executor/workflow_signal.py +325 -0
- mcp_agent/human_input/__init__.py +0 -0
- mcp_agent/human_input/handler.py +49 -0
- mcp_agent/human_input/types.py +58 -0
- mcp_agent/logging/__init__.py +0 -0
- mcp_agent/logging/events.py +123 -0
- mcp_agent/logging/json_serializer.py +163 -0
- mcp_agent/logging/listeners.py +216 -0
- mcp_agent/logging/logger.py +365 -0
- mcp_agent/logging/rich_progress.py +120 -0
- mcp_agent/logging/tracing.py +140 -0
- mcp_agent/logging/transport.py +461 -0
- mcp_agent/mcp/__init__.py +0 -0
- mcp_agent/mcp/gen_client.py +85 -0
- mcp_agent/mcp/mcp_activity.py +18 -0
- mcp_agent/mcp/mcp_agent_client_session.py +242 -0
- mcp_agent/mcp/mcp_agent_server.py +56 -0
- mcp_agent/mcp/mcp_aggregator.py +394 -0
- mcp_agent/mcp/mcp_connection_manager.py +330 -0
- mcp_agent/mcp/stdio.py +104 -0
- mcp_agent/mcp_server_registry.py +275 -0
- mcp_agent/progress_display.py +10 -0
- mcp_agent/resources/examples/decorator/main.py +26 -0
- mcp_agent/resources/examples/decorator/optimizer.py +78 -0
- mcp_agent/resources/examples/decorator/orchestrator.py +68 -0
- mcp_agent/resources/examples/decorator/parallel.py +81 -0
- mcp_agent/resources/examples/decorator/router.py +56 -0
- mcp_agent/resources/examples/decorator/tiny.py +22 -0
- mcp_agent/resources/examples/mcp_researcher/main-evalopt.py +53 -0
- mcp_agent/resources/examples/mcp_researcher/main.py +38 -0
- mcp_agent/telemetry/__init__.py +0 -0
- mcp_agent/telemetry/usage_tracking.py +18 -0
- mcp_agent/workflows/__init__.py +0 -0
- mcp_agent/workflows/embedding/__init__.py +0 -0
- mcp_agent/workflows/embedding/embedding_base.py +61 -0
- mcp_agent/workflows/embedding/embedding_cohere.py +49 -0
- mcp_agent/workflows/embedding/embedding_openai.py +46 -0
- mcp_agent/workflows/evaluator_optimizer/__init__.py +0 -0
- mcp_agent/workflows/evaluator_optimizer/evaluator_optimizer.py +359 -0
- mcp_agent/workflows/intent_classifier/__init__.py +0 -0
- mcp_agent/workflows/intent_classifier/intent_classifier_base.py +120 -0
- mcp_agent/workflows/intent_classifier/intent_classifier_embedding.py +134 -0
- mcp_agent/workflows/intent_classifier/intent_classifier_embedding_cohere.py +45 -0
- mcp_agent/workflows/intent_classifier/intent_classifier_embedding_openai.py +45 -0
- mcp_agent/workflows/intent_classifier/intent_classifier_llm.py +161 -0
- mcp_agent/workflows/intent_classifier/intent_classifier_llm_anthropic.py +60 -0
- mcp_agent/workflows/intent_classifier/intent_classifier_llm_openai.py +60 -0
- mcp_agent/workflows/llm/__init__.py +0 -0
- mcp_agent/workflows/llm/augmented_llm.py +645 -0
- mcp_agent/workflows/llm/augmented_llm_anthropic.py +539 -0
- mcp_agent/workflows/llm/augmented_llm_openai.py +615 -0
- mcp_agent/workflows/llm/llm_selector.py +345 -0
- mcp_agent/workflows/llm/model_factory.py +175 -0
- mcp_agent/workflows/orchestrator/__init__.py +0 -0
- mcp_agent/workflows/orchestrator/orchestrator.py +407 -0
- mcp_agent/workflows/orchestrator/orchestrator_models.py +154 -0
- mcp_agent/workflows/orchestrator/orchestrator_prompts.py +113 -0
- mcp_agent/workflows/parallel/__init__.py +0 -0
- mcp_agent/workflows/parallel/fan_in.py +350 -0
- mcp_agent/workflows/parallel/fan_out.py +187 -0
- mcp_agent/workflows/parallel/parallel_llm.py +141 -0
- mcp_agent/workflows/router/__init__.py +0 -0
- mcp_agent/workflows/router/router_base.py +276 -0
- mcp_agent/workflows/router/router_embedding.py +240 -0
- mcp_agent/workflows/router/router_embedding_cohere.py +59 -0
- mcp_agent/workflows/router/router_embedding_openai.py +59 -0
- mcp_agent/workflows/router/router_llm.py +301 -0
- mcp_agent/workflows/swarm/__init__.py +0 -0
- mcp_agent/workflows/swarm/swarm.py +320 -0
- mcp_agent/workflows/swarm/swarm_anthropic.py +42 -0
- mcp_agent/workflows/swarm/swarm_openai.py +41 -0
mcp_agent/mcp/mcp_connection_manager.py
ADDED
@@ -0,0 +1,330 @@
"""
Manages the lifecycle of multiple MCP server connections.
"""

from datetime import timedelta
import asyncio
from typing import (
    AsyncGenerator,
    Callable,
    Dict,
    Optional,
    TYPE_CHECKING,
)

from anyio import Event, create_task_group, Lock
from anyio.streams.memory import MemoryObjectReceiveStream, MemoryObjectSendStream

from mcp import ClientSession
from mcp.client.stdio import (
    StdioServerParameters,
    get_default_environment,
)
from mcp.client.sse import sse_client
from mcp.types import JSONRPCMessage

from mcp_agent.config import MCPServerSettings
from mcp_agent.logging.logger import get_logger
from mcp_agent.mcp.stdio import stdio_client_with_rich_stderr
from mcp_agent.context_dependent import ContextDependent

if TYPE_CHECKING:
    from mcp_agent.mcp_server_registry import InitHookCallable, ServerRegistry
    from mcp_agent.context import Context

logger = get_logger(__name__)


class ServerConnection:
    """
    Represents a long-lived MCP server connection, including:
    - The ClientSession to the server
    - The transport streams (via stdio/sse, etc.)
    """

    def __init__(
        self,
        server_name: str,
        server_config: MCPServerSettings,
        transport_context_factory: Callable[
            [],
            AsyncGenerator[
                tuple[
                    MemoryObjectReceiveStream[JSONRPCMessage | Exception],
                    MemoryObjectSendStream[JSONRPCMessage],
                ],
                None,
            ],
        ],
        client_session_factory: Callable[
            [MemoryObjectReceiveStream, MemoryObjectSendStream, timedelta | None],
            ClientSession,
        ],
        init_hook: Optional["InitHookCallable"] = None,
    ):
        self.server_name = server_name
        self.server_config = server_config
        self.session: ClientSession | None = None
        self._client_session_factory = client_session_factory
        self._init_hook = init_hook
        self._transport_context_factory = transport_context_factory
        # Signal that session is fully up and initialized
        self._initialized_event = Event()

        # Signal we want to shut down
        self._shutdown_event = Event()

    def request_shutdown(self) -> None:
        """
        Request the server to shut down. Signals the server lifecycle task to exit.
        """
        self._shutdown_event.set()

    async def wait_for_shutdown_request(self) -> None:
        """
        Wait until the shutdown event is set.
        """
        await self._shutdown_event.wait()

    async def initialize_session(self) -> None:
        """
        Initializes the server connection and session.
        Must be called within an async context.
        """

        await self.session.initialize()

        # If there's an init hook, run it
        if self._init_hook:
            logger.info(f"{self.server_name}: Executing init hook.")
            self._init_hook(self.session, self.server_config.auth)

        # Now the session is ready for use
        self._initialized_event.set()

    async def wait_for_initialized(self) -> None:
        """
        Wait until the session is fully initialized.
        """
        await self._initialized_event.wait()

    def create_session(
        self,
        read_stream: MemoryObjectReceiveStream,
        send_stream: MemoryObjectSendStream,
    ) -> ClientSession:
        """
        Create a new session instance for this server connection.
        """

        read_timeout = (
            timedelta(seconds=self.server_config.read_timeout_seconds)
            if self.server_config.read_timeout_seconds
            else None
        )

        session = self._client_session_factory(read_stream, send_stream, read_timeout)

        # Make the server config available to the session for initialization
        if hasattr(session, "server_config"):
            session.server_config = self.server_config

        self.session = session

        return session


async def _server_lifecycle_task(server_conn: ServerConnection) -> None:
    """
    Manage the lifecycle of a single server connection.
    Runs inside the MCPConnectionManager's shared TaskGroup.
    """
    server_name = server_conn.server_name
    try:
        transport_context = server_conn._transport_context_factory()

        async with transport_context as (read_stream, write_stream):
            server_conn.create_session(read_stream, write_stream)

            async with server_conn.session:
                await server_conn.initialize_session()

                await server_conn.wait_for_shutdown_request()

    except Exception as exc:
        logger.error(
            f"{server_name}: Lifecycle task encountered an error: {exc}", exc_info=True
        )
        # If there's an error, we should also set the event so that
        # 'get_server' won't hang
        server_conn._initialized_event.set()
        raise


class MCPConnectionManager(ContextDependent):
    """
    Manages the lifecycle of multiple MCP server connections.
    Integrates with the application context system for proper resource management.
    """

    def __init__(
        self, server_registry: "ServerRegistry", context: Optional["Context"] = None
    ):
        super().__init__(context=context)
        self.server_registry = server_registry
        self.running_servers: Dict[str, ServerConnection] = {}
        self._lock = Lock()

    async def __aenter__(self):
        current_task = asyncio.current_task()

        # Get or create task group from context
        if not hasattr(self.context, "_connection_task_group"):
            self.context._connection_task_group = create_task_group()
            self.context._connection_task_group_context = current_task.get_name()
            await self.context._connection_task_group.__aenter__()

        self._tg = self.context._connection_task_group
        return self

    async def __aexit__(self, exc_type, exc_val, exc_tb):
        """Ensure clean shutdown of all connections before exiting."""
        current_task = asyncio.current_task()

        try:
            # First request all servers to shutdown
            await self.disconnect_all()

            # Only clean up task group if we're in the original context
            if (
                hasattr(self.context, "_connection_task_group")
                and current_task.get_name()
                == self.context._connection_task_group_context
            ):
                await self.context._connection_task_group.__aexit__(
                    exc_type, exc_val, exc_tb
                )
                delattr(self.context, "_connection_task_group")
                delattr(self.context, "_connection_task_group_context")
        except Exception as e:
            logger.error(f"Error during connection manager shutdown: {e}")

    async def launch_server(
        self,
        server_name: str,
        client_session_factory: Callable[
            [MemoryObjectReceiveStream, MemoryObjectSendStream, timedelta | None],
            ClientSession,
        ],
        init_hook: Optional["InitHookCallable"] = None,
    ) -> ServerConnection:
        """
        Connect to a server and return a RunningServer instance that will persist
        until explicitly disconnected.
        """
        if not self._tg:
            raise RuntimeError(
                "MCPConnectionManager must be used inside an async context (i.e. 'async with' or after __aenter__)."
            )

        config = self.server_registry.registry.get(server_name)
        if not config:
            raise ValueError(f"Server '{server_name}' not found in registry.")

        logger.debug(
            f"{server_name}: Found server configuration=", data=config.model_dump()
        )

        def transport_context_factory():
            if config.transport == "stdio":
                server_params = StdioServerParameters(
                    command=config.command,
                    args=config.args,
                    env={**get_default_environment(), **(config.env or {})},
                )
                # Create stdio client config with redirected stderr
                return stdio_client_with_rich_stderr(server_params)
            elif config.transport == "sse":
                return sse_client(config.url)
            else:
                raise ValueError(f"Unsupported transport: {config.transport}")

        server_conn = ServerConnection(
            server_name=server_name,
            server_config=config,
            transport_context_factory=transport_context_factory,
            client_session_factory=client_session_factory,
            init_hook=init_hook or self.server_registry.init_hooks.get(server_name),
        )

        async with self._lock:
            # Check if already running
            if server_name in self.running_servers:
                return self.running_servers[server_name]

            self.running_servers[server_name] = server_conn
            self._tg.start_soon(_server_lifecycle_task, server_conn)

        logger.info(f"{server_name}: Up and running with a persistent connection!")
        return server_conn

    async def get_server(
        self,
        server_name: str,
        client_session_factory: Callable,
        init_hook: Optional["InitHookCallable"] = None,
    ) -> ServerConnection:
        """
        Get a running server instance, launching it if needed.
        """
        # Get the server connection if it's already running
        async with self._lock:
            server_conn = self.running_servers.get(server_name)
            if server_conn:
                return server_conn

        # Launch the connection
        server_conn = await self.launch_server(
            server_name=server_name,
            client_session_factory=client_session_factory,
            init_hook=init_hook,
        )

        # Wait until it's fully initialized, or an error occurs
        await server_conn.wait_for_initialized()

        # If the session is still None, it means the lifecycle task crashed
        if not server_conn or not server_conn.session:
            raise RuntimeError(
                f"{server_name}: Failed to initialize server; check logs for errors."
            )
        return server_conn

    async def disconnect_server(self, server_name: str) -> None:
        """
        Disconnect a specific server if it's running under this connection manager.
        """
        logger.info(f"{server_name}: Disconnecting persistent connection to server...")

        async with self._lock:
            server_conn = self.running_servers.pop(server_name, None)
            if server_conn:
                server_conn.request_shutdown()
                logger.info(
                    f"{server_name}: Shutdown signal sent (lifecycle task will exit)."
                )
            else:
                logger.info(
                    f"{server_name}: No persistent connection found. Skipping server shutdown"
                )

    async def disconnect_all(self) -> None:
        """Disconnect all servers that are running under this connection manager."""
        async with self._lock:
            if not self.running_servers:
                return

            for name, conn in self.running_servers.items():
                conn.request_shutdown()

            self.running_servers.clear()
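
Editor's note: for orientation, here is a minimal usage sketch of the connection manager above. It is not part of the package diff; the server name "fetch" and the demo wrapper are illustrative, and it assumes a ServerRegistry already populated from configuration and an available application Context (ContextDependent falls back to a global context when none is passed). mcp.ClientSession is used directly as the session factory since it matches the (read_stream, write_stream, read_timeout) signature expected above.

# Illustrative usage sketch (not part of the released package).
import anyio
from mcp import ClientSession

from mcp_agent.mcp.mcp_connection_manager import MCPConnectionManager


async def demo(server_registry) -> None:
    # The manager opens a shared task group on __aenter__ and, on __aexit__,
    # requests shutdown of every running server before tearing it down.
    async with MCPConnectionManager(server_registry) as manager:
        # Launches the "fetch" server (hypothetical name) if it is not already
        # running, then waits until MCP initialization has completed.
        server_conn = await manager.get_server(
            "fetch", client_session_factory=ClientSession
        )
        result = await server_conn.session.list_tools()
        print([tool.name for tool in result.tools])
        # Signal the lifecycle task to exit and drop the persistent connection.
        await manager.disconnect_server("fetch")


# anyio.run(demo, server_registry)  # with a configured ServerRegistry instance
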
mcp_agent/mcp/stdio.py
ADDED
@@ -0,0 +1,104 @@
"""
Custom implementation of stdio_client that handles stderr through rich console.
"""

from contextlib import asynccontextmanager
import subprocess
import anyio
from anyio.streams.text import TextReceiveStream
from mcp.client.stdio import StdioServerParameters, get_default_environment
import mcp.types as types
from mcp_agent.logging.logger import get_logger

logger = get_logger(__name__)


@asynccontextmanager
async def stdio_client_with_rich_stderr(server: StdioServerParameters):
    """
    Modified version of stdio_client that captures stderr and routes it through our rich console.
    Follows the original pattern closely for reliability.

    Args:
        server: The server parameters for the stdio connection
    """
    read_stream_writer, read_stream = anyio.create_memory_object_stream(0)
    write_stream, write_stream_reader = anyio.create_memory_object_stream(0)

    # Open process with stderr piped for capture
    process = await anyio.open_process(
        [server.command, *server.args],
        env=server.env if server.env is not None else get_default_environment(),
        stderr=subprocess.PIPE,
    )

    if process.pid:
        logger.debug(f"Started process '{server.command}' with PID: {process.pid}")

    if process.returncode is not None:
        logger.debug(f"return code (early){process.returncode}")
        raise RuntimeError(
            f"Process terminated immediately with code {process.returncode}"
        )

    async def stdout_reader():
        assert process.stdout, "Opened process is missing stdout"
        try:
            async with read_stream_writer:
                buffer = ""
                async for chunk in TextReceiveStream(
                    process.stdout,
                    encoding=server.encoding,
                    errors=server.encoding_error_handler,
                ):
                    lines = (buffer + chunk).split("\n")
                    buffer = lines.pop()

                    for line in lines:
                        if not line:
                            continue
                        try:
                            message = types.JSONRPCMessage.model_validate_json(line)
                        except Exception as exc:
                            await read_stream_writer.send(exc)
                            continue

                        await read_stream_writer.send(message)
        except anyio.ClosedResourceError:
            await anyio.lowlevel.checkpoint()

    async def stderr_reader():
        assert process.stderr, "Opened process is missing stderr"
        try:
            async for chunk in TextReceiveStream(
                process.stderr,
                encoding=server.encoding,
                errors=server.encoding_error_handler,
            ):
                if chunk.strip():
                    # Let the logging system handle the formatting consistently
                    logger.event("info", "mcpserver.stderr", chunk.rstrip(), None, {})
        except anyio.ClosedResourceError:
            await anyio.lowlevel.checkpoint()

    async def stdin_writer():
        assert process.stdin, "Opened process is missing stdin"
        try:
            async with write_stream_reader:
                async for message in write_stream_reader:
                    json = message.model_dump_json(by_alias=True, exclude_none=True)
                    await process.stdin.send(
                        (json + "\n").encode(
                            encoding=server.encoding,
                            errors=server.encoding_error_handler,
                        )
                    )
        except anyio.ClosedResourceError:
            await anyio.lowlevel.checkpoint()

    # Use context managers to handle cleanup automatically
    async with anyio.create_task_group() as tg, process:
        tg.start_soon(stdout_reader)
        tg.start_soon(stdin_writer)
        tg.start_soon(stderr_reader)
        yield read_stream, write_stream