fast-agent-mcp 0.1.11__py3-none-any.whl → 0.1.13__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {fast_agent_mcp-0.1.11.dist-info → fast_agent_mcp-0.1.13.dist-info}/METADATA +1 -1
- fast_agent_mcp-0.1.13.dist-info/RECORD +164 -0
- mcp_agent/agents/agent.py +37 -102
- mcp_agent/app.py +16 -27
- mcp_agent/cli/commands/bootstrap.py +22 -52
- mcp_agent/cli/commands/config.py +4 -4
- mcp_agent/cli/commands/setup.py +11 -26
- mcp_agent/cli/main.py +6 -9
- mcp_agent/cli/terminal.py +2 -2
- mcp_agent/config.py +1 -5
- mcp_agent/context.py +13 -26
- mcp_agent/context_dependent.py +3 -7
- mcp_agent/core/agent_app.py +46 -122
- mcp_agent/core/agent_types.py +29 -2
- mcp_agent/core/agent_utils.py +3 -5
- mcp_agent/core/decorators.py +6 -14
- mcp_agent/core/enhanced_prompt.py +25 -52
- mcp_agent/core/error_handling.py +1 -1
- mcp_agent/core/exceptions.py +8 -8
- mcp_agent/core/factory.py +30 -72
- mcp_agent/core/fastagent.py +48 -88
- mcp_agent/core/mcp_content.py +10 -19
- mcp_agent/core/prompt.py +8 -15
- mcp_agent/core/proxies.py +34 -25
- mcp_agent/core/request_params.py +46 -0
- mcp_agent/core/types.py +6 -6
- mcp_agent/core/validation.py +16 -16
- mcp_agent/executor/decorator_registry.py +11 -23
- mcp_agent/executor/executor.py +8 -17
- mcp_agent/executor/task_registry.py +2 -4
- mcp_agent/executor/temporal.py +28 -74
- mcp_agent/executor/workflow.py +3 -5
- mcp_agent/executor/workflow_signal.py +17 -29
- mcp_agent/human_input/handler.py +4 -9
- mcp_agent/human_input/types.py +2 -3
- mcp_agent/logging/events.py +1 -5
- mcp_agent/logging/json_serializer.py +7 -6
- mcp_agent/logging/listeners.py +20 -23
- mcp_agent/logging/logger.py +15 -17
- mcp_agent/logging/rich_progress.py +10 -8
- mcp_agent/logging/tracing.py +4 -6
- mcp_agent/logging/transport.py +24 -24
- mcp_agent/mcp/gen_client.py +4 -12
- mcp_agent/mcp/interfaces.py +107 -88
- mcp_agent/mcp/mcp_agent_client_session.py +11 -19
- mcp_agent/mcp/mcp_agent_server.py +8 -10
- mcp_agent/mcp/mcp_aggregator.py +49 -122
- mcp_agent/mcp/mcp_connection_manager.py +16 -37
- mcp_agent/mcp/prompt_message_multipart.py +12 -18
- mcp_agent/mcp/prompt_serialization.py +13 -38
- mcp_agent/mcp/prompts/prompt_load.py +99 -0
- mcp_agent/mcp/prompts/prompt_server.py +21 -128
- mcp_agent/mcp/prompts/prompt_template.py +20 -42
- mcp_agent/mcp/resource_utils.py +8 -17
- mcp_agent/mcp/sampling.py +62 -64
- mcp_agent/mcp/stdio.py +11 -8
- mcp_agent/mcp_server/__init__.py +1 -1
- mcp_agent/mcp_server/agent_server.py +10 -17
- mcp_agent/mcp_server_registry.py +13 -35
- mcp_agent/resources/examples/data-analysis/analysis-campaign.py +1 -1
- mcp_agent/resources/examples/data-analysis/analysis.py +1 -1
- mcp_agent/resources/examples/data-analysis/slides.py +110 -0
- mcp_agent/resources/examples/internal/agent.py +2 -1
- mcp_agent/resources/examples/internal/job.py +2 -1
- mcp_agent/resources/examples/internal/prompt_category.py +1 -1
- mcp_agent/resources/examples/internal/prompt_sizing.py +3 -5
- mcp_agent/resources/examples/internal/sizer.py +2 -1
- mcp_agent/resources/examples/internal/social.py +2 -1
- mcp_agent/resources/examples/mcp_researcher/researcher-eval.py +1 -1
- mcp_agent/resources/examples/prompting/__init__.py +1 -1
- mcp_agent/resources/examples/prompting/agent.py +2 -1
- mcp_agent/resources/examples/prompting/image_server.py +5 -11
- mcp_agent/resources/examples/researcher/researcher-eval.py +1 -1
- mcp_agent/resources/examples/researcher/researcher-imp.py +3 -4
- mcp_agent/resources/examples/researcher/researcher.py +2 -1
- mcp_agent/resources/examples/workflows/agent_build.py +2 -1
- mcp_agent/resources/examples/workflows/chaining.py +2 -1
- mcp_agent/resources/examples/workflows/evaluator.py +2 -1
- mcp_agent/resources/examples/workflows/human_input.py +2 -1
- mcp_agent/resources/examples/workflows/orchestrator.py +2 -1
- mcp_agent/resources/examples/workflows/parallel.py +2 -1
- mcp_agent/resources/examples/workflows/router.py +2 -1
- mcp_agent/resources/examples/workflows/sse.py +1 -1
- mcp_agent/telemetry/usage_tracking.py +2 -1
- mcp_agent/ui/console_display.py +17 -41
- mcp_agent/workflows/embedding/embedding_base.py +1 -4
- mcp_agent/workflows/embedding/embedding_cohere.py +2 -2
- mcp_agent/workflows/embedding/embedding_openai.py +4 -13
- mcp_agent/workflows/evaluator_optimizer/evaluator_optimizer.py +23 -57
- mcp_agent/workflows/intent_classifier/intent_classifier_base.py +5 -8
- mcp_agent/workflows/intent_classifier/intent_classifier_embedding.py +7 -11
- mcp_agent/workflows/intent_classifier/intent_classifier_embedding_cohere.py +4 -8
- mcp_agent/workflows/intent_classifier/intent_classifier_embedding_openai.py +4 -8
- mcp_agent/workflows/intent_classifier/intent_classifier_llm.py +11 -22
- mcp_agent/workflows/intent_classifier/intent_classifier_llm_anthropic.py +3 -3
- mcp_agent/workflows/intent_classifier/intent_classifier_llm_openai.py +4 -6
- mcp_agent/workflows/llm/anthropic_utils.py +8 -29
- mcp_agent/workflows/llm/augmented_llm.py +94 -332
- mcp_agent/workflows/llm/augmented_llm_anthropic.py +43 -76
- mcp_agent/workflows/llm/augmented_llm_openai.py +46 -100
- mcp_agent/workflows/llm/augmented_llm_passthrough.py +42 -20
- mcp_agent/workflows/llm/augmented_llm_playback.py +8 -6
- mcp_agent/workflows/llm/memory.py +103 -0
- mcp_agent/workflows/llm/model_factory.py +9 -21
- mcp_agent/workflows/llm/openai_utils.py +1 -1
- mcp_agent/workflows/llm/prompt_utils.py +39 -27
- mcp_agent/workflows/llm/providers/multipart_converter_anthropic.py +246 -184
- mcp_agent/workflows/llm/providers/multipart_converter_openai.py +212 -202
- mcp_agent/workflows/llm/providers/openai_multipart.py +19 -61
- mcp_agent/workflows/llm/providers/sampling_converter_anthropic.py +11 -212
- mcp_agent/workflows/llm/providers/sampling_converter_openai.py +13 -215
- mcp_agent/workflows/llm/sampling_converter.py +117 -0
- mcp_agent/workflows/llm/sampling_format_converter.py +12 -29
- mcp_agent/workflows/orchestrator/orchestrator.py +24 -67
- mcp_agent/workflows/orchestrator/orchestrator_models.py +14 -40
- mcp_agent/workflows/parallel/fan_in.py +17 -47
- mcp_agent/workflows/parallel/fan_out.py +6 -12
- mcp_agent/workflows/parallel/parallel_llm.py +9 -26
- mcp_agent/workflows/router/router_base.py +29 -59
- mcp_agent/workflows/router/router_embedding.py +11 -25
- mcp_agent/workflows/router/router_embedding_cohere.py +2 -2
- mcp_agent/workflows/router/router_embedding_openai.py +2 -2
- mcp_agent/workflows/router/router_llm.py +12 -28
- mcp_agent/workflows/swarm/swarm.py +20 -48
- mcp_agent/workflows/swarm/swarm_anthropic.py +2 -2
- mcp_agent/workflows/swarm/swarm_openai.py +2 -2
- fast_agent_mcp-0.1.11.dist-info/RECORD +0 -160
- mcp_agent/workflows/llm/llm_selector.py +0 -345
- {fast_agent_mcp-0.1.11.dist-info → fast_agent_mcp-0.1.13.dist-info}/WHEEL +0 -0
- {fast_agent_mcp-0.1.11.dist-info → fast_agent_mcp-0.1.13.dist-info}/entry_points.txt +0 -0
- {fast_agent_mcp-0.1.11.dist-info → fast_agent_mcp-0.1.13.dist-info}/licenses/LICENSE +0 -0
mcp_agent/mcp/mcp_aggregator.py
CHANGED
```diff
@@ -1,31 +1,31 @@
 from asyncio import Lock, gather
 from typing import (
-    List,
-    Dict,
-    Optional,
     TYPE_CHECKING,
     Any,
     Callable,
+    Dict,
+    List,
+    Optional,
     TypeVar,
 )
+
 from mcp import GetPromptResult, ReadResourceResult
-from pydantic import AnyUrl, BaseModel, ConfigDict
 from mcp.client.session import ClientSession
 from mcp.server.lowlevel.server import Server
 from mcp.server.stdio import stdio_server
 from mcp.types import (
     CallToolResult,
     ListToolsResult,
+    Prompt,
     TextContent,
     Tool,
-    Prompt,
 )
+from pydantic import AnyUrl, BaseModel, ConfigDict
 
+from mcp_agent.context_dependent import ContextDependent
 from mcp_agent.event_progress import ProgressAction
 from mcp_agent.logging.logger import get_logger
 from mcp_agent.mcp.gen_client import gen_client
-
-from mcp_agent.context_dependent import ContextDependent
 from mcp_agent.mcp.mcp_agent_client_session import MCPAgentClientSession
 from mcp_agent.mcp.mcp_connection_manager import MCPConnectionManager
 
@@ -33,9 +33,7 @@ if TYPE_CHECKING:
     from mcp_agent.context import Context
 
 
-logger = get_logger(
-    __name__
-)  # This will be replaced per-instance when agent_name is available
+logger = get_logger(__name__)  # This will be replaced per-instance when agent_name is available
 
 SEP = "-"
 
@@ -79,9 +77,7 @@ class MCPAggregator(ContextDependent):
         if self.connection_persistence:
             # Try to get existing connection manager from context
             if not hasattr(self.context, "_connection_manager"):
-                self.context._connection_manager = MCPConnectionManager(
-                    self.context.server_registry
-                )
+                self.context._connection_manager = MCPConnectionManager(self.context.server_registry)
             await self.context._connection_manager.__aenter__()
             self._persistent_connection_manager = self.context._connection_manager
 
@@ -99,7 +95,7 @@ class MCPAggregator(ContextDependent):
         context: Optional["Context"] = None,
         name: str = None,
         **kwargs,
-    ):
+    ) -> None:
         """
         :param server_names: A list of server names to connect to.
         :param connection_persistence: Whether to maintain persistent connections to servers (default: True).
@@ -130,23 +126,17 @@ class MCPAggregator(ContextDependent):
         self._prompt_cache: Dict[str, List[Prompt]] = {}
         self._prompt_cache_lock = Lock()
 
-    async def close(self):
+    async def close(self) -> None:
         """
         Close all persistent connections when the aggregator is deleted.
         """
         if self.connection_persistence and self._persistent_connection_manager:
             try:
                 # Only attempt cleanup if we own the connection manager
-                if (
-                    hasattr(self.context, "_connection_manager")
-                    and self.context._connection_manager
-                    == self._persistent_connection_manager
-                ):
+                if hasattr(self.context, "_connection_manager") and self.context._connection_manager == self._persistent_connection_manager:
                     logger.info("Shutting down all persistent connections...")
                     await self._persistent_connection_manager.disconnect_all()
-                    await self._persistent_connection_manager.__aexit__(
-                        None, None, None
-                    )
+                    await self._persistent_connection_manager.__aexit__(None, None, None)
                     delattr(self.context, "_connection_manager")
                 self.initialized = False
             except Exception as e:
@@ -185,7 +175,7 @@ class MCPAggregator(ContextDependent):
             logger.error(f"Error creating MCPAggregator: {e}")
             await instance.__aexit__(None, None, None)
 
-    async def load_servers(self):
+    async def load_servers(self) -> None:
         """
         Discover tools from each server in parallel and build an index of namespaced tool names.
         Also populate the prompt cache.
@@ -212,9 +202,7 @@ class MCPAggregator(ContextDependent):
                     },
                 )
 
-                await self._persistent_connection_manager.get_server(
-                    server_name, client_session_factory=MCPAgentClientSession
-                )
+                await self._persistent_connection_manager.get_server(server_name, client_session_factory=MCPAgentClientSession)
 
                 logger.info(
                     f"MCP Servers initialized for agent '{self.agent_name}'",
@@ -232,9 +220,7 @@ class MCPAggregator(ContextDependent):
                 logger.error(f"Error loading tools from server '{server_name}'", data=e)
                 return []
 
-        async def fetch_prompts(
-            client: ClientSession, server_name: str
-        ) -> List[Prompt]:
+        async def fetch_prompts(client: ClientSession, server_name: str) -> List[Prompt]:
             # Only fetch prompts if the server supports them
             capabilities = await self.get_capabilities(server_name)
             if not capabilities or not capabilities.prompts:
@@ -253,17 +239,11 @@ class MCPAggregator(ContextDependent):
             prompts: List[Prompt] = []
 
             if self.connection_persistence:
-                server_connection = (
-                    await self._persistent_connection_manager.get_server(
-                        server_name, client_session_factory=MCPAgentClientSession
-                    )
-                )
+                server_connection = await self._persistent_connection_manager.get_server(server_name, client_session_factory=MCPAgentClientSession)
                 tools = await fetch_tools(server_connection.session)
                 prompts = await fetch_prompts(server_connection.session, server_name)
             else:
-                async with gen_client(
-                    server_name, server_registry=self.context.server_registry
-                ) as client:
+                async with gen_client(server_name, server_registry=self.context.server_registry) as client:
                     tools = await fetch_tools(client)
                     prompts = await fetch_prompts(client, server_name)
 
@@ -319,9 +299,7 @@ class MCPAggregator(ContextDependent):
             return None
 
         try:
-            server_conn = await self._persistent_connection_manager.get_server(
-                server_name, client_session_factory=MCPAgentClientSession
-            )
+            server_conn = await self._persistent_connection_manager.get_server(server_name, client_session_factory=MCPAgentClientSession)
             # server_capabilities is a property, not a coroutine
             return server_conn.server_capabilities
         except Exception as e:
@@ -383,9 +361,7 @@ class MCPAggregator(ContextDependent):
             return error_factory(error_msg) if error_factory else None
 
         if self.connection_persistence:
-            server_connection = await self._persistent_connection_manager.get_server(
-                server_name, client_session_factory=MCPAgentClientSession
-            )
+            server_connection = await self._persistent_connection_manager.get_server(server_name, client_session_factory=MCPAgentClientSession)
             return await try_execute(server_connection.session)
         else:
             logger.debug(
@@ -396,9 +372,7 @@ class MCPAggregator(ContextDependent):
                     "agent_name": self.agent_name,
                 },
             )
-            async with gen_client(
-                server_name, server_registry=self.context.server_registry
-            ) as client:
+            async with gen_client(server_name, server_registry=self.context.server_registry) as client:
                 result = await try_execute(client)
                 logger.debug(
                     f"Closing temporary connection to server: {server_name}",
@@ -410,9 +384,7 @@ class MCPAggregator(ContextDependent):
                 )
                 return result
 
-    async def _parse_resource_name(
-        self, name: str, resource_type: str
-    ) -> tuple[str, str]:
+    async def _parse_resource_name(self, name: str, resource_type: str) -> tuple[str, str]:
         """
         Parse a possibly namespaced resource name into server name and local resource name.
 
@@ -447,9 +419,7 @@ class MCPAggregator(ContextDependent):
 
         return server_name, local_name
 
-    async def call_tool(
-        self, name: str, arguments: dict | None = None
-    ) -> CallToolResult:
+    async def call_tool(self, name: str, arguments: dict | None = None) -> CallToolResult:
         """
         Call a namespaced tool, e.g., 'server_name.tool_name'.
         """
@@ -461,8 +431,8 @@ class MCPAggregator(ContextDependent):
         if server_name is None or local_tool_name is None:
             logger.error(f"Error: Tool '{name}' not found")
             return CallToolResult(
-                isError=True,
-                content=[TextContent(type="text", text=f"Tool '{name}' not found")]
+                isError=True,
+                content=[TextContent(type="text", text=f"Tool '{name}' not found")],
             )
 
         logger.info(
@@ -481,15 +451,10 @@ class MCPAggregator(ContextDependent):
             operation_name=local_tool_name,
             method_name="call_tool",
             method_args={"name": local_tool_name, "arguments": arguments},
-            error_factory=lambda msg: CallToolResult(
-                isError=True,
-                content=[TextContent(type="text", text=msg)]
-            ),
+            error_factory=lambda msg: CallToolResult(isError=True, content=[TextContent(type="text", text=msg)]),
         )
 
-    async def get_prompt(
-        self, prompt_name: str = None, arguments: dict[str, str] = None
-    ) -> GetPromptResult:
+    async def get_prompt(self, prompt_name: str | None, arguments: dict[str, str] | None) -> GetPromptResult:
         """
         Get a prompt from a server.
 
@@ -541,13 +506,9 @@ class MCPAggregator(ContextDependent):
             async with self._prompt_cache_lock:
                 if server_name in self._prompt_cache:
                     # Check if any prompt in the cache has this name
-                    prompt_names = [
-                        prompt.name for prompt in self._prompt_cache[server_name]
-                    ]
+                    prompt_names = [prompt.name for prompt in self._prompt_cache[server_name]]
                     if local_prompt_name not in prompt_names:
-                        logger.debug(
-                            f"Prompt '{local_prompt_name}' not found in cache for server '{server_name}'"
-                        )
+                        logger.debug(f"Prompt '{local_prompt_name}' not found in cache for server '{server_name}'")
                         return GetPromptResult(
                             description=f"Prompt '{local_prompt_name}' not found on server '{server_name}'",
                             messages=[],
@@ -569,9 +530,7 @@ class MCPAggregator(ContextDependent):
 
             # Add namespaced name and source server to the result
             if result and result.messages:
-                result.namespaced_name = (
-                    namespaced_name or f"{server_name}{SEP}{local_prompt_name}"
-                )
+                result.namespaced_name = namespaced_name or f"{server_name}{SEP}{local_prompt_name}"
 
                 # Store the arguments in the result for display purposes
                 if arguments:
@@ -591,18 +550,14 @@ class MCPAggregator(ContextDependent):
                         potential_servers.append(s_name)
 
             if potential_servers:
-                logger.debug(
-                    f"Found prompt '{local_prompt_name}' in cache for servers: {potential_servers}"
-                )
+                logger.debug(f"Found prompt '{local_prompt_name}' in cache for servers: {potential_servers}")
 
                 # Try each server from the cache
                 for s_name in potential_servers:
                     # Check if this server supports prompts
                     capabilities = await self.get_capabilities(s_name)
                     if not capabilities or not capabilities.prompts:
-                        logger.debug(
-                            f"Server '{s_name}' does not support prompts, skipping"
-                        )
+                        logger.debug(f"Server '{s_name}' does not support prompts, skipping")
                         continue
 
                     try:
@@ -621,9 +576,7 @@ class MCPAggregator(ContextDependent):
 
                     # If we got a successful result with messages, return it
                     if result and result.messages:
-                        logger.debug(
-                            f"Successfully retrieved prompt '{local_prompt_name}' from server '{s_name}'"
-                        )
+                        logger.debug(f"Successfully retrieved prompt '{local_prompt_name}' from server '{s_name}'")
                         # Add namespaced name using the actual server where found
                         result.namespaced_name = f"{s_name}{SEP}{local_prompt_name}"
 
@@ -636,9 +589,7 @@ class MCPAggregator(ContextDependent):
                 except Exception as e:
                     logger.debug(f"Error retrieving prompt from server '{s_name}': {e}")
             else:
-                logger.debug(
-                    f"Prompt '{local_prompt_name}' not found in any server's cache"
-                )
+                logger.debug(f"Prompt '{local_prompt_name}' not found in any server's cache")
 
         # If not in cache, perform a full search as fallback (cache might be outdated)
         # First identify servers that support prompts
@@ -648,9 +599,7 @@ class MCPAggregator(ContextDependent):
             if capabilities and capabilities.prompts:
                 supported_servers.append(s_name)
             else:
-                logger.debug(
-                    f"Server '{s_name}' does not support prompts, skipping from fallback search"
-                )
+                logger.debug(f"Server '{s_name}' does not support prompts, skipping from fallback search")
 
         # Try all supported servers in order
         for s_name in supported_servers:
@@ -671,9 +620,7 @@ class MCPAggregator(ContextDependent):
 
                 # If we got a successful result with messages, return it
                 if result and result.messages:
-                    logger.debug(
-                        f"Found prompt '{local_prompt_name}' on server '{s_name}' (not in cache)"
-                    )
+                    logger.debug(f"Found prompt '{local_prompt_name}' on server '{s_name}' (not in cache)")
                     # Add namespaced name using the actual server where found
                     result.namespaced_name = f"{s_name}{SEP}{local_prompt_name}"
 
@@ -692,21 +639,15 @@ class MCPAggregator(ContextDependent):
                     )
 
                     prompts = getattr(prompt_list_result, "prompts", [])
-                    matching_prompts = [
-                        p for p in prompts if p.name == local_prompt_name
-                    ]
+                    matching_prompts = [p for p in prompts if p.name == local_prompt_name]
                     if matching_prompts:
                         async with self._prompt_cache_lock:
                             if s_name not in self._prompt_cache:
                                 self._prompt_cache[s_name] = []
                             # Add if not already in the cache
-                            prompt_names_in_cache = [
-                                p.name for p in self._prompt_cache[s_name]
-                            ]
+                            prompt_names_in_cache = [p.name for p in self._prompt_cache[s_name]]
                             if local_prompt_name not in prompt_names_in_cache:
-                                self._prompt_cache[s_name].append(
-                                    matching_prompts[0]
-                                )
+                                self._prompt_cache[s_name].append(matching_prompts[0])
                 except Exception:
                     # Ignore errors when updating cache
                     pass
@@ -755,9 +696,7 @@ class MCPAggregator(ContextDependent):
             async with self._prompt_cache_lock:
                 if server_name in self._prompt_cache:
                     results[server_name] = self._prompt_cache[server_name]
-                    logger.debug(
-                        f"Returning cached prompts for server '{server_name}'"
-                    )
+                    logger.debug(f"Returning cached prompts for server '{server_name}'")
                     return results
 
             # Check if server supports prompts
@@ -791,9 +730,7 @@ class MCPAggregator(ContextDependent):
             if capabilities and capabilities.prompts:
                 supported_servers.append(s_name)
             else:
-                logger.debug(
-                    f"Server '{s_name}' does not support prompts, skipping"
-                )
+                logger.debug(f"Server '{s_name}' does not support prompts, skipping")
                 # Add empty list to results for this server
                 results[s_name] = []
 
@@ -829,9 +766,7 @@ class MCPAggregator(ContextDependent):
         logger.debug(f"Available prompts across servers: {results}")
         return results
 
-    async def get_resource(
-        self, server_name: str, resource_uri: str
-    ) -> ReadResourceResult:
+    async def get_resource(self, server_name: str, resource_uri: str) -> ReadResourceResult:
         """
         Get a resource directly from an MCP server by URI.
 
@@ -882,7 +817,7 @@ class MCPCompoundServer(Server):
     A compound server (server-of-servers) that aggregates multiple MCP servers and is itself an MCP server
     """
 
-    def __init__(self, server_names: List[str], name: str = "MCPCompoundServer"):
+    def __init__(self, server_names: List[str], name: str = "MCPCompoundServer") -> None:
         super().__init__(name)
         self.aggregator = MCPAggregator(server_names)
 
@@ -897,22 +832,18 @@ class MCPCompoundServer(Server):
         tools_result = await self.aggregator.list_tools()
         return tools_result.tools
 
-    async def _call_tool(
-        self, name: str, arguments: dict | None = None
-    ) -> CallToolResult:
+    async def _call_tool(self, name: str, arguments: dict | None = None) -> CallToolResult:
         """Call a specific tool from the aggregated servers."""
         try:
             result = await self.aggregator.call_tool(name=name, arguments=arguments)
             return result.content
         except Exception as e:
             return CallToolResult(
-                isError=True,
-                content=[TextContent(type="text", text=f"Error calling tool: {e}")]
+                isError=True,
+                content=[TextContent(type="text", text=f"Error calling tool: {e}")],
             )
 
-    async def _get_prompt(
-        self, name: str = None, arguments: dict[str, str] = None
-    ) -> GetPromptResult:
+    async def _get_prompt(self, name: str = None, arguments: dict[str, str] = None) -> GetPromptResult:
         """
         Get a prompt from the aggregated servers.
 
@@ -921,14 +852,10 @@ class MCPCompoundServer(Server):
             arguments: Optional dictionary of string arguments for prompt templating
         """
         try:
-            result = await self.aggregator.get_prompt(
-                prompt_name=name, arguments=arguments
-            )
+            result = await self.aggregator.get_prompt(prompt_name=name, arguments=arguments)
             return result
         except Exception as e:
-            return GetPromptResult(
-                description=f"Error getting prompt: {e}", messages=[]
-            )
+            return GetPromptResult(description=f"Error getting prompt: {e}", messages=[])
 
     async def _list_prompts(self, server_name: str = None) -> Dict[str, List[str]]:
         """List available prompts from the aggregated servers."""
```
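Most of the edits above are formatting-only (multi-line calls joined onto one line, imports sorted, `-> None` annotations added). The one visible API change is that `MCPAggregator.get_prompt` loses its parameter defaults, so `prompt_name` and `arguments` must now both be passed explicitly. A minimal usage sketch of the surface shown in this diff; the server name `fetch`, tool `fetch_url`, and prompt `summary` are invented placeholders, and `connection_persistence` is assumed to be an `__init__` keyword based on the docstring above:

```python
# Hypothetical sketch only: server/tool/prompt names are illustrative; the
# MCPAggregator methods used here are the ones visible in this diff.
import asyncio

from mcp_agent.mcp.mcp_aggregator import MCPAggregator


async def demo() -> None:
    aggregator = MCPAggregator(["fetch"], connection_persistence=False)
    await aggregator.load_servers()
    try:
        # Tool and prompt names are namespaced as "<server>-<name>" (SEP = "-").
        tool_result = await aggregator.call_tool(
            name="fetch-fetch_url", arguments={"url": "https://example.com"}
        )
        print(tool_result.isError)

        # get_prompt no longer supplies defaults, so pass both values explicitly.
        prompt_result = await aggregator.get_prompt(
            prompt_name="fetch-summary", arguments={"topic": "news"}
        )
        print(prompt_result.namespaced_name)
    finally:
        await aggregator.close()


if __name__ == "__main__":
    asyncio.run(demo())
```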
mcp_agent/mcp/mcp_connection_manager.py
CHANGED
```diff
@@ -2,38 +2,37 @@
 Manages the lifecycle of multiple MCP server connections.
 """
 
-from datetime import timedelta
 import asyncio
+from datetime import timedelta
 from typing import (
+    TYPE_CHECKING,
     AsyncGenerator,
     Callable,
     Dict,
     Optional,
-    TYPE_CHECKING,
 )
 
-from anyio import Event,
+from anyio import Event, Lock, create_task_group
 from anyio.streams.memory import MemoryObjectReceiveStream, MemoryObjectSendStream
-
 from mcp import ClientSession
+from mcp.client.sse import sse_client
 from mcp.client.stdio import (
     StdioServerParameters,
     get_default_environment,
 )
-from mcp.client.sse import sse_client
 from mcp.types import JSONRPCMessage, ServerCapabilities
 
 from mcp_agent.config import MCPServerSettings
+from mcp_agent.context_dependent import ContextDependent
 from mcp_agent.core.exceptions import ServerInitializationError
 from mcp_agent.event_progress import ProgressAction
 from mcp_agent.logging.logger import get_logger
 from mcp_agent.mcp.mcp_agent_client_session import MCPAgentClientSession
 from mcp_agent.mcp.stdio import stdio_client_with_rich_stderr
-from mcp_agent.context_dependent import ContextDependent
 
 if TYPE_CHECKING:
-    from mcp_agent.mcp_server_registry import InitHookCallable, ServerRegistry
     from mcp_agent.context import Context
+    from mcp_agent.mcp_server_registry import InitHookCallable, ServerRegistry
 
 logger = get_logger(__name__)
 
@@ -64,7 +63,7 @@ class ServerConnection:
             ClientSession,
         ],
         init_hook: Optional["InitHookCallable"] = None,
-    ):
+    ) -> None:
         self.server_name = server_name
         self.server_config = server_config
         self.session: ClientSession | None = None
@@ -134,11 +133,7 @@ class ServerConnection:
         Create a new session instance for this server connection.
         """
 
-        read_timeout = (
-            timedelta(seconds=self.server_config.read_timeout_seconds)
-            if self.server_config.read_timeout_seconds
-            else None
-        )
+        read_timeout = timedelta(seconds=self.server_config.read_timeout_seconds) if self.server_config.read_timeout_seconds else None
 
         session = self._client_session_factory(read_stream, send_stream, read_timeout)
 
@@ -192,9 +187,7 @@ class MCPConnectionManager(ContextDependent):
     Integrates with the application context system for proper resource management.
     """
 
-    def __init__(
-        self, server_registry: "ServerRegistry", context: Optional["Context"] = None
-    ):
+    def __init__(self, server_registry: "ServerRegistry", context: Optional["Context"] = None) -> None:
         super().__init__(context=context)
         self.server_registry = server_registry
         self.running_servers: Dict[str, ServerConnection] = {}
@@ -255,9 +248,7 @@ class MCPConnectionManager(ContextDependent):
         if not config:
             raise ValueError(f"Server '{server_name}' not found in registry.")
 
-        logger.debug(
-            f"{server_name}: Found server configuration=", data=config.model_dump()
-        )
+        logger.debug(f"{server_name}: Found server configuration=", data=config.model_dump())
 
         def transport_context_factory():
             if config.transport == "stdio":
@@ -309,9 +300,7 @@ class MCPConnectionManager(ContextDependent):
 
            # If server exists but isn't healthy, remove it so we can create a new one
            if server_conn:
-                logger.info(
-                    f"{server_name}: Server exists but is unhealthy, recreating..."
-                )
+                logger.info(f"{server_name}: Server exists but is unhealthy, recreating...")
                self.running_servers.pop(server_name)
                server_conn.request_shutdown()
 
@@ -328,19 +317,13 @@ class MCPConnectionManager(ContextDependent):
         # Check if the server is healthy after initialization
         if not server_conn.is_healthy():
             error_msg = server_conn._error_message or "Unknown error"
-            raise ServerInitializationError(
-                f"MCP Server: '{server_name}': Failed to initialize with error: '{error_msg}'. Check fastagent.config.yaml"
-            )
+            raise ServerInitializationError(f"MCP Server: '{server_name}': Failed to initialize with error: '{error_msg}'. Check fastagent.config.yaml")
 
         return server_conn
 
-    async def get_server_capabilities(
-        self, server_name: str
-    ) -> ServerCapabilities | None:
+    async def get_server_capabilities(self, server_name: str) -> ServerCapabilities | None:
         """Get the capabilities of a specific server."""
-        server_conn = await self.get_server(
-            server_name, client_session_factory=MCPAgentClientSession
-        )
+        server_conn = await self.get_server(server_name, client_session_factory=MCPAgentClientSession)
         return server_conn.server_capabilities if server_conn else None
 
     async def disconnect_server(self, server_name: str) -> None:
@@ -353,13 +336,9 @@ class MCPConnectionManager(ContextDependent):
         server_conn = self.running_servers.pop(server_name, None)
         if server_conn:
             server_conn.request_shutdown()
-            logger.info(
-                f"{server_name}: Shutdown signal sent (lifecycle task will exit)."
-            )
+            logger.info(f"{server_name}: Shutdown signal sent (lifecycle task will exit).")
         else:
-            logger.info(
-                f"{server_name}: No persistent connection found. Skipping server shutdown"
-            )
+            logger.info(f"{server_name}: No persistent connection found. Skipping server shutdown")
 
     async def disconnect_all(self) -> None:
         """Disconnect all servers that are running under this connection manager."""
```
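The `MCPConnectionManager` changes follow the same pattern (sorted imports, joined calls, `-> None` annotations), so behaviour is unchanged. A rough sketch of the persistent-connection flow the aggregator drives, assuming a `server_registry` taken from an already-initialized application context (not shown in this diff):

```python
# Hedged sketch: `server_registry` is assumed to come from an initialized
# fast-agent context; only calls visible in the diff are used.
from mcp_agent.mcp.mcp_agent_client_session import MCPAgentClientSession
from mcp_agent.mcp.mcp_connection_manager import MCPConnectionManager


async def probe(server_registry, server_name: str) -> None:
    manager = MCPConnectionManager(server_registry)
    await manager.__aenter__()  # MCPAggregator enters the manager the same way
    try:
        conn = await manager.get_server(
            server_name, client_session_factory=MCPAgentClientSession
        )
        caps = await manager.get_server_capabilities(server_name)
        print(conn.server_name, bool(caps and caps.prompts))
    finally:
        await manager.disconnect_all()
        await manager.__aexit__(None, None, None)
```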
mcp_agent/mcp/prompt_message_multipart.py
CHANGED
```diff
@@ -1,14 +1,14 @@
 from typing import List, Union
-from pydantic import BaseModel
 
 from mcp.types import (
-    PromptMessage,
-    TextContent,
-    ImageContent,
     EmbeddedResource,
-    Role,
     GetPromptResult,
+    ImageContent,
+    PromptMessage,
+    Role,
+    TextContent,
 )
+from pydantic import BaseModel
 
 
 class PromptMessageMultipart(BaseModel):
@@ -21,9 +21,7 @@ class PromptMessageMultipart(BaseModel):
     content: List[Union[TextContent, ImageContent, EmbeddedResource]]
 
     @classmethod
-    def
-        cls, messages: List[PromptMessage]
-    ) -> List["PromptMessageMultipart"]:
+    def to_multipart(cls, messages: List[PromptMessage]) -> List["PromptMessageMultipart"]:
         """Convert a sequence of PromptMessages into PromptMessageMultipart objects."""
         if not messages:
             return []
@@ -41,7 +39,8 @@ class PromptMessageMultipart(BaseModel):
                 current_group = cls(role=msg.role, content=[msg.content])
             else:
                 # Same role, add to current message
-                current_group
+                if current_group is not None:
+                    current_group.content.append(msg.content)
 
         # Add the last group
         if current_group is not None:
@@ -49,16 +48,11 @@ class PromptMessageMultipart(BaseModel):
 
         return result
 
-    def
+    def from_multipart(self) -> List[PromptMessage]:
         """Convert this PromptMessageMultipart to a sequence of standard PromptMessages."""
-        return [
-            PromptMessage(role=self.role, content=content_part)
-            for content_part in self.content
-        ]
+        return [PromptMessage(role=self.role, content=content_part) for content_part in self.content]
 
     @classmethod
-    def parse_get_prompt_result(
-        cls, result: GetPromptResult
-    ) -> List["PromptMessageMultipart"]:
+    def parse_get_prompt_result(cls, result: GetPromptResult) -> List["PromptMessageMultipart"]:
         """Parse a GetPromptResult into PromptMessageMultipart objects."""
-        return cls.
+        return cls.to_multipart(result.messages)
```
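For reference, the reshaped `PromptMessageMultipart` groups consecutive `PromptMessage`s that share a role into a single multipart message and can flatten them back; the new `if current_group is not None:` guard only protects the first iteration. A small sketch of that round trip (the message text is made up):

```python
# Sketch of the grouping behaviour shown in the diff; message contents are invented.
from mcp.types import PromptMessage, TextContent

from mcp_agent.mcp.prompt_message_multipart import PromptMessageMultipart

messages = [
    PromptMessage(role="user", content=TextContent(type="text", text="Here is a file:")),
    PromptMessage(role="user", content=TextContent(type="text", text="...file body...")),
    PromptMessage(role="assistant", content=TextContent(type="text", text="Summarising now.")),
]

# Consecutive same-role messages collapse into one multipart entry.
multipart = PromptMessageMultipart.to_multipart(messages)
assert len(multipart) == 2
assert len(multipart[0].content) == 2

# from_multipart expands a multipart message back into standard PromptMessages.
flattened = multipart[0].from_multipart()
assert [m.role for m in flattened] == ["user", "user"]
```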
|