fast-agent-mcp 0.1.12__py3-none-any.whl → 0.1.13__py3-none-any.whl
This diff shows the changes between two publicly released versions of the package, as published to a supported registry. It is provided for informational purposes only.
- {fast_agent_mcp-0.1.12.dist-info → fast_agent_mcp-0.1.13.dist-info}/METADATA +1 -1
- fast_agent_mcp-0.1.13.dist-info/RECORD +164 -0
- mcp_agent/agents/agent.py +37 -79
- mcp_agent/app.py +16 -22
- mcp_agent/cli/commands/bootstrap.py +22 -52
- mcp_agent/cli/commands/config.py +4 -4
- mcp_agent/cli/commands/setup.py +11 -26
- mcp_agent/cli/main.py +6 -9
- mcp_agent/cli/terminal.py +2 -2
- mcp_agent/config.py +1 -5
- mcp_agent/context.py +13 -24
- mcp_agent/context_dependent.py +3 -7
- mcp_agent/core/agent_app.py +45 -121
- mcp_agent/core/agent_utils.py +3 -5
- mcp_agent/core/decorators.py +5 -12
- mcp_agent/core/enhanced_prompt.py +25 -52
- mcp_agent/core/exceptions.py +8 -8
- mcp_agent/core/factory.py +29 -70
- mcp_agent/core/fastagent.py +48 -88
- mcp_agent/core/mcp_content.py +8 -16
- mcp_agent/core/prompt.py +8 -15
- mcp_agent/core/proxies.py +34 -25
- mcp_agent/core/request_params.py +6 -3
- mcp_agent/core/types.py +4 -6
- mcp_agent/core/validation.py +4 -3
- mcp_agent/executor/decorator_registry.py +11 -23
- mcp_agent/executor/executor.py +8 -17
- mcp_agent/executor/task_registry.py +2 -4
- mcp_agent/executor/temporal.py +28 -74
- mcp_agent/executor/workflow.py +3 -5
- mcp_agent/executor/workflow_signal.py +17 -29
- mcp_agent/human_input/handler.py +4 -9
- mcp_agent/human_input/types.py +2 -3
- mcp_agent/logging/events.py +1 -5
- mcp_agent/logging/json_serializer.py +7 -6
- mcp_agent/logging/listeners.py +20 -23
- mcp_agent/logging/logger.py +15 -17
- mcp_agent/logging/rich_progress.py +10 -8
- mcp_agent/logging/tracing.py +4 -6
- mcp_agent/logging/transport.py +22 -22
- mcp_agent/mcp/gen_client.py +4 -12
- mcp_agent/mcp/interfaces.py +71 -86
- mcp_agent/mcp/mcp_agent_client_session.py +11 -19
- mcp_agent/mcp/mcp_agent_server.py +8 -10
- mcp_agent/mcp/mcp_aggregator.py +45 -117
- mcp_agent/mcp/mcp_connection_manager.py +16 -37
- mcp_agent/mcp/prompt_message_multipart.py +12 -18
- mcp_agent/mcp/prompt_serialization.py +13 -38
- mcp_agent/mcp/prompts/prompt_load.py +99 -0
- mcp_agent/mcp/prompts/prompt_server.py +21 -128
- mcp_agent/mcp/prompts/prompt_template.py +20 -42
- mcp_agent/mcp/resource_utils.py +8 -17
- mcp_agent/mcp/sampling.py +5 -14
- mcp_agent/mcp/stdio.py +11 -8
- mcp_agent/mcp_server/agent_server.py +10 -17
- mcp_agent/mcp_server_registry.py +13 -35
- mcp_agent/resources/examples/data-analysis/analysis-campaign.py +1 -1
- mcp_agent/resources/examples/data-analysis/analysis.py +1 -1
- mcp_agent/resources/examples/data-analysis/slides.py +110 -0
- mcp_agent/resources/examples/internal/agent.py +2 -1
- mcp_agent/resources/examples/internal/job.py +2 -1
- mcp_agent/resources/examples/internal/prompt_category.py +1 -1
- mcp_agent/resources/examples/internal/prompt_sizing.py +3 -5
- mcp_agent/resources/examples/internal/sizer.py +2 -1
- mcp_agent/resources/examples/internal/social.py +2 -1
- mcp_agent/resources/examples/mcp_researcher/researcher-eval.py +1 -1
- mcp_agent/resources/examples/prompting/agent.py +2 -1
- mcp_agent/resources/examples/prompting/image_server.py +5 -11
- mcp_agent/resources/examples/researcher/researcher-eval.py +1 -1
- mcp_agent/resources/examples/researcher/researcher-imp.py +3 -4
- mcp_agent/resources/examples/researcher/researcher.py +2 -1
- mcp_agent/resources/examples/workflows/agent_build.py +2 -1
- mcp_agent/resources/examples/workflows/chaining.py +2 -1
- mcp_agent/resources/examples/workflows/evaluator.py +2 -1
- mcp_agent/resources/examples/workflows/human_input.py +2 -1
- mcp_agent/resources/examples/workflows/orchestrator.py +2 -1
- mcp_agent/resources/examples/workflows/parallel.py +2 -1
- mcp_agent/resources/examples/workflows/router.py +2 -1
- mcp_agent/resources/examples/workflows/sse.py +1 -1
- mcp_agent/telemetry/usage_tracking.py +2 -1
- mcp_agent/ui/console_display.py +15 -39
- mcp_agent/workflows/embedding/embedding_base.py +1 -4
- mcp_agent/workflows/embedding/embedding_cohere.py +2 -2
- mcp_agent/workflows/embedding/embedding_openai.py +4 -13
- mcp_agent/workflows/evaluator_optimizer/evaluator_optimizer.py +23 -57
- mcp_agent/workflows/intent_classifier/intent_classifier_base.py +5 -8
- mcp_agent/workflows/intent_classifier/intent_classifier_embedding.py +7 -11
- mcp_agent/workflows/intent_classifier/intent_classifier_embedding_cohere.py +4 -8
- mcp_agent/workflows/intent_classifier/intent_classifier_embedding_openai.py +4 -8
- mcp_agent/workflows/intent_classifier/intent_classifier_llm.py +11 -22
- mcp_agent/workflows/intent_classifier/intent_classifier_llm_anthropic.py +3 -3
- mcp_agent/workflows/intent_classifier/intent_classifier_llm_openai.py +4 -6
- mcp_agent/workflows/llm/anthropic_utils.py +8 -29
- mcp_agent/workflows/llm/augmented_llm.py +69 -247
- mcp_agent/workflows/llm/augmented_llm_anthropic.py +39 -73
- mcp_agent/workflows/llm/augmented_llm_openai.py +42 -97
- mcp_agent/workflows/llm/augmented_llm_passthrough.py +13 -20
- mcp_agent/workflows/llm/augmented_llm_playback.py +8 -6
- mcp_agent/workflows/llm/memory.py +103 -0
- mcp_agent/workflows/llm/model_factory.py +8 -20
- mcp_agent/workflows/llm/openai_utils.py +1 -1
- mcp_agent/workflows/llm/prompt_utils.py +1 -3
- mcp_agent/workflows/llm/providers/multipart_converter_anthropic.py +47 -89
- mcp_agent/workflows/llm/providers/multipart_converter_openai.py +20 -55
- mcp_agent/workflows/llm/providers/openai_multipart.py +19 -61
- mcp_agent/workflows/llm/providers/sampling_converter_anthropic.py +10 -12
- mcp_agent/workflows/llm/providers/sampling_converter_openai.py +7 -11
- mcp_agent/workflows/llm/sampling_converter.py +4 -11
- mcp_agent/workflows/llm/sampling_format_converter.py +12 -12
- mcp_agent/workflows/orchestrator/orchestrator.py +24 -67
- mcp_agent/workflows/orchestrator/orchestrator_models.py +14 -40
- mcp_agent/workflows/parallel/fan_in.py +17 -47
- mcp_agent/workflows/parallel/fan_out.py +6 -12
- mcp_agent/workflows/parallel/parallel_llm.py +9 -26
- mcp_agent/workflows/router/router_base.py +19 -49
- mcp_agent/workflows/router/router_embedding.py +11 -25
- mcp_agent/workflows/router/router_embedding_cohere.py +2 -2
- mcp_agent/workflows/router/router_embedding_openai.py +2 -2
- mcp_agent/workflows/router/router_llm.py +12 -28
- mcp_agent/workflows/swarm/swarm.py +20 -48
- mcp_agent/workflows/swarm/swarm_anthropic.py +2 -2
- mcp_agent/workflows/swarm/swarm_openai.py +2 -2
- fast_agent_mcp-0.1.12.dist-info/RECORD +0 -161
- {fast_agent_mcp-0.1.12.dist-info → fast_agent_mcp-0.1.13.dist-info}/WHEEL +0 -0
- {fast_agent_mcp-0.1.12.dist-info → fast_agent_mcp-0.1.13.dist-info}/entry_points.txt +0 -0
- {fast_agent_mcp-0.1.12.dist-info → fast_agent_mcp-0.1.13.dist-info}/licenses/LICENSE +0 -0
mcp_agent/mcp/mcp_aggregator.py
CHANGED
@@ -1,31 +1,31 @@
 from asyncio import Lock, gather
 from typing import (
-    List,
-    Dict,
-    Optional,
     TYPE_CHECKING,
     Any,
     Callable,
+    Dict,
+    List,
+    Optional,
     TypeVar,
 )
+
 from mcp import GetPromptResult, ReadResourceResult
-from pydantic import AnyUrl, BaseModel, ConfigDict
 from mcp.client.session import ClientSession
 from mcp.server.lowlevel.server import Server
 from mcp.server.stdio import stdio_server
 from mcp.types import (
     CallToolResult,
     ListToolsResult,
+    Prompt,
     TextContent,
     Tool,
-    Prompt,
 )
+from pydantic import AnyUrl, BaseModel, ConfigDict
 
+from mcp_agent.context_dependent import ContextDependent
 from mcp_agent.event_progress import ProgressAction
 from mcp_agent.logging.logger import get_logger
 from mcp_agent.mcp.gen_client import gen_client
-
-from mcp_agent.context_dependent import ContextDependent
 from mcp_agent.mcp.mcp_agent_client_session import MCPAgentClientSession
 from mcp_agent.mcp.mcp_connection_manager import MCPConnectionManager
 
@@ -33,9 +33,7 @@ if TYPE_CHECKING:
     from mcp_agent.context import Context
 
 
-logger = get_logger(
-    __name__
-)  # This will be replaced per-instance when agent_name is available
+logger = get_logger(__name__)  # This will be replaced per-instance when agent_name is available
 
 SEP = "-"
 
@@ -79,9 +77,7 @@ class MCPAggregator(ContextDependent):
         if self.connection_persistence:
             # Try to get existing connection manager from context
             if not hasattr(self.context, "_connection_manager"):
-                self.context._connection_manager = MCPConnectionManager(
-                    self.context.server_registry
-                )
+                self.context._connection_manager = MCPConnectionManager(self.context.server_registry)
                 await self.context._connection_manager.__aenter__()
             self._persistent_connection_manager = self.context._connection_manager
 
@@ -99,7 +95,7 @@ class MCPAggregator(ContextDependent):
         context: Optional["Context"] = None,
         name: str = None,
         **kwargs,
-    ):
+    ) -> None:
         """
         :param server_names: A list of server names to connect to.
         :param connection_persistence: Whether to maintain persistent connections to servers (default: True).
@@ -130,23 +126,17 @@ class MCPAggregator(ContextDependent):
         self._prompt_cache: Dict[str, List[Prompt]] = {}
         self._prompt_cache_lock = Lock()
 
-    async def close(self):
+    async def close(self) -> None:
         """
         Close all persistent connections when the aggregator is deleted.
         """
         if self.connection_persistence and self._persistent_connection_manager:
             try:
                 # Only attempt cleanup if we own the connection manager
-                if (
-                    hasattr(self.context, "_connection_manager")
-                    and self.context._connection_manager
-                    == self._persistent_connection_manager
-                ):
+                if hasattr(self.context, "_connection_manager") and self.context._connection_manager == self._persistent_connection_manager:
                     logger.info("Shutting down all persistent connections...")
                     await self._persistent_connection_manager.disconnect_all()
-                    await self._persistent_connection_manager.__aexit__(
-                        None, None, None
-                    )
+                    await self._persistent_connection_manager.__aexit__(None, None, None)
                     delattr(self.context, "_connection_manager")
                 self.initialized = False
             except Exception as e:
@@ -185,7 +175,7 @@ class MCPAggregator(ContextDependent):
             logger.error(f"Error creating MCPAggregator: {e}")
             await instance.__aexit__(None, None, None)
 
-    async def load_servers(self):
+    async def load_servers(self) -> None:
         """
         Discover tools from each server in parallel and build an index of namespaced tool names.
         Also populate the prompt cache.
@@ -212,9 +202,7 @@ class MCPAggregator(ContextDependent):
                 },
             )
 
-            await self._persistent_connection_manager.get_server(
-                server_name, client_session_factory=MCPAgentClientSession
-            )
+            await self._persistent_connection_manager.get_server(server_name, client_session_factory=MCPAgentClientSession)
 
             logger.info(
                 f"MCP Servers initialized for agent '{self.agent_name}'",
@@ -232,9 +220,7 @@ class MCPAggregator(ContextDependent):
                 logger.error(f"Error loading tools from server '{server_name}'", data=e)
                 return []
 
-        async def fetch_prompts(
-            client: ClientSession, server_name: str
-        ) -> List[Prompt]:
+        async def fetch_prompts(client: ClientSession, server_name: str) -> List[Prompt]:
             # Only fetch prompts if the server supports them
             capabilities = await self.get_capabilities(server_name)
             if not capabilities or not capabilities.prompts:
@@ -253,17 +239,11 @@ class MCPAggregator(ContextDependent):
             prompts: List[Prompt] = []
 
             if self.connection_persistence:
-                server_connection = (
-                    await self._persistent_connection_manager.get_server(
-                        server_name, client_session_factory=MCPAgentClientSession
-                    )
-                )
+                server_connection = await self._persistent_connection_manager.get_server(server_name, client_session_factory=MCPAgentClientSession)
                 tools = await fetch_tools(server_connection.session)
                 prompts = await fetch_prompts(server_connection.session, server_name)
             else:
-                async with gen_client(
-                    server_name, server_registry=self.context.server_registry
-                ) as client:
+                async with gen_client(server_name, server_registry=self.context.server_registry) as client:
                     tools = await fetch_tools(client)
                     prompts = await fetch_prompts(client, server_name)
 
@@ -319,9 +299,7 @@ class MCPAggregator(ContextDependent):
             return None
 
         try:
-            server_conn = await self._persistent_connection_manager.get_server(
-                server_name, client_session_factory=MCPAgentClientSession
-            )
+            server_conn = await self._persistent_connection_manager.get_server(server_name, client_session_factory=MCPAgentClientSession)
             # server_capabilities is a property, not a coroutine
             return server_conn.server_capabilities
         except Exception as e:
@@ -383,9 +361,7 @@ class MCPAggregator(ContextDependent):
             return error_factory(error_msg) if error_factory else None
 
         if self.connection_persistence:
-            server_connection = await self._persistent_connection_manager.get_server(
-                server_name, client_session_factory=MCPAgentClientSession
-            )
+            server_connection = await self._persistent_connection_manager.get_server(server_name, client_session_factory=MCPAgentClientSession)
             return await try_execute(server_connection.session)
         else:
             logger.debug(
@@ -396,9 +372,7 @@ class MCPAggregator(ContextDependent):
                     "agent_name": self.agent_name,
                 },
             )
-            async with gen_client(
-                server_name, server_registry=self.context.server_registry
-            ) as client:
+            async with gen_client(server_name, server_registry=self.context.server_registry) as client:
                 result = await try_execute(client)
                 logger.debug(
                     f"Closing temporary connection to server: {server_name}",
@@ -410,9 +384,7 @@ class MCPAggregator(ContextDependent):
                 )
                 return result
 
-    async def _parse_resource_name(
-        self, name: str, resource_type: str
-    ) -> tuple[str, str]:
+    async def _parse_resource_name(self, name: str, resource_type: str) -> tuple[str, str]:
         """
         Parse a possibly namespaced resource name into server name and local resource name.
 
@@ -447,9 +419,7 @@ class MCPAggregator(ContextDependent):
 
         return server_name, local_name
 
-    async def call_tool(
-        self, name: str, arguments: dict | None = None
-    ) -> CallToolResult:
+    async def call_tool(self, name: str, arguments: dict | None = None) -> CallToolResult:
        """
        Call a namespaced tool, e.g., 'server_name.tool_name'.
        """
@@ -481,14 +451,10 @@ class MCPAggregator(ContextDependent):
             operation_name=local_tool_name,
             method_name="call_tool",
             method_args={"name": local_tool_name, "arguments": arguments},
-            error_factory=lambda msg: CallToolResult(
-                isError=True, content=[TextContent(type="text", text=msg)]
-            ),
+            error_factory=lambda msg: CallToolResult(isError=True, content=[TextContent(type="text", text=msg)]),
         )
 
-    async def get_prompt(
-        self, prompt_name: str = None, arguments: dict[str, str] = None
-    ) -> GetPromptResult:
+    async def get_prompt(self, prompt_name: str | None, arguments: dict[str, str] | None) -> GetPromptResult:
         """
         Get a prompt from a server.
 
@@ -540,13 +506,9 @@ class MCPAggregator(ContextDependent):
            async with self._prompt_cache_lock:
                if server_name in self._prompt_cache:
                    # Check if any prompt in the cache has this name
-                    prompt_names = [
-                        prompt.name for prompt in self._prompt_cache[server_name]
-                    ]
+                    prompt_names = [prompt.name for prompt in self._prompt_cache[server_name]]
                    if local_prompt_name not in prompt_names:
-                        logger.debug(
-                            f"Prompt '{local_prompt_name}' not found in cache for server '{server_name}'"
-                        )
+                        logger.debug(f"Prompt '{local_prompt_name}' not found in cache for server '{server_name}'")
                        return GetPromptResult(
                            description=f"Prompt '{local_prompt_name}' not found on server '{server_name}'",
                            messages=[],
@@ -568,9 +530,7 @@ class MCPAggregator(ContextDependent):
 
            # Add namespaced name and source server to the result
            if result and result.messages:
-                result.namespaced_name = (
-                    namespaced_name or f"{server_name}{SEP}{local_prompt_name}"
-                )
+                result.namespaced_name = namespaced_name or f"{server_name}{SEP}{local_prompt_name}"
 
            # Store the arguments in the result for display purposes
            if arguments:
@@ -590,18 +550,14 @@ class MCPAggregator(ContextDependent):
                    potential_servers.append(s_name)
 
            if potential_servers:
-                logger.debug(
-                    f"Found prompt '{local_prompt_name}' in cache for servers: {potential_servers}"
-                )
+                logger.debug(f"Found prompt '{local_prompt_name}' in cache for servers: {potential_servers}")
 
                # Try each server from the cache
                for s_name in potential_servers:
                    # Check if this server supports prompts
                    capabilities = await self.get_capabilities(s_name)
                    if not capabilities or not capabilities.prompts:
-                        logger.debug(
-                            f"Server '{s_name}' does not support prompts, skipping"
-                        )
+                        logger.debug(f"Server '{s_name}' does not support prompts, skipping")
                        continue
 
                    try:
@@ -620,9 +576,7 @@ class MCPAggregator(ContextDependent):
 
                        # If we got a successful result with messages, return it
                        if result and result.messages:
-                            logger.debug(
-                                f"Successfully retrieved prompt '{local_prompt_name}' from server '{s_name}'"
-                            )
+                            logger.debug(f"Successfully retrieved prompt '{local_prompt_name}' from server '{s_name}'")
                            # Add namespaced name using the actual server where found
                            result.namespaced_name = f"{s_name}{SEP}{local_prompt_name}"
 
@@ -635,9 +589,7 @@ class MCPAggregator(ContextDependent):
                    except Exception as e:
                        logger.debug(f"Error retrieving prompt from server '{s_name}': {e}")
            else:
-                logger.debug(
-                    f"Prompt '{local_prompt_name}' not found in any server's cache"
-                )
+                logger.debug(f"Prompt '{local_prompt_name}' not found in any server's cache")
 
        # If not in cache, perform a full search as fallback (cache might be outdated)
        # First identify servers that support prompts
@@ -647,9 +599,7 @@ class MCPAggregator(ContextDependent):
            if capabilities and capabilities.prompts:
                supported_servers.append(s_name)
            else:
-                logger.debug(
-                    f"Server '{s_name}' does not support prompts, skipping from fallback search"
-                )
+                logger.debug(f"Server '{s_name}' does not support prompts, skipping from fallback search")
 
        # Try all supported servers in order
        for s_name in supported_servers:
@@ -670,9 +620,7 @@ class MCPAggregator(ContextDependent):
 
                # If we got a successful result with messages, return it
                if result and result.messages:
-                    logger.debug(
-                        f"Found prompt '{local_prompt_name}' on server '{s_name}' (not in cache)"
-                    )
+                    logger.debug(f"Found prompt '{local_prompt_name}' on server '{s_name}' (not in cache)")
                    # Add namespaced name using the actual server where found
                    result.namespaced_name = f"{s_name}{SEP}{local_prompt_name}"
 
@@ -691,21 +639,15 @@ class MCPAggregator(ContextDependent):
                )
 
                prompts = getattr(prompt_list_result, "prompts", [])
-                matching_prompts = [
-                    p for p in prompts if p.name == local_prompt_name
-                ]
+                matching_prompts = [p for p in prompts if p.name == local_prompt_name]
                if matching_prompts:
                    async with self._prompt_cache_lock:
                        if s_name not in self._prompt_cache:
                            self._prompt_cache[s_name] = []
                        # Add if not already in the cache
-                        prompt_names_in_cache = [
-                            p.name for p in self._prompt_cache[s_name]
-                        ]
+                        prompt_names_in_cache = [p.name for p in self._prompt_cache[s_name]]
                        if local_prompt_name not in prompt_names_in_cache:
-                            self._prompt_cache[s_name].append(
-                                matching_prompts[0]
-                            )
+                            self._prompt_cache[s_name].append(matching_prompts[0])
            except Exception:
                # Ignore errors when updating cache
                pass
@@ -754,9 +696,7 @@ class MCPAggregator(ContextDependent):
            async with self._prompt_cache_lock:
                if server_name in self._prompt_cache:
                    results[server_name] = self._prompt_cache[server_name]
-                    logger.debug(
-                        f"Returning cached prompts for server '{server_name}'"
-                    )
+                    logger.debug(f"Returning cached prompts for server '{server_name}'")
                    return results
 
            # Check if server supports prompts
@@ -790,9 +730,7 @@ class MCPAggregator(ContextDependent):
                if capabilities and capabilities.prompts:
                    supported_servers.append(s_name)
                else:
-                    logger.debug(
-                        f"Server '{s_name}' does not support prompts, skipping"
-                    )
+                    logger.debug(f"Server '{s_name}' does not support prompts, skipping")
                    # Add empty list to results for this server
                    results[s_name] = []
 
@@ -828,9 +766,7 @@ class MCPAggregator(ContextDependent):
        logger.debug(f"Available prompts across servers: {results}")
        return results
 
-    async def get_resource(
-        self, server_name: str, resource_uri: str
-    ) -> ReadResourceResult:
+    async def get_resource(self, server_name: str, resource_uri: str) -> ReadResourceResult:
        """
        Get a resource directly from an MCP server by URI.
 
@@ -881,7 +817,7 @@ class MCPCompoundServer(Server):
    A compound server (server-of-servers) that aggregates multiple MCP servers and is itself an MCP server
    """
 
-    def __init__(self, server_names: List[str], name: str = "MCPCompoundServer"):
+    def __init__(self, server_names: List[str], name: str = "MCPCompoundServer") -> None:
        super().__init__(name)
        self.aggregator = MCPAggregator(server_names)
 
@@ -896,9 +832,7 @@ class MCPCompoundServer(Server):
        tools_result = await self.aggregator.list_tools()
        return tools_result.tools
 
-    async def _call_tool(
-        self, name: str, arguments: dict | None = None
-    ) -> CallToolResult:
+    async def _call_tool(self, name: str, arguments: dict | None = None) -> CallToolResult:
        """Call a specific tool from the aggregated servers."""
        try:
            result = await self.aggregator.call_tool(name=name, arguments=arguments)
@@ -909,9 +843,7 @@ class MCPCompoundServer(Server):
                content=[TextContent(type="text", text=f"Error calling tool: {e}")],
            )
 
-    async def _get_prompt(
-        self, name: str = None, arguments: dict[str, str] = None
-    ) -> GetPromptResult:
+    async def _get_prompt(self, name: str = None, arguments: dict[str, str] = None) -> GetPromptResult:
        """
        Get a prompt from the aggregated servers.
 
@@ -920,14 +852,10 @@ class MCPCompoundServer(Server):
            arguments: Optional dictionary of string arguments for prompt templating
        """
        try:
-            result = await self.aggregator.get_prompt(
-                prompt_name=name, arguments=arguments
-            )
+            result = await self.aggregator.get_prompt(prompt_name=name, arguments=arguments)
            return result
        except Exception as e:
-            return GetPromptResult(
-                description=f"Error getting prompt: {e}", messages=[]
-            )
+            return GetPromptResult(description=f"Error getting prompt: {e}", messages=[])
 
    async def _list_prompts(self, server_name: str = None) -> Dict[str, List[str]]:
        """List available prompts from the aggregated servers."""
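Most of the changes above are mechanical reformatting (wrapped call sites and signatures joined onto single lines, imports sorted, `-> None` return annotations added); the notable interface tweak is the stricter `get_prompt(prompt_name, arguments)` signature, which drops the default values. For orientation, here is a minimal usage sketch of the aggregator API as it reads after this change. The server and tool names, the event-loop setup, and the assumption that an initialized fast-agent context is available are illustrative only and are not part of this diff.

```python
import asyncio

from mcp_agent.mcp.mcp_aggregator import SEP, MCPAggregator


async def main() -> None:
    # "fetch" and "filesystem" are hypothetical server names that would
    # normally be configured in fastagent.config.yaml.
    aggregator = MCPAggregator(["fetch", "filesystem"])
    await aggregator.load_servers()  # discover tools and populate the prompt cache
    try:
        # Tools are addressed by namespaced name: "<server_name>-<tool_name>" (SEP is "-").
        result = await aggregator.call_tool(
            name=f"fetch{SEP}fetch", arguments={"url": "https://example.com"}
        )
        print("tool error:", result.isError)

        # Prompts may be namespaced the same way; with the new signature both
        # arguments are passed explicitly.
        prompt = await aggregator.get_prompt(f"filesystem{SEP}greeting", None)
        print(getattr(prompt, "namespaced_name", None), len(prompt.messages))
    finally:
        await aggregator.close()  # shut down persistent connections owned by this aggregator


if __name__ == "__main__":
    asyncio.run(main())
```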
mcp_agent/mcp/mcp_connection_manager.py
CHANGED
@@ -2,38 +2,37 @@
 Manages the lifecycle of multiple MCP server connections.
 """
 
-from datetime import timedelta
 import asyncio
+from datetime import timedelta
 from typing import (
+    TYPE_CHECKING,
     AsyncGenerator,
     Callable,
     Dict,
     Optional,
-    TYPE_CHECKING,
 )
 
-from anyio import Event,
+from anyio import Event, Lock, create_task_group
 from anyio.streams.memory import MemoryObjectReceiveStream, MemoryObjectSendStream
-
 from mcp import ClientSession
+from mcp.client.sse import sse_client
 from mcp.client.stdio import (
     StdioServerParameters,
     get_default_environment,
 )
-from mcp.client.sse import sse_client
 from mcp.types import JSONRPCMessage, ServerCapabilities
 
 from mcp_agent.config import MCPServerSettings
+from mcp_agent.context_dependent import ContextDependent
 from mcp_agent.core.exceptions import ServerInitializationError
 from mcp_agent.event_progress import ProgressAction
 from mcp_agent.logging.logger import get_logger
 from mcp_agent.mcp.mcp_agent_client_session import MCPAgentClientSession
 from mcp_agent.mcp.stdio import stdio_client_with_rich_stderr
-from mcp_agent.context_dependent import ContextDependent
 
 if TYPE_CHECKING:
-    from mcp_agent.mcp_server_registry import InitHookCallable, ServerRegistry
     from mcp_agent.context import Context
+    from mcp_agent.mcp_server_registry import InitHookCallable, ServerRegistry
 
 logger = get_logger(__name__)
 
@@ -64,7 +63,7 @@ class ServerConnection:
             ClientSession,
         ],
         init_hook: Optional["InitHookCallable"] = None,
-    ):
+    ) -> None:
         self.server_name = server_name
         self.server_config = server_config
         self.session: ClientSession | None = None
@@ -134,11 +133,7 @@ class ServerConnection:
         Create a new session instance for this server connection.
         """
 
-        read_timeout = (
-            timedelta(seconds=self.server_config.read_timeout_seconds)
-            if self.server_config.read_timeout_seconds
-            else None
-        )
+        read_timeout = timedelta(seconds=self.server_config.read_timeout_seconds) if self.server_config.read_timeout_seconds else None
 
         session = self._client_session_factory(read_stream, send_stream, read_timeout)
 
@@ -192,9 +187,7 @@ class MCPConnectionManager(ContextDependent):
     Integrates with the application context system for proper resource management.
     """
 
-    def __init__(
-        self, server_registry: "ServerRegistry", context: Optional["Context"] = None
-    ):
+    def __init__(self, server_registry: "ServerRegistry", context: Optional["Context"] = None) -> None:
         super().__init__(context=context)
         self.server_registry = server_registry
         self.running_servers: Dict[str, ServerConnection] = {}
@@ -255,9 +248,7 @@ class MCPConnectionManager(ContextDependent):
        if not config:
            raise ValueError(f"Server '{server_name}' not found in registry.")
 
-        logger.debug(
-            f"{server_name}: Found server configuration=", data=config.model_dump()
-        )
+        logger.debug(f"{server_name}: Found server configuration=", data=config.model_dump())
 
        def transport_context_factory():
            if config.transport == "stdio":
@@ -309,9 +300,7 @@ class MCPConnectionManager(ContextDependent):
 
        # If server exists but isn't healthy, remove it so we can create a new one
        if server_conn:
-            logger.info(
-                f"{server_name}: Server exists but is unhealthy, recreating..."
-            )
+            logger.info(f"{server_name}: Server exists but is unhealthy, recreating...")
            self.running_servers.pop(server_name)
            server_conn.request_shutdown()
 
@@ -328,19 +317,13 @@ class MCPConnectionManager(ContextDependent):
        # Check if the server is healthy after initialization
        if not server_conn.is_healthy():
            error_msg = server_conn._error_message or "Unknown error"
-            raise ServerInitializationError(
-                f"MCP Server: '{server_name}': Failed to initialize with error: '{error_msg}'. Check fastagent.config.yaml"
-            )
+            raise ServerInitializationError(f"MCP Server: '{server_name}': Failed to initialize with error: '{error_msg}'. Check fastagent.config.yaml")
 
        return server_conn
 
-    async def get_server_capabilities(
-        self, server_name: str
-    ) -> ServerCapabilities | None:
+    async def get_server_capabilities(self, server_name: str) -> ServerCapabilities | None:
        """Get the capabilities of a specific server."""
-        server_conn = await self.get_server(
-            server_name, client_session_factory=MCPAgentClientSession
-        )
+        server_conn = await self.get_server(server_name, client_session_factory=MCPAgentClientSession)
        return server_conn.server_capabilities if server_conn else None
 
    async def disconnect_server(self, server_name: str) -> None:
@@ -353,13 +336,9 @@ class MCPConnectionManager(ContextDependent):
        server_conn = self.running_servers.pop(server_name, None)
        if server_conn:
            server_conn.request_shutdown()
-            logger.info(
-                f"{server_name}: Shutdown signal sent (lifecycle task will exit)."
-            )
+            logger.info(f"{server_name}: Shutdown signal sent (lifecycle task will exit).")
        else:
-            logger.info(
-                f"{server_name}: No persistent connection found. Skipping server shutdown"
-            )
+            logger.info(f"{server_name}: No persistent connection found. Skipping server shutdown")
 
    async def disconnect_all(self) -> None:
        """Disconnect all servers that are running under this connection manager."""
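The `MCPConnectionManager` changes are likewise formatting-only (joined signatures, `-> None` annotations, reordered imports). Below is a minimal sketch of how a caller drives the manager, mirroring the lifecycle the aggregator uses above; `server_registry` is assumed to be an already-configured `ServerRegistry`, and the explicit `__aenter__`/`__aexit__` calls copy the pattern visible in the aggregator diff.

```python
from mcp_agent.core.exceptions import ServerInitializationError
from mcp_agent.mcp.mcp_agent_client_session import MCPAgentClientSession
from mcp_agent.mcp.mcp_connection_manager import MCPConnectionManager


async def list_tools_for(server_registry, server_name: str):
    manager = MCPConnectionManager(server_registry)
    await manager.__aenter__()  # same entry pattern the aggregator uses
    try:
        # get_server() starts (or reuses) the server connection; a failed
        # initialization surfaces as ServerInitializationError.
        conn = await manager.get_server(
            server_name, client_session_factory=MCPAgentClientSession
        )
        capabilities = await manager.get_server_capabilities(server_name)
        print(f"{server_name} capabilities: {capabilities}")
        # The live ClientSession is exposed on the connection object.
        return await conn.session.list_tools()
    except ServerInitializationError as exc:
        print(f"{server_name} failed to initialize: {exc}")
        return None
    finally:
        await manager.disconnect_all()
        await manager.__aexit__(None, None, None)
```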
mcp_agent/mcp/prompt_message_multipart.py
CHANGED
@@ -1,14 +1,14 @@
 from typing import List, Union
-from pydantic import BaseModel
 
 from mcp.types import (
-    PromptMessage,
-    TextContent,
-    ImageContent,
     EmbeddedResource,
-    Role,
     GetPromptResult,
+    ImageContent,
+    PromptMessage,
+    Role,
+    TextContent,
 )
+from pydantic import BaseModel
 
 
 class PromptMessageMultipart(BaseModel):
@@ -21,9 +21,7 @@ class PromptMessageMultipart(BaseModel):
     content: List[Union[TextContent, ImageContent, EmbeddedResource]]
 
     @classmethod
-    def
-        cls, messages: List[PromptMessage]
-    ) -> List["PromptMessageMultipart"]:
+    def to_multipart(cls, messages: List[PromptMessage]) -> List["PromptMessageMultipart"]:
         """Convert a sequence of PromptMessages into PromptMessageMultipart objects."""
         if not messages:
             return []
@@ -41,7 +39,8 @@ class PromptMessageMultipart(BaseModel):
                 current_group = cls(role=msg.role, content=[msg.content])
             else:
                 # Same role, add to current message
-                current_group.content.append(msg.content)
+                if current_group is not None:
+                    current_group.content.append(msg.content)
 
         # Add the last group
         if current_group is not None:
@@ -49,16 +48,11 @@ class PromptMessageMultipart(BaseModel):
 
         return result
 
-    def
+    def from_multipart(self) -> List[PromptMessage]:
         """Convert this PromptMessageMultipart to a sequence of standard PromptMessages."""
-        return [
-            PromptMessage(role=self.role, content=content_part)
-            for content_part in self.content
-        ]
+        return [PromptMessage(role=self.role, content=content_part) for content_part in self.content]
 
     @classmethod
-    def parse_get_prompt_result(
-        cls, result: GetPromptResult
-    ) -> List["PromptMessageMultipart"]:
+    def parse_get_prompt_result(cls, result: GetPromptResult) -> List["PromptMessageMultipart"]:
         """Parse a GetPromptResult into PromptMessageMultipart objects."""
-        return cls.
+        return cls.to_multipart(result.messages)
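The `PromptMessageMultipart` hunks keep the same behaviour (consecutive messages with the same role are grouped into one multipart message and can be expanded back), with a `None` guard added inside `to_multipart`. A small round-trip sketch of the converters as they read after this change:

```python
from mcp.types import PromptMessage, TextContent

from mcp_agent.mcp.prompt_message_multipart import PromptMessageMultipart

messages = [
    PromptMessage(role="user", content=TextContent(type="text", text="Hello")),
    PromptMessage(role="user", content=TextContent(type="text", text="Summarize this file.")),
    PromptMessage(role="assistant", content=TextContent(type="text", text="Sure.")),
]

# Consecutive same-role messages collapse into a single multipart message.
multipart = PromptMessageMultipart.to_multipart(messages)
assert len(multipart) == 2
assert len(multipart[0].content) == 2

# from_multipart() expands each multipart message back into standard PromptMessages.
flat = [pm for part in multipart for pm in part.from_multipart()]
assert len(flat) == 3
```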
|