fast-agent-mcp 0.1.9__py3-none-any.whl → 0.1.10__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {fast_agent_mcp-0.1.9.dist-info → fast_agent_mcp-0.1.10.dist-info}/METADATA +4 -3
- {fast_agent_mcp-0.1.9.dist-info → fast_agent_mcp-0.1.10.dist-info}/RECORD +20 -20
- mcp_agent/agents/agent.py +112 -0
- mcp_agent/context.py +0 -2
- mcp_agent/core/factory.py +14 -13
- mcp_agent/core/fastagent.py +2 -2
- mcp_agent/core/proxies.py +38 -0
- mcp_agent/logging/listeners.py +3 -6
- mcp_agent/mcp/mcp_agent_client_session.py +21 -145
- mcp_agent/mcp/mcp_aggregator.py +50 -2
- mcp_agent/mcp/mcp_connection_manager.py +0 -1
- mcp_agent/mcp/prompts/prompt_server.py +12 -11
- mcp_agent/mcp/resource_utils.py +58 -38
- mcp_agent/mcp/stdio.py +23 -15
- mcp_agent/mcp_server_registry.py +5 -2
- mcp_agent/resources/examples/workflows/orchestrator.py +3 -3
- mcp_agent/workflows/llm/augmented_llm_passthrough.py +57 -2
- {fast_agent_mcp-0.1.9.dist-info → fast_agent_mcp-0.1.10.dist-info}/WHEEL +0 -0
- {fast_agent_mcp-0.1.9.dist-info → fast_agent_mcp-0.1.10.dist-info}/entry_points.txt +0 -0
- {fast_agent_mcp-0.1.9.dist-info → fast_agent_mcp-0.1.10.dist-info}/licenses/LICENSE +0 -0
{fast_agent_mcp-0.1.9.dist-info → fast_agent_mcp-0.1.10.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: fast-agent-mcp
-Version: 0.1.9
+Version: 0.1.10
 Summary: Define, Prompt and Test MCP enabled Agents and Workflows
 Author-email: Shaun Smith <fastagent@llmindset.co.uk>, Sarmad Qadri <sarmad@lastmileai.dev>
 License: Apache License
@@ -259,9 +259,10 @@ The simple declarative syntax lets you concentrate on composing your Prompts and
 
 Evaluate how different models handle Agent and MCP Server calling tasks, then build multi-model workflows using the best provider for each task.
 
-`fast-agent` is now multi-modal, supporting Images and PDFs for both Anthropic and OpenAI endpoints (for supported models), via Prompts and MCP Tool Call results.
+`fast-agent` is now multi-modal, supporting Images and PDFs for both Anthropic and OpenAI endpoints (for supported models), via Prompts, Resources and MCP Tool Call results.
 
-> [!TIP]
+> [!TIP]
+> `fast-agent` is now MCP Native! Coming Soon - Full Documentation Site.
 
 ### Agent Application Development
 
{fast_agent_mcp-0.1.9.dist-info → fast_agent_mcp-0.1.10.dist-info}/RECORD
CHANGED
@@ -2,13 +2,13 @@ mcp_agent/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 mcp_agent/app.py,sha256=0_C1xmNZlk9qZoewnNI_mC7sSfO9oJgkOyiKkQ62MHU,10606
 mcp_agent/config.py,sha256=OpPTsk9gNm2IA1laUomAMkGA-pAlp5uILQpEPBjavQs,10644
 mcp_agent/console.py,sha256=Gjf2QLFumwG1Lav__c07X_kZxxEUSkzV-1_-YbAwcwo,813
-mcp_agent/context.py,sha256=
+mcp_agent/context.py,sha256=m1S5M9a2Kdxy5rEGG6Uwwmi19bDEpU6u-e5ZgPmVXfY,8031
 mcp_agent/context_dependent.py,sha256=TGqRLzYCOnsWGoaD1HtrliYtWo8MeaWCQk6ePUmyYCw,1446
 mcp_agent/event_progress.py,sha256=25iz0yyg-O4glMmtijcYpDdUmtUIKsCmR_8A52GgeC4,2716
-mcp_agent/mcp_server_registry.py,sha256=
+mcp_agent/mcp_server_registry.py,sha256=eQbl0usicnsNE03haxc6C_FHl_0goPAZdcb082cDIQk,9992
 mcp_agent/progress_display.py,sha256=GeJU9VUt6qKsFVymG688hCMVCsAygG9ifiiEb5IcbN4,361
 mcp_agent/agents/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-mcp_agent/agents/agent.py,sha256=
+mcp_agent/agents/agent.py,sha256=qI3njT8SPTLKzCwBfdlWNMVoSEwLGNjuq6owPfhvIas,17444
 mcp_agent/cli/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 mcp_agent/cli/__main__.py,sha256=AVZ7tQFhU_sDOGuUGJq8ujgKtcxsYJBJwHbVaaiRDlI,166
 mcp_agent/cli/main.py,sha256=DE6EZzspfzHwPK59x8vL4AIDHRQkVQ1Ja70XRGU1IQs,2753
@@ -24,11 +24,11 @@ mcp_agent/core/decorators.py,sha256=dkAah1eIuYsEfQISDryG0u2GrzNnsO_jyN7lhpQfNlM,
 mcp_agent/core/enhanced_prompt.py,sha256=bykUEnnc1CEWODJwXvl4VGfCtrJPtVXU0D4mUglJK7A,18827
 mcp_agent/core/error_handling.py,sha256=D3HMW5odrbJvaKqcpCGj6eDXrbFcuqYaCZz7fyYiTu4,623
 mcp_agent/core/exceptions.py,sha256=a2-JGRwFFRoQEPuAq0JC5PhAJ5TO3xVJfdS4-VN29cw,2225
-mcp_agent/core/factory.py,sha256=
-mcp_agent/core/fastagent.py,sha256=
+mcp_agent/core/factory.py,sha256=MhlYS0G0IyFy_j46HVJdjEznJzfCFjx_NRhUPcbQIJI,19081
+mcp_agent/core/fastagent.py,sha256=jJmO0DryFGwSkse_3q5Ll-5XONDvj7k_Oeb-ETBKFkA,19620
 mcp_agent/core/mcp_content.py,sha256=rXT2C5gP9qgC-TI5F362ZLJi_erzcEOnlP9D2ZKK0i0,6860
 mcp_agent/core/prompt.py,sha256=R-X3kptu3ehV_SQeiGnP6F9HMN-92I8e73gnkQ1tDVs,4317
-mcp_agent/core/proxies.py,sha256=
+mcp_agent/core/proxies.py,sha256=a5tNv-EVcv67XNAkbzaybQVbRgkNEfhIkcveS1LBp2s,10242
 mcp_agent/core/types.py,sha256=Zhi9iW7uiOfdpSt9NC0FCtGRFtJPg4mpZPK2aYi7a7M,817
 mcp_agent/core/validation.py,sha256=x0fsx5eLTawASFm9MDtEukwGOj_RTdY1OW064UihMR8,8309
 mcp_agent/eval/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -45,7 +45,7 @@ mcp_agent/human_input/types.py,sha256=ZvuDHvI0-wO2tFoS0bzrv8U5B83zYdxAG7g9G9jCxu
 mcp_agent/logging/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 mcp_agent/logging/events.py,sha256=qfYJnrqgXdujV-nl-iOwBEBh6HMraowBI4zeAWPPU4A,3461
 mcp_agent/logging/json_serializer.py,sha256=pa_mf0i0YKpLsGq3THuITFUdAbmae-dv1OPOLbcS0to,5782
-mcp_agent/logging/listeners.py,sha256=
+mcp_agent/logging/listeners.py,sha256=1DOc0CvAE6pFxOljfZqs2TGgF50sZZGMDt4Gm_PAjWo,6551
 mcp_agent/logging/logger.py,sha256=Tr009BnfGUKuZcdinnSin0Z_zIsfDNGdcnamw2rDHRQ,10604
 mcp_agent/logging/rich_progress.py,sha256=IEVFdFGA0nwg6pSt9Ydni5LCNYZZPKYMe-6DCi9pO4Y,4851
 mcp_agent/logging/tracing.py,sha256=jQivxKYl870oXakmyUk7TXuTQSvsIzpHwZlSQfy4b0c,5203
@@ -53,18 +53,18 @@ mcp_agent/logging/transport.py,sha256=MFgiCQ-YFP0tSMhDMpZCj585vflWcMydM4oyCFduVf
 mcp_agent/mcp/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 mcp_agent/mcp/gen_client.py,sha256=u0HwdJiw9YCerS5JC7JDuGgBh9oTcLd7vv9vPjwibXc,3025
 mcp_agent/mcp/mcp_activity.py,sha256=CajXCFWZ2cKEX9s4-HfNVAj471ePTVs4NOkvmIh65tE,592
-mcp_agent/mcp/mcp_agent_client_session.py,sha256=
+mcp_agent/mcp/mcp_agent_client_session.py,sha256=lfz38wzIoMfZyH3dAgclHohOVX0tR7Y2FCE2t7CVsPw,3956
 mcp_agent/mcp/mcp_agent_server.py,sha256=xP09HZTeguJi4Fq0p3fjLBP55uSYe5AdqM90xCgn9Ho,1639
-mcp_agent/mcp/mcp_aggregator.py,sha256=
-mcp_agent/mcp/mcp_connection_manager.py,sha256=
+mcp_agent/mcp/mcp_aggregator.py,sha256=NuFslY5-0as2VAfcg6t-k3sgpX-mh3AWttuS9KHL4n4,37684
+mcp_agent/mcp/mcp_connection_manager.py,sha256=PdLia-rxbhUdAdEnW7TQbkf1qeI9RR3xhQw1j11Bi6o,13612
 mcp_agent/mcp/mime_utils.py,sha256=difepNR_gpb4MpMLkBRAoyhDk-AjXUHTiqKvT_VwS1o,1805
 mcp_agent/mcp/prompt_message_multipart.py,sha256=U7IN0JStmy26akTXcqE4x90oWzm8xs1qa0VeKIyPKmE,1962
 mcp_agent/mcp/prompt_serialization.py,sha256=StcXV7V4fqqtCmOCXGCyYXx5vpwNhL2xr3RG_awwdqI,16056
-mcp_agent/mcp/resource_utils.py,sha256=
-mcp_agent/mcp/stdio.py,sha256=
+mcp_agent/mcp/resource_utils.py,sha256=G9IBWyasxKKcbq3T_fSpM6mHE8PjBargEdfQnBPrkZY,6650
+mcp_agent/mcp/stdio.py,sha256=QJcxEw2CXJrhR7PHyhuwUekzaXoDng_cNjai-rdZNg0,4479
 mcp_agent/mcp/prompts/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 mcp_agent/mcp/prompts/__main__.py,sha256=gr1Tdz9fcK0EXjEuZg_BOnKUmvhYq5AH2lFZicVyNb0,237
-mcp_agent/mcp/prompts/prompt_server.py,sha256=
+mcp_agent/mcp/prompts/prompt_server.py,sha256=6K4FeKNW_JApWUNB055gl8UnWyC1mvtl_kPEvgUnPjk,17348
 mcp_agent/mcp/prompts/prompt_template.py,sha256=NDnSVA0W1wayZHCVx27lfuVPoxlAz-FfBwiCEQG9Ixk,16324
 mcp_agent/mcp_server/__init__.py,sha256=SEWyU7aSFzdSk6iTYnrQu-llji5_P5dp3TaztCt_rzo,154
 mcp_agent/mcp_server/agent_server.py,sha256=SUBggPyrzWtBRUC5xIMpCxu6ei-6Vah3q9Si12BQ-zY,4444
@@ -93,7 +93,7 @@ mcp_agent/resources/examples/workflows/chaining.py,sha256=1G_0XBcFkSJCOXb6N_iXWl
 mcp_agent/resources/examples/workflows/evaluator.py,sha256=3XmW1mjImlaWb0c5FWHYS9yP8nVGTbEdJySAoWXwrDg,3109
 mcp_agent/resources/examples/workflows/fastagent.config.yaml,sha256=k2AiapOcK42uqG2nWDVvnSLqN4okQIQZK0FTbZufBpY,809
 mcp_agent/resources/examples/workflows/human_input.py,sha256=c8cBdLEPbaMXddFwsfN3Z7RFs5PZXsdrjANfvq1VTPM,605
-mcp_agent/resources/examples/workflows/orchestrator.py,sha256=
+mcp_agent/resources/examples/workflows/orchestrator.py,sha256=2nkRkpMgi4V-M6fwcpipQUImf0nZoGf4zfVllfojsz8,2596
 mcp_agent/resources/examples/workflows/parallel.py,sha256=pLbQrtXfbdYqMVddxtg5dZnBnm5Wo2mXlIa1Vf2F1FQ,3096
 mcp_agent/resources/examples/workflows/router.py,sha256=J1yTAimFY53jcyd21cq1XAZvtOxnNsmtSjSp13M5EgE,1668
 mcp_agent/resources/examples/workflows/sse.py,sha256=tdmmh7p87YNfcF_fCq3evAmc1Nek0oY0YOqLRKBLqKg,570
@@ -120,7 +120,7 @@ mcp_agent/workflows/llm/anthropic_utils.py,sha256=OFmsVmDQ22880duDWQrEeQEB47xtvu
 mcp_agent/workflows/llm/augmented_llm.py,sha256=9cWy-4yNG13w4oQgXmisgWTcm6aoJIRCYTX85Bkf-MI,30554
 mcp_agent/workflows/llm/augmented_llm_anthropic.py,sha256=opV4PTai2eoYUzJS0gCPGEy4pe-lT2Eo1Sao6Y_EIiY,20140
 mcp_agent/workflows/llm/augmented_llm_openai.py,sha256=OUSmvY2m6HU1JOK5nEzKDHpHReT0ffjoHDFHk6aYhoc,21002
-mcp_agent/workflows/llm/augmented_llm_passthrough.py,sha256=
+mcp_agent/workflows/llm/augmented_llm_passthrough.py,sha256=IoMNOKK9l46bp4OxfXrB4uK7_4X7ufjuFyXSQCH4YnM,6219
 mcp_agent/workflows/llm/augmented_llm_playback.py,sha256=5ypv3owJU6pscktqg9tkLQVKNgaA50e8OWmC1hAhrtE,4328
 mcp_agent/workflows/llm/llm_selector.py,sha256=G7pIybuBDwtmyxUDov_QrNYH2FoI0qFRu2JfoxWUF5Y,11045
 mcp_agent/workflows/llm/model_factory.py,sha256=UHePE5Ow03kpE44kjYtFGEhVFSYp0AY2yGri58yCBKU,7688
@@ -151,8 +151,8 @@ mcp_agent/workflows/swarm/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJW
 mcp_agent/workflows/swarm/swarm.py,sha256=-lAIeSWDqbGHGRPTvjiP9nIKWvxxy9DAojl9yQzO1Pw,11050
 mcp_agent/workflows/swarm/swarm_anthropic.py,sha256=pW8zFx5baUWGd5Vw3nIDF2oVOOGNorij4qvGJKdYPcs,1624
 mcp_agent/workflows/swarm/swarm_openai.py,sha256=wfteywvAGkT5bLmIxX_StHJq8144whYmCRnJASAjOes,1596
-fast_agent_mcp-0.1.9.dist-info/METADATA,sha256=
-fast_agent_mcp-0.1.9.dist-info/WHEEL,sha256=
-fast_agent_mcp-0.1.9.dist-info/entry_points.txt,sha256=
-fast_agent_mcp-0.1.9.dist-info/licenses/LICENSE,sha256=
-fast_agent_mcp-0.1.9.dist-info/RECORD,,
+fast_agent_mcp-0.1.10.dist-info/METADATA,sha256=Kum2eRyw2tDXTb1rG7JIf-2IrkOC-xWOnFyQpulgXq4,29760
+fast_agent_mcp-0.1.10.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+fast_agent_mcp-0.1.10.dist-info/entry_points.txt,sha256=qPM7vwtN1_KmP3dXehxgiCxUBHtqP7yfenZigztvY-w,226
+fast_agent_mcp-0.1.10.dist-info/licenses/LICENSE,sha256=cN3FxDURL9XuzE5mhK9L2paZo82LTfjwCYVT7e3j0e4,10939
+fast_agent_mcp-0.1.10.dist-info/RECORD,,
mcp_agent/agents/agent.py
CHANGED
@@ -9,7 +9,10 @@ from mcp.types import (
     ListToolsResult,
     TextContent,
     Tool,
+    EmbeddedResource,
+    ReadResourceResult,
 )
+from mcp_agent.mcp.prompt_message_multipart import PromptMessageMultipart
 
 from mcp_agent.core.exceptions import PromptExitError
 from mcp_agent.mcp.mcp_aggregator import MCPAggregator
@@ -320,6 +323,11 @@ class Agent(MCPAggregator):
             ],
         )
 
+    async def read_resource(
+        self, server_name: str, resource_name: str
+    ) -> ReadResourceResult:
+        return None
+
     async def apply_prompt(
         self, prompt_name: str, arguments: dict[str, str] = None
     ) -> str:
@@ -359,3 +367,107 @@ class Agent(MCPAggregator):
         # The LLM will automatically generate a response if needed
         result = await self._llm.apply_prompt_template(prompt_result, display_name)
         return result
+
+    async def get_resource(self, server_name: str, resource_name: str):
+        """
+        Get a resource directly from an MCP server by name.
+
+        Args:
+            server_name: Name of the MCP server to retrieve the resource from
+            resource_name: Name of the resource to retrieve
+
+        Returns:
+            The resource object from the MCP server
+
+        Raises:
+            ValueError: If the server doesn't exist or the resource couldn't be found
+        """
+        if not self.initialized:
+            await self.initialize()
+
+        # Get the specified server connection
+        server = self.get_server(server_name)
+        if not server:
+            raise ValueError(f"Server '{server_name}' not found or not connected")
+
+        # Request the resource directly from the server
+        try:
+            resource_result = await server.get_resource(resource_name)
+            return resource_result
+        except Exception as e:
+            self.logger.error(
+                f"Error retrieving resource '{resource_name}' from server '{server_name}': {str(e)}"
+            )
+            raise ValueError(
+                f"Failed to retrieve resource '{resource_name}' from server '{server_name}': {str(e)}"
+            )
+
+    async def get_embedded_resources(
+        self, server_name: str, resource_name: str
+    ) -> List[EmbeddedResource]:
+        """
+        Get a resource from an MCP server and return it as a list of embedded resources ready for use in prompts.
+
+        Args:
+            server_name: Name of the MCP server to retrieve the resource from
+            resource_name: Name or URI of the resource to retrieve
+
+        Returns:
+            List of EmbeddedResource objects ready to use in a PromptMessageMultipart
+
+        Raises:
+            ValueError: If the server doesn't exist or the resource couldn't be found
+        """
+        # Get the raw resource result
+        result: ReadResourceResult = await super().get_resource(
+            server_name, resource_name
+        )
+
+        # Convert each resource content to an EmbeddedResource
+        embedded_resources: List[EmbeddedResource] = []
+        for resource_content in result.contents:
+            embedded_resource = EmbeddedResource(
+                type="resource", resource=resource_content, annotations=None
+            )
+            embedded_resources.append(embedded_resource)
+
+        return embedded_resources
+
+    async def with_resource(
+        self,
+        prompt_content: Union[str, PromptMessageMultipart],
+        server_name: str,
+        resource_name: str,
+    ) -> str:
+        """
+        Create a prompt with the given content and resource, then send it to the agent.
+
+        Args:
+            prompt_content: Either a string message or an existing PromptMessageMultipart
+            server_name: Name of the MCP server to retrieve the resource from
+            resource_name: Name or URI of the resource to retrieve
+
+        Returns:
+            The agent's response as a string
+        """
+        # Get the embedded resources
+        embedded_resources: List[EmbeddedResource] = await self.get_embedded_resources(
+            server_name, resource_name
+        )
+
+        # Create or update the prompt message
+        prompt: PromptMessageMultipart
+        if isinstance(prompt_content, str):
+            # Create a new prompt with the text and resources
+            content = [TextContent(type="text", text=prompt_content)]
+            content.extend(embedded_resources)
+            prompt = PromptMessageMultipart(role="user", content=content)
+        elif isinstance(prompt_content, PromptMessageMultipart):
+            # Add resources to the existing prompt
+            prompt = prompt_content
+            prompt.content.extend(embedded_resources)
+        else:
+            raise TypeError("prompt_content must be a string or PromptMessageMultipart")
+
+        # Send the prompt to the agent and return the response
+        return await self._llm.generate_prompt(prompt, None)
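The three new methods combine into a single convenience call: `with_resource` fetches a resource, wraps each content item as an `EmbeddedResource`, appends it to the prompt, and sends the result through the attached LLM. A minimal usage sketch, assuming the agent is already initialized; the server name `docs` and the resource URI are illustrative placeholders, not part of this diff:

```python
# Hedged sketch of the new resource helpers on Agent (added in 0.1.10).
# "docs" and the resource URI below are assumed example names.
from mcp_agent.agents.agent import Agent


async def summarize(agent: Agent) -> str:
    # Fetches the resource from the "docs" MCP server, embeds it after the
    # text content, and sends the multipart prompt to the attached LLM.
    return await agent.with_resource(
        "Summarise the attached document in three bullet points:",
        server_name="docs",
        resource_name="resource://fast-agent/report.pdf",
    )
```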
mcp_agent/context.py
CHANGED
@@ -24,7 +24,6 @@ from mcp_agent.executor.executor import Executor
 from mcp_agent.executor.decorator_registry import (
     DecoratorRegistry,
     register_asyncio_decorators,
-    register_temporal_decorators,
 )
 from mcp_agent.executor.task_registry import ActivityRegistry
 from mcp_agent.executor.executor import AsyncioExecutor
@@ -194,7 +193,6 @@ async def initialize_context(
 
     context.decorator_registry = DecoratorRegistry()
     register_asyncio_decorators(context.decorator_registry)
-    register_temporal_decorators(context.decorator_registry)
 
     # Store the tracer in context if needed
     context.tracer = trace.get_tracer(config.otel.service_name)
mcp_agent/core/factory.py
CHANGED
@@ -172,16 +172,17 @@ async def create_agents_by_type(
         if agent_type == AgentType.BASIC:
             # Get the agent name for special handling
             agent_name = agent_data["config"].name
-            agent = Agent(
-                … (four lines not captured in the extracted diff)
-                model=config.model,
-                request_params=config.default_request_params,
-            )
-            agent._llm = await agent.attach_llm(llm_factory)
+            agent = Agent(
+                config=config,
+                context=app_instance.context,
+            )
+            await agent.initialize()
 
+            llm_factory = model_factory_func(
+                model=config.model,
+                request_params=config.default_request_params,
+            )
+            agent._llm = await agent.attach_llm(llm_factory)
             # Store the agent
             instance = agent
 
@@ -222,16 +223,16 @@ async def create_agents_by_type(
                 default_request_params=base_params,
             )
             planner_agent = Agent(
-                config=planner_config,
+                config=planner_config,
+                context=app_instance.context,
             )
             planner_factory = model_factory_func(
                 model=config.model,
                 request_params=config.default_request_params,
            )
 
-            … (three lines not captured in the extracted diff)
+            planner = await planner_agent.attach_llm(planner_factory)
+            await planner.initialize()
             # Create the orchestrator with pre-configured planner
             instance = Orchestrator(
                 name=config.name,
mcp_agent/core/fastagent.py
CHANGED
@@ -16,7 +16,6 @@ from contextlib import asynccontextmanager
 from functools import partial
 
 from mcp_agent.app import MCPApp
-from mcp_agent.context_dependent import ContextDependent
 from mcp_agent.config import Settings
 
 from mcp_agent.core.agent_app import AgentApp
@@ -64,7 +63,7 @@ from mcp_agent.mcp_server import AgentMCPServer
 T = TypeVar("T")  # For the wrapper classes
 
 
-class FastAgent(ContextDependent):
+class FastAgent:
     """
     A decorator-based interface for MCP Agent applications.
     Provides a simplified way to create and manage agents using decorators.
@@ -320,6 +319,7 @@ class FastAgent(ContextDependent):
         """
         active_agents = {}
         had_error = False
+        await self.app.initialize()
 
         # Handle quiet mode by disabling logger settings after initialization
         quiet_mode = hasattr(self, "args") and self.args.quiet
mcp_agent/core/proxies.py
CHANGED
@@ -8,6 +8,7 @@ from typing import List, Optional, Dict, Union, TYPE_CHECKING
 from mcp_agent.agents.agent import Agent
 from mcp_agent.app import MCPApp
 from mcp_agent.mcp.prompt_message_multipart import PromptMessageMultipart
+from mcp.types import EmbeddedResource
 
 # Handle circular imports
 if TYPE_CHECKING:
@@ -129,6 +130,43 @@ class LLMAgentProxy(BaseAgentProxy):
         """
         return await self._agent.apply_prompt(prompt_name, arguments)
 
+    # Add the new methods
+    async def get_embedded_resources(
+        self, server_name: str, resource_name: str
+    ) -> List[EmbeddedResource]:
+        """
+        Get a resource from an MCP server and return it as a list of embedded resources ready for use in prompts.
+
+        Args:
+            server_name: Name of the MCP server to retrieve the resource from
+            resource_name: Name or URI of the resource to retrieve
+
+        Returns:
+            List of EmbeddedResource objects ready to use in a PromptMessageMultipart
+        """
+        return await self._agent.get_embedded_resources(server_name, resource_name)
+
+    async def with_resource(
+        self,
+        prompt_content: Union[str, PromptMessageMultipart],
+        server_name: str,
+        resource_name: str,
+    ) -> str:
+        """
+        Create a prompt with the given content and resource, then send it to the agent.
+
+        Args:
+            prompt_content: Either a string message or an existing PromptMessageMultipart
+            server_name: Name of the MCP server to retrieve the resource from
+            resource_name: Name or URI of the resource to retrieve
+
+        Returns:
+            The agent's response as a string
+        """
+        return await self._agent.with_resource(
+            prompt_content, server_name, resource_name
+        )
+
 
 class WorkflowProxy(BaseAgentProxy):
     """Proxy for workflow types that implement generate_str() directly"""
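Because `LLMAgentProxy` simply forwards both methods to the wrapped `Agent`, they are also reachable through the app returned by `fast.run()`, in the same attribute style the orchestrator example later in this diff uses. A hedged sketch; the agent name `researcher`, server name `docs`, and resource URI are illustrative, and the decorator arguments assume the standard fast-agent decorator API:

```python
# Illustrative only: "researcher" and "docs" are assumed example names.
import asyncio

from mcp_agent.core.fastagent import FastAgent

fast = FastAgent("resource-demo")


@fast.agent(name="researcher", servers=["docs"])
async def main():
    async with fast.run() as agent:
        # The proxy forwards with_resource() to the underlying Agent.
        reply = await agent.researcher.with_resource(
            "What does the attached style guide say about headings?",
            server_name="docs",
            resource_name="resource://fast-agent/style-guide.md",
        )
        print(reply)


if __name__ == "__main__":
    asyncio.run(main())
```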
mcp_agent/logging/listeners.py
CHANGED
@@ -177,10 +177,7 @@ class BatchingListener(FilteredListener):
 
         if self._flush_task and not self._flush_task.done():
             self._flush_task.cancel()
-            try:
-                await self._flush_task
-            except asyncio.CancelledError:
-                pass
+            await self._flush_task
         self._flush_task = None
         await self.flush()
 
@@ -193,8 +190,8 @@ class BatchingListener(FilteredListener):
             )
         except asyncio.TimeoutError:
             await self.flush()
-        except asyncio.CancelledError:
-            break
+        # except asyncio.CancelledError:
+        #     break
         finally:
             await self.flush()  # Final flush
mcp_agent/mcp/mcp_agent_client_session.py
CHANGED
@@ -7,7 +7,6 @@ from typing import Optional
 
 from mcp import ClientSession
 from mcp.shared.session import (
-    RequestResponder,
     ReceiveResultT,
     ReceiveNotificationT,
     RequestId,
@@ -16,18 +15,11 @@ from mcp.shared.session import (
     SendResultT,
 )
 from mcp.types import (
-    ClientResult,
-    CreateMessageRequest,
-    CreateMessageResult,
     ErrorData,
-    JSONRPCNotification,
-    JSONRPCRequest,
-    ServerRequest,
-    TextContent,
-    ListRootsRequest,
     ListRootsResult,
     Root,
 )
+from pydantic import AnyUrl
 
 from mcp_agent.config import MCPServerSettings
 from mcp_agent.context_dependent import ContextDependent
@@ -36,6 +28,24 @@ from mcp_agent.logging.logger import get_logger
 logger = get_logger(__name__)
 
 
+async def list_roots(ctx: ClientSession) -> ListRootsResult:
+    """List roots callback that will be called by the MCP library."""
+
+    roots = []
+    if (
+        hasattr(ctx, "session")
+        and hasattr(ctx.session, "server_config")
+        and ctx.session.server_config
+        and hasattr(ctx.session.server_config, "roots")
+        and ctx.session.server_config.roots
+    ):
+        roots = [
+            Root(uri=AnyUrl(root.uri), name=root.name)
+            for root in ctx.session.server_config.roots
+        ]
+    return ListRootsResult(roots=roots or [])
+
+
 class MCPAgentClientSession(ClientSession, ContextDependent):
     """
     MCP Agent framework acts as a client to the servers providing tools/resources/prompts for the agent workloads.
@@ -48,36 +58,9 @@ class MCPAgentClientSession(ClientSession, ContextDependent):
     """
 
     def __init__(self, *args, **kwargs):
-        super().__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs, list_roots_callback=list_roots)
         self.server_config: Optional[MCPServerSettings] = None
 
-    async def _received_request(
-        self, responder: RequestResponder[ServerRequest, ClientResult]
-    ) -> None:
-        logger.debug("Received request:", data=responder.request.model_dump())
-        request = responder.request.root
-
-        if isinstance(request, CreateMessageRequest):
-            return await self.handle_sampling_request(request, responder)
-        elif isinstance(request, ListRootsRequest):
-            # Handle list_roots request by returning configured roots
-            if hasattr(self, "server_config") and self.server_config.roots:
-                roots = [
-                    Root(
-                        uri=root.server_uri_alias or root.uri,
-                        name=root.name,
-                    )
-                    for root in self.server_config.roots
-                ]
-
-                await responder.respond(ListRootsResult(roots=roots))
-            else:
-                await responder.respond(ListRootsResult(roots=[]))
-            return
-
-        # Handle other requests as usual
-        await super()._received_request(responder)
-
     async def send_request(
         self,
         request: SendRequestT,
@@ -89,7 +72,7 @@ class MCPAgentClientSession(ClientSession, ContextDependent):
             logger.debug("send_request: response=", data=result.model_dump())
             return result
         except Exception as e:
-            logger.error(f"send_request failed: {e}")
+            logger.error(f"send_request failed: {str(e)}")
             raise
 
     async def send_notification(self, notification: SendNotificationT) -> None:
@@ -133,110 +116,3 @@ class MCPAgentClientSession(ClientSession, ContextDependent):
         return await super().send_progress_notification(
             progress_token=progress_token, progress=progress, total=total
         )
-
-    async def _receive_loop(self) -> None:
-        async with (
-            self._read_stream,
-            self._write_stream,
-            self._incoming_message_stream_writer,
-        ):
-            async for message in self._read_stream:
-                if isinstance(message, Exception):
-                    await self._incoming_message_stream_writer.send(message)
-                elif isinstance(message.root, JSONRPCRequest):
-                    validated_request = self._receive_request_type.model_validate(
-                        message.root.model_dump(
-                            by_alias=True, mode="json", exclude_none=True
-                        )
-                    )
-                    responder = RequestResponder(
-                        request_id=message.root.id,
-                        request_meta=validated_request.root.params.meta
-                        if validated_request.root.params
-                        else None,
-                        request=validated_request,
-                        session=self,
-                    )
-
-                    await self._received_request(responder)
-                    if not responder._responded:
-                        await self._incoming_message_stream_writer.send(responder)
-                elif isinstance(message.root, JSONRPCNotification):
-                    notification = self._receive_notification_type.model_validate(
-                        message.root.model_dump(
-                            by_alias=True, mode="json", exclude_none=True
-                        )
-                    )
-
-                    await self._received_notification(notification)
-                    await self._incoming_message_stream_writer.send(notification)
-                else:  # Response or error
-                    stream = self._response_streams.pop(message.root.id, None)
-                    if stream:
-                        await stream.send(message.root)
-                    else:
-                        await self._incoming_message_stream_writer.send(
-                            RuntimeError(
-                                "Received response with an unknown "
-                                f"request ID: {message}"
-                            )
-                        )
-
-    async def handle_sampling_request(
-        self,
-        request: CreateMessageRequest,
-        responder: RequestResponder[ServerRequest, ClientResult],
-    ):
-        logger.info("Handling sampling request: %s", request)
-        config = self.context.config
-        session = self.context.upstream_session
-        if session is None:
-            # TODO: saqadri - consider whether we should be handling the sampling request here as a client
-            logger.warning(
-                "Error: No upstream client available for sampling requests. Request:",
-                data=request,
-            )
-            try:
-                from anthropic import AsyncAnthropic
-
-                client = AsyncAnthropic(api_key=config.anthropic.api_key)
-
-                params = request.params
-                response = await client.messages.create(
-                    model="claude-3-sonnet-20240229",
-                    max_tokens=params.maxTokens,
-                    messages=[
-                        {
-                            "role": m.role,
-                            "content": m.content.text
-                            if hasattr(m.content, "text")
-                            else m.content.data,
-                        }
-                        for m in params.messages
-                    ],
-                    system=getattr(params, "systemPrompt", None),
-                    temperature=getattr(params, "temperature", 0.7),
-                    stop_sequences=getattr(params, "stopSequences", None),
-                )
-
-                await responder.respond(
-                    CreateMessageResult(
-                        model="claude-3-sonnet-20240229",
-                        role="assistant",
-                        content=TextContent(type="text", text=response.content[0].text),
-                    )
-                )
-            except Exception as e:
-                logger.error(f"Error handling sampling request: {e}")
-                await responder.respond(ErrorData(code=-32603, message=str(e)))
-        else:
-            try:
-                # If a session is available, we'll pass-through the sampling request to the upstream client
-                result = await session.send_request(
-                    request=ServerRequest(request), result_type=CreateMessageResult
-                )
-
-                # Pass the result from the upstream client back to the server. We just act as a pass-through client here.
-                await responder.send_result(result)
-            except Exception as e:
-                await responder.send_error(code=-32603, message=str(e))
mcp_agent/mcp/mcp_aggregator.py
CHANGED
@@ -8,8 +8,8 @@ from typing import (
     Callable,
     TypeVar,
 )
-from mcp import GetPromptResult
-from pydantic import BaseModel, ConfigDict
+from mcp import GetPromptResult, ReadResourceResult
+from pydantic import AnyUrl, BaseModel, ConfigDict
 from mcp.client.session import ClientSession
 from mcp.server.lowlevel.server import Server
 from mcp.server.stdio import stdio_server
@@ -210,6 +210,7 @@ class MCPAggregator(ContextDependent):
                 "agent_name": self.agent_name,
             },
         )
+
         await self._persistent_connection_manager.get_server(
             server_name, client_session_factory=MCPAgentClientSession
         )
@@ -821,6 +822,53 @@ class MCPAggregator(ContextDependent):
         logger.debug(f"Available prompts across servers: {results}")
         return results
 
+    async def get_resource(
+        self, server_name: str, resource_uri: str
+    ) -> ReadResourceResult:
+        """
+        Get a resource directly from an MCP server by URI.
+
+        Args:
+            server_name: Name of the MCP server to retrieve the resource from
+            resource_uri: URI of the resource to retrieve
+
+        Returns:
+            ReadResourceResult object containing the resource content
+
+        Raises:
+            ValueError: If the server doesn't exist or the resource couldn't be found
+        """
+        if not self.initialized:
+            await self.load_servers()
+
+        if server_name not in self.server_names:
+            raise ValueError(f"Server '{server_name}' not found")
+
+        logger.info(
+            "Requesting resource",
+            data={
+                "progress_action": ProgressAction.CALLING_TOOL,
+                "resource_uri": resource_uri,
+                "server_name": server_name,
+                "agent_name": self.agent_name,
+            },
+        )
+
+        try:
+            uri = AnyUrl(resource_uri)
+        except Exception as e:
+            raise ValueError(f"Invalid resource URI: {resource_uri}. Error: {e}")
+
+        # Use the _execute_on_server method to call read_resource on the server
+        return await self._execute_on_server(
+            server_name=server_name,
+            operation_type="resource",
+            operation_name=resource_uri,
+            method_name="read_resource",
+            method_args={"uri": uri},
+            error_factory=lambda msg: ValueError(f"Failed to retrieve resource: {msg}"),
+        )
+
 
 class MCPCompoundServer(Server):
     """
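For callers, the new aggregator method behaves much like `call_tool`: name the server, pass a URI, and get a typed result back. A hedged sketch of the call path, assuming an already-initialized aggregator; the server name and URI are illustrative:

```python
# Illustrative call to the new MCPAggregator.get_resource (0.1.10).
# "docs" and the URI are assumed example values.
from mcp import ReadResourceResult


async def read_style_guide(aggregator) -> str:
    result: ReadResourceResult = await aggregator.get_resource(
        server_name="docs",
        resource_uri="resource://fast-agent/style-guide.md",
    )
    # contents holds TextResourceContents or BlobResourceContents items;
    # .text is only present on the text variant.
    return result.contents[0].text
```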
mcp_agent/mcp/mcp_connection_manager.py
CHANGED
@@ -163,7 +163,6 @@ async def _server_lifecycle_task(server_conn: ServerConnection) -> None:
     async with transport_context as (read_stream, write_stream):
         # try:
         server_conn.create_session(read_stream, write_stream)
-        # except FileNotFoundError as e:
 
         async with server_conn.session:
             await server_conn.initialize_session()
mcp_agent/mcp/prompts/prompt_server.py
CHANGED
@@ -12,6 +12,8 @@ import logging
 import sys
 from pathlib import Path
 from typing import List, Dict, Optional, Callable, Awaitable, Literal, Any
+from mcp.server.fastmcp.resources import FileResource
+from pydantic import AnyUrl
 
 from mcp_agent.mcp import mime_utils, resource_utils
 
@@ -185,19 +187,19 @@ def create_prompt_handler(
 
 
 # Type for resource handler
-ResourceHandler = Callable[[], Awaitable[str]]
+ResourceHandler = Callable[[], Awaitable[str | bytes]]
 
 
 def create_resource_handler(resource_path: Path, mime_type: str) -> ResourceHandler:
     """Create a resource handler function for the given resource"""
 
-    async def get_resource() -> str:
+    async def get_resource() -> str | bytes:
         is_binary = mime_utils.is_binary_content(mime_type)
 
         if is_binary:
             # For binary files, read in binary mode and base64 encode
             with open(resource_path, "rb") as f:
-                return …
+                return f.read()
         else:
             # For text files, read as utf-8 text
             with open(resource_path, "r", encoding="utf-8") as f:
@@ -284,15 +286,14 @@ def register_prompt(file_path: Path):
             exposed_resources[resource_id] = resource_file
             mime_type = mime_utils.guess_mime_type(str(resource_file))
 
-            … (three lines not captured in the extracted diff)
+            mcp.add_resource(
+                FileResource(
+                    uri=AnyUrl(resource_id),
+                    path=resource_file,
+                    mime_type=mime_type,
+                    is_binary=mime_utils.is_binary_content(mime_type),
+                )
             )
-            mcp.resource(
-                resource_id,
-                description=f"Resource from {file_path.name}",
-                mime_type=mime_type,
-            )(resource_handler)
 
             logger.info(
                 f"Registered resource: {resource_id} ({resource_file})"
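Registration now delegates to FastMCP's `FileResource`, which handles binary-versus-text reads itself rather than going through a bespoke handler. A hedged standalone sketch of the same registration call; the server name, file path, and URI are illustrative, and the absolute-path requirement is an assumption about FileResource's validation:

```python
# Hedged sketch mirroring the new FileResource registration path.
# The server name, file path and URI are assumed example values.
from pathlib import Path

from mcp.server.fastmcp import FastMCP
from mcp.server.fastmcp.resources import FileResource
from pydantic import AnyUrl

mcp = FastMCP("prompt-server-sketch")
resource_file = Path("report.pdf").resolve()  # FileResource expects an absolute path

mcp.add_resource(
    FileResource(
        uri=AnyUrl("resource://fast-agent/report.pdf"),
        path=resource_file,
        mime_type="application/pdf",
        is_binary=True,
    )
)
```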
mcp_agent/mcp/resource_utils.py
CHANGED
@@ -25,44 +25,6 @@ def find_resource_file(resource_path: str, prompt_files: List[Path]) -> Optional
     return None
 
 
-# TODO -- decide how to deal with this. Both Anthropic and OpenAI allow sending URLs in
-# input message
-# TODO -- used?
-# async def fetch_remote_resource(
-#     url: str, timeout: int = HTTP_TIMEOUT
-# ) -> ResourceContent:
-#     """
-#     Fetch a remote resource from a URL
-
-#     Returns:
-#         Tuple of (content, mime_type, is_binary)
-#         - content: Text content or base64-encoded binary content
-#         - mime_type: The MIME type of the resource
-#         - is_binary: Whether the content is binary (and base64-encoded)
-#     """
-
-#     async with httpx.AsyncClient(timeout=timeout) as client:
-#         response = await client.get(url)
-#         response.raise_for_status()
-
-#         # Get the content type or guess from URL
-#         mime_type = response.headers.get("content-type", "").split(";")[0]
-#         if not mime_type:
-#             mime_type = mime_utils.guess_mime_type(url)
-
-#         # Check if this is binary content
-#         is_binary = mime_utils.is_binary_content(mime_type)
-
-#         if is_binary:
-#             # For binary responses, get the binary content and base64 encode it
-#             content = base64.b64encode(response.content).decode("utf-8")
-#         else:
-#             # For text responses, just get the text
-#             content = response.text
-
-#         return content, mime_type, is_binary
-
-
 def load_resource_content(
     resource_path: str, prompt_files: List[Path]
 ) -> ResourceContent:
@@ -109,6 +71,36 @@ def create_resource_uri(path: str) -> str:
     return f"resource://fast-agent/{Path(path).name}"
 
 
+# Add this to your resource_utils.py module
+
+
+def create_resource_reference(uri: str, mime_type: str) -> "EmbeddedResource":
+    """
+    Create a reference to a resource without embedding its content directly.
+
+    This creates an EmbeddedResource that references another resource URI.
+    When the client receives this, it will make a separate request to fetch
+    the resource content using the provided URI.
+
+    Args:
+        uri: URI for the resource
+        mime_type: MIME type of the resource
+
+    Returns:
+        An EmbeddedResource object
+    """
+    from mcp.types import EmbeddedResource, TextResourceContents
+
+    # Create a resource reference
+    resource_contents = TextResourceContents(
+        uri=uri,
+        mimeType=mime_type,
+        text="",  # Empty text as we're just referencing
+    )
+
+    return EmbeddedResource(type="resource", resource=resource_contents)
+
+
 def create_embedded_resource(
     resource_path: str, content: str, mime_type: str, is_binary: bool = False
 ) -> EmbeddedResource:
@@ -149,6 +141,34 @@ def create_image_content(data: str, mime_type: str) -> ImageContent:
     )
 
 
+def create_blob_resource(
+    resource_path: str, content: str, mime_type: str
+) -> EmbeddedResource:
+    """Create an embedded resource for binary data"""
+    return EmbeddedResource(
+        type="resource",
+        resource=BlobResourceContents(
+            uri=resource_path,
+            mimeType=mime_type,
+            blob=content,  # Content should already be base64 encoded
+        ),
+    )
+
+
+def create_text_resource(
+    resource_path: str, content: str, mime_type: str
+) -> EmbeddedResource:
+    """Create an embedded resource for text data"""
+    return EmbeddedResource(
+        type="resource",
+        resource=TextResourceContents(
+            uri=resource_path,
+            mimeType=mime_type,
+            text=content,
+        ),
+    )
+
+
 def normalize_uri(uri_or_filename: str) -> str:
     """
     Normalize a URI or filename to ensure it's a valid URI.
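The two new helpers are thin constructors around the MCP content models; the one contract to remember is that `create_blob_resource` expects content that is already base64-encoded. A brief sketch, with illustrative URIs and payloads:

```python
# Illustrative use of the new helpers; URIs and contents are made up.
import base64

from mcp_agent.mcp.resource_utils import create_blob_resource, create_text_resource

text_resource = create_text_resource(
    "resource://fast-agent/notes.txt", "hello world", "text/plain"
)
blob_resource = create_blob_resource(
    "resource://fast-agent/data.bin",
    base64.b64encode(b"\x00\x01\x02").decode("utf-8"),  # pre-encoded, per the contract
    "application/octet-stream",
)
```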
mcp_agent/mcp/stdio.py
CHANGED
@@ -9,10 +9,12 @@ from anyio.streams.text import TextReceiveStream
 from mcp.client.stdio import StdioServerParameters, get_default_environment
 import mcp.types as types
 from mcp_agent.logging.logger import get_logger
+from anyio.streams.memory import MemoryObjectReceiveStream, MemoryObjectSendStream
 
 logger = get_logger(__name__)
 
 
+# TODO this will be removed when client library 1.4.2 is released
 @asynccontextmanager
 async def stdio_client_with_rich_stderr(server: StdioServerParameters):
     """
@@ -22,10 +24,16 @@ async def stdio_client_with_rich_stderr(server: StdioServerParameters):
     Args:
         server: The server parameters for the stdio connection
     """
+    read_stream: MemoryObjectReceiveStream[types.JSONRPCMessage | Exception]
+    read_stream_writer: MemoryObjectSendStream[types.JSONRPCMessage | Exception]
+
+    write_stream: MemoryObjectSendStream[types.JSONRPCMessage]
+    write_stream_reader: MemoryObjectReceiveStream[types.JSONRPCMessage]
+
     read_stream_writer, read_stream = anyio.create_memory_object_stream(0)
     write_stream, write_stream_reader = anyio.create_memory_object_stream(0)
-
     # Open process with stderr piped for capture
+
     process = await anyio.open_process(
         [server.command, *server.args],
         env=server.env if server.env is not None else get_default_environment(),
@@ -67,19 +75,19 @@ async def stdio_client_with_rich_stderr(server: StdioServerParameters):
         except anyio.ClosedResourceError:
             await anyio.lowlevel.checkpoint()
 
-    async def stderr_reader():
-        assert process.stderr, "Opened process is missing stderr"
-        try:
-            async for chunk in TextReceiveStream(
-                process.stderr,
-                encoding=server.encoding,
-                errors=server.encoding_error_handler,
-            ):
-                if chunk.strip():
-                    # Let the logging system handle the formatting consistently
-                    logger.event("info", "mcpserver.stderr", chunk.rstrip(), None, {})
-        except anyio.ClosedResourceError:
-            await anyio.lowlevel.checkpoint()
+    # async def stderr_reader():
+    #     assert process.stderr, "Opened process is missing stderr"
+    #     try:
+    #         async for chunk in TextReceiveStream(
+    #             process.stderr,
+    #             encoding=server.encoding,
+    #             errors=server.encoding_error_handler,
+    #         ):
+    #             if chunk.strip():
+    #                 # Let the logging system handle the formatting consistently
+    #                 logger.event("info", "mcpserver.stderr", chunk.rstrip(), None, {})
+    #     except anyio.ClosedResourceError:
+    #         await anyio.lowlevel.checkpoint()
 
     async def stdin_writer():
         assert process.stdin, "Opened process is missing stdin"
@@ -87,6 +95,7 @@ async def stdio_client_with_rich_stderr(server: StdioServerParameters):
         async with write_stream_reader:
             async for message in write_stream_reader:
                 json = message.model_dump_json(by_alias=True, exclude_none=True)
+                print(f"**********{id(process.stdin)}")
                 await process.stdin.send(
                     (json + "\n").encode(
                         encoding=server.encoding,
@@ -100,5 +109,4 @@ async def stdio_client_with_rich_stderr(server: StdioServerParameters):
     async with anyio.create_task_group() as tg, process:
         tg.start_soon(stdout_reader)
         tg.start_soon(stdin_writer)
-        tg.start_soon(stderr_reader)
         yield read_stream, write_stream
mcp_agent/mcp_server_registry.py
CHANGED
@@ -15,9 +15,9 @@ from anyio.streams.memory import MemoryObjectReceiveStream, MemoryObjectSendStream
 from mcp import ClientSession
 from mcp.client.stdio import (
     StdioServerParameters,
-    stdio_client,
     get_default_environment,
 )
+from mcp_agent.mcp.stdio import stdio_client_with_rich_stderr
 from mcp.client.sse import sse_client
 
 from mcp_agent.config import (
@@ -134,7 +134,10 @@ class ServerRegistry:
                 env={**get_default_environment(), **(config.env or {})},
             )
 
-            async with stdio_client(server_params) as (read_stream, write_stream):
+            async with stdio_client_with_rich_stderr(server_params) as (
+                read_stream,
+                write_stream,
+            ):
                 session = client_session_factory(
                     read_stream,
                     write_stream,
mcp_agent/resources/examples/workflows/orchestrator.py
CHANGED
@@ -50,9 +50,9 @@ fast = FastAgent("Orchestrator-Workers")
 async def main():
     async with fast.run() as agent:
         await agent()
-        … (three lines not captured in the extracted diff)
+        await agent.author(
+            "write a 250 word short story about kittens discovering a castle, and save it to short_story.md"
+        )
 
         # The orchestrator can be used just like any other agent
         task = (
mcp_agent/workflows/llm/augmented_llm_passthrough.py
CHANGED
@@ -1,5 +1,5 @@
 from typing import Any, List, Optional, Type, Union
-
+import json
 from mcp import GetPromptResult
 from mcp.types import PromptMessage
 from pydantic_core import from_json
@@ -45,11 +45,63 @@ class PassthroughLLM(AugmentedLLM):
         request_params: Optional[RequestParams] = None,
     ) -> str:
         """Return the input message as a string."""
+        # Check if this is a special command to call a tool
+        if isinstance(message, str) and message.startswith("***CALL_TOOL "):
+            return await self._call_tool_and_return_result(message)
+
         self.show_user_message(message, model="fastagent-passthrough", chat_turn=0)
         await self.show_assistant_message(message, title="ASSISTANT/PASSTHROUGH")
 
         return str(message)
 
+    async def _call_tool_and_return_result(self, command: str) -> str:
+        """
+        Call a tool based on the command and return its result as a string.
+
+        Args:
+            command: The command string, expected format: "***CALL_TOOL <server>-<tool_name> [arguments_json]"
+
+        Returns:
+            Tool result as a string
+        """
+        try:
+            # Parse the tool name and optional arguments
+            parts = command.split(" ", 2)
+            if len(parts) < 2:
+                return "Error: Invalid format. Expected '***CALL_TOOL <tool_name> [arguments_json]'"
+
+            tool_name = parts[1].strip()
+            arguments = None
+
+            # Parse optional JSON arguments if provided
+            if len(parts) > 2:
+                try:
+                    arguments = json.loads(parts[2])
+                except json.JSONDecodeError:
+                    return f"Error: Invalid JSON arguments: {parts[2]}"
+
+            # Call the tool and get the result
+            self.logger.info(f"Calling tool {tool_name} with arguments {arguments}")
+            result = await self.aggregator.call_tool(tool_name, arguments)
+
+            # Format the result as a string
+            if result.isError:
+                return f"Error calling tool '{tool_name}': {result.message}"
+
+            # Extract text content from result
+            result_text = []
+            for content_item in result.content:
+                if hasattr(content_item, "text"):
+                    result_text.append(content_item.text)
+                else:
+                    result_text.append(str(content_item))
+
+            return "\n".join(result_text)
+
+        except Exception as e:
+            self.logger.error(f"Error calling tool: {str(e)}")
+            return f"Error calling tool: {str(e)}"
+
     async def generate_structured(
         self,
         message: Union[str, MessageParamT, List[MessageParamT]],
@@ -71,7 +123,10 @@ class PassthroughLLM(AugmentedLLM):
     async def generate_prompt(
         self, prompt: "PromptMessageMultipart", request_params: RequestParams | None
     ) -> str:
-        … (line not captured in the extracted diff)
+        message = prompt.content[0].text if prompt.content else ""
+        if isinstance(message, str) and message.startswith("***CALL_TOOL "):
+            return await self._call_tool_and_return_result(message)
+        return await self.generate_str(message, request_params)
 
     async def apply_prompt_template(
         self, prompt_result: GetPromptResult, prompt_name: str
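The `***CALL_TOOL` hook makes the passthrough model useful for scripted tests: the command is split at most twice, the second token is the tool name, and anything after it is parsed as JSON arguments before being routed through `aggregator.call_tool`. A hedged sketch of the command format, where the tool name and arguments are illustrative:

```python
# Illustrative command for PassthroughLLM's tool-call hook; the tool name
# and JSON payload are assumed example values.
async def call_fetch(llm) -> str:
    # llm is a PassthroughLLM whose agent exposes a matching tool (assumed).
    command = '***CALL_TOOL fetch-get_url {"url": "https://example.com"}'
    # Parsed as tool_name="fetch-get_url", arguments={"url": "https://example.com"}
    return await llm.generate_str(command)
```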
{fast_agent_mcp-0.1.9.dist-info → fast_agent_mcp-0.1.10.dist-info}/WHEEL
File without changes
{fast_agent_mcp-0.1.9.dist-info → fast_agent_mcp-0.1.10.dist-info}/entry_points.txt
File without changes
{fast_agent_mcp-0.1.9.dist-info → fast_agent_mcp-0.1.10.dist-info}/licenses/LICENSE
File without changes