aixtools 0.0.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release. This version of aixtools might be problematic.
- aixtools/.chainlit/config.toml +113 -0
- aixtools/.chainlit/translations/bn.json +214 -0
- aixtools/.chainlit/translations/en-US.json +214 -0
- aixtools/.chainlit/translations/gu.json +214 -0
- aixtools/.chainlit/translations/he-IL.json +214 -0
- aixtools/.chainlit/translations/hi.json +214 -0
- aixtools/.chainlit/translations/ja.json +214 -0
- aixtools/.chainlit/translations/kn.json +214 -0
- aixtools/.chainlit/translations/ml.json +214 -0
- aixtools/.chainlit/translations/mr.json +214 -0
- aixtools/.chainlit/translations/nl.json +214 -0
- aixtools/.chainlit/translations/ta.json +214 -0
- aixtools/.chainlit/translations/te.json +214 -0
- aixtools/.chainlit/translations/zh-CN.json +214 -0
- aixtools/__init__.py +11 -0
- aixtools/_version.py +34 -0
- aixtools/a2a/app.py +126 -0
- aixtools/a2a/google_sdk/__init__.py +0 -0
- aixtools/a2a/google_sdk/card.py +27 -0
- aixtools/a2a/google_sdk/pydantic_ai_adapter/agent_executor.py +199 -0
- aixtools/a2a/google_sdk/pydantic_ai_adapter/storage.py +26 -0
- aixtools/a2a/google_sdk/remote_agent_connection.py +88 -0
- aixtools/a2a/google_sdk/utils.py +59 -0
- aixtools/a2a/utils.py +115 -0
- aixtools/agents/__init__.py +12 -0
- aixtools/agents/agent.py +164 -0
- aixtools/agents/agent_batch.py +71 -0
- aixtools/agents/prompt.py +97 -0
- aixtools/app.py +143 -0
- aixtools/chainlit.md +14 -0
- aixtools/compliance/__init__.py +9 -0
- aixtools/compliance/private_data.py +138 -0
- aixtools/context.py +17 -0
- aixtools/db/__init__.py +17 -0
- aixtools/db/database.py +110 -0
- aixtools/db/vector_db.py +115 -0
- aixtools/google/client.py +25 -0
- aixtools/log_view/__init__.py +17 -0
- aixtools/log_view/app.py +195 -0
- aixtools/log_view/display.py +285 -0
- aixtools/log_view/export.py +51 -0
- aixtools/log_view/filters.py +41 -0
- aixtools/log_view/log_utils.py +26 -0
- aixtools/log_view/node_summary.py +229 -0
- aixtools/logfilters/__init__.py +7 -0
- aixtools/logfilters/context_filter.py +67 -0
- aixtools/logging/__init__.py +30 -0
- aixtools/logging/log_objects.py +227 -0
- aixtools/logging/logging_config.py +161 -0
- aixtools/logging/mcp_log_models.py +102 -0
- aixtools/logging/mcp_logger.py +172 -0
- aixtools/logging/model_patch_logging.py +87 -0
- aixtools/logging/open_telemetry.py +36 -0
- aixtools/mcp/__init__.py +9 -0
- aixtools/mcp/client.py +375 -0
- aixtools/mcp/example_client.py +30 -0
- aixtools/mcp/example_server.py +22 -0
- aixtools/mcp/fast_mcp_log.py +31 -0
- aixtools/mcp/faulty_mcp.py +319 -0
- aixtools/model_patch/model_patch.py +63 -0
- aixtools/server/__init__.py +29 -0
- aixtools/server/app_mounter.py +90 -0
- aixtools/server/path.py +72 -0
- aixtools/server/utils.py +70 -0
- aixtools/server/workspace_privacy.py +65 -0
- aixtools/testing/__init__.py +9 -0
- aixtools/testing/aix_test_model.py +149 -0
- aixtools/testing/mock_tool.py +66 -0
- aixtools/testing/model_patch_cache.py +279 -0
- aixtools/tools/doctor/__init__.py +3 -0
- aixtools/tools/doctor/tool_doctor.py +61 -0
- aixtools/tools/doctor/tool_recommendation.py +44 -0
- aixtools/utils/__init__.py +35 -0
- aixtools/utils/chainlit/cl_agent_show.py +82 -0
- aixtools/utils/chainlit/cl_utils.py +168 -0
- aixtools/utils/config.py +131 -0
- aixtools/utils/config_util.py +69 -0
- aixtools/utils/enum_with_description.py +37 -0
- aixtools/utils/files.py +17 -0
- aixtools/utils/persisted_dict.py +99 -0
- aixtools/utils/utils.py +167 -0
- aixtools/vault/__init__.py +7 -0
- aixtools/vault/vault.py +137 -0
- aixtools-0.0.0.dist-info/METADATA +669 -0
- aixtools-0.0.0.dist-info/RECORD +88 -0
- aixtools-0.0.0.dist-info/WHEEL +5 -0
- aixtools-0.0.0.dist-info/entry_points.txt +2 -0
- aixtools-0.0.0.dist-info/top_level.txt +1 -0
aixtools/mcp/client.py
ADDED
@@ -0,0 +1,375 @@
"""MCP server utilities with caching and robust error handling."""

import asyncio
from contextlib import asynccontextmanager
from datetime import timedelta
from typing import Any, AsyncGenerator

import anyio
import httpx
from anyio.streams.memory import MemoryObjectReceiveStream, MemoryObjectSendStream
from cachebox import TTLCache
from mcp import types as mcp_types
from mcp.client import streamable_http
from mcp.shared.exceptions import McpError
from mcp.shared.message import SessionMessage
from pydantic_ai import RunContext, exceptions
from pydantic_ai.mcp import MCPServerStreamableHTTP, ToolResult
from pydantic_ai.toolsets.abstract import ToolsetTool

from aixtools.context import SessionIdTuple
from aixtools.logging.logging_config import get_logger

MCP_TOOL_CACHE_TTL = 300  # 5 minutes
DEFAULT_MCP_CONNECTION_TIMEOUT = 30
DEFAULT_MCP_READ_TIMEOUT = float(60 * 5)  # 5 minutes
CACHE_KEY = "TOOL_LIST"

logger = get_logger(__name__)


def get_mcp_headers(session_id_tuple: SessionIdTuple) -> dict[str, str] | None:
    """
    Generate headers for MCP server requests.

    This function creates a dictionary of headers to be used in requests to
    the MCP servers. If a `user_id` or `session_id` is provided, they are
    included in the headers.

    Args:
        session_id_tuple (SessionIdTuple): user_id and session_id tuple
    Returns:
        dict[str, str] | None: A dictionary of headers for MCP server requests,
                               or None if neither user_id nor session_id is
                               provided. When None is returned, default headers
                               from the client or transport will be used.
    """
    headers = None
    user_id, session_id = session_id_tuple
    if session_id or user_id:
        headers = {}
        if session_id:
            headers["session-id"] = session_id
        if user_id:
            headers["user-id"] = user_id
    return headers


def get_configured_mcp_servers(
    session_id_tuple: SessionIdTuple, mcp_urls: list[str], timeout: int = DEFAULT_MCP_CONNECTION_TIMEOUT
):
    """
    Retrieve the configured MCP server instances with optional caching.

    Context values `user_id` and `session_id` are included in the headers for each server request.

    Each server is wrapped in a try-except block to isolate them from each other.
    If one server fails, it won't affect the others.

    Args:
        session_id_tuple (SessionIdTuple): A tuple containing (user_id, session_id).
        mcp_urls (list[str]): A list of MCP server URLs to use.
        timeout (int, optional): Timeout in seconds for MCP server connections. Defaults to 30 seconds.
    Returns:
        list[MCPServerStreamableHTTP]: A list of configured MCP server instances. If
                                       neither user_id nor session_id is provided, the
                                       server instances will use default headers defined
                                       by the underlying HTTP implementation.
    """
    headers = get_mcp_headers(session_id_tuple)

    return [CachedMCPServerStreamableHTTP(url=url, headers=headers, timeout=timeout) for url in mcp_urls]


class CachedMCPServerStreamableHTTP(MCPServerStreamableHTTP):
    """StreamableHTTP MCP server with cachebox-based TTL caching and robust error handling.

    This class addresses the cancellation propagation issue by:
    1. Using complete task isolation to prevent CancelledError propagation
    2. Implementing comprehensive error handling for all MCP operations
    3. Using fallback mechanisms when servers become unavailable
    4. Overriding pydantic_ai methods to fix a variable scoping bug
    """

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self._tools_cache = TTLCache(maxsize=1, ttl=MCP_TOOL_CACHE_TTL)
        self._tools_list = None
        self._isolation_lock = asyncio.Lock()  # Lock for critical operations

    async def _run_direct_or_isolated(self, func, fallback, timeout: float | None):
        """Run a coroutine in complete isolation to prevent cancellation propagation.

        Args:
            func: Function that returns a coroutine to run
            fallback: Function that takes an exception and returns a fallback value
            timeout: Timeout in seconds. If None, a direct run is performed

        Returns:
            The result of the coroutine on success, or the fallback value on any exception
        """
        try:
            if timeout is None:
                return await func()

            task = asyncio.create_task(func())

            # Use asyncio.wait to prevent cancellation propagation
            done, pending = await asyncio.wait([task], timeout=timeout)

            if pending:
                # Cancel pending tasks safely
                for t in pending:
                    t.cancel()
                    try:
                        await t
                    except (asyncio.CancelledError, Exception):  # pylint: disable=broad-except
                        pass
                raise TimeoutError(f"Task timed out after {timeout} seconds")

            # Get result from completed task
            completed_task = done.pop()
            if exc := completed_task.exception():
                raise exc
            return completed_task.result()

        except exceptions.ModelRetry as exc:
            logger.warning("MCP %s: %s ModelRetry: %s", self.url, func.__name__, exc)
            raise
        except TimeoutError as exc:
            logger.warning("MCP %s: %s timed out: %s", self.url, func.__name__, exc)
            return fallback(exc)
        except asyncio.CancelledError as exc:
            logger.warning("MCP %s: %s was cancelled", self.url, func.__name__)
            return fallback(exc)
        except anyio.ClosedResourceError as exc:
            logger.warning("MCP %s: %s closed resource.", self.url, func.__name__)
            return fallback(exc)
        except Exception as exc:  # pylint: disable=broad-except
            if str(exc) == "Attempted to exit cancel scope in a different task than it was entered in":
                logger.warning("MCP %s: %s enter/exit cancel scope task mismatch.", self.url, func.__name__)
            else:
                logger.warning("MCP %s: %s exception %s: %s", self.url, func.__name__, type(exc), exc)
            return fallback(exc)

    @property
    def _transport_client(self):
        """Override base transport client with wrapper logging and suppressing exceptions"""
        return patched_streamablehttp_client

    @asynccontextmanager
    async def client_streams(self):
        """Override base client_streams with wrapper logging and suppressing exceptions"""
        try:
            async with super().client_streams() as streams:  # pylint: disable=contextmanager-generator-missing-cleanup
                try:
                    yield streams
                except Exception as exc:  # pylint: disable=broad-except
                    logger.error("MCP %s: client_streams; %s: %s", self.url, type(exc).__name__, exc)
        except Exception as exc:  # pylint: disable=broad-except
            logger.error("MCP %s: client_streams: %s: %s", self.url, type(exc).__name__, exc)

    async def __aenter__(self):
        """Enter the context of the cached MCP server with complete cancellation isolation."""
        async with self._isolation_lock:

            async def direct_init():
                return await super(CachedMCPServerStreamableHTTP, self).__aenter__()  # pylint: disable=super-with-arguments

            def fallback(_exc):
                self._client = None
                return self

            return await self._run_direct_or_isolated(direct_init, fallback, timeout=None)

    async def __aexit__(self, exc_type, exc_val, exc_tb):
        """Exit the context of the cached MCP server with complete cancellation isolation."""
        async with self._isolation_lock:
            # If we're being cancelled, just clean up
            if exc_type is asyncio.CancelledError:
                logger.warning("MCP %s: __aexit__ called with cancellation - cleaning up", self.url)
                self._client = None
                return True

            # If client is already None, skip cleanup
            if not self._client:
                logger.warning("MCP %s: is uninitialized -> skipping cleanup", self.url)
                return True

            async def direct_cleanup():
                return await super(CachedMCPServerStreamableHTTP, self).__aexit__(exc_type, exc_val, exc_tb)  # pylint: disable=super-with-arguments

            def fallback(_exc):
                self._client = None
                return True  # Suppress exceptions to prevent propagation

            return await self._run_direct_or_isolated(direct_cleanup, fallback, timeout=None)

    async def list_tools(self) -> list[mcp_types.Tool]:
        """Override to fix variable scoping bug and add caching with cancellation isolation."""
        # If client is not initialized, return an empty list
        if not self._client:
            logger.warning("MCP %s: is uninitialized -> no tools", self.url)
            return []

        # First, check if we have a valid cached result
        if CACHE_KEY in self._tools_cache:
            logger.info("Using cached tools for %s", self.url)
            return self._tools_cache[CACHE_KEY]

        # Create isolated task to prevent cancellation propagation
        async def isolated_list_tools():
            """Isolated list_tools with variable scoping bug fix."""
            result = None  # Initialize to prevent UnboundLocalError
            async with self:  # Ensure server is running
                result = await self._client.list_tools()
                if result:
                    self._tools_list = result.tools or []
                    self._tools_cache[CACHE_KEY] = self._tools_list
                    logger.info("MCP %s: list_tools returned %d tools", self.url, len(self._tools_list))
                else:
                    logger.warning("MCP %s: list_tools returned no result", self.url)
            return self._tools_list or []

        def fallback(_exc):
            return self._tools_list or []

        return await self._run_direct_or_isolated(isolated_list_tools, fallback, timeout=5.0)

    async def call_tool(
        self,
        name: str,
        tool_args: dict[str, Any],
        ctx: RunContext[Any],
        tool: ToolsetTool[Any],
    ) -> ToolResult:
        """Call tool with complete isolation from cancellation using patched pydantic_ai."""
        logger.info("MCP %s: call_tool '%s' started.", self.url, name)

        # Early return for uninitialized servers
        if not self._client:
            logger.warning("MCP %s: is uninitialized -> cannot call tool", self.url)
            return f"There was an error with calling tool '{name}': MCP connection is uninitialized."

        # Create isolated task to prevent cancellation propagation
        async def isolated_call_tool():
            """Isolated call_tool using patched pydantic_ai methods."""
            return await super(CachedMCPServerStreamableHTTP, self).call_tool(name, tool_args, ctx, tool)  # pylint: disable=super-with-arguments

        def fallback(exc):
            return f"Exception {type(exc)} when calling tool '{name}': {exc}. Consider alternative approaches."

        result = await self._run_direct_or_isolated(isolated_call_tool, fallback, timeout=3600.0)
        logger.info("MCP %s: call_tool '%s' completed.", self.url, name)
        return result

    async def direct_call_tool(
        self, name: str, args: dict[str, Any], metadata: dict[str, Any] | None = None
    ) -> ToolResult:
        """Override to fix variable scoping bug in direct_call_tool."""
        result = None  # Initialize to prevent UnboundLocalError
        async with self:  # Ensure server is running
            try:
                result = await self._client.send_request(
                    mcp_types.ClientRequest(
                        mcp_types.CallToolRequest(
                            method="tools/call",
                            params=mcp_types.CallToolRequestParams(
                                name=name,
                                arguments=args,
                                _meta=mcp_types.RequestParams.Meta(**metadata) if metadata else None,
                            ),
                        )
                    ),
                    mcp_types.CallToolResult,
                )
            except McpError as e:
                raise exceptions.ModelRetry(e.error.message)

            if not result:
                raise exceptions.ModelRetry("No result from MCP server")

            content = [await self._map_tool_result_part(part) for part in result.content]

            if result.isError:
                text = "\n".join(str(part) for part in content)
                raise exceptions.ModelRetry(text)

            return content[0] if len(content) == 1 else content


class PatchedStreamableHTTPTransport(streamable_http.StreamableHTTPTransport):
    """Patched StreamableHTTPTransport with exception suppression for _handle_post_request."""

    async def _handle_post_request(self, ctx: streamable_http.RequestContext) -> None:
        """Patched _handle_post_request with proper error handling."""
        try:
            await super()._handle_post_request(ctx)
        except Exception as exc:  # pylint: disable=broad-except
            logger.error("MCP %s: _handle_post_request %s: %s", self.url, type(exc).__name__, exc)


@asynccontextmanager
async def patched_streamablehttp_client(  # noqa: PLR0913, pylint: disable=too-many-arguments,too-many-positional-arguments,too-many-locals
    url: str,
    headers: dict[str, str] | None = None,
    timeout: float | timedelta = 30,
    sse_read_timeout: float | timedelta = DEFAULT_MCP_READ_TIMEOUT,
    terminate_on_close: bool = True,
    httpx_client_factory: streamable_http.McpHttpClientFactory = streamable_http.create_mcp_http_client,
    auth: httpx.Auth | None = None,
) -> AsyncGenerator[
    tuple[
        MemoryObjectReceiveStream[SessionMessage | Exception],
        MemoryObjectSendStream[SessionMessage],
        streamable_http.GetSessionIdCallback,
    ],
    None,
]:
    """Patched version of `streamablehttp_client` with exception suppression."""
    try:
        transport = PatchedStreamableHTTPTransport(url, headers, timeout, sse_read_timeout, auth)

        read_stream_writer, read_stream = anyio.create_memory_object_stream[SessionMessage | Exception](0)
        write_stream, write_stream_reader = anyio.create_memory_object_stream[SessionMessage](0)
        async with anyio.create_task_group() as tg:
            try:
                async with httpx_client_factory(
                    headers=transport.request_headers,
                    timeout=httpx.Timeout(transport.timeout, read=transport.sse_read_timeout),
                    auth=transport.auth,
                ) as client:
                    # Define callbacks that need access to tg
                    def start_get_stream() -> None:
                        tg.start_soon(transport.handle_get_stream, client, read_stream_writer)

                    tg.start_soon(
                        transport.post_writer,
                        client,
                        write_stream_reader,
                        read_stream_writer,
                        write_stream,
                        start_get_stream,
                        tg,
                    )

                    try:
                        yield (
                            read_stream,
                            write_stream,
                            transport.get_session_id,
                        )
                    except GeneratorExit:
                        logger.warning("patched_streamablehttp_client: GeneratorExit caught, closing streams.")
                    finally:
                        if transport.session_id and terminate_on_close:
                            await transport.terminate_session(client)
                        tg.cancel_scope.cancel()
            finally:
                await read_stream_writer.aclose()
                await write_stream.aclose()
    except Exception as exc:  # pylint: disable=broad-except
        if str(exc) == "Attempted to exit cancel scope in a different task than it was entered in":
            logger.warning("MCP %s: patched_streamablehttp_client: enter/exit cancel scope task mismatch.", url)
        else:
            logger.error("MCP %s: patched_streamablehttp_client: %s: %s", url, type(exc).__name__, exc)
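Taken together, get_mcp_headers, get_configured_mcp_servers, and CachedMCPServerStreamableHTTP are meant to be used roughly as in the sketch below. The sketch is not part of the package: the endpoint URL and the (user_id, session_id) values are placeholders, and it relies only on the functions defined above.

import asyncio

from aixtools.mcp.client import get_configured_mcp_servers


async def demo():
    # Placeholder IDs and MCP endpoint; headers carry the (user_id, session_id) tuple.
    servers = get_configured_mcp_servers(("user-123", "sess-456"), ["http://localhost:9000/mcp"])
    server = servers[0]
    async with server:  # __aenter__ falls back to an uninitialized client on failure
        tools = await server.list_tools()  # result is cached for MCP_TOOL_CACHE_TTL seconds
        print([tool.name for tool in tools])


asyncio.run(demo())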
aixtools/mcp/example_client.py
ADDED
@@ -0,0 +1,30 @@
"""
Example client implementation for Model Context Protocol (MCP) servers.
"""

import asyncio

from pydantic_ai.mcp import MCPServerSSE, MCPServerStdio

from aixtools.agents import get_agent, run_agent

USE_SSE = False


if USE_SSE:
    server = MCPServerSSE(url="http://127.0.0.1:8000/sse")
else:
    server = MCPServerStdio(command="fastmcp", args=["run", "aixtools/mcp/example_server.py"])


async def main(agent, prompt):  # pylint: disable=redefined-outer-name
    """Run an agent with MCP servers and display the result."""
    async with agent:
        ret = await run_agent(agent, prompt)
        print(f"Agent returned: {ret}")


if __name__ == "__main__":
    agent = get_agent(mcp_servers=[server])  # pylint: disable=unexpected-keyword-arg
    print(f"Agent created: {agent}")
    asyncio.run(main(agent, "What is the add of 923048502345 and 795467090123481926349123941 ?"))
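For comparison, a hedged variant of the example above that connects over streamable HTTP through the cached servers from aixtools/mcp/client.py instead of the stdio transport. The URL is a placeholder, and the mcp_servers keyword is assumed to behave as in the stdio example.

import asyncio

from aixtools.agents import get_agent, run_agent
from aixtools.mcp.client import get_configured_mcp_servers


async def main_http():
    # (None, None) means no user/session headers are attached.
    servers = get_configured_mcp_servers((None, None), ["http://127.0.0.1:8000/mcp"])
    agent = get_agent(mcp_servers=servers)  # pylint: disable=unexpected-keyword-arg
    async with agent:
        print(await run_agent(agent, "What is 2 + 2?"))


if __name__ == "__main__":
    asyncio.run(main_http())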
aixtools/mcp/example_server.py
ADDED
@@ -0,0 +1,22 @@
"""
Example server implementation for Model Context Protocol (MCP).
"""

from aixtools.mcp.fast_mcp_log import FastMcpLog

mcp = FastMcpLog("Demo")
# mcp = FastMCP("Demo")


# Add an addition tool
@mcp.tool()
def add(a: int, b: int) -> int:
    """Add two numbers"""
    return a + b


# Add a dynamic greeting resource
@mcp.resource("greeting://{name}")
def get_greeting(name: str) -> str:
    """Get a personalized greeting"""
    return f"Hello, {name}!"
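One way to exercise this server without a separate process is fastmcp's in-memory client. The snippet below is a sketch and assumes a fastmcp version that exposes fastmcp.Client with call_tool and read_resource.

import asyncio

from fastmcp import Client

from aixtools.mcp.example_server import mcp


async def smoke_test():
    async with Client(mcp) as client:  # in-memory transport against the FastMcpLog instance
        result = await client.call_tool("add", {"a": 2, "b": 3})
        print(result)
        print(await client.read_resource("greeting://World"))


asyncio.run(smoke_test())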
aixtools/mcp/fast_mcp_log.py
ADDED
@@ -0,0 +1,31 @@
"""
FastMCP logging implementation for Model Context Protocol.
"""

import sys
from typing import Any

from fastmcp import FastMCP
from pydantic import AnyUrl


class FastMcpLog(FastMCP):
    """A FastMCP with hooks for logging."""

    async def _call_tool(self, key: str, arguments: dict[str, Any]):
        print(f"Calling tool: {key} with arguments: {arguments}", file=sys.stderr)
        ret = await super()._call_tool(key, arguments)
        print(f"Tool returned: {ret}", file=sys.stderr)
        return ret

    async def _read_resource(self, uri: AnyUrl | str):
        print(f"Reading resource: {uri}", file=sys.stderr)
        ret = await super()._read_resource(uri)
        print(f"Resource contents: {ret}", file=sys.stderr)
        return ret

    async def get_prompt(self, key: str, arguments: dict[str, Any] | None = None):
        print(f"Getting prompt: {key} with arguments: {arguments}", file=sys.stderr)
        ret = await super()._get_prompt(key, arguments)
        print(f"Prompt result: {ret}", file=sys.stderr)
        return ret
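The hooks above print to stderr. A hypothetical variant, not in the package, that routes the same information through the aixtools logger instead could look like the following, reusing get_logger from aixtools.logging.logging_config.

from typing import Any

from aixtools.logging.logging_config import get_logger
from aixtools.mcp.fast_mcp_log import FastMcpLog

logger = get_logger(__name__)


class FastMcpLoggerLog(FastMcpLog):
    """Hypothetical subclass: same tool-call hook, but logged instead of printed."""

    async def _call_tool(self, key: str, arguments: dict[str, Any]):
        logger.info("Calling tool %s with arguments %s", key, arguments)
        ret = await super()._call_tool(key, arguments)
        logger.info("Tool %s returned %s", key, ret)
        return ret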