chuk-tool-processor 0.1.2__py3-none-any.whl → 0.1.4__py3-none-any.whl

This diff shows the content of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the package versions as they appear in the public registry.

Potentially problematic release: this version of chuk-tool-processor has been flagged as possibly problematic.

@@ -1 +0,0 @@
- # chuk_tool_processor/__init__.py
@@ -0,0 +1,21 @@
+ # chuk_tool_processor/mcp/__init__.py
+ """
+ MCP integration for CHUK Tool Processor.
+ """
+ from chuk_tool_processor.mcp.transport import MCPBaseTransport, StdioTransport, SSETransport
+ from chuk_tool_processor.mcp.stream_manager import StreamManager
+ from chuk_tool_processor.mcp.mcp_tool import MCPTool
+ from chuk_tool_processor.mcp.register_mcp_tools import register_mcp_tools
+ from chuk_tool_processor.mcp.setup_mcp_stdio import setup_mcp_stdio
+ from chuk_tool_processor.mcp.setup_mcp_sse import setup_mcp_sse
+
+ __all__ = [
+     "MCPBaseTransport",
+     "StdioTransport",
+     "SSETransport",
+     "StreamManager",
+     "MCPTool",
+     "register_mcp_tools",
+     "setup_mcp_stdio",
+     "setup_mcp_sse"
+ ]
@@ -0,0 +1,53 @@
+ # chuk_tool_processor/mcp/mcp_tool.py
+ """
+ MCP tool that uses StreamManager for execution.
+ """
+
+ from typing import Any
+
+ from chuk_tool_processor.mcp.stream_manager import StreamManager
+ from chuk_tool_processor.logging import get_logger
+
+ logger = get_logger("chuk_tool_processor.mcp.mcp_tool")
+
+ class MCPTool:
+     """
+     MCP tool that uses StreamManager for execution.
+
+     This tool handles both namespaced and non-namespaced execution.
+     """
+
+     def __init__(self, tool_name: str, stream_manager: StreamManager):
+         """
+         Initialize the MCP tool.
+
+         Args:
+             tool_name: Name of the MCP tool
+             stream_manager: StreamManager instance
+         """
+         self.tool_name = tool_name
+         self.stream_manager = stream_manager
+
+     async def execute(self, **kwargs: Any) -> Any:
+         """
+         Execute the tool using StreamManager.
+
+         Args:
+             **kwargs: Tool arguments
+
+         Returns:
+             Tool result
+         """
+         logger.debug(f"Executing MCP tool {self.tool_name}")
+
+         result = await self.stream_manager.call_tool(
+             tool_name=self.tool_name,
+             arguments=kwargs
+         )
+
+         if result.get("isError"):
+             error_msg = result.get("error", "Unknown error")
+             logger.error(f"Error executing MCP tool {self.tool_name}: {error_msg}")
+             raise RuntimeError(error_msg)
+
+         return result.get("content")
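The wrapper above is a thin async delegate around StreamManager. A minimal usage sketch, not taken from the package itself: it assumes an already-initialised StreamManager `sm` (e.g. created via StreamManager.create(...) later in this diff) and a hypothetical remote tool named "echo".

    from chuk_tool_processor.mcp.mcp_tool import MCPTool

    async def run_echo(sm) -> None:
        # "echo" and its "message" argument are placeholders for whatever the server exposes
        tool = MCPTool("echo", sm)
        content = await tool.execute(message="hello")  # returns result["content"] on success
        # if the server reports isError, execute() logs the error and raises RuntimeError
        print(content)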
@@ -0,0 +1,82 @@
+ # chuk_tool_processor/mcp/register_mcp_tools.py
+ """
+ Registration functions for MCP tools.
+ """
+
+ from typing import List, Dict, Any
+
+ from chuk_tool_processor.mcp.mcp_tool import MCPTool
+ from chuk_tool_processor.mcp.stream_manager import StreamManager
+ from chuk_tool_processor.registry.provider import ToolRegistryProvider
+ from chuk_tool_processor.logging import get_logger
+
+ logger = get_logger("chuk_tool_processor.mcp.register")
+
+
+ def register_mcp_tools(
+     stream_manager: StreamManager,
+     namespace: str = "mcp"
+ ) -> List[str]:
+     """
+     Register MCP tools with the CHUK registry.
+
+     Args:
+         stream_manager: StreamManager instance
+         namespace: Namespace for the tools
+
+     Returns:
+         List of registered tool names
+     """
+     registry = ToolRegistryProvider.get_registry()
+     registered_tools = []
+
+     # Get all tools from StreamManager
+     mcp_tools = stream_manager.get_all_tools()
+
+     for tool_def in mcp_tools:
+         tool_name = tool_def.get("name")
+         if not tool_name:
+             logger.warning("Tool definition missing name")
+             continue
+
+         description = tool_def.get("description", f"MCP tool: {tool_name}")
+
+         try:
+             # Create tool
+             tool = MCPTool(tool_name, stream_manager)
+
+             # Register with registry under the original name in the given namespace
+             registry.register_tool(
+                 tool,
+                 name=tool_name,
+                 namespace=namespace,
+                 metadata={
+                     "description": description,
+                     "is_async": True,
+                     "tags": {"mcp", "remote"},
+                     "argument_schema": tool_def.get("inputSchema", {})
+                 }
+             )
+
+             # Also register the tool in the default namespace with the namespaced name
+             # This allows calling the tool as either "echo" or "stdio.echo" from parsers
+             namespaced_tool_name = f"{namespace}.{tool_name}"
+             registry.register_tool(
+                 tool,
+                 name=namespaced_tool_name,
+                 namespace="default",
+                 metadata={
+                     "description": description,
+                     "is_async": True,
+                     "tags": {"mcp", "remote", "namespaced"},
+                     "argument_schema": tool_def.get("inputSchema", {})
+                 }
+             )
+
+             registered_tools.append(tool_name)
+             logger.info(f"Registered MCP tool '{tool_name}' in namespace '{namespace}' (also as '{namespaced_tool_name}' in default)")
+
+         except Exception as e:
+             logger.error(f"Error registering MCP tool '{tool_name}': {e}")
+
+     return registered_tools
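The effect of the dual registration above, sketched as a call sequence; `stream_manager` is assumed to be an initialised StreamManager, and the tool names are whatever the connected servers happen to expose.

    from chuk_tool_processor.mcp.register_mcp_tools import register_mcp_tools

    registered = register_mcp_tools(stream_manager, namespace="mcp")
    # Each name in `registered` is now resolvable in two ways:
    #   - as "<name>"     in the "mcp" namespace
    #   - as "mcp.<name>" in the "default" namespace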
@@ -0,0 +1,74 @@
+ # chuk_tool_processor/mcp/setup_mcp_sse.py
+ """
+ Setup function for SSE transport MCP integration.
+ """
+ from __future__ import annotations
+
+ from typing import Dict, List, Optional
+
+ from chuk_tool_processor.core.processor import ToolProcessor
+ from chuk_tool_processor.mcp.stream_manager import StreamManager
+ from chuk_tool_processor.mcp.register_mcp_tools import register_mcp_tools
+ from chuk_tool_processor.logging import get_logger
+
+ logger = get_logger("chuk_tool_processor.mcp.setup_sse")
+
+
+ async def setup_mcp_sse(
+     servers: List[Dict[str, str]],
+     server_names: Optional[Dict[int, str]] = None,
+     default_timeout: float = 10.0,
+     max_concurrency: Optional[int] = None,
+     enable_caching: bool = True,
+     cache_ttl: int = 300,
+     enable_rate_limiting: bool = False,
+     global_rate_limit: Optional[int] = None,
+     tool_rate_limits: Optional[Dict[str, tuple]] = None,
+     enable_retries: bool = True,
+     max_retries: int = 3,
+     namespace: str = "mcp"
+ ) -> tuple[ToolProcessor, StreamManager]:
+     """
+     Set up MCP with SSE transport and CHUK Tool Processor.
+
+     Args:
+         servers: List of server configurations with "name" and "url" keys
+         server_names: Optional mapping of server indices to names
+         default_timeout: Default timeout for tool execution
+         max_concurrency: Maximum concurrent executions
+         enable_caching: Whether to enable caching
+         cache_ttl: Cache TTL in seconds
+         enable_rate_limiting: Whether to enable rate limiting
+         global_rate_limit: Global rate limit (requests per minute)
+         tool_rate_limits: Per-tool rate limits
+         enable_retries: Whether to enable retries
+         max_retries: Maximum retry attempts
+         namespace: Namespace for MCP tools
+
+     Returns:
+         Tuple of (processor, stream_manager)
+     """
+     # Create and initialize StreamManager with SSE transport
+     stream_manager = await StreamManager.create_with_sse(
+         servers=servers,
+         server_names=server_names
+     )
+
+     # Register MCP tools
+     registered_tools = register_mcp_tools(stream_manager, namespace)
+
+     # Create processor
+     processor = ToolProcessor(
+         default_timeout=default_timeout,
+         max_concurrency=max_concurrency,
+         enable_caching=enable_caching,
+         cache_ttl=cache_ttl,
+         enable_rate_limiting=enable_rate_limiting,
+         global_rate_limit=global_rate_limit,
+         tool_rate_limits=tool_rate_limits,
+         enable_retries=enable_retries,
+         max_retries=max_retries
+     )
+
+     logger.info(f"Set up MCP (SSE) with {len(registered_tools)} tools")
+     return processor, stream_manager
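A minimal end-to-end sketch of the SSE setup path, under placeholder assumptions: the server name, URL, and the existence of a reachable SSE endpoint are illustrative and not part of the package.

    import asyncio
    from chuk_tool_processor.mcp import setup_mcp_sse

    async def main() -> None:
        processor, sm = await setup_mcp_sse(
            servers=[{"name": "demo", "url": "http://localhost:8000"}],  # placeholder server
            default_timeout=15.0,
        )
        # server_info entries have the shape {"id": ..., "name": ..., "tools": ..., "status": ...}
        print(sm.get_server_info())
        await sm.close()

    asyncio.run(main())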
@@ -0,0 +1,78 @@
+ # chuk_tool_processor/mcp/setup_mcp_stdio.py
+ """
+ Setup function for stdio transport MCP integration.
+ """
+ from __future__ import annotations
+
+ from typing import Dict, List, Optional
+
+ from chuk_tool_processor.core.processor import ToolProcessor
+ from chuk_tool_processor.mcp.stream_manager import StreamManager
+ from chuk_tool_processor.mcp.register_mcp_tools import register_mcp_tools
+ from chuk_tool_processor.logging import get_logger
+
+ logger = get_logger("chuk_tool_processor.mcp.setup_stdio")
+
+
+ async def setup_mcp_stdio(
+     config_file: str,
+     servers: List[str],
+     server_names: Optional[Dict[int, str]] = None,
+     default_timeout: float = 10.0,
+     max_concurrency: Optional[int] = None,
+     enable_caching: bool = True,
+     cache_ttl: int = 300,
+     enable_rate_limiting: bool = False,
+     global_rate_limit: Optional[int] = None,
+     tool_rate_limits: Optional[Dict[str, tuple]] = None,
+     enable_retries: bool = True,
+     max_retries: int = 3,
+     namespace: str = "mcp"
+ ) -> tuple[ToolProcessor, StreamManager]:
+     """
+     Set up MCP with stdio transport and CHUK Tool Processor.
+
+     Args:
+         config_file: Path to the config file
+         servers: List of server names to connect to
+         server_names: Optional mapping of server indices to names
+         default_timeout: Default timeout for tool execution
+         max_concurrency: Maximum concurrent executions
+         enable_caching: Whether to enable caching
+         cache_ttl: Cache TTL in seconds
+         enable_rate_limiting: Whether to enable rate limiting
+         global_rate_limit: Global rate limit (requests per minute)
+         tool_rate_limits: Per-tool rate limits
+         enable_retries: Whether to enable retries
+         max_retries: Maximum retry attempts
+         namespace: Namespace for MCP tools
+
+     Returns:
+         Tuple of (processor, stream_manager)
+     """
+     # Create and initialize StreamManager with stdio transport
+     stream_manager = await StreamManager.create(
+         config_file=config_file,
+         servers=servers,
+         server_names=server_names,
+         transport_type="stdio"
+     )
+
+     # Register MCP tools
+     registered_tools = register_mcp_tools(stream_manager, namespace)
+
+     # Create processor
+     processor = ToolProcessor(
+         default_timeout=default_timeout,
+         max_concurrency=max_concurrency,
+         enable_caching=enable_caching,
+         cache_ttl=cache_ttl,
+         enable_rate_limiting=enable_rate_limiting,
+         global_rate_limit=global_rate_limit,
+         tool_rate_limits=tool_rate_limits,
+         enable_retries=enable_retries,
+         max_retries=max_retries
+     )
+
+     logger.info(f"Set up MCP (stdio) with {len(registered_tools)} tools")
+     return processor, stream_manager
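The stdio counterpart, again as a hedged sketch: the config path, the "echo" server entry, and its "message" argument are placeholders that would have to exist in the caller's MCP config file.

    import asyncio
    from chuk_tool_processor.mcp import setup_mcp_stdio

    async def main() -> None:
        processor, sm = await setup_mcp_stdio(
            config_file="server_config.json",  # placeholder path
            servers=["echo"],                  # placeholder server entry in that file
        )
        # call_tool returns the raw MCP-style result dict ("isError", "error", "content")
        result = await sm.call_tool("echo", {"message": "hi"})
        if not result.get("isError"):
            print(result.get("content"))
        await sm.close()

    asyncio.run(main())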
@@ -0,0 +1,293 @@
+ # chuk_tool_processor/mcp/stream_manager.py
+ """
+ StreamManager for CHUK Tool Processor.
+ """
+ from __future__ import annotations
+
+ import asyncio
+ from typing import Any, Dict, List, Optional, Tuple
+
+ # --------------------------------------------------------------------------- #
+ # CHUK imports #
+ # --------------------------------------------------------------------------- #
+ from chuk_mcp.config import load_config
+ from chuk_tool_processor.mcp.transport import (
+     MCPBaseTransport,
+     StdioTransport,
+     SSETransport,
+ )
+ from chuk_tool_processor.logging import get_logger
+
+ logger = get_logger("chuk_tool_processor.mcp.stream_manager")
+
+
+ class StreamManager:
+     """
+     Manager for MCP server streams with support for multiple transport types.
+     """
+
+     # ------------------------------------------------------------------ #
+     # construction #
+     # ------------------------------------------------------------------ #
+     def __init__(self) -> None:
+         self.transports: Dict[str, MCPBaseTransport] = {}
+         self.server_info: List[Dict[str, Any]] = []
+         self.tool_to_server_map: Dict[str, str] = {}
+         self.server_names: Dict[int, str] = {}
+         self.all_tools: List[Dict[str, Any]] = []
+         self._lock = asyncio.Lock()
+
+     # ------------------------------------------------------------------ #
+     # factory helpers #
+     # ------------------------------------------------------------------ #
+     @classmethod
+     async def create(
+         cls,
+         config_file: str,
+         servers: List[str],
+         server_names: Optional[Dict[int, str]] = None,
+         transport_type: str = "stdio",
+     ) -> "StreamManager":
+         inst = cls()
+         await inst.initialize(config_file, servers, server_names, transport_type)
+         return inst
+
+     @classmethod
+     async def create_with_sse(
+         cls,
+         servers: List[Dict[str, str]],
+         server_names: Optional[Dict[int, str]] = None,
+     ) -> "StreamManager":
+         inst = cls()
+         await inst.initialize_with_sse(servers, server_names)
+         return inst
+
+     # ------------------------------------------------------------------ #
+     # initialisation – stdio / sse #
+     # ------------------------------------------------------------------ #
+     async def initialize(
+         self,
+         config_file: str,
+         servers: List[str],
+         server_names: Optional[Dict[int, str]] = None,
+         transport_type: str = "stdio",
+     ) -> None:
+         async with self._lock:
+             self.server_names = server_names or {}
+
+             for idx, server_name in enumerate(servers):
+                 try:
+                     if transport_type == "stdio":
+                         params = await load_config(config_file, server_name)
+                         transport: MCPBaseTransport = StdioTransport(params)
+                     elif transport_type == "sse":
+                         transport = SSETransport("http://localhost:8000")
+                     else:
+                         logger.error("Unsupported transport type: %s", transport_type)
+                         continue
+
+                     if not await transport.initialize():
+                         logger.error("Failed to init %s", server_name)
+                         continue
+
+                     # store transport
+                     self.transports[server_name] = transport
+
+                     # ping + gather tools
+                     status = "Up" if await transport.send_ping() else "Down"
+                     tools = await transport.get_tools()
+
+                     for t in tools:
+                         name = t.get("name")
+                         if name:
+                             self.tool_to_server_map[name] = server_name
+                     self.all_tools.extend(tools)
+
+                     self.server_info.append(
+                         {
+                             "id": idx,
+                             "name": server_name,
+                             "tools": len(tools),
+                             "status": status,
+                         }
+                     )
+                     logger.info("Initialised %s – %d tool(s)", server_name, len(tools))
+                 except Exception as exc:  # noqa: BLE001
+                     logger.error("Error initialising %s: %s", server_name, exc)
+
+             logger.info(
+                 "StreamManager ready – %d server(s), %d tool(s)",
+                 len(self.transports),
+                 len(self.all_tools),
+             )
+
+     async def initialize_with_sse(
+         self,
+         servers: List[Dict[str, str]],
+         server_names: Optional[Dict[int, str]] = None,
+     ) -> None:
+         async with self._lock:
+             self.server_names = server_names or {}
+
+             for idx, cfg in enumerate(servers):
+                 name, url = cfg.get("name"), cfg.get("url")
+                 if not (name and url):
+                     logger.error("Bad server config: %s", cfg)
+                     continue
+                 try:
+                     transport = SSETransport(url, cfg.get("api_key"))
+                     if not await transport.initialize():
+                         logger.error("Failed to init SSE %s", name)
+                         continue
+
+                     self.transports[name] = transport
+                     status = "Up" if await transport.send_ping() else "Down"
+                     tools = await transport.get_tools()
+
+                     for t in tools:
+                         tname = t.get("name")
+                         if tname:
+                             self.tool_to_server_map[tname] = name
+                     self.all_tools.extend(tools)
+
+                     self.server_info.append(
+                         {"id": idx, "name": name, "tools": len(tools), "status": status}
+                     )
+                     logger.info("Initialised SSE %s – %d tool(s)", name, len(tools))
+                 except Exception as exc:  # noqa: BLE001
+                     logger.error("Error initialising SSE %s: %s", name, exc)
+
+             logger.info(
+                 "StreamManager ready – %d SSE server(s), %d tool(s)",
+                 len(self.transports),
+                 len(self.all_tools),
+             )
+
+     # ------------------------------------------------------------------ #
+     # queries #
+     # ------------------------------------------------------------------ #
+     def get_all_tools(self) -> List[Dict[str, Any]]:
+         return self.all_tools
+
+     def get_server_for_tool(self, tool_name: str) -> Optional[str]:
+         return self.tool_to_server_map.get(tool_name)
+
+     def get_server_info(self) -> List[Dict[str, Any]]:
+         return self.server_info
+
+     # ------------------------------------------------------------------ #
+     # EXTRA HELPERS – ping / resources / prompts #
+     # ------------------------------------------------------------------ #
+     async def ping_servers(self) -> List[Dict[str, Any]]:
+         async def _ping_one(name: str, tr: MCPBaseTransport):
+             try:
+                 ok = await tr.send_ping()
+             except Exception:  # pragma: no cover
+                 ok = False
+             return {"server": name, "ok": ok}
+
+         return await asyncio.gather(*(_ping_one(n, t) for n, t in self.transports.items()))
+
+     async def list_resources(self) -> List[Dict[str, Any]]:
+         out: List[Dict[str, Any]] = []
+
+         async def _one(name: str, tr: MCPBaseTransport):
+             if not hasattr(tr, "list_resources"):
+                 return
+             try:
+                 res = await tr.list_resources()  # type: ignore[attr-defined]
+                 # accept either {"resources": [...]} **or** a plain list
+                 resources = (
+                     res.get("resources", []) if isinstance(res, dict) else res
+                 )
+                 for item in resources:
+                     item = dict(item)
+                     item["server"] = name
+                     out.append(item)
+             except Exception as exc:
+                 logger.debug("resources/list failed for %s: %s", name, exc)
+
+         await asyncio.gather(*(_one(n, t) for n, t in self.transports.items()))
+         return out
+
+     async def list_prompts(self) -> List[Dict[str, Any]]:
+         out: List[Dict[str, Any]] = []
+
+         async def _one(name: str, tr: MCPBaseTransport):
+             if not hasattr(tr, "list_prompts"):
+                 return
+             try:
+                 res = await tr.list_prompts()  # type: ignore[attr-defined]
+                 prompts = res.get("prompts", []) if isinstance(res, dict) else res
+                 for item in prompts:
+                     item = dict(item)
+                     item["server"] = name
+                     out.append(item)
+             except Exception as exc:
+                 logger.debug("prompts/list failed for %s: %s", name, exc)
+
+         await asyncio.gather(*(_one(n, t) for n, t in self.transports.items()))
+         return out
+
+     # ------------------------------------------------------------------ #
+     # tool execution #
+     # ------------------------------------------------------------------ #
+     async def call_tool(
+         self,
+         tool_name: str,
+         arguments: Dict[str, Any],
+         server_name: Optional[str] = None,
+     ) -> Dict[str, Any]:
+         server_name = server_name or self.get_server_for_tool(tool_name)
+         if not server_name or server_name not in self.transports:
+             # wording kept exactly for unit-test expectation
+             return {
+                 "isError": True,
+                 "error": f"No server found for tool: {tool_name}",
+             }
+         return await self.transports[server_name].call_tool(tool_name, arguments)
+
+     # ------------------------------------------------------------------ #
+     # shutdown #
+     # ------------------------------------------------------------------ #
+     async def close(self) -> None:
+         tasks = [tr.close() for tr in self.transports.values()]
+         if tasks:
+             try:
+                 await asyncio.gather(*tasks)
+             except asyncio.CancelledError:  # pragma: no cover
+                 pass
+             except Exception as exc:  # noqa: BLE001
+                 logger.error("Error during close: %s", exc)
+
+         self.transports.clear()
+         self.server_info.clear()
+         self.tool_to_server_map.clear()
+         self.all_tools.clear()
+
+     # ------------------------------------------------------------------ #
+     # backwards-compat: streams helper #
+     # ------------------------------------------------------------------ #
+     def get_streams(self) -> List[Tuple[Any, Any]]:
+         """
+         Return a list of ``(read_stream, write_stream)`` tuples for **all**
+         transports. Older CLI commands rely on this helper.
+         """
+         pairs: List[Tuple[Any, Any]] = []
+
+         for tr in self.transports.values():
+             if hasattr(tr, "get_streams") and callable(tr.get_streams):
+                 pairs.extend(tr.get_streams())  # type: ignore[arg-type]
+                 continue
+
+             rd = getattr(tr, "read_stream", None)
+             wr = getattr(tr, "write_stream", None)
+             if rd and wr:
+                 pairs.append((rd, wr))
+
+         return pairs
+
+     # convenience alias
+     @property
+     def streams(self) -> List[Tuple[Any, Any]]:  # pragma: no cover
+         return self.get_streams()
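For completeness, the StreamManager can also be driven directly without the setup helpers. A sketch under the same placeholder assumptions (config path and server name are illustrative):

    import asyncio
    from chuk_tool_processor.mcp import StreamManager

    async def main() -> None:
        sm = await StreamManager.create(
            config_file="server_config.json",  # placeholder path
            servers=["echo"],                  # placeholder server name
            transport_type="stdio",
        )
        print(await sm.ping_servers())  # e.g. [{"server": "echo", "ok": True}]
        print(sm.get_all_tools())       # tool definitions gathered at initialise time
        await sm.close()

    asyncio.run(main())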
@@ -0,0 +1,14 @@
+ # chuk_tool_processor/mcp/transport/__init__.py
+ """
+ MCP transport implementations.
+ """
+
+ from .base_transport import MCPBaseTransport
+ from .stdio_transport import StdioTransport
+ from .sse_transport import SSETransport
+
+ __all__ = [
+     "MCPBaseTransport",
+     "StdioTransport",
+     "SSETransport"
+ ]