chuk-tool-processor 0.6.3-py3-none-any.whl → 0.6.5-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of chuk-tool-processor might be problematic; see the advisory details on the package registry page for more information.

@@ -58,15 +58,15 @@ async def log_context_span(
58
58
  prev = log_context.get_copy()
59
59
  log_context.update(span_ctx)
60
60
 
61
- logger.info("Starting %s", operation)
61
+ logger.debug("Starting %s", operation)
62
62
  try:
63
63
  yield
64
64
  if log_duration:
65
- logger.info(
65
+ logger.debug(
66
66
  "Completed %s", operation, extra={"context": {"duration": time.time() - start}}
67
67
  )
68
68
  else:
69
- logger.info("Completed %s", operation)
69
+ logger.debug("Completed %s", operation)
70
70
  except Exception as exc:
71
71
  logger.exception(
72
72
  "Error in %s: %s", operation, exc, extra={"context": {"duration": time.time() - start}}
@@ -97,10 +97,10 @@ async def request_logging(
97
97
  logger = get_logger("chuk_tool_processor.request")
98
98
  request_id = log_context.start_request(request_id)
99
99
  start = time.time()
100
- logger.info("Starting request %s", request_id)
100
+ logger.debug("Starting request %s", request_id)
101
101
  try:
102
102
  yield request_id
103
- logger.info(
103
+ logger.debug(
104
104
  "Completed request %s",
105
105
  request_id,
106
106
  extra={"context": {"duration": time.time() - start}},
@@ -184,4 +184,4 @@ async def log_tool_call(tool_call: Any, tool_result: Any) -> None:
184
184
  if tool_result.error:
185
185
  logger.error("Tool %s failed: %s", tool_call.tool, tool_result.error, extra={"context": ctx})
186
186
  else:
187
- logger.info("Tool %s succeeded in %.3fs", tool_call.tool, dur, extra={"context": ctx})
187
+ logger.debug("Tool %s succeeded in %.3fs", tool_call.tool, dur, extra={"context": ctx})
@@ -95,7 +95,7 @@ async def register_mcp_tools(
95
95
  except Exception as exc:
96
96
  logger.error("Failed to register MCP tool '%s': %s", tool_name, exc)
97
97
 
98
- logger.info("MCP registration complete - %d tool(s) available", len(registered))
98
+ logger.debug("MCP registration complete - %d tool(s) available", len(registered))
99
99
  return registered
100
100
 
101
101
 
@@ -134,14 +134,14 @@ async def update_mcp_tools_stream_manager(
134
134
  if tool and hasattr(tool, 'set_stream_manager'):
135
135
  tool.set_stream_manager(new_stream_manager)
136
136
  updated_count += 1
137
- logger.debug(f"Updated StreamManager for tool '{namespace}:{tool_name}'")
137
+ logger.debug("Updated StreamManager for tool '%s:%s'", namespace, tool_name)
138
138
  except Exception as e:
139
- logger.warning(f"Failed to update StreamManager for tool '{namespace}:{tool_name}': {e}")
139
+ logger.warning("Failed to update StreamManager for tool '%s:%s': %s", namespace, tool_name, e)
140
140
 
141
141
  action = "connected" if new_stream_manager else "disconnected"
142
- logger.info(f"StreamManager {action} for {updated_count} tools in namespace '{namespace}'")
142
+ logger.debug("StreamManager %s for %d tools in namespace '%s'", action, updated_count, namespace)
143
143
 
144
144
  except Exception as e:
145
- logger.error(f"Failed to update tools in namespace '{namespace}': {e}")
145
+ logger.error("Failed to update tools in namespace '%s': %s", namespace, e)
146
146
 
147
147
  return updated_count
@@ -110,8 +110,8 @@ async def setup_mcp_http_streamable(
110
110
  max_retries=max_retries,
111
111
  )
112
112
 
113
- logger.info(
114
- "MCP (HTTP Streamable) initialised - %s tool%s registered into namespace '%s'",
113
+ logger.debug(
114
+ "MCP (HTTP Streamable) initialised - %d tool%s registered into namespace '%s'",
115
115
  len(registered),
116
116
  "" if len(registered) == 1 else "s",
117
117
  namespace,
@@ -89,8 +89,8 @@ async def setup_mcp_sse( # noqa: C901 - long but just a config facade
89
89
  max_retries=max_retries,
90
90
  )
91
91
 
92
- logger.info(
93
- "MCP (SSE) initialised - %s tool%s registered into namespace '%s'",
92
+ logger.debug(
93
+ "MCP (SSE) initialised - %d tool%s registered into namespace '%s'",
94
94
  len(registered),
95
95
  "" if len(registered) == 1 else "s",
96
96
  namespace,
@@ -73,8 +73,8 @@ async def setup_mcp_stdio( # noqa: C901 - long but just a config facade
73
73
  max_retries=max_retries,
74
74
  )
75
75
 
76
- logger.info(
77
- "MCP (stdio) initialised - %s tool%s registered into namespace '%s'",
76
+ logger.debug(
77
+ "MCP (stdio) initialised - %d tool%s registered into namespace '%s'",
78
78
  len(registered),
79
79
  "" if len(registered) == 1 else "s",
80
80
  namespace,
@@ -73,7 +73,7 @@ class StreamManager:
73
73
  )
74
74
  return inst
75
75
  except asyncio.TimeoutError:
76
- logger.error(f"StreamManager initialization timed out after {initialization_timeout}s")
76
+ logger.error("StreamManager initialization timed out after %ss", initialization_timeout)
77
77
  raise RuntimeError(f"StreamManager initialization timed out after {initialization_timeout}s")
78
78
 
79
79
  @classmethod
@@ -99,7 +99,7 @@ class StreamManager:
99
99
  )
100
100
  return inst
101
101
  except asyncio.TimeoutError:
102
- logger.error(f"SSE StreamManager initialization timed out after {initialization_timeout}s")
102
+ logger.error("SSE StreamManager initialization timed out after %ss", initialization_timeout)
103
103
  raise RuntimeError(f"SSE StreamManager initialization timed out after {initialization_timeout}s")
104
104
 
105
105
  @classmethod
@@ -125,7 +125,7 @@ class StreamManager:
125
125
  )
126
126
  return inst
127
127
  except asyncio.TimeoutError:
128
- logger.error(f"HTTP Streamable StreamManager initialization timed out after {initialization_timeout}s")
128
+ logger.error("HTTP Streamable StreamManager initialization timed out after %ss", initialization_timeout)
129
129
  raise RuntimeError(f"HTTP Streamable StreamManager initialization timed out after {initialization_timeout}s")
130
130
 
131
131
  # ------------------------------------------------------------------ #
@@ -196,7 +196,7 @@ class StreamManager:
196
196
  else:
197
197
  sse_url = "http://localhost:8000"
198
198
  api_key = None
199
- logger.warning(f"No URL configured for SSE transport, using default: {sse_url}")
199
+ logger.warning("No URL configured for SSE transport, using default: %s", sse_url)
200
200
 
201
201
  transport = SSETransport(
202
202
  sse_url,
@@ -215,7 +215,7 @@ class StreamManager:
215
215
  http_url = "http://localhost:8000"
216
216
  api_key = None
217
217
  session_id = None
218
- logger.warning(f"No URL configured for HTTP Streamable transport, using default: {http_url}")
218
+ logger.warning("No URL configured for HTTP Streamable transport, using default: %s", http_url)
219
219
 
220
220
  transport = HTTPStreamableTransport(
221
221
  http_url,
@@ -252,13 +252,13 @@ class StreamManager:
252
252
  "status": status,
253
253
  }
254
254
  )
255
- logger.info("Initialised %s - %d tool(s)", server_name, len(tools))
255
+ logger.debug("Initialised %s - %d tool(s)", server_name, len(tools))
256
256
  except asyncio.TimeoutError:
257
257
  logger.error("Timeout initialising %s", server_name)
258
258
  except Exception as exc:
259
259
  logger.error("Error initialising %s: %s", server_name, exc)
260
260
 
261
- logger.info(
261
+ logger.debug(
262
262
  "StreamManager ready - %d server(s), %d tool(s)",
263
263
  len(self.transports),
264
264
  len(self.all_tools),
@@ -307,13 +307,13 @@ class StreamManager:
307
307
  self.server_info.append(
308
308
  {"id": idx, "name": name, "tools": len(tools), "status": status}
309
309
  )
310
- logger.info("Initialised SSE %s - %d tool(s)", name, len(tools))
310
+ logger.debug("Initialised SSE %s - %d tool(s)", name, len(tools))
311
311
  except asyncio.TimeoutError:
312
312
  logger.error("Timeout initialising SSE %s", name)
313
313
  except Exception as exc:
314
314
  logger.error("Error initialising SSE %s: %s", name, exc)
315
315
 
316
- logger.info(
316
+ logger.debug(
317
317
  "StreamManager ready - %d SSE server(s), %d tool(s)",
318
318
  len(self.transports),
319
319
  len(self.all_tools),
@@ -364,13 +364,13 @@ class StreamManager:
364
364
  self.server_info.append(
365
365
  {"id": idx, "name": name, "tools": len(tools), "status": status}
366
366
  )
367
- logger.info("Initialised HTTP Streamable %s - %d tool(s)", name, len(tools))
367
+ logger.debug("Initialised HTTP Streamable %s - %d tool(s)", name, len(tools))
368
368
  except asyncio.TimeoutError:
369
369
  logger.error("Timeout initialising HTTP Streamable %s", name)
370
370
  except Exception as exc:
371
371
  logger.error("Error initialising HTTP Streamable %s: %s", name, exc)
372
372
 
373
- logger.info(
373
+ logger.debug(
374
374
  "StreamManager ready - %d HTTP Streamable server(s), %d tool(s)",
375
375
  len(self.transports),
376
376
  len(self.all_tools),
@@ -395,20 +395,20 @@ class StreamManager:
395
395
  return []
396
396
 
397
397
  if server_name not in self.transports:
398
- logger.error(f"Server '{server_name}' not found in transports")
398
+ logger.error("Server '%s' not found in transports", server_name)
399
399
  return []
400
400
 
401
401
  transport = self.transports[server_name]
402
402
 
403
403
  try:
404
404
  tools = await asyncio.wait_for(transport.get_tools(), timeout=10.0)
405
- logger.debug(f"Found {len(tools)} tools for server {server_name}")
405
+ logger.debug("Found %d tools for server %s", len(tools), server_name)
406
406
  return tools
407
407
  except asyncio.TimeoutError:
408
- logger.error(f"Timeout listing tools for server {server_name}")
408
+ logger.error("Timeout listing tools for server %s", server_name)
409
409
  return []
410
410
  except Exception as e:
411
- logger.error(f"Error listing tools for server {server_name}: {e}")
411
+ logger.error("Error listing tools for server %s: %s", server_name, e)
412
412
  return []
413
413
 
414
414
  # ------------------------------------------------------------------ #
@@ -541,7 +541,7 @@ class StreamManager:
541
541
  self._closed = True
542
542
  return
543
543
 
544
- logger.debug(f"Closing {len(self.transports)} transports...")
544
+ logger.debug("Closing %d transports...", len(self.transports))
545
545
 
546
546
  try:
547
547
  # Use shield to protect the cleanup operation from cancellation
@@ -551,7 +551,7 @@ class StreamManager:
551
551
  logger.debug("Close operation cancelled, performing synchronous cleanup")
552
552
  self._sync_cleanup()
553
553
  except Exception as e:
554
- logger.debug(f"Error during close: {e}")
554
+ logger.debug("Error during close: %s", e)
555
555
  self._sync_cleanup()
556
556
  finally:
557
557
  self._closed = True
@@ -565,7 +565,7 @@ class StreamManager:
565
565
  try:
566
566
  await self._concurrent_close(transport_items, close_results)
567
567
  except Exception as e:
568
- logger.debug(f"Concurrent close failed: {e}, falling back to sequential close")
568
+ logger.debug("Concurrent close failed: %s, falling back to sequential close", e)
569
569
  # Strategy 2: Fall back to sequential close
570
570
  await self._sequential_close(transport_items, close_results)
571
571
 
@@ -575,7 +575,7 @@ class StreamManager:
575
575
  # Log summary
576
576
  if close_results:
577
577
  successful_closes = sum(1 for _, success, _ in close_results if success)
578
- logger.debug(f"Transport cleanup: {successful_closes}/{len(close_results)} closed successfully")
578
+ logger.debug("Transport cleanup: %d/%d closed successfully", successful_closes, len(close_results))
579
579
 
580
580
  async def _concurrent_close(self, transport_items: List[Tuple[str, MCPBaseTransport]], close_results: List) -> None:
581
581
  """Try to close all transports concurrently."""
@@ -602,10 +602,10 @@ class StreamManager:
602
602
  for i, (name, _) in enumerate(close_tasks):
603
603
  result = results[i] if i < len(results) else None
604
604
  if isinstance(result, Exception):
605
- logger.debug(f"Transport {name} close failed: {result}")
605
+ logger.debug("Transport %s close failed: %s", name, result)
606
606
  close_results.append((name, False, str(result)))
607
607
  else:
608
- logger.debug(f"Transport {name} closed successfully")
608
+ logger.debug("Transport %s closed successfully", name)
609
609
  close_results.append((name, True, None))
610
610
 
611
611
  except asyncio.TimeoutError:
@@ -632,16 +632,16 @@ class StreamManager:
632
632
  self._close_single_transport(name, transport),
633
633
  timeout=0.5 # Short timeout per transport
634
634
  )
635
- logger.debug(f"Closed transport: {name}")
635
+ logger.debug("Closed transport: %s", name)
636
636
  close_results.append((name, True, None))
637
637
  except asyncio.TimeoutError:
638
- logger.debug(f"Transport {name} close timed out (normal during shutdown)")
638
+ logger.debug("Transport %s close timed out (normal during shutdown)", name)
639
639
  close_results.append((name, False, "timeout"))
640
640
  except asyncio.CancelledError:
641
- logger.debug(f"Transport {name} close cancelled during event loop shutdown")
641
+ logger.debug("Transport %s close cancelled during event loop shutdown", name)
642
642
  close_results.append((name, False, "cancelled"))
643
643
  except Exception as e:
644
- logger.debug(f"Error closing transport {name}: {e}")
644
+ logger.debug("Error closing transport %s: %s", name, e)
645
645
  close_results.append((name, False, str(e)))
646
646
 
647
647
  async def _close_single_transport(self, name: str, transport: MCPBaseTransport) -> None:
@@ -650,9 +650,9 @@ class StreamManager:
650
650
  if hasattr(transport, 'close') and callable(transport.close):
651
651
  await transport.close()
652
652
  else:
653
- logger.debug(f"Transport {name} has no close method")
653
+ logger.debug("Transport %s has no close method", name)
654
654
  except Exception as e:
655
- logger.debug(f"Error closing transport {name}: {e}")
655
+ logger.debug("Error closing transport %s: %s", name, e)
656
656
  raise
657
657
 
658
658
  def _sync_cleanup(self) -> None:
@@ -660,9 +660,9 @@ class StreamManager:
660
660
  try:
661
661
  transport_count = len(self.transports)
662
662
  self._cleanup_state()
663
- logger.debug(f"Synchronous cleanup completed for {transport_count} transports")
663
+ logger.debug("Synchronous cleanup completed for %d transports", transport_count)
664
664
  except Exception as e:
665
- logger.debug(f"Error during synchronous cleanup: {e}")
665
+ logger.debug("Error during synchronous cleanup: %s", e)
666
666
 
667
667
  def _cleanup_state(self) -> None:
668
668
  """Clean up internal state synchronously."""
@@ -673,7 +673,7 @@ class StreamManager:
673
673
  self.all_tools.clear()
674
674
  self.server_names.clear()
675
675
  except Exception as e:
676
- logger.debug(f"Error during state cleanup: {e}")
676
+ logger.debug("Error during state cleanup: %s", e)
677
677
 
678
678
  # ------------------------------------------------------------------ #
679
679
  # backwards-compat: streams helper #
@@ -107,7 +107,7 @@ class HTTPStreamableTransport(MCPBaseTransport):
107
107
  start_time = time.time()
108
108
 
109
109
  try:
110
- logger.info(f"Initializing HTTP Streamable transport to {self.url}")
110
+ logger.debug("Initializing HTTP Streamable transport to %s", self.url)
111
111
 
112
112
  # Create HTTP parameters for chuk-mcp (following SSE pattern)
113
113
  headers = {}
@@ -117,7 +117,7 @@ class HTTPStreamableTransport(MCPBaseTransport):
117
117
 
118
118
  if self.session_id:
119
119
  headers["X-Session-ID"] = self.session_id
120
- logger.debug(f"Using session ID: {self.session_id}")
120
+ logger.debug("Using session ID: %s", self.session_id)
121
121
 
122
122
  http_params = StreamableHTTPParameters(
123
123
  url=self.url,
@@ -154,7 +154,7 @@ class HTTPStreamableTransport(MCPBaseTransport):
154
154
  self._metrics["initialization_time"] = init_time
155
155
  self._metrics["last_ping_time"] = ping_time
156
156
 
157
- logger.info(f"HTTP Streamable transport initialized successfully in {init_time:.3f}s (ping: {ping_time:.3f}s)")
157
+ logger.debug("HTTP Streamable transport initialized successfully in %.3fs (ping: %.3fs)", init_time, ping_time)
158
158
  return True
159
159
  else:
160
160
  logger.warning("HTTP connection established but ping failed")
@@ -164,12 +164,12 @@ class HTTPStreamableTransport(MCPBaseTransport):
164
164
  return True
165
165
 
166
166
  except asyncio.TimeoutError:
167
- logger.error(f"HTTP Streamable initialization timed out after {self.connection_timeout}s")
167
+ logger.error("HTTP Streamable initialization timed out after %ss", self.connection_timeout)
168
168
  logger.error("This may indicate the server is not responding to MCP initialization")
169
169
  await self._cleanup()
170
170
  return False
171
171
  except Exception as e:
172
- logger.error(f"Error initializing HTTP Streamable transport: {e}", exc_info=True)
172
+ logger.error("Error initializing HTTP Streamable transport: %s", e, exc_info=True)
173
173
  await self._cleanup()
174
174
  return False
175
175
 
@@ -180,10 +180,11 @@ class HTTPStreamableTransport(MCPBaseTransport):
180
180
 
181
181
  # Log final metrics (enhanced from SSE)
182
182
  if self.enable_metrics and self._metrics["total_calls"] > 0:
183
- logger.info(
184
- f"HTTP Streamable transport closing - Total calls: {self._metrics['total_calls']}, "
185
- f"Success rate: {(self._metrics['successful_calls']/self._metrics['total_calls']*100):.1f}%, "
186
- f"Avg response time: {self._metrics['avg_response_time']:.3f}s"
183
+ logger.debug(
184
+ "HTTP Streamable transport closing - Total calls: %d, Success rate: %.1f%%, Avg response time: %.3fs",
185
+ self._metrics["total_calls"],
186
+ (self._metrics["successful_calls"] / self._metrics["total_calls"] * 100),
187
+ self._metrics["avg_response_time"]
187
188
  )
188
189
 
189
190
  try:
@@ -192,7 +193,7 @@ class HTTPStreamableTransport(MCPBaseTransport):
192
193
  logger.debug("HTTP Streamable context closed")
193
194
 
194
195
  except Exception as e:
195
- logger.debug(f"Error during transport close: {e}")
196
+ logger.debug("Error during transport close: %s", e)
196
197
  finally:
197
198
  await self._cleanup()
198
199
 
@@ -219,14 +220,14 @@ class HTTPStreamableTransport(MCPBaseTransport):
219
220
  if self.enable_metrics:
220
221
  ping_time = time.time() - start_time
221
222
  self._metrics["last_ping_time"] = ping_time
222
- logger.debug(f"Ping completed in {ping_time:.3f}s: {result}")
223
+ logger.debug("Ping completed in %.3fs: %s", ping_time, result)
223
224
 
224
225
  return bool(result)
225
226
  except asyncio.TimeoutError:
226
227
  logger.error("Ping timed out")
227
228
  return False
228
229
  except Exception as e:
229
- logger.error(f"Ping failed: {e}")
230
+ logger.error("Ping failed: %s", e)
230
231
  return False
231
232
 
232
233
  async def get_tools(self) -> List[Dict[str, Any]]:
@@ -248,12 +249,12 @@ class HTTPStreamableTransport(MCPBaseTransport):
248
249
  elif isinstance(tools_response, list):
249
250
  tools = tools_response
250
251
  else:
251
- logger.warning(f"Unexpected tools response type: {type(tools_response)}")
252
+ logger.warning("Unexpected tools response type: %s", type(tools_response))
252
253
  tools = []
253
254
 
254
255
  if self.enable_metrics:
255
256
  response_time = time.time() - start_time
256
- logger.debug(f"Retrieved {len(tools)} tools in {response_time:.3f}s")
257
+ logger.debug("Retrieved %d tools in %.3fs", len(tools), response_time)
257
258
 
258
259
  return tools
259
260
 
@@ -261,7 +262,7 @@ class HTTPStreamableTransport(MCPBaseTransport):
261
262
  logger.error("Get tools timed out")
262
263
  return []
263
264
  except Exception as e:
264
- logger.error(f"Error getting tools: {e}")
265
+ logger.error("Error getting tools: %s", e)
265
266
  return []
266
267
 
267
268
  async def call_tool(self, tool_name: str, arguments: Dict[str, Any],
@@ -280,7 +281,7 @@ class HTTPStreamableTransport(MCPBaseTransport):
280
281
  self._metrics["total_calls"] += 1
281
282
 
282
283
  try:
283
- logger.debug(f"Calling tool '{tool_name}' with timeout {tool_timeout}s")
284
+ logger.debug("Calling tool '%s' with timeout %ss", tool_name, tool_timeout)
284
285
 
285
286
  raw_response = await asyncio.wait_for(
286
287
  send_tools_call(
@@ -299,9 +300,9 @@ class HTTPStreamableTransport(MCPBaseTransport):
299
300
  self._update_metrics(response_time, not result.get("isError", False))
300
301
 
301
302
  if not result.get("isError", False):
302
- logger.debug(f"Tool '{tool_name}' completed successfully in {response_time:.3f}s")
303
+ logger.debug("Tool '%s' completed successfully in %.3fs", tool_name, response_time)
303
304
  else:
304
- logger.warning(f"Tool '{tool_name}' failed in {response_time:.3f}s: {result.get('error', 'Unknown error')}")
305
+ logger.warning("Tool '%s' failed in %.3fs: %s", tool_name, response_time, result.get('error', 'Unknown error'))
305
306
 
306
307
  return result
307
308
 
@@ -311,7 +312,7 @@ class HTTPStreamableTransport(MCPBaseTransport):
311
312
  self._update_metrics(response_time, False)
312
313
 
313
314
  error_msg = f"Tool execution timed out after {tool_timeout}s"
314
- logger.error(f"Tool '{tool_name}' {error_msg}")
315
+ logger.error("Tool '%s' %s", tool_name, error_msg)
315
316
  return {
316
317
  "isError": True,
317
318
  "error": error_msg
@@ -322,7 +323,7 @@ class HTTPStreamableTransport(MCPBaseTransport):
322
323
  self._update_metrics(response_time, False)
323
324
 
324
325
  error_msg = f"Tool execution failed: {str(e)}"
325
- logger.error(f"Tool '{tool_name}' error: {error_msg}")
326
+ logger.error("Tool '%s' error: %s", tool_name, error_msg)
326
327
  return {
327
328
  "isError": True,
328
329
  "error": error_msg
@@ -359,7 +360,7 @@ class HTTPStreamableTransport(MCPBaseTransport):
359
360
  logger.error("List resources timed out")
360
361
  return {}
361
362
  except Exception as e:
362
- logger.debug(f"Error listing resources: {e}")
363
+ logger.debug("Error listing resources: %s", e)
363
364
  return {}
364
365
 
365
366
  async def list_prompts(self) -> Dict[str, Any]:
@@ -381,7 +382,7 @@ class HTTPStreamableTransport(MCPBaseTransport):
381
382
  logger.error("List prompts timed out")
382
383
  return {}
383
384
  except Exception as e:
384
- logger.debug(f"Error listing prompts: {e}")
385
+ logger.debug("Error listing prompts: %s", e)
385
386
  return {}
386
387
 
387
388
  def _normalize_tool_response(self, raw_response: Dict[str, Any]) -> Dict[str, Any]:
@@ -1,364 +1,426 @@
1
1
  # chuk_tool_processor/mcp/transport/sse_transport.py
2
+ """
3
+ Fixed SSE transport that matches your server's actual behavior.
4
+ Based on your working debug script.
5
+ """
2
6
  from __future__ import annotations
3
7
 
4
8
  import asyncio
5
9
  import json
6
- from typing import Dict, Any, List, Optional
10
+ import uuid
11
+ from typing import Dict, Any, List, Optional, Tuple
7
12
  import logging
8
13
 
9
- from .base_transport import MCPBaseTransport
10
-
11
- # Import latest chuk-mcp SSE transport
12
- try:
13
- from chuk_mcp.transports.sse import sse_client
14
- from chuk_mcp.transports.sse.parameters import SSEParameters
15
- from chuk_mcp.protocol.messages import (
16
- send_initialize,
17
- send_ping,
18
- send_tools_list,
19
- send_tools_call,
20
- )
21
- HAS_SSE_SUPPORT = True
22
- except ImportError:
23
- HAS_SSE_SUPPORT = False
14
+ import httpx
24
15
 
25
- # Import optional resource and prompt support
26
- try:
27
- from chuk_mcp.protocol.messages import (
28
- send_resources_list,
29
- send_resources_read,
30
- send_prompts_list,
31
- send_prompts_get,
32
- )
33
- HAS_RESOURCES_PROMPTS = True
34
- except ImportError:
35
- HAS_RESOURCES_PROMPTS = False
16
+ from .base_transport import MCPBaseTransport
36
17
 
37
18
  logger = logging.getLogger(__name__)
38
19
 
39
20
 
40
21
  class SSETransport(MCPBaseTransport):
41
22
  """
42
- Updated SSE transport using latest chuk-mcp APIs.
43
-
44
- Supports all required abstract methods and provides full MCP functionality.
23
+ SSE transport that works with your server's two-step async pattern:
24
+ 1. POST messages to /messages endpoint
25
+ 2. Receive responses via SSE stream
45
26
  """
46
27
 
47
28
  def __init__(self, url: str, api_key: Optional[str] = None,
48
29
  connection_timeout: float = 30.0, default_timeout: float = 30.0):
49
- """
50
- Initialize SSE transport with latest chuk-mcp.
51
-
52
- Args:
53
- url: SSE server URL
54
- api_key: Optional API key for authentication
55
- connection_timeout: Timeout for initial connection
56
- default_timeout: Default timeout for operations
57
- """
58
- self.url = url
30
+ """Initialize SSE transport."""
31
+ self.url = url.rstrip('/')
59
32
  self.api_key = api_key
60
33
  self.connection_timeout = connection_timeout
61
34
  self.default_timeout = default_timeout
62
35
 
63
- # State tracking
64
- self._sse_context = None
65
- self._read_stream = None
66
- self._write_stream = None
36
+ # State
37
+ self.session_id = None
38
+ self.message_url = None
39
+ self.pending_requests: Dict[str, asyncio.Future] = {}
67
40
  self._initialized = False
68
41
 
69
- if not HAS_SSE_SUPPORT:
70
- logger.warning("SSE transport not available - operations will fail")
42
+ # HTTP clients
43
+ self.stream_client = None
44
+ self.send_client = None
45
+
46
+ # SSE stream
47
+ self.sse_task = None
48
+ self.sse_response = None
49
+ self.sse_stream_context = None
50
+
51
+ def _get_headers(self) -> Dict[str, str]:
52
+ """Get headers with auth if available."""
53
+ headers = {}
54
+ if self.api_key:
55
+ headers['Authorization'] = f'Bearer {self.api_key}'
56
+ return headers
71
57
 
72
58
  async def initialize(self) -> bool:
73
- """Initialize using latest chuk-mcp sse_client."""
74
- if not HAS_SSE_SUPPORT:
75
- logger.error("SSE transport not available in chuk-mcp")
76
- return False
77
-
59
+ """Initialize SSE connection and MCP handshake."""
78
60
  if self._initialized:
79
61
  logger.warning("Transport already initialized")
80
62
  return True
81
-
63
+
82
64
  try:
83
- logger.info("Initializing SSE transport...")
65
+ logger.debug("Initializing SSE transport...")
84
66
 
85
- # Create SSE parameters for latest chuk-mcp
86
- sse_params = SSEParameters(
87
- url=self.url,
88
- timeout=self.connection_timeout,
89
- auto_reconnect=True,
90
- max_reconnect_attempts=3
91
- )
67
+ # Create HTTP clients
68
+ self.stream_client = httpx.AsyncClient(timeout=self.connection_timeout)
69
+ self.send_client = httpx.AsyncClient(timeout=self.default_timeout)
92
70
 
93
- # Create and enter the context - this should handle the full MCP handshake
94
- self._sse_context = sse_client(sse_params)
71
+ # Connect to SSE stream
72
+ sse_url = f"{self.url}/sse"
73
+ logger.debug("Connecting to SSE: %s", sse_url)
95
74
 
96
- # The sse_client should handle the entire initialization process
97
- logger.debug("Establishing SSE connection and MCP handshake...")
98
- self._read_stream, self._write_stream = await asyncio.wait_for(
99
- self._sse_context.__aenter__(),
100
- timeout=self.connection_timeout
75
+ self.sse_stream_context = self.stream_client.stream(
76
+ 'GET', sse_url, headers=self._get_headers()
101
77
  )
78
+ self.sse_response = await self.sse_stream_context.__aenter__()
102
79
 
103
- # At this point, chuk-mcp should have already completed the MCP initialization
104
- # Let's verify the connection works with a simple ping
105
- logger.debug("Verifying connection with ping...")
106
- ping_success = await asyncio.wait_for(
107
- send_ping(self._read_stream, self._write_stream),
108
- timeout=5.0
109
- )
80
+ if self.sse_response.status_code != 200:
81
+ logger.error("SSE connection failed: %s", self.sse_response.status_code)
82
+ return False
110
83
 
111
- if ping_success:
112
- self._initialized = True
113
- logger.info("SSE transport initialized successfully")
114
- return True
115
- else:
116
- logger.warning("SSE connection established but ping failed")
117
- # Still consider it initialized since connection was established
84
+ logger.debug("SSE streaming connection established")
85
+
86
+ # Start SSE processing task
87
+ self.sse_task = asyncio.create_task(self._process_sse_stream())
88
+
89
+ # Wait for session discovery
90
+ logger.debug("Waiting for session discovery...")
91
+ for i in range(50): # 5 seconds max
92
+ if self.message_url:
93
+ break
94
+ await asyncio.sleep(0.1)
95
+
96
+ if not self.message_url:
97
+ logger.error("Failed to get session info from SSE")
98
+ return False
99
+
100
+ logger.debug("Session ready: %s", self.session_id)
101
+
102
+ # Now do MCP initialization
103
+ try:
104
+ init_response = await self._send_request("initialize", {
105
+ "protocolVersion": "2024-11-05",
106
+ "capabilities": {},
107
+ "clientInfo": {
108
+ "name": "chuk-tool-processor",
109
+ "version": "1.0.0"
110
+ }
111
+ })
112
+
113
+ if 'error' in init_response:
114
+ logger.error("Initialize failed: %s", init_response['error'])
115
+ return False
116
+
117
+ # Send initialized notification
118
+ await self._send_notification("notifications/initialized")
119
+
118
120
  self._initialized = True
121
+ logger.debug("SSE transport initialized successfully")
119
122
  return True
120
-
121
- except asyncio.TimeoutError:
122
- logger.error(f"SSE initialization timed out after {self.connection_timeout}s")
123
- logger.error("This may indicate the server is not responding to MCP initialization")
124
- await self._cleanup()
125
- return False
123
+
124
+ except Exception as e:
125
+ logger.error("MCP initialization failed: %s", e)
126
+ return False
127
+
126
128
  except Exception as e:
127
- logger.error(f"Error initializing SSE transport: {e}", exc_info=True)
129
+ logger.error("Error initializing SSE transport: %s", e, exc_info=True)
128
130
  await self._cleanup()
129
131
  return False
130
132
 
131
- async def close(self) -> None:
132
- """Close the SSE transport properly."""
133
- if not self._initialized:
134
- return
135
-
133
+ async def _process_sse_stream(self):
134
+ """Process the persistent SSE stream."""
136
135
  try:
137
- if self._sse_context is not None:
138
- await self._sse_context.__aexit__(None, None, None)
139
- logger.debug("SSE context closed")
136
+ logger.debug("Starting SSE stream processing...")
137
+
138
+ async for line in self.sse_response.aiter_lines():
139
+ line = line.strip()
140
+ if not line:
141
+ continue
142
+
143
+ # Handle session endpoint discovery
144
+ if not self.message_url and line.startswith('data:') and '/messages/' in line:
145
+ endpoint_path = line.split(':', 1)[1].strip()
146
+ self.message_url = f"{self.url}{endpoint_path}"
147
+
148
+ if 'session_id=' in endpoint_path:
149
+ self.session_id = endpoint_path.split('session_id=')[1].split('&')[0]
150
+
151
+ logger.debug("Got session info: %s", self.session_id)
152
+ continue
140
153
 
154
+ # Handle JSON-RPC responses
155
+ if line.startswith('data:'):
156
+ data_part = line.split(':', 1)[1].strip()
157
+
158
+ # Skip pings and empty data
159
+ if not data_part or data_part.startswith('ping'):
160
+ continue
161
+
162
+ try:
163
+ response_data = json.loads(data_part)
164
+
165
+ if 'jsonrpc' in response_data and 'id' in response_data:
166
+ request_id = str(response_data['id'])
167
+
168
+ # Resolve pending request
169
+ if request_id in self.pending_requests:
170
+ future = self.pending_requests.pop(request_id)
171
+ if not future.done():
172
+ future.set_result(response_data)
173
+ logger.debug("Resolved request: %s", request_id)
174
+
175
+ except json.JSONDecodeError:
176
+ pass # Not JSON, ignore
177
+
141
178
  except Exception as e:
142
- logger.debug(f"Error during transport close: {e}")
143
- finally:
144
- await self._cleanup()
179
+ logger.error("SSE stream error: %s", e)
145
180
 
146
- async def _cleanup(self) -> None:
147
- """Clean up internal state."""
148
- self._sse_context = None
149
- self._read_stream = None
150
- self._write_stream = None
151
- self._initialized = False
181
+ async def _send_request(self, method: str, params: Dict[str, Any] = None,
182
+ timeout: Optional[float] = None) -> Dict[str, Any]:
183
+ """Send request and wait for async response."""
184
+ if not self.message_url:
185
+ raise RuntimeError("Not connected")
186
+
187
+ request_id = str(uuid.uuid4())
188
+ message = {
189
+ "jsonrpc": "2.0",
190
+ "id": request_id,
191
+ "method": method,
192
+ "params": params or {}
193
+ }
194
+
195
+ # Create future for response
196
+ future = asyncio.Future()
197
+ self.pending_requests[request_id] = future
198
+
199
+ try:
200
+ # Send message
201
+ headers = {
202
+ 'Content-Type': 'application/json',
203
+ **self._get_headers()
204
+ }
205
+
206
+ response = await self.send_client.post(
207
+ self.message_url,
208
+ headers=headers,
209
+ json=message
210
+ )
211
+
212
+ if response.status_code == 202:
213
+ # Wait for async response
214
+ timeout = timeout or self.default_timeout
215
+ result = await asyncio.wait_for(future, timeout=timeout)
216
+ return result
217
+ elif response.status_code == 200:
218
+ # Immediate response
219
+ self.pending_requests.pop(request_id, None)
220
+ return response.json()
221
+ else:
222
+ self.pending_requests.pop(request_id, None)
223
+ raise RuntimeError(f"Request failed: {response.status_code}")
224
+
225
+ except asyncio.TimeoutError:
226
+ self.pending_requests.pop(request_id, None)
227
+ raise
228
+ except Exception:
229
+ self.pending_requests.pop(request_id, None)
230
+ raise
231
+
232
+ async def _send_notification(self, method: str, params: Dict[str, Any] = None):
233
+ """Send notification (no response expected)."""
234
+ if not self.message_url:
235
+ raise RuntimeError("Not connected")
236
+
237
+ message = {
238
+ "jsonrpc": "2.0",
239
+ "method": method,
240
+ "params": params or {}
241
+ }
242
+
243
+ headers = {
244
+ 'Content-Type': 'application/json',
245
+ **self._get_headers()
246
+ }
247
+
248
+ await self.send_client.post(
249
+ self.message_url,
250
+ headers=headers,
251
+ json=message
252
+ )
152
253
 
153
254
  async def send_ping(self) -> bool:
154
- """Send ping using latest chuk-mcp."""
255
+ """Send ping to check connection."""
155
256
  if not self._initialized:
156
- logger.error("Cannot send ping: transport not initialized")
157
257
  return False
158
258
 
159
259
  try:
160
- result = await asyncio.wait_for(
161
- send_ping(self._read_stream, self._write_stream),
162
- timeout=self.default_timeout
163
- )
164
- logger.debug(f"Ping result: {result}")
165
- return bool(result)
166
- except asyncio.TimeoutError:
167
- logger.error("Ping timed out")
168
- return False
169
- except Exception as e:
170
- logger.error(f"Ping failed: {e}")
260
+ # Your server might not support ping, so we'll just check if we can list tools
261
+ response = await self._send_request("tools/list", {}, timeout=5.0)
262
+ return 'error' not in response
263
+ except Exception:
171
264
  return False
172
265
 
173
266
  async def get_tools(self) -> List[Dict[str, Any]]:
174
- """Get tools list using latest chuk-mcp."""
267
+ """Get tools list."""
175
268
  if not self._initialized:
176
269
  logger.error("Cannot get tools: transport not initialized")
177
270
  return []
178
271
 
179
272
  try:
180
- tools_response = await asyncio.wait_for(
181
- send_tools_list(self._read_stream, self._write_stream),
182
- timeout=self.default_timeout
183
- )
273
+ response = await self._send_request("tools/list", {})
184
274
 
185
- # Normalize response
186
- if isinstance(tools_response, dict):
187
- tools = tools_response.get("tools", [])
188
- elif isinstance(tools_response, list):
189
- tools = tools_response
190
- else:
191
- logger.warning(f"Unexpected tools response type: {type(tools_response)}")
192
- tools = []
275
+ if 'error' in response:
276
+ logger.error("Error getting tools: %s", response['error'])
277
+ return []
193
278
 
194
- logger.debug(f"Retrieved {len(tools)} tools")
279
+ tools = response.get('result', {}).get('tools', [])
280
+ logger.debug("Retrieved %d tools", len(tools))
195
281
  return tools
196
282
 
197
- except asyncio.TimeoutError:
198
- logger.error("Get tools timed out")
199
- return []
200
283
  except Exception as e:
201
- logger.error(f"Error getting tools: {e}")
284
+ logger.error("Error getting tools: %s", e)
202
285
  return []
203
286
 
204
287
  async def call_tool(self, tool_name: str, arguments: Dict[str, Any],
205
288
  timeout: Optional[float] = None) -> Dict[str, Any]:
206
- """Call tool using latest chuk-mcp."""
289
+ """Call a tool."""
207
290
  if not self._initialized:
208
291
  return {
209
292
  "isError": True,
210
293
  "error": "Transport not initialized"
211
294
  }
212
295
 
213
- tool_timeout = timeout or self.default_timeout
214
-
215
296
  try:
216
- logger.debug(f"Calling tool {tool_name} with args: {arguments}")
297
+ logger.debug("Calling tool %s with args: %s", tool_name, arguments)
217
298
 
218
- raw_response = await asyncio.wait_for(
219
- send_tools_call(
220
- self._read_stream,
221
- self._write_stream,
222
- tool_name,
223
- arguments
224
- ),
225
- timeout=tool_timeout
299
+ response = await self._send_request(
300
+ "tools/call",
301
+ {
302
+ "name": tool_name,
303
+ "arguments": arguments
304
+ },
305
+ timeout=timeout
226
306
  )
227
307
 
228
- logger.debug(f"Tool {tool_name} raw response: {raw_response}")
229
- return self._normalize_tool_response(raw_response)
230
-
308
+ if 'error' in response:
309
+ return {
310
+ "isError": True,
311
+ "error": response['error'].get('message', 'Unknown error')
312
+ }
313
+
314
+ # Extract result
315
+ result = response.get('result', {})
316
+
317
+ # Handle content format
318
+ if 'content' in result:
319
+ content = result['content']
320
+ if isinstance(content, list) and len(content) == 1:
321
+ content_item = content[0]
322
+ if isinstance(content_item, dict) and content_item.get('type') == 'text':
323
+ text_content = content_item.get('text', '')
324
+ try:
325
+ # Try to parse as JSON
326
+ parsed_content = json.loads(text_content)
327
+ return {
328
+ "isError": False,
329
+ "content": parsed_content
330
+ }
331
+ except json.JSONDecodeError:
332
+ return {
333
+ "isError": False,
334
+ "content": text_content
335
+ }
336
+
337
+ return {
338
+ "isError": False,
339
+ "content": content
340
+ }
341
+
342
+ return {
343
+ "isError": False,
344
+ "content": result
345
+ }
346
+
231
347
  except asyncio.TimeoutError:
232
- logger.error(f"Tool {tool_name} timed out after {tool_timeout}s")
233
348
  return {
234
349
  "isError": True,
235
- "error": f"Tool execution timed out after {tool_timeout}s"
350
+ "error": "Tool execution timed out"
236
351
  }
237
352
  except Exception as e:
238
- logger.error(f"Error calling tool {tool_name}: {e}")
353
+ logger.error("Error calling tool %s: %s", tool_name, e)
239
354
  return {
240
355
  "isError": True,
241
- "error": f"Tool execution failed: {str(e)}"
356
+ "error": str(e)
242
357
  }
243
358
 
244
359
  async def list_resources(self) -> Dict[str, Any]:
245
- """List resources using latest chuk-mcp."""
246
- if not HAS_RESOURCES_PROMPTS:
247
- logger.debug("Resources/prompts not available in chuk-mcp")
248
- return {}
249
-
360
+ """List resources."""
250
361
  if not self._initialized:
251
362
  return {}
252
363
 
253
364
  try:
254
- response = await asyncio.wait_for(
255
- send_resources_list(self._read_stream, self._write_stream),
256
- timeout=self.default_timeout
257
- )
258
- return response if isinstance(response, dict) else {}
259
- except asyncio.TimeoutError:
260
- logger.error("List resources timed out")
261
- return {}
262
- except Exception as e:
263
- logger.debug(f"Error listing resources: {e}")
365
+ response = await self._send_request("resources/list", {}, timeout=10.0)
366
+ if 'error' in response:
367
+ logger.debug("Resources not supported: %s", response['error'])
368
+ return {}
369
+ return response.get('result', {})
370
+ except Exception:
264
371
  return {}
265
372
 
266
373
  async def list_prompts(self) -> Dict[str, Any]:
267
- """List prompts using latest chuk-mcp."""
268
- if not HAS_RESOURCES_PROMPTS:
269
- logger.debug("Resources/prompts not available in chuk-mcp")
270
- return {}
271
-
374
+ """List prompts."""
272
375
  if not self._initialized:
273
376
  return {}
274
377
 
275
378
  try:
276
- response = await asyncio.wait_for(
277
- send_prompts_list(self._read_stream, self._write_stream),
278
- timeout=self.default_timeout
279
- )
280
- return response if isinstance(response, dict) else {}
281
- except asyncio.TimeoutError:
282
- logger.error("List prompts timed out")
379
+ response = await self._send_request("prompts/list", {}, timeout=10.0)
380
+ if 'error' in response:
381
+ logger.debug("Prompts not supported: %s", response['error'])
382
+ return {}
383
+ return response.get('result', {})
384
+ except Exception:
283
385
  return {}
284
- except Exception as e:
285
- logger.debug(f"Error listing prompts: {e}")
286
- return {}
287
-
288
- def _normalize_tool_response(self, raw_response: Dict[str, Any]) -> Dict[str, Any]:
289
- """Normalize response for backward compatibility."""
290
- # Handle explicit error in response
291
- if "error" in raw_response:
292
- error_info = raw_response["error"]
293
- if isinstance(error_info, dict):
294
- error_msg = error_info.get("message", "Unknown error")
295
- else:
296
- error_msg = str(error_info)
297
-
298
- return {
299
- "isError": True,
300
- "error": error_msg
301
- }
302
386
 
303
- # Handle successful response with result
304
- if "result" in raw_response:
305
- result = raw_response["result"]
306
-
307
- if isinstance(result, dict) and "content" in result:
308
- return {
309
- "isError": False,
310
- "content": self._extract_content(result["content"])
311
- }
312
- else:
313
- return {
314
- "isError": False,
315
- "content": result
316
- }
317
-
318
- # Handle direct content-based response
319
- if "content" in raw_response:
320
- return {
321
- "isError": False,
322
- "content": self._extract_content(raw_response["content"])
323
- }
324
-
325
- # Fallback
326
- return {
327
- "isError": False,
328
- "content": raw_response
329
- }
387
+ async def close(self) -> None:
388
+ """Close the transport."""
389
+ await self._cleanup()
330
390
 
331
- def _extract_content(self, content_list: Any) -> Any:
332
- """Extract content from MCP content format."""
333
- if not isinstance(content_list, list) or not content_list:
334
- return content_list
391
+ async def _cleanup(self) -> None:
392
+ """Clean up resources."""
393
+ if self.sse_task:
394
+ self.sse_task.cancel()
395
+ try:
396
+ await self.sse_task
397
+ except asyncio.CancelledError:
398
+ pass
335
399
 
336
- # Handle single content item
337
- if len(content_list) == 1:
338
- content_item = content_list[0]
339
- if isinstance(content_item, dict):
340
- if content_item.get("type") == "text":
341
- text_content = content_item.get("text", "")
342
- # Try to parse JSON, fall back to plain text
343
- try:
344
- return json.loads(text_content)
345
- except json.JSONDecodeError:
346
- return text_content
347
- else:
348
- return content_item
400
+ if self.sse_stream_context:
401
+ try:
402
+ await self.sse_stream_context.__aexit__(None, None, None)
403
+ except Exception:
404
+ pass
349
405
 
350
- # Multiple content items
351
- return content_list
406
+ if self.stream_client:
407
+ await self.stream_client.aclose()
408
+
409
+ if self.send_client:
410
+ await self.send_client.aclose()
411
+
412
+ self._initialized = False
413
+ self.session_id = None
414
+ self.message_url = None
415
+ self.pending_requests.clear()
352
416
 
353
417
  def get_streams(self) -> List[tuple]:
354
- """Provide streams for backward compatibility."""
355
- if self._initialized and self._read_stream and self._write_stream:
356
- return [(self._read_stream, self._write_stream)]
418
+ """Not applicable for this transport."""
357
419
  return []
358
420
 
359
421
  def is_connected(self) -> bool:
360
- """Check connection status."""
361
- return self._initialized and self._read_stream is not None and self._write_stream is not None
422
+ """Check if connected."""
423
+ return self._initialized and self.session_id is not None
362
424
 
363
425
  async def __aenter__(self):
364
426
  """Context manager support."""
@@ -372,6 +434,6 @@ class SSETransport(MCPBaseTransport):
372
434
  await self.close()
373
435
 
374
436
  def __repr__(self) -> str:
375
- """String representation for debugging."""
437
+ """String representation."""
376
438
  status = "initialized" if self._initialized else "not initialized"
377
- return f"SSETransport(status={status}, url={self.url})"
439
+ return f"SSETransport(status={status}, url={self.url}, session={self.session_id})"
@@ -53,7 +53,7 @@ class StdioTransport(MCPBaseTransport):
53
53
  return True
54
54
 
55
55
  try:
56
- logger.info("Initializing STDIO transport...")
56
+ logger.debug("Initializing STDIO transport...")
57
57
  self._context = stdio_client(self.server_params)
58
58
  self._streams = await self._context.__aenter__()
59
59
 
@@ -61,13 +61,13 @@ class StdioTransport(MCPBaseTransport):
61
61
  init_result = await send_initialize(*self._streams)
62
62
  if init_result:
63
63
  self._initialized = True
64
- logger.info("STDIO transport initialized successfully")
64
+ logger.debug("STDIO transport initialized successfully")
65
65
  return True
66
66
  else:
67
67
  await self._cleanup()
68
68
  return False
69
69
  except Exception as e:
70
- logger.error(f"Error initializing STDIO transport: {e}")
70
+ logger.error("Error initializing STDIO transport: %s", e)
71
71
  await self._cleanup()
72
72
  return False
73
73
 
@@ -78,7 +78,7 @@ class StdioTransport(MCPBaseTransport):
78
78
  # Simple delegation - the StreamManager now calls this in the correct context
79
79
  await self._context.__aexit__(None, None, None)
80
80
  except Exception as e:
81
- logger.debug(f"Error during close: {e}")
81
+ logger.debug("Error during close: %s", e)
82
82
  finally:
83
83
  await self._cleanup()
84
84
 
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: chuk-tool-processor
3
- Version: 0.6.3
3
+ Version: 0.6.5
4
4
  Summary: Async-native framework for registering, discovering, and executing tools referenced in LLM responses
5
5
  Author-email: CHUK Team <chrishayuk@somejunkmailbox.com>
6
6
  Maintainer-email: CHUK Team <chrishayuk@somejunkmailbox.com>
@@ -20,7 +20,7 @@ Classifier: Framework :: AsyncIO
20
20
  Classifier: Typing :: Typed
21
21
  Requires-Python: >=3.11
22
22
  Description-Content-Type: text/markdown
23
- Requires-Dist: chuk-mcp>=0.5
23
+ Requires-Dist: chuk-mcp>=0.5.2
24
24
  Requires-Dist: dotenv>=0.9.9
25
25
  Requires-Dist: pydantic>=2.11.3
26
26
  Requires-Dist: uuid>=1.30
@@ -14,20 +14,20 @@ chuk_tool_processor/execution/wrappers/retry.py,sha256=giws8cxs7zfR2ZnBz3WYxtr1-
14
14
  chuk_tool_processor/logging/__init__.py,sha256=UDFPYU_bzeUUSN8qw3nMpo-FMKXetIi7OYtN3W3iPbg,3794
15
15
  chuk_tool_processor/logging/context.py,sha256=69EsAhCiya_4zyivz1uUJAhwo0rXpOoTvePWvaFYIi8,12225
16
16
  chuk_tool_processor/logging/formatter.py,sha256=RhlV6NqBYRBOtytDY49c9Y1J4l02ZjNXIgVRn03tfSQ,3061
17
- chuk_tool_processor/logging/helpers.py,sha256=c1mS1sb_rh4bKG0hisyvT7l7cirQfXPSyWeBqmqALRw,5941
17
+ chuk_tool_processor/logging/helpers.py,sha256=0j-PoFUGyzl9NQ6jJEcS3YKV8AJgs8VwUpYa-6UiWT0,5946
18
18
  chuk_tool_processor/logging/metrics.py,sha256=s59Au8q0eqGGtJMDqmJBZhbJHh4BWGE1CzT0iI8lRS8,3624
19
19
  chuk_tool_processor/mcp/__init__.py,sha256=QkHgRu_YAjmYNTEYMK4bYILu8KK6b0aziTKvBVTRXvI,1052
20
20
  chuk_tool_processor/mcp/mcp_tool.py,sha256=x4zivoBaO7xiepUTN5kfbGcNDNh8rAxvQdgyqduCdRw,17768
21
- chuk_tool_processor/mcp/register_mcp_tools.py,sha256=BYKO6PddixWq9-KxW_fDyzSLcAPam7_BWNopu9y63bs,4879
22
- chuk_tool_processor/mcp/setup_mcp_http_streamable.py,sha256=HCzJ8AtlY5dZt4wyOnzR9LHN6TkkEzEhDUpZEFj89Co,4537
23
- chuk_tool_processor/mcp/setup_mcp_sse.py,sha256=kzvy_tRlu66UN0iX1wBk7qSFXXUt7wPrUQB31XpP-zI,3794
24
- chuk_tool_processor/mcp/setup_mcp_stdio.py,sha256=Pp6ON_fn54uMsX8VyM6-_Lmk1I-uOLG1M65gfjvBj3w,2883
25
- chuk_tool_processor/mcp/stream_manager.py,sha256=3JSxoVpvAI0_gZt7Njhp0vgpTnh4mLt2Mf2RIE6jUFc,31485
21
+ chuk_tool_processor/mcp/register_mcp_tools.py,sha256=s6mQMtZr7dswT2WXDJ84zjOTSi3cOmtRTGGdLMl15bM,4897
22
+ chuk_tool_processor/mcp/setup_mcp_http_streamable.py,sha256=ZJUAj7LL4CRfc-CBl0SQJk0qfW12IuixR-7J2hbQ8S8,4538
23
+ chuk_tool_processor/mcp/setup_mcp_sse.py,sha256=4nf0V6cykAPLxtgsl8RTAYQdVWITUNu_3CIU1vcLjlo,3795
24
+ chuk_tool_processor/mcp/setup_mcp_stdio.py,sha256=L8anrx_b5HDsaMqAfbpWaHex084DTd76W8WBf3ClC48,2884
25
+ chuk_tool_processor/mcp/stream_manager.py,sha256=DN58d76J3Xkg9DI3f6EFe9qAG-ZsFa4dW0ARygOZkz4,31527
26
26
  chuk_tool_processor/mcp/transport/__init__.py,sha256=0DX7m_VvlXPxijc-88_QTLhq4ZqAgUgzBjSMGL9C_lM,963
27
27
  chuk_tool_processor/mcp/transport/base_transport.py,sha256=bqId34OMQMxzMXtrKq_86sot0_x0NS_ecaIllsCyy6I,3423
28
- chuk_tool_processor/mcp/transport/http_streamable_transport.py,sha256=jtjv3RQU7753hV3QV3ZLhzJlP1w9zOy-_hI7OOjEC9A,19067
29
- chuk_tool_processor/mcp/transport/sse_transport.py,sha256=OZjV5LL1o9MZzPvsSNn0z6nn5B2ndA5q2DXluPX-Zm8,13413
30
- chuk_tool_processor/mcp/transport/stdio_transport.py,sha256=QEpaGufkYmebrUZJMXHM-Q-Kj8TkkagorgUEqT17GwM,9095
28
+ chuk_tool_processor/mcp/transport/http_streamable_transport.py,sha256=3I3tNYU8r4YqCbNhMCkoucvZc6VS2ulzeUjDe2FbcRk,19108
29
+ chuk_tool_processor/mcp/transport/sse_transport.py,sha256=smsBrKm-U-nOwsWLz_jbAF7MkmQYx9jkZLWyEw3JFRE,15594
30
+ chuk_tool_processor/mcp/transport/stdio_transport.py,sha256=DPXLR_OxuCJ2bgwYDuT_iYC_CDcUIySvLNO7JzyiPyc,9099
31
31
  chuk_tool_processor/models/__init__.py,sha256=TC__rdVa0lQsmJHM_hbLDPRgToa_pQT_UxRcPZk6iVw,40
32
32
  chuk_tool_processor/models/execution_strategy.py,sha256=UVW35YIeMY2B3mpIKZD2rAkyOPayI6ckOOUALyf0YiQ,2115
33
33
  chuk_tool_processor/models/streaming_tool.py,sha256=0v2PSPTgZ5TS_PpVdohvVhh99fPwPQM_R_z4RU0mlLM,3541
@@ -54,7 +54,7 @@ chuk_tool_processor/registry/providers/__init__.py,sha256=eigwG_So11j7WbDGSWaKd3
54
54
  chuk_tool_processor/registry/providers/memory.py,sha256=6cMtUwLO6zrk3pguQRgxJ2CReHAzewgZsizWZhsoStk,5184
55
55
  chuk_tool_processor/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
56
56
  chuk_tool_processor/utils/validation.py,sha256=V5N1dH9sJlHepFIbiI2k2MU82o7nvnh0hKyIt2jdgww,4136
57
- chuk_tool_processor-0.6.3.dist-info/METADATA,sha256=up4IbXALgHjGbEvRXO0bjNz6_dxptdOfP4lFO_6KBbk,23461
58
- chuk_tool_processor-0.6.3.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
59
- chuk_tool_processor-0.6.3.dist-info/top_level.txt,sha256=7lTsnuRx4cOW4U2sNJWNxl4ZTt_J1ndkjTbj3pHPY5M,20
60
- chuk_tool_processor-0.6.3.dist-info/RECORD,,
57
+ chuk_tool_processor-0.6.5.dist-info/METADATA,sha256=zwmOSBOSAbk0A8oTfY34RokEr7VjtvUaEfWpGmuXSyg,23463
58
+ chuk_tool_processor-0.6.5.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
59
+ chuk_tool_processor-0.6.5.dist-info/top_level.txt,sha256=7lTsnuRx4cOW4U2sNJWNxl4ZTt_J1ndkjTbj3pHPY5M,20
60
+ chuk_tool_processor-0.6.5.dist-info/RECORD,,