chuk-tool-processor 0.6.4__py3-none-any.whl → 0.9.7__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

Files changed (66)
  1. chuk_tool_processor/core/__init__.py +32 -1
  2. chuk_tool_processor/core/exceptions.py +225 -13
  3. chuk_tool_processor/core/processor.py +135 -104
  4. chuk_tool_processor/execution/strategies/__init__.py +6 -0
  5. chuk_tool_processor/execution/strategies/inprocess_strategy.py +142 -150
  6. chuk_tool_processor/execution/strategies/subprocess_strategy.py +202 -206
  7. chuk_tool_processor/execution/tool_executor.py +82 -84
  8. chuk_tool_processor/execution/wrappers/__init__.py +42 -0
  9. chuk_tool_processor/execution/wrappers/caching.py +150 -116
  10. chuk_tool_processor/execution/wrappers/circuit_breaker.py +370 -0
  11. chuk_tool_processor/execution/wrappers/rate_limiting.py +76 -43
  12. chuk_tool_processor/execution/wrappers/retry.py +116 -78
  13. chuk_tool_processor/logging/__init__.py +23 -17
  14. chuk_tool_processor/logging/context.py +40 -45
  15. chuk_tool_processor/logging/formatter.py +22 -21
  16. chuk_tool_processor/logging/helpers.py +28 -42
  17. chuk_tool_processor/logging/metrics.py +13 -15
  18. chuk_tool_processor/mcp/__init__.py +8 -12
  19. chuk_tool_processor/mcp/mcp_tool.py +158 -114
  20. chuk_tool_processor/mcp/register_mcp_tools.py +22 -22
  21. chuk_tool_processor/mcp/setup_mcp_http_streamable.py +57 -17
  22. chuk_tool_processor/mcp/setup_mcp_sse.py +57 -17
  23. chuk_tool_processor/mcp/setup_mcp_stdio.py +11 -11
  24. chuk_tool_processor/mcp/stream_manager.py +333 -276
  25. chuk_tool_processor/mcp/transport/__init__.py +22 -29
  26. chuk_tool_processor/mcp/transport/base_transport.py +180 -44
  27. chuk_tool_processor/mcp/transport/http_streamable_transport.py +505 -325
  28. chuk_tool_processor/mcp/transport/models.py +100 -0
  29. chuk_tool_processor/mcp/transport/sse_transport.py +607 -276
  30. chuk_tool_processor/mcp/transport/stdio_transport.py +597 -116
  31. chuk_tool_processor/models/__init__.py +21 -1
  32. chuk_tool_processor/models/execution_strategy.py +16 -21
  33. chuk_tool_processor/models/streaming_tool.py +28 -25
  34. chuk_tool_processor/models/tool_call.py +49 -31
  35. chuk_tool_processor/models/tool_export_mixin.py +22 -8
  36. chuk_tool_processor/models/tool_result.py +40 -77
  37. chuk_tool_processor/models/tool_spec.py +350 -0
  38. chuk_tool_processor/models/validated_tool.py +36 -18
  39. chuk_tool_processor/observability/__init__.py +30 -0
  40. chuk_tool_processor/observability/metrics.py +312 -0
  41. chuk_tool_processor/observability/setup.py +105 -0
  42. chuk_tool_processor/observability/tracing.py +345 -0
  43. chuk_tool_processor/plugins/__init__.py +1 -1
  44. chuk_tool_processor/plugins/discovery.py +11 -11
  45. chuk_tool_processor/plugins/parsers/__init__.py +1 -1
  46. chuk_tool_processor/plugins/parsers/base.py +1 -2
  47. chuk_tool_processor/plugins/parsers/function_call_tool.py +13 -8
  48. chuk_tool_processor/plugins/parsers/json_tool.py +4 -3
  49. chuk_tool_processor/plugins/parsers/openai_tool.py +12 -7
  50. chuk_tool_processor/plugins/parsers/xml_tool.py +4 -4
  51. chuk_tool_processor/registry/__init__.py +12 -12
  52. chuk_tool_processor/registry/auto_register.py +22 -30
  53. chuk_tool_processor/registry/decorators.py +127 -129
  54. chuk_tool_processor/registry/interface.py +26 -23
  55. chuk_tool_processor/registry/metadata.py +27 -22
  56. chuk_tool_processor/registry/provider.py +17 -18
  57. chuk_tool_processor/registry/providers/__init__.py +16 -19
  58. chuk_tool_processor/registry/providers/memory.py +18 -25
  59. chuk_tool_processor/registry/tool_export.py +42 -51
  60. chuk_tool_processor/utils/validation.py +15 -16
  61. chuk_tool_processor-0.9.7.dist-info/METADATA +1813 -0
  62. chuk_tool_processor-0.9.7.dist-info/RECORD +67 -0
  63. chuk_tool_processor-0.6.4.dist-info/METADATA +0 -697
  64. chuk_tool_processor-0.6.4.dist-info/RECORD +0 -60
  65. {chuk_tool_processor-0.6.4.dist-info → chuk_tool_processor-0.9.7.dist-info}/WHEEL +0 -0
  66. {chuk_tool_processor-0.6.4.dist-info → chuk_tool_processor-0.9.7.dist-info}/top_level.txt +0 -0
chuk_tool_processor/logging/formatter.py

@@ -2,12 +2,13 @@
 """
 Structured JSON formatter for logging.
 """
+
 from __future__ import annotations

 import json
 import logging
-from datetime import datetime, timezone
-from typing import Any, Dict
+from datetime import UTC, datetime
+from typing import Any

 __all__ = ["StructuredFormatter"]

@@ -15,7 +16,7 @@ __all__ = ["StructuredFormatter"]
 class StructuredFormatter(logging.Formatter):
     """
     JSON formatter that can serialize BaseModels, datetimes, sets, etc.
-
+
     This formatter converts log records to JSON format with proper handling
     of various Python types, ensuring logs are machine-readable and structured.
     """
@@ -24,10 +25,10 @@ class StructuredFormatter(logging.Formatter):
     def _json_default(obj: Any) -> Any:
         """
         Custom JSON serializer for handling special types.
-
+
         Args:
             obj: Object to serialize
-
+
         Returns:
             JSON-serializable representation
         """
@@ -36,42 +37,42 @@ class StructuredFormatter(logging.Formatter):
             # Import pydantic inside the method to avoid global import errors
             # This allows the formatter to work even if pydantic is not installed
             from pydantic import BaseModel
+
             if isinstance(obj, BaseModel):
                 return obj.model_dump()
         except (ImportError, AttributeError):
             # Either pydantic is not installed or the object doesn't have model_dump
             pass
-
+
         # Handle dates and datetimes
         try:
             from datetime import date
-            if isinstance(obj, (datetime, date)):
+
+            if isinstance(obj, datetime | date):
                 return obj.isoformat()
         except ImportError:
             pass
-
+
         # Sets → list
-        if isinstance(obj, (set, frozenset)):
+        if isinstance(obj, set | frozenset):
            return list(obj)
-
+
         # Fall back to string representation
         return str(obj)

     def format(self, record: logging.LogRecord) -> str:
         """
         Format a log record as JSON.
-
+
         Args:
             record: Log record to format
-
+
         Returns:
             JSON string representation
         """
         # Build base data structure
-        data: Dict[str, Any] = {
-            "timestamp": datetime.fromtimestamp(record.created, timezone.utc)
-            .isoformat()
-            .replace("+00:00", "Z"),
+        data: dict[str, Any] = {
+            "timestamp": datetime.fromtimestamp(record.created, UTC).isoformat().replace("+00:00", "Z"),
             "level": record.levelname,
             "message": record.getMessage(),
             "logger": record.name,
@@ -81,18 +82,18 @@ class StructuredFormatter(logging.Formatter):
             "line": record.lineno,
             "function": record.funcName,
         }
-
+
         # Add exception traceback if present
         if record.exc_info:
             data["traceback"] = self.formatException(record.exc_info)
-
+
         # Add extra fields if present
         if hasattr(record, "extra"):
             data.update(record.extra)
-
+
         # Add context if present
         if hasattr(record, "context"):
             data["context"] = record.context
-
+
         # Serialize to JSON
-        return json.dumps(data, default=self._json_default)
+        return json.dumps(data, default=self._json_default)
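
The formatter changes above are mechanical modernisations: `Dict[str, Any]` becomes `dict[str, Any]`, `timezone.utc` becomes the `datetime.UTC` alias (Python 3.11+), tuple `isinstance` checks become `X | Y` unions (Python 3.10+), and stray whitespace is tidied; the JSON output itself is unchanged. A minimal usage sketch, assuming only the module path from the file list and the class name from the diff:

```python
# Illustrative sketch - everything here is plain stdlib logging except the
# StructuredFormatter import, whose path comes from the file list above.
import logging

from chuk_tool_processor.logging.formatter import StructuredFormatter

handler = logging.StreamHandler()
handler.setFormatter(StructuredFormatter())

log = logging.getLogger("example")
log.addHandler(handler)
log.setLevel(logging.DEBUG)

# Each record is emitted as one JSON object; the fields visible in the diff
# include "timestamp" (now built with datetime.UTC), "level", "message",
# "logger", "line", and "function", plus "context" when attached via extra=.
log.info("tool registered", extra={"context": {"tool": "search"}})
```

Passing context through `extra={"context": ...}` mirrors how the package's own helpers call the logger, as the next file shows.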
chuk_tool_processor/logging/helpers.py

@@ -2,44 +2,40 @@
 """
 Async-native logging helpers for tracing and monitoring tool execution.
 """
+
 from __future__ import annotations

 import time
 import uuid
+from collections.abc import AsyncGenerator
 from contextlib import asynccontextmanager
-from datetime import datetime, timezone
-from typing import Any, Dict, Optional, AsyncGenerator
+from datetime import UTC, datetime
+from typing import Any

 # Import context directly - avoid circular imports
 from .context import get_logger, log_context

-__all__ = [
-    "log_context_span",
-    "request_logging",
-    "log_tool_call"
-]
+__all__ = ["log_context_span", "request_logging", "log_tool_call"]
+

 # --------------------------------------------------------------------------- #
 # async context-manager helpers
 # --------------------------------------------------------------------------- #
 @asynccontextmanager
 async def log_context_span(
-    operation: str,
-    extra: Optional[Dict[str, Any]] = None,
-    *,
-    log_duration: bool = True
+    operation: str, extra: dict[str, Any] | None = None, *, log_duration: bool = True
 ) -> AsyncGenerator[None, None]:
     """
     Create an async context manager for a logging span.
-
+
     This context manager tracks the execution of an operation,
     logging its start, completion, and duration.
-
+
     Args:
         operation: Name of the operation
         extra: Optional additional context to include
         log_duration: Whether to log the duration
-
+
     Yields:
         Nothing
     """
@@ -49,28 +45,22 @@ async def log_context_span(
     span_ctx = {
         "span_id": span_id,
         "operation": operation,
-        "start_time": datetime.fromtimestamp(start, timezone.utc)
-        .isoformat()
-        .replace("+00:00", "Z"),
+        "start_time": datetime.fromtimestamp(start, UTC).isoformat().replace("+00:00", "Z"),
     }
     if extra:
         span_ctx.update(extra)
     prev = log_context.get_copy()
     log_context.update(span_ctx)

-    logger.info("Starting %s", operation)
+    logger.debug("Starting %s", operation)
     try:
         yield
         if log_duration:
-            logger.info(
-                "Completed %s", operation, extra={"context": {"duration": time.time() - start}}
-            )
+            logger.debug("Completed %s", operation, extra={"context": {"duration": time.time() - start}})
         else:
-            logger.info("Completed %s", operation)
+            logger.debug("Completed %s", operation)
     except Exception as exc:
-        logger.exception(
-            "Error in %s: %s", operation, exc, extra={"context": {"duration": time.time() - start}}
-        )
+        logger.exception("Error in %s: %s", operation, exc, extra={"context": {"duration": time.time() - start}})
         raise
     finally:
         log_context.clear()
@@ -79,28 +69,26 @@ async def log_context_span(


 @asynccontextmanager
-async def request_logging(
-    request_id: Optional[str] = None
-) -> AsyncGenerator[str, None]:
+async def request_logging(request_id: str | None = None) -> AsyncGenerator[str, None]:
     """
     Create an async context manager for request logging.
-
+
     This context manager tracks a request from start to finish,
     including duration and any errors.
-
+
     Args:
         request_id: Optional request ID (generated if not provided)
-
+
     Yields:
         The request ID
     """
     logger = get_logger("chuk_tool_processor.request")
     request_id = log_context.start_request(request_id)
     start = time.time()
-    logger.info("Starting request %s", request_id)
+    logger.debug("Starting request %s", request_id)
     try:
         yield request_id
-        logger.info(
+        logger.debug(
             "Completed request %s",
             request_id,
             extra={"context": {"duration": time.time() - start}},
@@ -123,7 +111,7 @@ async def request_logging(
 async def log_tool_call(tool_call: Any, tool_result: Any) -> None:
     """
     Log a tool call and its result.
-
+
     Args:
         tool_call: The tool call object
         tool_result: The tool result object
@@ -140,24 +128,22 @@ async def log_tool_call(tool_call: Any, tool_result: Any) -> None:
         "tool": tool_call.tool,
         "arguments": tool_call.arguments,
         "result": (
-            tool_result.result.model_dump()
-            if hasattr(tool_result.result, "model_dump")
-            else tool_result.result
+            tool_result.result.model_dump() if hasattr(tool_result.result, "model_dump") else tool_result.result
         ),
         "error": tool_result.error,
         "duration": dur,
         "machine": tool_result.machine,
         "pid": tool_result.pid,
     }
-
+
     # Add optional fields safely (handle MagicMock in tests)
     try:
         if hasattr(tool_result, "cached") and tool_result.cached:
             ctx["cached"] = True
     except (TypeError, ValueError):
         pass
-
-    # Handle attempts field specifically
+
+    # Handle attempts field specifically
     if hasattr(tool_result, "attempts"):
         try:
             # First, try direct attribute access and direct comparison
@@ -173,7 +159,7 @@ async def log_tool_call(tool_call: Any, tool_result: Any) -> None:
         except (TypeError, ValueError):
             # If all else fails, just include the value
             ctx["attempts"] = tool_result.attempts
-
+
     try:
         if hasattr(tool_result, "stream_id") and tool_result.stream_id:
             ctx["stream_id"] = tool_result.stream_id
@@ -184,4 +170,4 @@ async def log_tool_call(tool_call: Any, tool_result: Any) -> None:
     if tool_result.error:
         logger.error("Tool %s failed: %s", tool_call.tool, tool_result.error, extra={"context": ctx})
     else:
-        logger.info("Tool %s succeeded in %.3fs", tool_call.tool, dur, extra={"context": ctx})
+        logger.debug("Tool %s succeeded in %.3fs", tool_call.tool, dur, extra={"context": ctx})
chuk_tool_processor/logging/metrics.py

@@ -2,10 +2,8 @@
 """
 Metrics logging for tool execution.
 """
-from __future__ import annotations

-import asyncio
-from typing import Dict, Any, Optional
+from __future__ import annotations

 # Import directly from context to avoid circular imports
 from .context import get_logger
@@ -16,11 +14,11 @@ __all__ = ["metrics", "MetricsLogger"]
 class MetricsLogger:
     """
     Logger for collecting and reporting metrics about tool execution.
-
+
     Provides methods to log tool execution metrics and parser metrics
     in a structured format.
     """
-
+
     def __init__(self):
         """Initialize with logger."""
         self.logger = get_logger("chuk_tool_processor.metrics")
@@ -32,13 +30,13 @@ class MetricsLogger:
         success: bool,
         duration: float,
         *,
-        error: Optional[str] = None,
+        error: str | None = None,
         cached: bool = False,
         attempts: int = 1,
     ) -> None:
         """
         Log metrics for a tool execution.
-
+
         Args:
             tool: Name of the tool
             success: Whether execution was successful
@@ -47,7 +45,7 @@ class MetricsLogger:
             cached: Whether the result was retrieved from cache
             attempts: Number of execution attempts
         """
-        self.logger.info(
+        self.logger.debug(
             f"Tool execution metric: {tool}",
             extra={
                 "context": {
@@ -71,14 +69,14 @@ class MetricsLogger:
     ) -> None:
         """
         Log metrics for a parser.
-
+
         Args:
             parser: Name of the parser
             success: Whether parsing was successful
             duration: Parsing duration in seconds
             num_calls: Number of tool calls parsed
         """
-        self.logger.info(
+        self.logger.debug(
             f"Parser metric: {parser}",
             extra={
                 "context": {
@@ -90,18 +88,18 @@ class MetricsLogger:
                 }
             },
         )
-
+
     async def log_registry_metric(
         self,
         operation: str,
         success: bool,
         duration: float,
-        tool: Optional[str] = None,
-        namespace: Optional[str] = None,
+        tool: str | None = None,
+        namespace: str | None = None,
     ) -> None:
         """
         Log metrics for registry operations.
-
+
         Args:
             operation: Type of registry operation
             success: Whether operation was successful
@@ -125,4 +123,4 @@ class MetricsLogger:


 # Create global instance
-metrics = MetricsLogger()
+metrics = MetricsLogger()
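
Only `log_registry_metric` is fully named in these hunks, so the sketch below sticks to it. The module-level `metrics` instance comes from the `__all__` shown in the hunk context; the argument values are invented for illustration, and, as with the other logging changes in this release, the record now goes out at DEBUG rather than INFO.

```python
# Sketch using only what the diff shows: the global `metrics` instance and the
# async log_registry_metric(operation, success, duration, tool, namespace).
import asyncio

from chuk_tool_processor.logging.metrics import metrics


async def main() -> None:
    await metrics.log_registry_metric(
        operation="register",   # illustrative operation name
        success=True,
        duration=0.002,
        tool="search",          # optional, per the new signature
        namespace="default",    # optional, per the new signature
    )


asyncio.run(main())
```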
chuk_tool_processor/mcp/__init__.py

@@ -7,22 +7,18 @@ Updated to support the latest MCP transports:
 - SSE (Server-Sent Events)
 - HTTP Streamable (modern replacement for SSE, spec 2025-03-26)
 """
-from chuk_tool_processor.mcp.transport import (
-    MCPBaseTransport,
-    StdioTransport,
-    SSETransport,
-    HTTPStreamableTransport
-)
-from chuk_tool_processor.mcp.stream_manager import StreamManager
+
 from chuk_tool_processor.mcp.mcp_tool import MCPTool
 from chuk_tool_processor.mcp.register_mcp_tools import register_mcp_tools
-from chuk_tool_processor.mcp.setup_mcp_stdio import setup_mcp_stdio
-from chuk_tool_processor.mcp.setup_mcp_sse import setup_mcp_sse
 from chuk_tool_processor.mcp.setup_mcp_http_streamable import setup_mcp_http_streamable
+from chuk_tool_processor.mcp.setup_mcp_sse import setup_mcp_sse
+from chuk_tool_processor.mcp.setup_mcp_stdio import setup_mcp_stdio
+from chuk_tool_processor.mcp.stream_manager import StreamManager
+from chuk_tool_processor.mcp.transport import HTTPStreamableTransport, MCPBaseTransport, SSETransport, StdioTransport

 __all__ = [
     "MCPBaseTransport",
-    "StdioTransport",
+    "StdioTransport",
     "SSETransport",
     "HTTPStreamableTransport",
     "StreamManager",
@@ -30,5 +26,5 @@ __all__ = [
     "register_mcp_tools",
     "setup_mcp_stdio",
     "setup_mcp_sse",
-    "setup_mcp_http_streamable"
-]
+    "setup_mcp_http_streamable",
+]
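
This last hunk only flattens and reorders imports, so the public surface of `chuk_tool_processor.mcp` should be unchanged between 0.6.4 and 0.9.7. A quick smoke check using only the names visible in `__all__` on both sides of the diff:

```python
# Every name below appears in __all__ on both sides of the diff, so imports
# written against 0.6.4 should keep working unchanged in 0.9.7.
from chuk_tool_processor.mcp import (
    HTTPStreamableTransport,
    MCPBaseTransport,
    SSETransport,
    StdioTransport,
    StreamManager,
    register_mcp_tools,
    setup_mcp_http_streamable,
    setup_mcp_sse,
    setup_mcp_stdio,
)

assert all(
    obj is not None
    for obj in (
        MCPBaseTransport, StdioTransport, SSETransport, HTTPStreamableTransport,
        StreamManager, register_mcp_tools,
        setup_mcp_stdio, setup_mcp_sse, setup_mcp_http_streamable,
    )
)
```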