chuk-tool-processor 0.1.6__py3-none-any.whl → 0.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Note: this release has been flagged as potentially problematic.
- chuk_tool_processor/core/processor.py +345 -132
- chuk_tool_processor/execution/strategies/inprocess_strategy.py +522 -71
- chuk_tool_processor/execution/strategies/subprocess_strategy.py +559 -64
- chuk_tool_processor/execution/tool_executor.py +282 -24
- chuk_tool_processor/execution/wrappers/caching.py +465 -123
- chuk_tool_processor/execution/wrappers/rate_limiting.py +199 -86
- chuk_tool_processor/execution/wrappers/retry.py +133 -23
- chuk_tool_processor/logging/__init__.py +83 -10
- chuk_tool_processor/logging/context.py +218 -22
- chuk_tool_processor/logging/formatter.py +56 -13
- chuk_tool_processor/logging/helpers.py +91 -16
- chuk_tool_processor/logging/metrics.py +75 -6
- chuk_tool_processor/mcp/mcp_tool.py +80 -35
- chuk_tool_processor/mcp/register_mcp_tools.py +74 -56
- chuk_tool_processor/mcp/setup_mcp_sse.py +41 -36
- chuk_tool_processor/mcp/setup_mcp_stdio.py +39 -37
- chuk_tool_processor/mcp/transport/sse_transport.py +351 -105
- chuk_tool_processor/models/execution_strategy.py +52 -3
- chuk_tool_processor/models/streaming_tool.py +110 -0
- chuk_tool_processor/models/tool_call.py +56 -4
- chuk_tool_processor/models/tool_result.py +115 -9
- chuk_tool_processor/models/validated_tool.py +15 -13
- chuk_tool_processor/plugins/discovery.py +115 -70
- chuk_tool_processor/plugins/parsers/base.py +13 -5
- chuk_tool_processor/plugins/parsers/{function_call_tool_plugin.py → function_call_tool.py} +39 -20
- chuk_tool_processor/plugins/parsers/json_tool.py +50 -0
- chuk_tool_processor/plugins/parsers/openai_tool.py +88 -0
- chuk_tool_processor/plugins/parsers/xml_tool.py +74 -20
- chuk_tool_processor/registry/__init__.py +46 -7
- chuk_tool_processor/registry/auto_register.py +92 -28
- chuk_tool_processor/registry/decorators.py +134 -11
- chuk_tool_processor/registry/interface.py +48 -14
- chuk_tool_processor/registry/metadata.py +52 -6
- chuk_tool_processor/registry/provider.py +75 -36
- chuk_tool_processor/registry/providers/__init__.py +49 -10
- chuk_tool_processor/registry/providers/memory.py +59 -48
- chuk_tool_processor/registry/tool_export.py +208 -39
- chuk_tool_processor/utils/validation.py +18 -13
- chuk_tool_processor-0.2.dist-info/METADATA +401 -0
- chuk_tool_processor-0.2.dist-info/RECORD +58 -0
- {chuk_tool_processor-0.1.6.dist-info → chuk_tool_processor-0.2.dist-info}/WHEEL +1 -1
- chuk_tool_processor/plugins/parsers/json_tool_plugin.py +0 -38
- chuk_tool_processor/plugins/parsers/openai_tool_plugin.py +0 -76
- chuk_tool_processor-0.1.6.dist-info/METADATA +0 -462
- chuk_tool_processor-0.1.6.dist-info/RECORD +0 -57
- {chuk_tool_processor-0.1.6.dist-info → chuk_tool_processor-0.2.dist-info}/top_level.txt +0 -0
chuk_tool_processor/models/streaming_tool.py (new file)

@@ -0,0 +1,110 @@
+#!/usr/bin/env python
+# chuk_tool_processor/models/streaming_tool.py
+"""
+Base class for tools that support streaming results.
+
+This enables tools to yield incremental results during their execution,
+which is useful for long-running operations or real-time data processing.
+"""
+from __future__ import annotations
+
+import asyncio
+from abc import abstractmethod
+from typing import Any, AsyncIterator, List, TypeVar, Generic, ClassVar, Optional, Dict
+
+from pydantic import BaseModel, ConfigDict
+
+from chuk_tool_processor.models.validated_tool import ValidatedTool
+
+T = TypeVar('T')
+
+class StreamingTool(ValidatedTool):
+    """
+    Base class for tools that support streaming responses.
+
+    Subclasses must implement _stream_execute which yields results one by one.
+    The executor should use stream_execute to access streaming results directly.
+
+    Example:
+        ```python
+        class Counter(StreamingTool):
+            class Arguments(BaseModel):
+                count: int = 10
+                delay: float = 0.5
+
+            class Result(BaseModel):
+                value: int
+
+            async def _stream_execute(self, count: int, delay: float) -> AsyncIterator[Result]:
+                for i in range(count):
+                    await asyncio.sleep(delay)
+                    yield self.Result(value=i)
+        ```
+
+    Streaming usage:
+        ```python
+        counter_tool = Counter()
+        async for result in counter_tool.stream_execute(count=5, delay=0.1):
+            print(f"Count: {result.value}")
+        ```
+    """
+    # Mark this as a ClassVar so Pydantic doesn't treat it as a field
+    supports_streaming: ClassVar[bool] = True
+
+    # Use ConfigDict to configure model behavior
+    model_config = ConfigDict(arbitrary_types_allowed=True)
+
+    async def stream_execute(self, **kwargs: Any) -> AsyncIterator[Any]:
+        """
+        Execute the tool and stream results incrementally.
+
+        This public method validates arguments and then delegates to _stream_execute.
+        It should be used directly by the executor to support true streaming.
+
+        Args:
+            **kwargs: Keyword arguments for the tool
+
+        Yields:
+            Results as they are generated by the tool
+        """
+        # Validate arguments using the Arguments model
+        args = self.Arguments(**kwargs)
+
+        # Stream results directly from _stream_execute
+        async for result in self._stream_execute(**args.model_dump()):
+            yield result
+
+    async def execute(self, **kwargs: Any) -> Any:
+        """
+        Execute the tool and collect all results.
+
+        For streaming tools, this collects all results from stream_execute
+        into a list for compatibility with the regular execution model.
+
+        Args:
+            **kwargs: Keyword arguments for the tool
+
+        Returns:
+            List of all streamed results
+        """
+        # Collect all streamed results into a list
+        results = []
+        async for chunk in self.stream_execute(**kwargs):
+            results.append(chunk)
+
+        return results
+
+    @abstractmethod
+    async def _stream_execute(self, **kwargs: Any) -> AsyncIterator[Any]:
+        """
+        Execute the tool and yield results incrementally.
+
+        This must be implemented by streaming tool subclasses.
+
+        Args:
+            **kwargs: Tool-specific arguments
+
+        Yields:
+            Results as they are generated
+        """
+        yield NotImplemented
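The new `StreamingTool` base gives a tool two entry points: `stream_execute()` yields results as they are produced, while `execute()` drains the stream into a list for the regular execution path. Below is a minimal sketch driving the `Counter` example from the docstring above; it assumes chuk-tool-processor 0.2 and pydantic v2 are installed, and the driver code around it is illustrative only.

```python
# Hypothetical driver for the Counter example shown in the StreamingTool docstring.
import asyncio
from typing import AsyncIterator

from pydantic import BaseModel

from chuk_tool_processor.models.streaming_tool import StreamingTool


class Counter(StreamingTool):
    class Arguments(BaseModel):
        count: int = 10
        delay: float = 0.5

    class Result(BaseModel):
        value: int

    async def _stream_execute(self, count: int, delay: float) -> AsyncIterator[Result]:
        for i in range(count):
            await asyncio.sleep(delay)
            yield self.Result(value=i)


async def main() -> None:
    counter = Counter()

    # Incremental consumption via stream_execute()
    async for chunk in counter.stream_execute(count=3, delay=0.01):
        print("chunk:", chunk.value)

    # Non-streaming consumption: execute() gathers every chunk into a list
    all_chunks = await counter.execute(count=3, delay=0.01)
    print("collected:", [c.value for c in all_chunks])


asyncio.run(main())
```

Because `execute()` simply collects `stream_execute()`, executors that do not know about streaming keep working unchanged.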
chuk_tool_processor/models/tool_call.py

@@ -1,7 +1,59 @@
 # chuk_tool_processor/models/tool_call.py
-
-
+"""
+Model representing a tool call with arguments.
+"""
+from __future__ import annotations
+
+import uuid
+from typing import Any, Dict, Optional
+
+from pydantic import BaseModel, Field, ConfigDict
 
 class ToolCall(BaseModel):
-
-
+    """
+    Represents a call to a tool with arguments.
+
+    Attributes:
+        id: Unique identifier for the tool call
+        tool: Name of the tool to call
+        namespace: Namespace the tool belongs to
+        arguments: Arguments to pass to the tool
+    """
+    model_config = ConfigDict(extra='ignore')
+
+    id: str = Field(
+        default_factory=lambda: str(uuid.uuid4()),
+        description="Unique identifier for the tool call"
+    )
+    tool: str = Field(
+        ...,
+        min_length=1,
+        description="Name of the tool to call; must be non-empty"
+    )
+    namespace: str = Field(
+        default="default",
+        description="Namespace the tool belongs to"
+    )
+    arguments: Dict[str, Any] = Field(
+        default_factory=dict,
+        description="Arguments to pass to the tool"
+    )
+
+    async def to_dict(self) -> Dict[str, Any]:
+        """Convert to a dictionary for serialization."""
+        return {
+            "id": self.id,
+            "tool": self.tool,
+            "namespace": self.namespace,
+            "arguments": self.arguments
+        }
+
+    @classmethod
+    async def from_dict(cls, data: Dict[str, Any]) -> ToolCall:
+        """Create a ToolCall from a dictionary."""
+        return cls(**data)
+
+    def __str__(self) -> str:
+        """String representation of the tool call."""
+        args_str = ", ".join(f"{k}={v!r}" for k, v in self.arguments.items())
+        return f"ToolCall({self.tool}, {args_str})"
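`ToolCall` now auto-generates an `id`, carries a `namespace`, and exposes async `to_dict`/`from_dict` helpers. A small usage sketch against the fields shown in the diff; the tool name and argument values are illustrative, not part of the package.

```python
# Sketch of the reworked ToolCall model (chuk-tool-processor 0.2, pydantic v2 assumed).
import asyncio

from chuk_tool_processor.models.tool_call import ToolCall


async def main() -> None:
    call = ToolCall(tool="search", namespace="web", arguments={"query": "chuk"})
    print(call)                      # ToolCall(search, query='chuk')

    payload = await call.to_dict()   # serialization helpers are now coroutines
    restored = await ToolCall.from_dict(payload)
    assert restored.id == call.id    # the auto-generated UUID survives the round trip


asyncio.run(main())
```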
chuk_tool_processor/models/tool_result.py

@@ -1,23 +1,44 @@
 # chuk_tool_processor/models/tool_result.py
+"""
+Model representing the result of a tool execution.
+"""
+from __future__ import annotations
+
 import os
-
-from typing import Any, Optional
+import uuid
 from datetime import datetime, timezone
+from typing import Any, Dict, Optional, List
+
+from pydantic import BaseModel, Field, ConfigDict
 
 class ToolResult(BaseModel):
     """
     Represents the result of executing a tool.
-
+
+    Includes timing, host, and process metadata for diagnostics and tracing.
+
+    Attributes:
+        id: Unique identifier for the result
+        tool: Name of the tool that was executed
+        result: Return value from the tool execution
+        error: Error message if execution failed
+        start_time: UTC timestamp when execution started
+        end_time: UTC timestamp when execution finished
+        machine: Hostname where the tool ran
+        pid: Process ID of the worker
+        cached: Flag indicating if the result was retrieved from cache
+        attempts: Number of execution attempts made
+        stream_id: Optional identifier for streaming results
+        is_partial: Whether this is a partial streaming result
     """
-    # Configure Pydantic to ignore any extra fields
     model_config = ConfigDict(extra='ignore')
 
-
-
-
-        description="True if this result was retrieved from cache"
+    id: str = Field(
+        default_factory=lambda: str(uuid.uuid4()),
+        description="Unique identifier for this result"
     )
-
+
+    # Core fields
     tool: str = Field(
         ...,
         min_length=1,

@@ -31,6 +52,8 @@ class ToolResult(BaseModel):
         None,
         description="Error message if execution failed"
     )
+
+    # Execution metadata
     start_time: datetime = Field(
         default_factory=lambda: datetime.now(timezone.utc),
         description="UTC timestamp when execution started"

@@ -47,3 +70,86 @@ class ToolResult(BaseModel):
         default_factory=lambda: os.getpid(),
         description="Process ID of the worker"
     )
+
+    # Extended features
+    cached: bool = Field(
+        default=False,
+        description="True if this result was retrieved from cache"
+    )
+    attempts: int = Field(
+        default=1,
+        description="Number of execution attempts made"
+    )
+
+    # Streaming support
+    stream_id: Optional[str] = Field(
+        default=None,
+        description="Identifier for this stream of results (for streaming tools)"
+    )
+    is_partial: bool = Field(
+        default=False,
+        description="True if this is a partial result in a stream"
+    )
+
+    @property
+    def is_success(self) -> bool:
+        """Check if the execution was successful (no error)."""
+        return self.error is None
+
+    @property
+    def duration(self) -> float:
+        """Calculate the execution duration in seconds."""
+        if not self.start_time or not self.end_time:
+            return 0.0
+        return (self.end_time - self.start_time).total_seconds()
+
+    async def to_dict(self) -> Dict[str, Any]:
+        """Convert to a dictionary for serialization."""
+        return {
+            "id": self.id,
+            "tool": self.tool,
+            "result": self.result,
+            "error": self.error,
+            "success": self.is_success,
+            "duration": self.duration,
+            "start_time": self.start_time.isoformat(),
+            "end_time": self.end_time.isoformat(),
+            "machine": self.machine,
+            "pid": self.pid,
+            "cached": self.cached,
+            "attempts": self.attempts,
+            "stream_id": self.stream_id,
+            "is_partial": self.is_partial
+        }
+
+    @classmethod
+    def create_stream_chunk(
+        cls,
+        tool: str,
+        result: Any,
+        stream_id: Optional[str] = None
+    ) -> ToolResult:
+        """Create a partial streaming result."""
+        stream_id = stream_id or str(uuid.uuid4())
+        return cls(
+            tool=tool,
+            result=result,
+            stream_id=stream_id,
+            is_partial=True
+        )
+
+    @classmethod
+    async def from_dict(cls, data: Dict[str, Any]) -> ToolResult:
+        """Create a ToolResult from a dictionary."""
+        # Handle datetime fields
+        if isinstance(data.get("start_time"), str):
+            data["start_time"] = datetime.fromisoformat(data["start_time"])
+        if isinstance(data.get("end_time"), str):
+            data["end_time"] = datetime.fromisoformat(data["end_time"])
+
+        return cls(**data)
+
+    def __str__(self) -> str:
+        """String representation of the tool result."""
+        status = "success" if self.is_success else f"error: {self.error}"
+        return f"ToolResult({self.tool}, {status}, duration={self.duration:.3f}s)"
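`ToolResult` gains execution metadata (`cached`, `attempts`), streaming fields (`stream_id`, `is_partial`), the `is_success`/`duration` properties, and async dict round-tripping. A sketch of the new surface follows; the tool name and result payloads are illustrative values, not fields defined by the library.

```python
# Sketch of the extended ToolResult API from the diff above (0.2, pydantic v2 assumed).
import asyncio
from datetime import datetime, timezone

from chuk_tool_processor.models.tool_result import ToolResult


async def main() -> None:
    result = ToolResult(
        tool="search",
        result={"hits": 3},
        end_time=datetime.now(timezone.utc),  # start_time defaults to "now"
    )
    print(result.is_success, result.duration)   # True, roughly 0.0 seconds

    # Partial results for streaming tools carry a stream_id and is_partial=True
    chunk = ToolResult.create_stream_chunk(tool="search", result={"hit": 1})
    print(chunk.is_partial, chunk.stream_id)

    # Round-trip through the async dict helpers; extra keys such as
    # "success"/"duration" are ignored on the way back in (extra='ignore')
    restored = await ToolResult.from_dict(await result.to_dict())
    assert restored.tool == result.tool


asyncio.run(main())
```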
chuk_tool_processor/models/validated_tool.py

@@ -1,6 +1,6 @@
 # chuk_tool_processor/models/validated_tool.py
 """
-Self-contained base-class for *declarative* tools.
+Self-contained base-class for *declarative* async-native tools.
 
 Subclass it like so:
 

@@ -12,7 +12,7 @@ Subclass it like so:
     class Result(BaseModel):
         sum: int
 
-    def _execute(self, *, x: int, y: int) -> Result:
+    async def _execute(self, *, x: int, y: int) -> Result:
         return self.Result(sum=x + y)
 """
 from __future__ import annotations

@@ -93,7 +93,7 @@ class _ExportMixin:
 # The public validated base-class
 # --------------------------------------------------------------------------- #
 class ValidatedTool(_ExportMixin, BaseModel):
-    """Pydantic-validated base for new tools."""
+    """Pydantic-validated base for new async-native tools."""
 
     # ------------------------------------------------------------------ #
     # Inner models – override in subclasses

@@ -107,11 +107,11 @@ class ValidatedTool(_ExportMixin, BaseModel):
     # ------------------------------------------------------------------ #
     # Public entry-point called by the processor
     # ------------------------------------------------------------------ #
-    def execute(self: T_Validated, **kwargs: Any) -> BaseModel:
+    async def execute(self: T_Validated, **kwargs: Any) -> BaseModel:
         """Validate *kwargs*, run `_execute`, validate the result."""
         try:
             args = self.Arguments(**kwargs)  # type: ignore[arg-type]
-            res = self._execute(**args.model_dump())  # type: ignore[arg-type]
+            res = await self._execute(**args.model_dump())  # type: ignore[arg-type]
 
             return (
                 res

@@ -124,16 +124,16 @@
     # ------------------------------------------------------------------ #
     # Sub-classes must implement this
     # ------------------------------------------------------------------ #
-    def _execute(self, **_kwargs: Any):  # noqa: D401 – expected override
-        raise NotImplementedError("Tool must implement _execute()")
+    async def _execute(self, **_kwargs: Any):  # noqa: D401 – expected override
+        raise NotImplementedError("Tool must implement async _execute()")
 
 
 # --------------------------------------------------------------------------- #
-# Decorator to retrofit validation onto classic
+# Decorator to retrofit validation onto classic "imperative" tools
 # --------------------------------------------------------------------------- #
 def with_validation(cls):  # noqa: D401 – factory
     """
-    Decorator that wraps an existing ``execute`` method with:
+    Decorator that wraps an existing async ``execute`` method with:
 
     * argument validation (based on type hints)
     * result validation (based on return annotation)

@@ -143,13 +143,15 @@ def with_validation(cls):  # noqa: D401 – factory
         validate_result,
     )
 
-    original
+    original = cls.execute  # type: ignore[attr-defined]
+    if not inspect.iscoroutinefunction(original):
+        raise TypeError(f"Tool {cls.__name__} must have an async execute method")
 
-    def
+    async def _async_wrapper(self, **kwargs):  # type: ignore[override]
         tool_name = cls.__name__
         validated = validate_arguments(tool_name, original, kwargs)
-        result = original(self, **validated)
+        result = await original(self, **validated)
         return validate_result(tool_name, original, result)
 
-    cls.execute =
+    cls.execute = _async_wrapper  # type: ignore[assignment]
     return cls
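The net effect for tool authors: `_execute` is now a coroutine and `execute()` must be awaited, both for `ValidatedTool` subclasses and for classes retrofitted with `with_validation` (which now rejects non-async `execute` methods). A minimal sketch mirroring the docstring example above; the `AddTool` class name and driver code are mine, not part of the package.

```python
# Sketch of the async-native ValidatedTool contract after this change.
import asyncio

from pydantic import BaseModel

from chuk_tool_processor.models.validated_tool import ValidatedTool


class AddTool(ValidatedTool):
    class Arguments(BaseModel):
        x: int
        y: int

    class Result(BaseModel):
        sum: int

    async def _execute(self, *, x: int, y: int) -> Result:
        return self.Result(sum=x + y)


async def main() -> None:
    tool = AddTool()
    # execute() validates the arguments, awaits _execute, and validates the result
    result = await tool.execute(x=2, y=3)
    print(result.sum)  # 5


asyncio.run(main())
```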
chuk_tool_processor/plugins/discovery.py

@@ -1,121 +1,164 @@
 # chuk_tool_processor/plugins/discovery.py
-"""
+"""Async-friendly plugin discovery & registry utilities for chuk_tool_processor."""
+
 from __future__ import annotations
 
 import importlib
 import inspect
 import logging
 import pkgutil
+from types import ModuleType
 from typing import Any, Dict, List, Optional, Set, Type
 
+from chuk_tool_processor.plugins.parsers.base import ParserPlugin
+from chuk_tool_processor.models.execution_strategy import ExecutionStrategy
+
+__all__ = [
+    "plugin_registry",
+    "PluginRegistry",
+    "PluginDiscovery",
+    "discover_default_plugins",
+    "discover_plugins",
+    "plugin",
+]
+
 logger = logging.getLogger(__name__)
 
 
+# -----------------------------------------------------------------------------
+# In-memory registry
+# -----------------------------------------------------------------------------
 class PluginRegistry:
-    """
+    """Thread-safe (GIL) in-memory registry keyed by *category → name*."""
 
-    def __init__(self) -> None:
+    def __init__(self) -> None:
+        # category → {name → object}
        self._plugins: Dict[str, Dict[str, Any]] = {}
 
-    #
+    # --------------------------------------------------------------------- #
+    # Public API
+    # --------------------------------------------------------------------- #
     def register_plugin(self, category: str, name: str, plugin: Any) -> None:
         self._plugins.setdefault(category, {})[name] = plugin
         logger.debug("Registered plugin %s.%s", category, name)
 
-    def get_plugin(self, category: str, name: str) -> Optional[Any]:
+    def get_plugin(self, category: str, name: str) -> Optional[Any]:  # noqa: D401
         return self._plugins.get(category, {}).get(name)
 
     def list_plugins(self, category: str | None = None) -> Dict[str, List[str]]:
-        if category:
-            return {category:
-        return {cat:
+        if category is not None:
+            return {category: sorted(self._plugins.get(category, {}))}
+        return {cat: sorted(names) for cat, names in self._plugins.items()}
 
 
+# -----------------------------------------------------------------------------
+# Discovery
+# -----------------------------------------------------------------------------
 class PluginDiscovery:
-    """
+    """
+    Recursively scans *package_paths* for plugin classes and registers them.
 
-
-
-        self._seen: Set[str] = set()
+    * Parser plugins – concrete subclasses of :class:`ParserPlugin`
+      with an **async** ``try_parse`` coroutine.
 
-
-
-            from chuk_tool_processor.parsers.base import ParserPlugin as _PP  # noqa: WPS433
-        except ModuleNotFoundError:
-            _PP = None
-        self.ParserPlugin = _PP
+    * Execution strategies – concrete subclasses of
+      :class:`ExecutionStrategy`.
 
-
-
+    * Explicitly-decorated plugins – classes tagged with ``@plugin(...)``.
+    """
 
-
+    # ------------------------------------------------------------------ #
+    def __init__(self, registry: PluginRegistry) -> None:
+        self._registry = registry
+        self._seen_modules: Set[str] = set()
 
-    # ------------------------------------------------------------------
+    # ------------------------------------------------------------------ #
     def discover_plugins(self, package_paths: List[str]) -> None:
-
-
+        """Import every package in *package_paths* and walk its subtree."""
+        for pkg_path in package_paths:
+            self._walk(pkg_path)
 
-    # ------------------------------------------------------------------
+    # ------------------------------------------------------------------ #
+    # Internal helpers
+    # ------------------------------------------------------------------ #
     def _walk(self, pkg_path: str) -> None:
         try:
-
+            root_pkg = importlib.import_module(pkg_path)
         except ImportError as exc:  # pragma: no cover
             logger.warning("Cannot import package %s: %s", pkg_path, exc)
             return
 
-
-
+        self._inspect_module(root_pkg)
+
+        for _, mod_name, is_pkg in pkgutil.iter_modules(root_pkg.__path__, root_pkg.__name__ + "."):
+            if mod_name in self._seen_modules:
+                continue
+            self._seen_modules.add(mod_name)
+
+            try:
+                mod = importlib.import_module(mod_name)
+            except ImportError as exc:  # pragma: no cover
+                logger.debug("Cannot import module %s: %s", mod_name, exc)
                 continue
-
-            self._inspect_module(
+
+            self._inspect_module(mod)
+
             if is_pkg:
                 self._walk(mod_name)
 
-    # ------------------------------------------------------------------
-    def _inspect_module(self,
-        try:
-            module = importlib.import_module(mod_name)
-        except ImportError as exc:  # pragma: no cover
-            logger.warning("Cannot import module %s: %s", mod_name, exc)
-            return
-
+    # ------------------------------------------------------------------ #
+    def _inspect_module(self, module: ModuleType) -> None:
         for attr in module.__dict__.values():
             if inspect.isclass(attr):
                 self._maybe_register(attr)
 
-    # ------------------------------------------------------------------
+    # ------------------------------------------------------------------ #
     def _maybe_register(self, cls: Type) -> None:
-        """Register *cls* in all
-
-
-        looks_like_parser = callable(getattr(cls, "try_parse", None))
-        if looks_like_parser and not inspect.isabstract(cls):
-            # skip ABC base itself if available
-            if self.ParserPlugin and cls is self.ParserPlugin:
-                pass
-            else:
-                self.registry.register_plugin("parser", cls.__name__, cls())
-
-        # --------------- execution strategies -------------------------
-        if (
-            issubclass(cls, self.ExecutionStrategy)
-            and cls is not self.ExecutionStrategy
-            and not inspect.isabstract(cls)
-        ):
-            self.registry.register_plugin("execution_strategy", cls.__name__, cls)
-
-        # --------------- explicit @plugin decorator -------------------
-        meta = getattr(cls, "_plugin_meta", None)
-        if meta and not inspect.isabstract(cls):
-            self.registry.register_plugin(meta.get("category", "unknown"), meta.get("name", cls.__name__), cls())
-
-
-# ----------------------------------------------------------------------
-# public decorator helper
-# ----------------------------------------------------------------------
+        """Register *cls* in all matching plugin categories."""
+        if inspect.isabstract(cls):
+            return
 
+        # ------------------- Parser plugins -------------------------
+        if issubclass(cls, ParserPlugin) and cls is not ParserPlugin:
+            if not inspect.iscoroutinefunction(getattr(cls, "try_parse", None)):
+                logger.warning("Skipping parser plugin %s: try_parse is not async", cls.__qualname__)
+            else:
+                try:
+                    self._registry.register_plugin("parser", cls.__name__, cls())
+                except Exception as exc:  # pragma: no cover
+                    logger.warning("Cannot instantiate parser plugin %s: %s", cls.__qualname__, exc)
+
+        # ---------------- Execution strategies ---------------------
+        if issubclass(cls, ExecutionStrategy) and cls is not ExecutionStrategy:
+            self._registry.register_plugin("execution_strategy", cls.__name__, cls)
+
+        # ------------- Explicit @plugin decorator ------------------
+        meta: Optional[dict] = getattr(cls, "_plugin_meta", None)
+        if meta:
+            category = meta.get("category", "unknown")
+            name = meta.get("name", cls.__name__)
+            try:
+                plugin_obj: Any = cls() if callable(getattr(cls, "__init__", None)) else cls
+                self._registry.register_plugin(category, name, plugin_obj)
+            except Exception as exc:  # pragma: no cover
+                logger.warning("Cannot instantiate decorated plugin %s: %s", cls.__qualname__, exc)
+
+
+# -----------------------------------------------------------------------------
+# Decorator helper
+# -----------------------------------------------------------------------------
 def plugin(category: str, name: str | None = None):
-    """
+    """
+    Decorator that marks a concrete class as a plugin for *category*.
+
+    Example
+    -------
+    ```python
+    @plugin("transport", name="sse")
+    class MySSETransport:
+        ...
+    ```
+    """
 
     def decorator(cls):
         cls._plugin_meta = {"category": category, "name": name or cls.__name__}

@@ -124,15 +167,17 @@ def plugin(category: str, name: str | None = None):
     return decorator
 
 
-#
+# -----------------------------------------------------------------------------
 # Singletons & convenience wrappers
-#
+# -----------------------------------------------------------------------------
 plugin_registry = PluginRegistry()
 
 
 def discover_default_plugins() -> None:
+    """Discover plugins shipped inside *chuk_tool_processor.plugins*."""
     PluginDiscovery(plugin_registry).discover_plugins(["chuk_tool_processor.plugins"])
 
 
 def discover_plugins(package_paths: List[str]) -> None:
+    """Discover plugins from arbitrary external *package_paths*."""
     PluginDiscovery(plugin_registry).discover_plugins(package_paths)
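For reference, a sketch of how the reworked discovery module is driven. The decorator usage mirrors the docstring example above; `my_plugins` is a hypothetical package path, and a decorated class is only registered once `PluginDiscovery` actually walks the package it lives in.

```python
# Sketch of plugin registration and discovery with chuk-tool-processor 0.2 (assumed installed).
from chuk_tool_processor.plugins.discovery import (
    discover_default_plugins,
    discover_plugins,
    plugin,
    plugin_registry,
)


# Inside a module of a scanned package, a class is tagged like this; the decorator
# only attaches _plugin_meta, and registration happens during discovery.
@plugin("transport", name="sse")
class MySSETransport:
    ...


# Register the parser and execution-strategy plugins shipped with the package itself
discover_default_plugins()

# Scan additional packages (hypothetical path) for ParserPlugin subclasses,
# ExecutionStrategy subclasses, and @plugin-decorated classes
discover_plugins(["my_plugins"])

# Inspect what was registered, grouped by category
print(plugin_registry.list_plugins())
```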