agent-handler-sdk 0.1.8__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- agent_handler_sdk-0.1.8/PKG-INFO +15 -0
- agent_handler_sdk-0.1.8/agent_handler_sdk/__init__.py +3 -0
- agent_handler_sdk-0.1.8/agent_handler_sdk/auth.py +24 -0
- agent_handler_sdk-0.1.8/agent_handler_sdk/cli.py +81 -0
- agent_handler_sdk-0.1.8/agent_handler_sdk/connector.py +106 -0
- agent_handler_sdk-0.1.8/agent_handler_sdk/eval_types.py +113 -0
- agent_handler_sdk-0.1.8/agent_handler_sdk/exceptions.py +4 -0
- agent_handler_sdk-0.1.8/agent_handler_sdk/invocation.py +55 -0
- agent_handler_sdk-0.1.8/agent_handler_sdk/registry.py +89 -0
- agent_handler_sdk-0.1.8/agent_handler_sdk/templates/connector/README.md.tpl +34 -0
- agent_handler_sdk-0.1.8/agent_handler_sdk/templates/connector/evals.json.tpl +55 -0
- agent_handler_sdk-0.1.8/agent_handler_sdk/templates/connector/init.py.tpl +12 -0
- agent_handler_sdk-0.1.8/agent_handler_sdk/templates/connector/metadata.yaml.tpl +3 -0
- agent_handler_sdk-0.1.8/agent_handler_sdk/templates/connector/pyproject.toml.tpl +29 -0
- agent_handler_sdk-0.1.8/agent_handler_sdk/templates/connector/test_handlers.py.tpl +19 -0
- agent_handler_sdk-0.1.8/agent_handler_sdk/tool.py +141 -0
- agent_handler_sdk-0.1.8/agent_handler_sdk/utils.py +208 -0
- agent_handler_sdk-0.1.8/pyproject.toml +49 -0
agent_handler_sdk-0.1.8/PKG-INFO
@@ -0,0 +1,15 @@
Metadata-Version: 2.4
Name: agent-handler-sdk
Version: 0.1.8
Summary: Agent Handler SDK for defining and invoking LLM tools
Author: David Dalmaso
Author-email: david.dalmaso@merge.dev
Requires-Python: >=3.10
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3.10
Classifier: Programming Language :: Python :: 3.11
Classifier: Programming Language :: Python :: 3.12
Classifier: Programming Language :: Python :: 3.13
Classifier: Programming Language :: Python :: 3.14
Requires-Dist: jsonschema (>=4.0,<5.0)
Requires-Dist: pydantic (>=2.0,<3.0)
agent_handler_sdk-0.1.8/agent_handler_sdk/auth.py
@@ -0,0 +1,24 @@
# agent_handler_sdk/auth.py

from typing import Optional


class AuthContext:
    """
    Auth context for tool execution that provides secure access to secrets.

    This class provides an isolated container for secrets during tool execution.
    Each tool execution should receive its own instance.
    """

    def __init__(self, secrets: Optional[dict[str, str]] = None, oauth2_token: Optional[str] = None):
        self._secrets = secrets or {}
        self._oauth2_token = oauth2_token

    def get(self, key: str, default: Optional[str] = None) -> Optional[str]:
        """Get a secret value by key"""
        return self._secrets.get(key, default)

    def get_oauth2_token(self) -> Optional[str]:
        """Get the OAuth token from the secrets"""
        return self._oauth2_token
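Illustrative usage (not part of the package contents): a minimal sketch of how a host might hand secrets to a tool execution through `AuthContext`; the secret names and values here are hypothetical.

```python
from agent_handler_sdk.auth import AuthContext

# Hypothetical secrets supplied by the host for one tool execution.
ctx = AuthContext(secrets={"API_KEY": "sk-test"}, oauth2_token="example-token")

ctx.get("API_KEY")               # "sk-test"
ctx.get("MISSING", default="")   # falls back to the default
ctx.get_oauth2_token()           # "example-token"
```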
agent_handler_sdk-0.1.8/agent_handler_sdk/cli.py
@@ -0,0 +1,81 @@
import sys
from pathlib import Path
from typing import Any
import importlib.resources as pkg_resources
from agent_handler_sdk import __version__ as sdk_version

# Use str() to convert Traversable to string path
TEMPLATE_DIR = Path(str(pkg_resources.files("agent_handler_sdk"))) / "templates" / "connector"


def get_sdk_version() -> str:
    return sdk_version


def render_template(filename: str, **context: Any) -> str:
    """
    Load a template file from the SDK's templates/connector directory
    and format it with the given context.
    """
    template_path = TEMPLATE_DIR.joinpath(filename)
    text = template_path.read_text()
    return text.format(**context)


def scaffold_connector() -> int:
    """
    Usage: ahs-scaffold <slug> [--target-dir <dir>]

    Creates:
      <target-dir>/connectors/<slug>/
        pyproject.toml
        metadata.yaml
        <slug>_connector/
          __init__.py
          tools/
        evals/
        tests/
          test_handlers.py
    """
    args = sys.argv[1:]
    if not args:
        print(scaffold_connector.__doc__)
        sys.exit(1)

    slug = args[0]
    # Generate human-readable name by replacing hyphens with spaces and capitalizing words
    human_readable_name = " ".join(word.capitalize() for word in slug.replace("-", " ").split())
    target = Path(".")
    if "--target-dir" in args:
        idx = args.index("--target-dir")
        target = Path(args[idx + 1])

    version = get_sdk_version()

    base = target / "connectors" / slug
    pkg_dir = base / f"{slug}_connector"
    tools_dir = pkg_dir / "tools"
    tests_dir = base / "tests"
    evals_dir = base / "evals"

    # Create directories
    for d in (base, pkg_dir, tools_dir, tests_dir, evals_dir):
        d.mkdir(parents=True, exist_ok=True)

    # Map template → output path
    files_to_render = {
        "pyproject.toml.tpl": base / "pyproject.toml",
        "metadata.yaml.tpl": base / "metadata.yaml",
        "init.py.tpl": pkg_dir / "__init__.py",
        "test_handlers.py.tpl": tests_dir / "test_handlers.py",
        "evals.json.tpl": evals_dir / "evals.json",
        "README.md.tpl": base / "README.md",
    }

    # Render each template with both name & version
    for tpl_name, out_path in files_to_render.items():
        content = render_template(tpl_name, name=slug, version=version, human_readable_name=human_readable_name)
        out_path.write_text(content, encoding="utf-8")

    print(f"Scaffolded connector '{slug}' (SDK v{version}) at {base}")
    return 0
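Illustrative usage (not part of the package contents): the pyproject.toml at the end of this diff registers `scaffold_connector` as the `ahs-scaffold` console script, but since it only reads `sys.argv`, it can also be driven from Python. The slug and target directory below are hypothetical, and the sketch assumes the bundled templates all render cleanly through `str.format`.

```python
import sys
from agent_handler_sdk.cli import scaffold_connector

# Equivalent to running: ahs-scaffold wikipedia --target-dir /tmp/demo
sys.argv = ["ahs-scaffold", "wikipedia", "--target-dir", "/tmp/demo"]
scaffold_connector()
# On success, prints something like:
#   Scaffolded connector 'wikipedia' (SDK v0.1.8) at /tmp/demo/connectors/wikipedia
```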
agent_handler_sdk-0.1.8/agent_handler_sdk/connector.py
@@ -0,0 +1,106 @@
from typing import List, Optional, Set, Dict, Any, Union, Awaitable, Callable
from .registry import ConnectorRegistry


class Connector:
    def __init__(
        self,
        namespace: str,
        include_tools: Optional[List[str]] = None,
        include_tags: Optional[List[str]] = None,
    ):
        """
        namespace: unique prefix (e.g. "jira").
        include_tools: explicit list of fully-qualified tool names.
        include_tags: whitelist of tags to filter tools by.
        """
        self.namespace = namespace
        self.include_tools = set(include_tools) if include_tools else None
        self.include_tags = set(include_tags) if include_tags else None

    def tool(
        self,
        name: Optional[str] = None,
        desc: str = "",
        tags: Optional[List[str]] = None,
        read_only: Optional[bool] = None,
        destructive: Optional[bool] = None,
        idempotent: Optional[bool] = None,
        open_world: Optional[bool] = None,
    ) -> Callable[[Callable], Callable]:
        # Wraps agent_handler_sdk.tool to inject qualified name & tags
        from .tool import tool as _tool

        def decorator(fn: Callable) -> Callable:
            qualified = f"{self.namespace}__{name or fn.__name__}"
            return _tool(
                name=qualified,
                desc=desc,
                tags=tags,
                read_only=read_only,
                destructive=destructive,
                idempotent=idempotent,
                open_world=open_world,
            )(fn)

        return decorator

    def list_tools(self) -> List[Dict]:
        # Get all specs that match the namespace
        namespace_prefix = f"{self.namespace}__"
        specs = [t for t in ConnectorRegistry.list_tools() if t["name"].startswith(namespace_prefix)]

        # Filter by explicit tool names if specified
        if self.include_tools is not None:
            specs = [t for t in specs if t["name"] in self.include_tools]

        # Filter by tags if specified
        if self.include_tags is not None:
            # Get the tool specs from the registry to access tags
            tool_specs = {
                t.name: t for t in ConnectorRegistry._tools.values() if t.name in [spec["name"] for spec in specs]
            }

            # Filter specs based on tags
            specs = [spec for spec in specs if any(tag in self.include_tags for tag in tool_specs[spec["name"]].tags)]

        return specs

    def get_tool(self, name: str) -> Dict:
        return ConnectorRegistry.get_tool(name)

    def call_tool(self, tool_name: str, params: dict) -> Any:
        """
        Validate and invoke a registered tool by name.

        For synchronous tools, returns the result directly.
        For async tools, this will run the event loop and return the awaited result.

        Args:
            tool_name: The name of the tool to invoke
            params: Dictionary of parameters to pass to the tool

        Returns:
            The result of the tool invocation
        """
        from .invocation import invoke as _invoke

        return _invoke(tool_name, params, connector=self)

    async def call_tool_async(self, tool_name: str, params: dict) -> Any:
        """
        Validate and invoke a registered tool by name asynchronously.

        For synchronous tools, this will run them in a thread pool.
        For async tools, this will await the coroutine directly.

        Args:
            tool_name: The name of the tool to invoke
            params: Dictionary of parameters to pass to the tool

        Returns:
            The result of the tool invocation
        """
        from .invocation import invoke_async as _invoke_async

        return await _invoke_async(tool_name, params, connector=self)
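Illustrative usage (not part of the package contents): a minimal sketch of defining and calling a namespaced tool through `Connector`; the `wikipedia` namespace and `search` implementation are hypothetical.

```python
from agent_handler_sdk.connector import Connector

wikipedia = Connector(namespace="wikipedia")

@wikipedia.tool(desc="Search Wikipedia", tags=["search"], read_only=True)
def search(query: str) -> dict:
    # Hypothetical implementation.
    return {"query": query, "results": []}

# The tool is registered under the qualified name "wikipedia__search".
[t["name"] for t in wikipedia.list_tools()]                               # ['wikipedia__search']
wikipedia.call_tool("wikipedia__search", {"query": "George Washington"})  # validates params, then calls search()
```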
agent_handler_sdk-0.1.8/agent_handler_sdk/eval_types.py
@@ -0,0 +1,113 @@
from typing import List, Dict, Any, Optional, Literal, Union
from pydantic import BaseModel, Extra
from datetime import datetime


class JsonSchema(BaseModel):
    type: Optional[str] = None
    properties: Optional[Dict[str, "JsonSchema"]] = None
    items: Optional[Union["JsonSchema", List["JsonSchema"]]] = None
    required: Optional[List[str]] = None
    enum: Optional[List[Any]] = None
    description: Optional[str] = None
    additional_properties: Optional[Union[bool, "JsonSchema"]] = None
    model: Optional[str] = None

    class Config:
        arbitrary_types_allowed = True
        extra = "allow"


JsonSchema.model_rebuild()


class DataSourceConfig(BaseModel):
    input_schema: JsonSchema


class MessageContent(BaseModel):
    type: str
    text: str


class MessageInput(BaseModel):
    type: str
    role: str
    content: MessageContent


class BaseEvaluator(BaseModel):
    name: str
    id: str
    type: str  # Discriminator for future extension


class ReferenceToolCallsMatchEvaluator(BaseEvaluator):
    type: Literal["reference_tool_calls_match"]
    enforce_ordering: bool
    fail_on_args_mismatch: bool


class LabelModelEvaluator(BaseEvaluator):
    type: Literal["label_model"]
    passing_labels: Optional[List[str]]
    labels: Optional[List[str]]
    model: Optional[str]
    input: List[MessageInput]


Evaluator = Union[ReferenceToolCallsMatchEvaluator, LabelModelEvaluator, BaseEvaluator]


class EvalMetadata(BaseModel):
    description: Optional[str]


class EvalItemInput(BaseModel, extra=Extra.allow):
    input: str


class EvalItem(BaseModel, extra=Extra.allow):
    """
    Schema for individual eval items.
    Supports both runtime evaluation (with id and tool_calls) and connector eval files (flexible input).
    """

    input: Union[str, EvalItemInput]  # Can be either a string or EvalItemInput object
    id: Optional[str] = None  # Optional for connector eval files


class ConnectorEvalBundle(BaseModel):
    """
    Schema for eval bundles stored in connector /evals/ folders.
    This matches the JSON structure that contains config, items, and prompts together.
    """

    data_source_config: DataSourceConfig
    items: List[EvalItem]
    prompts: List[MessageInput]
    name: str
    metadata: Optional[EvalMetadata] = None

    def to_eval_config(self) -> "EvalConfig":
        """
        Convert this bundle to an EvalConfig for use with the eval runner.
        Note: This creates a minimal EvalConfig without testing_evaluators.
        """
        return EvalConfig(
            id=None,
            created_at=None,
            updated_at=None,
            data_source_config=self.data_source_config,
            testing_evaluators=[],  # Empty list since connector evals don't define evaluators
            metadata=self.metadata,
        )


class EvalConfig(BaseModel):
    id: Optional[str]
    created_at: Optional[datetime]
    updated_at: Optional[datetime]
    data_source_config: DataSourceConfig
    testing_evaluators: Optional[List[Evaluator]] = []
    metadata: Optional[EvalMetadata]
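Illustrative usage (not part of the package contents): a minimal sketch of loading an eval bundle with these models; the bundle contents are hypothetical and mirror the shape of the evals.json template further down in this diff.

```python
from agent_handler_sdk.eval_types import ConnectorEvalBundle

bundle = ConnectorEvalBundle.model_validate({
    "name": "wikipedia-basic",
    "data_source_config": {"input_schema": {"type": "object"}},
    "items": [{"input": "Tell me about George Washington"}],
    "prompts": [
        {"type": "text", "role": "user",
         "content": {"type": "text", "text": "{{input.input}}"}},
    ],
})
config = bundle.to_eval_config()   # minimal EvalConfig with no testing_evaluators
```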
agent_handler_sdk-0.1.8/agent_handler_sdk/invocation.py
@@ -0,0 +1,55 @@
from typing import Any, Optional, Union, Awaitable
import asyncio
import jsonschema
from .registry import ConnectorRegistry
from .connector import Connector
from .registry import ToolSpec


def invoke(tool_name: str, params: dict, connector: Optional[Connector] = None) -> Any:
    """
    Validate and invoke a registered tool by name.
    If a Connector is provided, enforce whitelist based on that instance.

    For synchronous tools, returns the result directly.
    For async tools, this will run the event loop and return the awaited result.
    """
    spec = _get_validated_tool_spec(tool_name, params, connector)

    if spec.is_async:
        # For async functions, run the event loop
        return asyncio.run(spec.fn(**params))
    else:
        # For sync functions, call directly
        return spec.fn(**params)


async def invoke_async(tool_name: str, params: dict, connector: Optional[Connector] = None) -> Any:
    """
    Validate and invoke a registered tool by name asynchronously.
    If a Connector is provided, enforce whitelist based on that instance.

    For synchronous tools, this will run them in a thread pool.
    For async tools, this will await the coroutine directly.
    """
    spec = _get_validated_tool_spec(tool_name, params, connector)

    if spec.is_async:
        # For async functions, await directly
        return await spec.fn(**params)
    else:
        # For sync functions, run in a thread pool
        loop = asyncio.get_event_loop()
        return await loop.run_in_executor(None, lambda: spec.fn(**params))


def _get_validated_tool_spec(tool_name: str, params: dict, connector: Optional[Connector] = None) -> ToolSpec:
    """Helper function to get and validate a tool spec."""
    if connector:
        allowed = {t["name"] for t in connector.list_tools()}
        if tool_name not in allowed:
            raise PermissionError(f"{tool_name!r} not allowed in this context")

    spec = ConnectorRegistry.get_tool_spec(tool_name)
    jsonschema.validate(params, spec.param_schema)
    return spec
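Illustrative usage (not part of the package contents): a minimal sketch of the two invocation paths, using a hypothetical `add` tool registered with the `@tool` decorator defined later in this diff.

```python
import asyncio
from agent_handler_sdk.tool import tool
from agent_handler_sdk.invocation import invoke, invoke_async

@tool(desc="Add two integers")
def add(a: int, b: int) -> int:
    return a + b

invoke("add", {"a": 1, "b": 2})                      # 3, sync tool called directly
asyncio.run(invoke_async("add", {"a": 1, "b": 2}))   # 3, sync tool runs in a thread pool

# Parameters are validated against the generated schema first, so
# invoke("add", {"a": "one", "b": 2}) raises jsonschema.ValidationError.
```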
agent_handler_sdk-0.1.8/agent_handler_sdk/registry.py
@@ -0,0 +1,89 @@
from typing import Any, Dict, Callable, List, Optional, Tuple
import inspect

# Mapping from ToolSpec field name to MCP hint name
TOOL_ANNOTATION_FIELDS: List[Tuple[str, str]] = [
    ("read_only", "readOnlyHint"),
    ("destructive", "destructiveHint"),
    ("idempotent", "idempotentHint"),
    ("open_world", "openWorldHint"),
]


class ToolSpec:
    def __init__(
        self,
        name: str,
        description: str,
        fn: Callable,
        param_schema: Dict[str, Any],
        tags: List[str],
        read_only: Optional[bool] = None,
        destructive: Optional[bool] = None,
        idempotent: Optional[bool] = None,
        open_world: Optional[bool] = None,
    ):
        self.name = name
        self.description = description
        self.fn = fn
        self.param_schema = param_schema
        self.tags = tags
        self.is_async = inspect.iscoroutinefunction(fn)
        self.read_only = read_only
        self.destructive = destructive
        self.idempotent = idempotent
        self.open_world = open_world


class ConnectorRegistry:
    _tools: Dict[str, ToolSpec] = {}

    @classmethod
    def register_tool(
        cls,
        name: str,
        description: str,
        fn: Callable,
        param_schema: Dict[str, Any],
        tags: List[str],
        read_only: Optional[bool] = None,
        destructive: Optional[bool] = None,
        idempotent: Optional[bool] = None,
        open_world: Optional[bool] = None,
    ) -> None:
        cls._tools[name] = ToolSpec(
            name, description, fn, param_schema, tags, read_only, destructive, idempotent, open_world
        )

    @classmethod
    def _format_tool_spec(cls, spec: ToolSpec) -> Dict[str, Any]:
        result: Dict[str, Any] = {
            "name": spec.name,
            "description": spec.description,
            "input_schema": spec.param_schema,
        }
        # Add annotations if any are set
        annotations: Dict[str, bool] = {}
        for field_name, hint_name in TOOL_ANNOTATION_FIELDS:
            value = getattr(spec, field_name, None)
            if value is not None:
                annotations[hint_name] = value
        if annotations:
            result["annotations"] = annotations
        return result

    @classmethod
    def list_tools(cls) -> List[Dict[str, Any]]:
        return [cls._format_tool_spec(t) for t in cls._tools.values()]

    @classmethod
    def get_tool(cls, name: str) -> Dict[str, Any]:
        if name not in cls._tools:
            raise ValueError(f"Tool {name!r} not found in registry")
        return cls._format_tool_spec(cls._tools[name])

    @classmethod
    def get_tool_spec(cls, name: str) -> ToolSpec:
        if name not in cls._tools:
            raise ValueError(f"Tool {name!r} not found in registry")
        return cls._tools[name]
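Illustrative usage (not part of the package contents): a minimal sketch of registering a tool directly and reading back the formatted spec; the `demo__ping` tool is hypothetical.

```python
from agent_handler_sdk.registry import ConnectorRegistry

def ping() -> str:
    return "pong"

ConnectorRegistry.register_tool(
    name="demo__ping",
    description="Health check",
    fn=ping,
    param_schema={"type": "object", "properties": {}},
    tags=["demo"],
    read_only=True,
)

ConnectorRegistry.get_tool("demo__ping")
# {'name': 'demo__ping', 'description': 'Health check',
#  'input_schema': {'type': 'object', 'properties': {}},
#  'annotations': {'readOnlyHint': True}}
```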
agent_handler_sdk-0.1.8/agent_handler_sdk/templates/connector/README.md.tpl
@@ -0,0 +1,34 @@
# {name} Connector

Basic **{name}** connector for Agent Handler (SDK v{version}).

## Overview

This repository provides the `{name}` connector, exposing a set of tools under the **`{name}`** namespace that can be called by your LLM via the Agent Handler SDK.

## Prerequisites

Install **Poetry**:

```bash
# Install Poetry
pip install poetry
```

## Installing

Install the Agent Handler SDK and this connector:

```bash
# From your connector’s root (where pyproject.toml lives)
poetry install
```

## Testing

Run the unit tests for this connector:

```bash
# From your connector’s root (where pyproject.toml lives)
poetry run pytest
```
agent_handler_sdk-0.1.8/agent_handler_sdk/templates/connector/evals.json.tpl
@@ -0,0 +1,55 @@
[
  {
    "data_source_config": {
      "input_schema": {
        "type": "object",
        "properties": {
          "input": { "type": "string" },
          "reference_value": { "type": "string" },
          "reference_tools": {
            "type": "array",
            "items": {
              "type": "object",
              "properties": {
                "name": { "type": "string" },
                "args": { "type": "object" }
              },
              "required": ["name"]
            }
          }
        },
        "required": ["input"]
      }
    },
    "items": [
      {
        "input": "Tell me about George Washington",
        "reference_value": null,
        "reference_tools": [
          {
            "name": "wikipedia__search",
            "args": {
              "query": "George Washington"
            }
          }
        ]
      },
      {
        "input": "Where does London's name come from? Don't use any tools to solve this.",
        "reference_value": "London's name is believed to originate from the Latin word \"Londinium,\" which was the name used during the Roman period when the city was established as a settlement. The exact origin of \"Londinium\" is uncertain, but it may derive from a pre-Roman or Celtic word. Over time, the name evolved through various forms, such as \"Londinium\" in Latin and \"Lunden\" in Old English, eventually becoming \"London\" as we know it today.",
        "reference_tools": [
        ]
      }
    ],
    "prompts": [
      {
        "type": "text",
        "role": "user",
        "content": {
          "type": "text",
          "text": "{{input.input}}"
        }
      }
    ]
  }
]
agent_handler_sdk-0.1.8/agent_handler_sdk/templates/connector/init.py.tpl
@@ -0,0 +1,12 @@
import pkgutil
import importlib
from agent_handler_sdk.connector import Connector

# single Connector instance for this package
{name} = Connector(namespace="{name}")

# auto-import all modules in tools/
package = __name__ + ".tools"
path = f"{{__path__[0]}}/tools"
for _, m, _ in pkgutil.iter_modules([path]):
    importlib.import_module(f"{{package}}.{{m}}")
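For context (an inference from `render_template`, which formats this file with `name=<slug>`): for a hypothetical `wikipedia` slug, the rendered `__init__.py` would look roughly like this, with the doubled braces collapsing to ordinary f-string braces.

```python
import pkgutil
import importlib
from agent_handler_sdk.connector import Connector

# single Connector instance for this package
wikipedia = Connector(namespace="wikipedia")

# auto-import all modules in tools/
package = __name__ + ".tools"
path = f"{__path__[0]}/tools"
for _, m, _ in pkgutil.iter_modules([path]):
    importlib.import_module(f"{package}.{m}")
```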
agent_handler_sdk-0.1.8/agent_handler_sdk/templates/connector/pyproject.toml.tpl
@@ -0,0 +1,29 @@
[tool.poetry]
name = "{name}-connector"
version = "0.1.0"
description = "Basic {name} connector for Agent Handler"
readme = "README.md"
authors = ["Your Name <you@example.com>"]

[tool.poetry.dependencies]
python = ">=3.10"
agent-handler-sdk = "^{version}"

[tool.poetry.dev-dependencies]
pytest = "^8.3.0"
pytest-cov = "^4.0.0"
pytest-asyncio = "^0.24.0"
pytest-mock = "^3.11.1"
mypy = "^1.5.1"
pre-commit = "^3.4.0"
tox = "^4.11.1"
ruff = "^0.7.4"

[build-system]
requires = ["poetry-core>=1.0.0,<2.0.0"]
build-backend = "poetry.core.masonry.api"

[tool.mypy]
files = ["{name}_connector/**/*.py"]
python_version = "3.10"
disallow_untyped_defs = true
agent_handler_sdk-0.1.8/agent_handler_sdk/templates/connector/test_handlers.py.tpl
@@ -0,0 +1,19 @@
import pytest
from {name}_connector import {name}
from agent_handler_sdk.invocation import invoke

@pytest.mark.parametrize("tool_name,params,expected", [
    ("{name}__example", {{}}, {{"status": "ok"}}),
])
def test_{name}_operations(tool_name, params, expected):
    # Directly invoke tools using the SDK
    result = invoke(tool_name, params, connector={name})
    assert result == expected

@pytest.mark.parametrize("tool_name,params,expected", [
    ("{name}__example", {{}}, {{"status": "ok"}}),
])
def test_{name}_operations_with_connector(tool_name, params, expected):
    # Invoke tools using the Connector
    result = {name}.call_tool(tool_name, params)
    assert result == expected
agent_handler_sdk-0.1.8/agent_handler_sdk/tool.py
@@ -0,0 +1,141 @@
import inspect
from typing import (
    Callable,
    Optional,
    List,
    Any,
    Dict,
    get_type_hints,
    get_origin,
    get_args,
    TypeVar,
    cast,
)
from pydantic import BaseModel, create_model
from .registry import ConnectorRegistry
from .utils import convert_type_hint_to_json_schema


F = TypeVar("F", bound=Callable[..., Any])


def tool(
    name: Optional[str] = None,
    desc: str = "",
    tags: Optional[List[str]] = None,
    read_only: Optional[bool] = None,
    destructive: Optional[bool] = None,
    idempotent: Optional[bool] = None,
    open_world: Optional[bool] = None,
) -> Callable[[F], F]:
    """
    Decorator to register a function as a tool.

    Args:
        name: Optional name for the tool. Defaults to the function name.
        desc: Description of the tool.
        tags: Optional list of tags for the tool.
        read_only: If True, the tool does not modify its environment (MCP annotation).
        destructive: If True, the tool may perform destructive updates (MCP annotation).
        idempotent: If True, repeated calls with same args have no additional effect (MCP annotation).
        open_world: If True, the tool may interact with external entities (MCP annotation).

    Returns:
        The decorated function.
    """
    tags_list: List[str] = tags or []

    def decorator(fn: F) -> F:
        sig = inspect.signature(fn)
        type_hints = get_type_hints(fn)

        # Create a clean schema structure
        schema: Dict[str, Any] = {"type": "object", "properties": {}}
        required: List[str] = []

        # Process each parameter
        for param_name, param in sig.parameters.items():
            # Get the type hint
            type_hint = type_hints.get(param_name, Any)

            # Determine if the parameter is required
            if param.default is inspect.Parameter.empty:
                required.append(param_name)

            # Convert type hint to JSON schema
            schema["properties"][param_name] = convert_type_hint_to_json_schema(type_hint)

        # If there are required fields, add them to the schema
        if required:
            schema["required"] = required

        # Check if the function is async
        is_async = inspect.iscoroutinefunction(fn)

        # Create a wrapper function that converts dictionaries to Pydantic models
        if is_async:
            # Type ignore for the conditional function variants issue
            async def wrapped_fn(**kwargs: Any) -> Any:  # type: ignore
                converted_kwargs = convert_kwargs(kwargs, type_hints)
                return await fn(**converted_kwargs)

        else:

            def wrapped_fn(**kwargs: Any) -> Any:  # type: ignore
                converted_kwargs = convert_kwargs(kwargs, type_hints)
                return fn(**converted_kwargs)

        # Preserve the original function's signature and docstring
        wrapped_fn.__name__ = fn.__name__
        wrapped_fn.__doc__ = fn.__doc__
        wrapped_fn.__annotations__ = fn.__annotations__

        tool_name = name or fn.__name__
        ConnectorRegistry.register_tool(
            name=tool_name,
            description=desc,
            fn=wrapped_fn,
            param_schema=schema,
            tags=tags_list,
            read_only=read_only,
            destructive=destructive,
            idempotent=idempotent,
            open_world=open_world,
        )
        return fn

    return decorator


def convert_kwargs(kwargs: Dict[str, Any], type_hints: Dict[str, Any]) -> Dict[str, Any]:
    """Helper function to convert dictionaries to Pydantic models based on type hints."""
    converted_kwargs: Dict[str, Any] = {}
    for param_name, param_value in kwargs.items():
        if param_name in type_hints:
            param_type = type_hints[param_name]
            # Check if it's a Pydantic model
            if (
                isinstance(param_value, dict)
                and hasattr(param_type, "model_validate")
                and issubclass(param_type, BaseModel)
            ):
                # Convert dict to Pydantic model
                converted_kwargs[param_name] = param_type.model_validate(param_value)
            # Handle List[PydanticModel]
            elif (
                isinstance(param_value, list)
                and get_origin(param_type) is list
                and len(get_args(param_type)) > 0
                and hasattr(get_args(param_type)[0], "model_validate")
                and issubclass(get_args(param_type)[0], BaseModel)
            ):
                model_class = get_args(param_type)[0]
                converted_kwargs[param_name] = [
                    model_class.model_validate(item) if isinstance(item, dict) else item for item in param_value
                ]
            else:
                converted_kwargs[param_name] = param_value
        else:
            converted_kwargs[param_name] = param_value

    return converted_kwargs
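Illustrative usage (not part of the package contents): a minimal sketch of the decorator with a Pydantic parameter; the `Issue` model and the `tracker__create_issue` name are hypothetical.

```python
from pydantic import BaseModel
from agent_handler_sdk.tool import tool
from agent_handler_sdk.registry import ConnectorRegistry

class Issue(BaseModel):
    title: str
    priority: int = 1

@tool(name="tracker__create_issue", desc="Create an issue", destructive=True)
def create_issue(issue: Issue, dry_run: bool = False) -> dict:
    return {"created": not dry_run, "title": issue.title}

spec = ConnectorRegistry.get_tool("tracker__create_issue")
# spec["input_schema"]["properties"]["issue"] is the flattened Issue schema,
# "dry_run" maps to {"type": "boolean"}, and only "issue" is required.

# The registered wrapper converts dict arguments back into the model:
ConnectorRegistry.get_tool_spec("tracker__create_issue").fn(
    issue={"title": "Broken link"}, dry_run=True
)
```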
agent_handler_sdk-0.1.8/agent_handler_sdk/utils.py
@@ -0,0 +1,208 @@
from typing import Any, Dict, List, Optional, Union, get_type_hints, get_origin, get_args, Type
from enum import Enum
from pydantic import BaseModel


def convert_type_hint_to_json_schema(type_hint: Any) -> Dict[str, Any]:
    """
    Convert a Python type hint to a JSON schema representation.
    Handles primitive types, lists, tuples, unions, optional types, and Pydantic models.
    """
    # Handle None type
    if type_hint is type(None):
        return {"type": "null"}

    # Handle primitive types
    if type_hint in (int, float, str, bool):
        return _convert_primitive_type(type_hint)

    # Handle Pydantic models
    if hasattr(type_hint, "model_json_schema") and issubclass(type_hint, BaseModel):
        return _convert_pydantic_type(type_hint)

    # Handle container types (list, tuple, dict)
    origin = get_origin(type_hint)
    if origin is list:
        return _convert_list_type(type_hint)
    elif origin is tuple:
        return _convert_tuple_type(type_hint)
    elif origin is dict:
        return _convert_dict_type(type_hint)

    # Handle Union and Optional types
    if origin is Union:
        return _convert_union_type(type_hint)
    if origin is Optional:
        return _convert_optional_type(type_hint)

    # Handle Enum types
    if isinstance(type_hint, type) and issubclass(type_hint, Enum):
        return _convert_enum_type(type_hint)

    # Default to string for unknown types
    return {"type": "string"}


def _convert_primitive_type(type_hint: Type) -> Dict[str, Any]:
    """Convert primitive Python types to JSON schema types."""
    type_mapping = {
        int: {"type": "integer"},
        float: {"type": "number"},
        str: {"type": "string"},
        bool: {"type": "boolean"},
    }
    return type_mapping.get(type_hint, {"type": "string"})


def _convert_list_type(type_hint: Any) -> Dict[str, Any]:
    """Convert Python list type to JSON schema array."""
    item_type = get_args(type_hint)[0] if get_args(type_hint) else Any
    return {"type": "array", "items": convert_type_hint_to_json_schema(item_type)}


def _convert_tuple_type(type_hint: Any) -> Dict[str, Any]:
    """Convert Python tuple type to JSON schema array with constraints."""
    args = get_args(type_hint)
    if not args:
        return {"type": "array"}

    # Handle tuple with variable args (Tuple[int, ...])
    if len(args) == 2 and args[1] is Ellipsis:
        return {"type": "array", "items": convert_type_hint_to_json_schema(args[0])}

    # Handle fixed-length tuples
    return {
        "type": "array",
        "minItems": len(args),
        "maxItems": len(args),
        "items": [convert_type_hint_to_json_schema(arg) for arg in args],
    }


def _convert_dict_type(type_hint: Any) -> Dict[str, Any]:
    """Convert Python dict type to JSON schema object."""
    args = get_args(type_hint)
    key_type = args[0] if len(args) > 0 else Any
    value_type = args[1] if len(args) > 1 else Any

    # Only str keys are supported in JSON
    if key_type is not str and key_type is not Any:
        key_type = str

    return {"type": "object", "additionalProperties": convert_type_hint_to_json_schema(value_type)}


def _convert_union_type(type_hint: Any) -> Dict[str, Any]:
    """Convert Python Union type to JSON schema anyOf."""
    union_args = get_args(type_hint)

    # Handle Optional (Union[Type, None])
    if len(union_args) == 2 and type(None) in union_args:
        return _convert_optional_union(union_args)

    # Handle regular Union types
    return {"anyOf": [convert_type_hint_to_json_schema(arg) for arg in union_args]}


def _convert_optional_union(union_args: tuple) -> Dict[str, Any]:
    """Handle Optional as Union[Type, None]."""
    # Get the non-None type
    actual_type = union_args[0] if union_args[1] is type(None) else union_args[1]
    return convert_type_hint_to_json_schema(actual_type)


def _convert_optional_type(type_hint: Any) -> Dict[str, Any]:
    """Convert Python Optional type to JSON schema."""
    actual_type = get_args(type_hint)[0]
    return convert_type_hint_to_json_schema(actual_type)


def _convert_enum_type(type_hint: Type[Enum]) -> Dict[str, Any]:
    """Convert Python Enum type to JSON schema enum."""
    enum_values = [item.value for item in type_hint]
    return {"enum": enum_values}


def _convert_pydantic_type(model: Type[BaseModel]) -> Dict[str, Any]:
    """
    Convert a Pydantic model to a flattened JSON schema without references.
    """
    # Get the model schema
    schema = model.model_json_schema()

    # Create a flattened version without references
    flattened_schema = {"type": "object", "properties": {}}

    # Get the definitions section
    defs = schema.get("$defs", {})

    # Copy properties and resolve any references
    if "properties" in schema:
        flattened_schema["properties"] = _resolve_references(schema["properties"], defs)

    # Copy required fields if present
    if "required" in schema:
        flattened_schema["required"] = schema["required"]

    # Copy title if present
    if "title" in schema:
        flattened_schema["title"] = schema["title"]

    return flattened_schema


def _resolve_references(obj: Any, schema_defs: Dict[str, Any]) -> Any:
    """
    Recursively resolve JSON schema references.

    Args:
        obj: The object to resolve references in
        schema_defs: The definitions dictionary containing referenced schemas

    Returns:
        The object with all references resolved
    """
    if isinstance(obj, dict):
        # If this is a reference, resolve it
        if "$ref" in obj and len(obj) == 1:
            return _resolve_single_reference(obj, schema_defs)

        # Process each property in the object
        result = {}
        for key, value in obj.items():
            if key == "items" and "$ref" in value:
                # Special handling for array items with references
                ref_path = value["$ref"].split("/")[-1]
                if ref_path in schema_defs:
                    # Replace with the referenced schema
                    result[key] = _resolve_references(schema_defs[ref_path], schema_defs)
            else:
                # Recursively process the value
                result[key] = _resolve_references(value, schema_defs)
        return result
    elif isinstance(obj, list):
        # Process each item in the list
        return [_resolve_references(item, schema_defs) for item in obj]
    else:
        # Return primitive values as is
        return obj


def _resolve_single_reference(ref_obj: Dict[str, Any], schema_defs: Dict[str, Any]) -> Any:
    """
    Resolve a single reference object.

    Args:
        ref_obj: The reference object containing a $ref key
        schema_defs: The definitions dictionary containing referenced schemas

    Returns:
        The resolved reference
    """
    ref_path = ref_obj["$ref"].split("/")[-1]
    if ref_path in schema_defs:
        # Replace with a copy of the referenced schema
        resolved = schema_defs[ref_path].copy()
        # Recursively resolve any references in the referenced schema
        return _resolve_references(resolved, schema_defs)
    return ref_obj  # Reference not found, return as is
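Illustrative usage (not part of the package contents): what `convert_type_hint_to_json_schema` produces for a few common hints; the `Color` enum is hypothetical.

```python
from enum import Enum
from typing import Dict, List, Optional, Union
from agent_handler_sdk.utils import convert_type_hint_to_json_schema

class Color(Enum):
    RED = "red"
    BLUE = "blue"

convert_type_hint_to_json_schema(List[int])         # {'type': 'array', 'items': {'type': 'integer'}}
convert_type_hint_to_json_schema(Optional[str])     # {'type': 'string'} (unwrapped to the non-None member)
convert_type_hint_to_json_schema(Dict[str, float])  # {'type': 'object', 'additionalProperties': {'type': 'number'}}
convert_type_hint_to_json_schema(Union[int, str])   # {'anyOf': [{'type': 'integer'}, {'type': 'string'}]}
convert_type_hint_to_json_schema(Color)             # {'enum': ['red', 'blue']}
```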
agent_handler_sdk-0.1.8/pyproject.toml
@@ -0,0 +1,49 @@
[tool.poetry]
name = "agent-handler-sdk"
version = "0.1.8"
description = "Agent Handler SDK for defining and invoking LLM tools"
authors = ["David Dalmaso <david.dalmaso@merge.dev>", "Gil Feig <gil@merge.dev>"]
packages = [
    { include = "agent_handler_sdk" }
]

[tool.poetry.dependencies]
python = ">=3.10"
pydantic = "^2.0"
jsonschema = "^4.0"

[tool.poetry.dev-dependencies]
pytest = "^7.0"
pytest-asyncio = "^0.20.3"
mypy = "^1.4.1"
black = "^23.3.0"
pre-commit = "^2.20.0"
types-jsonschema = "^4.17.0"

[tool.poetry.group.dev.dependencies]
pytest-asyncio = "^0.20.3"
pre-commit = "^2.20.0"
types-jsonschema = "^4.23.0.20241208"

[tool.poetry.scripts]
ahs-scaffold = "agent_handler_sdk.cli:scaffold_connector"

[build-system]
requires = ["poetry-core>=1.0.0"]
build-backend = "poetry.core.masonry.api"

[tool.black]
line-length = 120
target-version = ["py310"]
include = '\.pyi?$'

[tool.mypy]
python_version = "3.10"
warn_return_any = true
warn_unused_configs = true
disallow_untyped_defs = true
disallow_incomplete_defs = true
check_untyped_defs = true
disallow_untyped_decorators = true
no_implicit_optional = true
strict_optional = true