agent-handler-sdk 0.1.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of agent-handler-sdk has been flagged as potentially problematic; see the advisory details in the registry listing for more information.
- agent_handler_sdk-0.1.0/PKG-INFO +14 -0
- agent_handler_sdk-0.1.0/agent_handler/__init__.py +1 -0
- agent_handler_sdk-0.1.0/agent_handler/cli.py +88 -0
- agent_handler_sdk-0.1.0/agent_handler/connector.py +94 -0
- agent_handler_sdk-0.1.0/agent_handler/exceptions.py +4 -0
- agent_handler_sdk-0.1.0/agent_handler/invocation.py +55 -0
- agent_handler_sdk-0.1.0/agent_handler/registry.py +60 -0
- agent_handler_sdk-0.1.0/agent_handler/templates/connector/README.md.tpl +34 -0
- agent_handler_sdk-0.1.0/agent_handler/templates/connector/handlers.py.tpl +5 -0
- agent_handler_sdk-0.1.0/agent_handler/templates/connector/init.py.tpl +12 -0
- agent_handler_sdk-0.1.0/agent_handler/templates/connector/pyproject.toml.tpl +29 -0
- agent_handler_sdk-0.1.0/agent_handler/templates/connector/test_handlers.py.tpl +19 -0
- agent_handler_sdk-0.1.0/agent_handler/tool.py +129 -0
- agent_handler_sdk-0.1.0/agent_handler/utils.py +208 -0
- agent_handler_sdk-0.1.0/pyproject.toml +49 -0
|
@@ -0,0 +1,14 @@
|
|
|
1
|
+
Metadata-Version: 2.3
|
|
2
|
+
Name: agent-handler-sdk
|
|
3
|
+
Version: 0.1.0
|
|
4
|
+
Summary: Agent Handler SDK for defining and invoking LLM tools
|
|
5
|
+
Author: David Dalmaso
|
|
6
|
+
Author-email: david.dalmaso@merge.dev
|
|
7
|
+
Requires-Python: >=3.10
|
|
8
|
+
Classifier: Programming Language :: Python :: 3
|
|
9
|
+
Classifier: Programming Language :: Python :: 3.10
|
|
10
|
+
Classifier: Programming Language :: Python :: 3.11
|
|
11
|
+
Classifier: Programming Language :: Python :: 3.12
|
|
12
|
+
Classifier: Programming Language :: Python :: 3.13
|
|
13
|
+
Requires-Dist: jsonschema (>=4.0,<5.0)
|
|
14
|
+
Requires-Dist: pydantic (>=2.0,<3.0)
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
# agent_handler package root
|
|
@@ -0,0 +1,88 @@
|
|
|
1
|
+
import sys
|
|
2
|
+
from pathlib import Path
|
|
3
|
+
from typing import Dict, Any
|
|
4
|
+
import importlib.resources as pkg_resources
|
|
5
|
+
|
|
6
|
+
try:
|
|
7
|
+
# Python 3.11+
|
|
8
|
+
import tomllib as toml # type: ignore
|
|
9
|
+
except ImportError:
|
|
10
|
+
# For older versions
|
|
11
|
+
import tomli as toml
|
|
12
|
+
|
|
13
|
+
# Use str() to convert Traversable to string path
# NOTE(review): importlib.resources.files() may return a zip-backed
# Traversable for zipped installs; str() only yields a usable filesystem
# path for on-disk installs -- confirm zip-safe usage is not required.
TEMPLATE_DIR = Path(str(pkg_resources.files("agent_handler"))) / "templates" / "connector"
# Repo root relative to this file; only meaningful in a source checkout.
SDK_ROOT = Path(__file__).parent.parent  # adjust if your structure is different
|
|
16
|
+
|
|
17
|
+
|
|
18
|
+
def get_sdk_version() -> str:
    """Return this SDK's own version string.

    Prefers the installed-distribution metadata, which works for wheel and
    sdist installs where pyproject.toml is not shipped next to the package.
    Falls back to parsing the repository's pyproject.toml (Poetry layout)
    for editable/development checkouts.

    Raises:
        FileNotFoundError: neither installed metadata nor a repo
            pyproject.toml could be found.
        KeyError: the fallback pyproject.toml lacks [tool.poetry].version.
    """
    from importlib.metadata import PackageNotFoundError, version

    try:
        return version("agent-handler-sdk")
    except PackageNotFoundError:
        # Development fallback: read the Poetry version field from the
        # repository root next to the package directory.
        pyproject = SDK_ROOT / "pyproject.toml"
        data = toml.loads(pyproject.read_text(encoding="utf-8"))
        return str(data["tool"]["poetry"]["version"])
|
|
23
|
+
|
|
24
|
+
|
|
25
|
+
def render_template(filename: str, **context: Any) -> str:
    """Read *filename* from the SDK's templates/connector directory and
    interpolate *context* into it via str.format.

    Raises FileNotFoundError when the template does not exist and KeyError
    when the template references a placeholder missing from *context*.
    """
    raw = (TEMPLATE_DIR / filename).read_text()
    return raw.format(**context)
|
|
33
|
+
|
|
34
|
+
|
|
35
|
+
def scaffold_connector() -> int:
    """
    Usage: ahs-scaffold <connector-name> [--target-dir <dir>]

    Creates:
      <target-dir>/connectors/<name>/
        pyproject.toml
        metadata.yaml
        <name>_connector/
          __init__.py
          tools/
            handlers.py
        tests/
          test_handlers.py
    """
    args = sys.argv[1:]
    if not args:
        print(scaffold_connector.__doc__)
        # Return the exit code instead of raising SystemExit, consistent
        # with the declared `-> int` contract (the console-script wrapper
        # passes the return value to sys.exit()).
        return 1

    name = args[0]
    target = Path(".")
    if "--target-dir" in args:
        idx = args.index("--target-dir")
        # Guard against a trailing "--target-dir" with no value, which
        # previously raised an unhandled IndexError.
        if idx + 1 >= len(args):
            print("error: --target-dir requires a directory argument")
            return 1
        target = Path(args[idx + 1])

    version = get_sdk_version()

    # Layout: package dir and tools live under the connector root; tests
    # sit beside the package, not inside it.
    base = target / "connectors" / name
    pkg_dir = base / f"{name}_connector"
    tools_dir = pkg_dir / "tools"
    tests_dir = base / "tests"

    # Create directories (idempotent).
    for d in (base, pkg_dir, tools_dir, tests_dir):
        d.mkdir(parents=True, exist_ok=True)

    # Map template -> output path.
    files_to_render = {
        "pyproject.toml.tpl": base / "pyproject.toml",
        "metadata.yaml.tpl": base / "metadata.yaml",
        "init.py.tpl": pkg_dir / "__init__.py",
        "handlers.py.tpl": tools_dir / "handlers.py",
        "test_handlers.py.tpl": tests_dir / "test_handlers.py",
        "README.md.tpl": base / "README.md",
    }

    # Render each template with both name & version.
    for tpl_name, out_path in files_to_render.items():
        # NOTE(review): metadata.yaml.tpl is not shipped in
        # templates/connector (see the package file listing), so rendering
        # it unconditionally crashed mid-scaffold with FileNotFoundError.
        # Skip missing templates with a warning instead.
        if not (TEMPLATE_DIR / tpl_name).is_file():
            print(f"warning: template {tpl_name!r} not found; skipping")
            continue
        content = render_template(tpl_name, name=name, version=version)
        out_path.write_text(content, encoding="utf-8")

    print(f"Scaffolded connector '{name}' (SDK v{version}) at {base}")
    return 0
|
|
@@ -0,0 +1,94 @@
|
|
|
1
|
+
from typing import List, Optional, Set, Dict, Any, Union, Awaitable, Callable
|
|
2
|
+
from .registry import ConnectorRegistry
|
|
3
|
+
|
|
4
|
+
|
|
5
|
+
class Connector:
    """Groups a connector's registered tools under one namespace and
    optionally restricts which of them are exposed (by name and/or tag)."""

    def __init__(
        self,
        namespace: str,
        include_tools: Optional[List[str]] = None,
        include_tags: Optional[List[str]] = None,
    ):
        """
        namespace: unique prefix (e.g. "jira").
        include_tools: explicit list of fully-qualified tool names.
        include_tags: whitelist of tags to filter tools by.
        """
        self.namespace = namespace
        # Empty lists behave like None: no filtering.
        self.include_tools = set(include_tools) if include_tools else None
        self.include_tags = set(include_tags) if include_tags else None

    def tool(
        self,
        name: Optional[str] = None,
        desc: str = "",
        tags: Optional[List[str]] = None,
    ) -> Callable[[Callable], Callable]:
        """Decorator registering a function as '<namespace>__<name>'.

        Wraps agent_handler.tool.tool to inject the qualified name & tags.
        """
        # Imported lazily to avoid a circular import with .tool.
        from .tool import tool as _tool

        def decorator(fn: Callable) -> Callable:
            qualified = f"{self.namespace}__{name or fn.__name__}"
            return _tool(name=qualified, desc=desc, tags=tags)(fn)

        return decorator

    def list_tools(self) -> List[Dict]:
        """Return the public specs of every tool this connector exposes,
        after applying the namespace, name, and tag filters."""
        namespace_prefix = f"{self.namespace}__"
        specs = [t for t in ConnectorRegistry.list_tools() if t["name"].startswith(namespace_prefix)]

        # Filter by explicit tool names if specified.
        if self.include_tools is not None:
            specs = [t for t in specs if t["name"] in self.include_tools]

        # Filter by tags if specified. Uses the registry's public
        # get_tool_spec() accessor (the full ToolSpec carries the tags)
        # instead of reaching into ConnectorRegistry._tools, and avoids
        # the previous per-spec rescan of the whole spec list (O(n^2)).
        if self.include_tags is not None:
            specs = [
                spec
                for spec in specs
                if any(tag in self.include_tags for tag in ConnectorRegistry.get_tool_spec(spec["name"]).tags)
            ]

        return specs

    def get_tool(self, name: str) -> Dict:
        """Return the public spec of one tool (ValueError if unknown)."""
        return ConnectorRegistry.get_tool(name)

    def call_tool(self, tool_name: str, params: dict) -> Any:
        """
        Validate and invoke a registered tool by name.

        For synchronous tools, returns the result directly.
        For async tools, this will run the event loop and return the awaited result.

        Args:
            tool_name: The name of the tool to invoke
            params: Dictionary of parameters to pass to the tool

        Returns:
            The result of the tool invocation
        """
        # Lazy import breaks the connector <-> invocation import cycle.
        from .invocation import invoke as _invoke

        return _invoke(tool_name, params, connector=self)

    async def call_tool_async(self, tool_name: str, params: dict) -> Any:
        """
        Validate and invoke a registered tool by name asynchronously.

        For synchronous tools, this will run them in a thread pool.
        For async tools, this will await the coroutine directly.

        Args:
            tool_name: The name of the tool to invoke
            params: Dictionary of parameters to pass to the tool

        Returns:
            The result of the tool invocation
        """
        from .invocation import invoke_async as _invoke_async

        return await _invoke_async(tool_name, params, connector=self)
|
|
@@ -0,0 +1,55 @@
|
|
|
1
|
+
from typing import Any, Optional, Union, Awaitable
|
|
2
|
+
import asyncio
|
|
3
|
+
import jsonschema
|
|
4
|
+
from .registry import ConnectorRegistry
|
|
5
|
+
from .connector import Connector
|
|
6
|
+
from .registry import ToolSpec
|
|
7
|
+
|
|
8
|
+
|
|
9
|
+
def invoke(tool_name: str, params: dict, connector: Optional[Connector] = None) -> Any:
    """
    Validate and invoke a registered tool by name.
    If a Connector is provided, enforce whitelist based on that instance.

    For synchronous tools, returns the result directly.
    For async tools, this will run the event loop and return the awaited result.
    """
    spec = _get_validated_tool_spec(tool_name, params, connector)

    if not spec.is_async:
        # Plain function: call it directly.
        return spec.fn(**params)
    # Coroutine function: drive it to completion on a fresh event loop.
    # NOTE: asyncio.run() fails inside an already-running loop; use
    # invoke_async() from async contexts.
    return asyncio.run(spec.fn(**params))
|
|
25
|
+
|
|
26
|
+
|
|
27
|
+
async def invoke_async(tool_name: str, params: dict, connector: Optional[Connector] = None) -> Any:
    """
    Validate and invoke a registered tool by name asynchronously.
    If a Connector is provided, enforce whitelist based on that instance.

    For synchronous tools, this will run them in a thread pool.
    For async tools, this will await the coroutine directly.
    """
    spec = _get_validated_tool_spec(tool_name, params, connector)

    if spec.is_async:
        # For async functions, await directly.
        return await spec.fn(**params)
    # For sync functions, run in the default thread-pool executor.
    # get_running_loop() is the correct call inside a coroutine;
    # get_event_loop() is deprecated here since Python 3.10.
    loop = asyncio.get_running_loop()
    return await loop.run_in_executor(None, lambda: spec.fn(**params))
|
|
44
|
+
|
|
45
|
+
|
|
46
|
+
def _get_validated_tool_spec(tool_name: str, params: dict, connector: Optional[Connector] = None) -> ToolSpec:
    """Look up a tool, enforce the connector's whitelist, and validate params.

    Raises PermissionError when the connector does not expose the tool,
    ValueError when the tool is unknown, and jsonschema.ValidationError
    when params do not match the tool's schema.
    """
    if connector is not None:
        allowed_names = {entry["name"] for entry in connector.list_tools()}
        if tool_name not in allowed_names:
            raise PermissionError(f"{tool_name!r} not allowed in this context")

    tool_spec = ConnectorRegistry.get_tool_spec(tool_name)
    jsonschema.validate(params, tool_spec.param_schema)
    return tool_spec
|
|
@@ -0,0 +1,60 @@
|
|
|
1
|
+
from typing import Any, Dict, Callable, List
|
|
2
|
+
import inspect
|
|
3
|
+
|
|
4
|
+
|
|
5
|
+
class ToolSpec:
    """Value object bundling one registered tool's metadata with its callable."""

    def __init__(
        self,
        name: str,
        description: str,
        fn: Callable,
        param_schema: Dict[str, Any],
        tags: List[str],
    ):
        self.fn = fn
        self.name = name
        self.tags = tags
        self.description = description
        self.param_schema = param_schema
        # Detected once at construction so invokers can branch between
        # awaiting the callable and calling it directly.
        self.is_async = inspect.iscoroutinefunction(fn)
|
|
20
|
+
|
|
21
|
+
|
|
22
|
+
class ConnectorRegistry:
    """Process-global registry mapping tool names to ToolSpec records."""

    # Shared across the whole process; keyed by fully-qualified tool name.
    _tools: Dict[str, ToolSpec] = {}

    @classmethod
    def register_tool(
        cls,
        name: str,
        description: str,
        fn: Callable,
        param_schema: Dict[str, Any],
        tags: List[str],
    ) -> None:
        """Add a tool to the registry; duplicate names are rejected."""
        if name in cls._tools:
            raise ValueError(f"Tool {name!r} already registered")
        cls._tools[name] = ToolSpec(name, description, fn, param_schema, tags)

    @classmethod
    def get_tool_spec(cls, name: str) -> ToolSpec:
        """Return the full ToolSpec, raising ValueError when unknown."""
        try:
            return cls._tools[name]
        except KeyError:
            raise ValueError(f"Tool {name!r} not found in registry") from None

    @classmethod
    def get_tool(cls, name: str) -> Dict[str, Any]:
        """Return the public (caller-facing) description of one tool."""
        if name not in cls._tools:
            raise ValueError(f"Tool {name!r} not found in registry")
        return cls._format_tool_spec(cls._tools[name])

    @classmethod
    def list_tools(cls) -> List[Dict[str, Any]]:
        """Public descriptions of every registered tool."""
        return [cls._format_tool_spec(spec) for spec in cls._tools.values()]

    @classmethod
    def _format_tool_spec(cls, spec: ToolSpec) -> Dict[str, Any]:
        """Project a ToolSpec into the dict shape exposed to callers."""
        return {
            "name": spec.name,
            "description": spec.description,
            "input_schema": spec.param_schema,
        }
|
|
@@ -0,0 +1,34 @@
|
|
|
1
|
+
# {name} Connector
|
|
2
|
+
|
|
3
|
+
Basic **{name}** connector for Agent Handler (SDK v{version}).
|
|
4
|
+
|
|
5
|
+
## Overview
|
|
6
|
+
|
|
7
|
+
This repository provides the `{name}` connector, exposing a set of tools under the **`{name}`** namespace that can be called by your LLM via the Agent Handler SDK.
|
|
8
|
+
|
|
9
|
+
## Prerequisites
|
|
10
|
+
|
|
11
|
+
Install **Poetry**:
|
|
12
|
+
|
|
13
|
+
```bash
|
|
14
|
+
# Install Poetry
|
|
15
|
+
pip install poetry
|
|
16
|
+
```
|
|
17
|
+
|
|
18
|
+
## Installing
|
|
19
|
+
|
|
20
|
+
Install the Agent Handler SDK and this connector:
|
|
21
|
+
|
|
22
|
+
```bash
|
|
23
|
+
# From your connector’s root (where pyproject.toml lives)
|
|
24
|
+
poetry install
|
|
25
|
+
```
|
|
26
|
+
|
|
27
|
+
## Testing
|
|
28
|
+
|
|
29
|
+
Run the unit tests for this connector:
|
|
30
|
+
|
|
31
|
+
```bash
|
|
32
|
+
# From your connector’s root (where pyproject.toml lives)
|
|
33
|
+
poetry run pytest
|
|
34
|
+
```
|
|
@@ -0,0 +1,12 @@
|
|
|
1
|
+
import pkgutil
|
|
2
|
+
import importlib
|
|
3
|
+
from agent_handler.connector import Connector
|
|
4
|
+
|
|
5
|
+
# single Connector instance for this package
|
|
6
|
+
{name} = Connector(namespace="{name}")
|
|
7
|
+
|
|
8
|
+
# auto-import all modules in tools/
|
|
9
|
+
package = __name__ + ".tools"
|
|
10
|
+
path = f"{{__path__[0]}}/tools"
|
|
11
|
+
for _, m, _ in pkgutil.iter_modules([path]):
|
|
12
|
+
importlib.import_module(f"{{package}}.{{m}}")
|
|
@@ -0,0 +1,29 @@
|
|
|
1
|
+
[tool.poetry]
|
|
2
|
+
name = "{name}-connector"
|
|
3
|
+
version = "0.1.0"
|
|
4
|
+
description = "Basic {name} connector for Agent Handler"
|
|
5
|
+
readme = "README.md"
|
|
6
|
+
authors = ["Your Name <you@example.com>"]
|
|
7
|
+
|
|
8
|
+
[tool.poetry.dependencies]
|
|
9
|
+
python = ">=3.10"
|
|
10
|
+
agent-handler-sdk = "^{version}"
|
|
11
|
+
|
|
12
|
+
[tool.poetry.dev-dependencies]
|
|
13
|
+
pytest = "^8.3.0"
|
|
14
|
+
pytest-cov = "^4.0.0"
|
|
15
|
+
pytest-asyncio = "^0.24.0"
|
|
16
|
+
pytest-mock = "^3.11.1"
|
|
17
|
+
mypy = "^1.5.1"
|
|
18
|
+
pre-commit = "^3.4.0"
|
|
19
|
+
tox = "^4.11.1"
|
|
20
|
+
ruff = "^0.7.4"
|
|
21
|
+
|
|
22
|
+
[build-system]
|
|
23
|
+
requires = ["poetry-core>=1.0.0,<2.0.0"]
|
|
24
|
+
build-backend = "poetry.core.masonry.api"
|
|
25
|
+
|
|
26
|
+
[tool.mypy]
|
|
27
|
+
files = ["{name}_connector/**/*.py"]
|
|
28
|
+
python_version = "3.10"
|
|
29
|
+
disallow_untyped_defs = true
|
|
@@ -0,0 +1,19 @@
|
|
|
1
|
+
import pytest
|
|
2
|
+
from {name}_connector import {name}
|
|
3
|
+
from agent_handler.invocation import invoke
|
|
4
|
+
|
|
5
|
+
@pytest.mark.parametrize("tool_name,params,expected", [
|
|
6
|
+
("{name}__example", {{}}, {{"status": "ok"}}),
|
|
7
|
+
])
|
|
8
|
+
def test_{name}_operations(tool_name, params, expected):
|
|
9
|
+
# Directly invoke tools using the SDK
|
|
10
|
+
result = invoke(tool_name, params, connector={name})
|
|
11
|
+
assert result == expected
|
|
12
|
+
|
|
13
|
+
@pytest.mark.parametrize("tool_name,params,expected", [
|
|
14
|
+
("{name}__example", {{}}, {{"status": "ok"}}),
|
|
15
|
+
])
|
|
16
|
+
def test_{name}_operations_with_connector(tool_name, params, expected):
|
|
17
|
+
# Invoke tools using the Connector
|
|
18
|
+
result = {name}.call_tool(tool_name, params)
|
|
19
|
+
assert result == expected
|
|
@@ -0,0 +1,129 @@
|
|
|
1
|
+
import inspect
|
|
2
|
+
from typing import (
|
|
3
|
+
Callable,
|
|
4
|
+
Optional,
|
|
5
|
+
List,
|
|
6
|
+
Any,
|
|
7
|
+
Dict,
|
|
8
|
+
get_type_hints,
|
|
9
|
+
get_origin,
|
|
10
|
+
get_args,
|
|
11
|
+
TypeVar,
|
|
12
|
+
cast,
|
|
13
|
+
)
|
|
14
|
+
from pydantic import BaseModel, create_model
|
|
15
|
+
from .registry import ConnectorRegistry
|
|
16
|
+
from .utils import convert_type_hint_to_json_schema
|
|
17
|
+
|
|
18
|
+
|
|
19
|
+
F = TypeVar("F", bound=Callable[..., Any])
|
|
20
|
+
|
|
21
|
+
|
|
22
|
+
def tool(
    name: Optional[str] = None,
    desc: str = "",
    tags: Optional[List[str]] = None,
) -> Callable[[F], F]:
    """
    Decorator to register a function as a tool.

    Side effect: registers a kwargs-only wrapper of the function with
    ConnectorRegistry under `name` (or the function's own name), together
    with a JSON schema derived from its signature. The ORIGINAL function
    is returned unchanged, so decorated code can still call it normally;
    only registry-driven invocation goes through the wrapper.

    Args:
        name: Optional name for the tool. Defaults to the function name.
        desc: Description of the tool.
        tags: Optional list of tags for the tool.

    Returns:
        The decorated function.
    """
    tags_list: List[str] = tags or []

    def decorator(fn: F) -> F:
        sig = inspect.signature(fn)
        type_hints = get_type_hints(fn)

        # Create a clean schema structure
        schema: Dict[str, Any] = {"type": "object", "properties": {}}
        required: List[str] = []

        # Process each parameter
        for param_name, param in sig.parameters.items():
            # Get the type hint; parameters without annotations fall back
            # to Any and end up schematized as {"type": "string"}.
            type_hint = type_hints.get(param_name, Any)

            # Determine if the parameter is required (no default value).
            # NOTE(review): default values are not recorded in the schema's
            # "default" field -- confirm whether consumers need them.
            if param.default is inspect.Parameter.empty:
                required.append(param_name)

            # Convert type hint to JSON schema
            schema["properties"][param_name] = convert_type_hint_to_json_schema(type_hint)

        # If there are required fields, add them to the schema
        if required:
            schema["required"] = required

        # Check if the function is async
        is_async = inspect.iscoroutinefunction(fn)

        # Create a wrapper function that converts dictionaries to Pydantic
        # models before delegating; a separate async variant is needed so
        # the coroutine is awaited rather than returned.
        if is_async:
            # Type ignore for the conditional function variants issue
            async def wrapped_fn(**kwargs: Any) -> Any:  # type: ignore
                converted_kwargs = convert_kwargs(kwargs, type_hints)
                return await fn(**converted_kwargs)

        else:

            def wrapped_fn(**kwargs: Any) -> Any:  # type: ignore
                converted_kwargs = convert_kwargs(kwargs, type_hints)
                return fn(**converted_kwargs)

        # Preserve the original function's name, docstring, and annotations
        # on the wrapper (a manual subset of functools.wraps).
        wrapped_fn.__name__ = fn.__name__
        wrapped_fn.__doc__ = fn.__doc__
        wrapped_fn.__annotations__ = fn.__annotations__

        tool_name = name or fn.__name__
        # Register the WRAPPER (not fn) so registry-driven calls get the
        # dict -> Pydantic conversion; raises ValueError on duplicates.
        ConnectorRegistry.register_tool(
            name=tool_name,
            description=desc,
            fn=wrapped_fn,
            param_schema=schema,
            tags=tags_list,
        )
        return fn

    return decorator
|
|
96
|
+
|
|
97
|
+
|
|
98
|
+
def convert_kwargs(kwargs: Dict[str, Any], type_hints: Dict[str, Any]) -> Dict[str, Any]:
    """Coerce dict-shaped arguments into Pydantic models where hints ask for it.

    A value whose hint is a BaseModel subclass (or List[BaseModel]) is run
    through model_validate; every other value passes through untouched.
    """
    out: Dict[str, Any] = {}
    for key, value in kwargs.items():
        if key not in type_hints:
            # Unannotated parameter: nothing to convert.
            out[key] = value
            continue

        hint = type_hints[key]

        # dict -> single Pydantic model (hasattr guard keeps issubclass
        # from raising on non-class hints).
        if (
            isinstance(value, dict)
            and hasattr(hint, "model_validate")
            and issubclass(hint, BaseModel)
        ):
            out[key] = hint.model_validate(value)
            continue

        # list of dicts -> List[PydanticModel]
        if (
            isinstance(value, list)
            and get_origin(hint) is list
            and len(get_args(hint)) > 0
            and hasattr(get_args(hint)[0], "model_validate")
            and issubclass(get_args(hint)[0], BaseModel)
        ):
            element_cls = get_args(hint)[0]
            out[key] = [
                element_cls.model_validate(item) if isinstance(item, dict) else item
                for item in value
            ]
            continue

        out[key] = value

    return out
|
|
@@ -0,0 +1,208 @@
|
|
|
1
|
+
from typing import Any, Dict, List, Optional, Union, get_type_hints, get_origin, get_args, Type
|
|
2
|
+
from enum import Enum
|
|
3
|
+
from pydantic import BaseModel
|
|
4
|
+
|
|
5
|
+
|
|
6
|
+
def convert_type_hint_to_json_schema(type_hint: Any) -> Dict[str, Any]:
    """
    Convert a Python type hint to a JSON schema representation.
    Handles primitive types, lists, tuples, dicts, unions/optionals,
    enums, and Pydantic models; anything unrecognized maps to "string".
    """
    # Handle None type
    if type_hint is type(None):
        return {"type": "null"}

    # Handle primitive types
    if type_hint in (int, float, str, bool):
        return _convert_primitive_type(type_hint)

    # Handle Pydantic models (the hasattr guard keeps issubclass from
    # raising TypeError on non-class hints such as List[int]).
    if hasattr(type_hint, "model_json_schema") and issubclass(type_hint, BaseModel):
        return _convert_pydantic_type(type_hint)

    # Handle container types (list, tuple, dict)
    origin = get_origin(type_hint)
    if origin is list:
        return _convert_list_type(type_hint)
    if origin is tuple:
        return _convert_tuple_type(type_hint)
    if origin is dict:
        return _convert_dict_type(type_hint)

    # Handle Union types. Optional[X] is just Union[X, None], for which
    # get_origin() also reports Union -- a separate "origin is Optional"
    # check can never match (it was dead code and has been removed).
    if origin is Union:
        return _convert_union_type(type_hint)

    # Handle Enum types
    if isinstance(type_hint, type) and issubclass(type_hint, Enum):
        return _convert_enum_type(type_hint)

    # Default to string for unknown types
    return {"type": "string"}
|
|
44
|
+
|
|
45
|
+
|
|
46
|
+
def _convert_primitive_type(type_hint: Type) -> Dict[str, Any]:
|
|
47
|
+
"""Convert primitive Python types to JSON schema types."""
|
|
48
|
+
type_mapping = {
|
|
49
|
+
int: {"type": "integer"},
|
|
50
|
+
float: {"type": "number"},
|
|
51
|
+
str: {"type": "string"},
|
|
52
|
+
bool: {"type": "boolean"},
|
|
53
|
+
}
|
|
54
|
+
return type_mapping.get(type_hint, {"type": "string"})
|
|
55
|
+
|
|
56
|
+
|
|
57
|
+
def _convert_list_type(type_hint: Any) -> Dict[str, Any]:
    """Map List[X] to a JSON-schema array whose items follow X's schema."""
    args = get_args(type_hint)
    element = args[0] if args else Any
    return {"type": "array", "items": convert_type_hint_to_json_schema(element)}
|
|
61
|
+
|
|
62
|
+
|
|
63
|
+
def _convert_tuple_type(type_hint: Any) -> Dict[str, Any]:
    """Convert Python tuple type to JSON schema array with constraints."""
    args = get_args(type_hint)
    # Bare `tuple` / `Tuple` carries no element info: emit an
    # unconstrained array.
    if not args:
        return {"type": "array"}

    # Handle tuple with variable args (Tuple[int, ...]): homogeneous and
    # any length, i.e. the same shape as a list schema.
    if len(args) == 2 and args[1] is Ellipsis:
        return {"type": "array", "items": convert_type_hint_to_json_schema(args[0])}

    # Handle fixed-length tuples: pin the length and give one schema per
    # position. NOTE(review): the list-valued "items" form is JSON Schema
    # draft-04 tuple validation; drafts 2020-12+ spell this "prefixItems"
    # -- confirm which draft consumers validate against.
    return {
        "type": "array",
        "minItems": len(args),
        "maxItems": len(args),
        "items": [convert_type_hint_to_json_schema(arg) for arg in args],
    }
|
|
80
|
+
|
|
81
|
+
|
|
82
|
+
def _convert_dict_type(type_hint: Any) -> Dict[str, Any]:
    """Convert Python dict type to JSON schema object.

    JSON object keys are always strings, so the key type argument is
    ignored; only the value type shapes the schema (the previous
    `key_type` normalization was a dead store -- computed, conditionally
    reassigned, and never read -- and has been removed).
    """
    args = get_args(type_hint)
    value_type = args[1] if len(args) > 1 else Any
    return {"type": "object", "additionalProperties": convert_type_hint_to_json_schema(value_type)}
|
|
93
|
+
|
|
94
|
+
|
|
95
|
+
def _convert_union_type(type_hint: Any) -> Dict[str, Any]:
    """Map Union[...] to anyOf; Union[X, None] collapses to X's schema."""
    members = get_args(type_hint)

    # Optional[X] is Union[X, None]: unwrap instead of emitting anyOf.
    if len(members) == 2 and type(None) in members:
        return _convert_optional_union(members)

    return {"anyOf": [convert_type_hint_to_json_schema(m) for m in members]}
|
|
105
|
+
|
|
106
|
+
|
|
107
|
+
def _convert_optional_union(union_args: tuple) -> Dict[str, Any]:
    """Handle Optional as Union[Type, None]: schema of the non-None member."""
    if union_args[1] is type(None):
        payload = union_args[0]
    else:
        payload = union_args[1]
    return convert_type_hint_to_json_schema(payload)
|
|
112
|
+
|
|
113
|
+
|
|
114
|
+
def _convert_optional_type(type_hint: Any) -> Dict[str, Any]:
    """Convert Python Optional type to JSON schema (unwraps the inner type)."""
    inner = get_args(type_hint)[0]
    return convert_type_hint_to_json_schema(inner)
|
|
118
|
+
|
|
119
|
+
|
|
120
|
+
def _convert_enum_type(type_hint: Type[Enum]) -> Dict[str, Any]:
|
|
121
|
+
"""Convert Python Enum type to JSON schema enum."""
|
|
122
|
+
enum_values = [item.value for item in type_hint]
|
|
123
|
+
return {"enum": enum_values}
|
|
124
|
+
|
|
125
|
+
|
|
126
|
+
def _convert_pydantic_type(model: Type[BaseModel]) -> Dict[str, Any]:
    """
    Convert a Pydantic model to a flattened JSON schema without references.
    """
    # Get the model schema (pydantic v2 API; includes a "$defs" section
    # for any nested models).
    schema = model.model_json_schema()

    # Create a flattened version without references.
    # NOTE(review): only "properties", "required", and "title" are copied
    # below -- other top-level keys (e.g. "description") are dropped.
    # Confirm that is intended.
    flattened_schema: Dict[str, Any] = {"type": "object", "properties": {}}

    # Get the definitions section
    defs = schema.get("$defs", {})

    # Copy properties and resolve any references inline so consumers see
    # a self-contained schema.
    if "properties" in schema:
        flattened_schema["properties"] = _resolve_references(schema["properties"], defs)

    # Copy required fields if present
    if "required" in schema:
        flattened_schema["required"] = schema["required"]

    # Copy title if present
    if "title" in schema:
        flattened_schema["title"] = schema["title"]

    return flattened_schema
|
|
152
|
+
|
|
153
|
+
|
|
154
|
+
def _resolve_references(obj: Any, schema_defs: Dict[str, Any]) -> Any:
|
|
155
|
+
"""
|
|
156
|
+
Recursively resolve JSON schema references.
|
|
157
|
+
|
|
158
|
+
Args:
|
|
159
|
+
obj: The object to resolve references in
|
|
160
|
+
schema_defs: The definitions dictionary containing referenced schemas
|
|
161
|
+
|
|
162
|
+
Returns:
|
|
163
|
+
The object with all references resolved
|
|
164
|
+
"""
|
|
165
|
+
if isinstance(obj, dict):
|
|
166
|
+
# If this is a reference, resolve it
|
|
167
|
+
if "$ref" in obj and len(obj) == 1:
|
|
168
|
+
return _resolve_single_reference(obj, schema_defs)
|
|
169
|
+
|
|
170
|
+
# Process each property in the object
|
|
171
|
+
result = {}
|
|
172
|
+
for key, value in obj.items():
|
|
173
|
+
if key == "items" and "$ref" in value:
|
|
174
|
+
# Special handling for array items with references
|
|
175
|
+
ref_path = value["$ref"].split("/")[-1]
|
|
176
|
+
if ref_path in schema_defs:
|
|
177
|
+
# Replace with the referenced schema
|
|
178
|
+
result[key] = _resolve_references(schema_defs[ref_path], schema_defs)
|
|
179
|
+
else:
|
|
180
|
+
# Recursively process the value
|
|
181
|
+
result[key] = _resolve_references(value, schema_defs)
|
|
182
|
+
return result
|
|
183
|
+
elif isinstance(obj, list):
|
|
184
|
+
# Process each item in the list
|
|
185
|
+
return [_resolve_references(item, schema_defs) for item in obj]
|
|
186
|
+
else:
|
|
187
|
+
# Return primitive values as is
|
|
188
|
+
return obj
|
|
189
|
+
|
|
190
|
+
|
|
191
|
+
def _resolve_single_reference(ref_obj: Dict[str, Any], schema_defs: Dict[str, Any]) -> Any:
    """
    Resolve a single reference object.

    Args:
        ref_obj: The reference object containing a $ref key
        schema_defs: The definitions dictionary containing referenced schemas

    Returns:
        The resolved reference
    """
    # Only the final path segment is used: "#/$defs/Foo" -> "Foo".
    ref_path = ref_obj["$ref"].split("/")[-1]
    if ref_path in schema_defs:
        # Replace with a copy of the referenced schema (shallow copy:
        # nested containers are shared until _resolve_references rebuilds
        # them below).
        resolved = schema_defs[ref_path].copy()
        # Recursively resolve any references in the referenced schema.
        # NOTE(review): a self-referential model would recurse without
        # bound through this mutual recursion -- confirm recursive models
        # are out of scope.
        return _resolve_references(resolved, schema_defs)
    return ref_obj  # Reference not found, return as is
|
|
@@ -0,0 +1,49 @@
|
|
|
1
|
+
[tool.poetry]
|
|
2
|
+
name = "agent-handler-sdk"
|
|
3
|
+
version = "0.1.0"
|
|
4
|
+
description = "Agent Handler SDK for defining and invoking LLM tools"
|
|
5
|
+
authors = ["David Dalmaso <david.dalmaso@merge.dev>"]
|
|
6
|
+
packages = [
|
|
7
|
+
{ include = "agent_handler" }
|
|
8
|
+
]
|
|
9
|
+
|
|
10
|
+
[tool.poetry.dependencies]
|
|
11
|
+
python = ">=3.10"
|
|
12
|
+
pydantic = "^2.0"
|
|
13
|
+
jsonschema = "^4.0"
|
|
14
|
+
|
|
15
|
+
[tool.poetry.dev-dependencies]
|
|
16
|
+
pytest = "^7.0"
|
|
17
|
+
pytest-asyncio = "^0.20.3"
|
|
18
|
+
mypy = "^1.4.1"
|
|
19
|
+
black = "^23.3.0"
|
|
20
|
+
pre-commit = "^2.20.0"
|
|
21
|
+
types-jsonschema = "^4.17.0"
|
|
22
|
+
|
|
23
|
+
[tool.poetry.group.dev.dependencies]
|
|
24
|
+
pytest-asyncio = "^0.20.3"
|
|
25
|
+
pre-commit = "^2.20.0"
|
|
26
|
+
types-jsonschema = "^4.23.0.20241208"
|
|
27
|
+
|
|
28
|
+
[tool.poetry.scripts]
|
|
29
|
+
ahs-scaffold = "agent_handler.cli:scaffold_connector"
|
|
30
|
+
|
|
31
|
+
[build-system]
|
|
32
|
+
requires = ["poetry-core>=1.0.0"]
|
|
33
|
+
build-backend = "poetry.core.masonry.api"
|
|
34
|
+
|
|
35
|
+
[tool.black]
|
|
36
|
+
line-length = 120
|
|
37
|
+
target-version = ["py310"]
|
|
38
|
+
include = '\.pyi?$'
|
|
39
|
+
|
|
40
|
+
[tool.mypy]
|
|
41
|
+
python_version = "3.10"
|
|
42
|
+
warn_return_any = true
|
|
43
|
+
warn_unused_configs = true
|
|
44
|
+
disallow_untyped_defs = true
|
|
45
|
+
disallow_incomplete_defs = true
|
|
46
|
+
check_untyped_defs = true
|
|
47
|
+
disallow_untyped_decorators = true
|
|
48
|
+
no_implicit_optional = true
|
|
49
|
+
strict_optional = true
|