agnt5-0.2.8a13-cp310-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- agnt5/__init__.py +96 -0
- agnt5/_compat.py +16 -0
- agnt5/_core.abi3.so +0 -0
- agnt5/_retry_utils.py +169 -0
- agnt5/_schema_utils.py +312 -0
- agnt5/_sentry.py +515 -0
- agnt5/_telemetry.py +182 -0
- agnt5/agent.py +1774 -0
- agnt5/client.py +741 -0
- agnt5/context.py +178 -0
- agnt5/entity.py +795 -0
- agnt5/exceptions.py +102 -0
- agnt5/function.py +321 -0
- agnt5/lm.py +969 -0
- agnt5/tool.py +657 -0
- agnt5/tracing.py +196 -0
- agnt5/types.py +110 -0
- agnt5/version.py +19 -0
- agnt5/worker.py +1701 -0
- agnt5/workflow.py +1087 -0
- agnt5-0.2.8a13.dist-info/METADATA +26 -0
- agnt5-0.2.8a13.dist-info/RECORD +23 -0
- agnt5-0.2.8a13.dist-info/WHEEL +5 -0
agnt5/__init__.py
ADDED
@@ -0,0 +1,96 @@
"""
AGNT5 Python SDK - Build durable, resilient agent-first applications.

This SDK provides high-level components for building agents, tools, and workflows
with built-in durability guarantees and state management.
"""

from ._compat import _import_error, _rust_available
from .agent import Agent, AgentContext, AgentRegistry, AgentResult, Handoff, agent, handoff
from .client import Client, RunError
from .context import Context
from .function import FunctionContext
from .workflow import WorkflowContext
from .entity import (
    Entity,
    EntityRegistry,
    EntityStateAdapter,
    EntityType,
    create_entity_context,
    with_entity_context,
)
from .exceptions import (
    AGNT5Error,
    CheckpointError,
    ConfigurationError,
    ExecutionError,
    RetryError,
    StateError,
    WaitingForUserInputException,
)
from .function import FunctionRegistry, function
from .tool import AskUserTool, RequestApprovalTool, Tool, ToolRegistry, tool
from .types import BackoffPolicy, BackoffType, FunctionConfig, RetryPolicy, WorkflowConfig
from .version import _get_version
from .worker import Worker
from .workflow import WorkflowRegistry, workflow

# Expose simplified language model API (recommended)
from . import lm

# Expose Sentry utilities for custom error tracking (optional)
from . import _sentry as sentry

__version__ = _get_version()

__all__ = [
    # Version
    "__version__",
    # Core components
    "Context",
    "FunctionContext",
    "WorkflowContext",
    "AgentContext",
    "Client",
    "Worker",
    "function",
    "FunctionRegistry",
    "Entity",
    "EntityType",
    "EntityRegistry",
    "EntityStateAdapter",
    "with_entity_context",
    "create_entity_context",
    "workflow",
    "WorkflowRegistry",
    "tool",
    "Tool",
    "ToolRegistry",
    "AskUserTool",
    "RequestApprovalTool",
    "agent",
    "Agent",
    "AgentRegistry",
    "AgentResult",
    "Handoff",
    "handoff",
    # Types
    "RetryPolicy",
    "BackoffPolicy",
    "BackoffType",
    "FunctionConfig",
    "WorkflowConfig",
    # Exceptions
    "AGNT5Error",
    "ConfigurationError",
    "ExecutionError",
    "RetryError",
    "StateError",
    "CheckpointError",
    "WaitingForUserInputException",
    "RunError",
    # Language Model (Simplified API)
    "lm",
    # Sentry integration (Optional)
    "sentry",
]
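For orientation, here is a brief usage sketch (not part of the wheel) that touches only names re-exported by this `__init__.py`. The `max_attempts` keyword on `RetryPolicy` is assumed from how `agnt5._retry_utils.parse_retry_policy` constructs it later in this diff; everything else comes straight from the export list above.

# Illustrative sketch - not part of the package contents above.
import agnt5

print(agnt5.__version__)  # resolved via agnt5.version._get_version()

# RetryPolicy/BackoffPolicy are re-exported from agnt5.types; the max_attempts
# keyword mirrors RetryPolicy(max_attempts=...) as used in agnt5._retry_utils.
policy = agnt5.RetryPolicy(max_attempts=3)
backoff = agnt5.BackoffPolicy()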
agnt5/_compat.py
ADDED
@@ -0,0 +1,16 @@
"""
Compatibility utilities for the AGNT5 Python SDK.

This module handles runtime compatibility checks and provides utilities
for cross-referencing throughout the project.
"""

# Check if Rust core is available
try:
    from . import _core

    _rust_available = True
    _import_error = None
except ImportError as e:
    _rust_available = False
    _import_error = e
agnt5/_core.abi3.so
ADDED
Binary file
agnt5/_retry_utils.py
ADDED
@@ -0,0 +1,169 @@
"""Retry and backoff utilities for durable execution.

This module provides utilities for parsing retry policies, calculating backoff delays,
and executing functions with retry logic.
"""

from __future__ import annotations

import asyncio
from typing import Any, Dict, Optional, Union

from .exceptions import RetryError
from .types import BackoffPolicy, BackoffType, HandlerFunc, RetryPolicy


def parse_retry_policy(retries: Optional[Union[int, Dict[str, Any], RetryPolicy]]) -> RetryPolicy:
    """Parse retry configuration from various forms.

    Args:
        retries: Can be:
            - int: max_attempts (e.g., 5)
            - dict: RetryPolicy parameters (e.g., {"max_attempts": 5, "initial_interval_ms": 1000})
            - RetryPolicy: pass through
            - None: use default

    Returns:
        RetryPolicy instance
    """
    if retries is None:
        return RetryPolicy()
    elif isinstance(retries, int):
        return RetryPolicy(max_attempts=retries)
    elif isinstance(retries, dict):
        return RetryPolicy(**retries)
    elif isinstance(retries, RetryPolicy):
        return retries
    else:
        raise TypeError(f"retries must be int, dict, or RetryPolicy, got {type(retries)}")


def parse_backoff_policy(backoff: Optional[Union[str, Dict[str, Any], BackoffPolicy]]) -> BackoffPolicy:
    """Parse backoff configuration from various forms.

    Args:
        backoff: Can be:
            - str: backoff type ("constant", "linear", "exponential")
            - dict: BackoffPolicy parameters (e.g., {"type": "exponential", "multiplier": 2.0})
            - BackoffPolicy: pass through
            - None: use default

    Returns:
        BackoffPolicy instance
    """
    if backoff is None:
        return BackoffPolicy()
    elif isinstance(backoff, str):
        backoff_type = BackoffType(backoff.lower())
        return BackoffPolicy(type=backoff_type)
    elif isinstance(backoff, dict):
        # Convert string type to enum if present
        if "type" in backoff and isinstance(backoff["type"], str):
            backoff = {**backoff, "type": BackoffType(backoff["type"].lower())}
        return BackoffPolicy(**backoff)
    elif isinstance(backoff, BackoffPolicy):
        return backoff
    else:
        raise TypeError(f"backoff must be str, dict, or BackoffPolicy, got {type(backoff)}")


def calculate_backoff_delay(
    attempt: int,
    retry_policy: RetryPolicy,
    backoff_policy: BackoffPolicy,
) -> float:
    """Calculate backoff delay in seconds based on attempt number.

    Args:
        attempt: Current attempt number (0-indexed)
        retry_policy: Retry configuration
        backoff_policy: Backoff configuration

    Returns:
        Delay in seconds
    """
    if backoff_policy.type == BackoffType.CONSTANT:
        delay_ms = retry_policy.initial_interval_ms
    elif backoff_policy.type == BackoffType.LINEAR:
        delay_ms = retry_policy.initial_interval_ms * (attempt + 1)
    else:  # EXPONENTIAL
        delay_ms = retry_policy.initial_interval_ms * (backoff_policy.multiplier**attempt)

    # Cap at max_interval_ms
    delay_ms = min(delay_ms, retry_policy.max_interval_ms)
    return delay_ms / 1000.0  # Convert to seconds


async def execute_with_retry(
    handler: HandlerFunc,
    ctx: Any,  # FunctionContext, but avoid circular import
    retry_policy: RetryPolicy,
    backoff_policy: BackoffPolicy,
    needs_context: bool,
    *args: Any,
    **kwargs: Any,
) -> Any:
    """Execute handler with retry logic.

    Args:
        handler: The function handler to execute
        ctx: Context for logging and attempt tracking (FunctionContext)
        retry_policy: Retry configuration
        backoff_policy: Backoff configuration
        needs_context: Whether handler accepts ctx parameter
        *args: Arguments to pass to handler (excluding ctx if needs_context=False)
        **kwargs: Keyword arguments to pass to handler

    Returns:
        Result of successful execution

    Raises:
        RetryError: If all retry attempts fail
    """
    # Import here to avoid circular dependency
    from .function import FunctionContext

    last_error: Optional[Exception] = None

    for attempt in range(retry_policy.max_attempts):
        try:
            # Create context for this attempt (FunctionContext is immutable)
            attempt_ctx = FunctionContext(
                run_id=ctx.run_id,
                attempt=attempt,
                retry_policy=retry_policy
            )

            # Execute handler (pass context only if needed)
            if needs_context:
                result = await handler(attempt_ctx, *args, **kwargs)
            else:
                result = await handler(*args, **kwargs)
            return result

        except Exception as e:
            last_error = e
            ctx.logger.warning(
                f"Function execution failed (attempt {attempt + 1}/{retry_policy.max_attempts}): {e}"
            )

            # If this was the last attempt, raise RetryError
            if attempt == retry_policy.max_attempts - 1:
                raise RetryError(
                    f"Function failed after {retry_policy.max_attempts} attempts",
                    attempts=retry_policy.max_attempts,
                    last_error=e,
                )

            # Calculate backoff delay
            delay = calculate_backoff_delay(attempt, retry_policy, backoff_policy)
            ctx.logger.info(f"Retrying in {delay:.2f} seconds...")
            await asyncio.sleep(delay)

    # Should never reach here, but for type safety
    assert last_error is not None
    raise RetryError(
        f"Function failed after {retry_policy.max_attempts} attempts",
        attempts=retry_policy.max_attempts,
        last_error=last_error,
    )
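The parsing and delay helpers above are pure functions, so they can be previewed directly. The sketch below (not part of the package) uses only the signatures shown; the dict keys come from the `parse_retry_policy` docstring, and the default values of the remaining `RetryPolicy`/`BackoffPolicy` fields live in `agnt5/types.py`, which is not expanded in this diff.

# Illustrative sketch - preview the delay schedule produced by the helpers above.
from agnt5._retry_utils import (
    calculate_backoff_delay,
    parse_backoff_policy,
    parse_retry_policy,
)

retry = parse_retry_policy({"max_attempts": 5, "initial_interval_ms": 1000})
backoff = parse_backoff_policy("exponential")

for attempt in range(retry.max_attempts):
    delay = calculate_backoff_delay(attempt, retry, backoff)
    print(f"attempt {attempt + 1}: wait {delay:.2f}s before the next try")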
agnt5/_schema_utils.py
ADDED
@@ -0,0 +1,312 @@
"""Schema conversion utilities for structured output support.

This module provides utilities to convert Python dataclasses and Pydantic models
to JSON Schema format for LLM structured output generation, function signatures,
and tool definitions.
"""

from __future__ import annotations

import dataclasses
import inspect
from typing import Any, Callable, Dict, Optional, Tuple, get_args, get_origin, get_type_hints

try:
    from pydantic import BaseModel
    PYDANTIC_AVAILABLE = True
except ImportError:
    BaseModel = None  # type: ignore
    PYDANTIC_AVAILABLE = False


def detect_format_type(response_format: Any) -> Tuple[str, Dict[str, Any]]:
    """Auto-detect format type and convert to JSON schema.

    Args:
        response_format: Pydantic model, dataclass, or dict

    Returns:
        Tuple of (format_type, json_schema)
        - format_type: "pydantic", "dataclass", or "raw"
        - json_schema: JSON schema dictionary

    Raises:
        ValueError: If format type is not supported
    """
    # Check for Pydantic model
    if PYDANTIC_AVAILABLE and isinstance(response_format, type) and issubclass(response_format, BaseModel):
        return 'pydantic', pydantic_to_json_schema(response_format)

    # Check for dataclass
    if dataclasses.is_dataclass(response_format):
        return 'dataclass', dataclass_to_json_schema(response_format)

    # Check for raw dict
    if isinstance(response_format, dict):
        return 'raw', response_format

    raise ValueError(
        f"Unsupported response_format type: {type(response_format)}. "
        f"Expected Pydantic model, dataclass, or dict."
    )


def pydantic_to_json_schema(model: type) -> Dict[str, Any]:
    """Convert Pydantic model to JSON schema.

    Supports both Pydantic v1 and v2 APIs.

    Args:
        model: Pydantic BaseModel class

    Returns:
        JSON schema dictionary
    """
    if not PYDANTIC_AVAILABLE:
        raise ImportError("Pydantic is not installed. Install with: pip install pydantic")

    if not (isinstance(model, type) and issubclass(model, BaseModel)):
        raise ValueError(f"Expected Pydantic BaseModel class, got {type(model)}")

    try:
        # Try Pydantic v2 API first
        if hasattr(model, 'model_json_schema'):
            schema = model.model_json_schema()
        # Fall back to Pydantic v1 API
        elif hasattr(model, 'schema'):
            schema = model.schema()
        else:
            # Fallback for edge cases
            schema = {"type": "object"}
    except Exception:
        # If schema generation fails, return basic object schema
        schema = {"type": "object"}

    # Ensure we have the required fields
    if "type" not in schema:
        schema["type"] = "object"

    return schema


def dataclass_to_json_schema(cls: type) -> Dict[str, Any]:
    """Convert Python dataclass to JSON schema.

    Args:
        cls: Dataclass type

    Returns:
        JSON schema dictionary
    """
    if not dataclasses.is_dataclass(cls):
        raise ValueError(f"Expected dataclass, got {type(cls)}")

    properties: Dict[str, Any] = {}
    required: list[str] = []

    for field in dataclasses.fields(cls):
        # Convert field type to JSON schema
        field_schema = _type_to_schema(field.type)
        properties[field.name] = field_schema

        # Check if field is required (no default value)
        if field.default == dataclasses.MISSING and field.default_factory == dataclasses.MISSING:  # type: ignore
            required.append(field.name)

    schema = {
        "type": "object",
        "properties": properties,
        "required": required,
        "additionalProperties": False
    }

    return schema


def _type_to_schema(python_type: Any) -> Dict[str, Any]:
    """Convert Python type hint to JSON schema type.

    Args:
        python_type: Python type annotation

    Returns:
        JSON schema type definition
    """
    # Handle Optional types
    origin = get_origin(python_type)
    args = get_args(python_type)

    # Handle Optional[X] which is Union[X, None]
    if origin is type(None) or python_type is type(None):
        return {"type": "null"}

    # Handle Union types (including Optional)
    if origin is Union:  # type: ignore
        # Filter out None from union types
        non_none_types = [t for t in args if t is not type(None)]
        if len(non_none_types) == 1:
            # Optional[X] case
            return _type_to_schema(non_none_types[0])
        else:
            # True Union - use anyOf
            return {"anyOf": [_type_to_schema(t) for t in non_none_types]}

    # Handle List types
    if origin is list:
        item_type = args[0] if args else Any
        return {
            "type": "array",
            "items": _type_to_schema(item_type)
        }

    # Handle Dict types
    if origin is dict:
        value_type = args[1] if len(args) > 1 else Any
        return {
            "type": "object",
            "additionalProperties": _type_to_schema(value_type)
        }

    # Handle basic types
    if python_type == str:
        return {"type": "string"}
    elif python_type == int:
        return {"type": "integer"}
    elif python_type == float:
        return {"type": "number"}
    elif python_type == bool:
        return {"type": "boolean"}
    elif python_type == dict:
        return {"type": "object"}
    elif python_type == list:
        return {"type": "array"}
    elif python_type == Any:
        return {}  # Any type - no restrictions

    # Fallback for unknown types
    return {"type": "string", "description": f"Type: {python_type}"}


# Import Union for type checking
try:
    from typing import Union
except ImportError:
    Union = None  # type: ignore


def is_pydantic_model(type_hint: Any) -> bool:
    """Check if a type hint is a Pydantic model.

    Args:
        type_hint: Type annotation to check

    Returns:
        True if type_hint is a Pydantic BaseModel subclass
    """
    if not PYDANTIC_AVAILABLE:
        return False

    try:
        return isinstance(type_hint, type) and issubclass(type_hint, BaseModel)
    except TypeError:
        return False


def extract_function_schemas(func: Callable[..., Any]) -> Tuple[Optional[Dict[str, Any]], Optional[Dict[str, Any]]]:
    """Extract input and output schemas from function type hints.

    Supports both plain Python types and Pydantic models.
    Pydantic models provide richer validation and schema generation.

    Args:
        func: Function to extract schemas from

    Returns:
        Tuple of (input_schema, output_schema) where each is a JSON Schema dict or None
    """
    try:
        # Get type hints
        hints = get_type_hints(func)
        sig = inspect.signature(func)

        # Build input schema from parameters (excluding 'ctx')
        input_properties = {}
        required_params = []

        for param_name, param in sig.parameters.items():
            if param_name == "ctx":
                continue

            # Get type hint for this parameter
            if param_name in hints:
                param_type = hints[param_name]

                # Check if it's a Pydantic model
                if is_pydantic_model(param_type):
                    # Use Pydantic's schema generation
                    input_properties[param_name] = pydantic_to_json_schema(param_type)
                else:
                    # Use basic type conversion
                    input_properties[param_name] = _type_to_schema(param_type)
            else:
                # No type hint, use generic object
                input_properties[param_name] = {"type": "object"}

            # Check if parameter is required (no default value)
            if param.default is inspect.Parameter.empty:
                required_params.append(param_name)

        input_schema = None
        if input_properties:
            input_schema = {
                "type": "object",
                "properties": input_properties,
            }
            if required_params:
                input_schema["required"] = required_params

            # Add description from docstring if available
            if func.__doc__:
                docstring = inspect.cleandoc(func.__doc__)
                first_line = docstring.split('\n')[0].strip()
                if first_line:
                    input_schema["description"] = first_line

        # Build output schema from return type hint
        output_schema = None
        if "return" in hints:
            return_type = hints["return"]

            # Check if return type is a Pydantic model
            if is_pydantic_model(return_type):
                output_schema = pydantic_to_json_schema(return_type)
            else:
                output_schema = _type_to_schema(return_type)

        return input_schema, output_schema

    except Exception:
        # If schema extraction fails, return None schemas
        return None, None


def extract_function_metadata(func: Callable[..., Any]) -> Dict[str, str]:
    """Extract metadata from function including description from docstring.

    Args:
        func: Function to extract metadata from

    Returns:
        Dictionary with metadata fields like 'description'
    """
    metadata = {}

    # Extract description from docstring
    if func.__doc__:
        # Get first line of docstring as description
        docstring = inspect.cleandoc(func.__doc__)
        first_line = docstring.split('\n')[0].strip()
        if first_line:
            metadata["description"] = first_line

    return metadata
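To make the schema conversion concrete, here is a self-contained sketch (not part of the package; `Order` and `create_order` are hypothetical names) that exercises `dataclass_to_json_schema` and `extract_function_schemas` exactly as defined above:

# Illustrative sketch - not part of the package.
import dataclasses
from typing import Optional

from agnt5._schema_utils import dataclass_to_json_schema, extract_function_schemas


@dataclasses.dataclass
class Order:
    item: str
    quantity: int
    note: Optional[str] = None


# Dataclass -> JSON Schema: `note` has a default, so only item/quantity are required.
print(dataclass_to_json_schema(Order))


def create_order(ctx, item: str, quantity: int = 1) -> dict:
    """Create an order for the given item."""
    return {"item": item, "quantity": quantity}


# `ctx` is skipped, `item` is required, `quantity` is optional, and the docstring's
# first line becomes the input schema's description.
input_schema, output_schema = extract_function_schemas(create_order)
print(input_schema)
print(output_schema)  # {"type": "object"} for a plain `dict` return annotation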