agnt5 0.3.2a1__cp310-abi3-manylinux_2_34_aarch64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of agnt5 might be problematic. Click here for more details.

agnt5/__init__.py ADDED
@@ -0,0 +1,119 @@
1
+ """
2
+ AGNT5 Python SDK - Build durable, resilient agent-first applications.
3
+
4
+ This SDK provides high-level components for building agents, tools, and workflows
5
+ with built-in durability guarantees and state management.
6
+ """
7
+
8
+ from ._compat import _import_error, _rust_available
9
+ from .agent import Agent, AgentContext, AgentRegistry, AgentResult, Handoff, agent, handoff
10
+ from .checkpoint import CheckpointClient
11
+ from .client import AsyncClient, Client, RunError
12
+ from .context import Context
13
+ from .function import FunctionContext
14
+ from .workflow import WorkflowContext
15
+ from .entity import (
16
+ Entity,
17
+ EntityRegistry,
18
+ EntityStateAdapter,
19
+ EntityType,
20
+ StateType,
21
+ create_entity_context,
22
+ query,
23
+ with_entity_context,
24
+ )
25
+ from .exceptions import (
26
+ AGNT5Error,
27
+ CheckpointError,
28
+ ConfigurationError,
29
+ ExecutionError,
30
+ RetryError,
31
+ StateError,
32
+ WaitingForUserInputException,
33
+ )
34
+ from .function import FunctionRegistry, function
35
+ from .memory import ConversationMemory, MemoryMessage, MemoryMetadata, MemoryResult, MemoryScope, SemanticMemory
36
+ from .tool import AskUserTool, RequestApprovalTool, Tool, ToolRegistry, tool
37
+ from .types import BackoffPolicy, BackoffType, FunctionConfig, RetryPolicy, WorkflowConfig
38
+ from .version import _get_version
39
+ from .worker import Worker
40
+ from .workflow import WorkflowRegistry, workflow
41
+
42
+ # Expose simplified language model API (recommended)
43
+ from . import lm
44
+
45
+ # Expose streaming events module for typed event streaming
46
+ from . import events
47
+ from .events import Event, EventType
48
+
49
+ # Expose Sentry utilities for custom error tracking (optional)
50
+ from . import _sentry as sentry
51
+
52
+ __version__ = _get_version()
53
+
54
+ __all__ = [
55
+ # Version
56
+ "__version__",
57
+ # Core components
58
+ "Context",
59
+ "FunctionContext",
60
+ "WorkflowContext",
61
+ "AgentContext",
62
+ "CheckpointClient",
63
+ "Client",
64
+ "AsyncClient",
65
+ "Worker",
66
+ "function",
67
+ "FunctionRegistry",
68
+ "Entity",
69
+ "EntityType",
70
+ "EntityRegistry",
71
+ "EntityStateAdapter",
72
+ "StateType",
73
+ "query",
74
+ "with_entity_context",
75
+ "create_entity_context",
76
+ "workflow",
77
+ "WorkflowRegistry",
78
+ "tool",
79
+ "Tool",
80
+ "ToolRegistry",
81
+ "AskUserTool",
82
+ "RequestApprovalTool",
83
+ "agent",
84
+ "Agent",
85
+ "AgentRegistry",
86
+ "AgentResult",
87
+ "Handoff",
88
+ "handoff",
89
+ # Memory
90
+ "ConversationMemory",
91
+ "MemoryMessage",
92
+ "MemoryMetadata",
93
+ "MemoryResult",
94
+ "MemoryScope",
95
+ "SemanticMemory",
96
+ # Types
97
+ "RetryPolicy",
98
+ "BackoffPolicy",
99
+ "BackoffType",
100
+ "FunctionConfig",
101
+ "WorkflowConfig",
102
+ # Exceptions
103
+ "AGNT5Error",
104
+ "ConfigurationError",
105
+ "ExecutionError",
106
+ "RetryError",
107
+ "StateError",
108
+ "CheckpointError",
109
+ "WaitingForUserInputException",
110
+ "RunError",
111
+ # Language Model (Simplified API)
112
+ "lm",
113
+ # Streaming Events (for typed SSE events)
114
+ "events",
115
+ "Event",
116
+ "EventType",
117
+ # Sentry integration (Optional)
118
+ "sentry",
119
+ ]
agnt5/_compat.py ADDED
@@ -0,0 +1,16 @@
1
+ """
2
+ Compatibility utilities for the AGNT5 Python SDK.
3
+
4
+ This module handles runtime compatibility checks and provides utilities
5
+ for cross-referencing throughout the project.
6
+ """
7
+
8
+ # Check if Rust core is available
9
+ try:
10
+ from . import _core
11
+
12
+ _rust_available = True
13
+ _import_error = None
14
+ except ImportError as e:
15
+ _rust_available = False
16
+ _import_error = e
agnt5/_core.abi3.so ADDED
Binary file
agnt5/_retry_utils.py ADDED
@@ -0,0 +1,196 @@
1
+ """Retry and backoff utilities for durable execution.
2
+
3
+ This module provides utilities for parsing retry policies, calculating backoff delays,
4
+ and executing functions with retry logic.
5
+ """
6
+
7
+ from __future__ import annotations
8
+
9
+ import asyncio
10
+ import inspect
11
+ from typing import Any, Dict, Optional, Union
12
+
13
+ from .exceptions import RetryError
14
+ from .types import BackoffPolicy, BackoffType, HandlerFunc, RetryPolicy
15
+
16
+
17
def parse_retry_policy(retries: Optional[Union[int, Dict[str, Any], RetryPolicy]]) -> RetryPolicy:
    """Normalize a retry specification into a RetryPolicy.

    Args:
        retries: One of:
            - int: shorthand for ``max_attempts`` (e.g., 5)
            - dict: keyword arguments for RetryPolicy
              (e.g., {"max_attempts": 5, "initial_interval_ms": 1000})
            - RetryPolicy: returned unchanged
            - None: the default RetryPolicy

    Returns:
        RetryPolicy instance

    Raises:
        TypeError: If ``retries`` has an unsupported type.
    """
    if retries is None:
        return RetryPolicy()
    if isinstance(retries, RetryPolicy):
        return retries
    if isinstance(retries, int):
        return RetryPolicy(max_attempts=retries)
    if isinstance(retries, dict):
        return RetryPolicy(**retries)
    raise TypeError(f"retries must be int, dict, or RetryPolicy, got {type(retries)}")
40
+
41
+
42
def parse_backoff_policy(backoff: Optional[Union[str, Dict[str, Any], BackoffPolicy]]) -> BackoffPolicy:
    """Normalize a backoff specification into a BackoffPolicy.

    Args:
        backoff: One of:
            - str: backoff type name ("constant", "linear", "exponential"),
              case-insensitive
            - dict: keyword arguments for BackoffPolicy
              (e.g., {"type": "exponential", "multiplier": 2.0})
            - BackoffPolicy: returned unchanged
            - None: the default BackoffPolicy

    Returns:
        BackoffPolicy instance

    Raises:
        TypeError: If ``backoff`` has an unsupported type.
    """
    if backoff is None:
        return BackoffPolicy()
    if isinstance(backoff, BackoffPolicy):
        return backoff
    if isinstance(backoff, str):
        return BackoffPolicy(type=BackoffType(backoff.lower()))
    if isinstance(backoff, dict):
        # Work on a copy; coerce a string "type" value to the BackoffType enum.
        params = dict(backoff)
        raw_type = params.get("type")
        if isinstance(raw_type, str):
            params["type"] = BackoffType(raw_type.lower())
        return BackoffPolicy(**params)
    raise TypeError(f"backoff must be str, dict, or BackoffPolicy, got {type(backoff)}")
69
+
70
+
71
def calculate_backoff_delay(
    attempt: int,
    retry_policy: RetryPolicy,
    backoff_policy: BackoffPolicy,
) -> float:
    """Compute the delay, in seconds, to wait before the next retry.

    Args:
        attempt: Current attempt number (0-indexed)
        retry_policy: Supplies ``initial_interval_ms`` and ``max_interval_ms``
        backoff_policy: Supplies the curve type and exponential multiplier

    Returns:
        Delay in seconds, capped at ``retry_policy.max_interval_ms``
    """
    base_ms = retry_policy.initial_interval_ms
    if backoff_policy.type == BackoffType.CONSTANT:
        delay_ms = base_ms
    elif backoff_policy.type == BackoffType.LINEAR:
        delay_ms = base_ms * (attempt + 1)
    else:
        # Any other value is treated as exponential growth.
        delay_ms = base_ms * (backoff_policy.multiplier**attempt)

    # Never exceed the configured ceiling; convert ms -> s for asyncio.sleep.
    capped_ms = min(delay_ms, retry_policy.max_interval_ms)
    return capped_ms / 1000.0
96
+
97
+
98
async def execute_with_retry(
    handler: HandlerFunc,
    ctx: Any,  # FunctionContext, but avoid circular import
    retry_policy: RetryPolicy,
    backoff_policy: BackoffPolicy,
    needs_context: bool,
    timeout_ms: Optional[int],
    *args: Any,
    **kwargs: Any,
) -> Any:
    """Execute handler with retry logic and optional timeout.

    Args:
        handler: The function handler to execute
        ctx: Context for logging and attempt tracking (FunctionContext)
        retry_policy: Retry configuration
        backoff_policy: Backoff configuration
        needs_context: Whether handler accepts ctx parameter
        timeout_ms: Maximum execution time in milliseconds (None for no timeout)
        *args: Arguments to pass to handler (excluding ctx if needs_context=False)
        **kwargs: Keyword arguments to pass to handler

    Returns:
        Result of successful execution

    Raises:
        RetryError: If all retry attempts fail
        asyncio.TimeoutError: If function execution exceeds timeout_ms

    NOTE(review): asyncio.TimeoutError is an Exception subclass, so a timed-out
    attempt is caught by the broad handler below and retried like any other
    failure; after the final attempt it surfaces wrapped in RetryError rather
    than propagating directly — confirm this matches the Raises section above.
    NOTE(review): if retry_policy.max_attempts <= 0 the loop never runs and the
    trailing assert fails — presumably max_attempts is validated >= 1 elsewhere;
    confirm.
    """
    # Import here to avoid circular dependency
    from .function import FunctionContext

    last_error: Optional[Exception] = None

    for attempt in range(retry_policy.max_attempts):
        try:
            # Create context for this attempt (FunctionContext is immutable)
            # Propagate streaming context from parent for real-time SSE log delivery
            attempt_ctx = FunctionContext(
                run_id=ctx.run_id,
                attempt=attempt,
                retry_policy=retry_policy,
                # Private fields read via getattr so plain contexts without the
                # streaming attributes still work.
                is_streaming=getattr(ctx, '_is_streaming', False),
                tenant_id=getattr(ctx, '_tenant_id', None),
            )

            # Execute handler (pass context only if needed)
            if needs_context:
                result = handler(attempt_ctx, *args, **kwargs)
            else:
                result = handler(*args, **kwargs)

            # Check if result is an async generator (streaming function)
            # Async generators cannot be retried - return immediately for streaming consumption
            if inspect.isasyncgen(result):
                return result

            # For coroutines, apply timeout and await
            if inspect.iscoroutine(result):
                if timeout_ms is not None:
                    timeout_seconds = timeout_ms / 1000.0
                    try:
                        result = await asyncio.wait_for(result, timeout=timeout_seconds)
                    except asyncio.TimeoutError:
                        # Re-raise with more context
                        raise asyncio.TimeoutError(
                            f"Function execution timed out after {timeout_ms}ms"
                        )
                else:
                    result = await result

            # Synchronous handlers fall through here: their result is returned
            # as-is and timeout_ms is not enforced for them.
            return result

        except Exception as e:
            last_error = e
            ctx.logger.warning(
                f"Function execution failed (attempt {attempt + 1}/{retry_policy.max_attempts}): {e}"
            )

            # If this was the last attempt, raise RetryError
            if attempt == retry_policy.max_attempts - 1:
                raise RetryError(
                    f"Function failed after {retry_policy.max_attempts} attempts",
                    attempts=retry_policy.max_attempts,
                    last_error=e,
                )

            # Calculate backoff delay
            delay = calculate_backoff_delay(attempt, retry_policy, backoff_policy)
            ctx.logger.info(f"Retrying in {delay:.2f} seconds...")
            await asyncio.sleep(delay)

    # Should never reach here, but for type safety
    assert last_error is not None
    raise RetryError(
        f"Function failed after {retry_policy.max_attempts} attempts",
        attempts=retry_policy.max_attempts,
        last_error=last_error,
    )
agnt5/_schema_utils.py ADDED
@@ -0,0 +1,312 @@
1
+ """Schema conversion utilities for structured output support.
2
+
3
+ This module provides utilities to convert Python dataclasses and Pydantic models
4
+ to JSON Schema format for LLM structured output generation, function signatures,
5
+ and tool definitions.
6
+ """
7
+
8
+ from __future__ import annotations
9
+
10
+ import dataclasses
11
+ import inspect
12
+ from typing import Any, Callable, Dict, Optional, Tuple, get_args, get_origin, get_type_hints
13
+
14
+ try:
15
+ from pydantic import BaseModel
16
+ PYDANTIC_AVAILABLE = True
17
+ except ImportError:
18
+ BaseModel = None # type: ignore
19
+ PYDANTIC_AVAILABLE = False
20
+
21
+
22
def detect_format_type(response_format: Any) -> Tuple[str, Dict[str, Any]]:
    """Classify a response-format object and produce its JSON schema.

    Args:
        response_format: Pydantic model class, dataclass, or raw schema dict

    Returns:
        Tuple of (format_type, json_schema)
        - format_type: "pydantic", "dataclass", or "raw"
        - json_schema: JSON schema dictionary

    Raises:
        ValueError: If format type is not supported
    """
    # Pydantic model class (only checked when pydantic is installed).
    is_model_class = (
        PYDANTIC_AVAILABLE
        and isinstance(response_format, type)
        and issubclass(response_format, BaseModel)
    )
    if is_model_class:
        return 'pydantic', pydantic_to_json_schema(response_format)

    # Dataclass (type or instance — dataclasses.fields accepts both).
    if dataclasses.is_dataclass(response_format):
        return 'dataclass', dataclass_to_json_schema(response_format)

    # Raw schema dict: passed through untouched.
    if isinstance(response_format, dict):
        return 'raw', response_format

    raise ValueError(
        f"Unsupported response_format type: {type(response_format)}. "
        f"Expected Pydantic model, dataclass, or dict."
    )
52
+
53
+
54
def pydantic_to_json_schema(model: type) -> Dict[str, Any]:
    """Produce a JSON schema for a Pydantic model class.

    Works with both Pydantic v2 (``model_json_schema``) and v1 (``schema``).
    Schema generation is best-effort: on failure a bare object schema is
    returned instead of raising.

    Args:
        model: Pydantic BaseModel class

    Returns:
        JSON schema dictionary (always contains a "type" key)

    Raises:
        ImportError: If pydantic is not installed.
        ValueError: If ``model`` is not a BaseModel subclass.
    """
    if not PYDANTIC_AVAILABLE:
        raise ImportError("Pydantic is not installed. Install with: pip install pydantic")

    if not (isinstance(model, type) and issubclass(model, BaseModel)):
        raise ValueError(f"Expected Pydantic BaseModel class, got {type(model)}")

    try:
        if hasattr(model, 'model_json_schema'):
            # Pydantic v2 API
            schema = model.model_json_schema()
        elif hasattr(model, 'schema'):
            # Pydantic v1 API
            schema = model.schema()
        else:
            # Fallback for edge cases
            schema = {"type": "object"}
    except Exception:
        # Degrade to a generic object schema rather than failing the caller.
        schema = {"type": "object"}

    # Guarantee the "type" key consumers expect.
    schema.setdefault("type", "object")
    return schema
90
+
91
+
92
def dataclass_to_json_schema(cls: type) -> Dict[str, Any]:
    """Convert Python dataclass to JSON schema.

    Args:
        cls: Dataclass type (an instance is also accepted, since
            ``dataclasses.fields`` works on both)

    Returns:
        JSON schema dictionary with "properties", "required", and
        ``"additionalProperties": False``

    Raises:
        ValueError: If ``cls`` is not a dataclass
    """
    if not dataclasses.is_dataclass(cls):
        raise ValueError(f"Expected dataclass, got {type(cls)}")

    properties: Dict[str, Any] = {}
    required: list = []

    for field in dataclasses.fields(cls):
        # NOTE(review): field.type is a *string* when the defining module uses
        # `from __future__ import annotations`; such fields hit _type_to_schema's
        # string fallback — confirm callers don't depend on richer schemas there.
        properties[field.name] = _type_to_schema(field.type)

        # A field is required when it has neither a default nor a default_factory.
        # Fix: compare the MISSING sentinel with `is` (identity), not `==`.
        if field.default is dataclasses.MISSING and field.default_factory is dataclasses.MISSING:  # type: ignore
            required.append(field.name)

    return {
        "type": "object",
        "properties": properties,
        "required": required,
        "additionalProperties": False,
    }


def _type_to_schema(python_type: Any) -> Dict[str, Any]:
    """Convert Python type hint to JSON schema type.

    Handles NoneType, Optional/Union (both ``typing.Union`` and PEP 604
    ``X | Y`` unions), ``list[...]``, ``dict[...]``, and the basic scalar
    types. Unknown types fall back to a string schema that carries the type
    in its description.

    Args:
        python_type: Python type annotation

    Returns:
        JSON schema type definition
    """
    origin = get_origin(python_type)
    args = get_args(python_type)

    # Bare NoneType annotation.
    if python_type is type(None):
        return {"type": "null"}

    # Union types, including Optional[X]. PEP 604 unions (X | Y on 3.10+)
    # report origin types.UnionType rather than typing.Union, so accept both.
    import types
    pep604_union = getattr(types, "UnionType", None)
    if origin is Union or (pep604_union is not None and origin is pep604_union):
        # Filter out None from union members.
        non_none_types = [t for t in args if t is not type(None)]
        if len(non_none_types) == 1:
            # Optional[X]: emit the schema for X (nullability is not encoded).
            return _type_to_schema(non_none_types[0])
        # True Union - use anyOf
        return {"anyOf": [_type_to_schema(t) for t in non_none_types]}

    # Parameterized list types.
    if origin is list:
        item_type = args[0] if args else Any
        return {
            "type": "array",
            "items": _type_to_schema(item_type)
        }

    # Parameterized dict types.
    if origin is dict:
        value_type = args[1] if len(args) > 1 else Any
        return {
            "type": "object",
            "additionalProperties": _type_to_schema(value_type)
        }

    # Basic types.
    if python_type == str:
        return {"type": "string"}
    elif python_type == int:
        return {"type": "integer"}
    elif python_type == float:
        return {"type": "number"}
    elif python_type == bool:
        return {"type": "boolean"}
    elif python_type == dict:
        return {"type": "object"}
    elif python_type == list:
        return {"type": "array"}
    elif python_type == Any:
        return {}  # Any type - no restrictions

    # Fallback for unknown types.
    return {"type": "string", "description": f"Type: {python_type}"}
188
+
189
+
190
+ # Import Union for type checking
191
+ try:
192
+ from typing import Union
193
+ except ImportError:
194
+ Union = None # type: ignore
195
+
196
+
197
def is_pydantic_model(type_hint: Any) -> bool:
    """Check if a type hint is a Pydantic model.

    Args:
        type_hint: Type annotation to check

    Returns:
        True if type_hint is a Pydantic BaseModel subclass
    """
    if not PYDANTIC_AVAILABLE:
        return False
    if not isinstance(type_hint, type):
        return False
    try:
        return issubclass(type_hint, BaseModel)
    except TypeError:
        # issubclass rejects some typing constructs even when isinstance(t, type) holds.
        return False
213
+
214
+
215
def extract_function_schemas(func: Callable[..., Any]) -> Tuple[Optional[Dict[str, Any]], Optional[Dict[str, Any]]]:
    """Extract input and output schemas from function type hints.

    Supports both plain Python types and Pydantic models.
    Pydantic models provide richer validation and schema generation.

    Args:
        func: Function to extract schemas from

    Returns:
        Tuple of (input_schema, output_schema) where each is a JSON Schema dict
        or None. Returns (None, None) when introspection fails — schema
        extraction is deliberately best-effort.
    """
    try:
        hints = get_type_hints(func)
        sig = inspect.signature(func)

        # Build input schema from parameters (excluding 'ctx').
        input_properties: Dict[str, Any] = {}
        required_params: list = []

        for param_name, param in sig.parameters.items():
            # The framework-injected context is not part of the user-facing input.
            if param_name == "ctx":
                continue
            # Fix: *args/**kwargs are not named inputs; previously they were
            # emitted as (required!) properties called "args"/"kwargs".
            if param.kind in (inspect.Parameter.VAR_POSITIONAL, inspect.Parameter.VAR_KEYWORD):
                continue

            if param_name in hints:
                param_type = hints[param_name]
                if is_pydantic_model(param_type):
                    # Use Pydantic's schema generation.
                    input_properties[param_name] = pydantic_to_json_schema(param_type)
                else:
                    # Use basic type conversion.
                    input_properties[param_name] = _type_to_schema(param_type)
            else:
                # No type hint: accept any object.
                input_properties[param_name] = {"type": "object"}

            # A parameter is required when it has no default value.
            if param.default is inspect.Parameter.empty:
                required_params.append(param_name)

        input_schema = None
        if input_properties:
            input_schema = {
                "type": "object",
                "properties": input_properties,
            }
            if required_params:
                input_schema["required"] = required_params

            # Summarize with the docstring's first line, if available.
            if func.__doc__:
                docstring = inspect.cleandoc(func.__doc__)
                first_line = docstring.split('\n')[0].strip()
                if first_line:
                    input_schema["description"] = first_line

        # Build output schema from the return type hint.
        output_schema = None
        if "return" in hints:
            return_type = hints["return"]
            if is_pydantic_model(return_type):
                output_schema = pydantic_to_json_schema(return_type)
            else:
                output_schema = _type_to_schema(return_type)

        return input_schema, output_schema

    except Exception:
        # Best-effort: any introspection failure yields no schemas.
        return None, None
291
+
292
+
293
def extract_function_metadata(func: Callable[..., Any]) -> Dict[str, str]:
    """Extract metadata from function including description from docstring.

    Args:
        func: Function to extract metadata from

    Returns:
        Dictionary with metadata fields like 'description'; empty when the
        function has no usable docstring.
    """
    metadata: Dict[str, str] = {}

    doc = func.__doc__
    if doc:
        # The docstring's first (cleaned) line becomes the description.
        summary = inspect.cleandoc(doc).split('\n')[0].strip()
        if summary:
            metadata["description"] = summary

    return metadata