agnt5 0.3.2a1__cp310-abi3-manylinux_2_34_aarch64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of agnt5 might be problematic. Click here for more details.

@@ -0,0 +1,112 @@
1
+ """Agent decorator for registering agents."""
2
+
3
+ from __future__ import annotations
4
+
5
+ import functools
6
+ from typing import Any, Callable, List, Optional
7
+
8
+ from ..lm import LanguageModel
9
+
10
+ from .core import Agent
11
+ from .registry import AgentRegistry
12
+
13
+
14
def agent(
    _func: Optional[Callable] = None,
    *,
    name: Optional[str] = None,
    model: Optional[LanguageModel] = None,
    instructions: Optional[str] = None,
    tools: Optional[List[Any]] = None,
    model_name: str = "gpt-4o-mini",
    temperature: float = 0.7,
    max_iterations: int = 10,
) -> Callable:
    """
    Register a function as an agent and return the resulting Agent instance.

    The decorated function may build and return its own ``Agent``; otherwise
    the decorator constructs one from the keyword parameters below. Either
    way the agent is stored in the :class:`AgentRegistry` so the worker can
    discover it, and the decorator evaluates eagerly at import time.

    Args:
        name: Agent name (defaults to the decorated function's name)
        model: Language model instance (required unless the function returns an Agent)
        instructions: System instructions (required unless the function returns an Agent)
        tools: List of tools available to the agent
        model_name: Model name to use
        temperature: LLM temperature
        max_iterations: Maximum reasoning iterations

    Returns:
        The registered Agent instance (usable directly by callers).

    Raises:
        ValueError: If the function does not return an Agent and 'model' or
            'instructions' is missing.

    Example:
        ```python
        from agnt5 import agent, tool
        from agnt5.lm import OpenAILanguageModel

        @agent(
            name="research_agent",
            model=OpenAILanguageModel(),
            instructions="You are a research assistant.",
            tools=[search_web, analyze_data]
        )
        def create_researcher():
            # Agent is created and registered automatically
            pass

        # Or create the agent inside the function body
        @agent
        def my_agent():
            from agnt5.lm import OpenAILanguageModel
            return Agent(
                name="my_agent",
                model=OpenAILanguageModel(),
                instructions="You are a helpful assistant."
            )
        ```
    """

    def decorator(func: Callable) -> Callable:
        # Fall back to the function's own name when none was supplied.
        display_name = name or func.__name__

        @functools.wraps(func)
        def build(*args, **kwargs) -> Agent:
            produced = func(*args, **kwargs)
            if isinstance(produced, Agent):
                # The function supplied a ready-made agent; use it as-is.
                built = produced
            else:
                # Otherwise the decorator parameters must fully describe one.
                if model is None or instructions is None:
                    raise ValueError(
                        f"Agent decorator for '{display_name}' requires either "
                        "the decorated function to return an Agent instance, "
                        "or 'model' and 'instructions' parameters to be provided"
                    )
                built = Agent(
                    name=display_name,
                    model=model,
                    instructions=instructions,
                    tools=tools,
                    model_name=model_name,
                    temperature=temperature,
                    max_iterations=max_iterations,
                )
            # Publish to the global registry before handing it back.
            AgentRegistry.register(built)
            return built

        # Build eagerly so registration happens at import time, and return
        # the agent instance itself rather than the factory.
        return build()

    return decorator if _func is None else decorator(_func)
agnt5/agent/handoff.py ADDED
@@ -0,0 +1,105 @@
1
+ """Agent handoff support for multi-agent systems.
2
+
3
+ Handoffs enable one agent to delegate control to another specialized agent,
4
+ following the pattern popularized by LangGraph and OpenAI Agents SDK.
5
+ """
6
+
7
+ from typing import Optional, TYPE_CHECKING
8
+
9
+ if TYPE_CHECKING:
10
+ from .core import Agent
11
+
12
+
13
class Handoff:
    """Configuration for agent-to-agent handoff.

    The handoff is exposed to the LLM as a tool named 'transfer_to_{agent_name}'
    that allows explicit delegation with conversation history.

    Example:
        ```python
        specialist = Agent(name="specialist", ...)

        # Simple: Pass agent directly (auto-wrapped with defaults)
        coordinator = Agent(
            name="coordinator",
            handoffs=[specialist]  # Agent auto-converted to Handoff
        )

        # Advanced: Use Handoff for custom configuration
        coordinator = Agent(
            name="coordinator",
            handoffs=[
                Handoff(
                    agent=specialist,
                    description="Custom description for LLM",
                    tool_name="custom_transfer_name",
                    pass_full_history=False
                )
            ]
        )
        ```
    """

    def __init__(
        self,
        agent: "Agent",
        description: Optional[str] = None,
        tool_name: Optional[str] = None,
        pass_full_history: bool = True,
    ):
        """Initialize handoff configuration.

        Args:
            agent: Target agent to hand off to
            description: Description shown to LLM (defaults to agent instructions)
            tool_name: Custom tool name (defaults to 'transfer_to_{agent_name}')
            pass_full_history: Whether to pass full conversation history to target agent
        """
        self.agent = agent
        # Resolve the LLM-facing description: explicit value wins, then the
        # target agent's instructions, then a generic fallback.
        if description:
            self.description = description
        elif agent.instructions:
            self.description = agent.instructions
        else:
            self.description = f"Transfer to {agent.name}"
        # Default tool name follows the transfer_to_<agent> convention.
        self.tool_name = tool_name if tool_name else f"transfer_to_{agent.name}"
        self.pass_full_history = pass_full_history
63
+
64
+
65
def handoff(
    agent: "Agent",
    description: Optional[str] = None,
    tool_name: Optional[str] = None,
    pass_full_history: bool = True,
) -> Handoff:
    """Create a handoff configuration for agent-to-agent delegation.

    Thin convenience wrapper around :class:`Handoff` providing a clean,
    function-style API.

    Args:
        agent: Target agent to hand off to
        description: Description shown to LLM
        tool_name: Custom tool name
        pass_full_history: Whether to pass full conversation history

    Returns:
        Handoff configuration

    Example:
        ```python
        from agnt5 import Agent, handoff

        research_agent = Agent(name="researcher", ...)
        writer_agent = Agent(name="writer", ...)

        coordinator = Agent(
            name="coordinator",
            handoffs=[
                handoff(research_agent, "Transfer for research tasks"),
                handoff(writer_agent, "Transfer for writing tasks"),
            ]
        )
        ```
    """
    config = Handoff(
        agent=agent,
        description=description,
        tool_name=tool_name,
        pass_full_history=pass_full_history,
    )
    return config
@@ -0,0 +1,68 @@
1
+ """Global agent registry for lookups."""
2
+
3
+ import logging
4
+ from typing import Dict, Optional, TYPE_CHECKING
5
+
6
+ if TYPE_CHECKING:
7
+ from .core import Agent
8
+
9
logger = logging.getLogger(__name__)

# Module-level store shared by every AgentRegistry operation.
_AGENT_REGISTRY: Dict[str, "Agent"] = {}


class AgentRegistry:
    """Registry for looking up agents by name.

    Provides a process-wide registry where agents can be registered and
    later looked up by name — useful in multi-agent systems where agents
    need to discover each other.

    Example:
        ```python
        # Register agent
        agent = Agent(name="researcher", ...)
        AgentRegistry.register(agent)

        # Look up agent
        found = AgentRegistry.get("researcher")
        ```
    """

    @staticmethod
    def register(agent: "Agent") -> None:
        """Register an agent, replacing any agent with the same name.

        Args:
            agent: Agent to register
        """
        already_known = agent.name in _AGENT_REGISTRY
        if already_known:
            # Replacement is allowed but noisy, to surface accidental clashes.
            logger.warning(f"Overwriting existing agent '{agent.name}'")
        _AGENT_REGISTRY[agent.name] = agent

    @staticmethod
    def get(name: str) -> Optional["Agent"]:
        """Get agent by name.

        Args:
            name: Name of agent to look up

        Returns:
            Agent if found, None otherwise
        """
        try:
            return _AGENT_REGISTRY[name]
        except KeyError:
            return None

    @staticmethod
    def all() -> Dict[str, "Agent"]:
        """Get all registered agents.

        Returns:
            A snapshot copy of the registry (mutating it does not affect
            the registry itself).
        """
        return dict(_AGENT_REGISTRY)

    @staticmethod
    def clear() -> None:
        """Remove every registered agent."""
        _AGENT_REGISTRY.clear()
agnt5/agent/result.py ADDED
@@ -0,0 +1,39 @@
1
+ """Agent execution result."""
2
+
3
+ from typing import Any, Dict, List, Optional, TYPE_CHECKING
4
+
5
+ if TYPE_CHECKING:
6
+ from ..context import Context
7
+
8
+
9
class AgentResult:
    """Result from agent execution.

    Attributes:
        output: The final text output from the agent
        tool_calls: List of tool calls made during execution
        context: The execution context
        handoff_to: Name of agent that was handed off to (if any)
        handoff_metadata: Additional metadata from handoff (never None)
    """

    # Maximum characters of `output` shown in repr before truncation.
    _REPR_PREVIEW_LEN = 50

    def __init__(
        self,
        output: str,
        tool_calls: List[Dict[str, Any]],
        context: "Context",
        handoff_to: Optional[str] = None,
        handoff_metadata: Optional[Dict[str, Any]] = None,
    ):
        self.output = output
        self.tool_calls = tool_calls
        self.context = context
        self.handoff_to = handoff_to
        # Normalize None to an empty dict so callers can always index/iterate.
        self.handoff_metadata = handoff_metadata or {}

    def __repr__(self) -> str:
        # Only append an ellipsis when the output is actually truncated.
        # (Previously "..." was added unconditionally, which made short
        # outputs look cut off in logs.)
        if len(self.output) > self._REPR_PREVIEW_LEN:
            preview = f"{self.output[:self._REPR_PREVIEW_LEN]!r}..."
        else:
            preview = repr(self.output)
        return (
            f"AgentResult(output={preview}, "
            f"tool_calls={len(self.tool_calls)}, "
            f"handoff_to={self.handoff_to})"
        )
agnt5/checkpoint.py ADDED
@@ -0,0 +1,246 @@
1
+ """Step-level checkpoint client for durable workflow execution.
2
+
3
+ This module provides the CheckpointClient class for step-level memoization,
4
+ enabling workflows to skip re-execution of completed steps after crashes.
5
+ """
6
+
7
+ from __future__ import annotations
8
+
9
+ import json
10
+ import logging
11
+ from dataclasses import dataclass
12
+ from typing import Any, Optional
13
+
14
+ from ._telemetry import setup_module_logger
15
+
16
+ logger = setup_module_logger(__name__)
17
+
18
+
19
@dataclass
class CheckpointResult:
    """Outcome of a single checkpoint operation.

    Attributes:
        success: Whether the checkpoint was processed successfully
        error_message: Error message if the checkpoint failed
        memoized: Whether the step was already memoized (for step_started)
        cached_output: Cached output if memoized (as bytes)
    """

    success: bool
    error_message: Optional[str] = None
    memoized: bool = False
    cached_output: Optional[bytes] = None
34
+
35
+
36
class CheckpointClient:
    """Client for step-level checkpoint and memoization.

    This client provides synchronous checkpoint calls that enable
    durable step execution with platform-side memoization.

    The typical flow for a durable step:
    1. Call step_started() before execution
    2. If memoized, return the cached result
    3. Otherwise, execute the step
    4. Call step_completed() with the result

    Example:
        ```python
        async with CheckpointClient() as client:
            result = await client.step_started(run_id, "step:fetch_data:0", "fetch_data", "function")
            if result.memoized:
                return json.loads(result.cached_output)

            # Execute the step
            data = await fetch_data()

            # Record completion
            await client.step_completed(run_id, "step:fetch_data:0", "fetch_data", "function", json.dumps(data).encode())
            return data
        ```
    """

    def __init__(self, endpoint: Optional[str] = None):
        """Initialize checkpoint client.

        Args:
            endpoint: Worker Coordinator endpoint URL.
                      Defaults to AGNT5_COORDINATOR_ENDPOINT env var or http://localhost:34186
        """
        try:
            from ._core import PyCheckpointClient

            self._client = PyCheckpointClient(endpoint)
            self._connected = False
        except ImportError as e:
            # Degrade gracefully: every call then returns a failure result
            # (or None for lookups) instead of raising at import time.
            logger.warning(f"Checkpoint client not available (Rust core not loaded): {e}")
            self._client = None
            self._connected = False

    # ---- internal helpers (shared by all checkpoint calls) ----

    @staticmethod
    def _unavailable() -> CheckpointResult:
        # Uniform failure result used whenever the Rust core is missing.
        return CheckpointResult(success=False, error_message="Client not available")

    async def _ensure_connected(self) -> None:
        # Lazily connect on first use so callers may skip connect()/the
        # context manager and still get a working client.
        if not self._connected:
            await self.connect()

    @staticmethod
    def _from_native(result: Any) -> CheckpointResult:
        # Convert the Rust-side result object into the Python dataclass.
        return CheckpointResult(
            success=result.success,
            error_message=result.error_message,
            memoized=result.memoized,
            cached_output=result.cached_output,
        )

    async def connect(self) -> None:
        """Connect to the Worker Coordinator.

        Must be called before making checkpoint calls (each checkpoint
        method also connects lazily if needed).

        Raises:
            RuntimeError: If the Rust core extension failed to load.
        """
        if self._client is None:
            raise RuntimeError("Checkpoint client not available (Rust core not loaded)")

        await self._client.connect()
        self._connected = True
        logger.debug("Checkpoint client connected to coordinator")

    async def __aenter__(self) -> "CheckpointClient":
        """Async context manager entry - connects to coordinator."""
        await self.connect()
        return self

    async def __aexit__(self, exc_type, exc_val, exc_tb) -> None:
        """Async context manager exit."""
        # No explicit disconnect needed - connection is cleaned up on drop
        pass

    async def step_started(
        self,
        run_id: str,
        step_key: str,
        step_name: str,
        step_type: str = "function",
        input_payload: Optional[bytes] = None,
    ) -> CheckpointResult:
        """Send a step started checkpoint and check for memoized result.

        Call this before executing a step. If the step is memoized,
        the result will contain the cached output.

        Args:
            run_id: The workflow run ID
            step_key: Unique key for this step (e.g., "step:greet:0")
            step_name: Human-readable step name
            step_type: Type of step (e.g., "function", "activity", "llm_call")
            input_payload: Input data for the step (optional, for logging)

        Returns:
            CheckpointResult with memoized=True and cached_output if step was memoized
        """
        if self._client is None:
            return self._unavailable()

        await self._ensure_connected()

        result = await self._client.step_started(
            run_id, step_key, step_name, step_type, input_payload
        )
        return self._from_native(result)

    async def step_completed(
        self,
        run_id: str,
        step_key: str,
        step_name: str,
        step_type: str,
        output_payload: bytes,
        latency_ms: Optional[int] = None,
    ) -> CheckpointResult:
        """Send a step completed checkpoint.

        Call this after successfully executing a step to record the result
        for future memoization.

        Args:
            run_id: The workflow run ID
            step_key: Unique key for this step
            step_name: Human-readable step name
            step_type: Type of step
            output_payload: Output data from the step
            latency_ms: Step execution latency in milliseconds

        Returns:
            CheckpointResult indicating success or failure
        """
        if self._client is None:
            return self._unavailable()

        await self._ensure_connected()

        result = await self._client.step_completed(
            run_id, step_key, step_name, step_type, output_payload, latency_ms
        )
        return self._from_native(result)

    async def step_failed(
        self,
        run_id: str,
        step_key: str,
        step_name: str,
        step_type: str,
        error_message: str,
        error_type: str,
    ) -> CheckpointResult:
        """Send a step failed checkpoint.

        Call this when a step fails to record the error.

        Args:
            run_id: The workflow run ID
            step_key: Unique key for this step
            step_name: Human-readable step name
            step_type: Type of step
            error_message: Error message
            error_type: Error type/class name

        Returns:
            CheckpointResult indicating success or failure
        """
        if self._client is None:
            return self._unavailable()

        await self._ensure_connected()

        result = await self._client.step_failed(
            run_id, step_key, step_name, step_type, error_message, error_type
        )
        return self._from_native(result)

    async def get_memoized_step(
        self, run_id: str, step_key: str
    ) -> Optional[bytes]:
        """Check if a step result is memoized.

        Use this for quick memoization lookups before executing expensive steps.

        Args:
            run_id: The workflow run ID
            step_key: Unique key for this step

        Returns:
            The cached output bytes if memoized, None otherwise
        """
        if self._client is None:
            return None

        await self._ensure_connected()

        return await self._client.get_memoized_step(run_id, step_key)