edda-framework 0.10.0__py3-none-any.whl → 0.12.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,163 @@
+"""
+Durable LLM call decorator for Edda + Mirascope V2 integration.
+
+This module provides the @durable_call decorator that combines
+Mirascope's @llm.call with Edda's @activity for durable LLM calls.
+"""
+
+from __future__ import annotations
+
+import functools
+import inspect
+from collections.abc import Callable
+from typing import Any, TypeVar
+
+from edda.activity import activity
+from edda.context import WorkflowContext
+
+from .types import DurableResponse
+
+F = TypeVar("F", bound=Callable[..., Any])
+
+
+def _import_mirascope() -> Any:
+    """
+    Lazy import Mirascope components.
+
+    Raises:
+        ImportError: If mirascope is not installed.
+    """
+    try:
+        from mirascope import llm
+
+        return llm
+    except ImportError as e:
+        raise ImportError(
+            "Mirascope not installed. Install with: pip install 'mirascope[anthropic]' "
+            "or pip install 'edda-framework[mirascope]'"
+        ) from e
+
+
+def durable_call(
+    model: str,
+    *,
+    tools: list[Any] | None = None,
+    response_model: type | None = None,
+    json_mode: bool = False,
+    **call_params: Any,
+) -> Callable[[F], F]:
+    """
+    Decorator that makes an LLM call durable through Edda's activity system.
+
+    This decorator combines Mirascope V2's @llm.call with Edda's @activity,
+    providing automatic caching, retry, and crash recovery for LLM calls.
+
+    Args:
+        model: Model identifier in "provider/model" format
+            (e.g., "anthropic/claude-sonnet-4-20250514", "openai/gpt-4").
+        tools: Optional list of tool functions for function calling.
+        response_model: Optional Pydantic model for structured output.
+        json_mode: Whether to enable JSON mode.
+        **call_params: Additional parameters passed to the LLM provider.
+
+    Returns:
+        A decorator that transforms the function into a durable LLM call.
+
+    Example:
+        Basic usage::
+
+            @durable_call("anthropic/claude-sonnet-4-20250514")
+            async def summarize(text: str) -> str:
+                return f"Summarize this text: {text}"
+
+            @workflow
+            async def my_workflow(ctx: WorkflowContext, text: str) -> str:
+                response = await summarize(ctx, text)
+                return response["content"]
+
+        With tools::
+
+            def get_weather(city: str) -> str:
+                '''Get the weather for a city.'''
+                return f"Sunny in {city}"
+
+            @durable_call(
+                "anthropic/claude-sonnet-4-20250514",
+                tools=[get_weather],
+            )
+            async def weather_assistant(query: str) -> str:
+                return query
+
+        With structured output::
+
+            class BookInfo(BaseModel):
+                title: str
+                author: str
+                year: int
+
+            @durable_call(
+                "anthropic/claude-sonnet-4-20250514",
+                response_model=BookInfo,
+            )
+            async def extract_book_info(text: str) -> str:
+                return f"Extract book information from: {text}"
+
+    Note:
+        - The decorated function must return a string (the prompt).
+        - When called, the first argument must be the WorkflowContext.
+        - The response is returned as a dictionary (DurableResponse.to_dict()).
+    """
+    llm = _import_mirascope()
+
+    # Extract provider from model string (e.g., "anthropic/claude-..." -> "anthropic")
+    provider = model.split("/")[0] if "/" in model else "unknown"
+
+    def decorator(func: F) -> F:
+        # Apply Mirascope V2's @llm.call decorator with unified model string
+        mirascope_decorated = llm.call(
+            model,
+            tools=tools,
+            response_model=response_model,
+            json_mode=json_mode,
+            **call_params,
+        )(func)
+
+        # Determine if the original function is async
+        is_async = inspect.iscoroutinefunction(func)
+
+        @activity
+        @functools.wraps(func)
+        async def async_wrapper(
+            ctx: WorkflowContext,  # noqa: ARG001 - Required by @activity decorator
+            *args: Any,
+            **kwargs: Any,
+        ) -> dict[str, Any]:
+            # Call the Mirascope-decorated function
+            if is_async or inspect.iscoroutinefunction(mirascope_decorated):
+                response = await mirascope_decorated(*args, **kwargs)
+            else:
+                response = mirascope_decorated(*args, **kwargs)
+
+            # Handle structured output (response_model)
+            # For structured output, the response is the Pydantic model itself
+            if response_model is not None and hasattr(response, "model_dump"):
+                return {
+                    "content": "",
+                    "model": model,
+                    "provider": provider,
+                    "structured_output": response.model_dump(),
+                }
+
+            # Convert to serializable format
+            return DurableResponse.from_mirascope(response, provider).to_dict()
+
+        # Store metadata for introspection
+        async_wrapper._mirascope_func = mirascope_decorated  # type: ignore[union-attr]
+        async_wrapper._provider = provider  # type: ignore[union-attr]
+        async_wrapper._model = model  # type: ignore[union-attr]
+        async_wrapper._tools = tools  # type: ignore[union-attr]
+        async_wrapper._response_model = response_model  # type: ignore[union-attr]
+
+        return async_wrapper  # type: ignore[return-value]
+
+    return decorator
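Read together, the hunk above and the types module below define the contract an Edda workflow sees: the decorated function becomes an activity that takes a WorkflowContext as its first argument and returns DurableResponse.to_dict(). The sketch below shows one plausible way to consume that dict, including the tool_calls list. It is not part of the diff: the import paths for workflow and durable_call are assumptions (the diff does not show file names), and the single-step dispatch loop is illustrative rather than package behavior.

    # Usage sketch, not part of the diff. Import paths are assumed; the diff
    # does not show file names, so "edda.mirascope" here is hypothetical.
    from edda import workflow                      # assumed export
    from edda.context import WorkflowContext
    from edda.mirascope import durable_call        # hypothetical module path


    def get_weather(city: str) -> str:
        """Get the weather for a city."""
        return f"Sunny in {city}"


    @durable_call("anthropic/claude-sonnet-4-20250514", tools=[get_weather])
    async def weather_assistant(query: str) -> str:
        return query


    @workflow
    async def weather_workflow(ctx: WorkflowContext, query: str) -> str:
        # The @activity wrapper caches this result, so a crash after this
        # point replays the stored dict instead of re-calling the provider.
        response = await weather_assistant(ctx, query)

        # tool_calls are plain dicts with "name"/"args"/"id" keys (see the
        # types module below); dispatching them is left to the workflow.
        for call in response.get("tool_calls") or []:
            if call["name"] == "get_weather":
                return get_weather(**call["args"])

        return response["content"]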
@@ -0,0 +1,268 @@
+"""
+Type definitions for Edda + Mirascope integration.
+
+This module provides serializable response types that bridge
+Mirascope's response objects with Edda's activity system.
+"""
+
+from __future__ import annotations
+
+from dataclasses import dataclass, field
+from typing import Any
+
+
+@dataclass
+class DurableResponse:
+    """
+    Serializable representation of a Mirascope LLM response.
+
+    This class captures the essential parts of an LLM response
+    in a JSON-serializable format for Edda's activity caching.
+
+    Attributes:
+        content: The text content of the response.
+        model: The model identifier used for the call.
+        provider: The provider name (e.g., "anthropic", "openai").
+        usage: Token usage statistics (input, output, total).
+        tool_calls: List of tool calls requested by the model.
+        stop_reason: The reason the model stopped generating.
+        raw: Raw response data for debugging/advanced use.
+    """
+
+    content: str
+    model: str
+    provider: str
+    usage: dict[str, int] | None = None
+    tool_calls: list[dict[str, Any]] | None = None
+    stop_reason: str | None = None
+    raw: dict[str, Any] = field(default_factory=dict)
+
+    def to_dict(self) -> dict[str, Any]:
+        """Convert to JSON-serializable dictionary."""
+        return {
+            "content": self.content,
+            "model": self.model,
+            "provider": self.provider,
+            "usage": self.usage,
+            "tool_calls": self.tool_calls,
+            "stop_reason": self.stop_reason,
+            "raw": self.raw,
+        }
+
+    @classmethod
+    def from_dict(cls, data: dict[str, Any]) -> DurableResponse:
+        """Create from dictionary (for replay)."""
+        return cls(
+            content=data.get("content", ""),
+            model=data.get("model", ""),
+            provider=data.get("provider", ""),
+            usage=data.get("usage"),
+            tool_calls=data.get("tool_calls"),
+            stop_reason=data.get("stop_reason"),
+            raw=data.get("raw", {}),
+        )
+
+    @classmethod
+    def _extract_content(cls, response: Any) -> str:
+        """
+        Extract text content from a Mirascope response.
+
+        Handles Mirascope V2's response format where content can be:
+        - A plain string
+        - A list of Text/ContentBlock objects with .text attribute
+        - None
+
+        Args:
+            response: The Mirascope CallResponse object.
+
+        Returns:
+            The extracted text content as a string.
+        """
+        if not hasattr(response, "content"):
+            return str(response)
+
+        content = response.content
+        if content is None:
+            return ""
+        if isinstance(content, str):
+            return content
+
+        # Handle Mirascope V2's list of Text/ContentBlock objects
+        # e.g., [Text(type='text', text='Hello!')]
+        if isinstance(content, list):
+            text_parts = []
+            for item in content:
+                if hasattr(item, "text"):
+                    text_parts.append(item.text)
+                elif isinstance(item, str):
+                    text_parts.append(item)
+                else:
+                    text_parts.append(str(item))
+            return "".join(text_parts)
+
+        return str(content)
+
+    @classmethod
+    def _extract_model(cls, response: Any) -> str:
+        """
+        Extract model string from a Mirascope response.
+
+        Handles Mirascope V2 where response.model is a Model object,
+        not a string. Use model_id for the string version.
+
+        Args:
+            response: The Mirascope CallResponse object.
+
+        Returns:
+            The model identifier as a string.
+        """
+        # Mirascope V2: use model_id (string) instead of model (Model object)
+        if hasattr(response, "model_id"):
+            return str(response.model_id)
+
+        # Fallback: try model attribute
+        model = getattr(response, "model", "")
+        if isinstance(model, str):
+            return model
+
+        # If model is an object, try to get a string representation
+        return str(model) if model else ""
+
+    @classmethod
+    def _extract_usage(cls, response: Any) -> dict[str, Any] | None:
+        """
+        Extract usage statistics from a Mirascope response.
+
+        Handles Mirascope V2 where usage may be in response.raw.usage
+        instead of response.usage.
+
+        Args:
+            response: The Mirascope CallResponse object.
+
+        Returns:
+            Usage statistics as a dict, or None if not available.
+        """
+        usage = None
+
+        # Try direct usage attribute first
+        if hasattr(response, "usage") and response.usage is not None:
+            if hasattr(response.usage, "model_dump"):
+                usage = response.usage.model_dump()
+            elif isinstance(response.usage, dict):
+                usage = response.usage
+
+        # Mirascope V2: try response.raw.usage
+        if usage is None and hasattr(response, "raw") and response.raw is not None:
+            raw = response.raw
+            if hasattr(raw, "usage") and raw.usage is not None:
+                if hasattr(raw.usage, "model_dump"):
+                    usage = raw.usage.model_dump()
+                elif isinstance(raw.usage, dict):
+                    usage = raw.usage
+
+        return usage
+
+    @classmethod
+    def _extract_stop_reason(cls, response: Any) -> str | None:
+        """
+        Extract stop reason from a Mirascope response.
+
+        Handles various attribute names across different providers
+        and Mirascope versions.
+
+        Args:
+            response: The Mirascope CallResponse object.
+
+        Returns:
+            The stop reason as a string, or None if not available.
+        """
+        # Try common attribute names
+        stop_reason = getattr(response, "stop_reason", None)
+        if stop_reason is None:
+            stop_reason = getattr(response, "finish_reason", None)
+
+        # Mirascope V2: try response.raw.stop_reason
+        if stop_reason is None and hasattr(response, "raw") and response.raw is not None:
+            stop_reason = getattr(response.raw, "stop_reason", None)
+            if stop_reason is None:
+                stop_reason = getattr(response.raw, "finish_reason", None)
+
+        return stop_reason
+
+    @classmethod
+    def _parse_tool_args(cls, args: Any) -> dict[str, Any]:
+        """
+        Parse tool arguments from various formats.
+
+        Mirascope V2 returns args as a JSON string (e.g., '{"city": "Tokyo"}'),
+        while we need a dict for execution.
+
+        Args:
+            args: Tool arguments (string, dict, or None).
+
+        Returns:
+            Parsed arguments as a dict.
+        """
+        import json
+
+        if args is None:
+            return {}
+        if isinstance(args, dict):
+            return args
+        if isinstance(args, str):
+            try:
+                parsed = json.loads(args)
+                return parsed if isinstance(parsed, dict) else {}
+            except json.JSONDecodeError:
+                return {}
+        return {}
+
+    @classmethod
+    def from_mirascope(cls, response: Any, provider: str) -> DurableResponse:
+        """
+        Convert a Mirascope response to DurableResponse.
+
+        Args:
+            response: The Mirascope CallResponse object.
+            provider: The provider name (e.g., "anthropic").
+
+        Returns:
+            A DurableResponse instance with serializable data.
+        """
+        # Extract tool calls if available
+        tool_calls = None
+        if hasattr(response, "tool_calls") and response.tool_calls:
+            tool_calls = []
+            for tc in response.tool_calls:
+                if hasattr(tc, "model_dump"):
+                    tc_dict = tc.model_dump()
+                    # Ensure args is a dict, not a JSON string
+                    tc_dict["args"] = cls._parse_tool_args(tc_dict.get("args"))
+                    tool_calls.append(tc_dict)
+                elif isinstance(tc, dict):
+                    tc["args"] = cls._parse_tool_args(tc.get("args"))
+                    tool_calls.append(tc)
+                else:
+                    # Fallback: extract common attributes
+                    raw_args = getattr(tc, "args", None) or getattr(tc, "arguments", {})
+                    tool_calls.append(
+                        {
+                            "name": getattr(tc, "name", None) or getattr(tc, "tool_name", None),
+                            "args": cls._parse_tool_args(raw_args),
+                            "id": getattr(tc, "id", None) or getattr(tc, "tool_call_id", None),
+                        }
+                    )
+
+        return cls(
+            content=cls._extract_content(response),
+            model=cls._extract_model(response),
+            provider=provider,
+            usage=cls._extract_usage(response),
+            tool_calls=tool_calls,
+            stop_reason=cls._extract_stop_reason(response),
+        )
+
+    @property
+    def has_tool_calls(self) -> bool:
+        """Check if the response contains tool calls."""
+        return bool(self.tool_calls)
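Because the activity cache stores and replays to_dict() payloads, the dataclass is designed to round-trip through plain dicts. A small sketch of that contract follows; the field names match the dataclass above, but the values and the import path are made up for illustration.

    # Round-trip sketch, not part of the diff. The import path is hypothetical;
    # the diff does not show file names for this module.
    from edda.mirascope.types import DurableResponse  # hypothetical module path

    resp = DurableResponse(
        content="Sunny in Tokyo",
        model="claude-sonnet-4-20250514",
        provider="anthropic",
        usage={"input_tokens": 12, "output_tokens": 5},        # illustrative values
        tool_calls=[{"name": "get_weather", "args": {"city": "Tokyo"}, "id": "tc_1"}],
        stop_reason="end_turn",
    )

    payload = resp.to_dict()                       # what @durable_call returns/caches
    restored = DurableResponse.from_dict(payload)  # what a replay reconstructs

    assert restored == resp          # @dataclass equality covers all fields
    assert restored.has_tool_calls   # True whenever tool_calls is non-empty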