openai-agents 0.2.8__py3-none-any.whl → 0.6.8__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as published to their public registry. It is provided for informational purposes only.
Files changed (96)
  1. agents/__init__.py +105 -4
  2. agents/_debug.py +15 -4
  3. agents/_run_impl.py +1203 -96
  4. agents/agent.py +164 -19
  5. agents/apply_diff.py +329 -0
  6. agents/editor.py +47 -0
  7. agents/exceptions.py +35 -0
  8. agents/extensions/experimental/__init__.py +6 -0
  9. agents/extensions/experimental/codex/__init__.py +92 -0
  10. agents/extensions/experimental/codex/codex.py +89 -0
  11. agents/extensions/experimental/codex/codex_options.py +35 -0
  12. agents/extensions/experimental/codex/codex_tool.py +1142 -0
  13. agents/extensions/experimental/codex/events.py +162 -0
  14. agents/extensions/experimental/codex/exec.py +263 -0
  15. agents/extensions/experimental/codex/items.py +245 -0
  16. agents/extensions/experimental/codex/output_schema_file.py +50 -0
  17. agents/extensions/experimental/codex/payloads.py +31 -0
  18. agents/extensions/experimental/codex/thread.py +214 -0
  19. agents/extensions/experimental/codex/thread_options.py +54 -0
  20. agents/extensions/experimental/codex/turn_options.py +36 -0
  21. agents/extensions/handoff_filters.py +13 -1
  22. agents/extensions/memory/__init__.py +120 -0
  23. agents/extensions/memory/advanced_sqlite_session.py +1285 -0
  24. agents/extensions/memory/async_sqlite_session.py +239 -0
  25. agents/extensions/memory/dapr_session.py +423 -0
  26. agents/extensions/memory/encrypt_session.py +185 -0
  27. agents/extensions/memory/redis_session.py +261 -0
  28. agents/extensions/memory/sqlalchemy_session.py +334 -0
  29. agents/extensions/models/litellm_model.py +449 -36
  30. agents/extensions/models/litellm_provider.py +3 -1
  31. agents/function_schema.py +47 -5
  32. agents/guardrail.py +16 -2
  33. agents/{handoffs.py → handoffs/__init__.py} +89 -47
  34. agents/handoffs/history.py +268 -0
  35. agents/items.py +237 -11
  36. agents/lifecycle.py +75 -14
  37. agents/mcp/server.py +280 -37
  38. agents/mcp/util.py +24 -3
  39. agents/memory/__init__.py +22 -2
  40. agents/memory/openai_conversations_session.py +91 -0
  41. agents/memory/openai_responses_compaction_session.py +249 -0
  42. agents/memory/session.py +19 -261
  43. agents/memory/sqlite_session.py +275 -0
  44. agents/memory/util.py +20 -0
  45. agents/model_settings.py +14 -3
  46. agents/models/__init__.py +13 -0
  47. agents/models/chatcmpl_converter.py +303 -50
  48. agents/models/chatcmpl_helpers.py +63 -0
  49. agents/models/chatcmpl_stream_handler.py +290 -68
  50. agents/models/default_models.py +58 -0
  51. agents/models/interface.py +4 -0
  52. agents/models/openai_chatcompletions.py +103 -49
  53. agents/models/openai_provider.py +10 -4
  54. agents/models/openai_responses.py +162 -46
  55. agents/realtime/__init__.py +4 -0
  56. agents/realtime/_util.py +14 -3
  57. agents/realtime/agent.py +7 -0
  58. agents/realtime/audio_formats.py +53 -0
  59. agents/realtime/config.py +78 -10
  60. agents/realtime/events.py +18 -0
  61. agents/realtime/handoffs.py +2 -2
  62. agents/realtime/items.py +17 -1
  63. agents/realtime/model.py +13 -0
  64. agents/realtime/model_events.py +12 -0
  65. agents/realtime/model_inputs.py +18 -1
  66. agents/realtime/openai_realtime.py +696 -150
  67. agents/realtime/session.py +243 -23
  68. agents/repl.py +7 -3
  69. agents/result.py +197 -38
  70. agents/run.py +949 -168
  71. agents/run_context.py +13 -2
  72. agents/stream_events.py +1 -0
  73. agents/strict_schema.py +14 -0
  74. agents/tool.py +413 -15
  75. agents/tool_context.py +22 -1
  76. agents/tool_guardrails.py +279 -0
  77. agents/tracing/__init__.py +2 -0
  78. agents/tracing/config.py +9 -0
  79. agents/tracing/create.py +4 -0
  80. agents/tracing/processor_interface.py +84 -11
  81. agents/tracing/processors.py +65 -54
  82. agents/tracing/provider.py +64 -7
  83. agents/tracing/spans.py +105 -0
  84. agents/tracing/traces.py +116 -16
  85. agents/usage.py +134 -12
  86. agents/util/_json.py +19 -1
  87. agents/util/_transforms.py +12 -2
  88. agents/voice/input.py +5 -4
  89. agents/voice/models/openai_stt.py +17 -9
  90. agents/voice/pipeline.py +2 -0
  91. agents/voice/pipeline_config.py +4 -0
  92. {openai_agents-0.2.8.dist-info → openai_agents-0.6.8.dist-info}/METADATA +44 -19
  93. openai_agents-0.6.8.dist-info/RECORD +134 -0
  94. {openai_agents-0.2.8.dist-info → openai_agents-0.6.8.dist-info}/WHEEL +1 -1
  95. openai_agents-0.2.8.dist-info/RECORD +0 -103
  96. {openai_agents-0.2.8.dist-info → openai_agents-0.6.8.dist-info}/licenses/LICENSE +0 -0
agents/mcp/util.py CHANGED
@@ -1,8 +1,9 @@
 import functools
 import json
 from dataclasses import dataclass
-from typing import TYPE_CHECKING, Any, Callable, Optional, Union
+from typing import TYPE_CHECKING, Any, Callable, Optional, Protocol, Union
 
+import httpx
 from typing_extensions import NotRequired, TypedDict
 
 from .. import _debug
@@ -21,6 +22,21 @@ if TYPE_CHECKING:
     from .server import MCPServer
 
 
+class HttpClientFactory(Protocol):
+    """Protocol for HTTP client factory functions.
+
+    This interface matches the MCP SDK's McpHttpClientFactory but is defined locally
+    to avoid accessing internal MCP SDK modules.
+    """
+
+    def __call__(
+        self,
+        headers: Optional[dict[str, str]] = None,
+        timeout: Optional[httpx.Timeout] = None,
+        auth: Optional[httpx.Auth] = None,
+    ) -> httpx.AsyncClient: ...
+
+
 @dataclass
 class ToolFilterContext:
     """Context information available to tool filter functions."""
@@ -185,9 +201,14 @@ class MCPUtil:
 
         try:
            result = await server.call_tool(tool.name, json_data)
+        except UserError:
+            # Re-raise UserError as-is (it already has a good message)
+            raise
         except Exception as e:
-            logger.error(f"Error invoking MCP tool {tool.name}: {e}")
-            raise AgentsException(f"Error invoking MCP tool {tool.name}: {e}") from e
+            logger.error(f"Error invoking MCP tool {tool.name} on server '{server.name}': {e}")
+            raise AgentsException(
+                f"Error invoking MCP tool {tool.name} on server '{server.name}': {e}"
+            ) from e
 
         if _debug.DONT_LOG_TOOL_DATA:
             logger.debug(f"MCP tool {tool.name} completed.")
agents/memory/__init__.py CHANGED
@@ -1,3 +1,23 @@
-from .session import Session, SQLiteSession
+from .openai_conversations_session import OpenAIConversationsSession
+from .openai_responses_compaction_session import OpenAIResponsesCompactionSession
+from .session import (
+    OpenAIResponsesCompactionArgs,
+    OpenAIResponsesCompactionAwareSession,
+    Session,
+    SessionABC,
+    is_openai_responses_compaction_aware_session,
+)
+from .sqlite_session import SQLiteSession
+from .util import SessionInputCallback
 
-__all__ = ["Session", "SQLiteSession"]
+__all__ = [
+    "Session",
+    "SessionABC",
+    "SessionInputCallback",
+    "SQLiteSession",
+    "OpenAIConversationsSession",
+    "OpenAIResponsesCompactionSession",
+    "OpenAIResponsesCompactionArgs",
+    "OpenAIResponsesCompactionAwareSession",
+    "is_openai_responses_compaction_aware_session",
+]
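
After this change the memory package re-exports everything session-related from one place; assuming openai-agents 0.6.8 is installed, the new surface can be imported directly:

from agents.memory import (
    OpenAIConversationsSession,
    OpenAIResponsesCompactionSession,
    Session,
    SessionABC,
    SessionInputCallback,
    SQLiteSession,
)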
agents/memory/openai_conversations_session.py ADDED
@@ -0,0 +1,91 @@
+from __future__ import annotations
+
+from openai import AsyncOpenAI
+
+from agents.models._openai_shared import get_default_openai_client
+
+from ..items import TResponseInputItem
+from .session import SessionABC
+
+
+async def start_openai_conversations_session(openai_client: AsyncOpenAI | None = None) -> str:
+    _maybe_openai_client = openai_client
+    if openai_client is None:
+        _maybe_openai_client = get_default_openai_client() or AsyncOpenAI()
+    # this can never be None here
+    _openai_client: AsyncOpenAI = _maybe_openai_client  # type: ignore [assignment]
+
+    response = await _openai_client.conversations.create(items=[])
+    return response.id
+
+
+class OpenAIConversationsSession(SessionABC):
+    def __init__(
+        self,
+        *,
+        conversation_id: str | None = None,
+        openai_client: AsyncOpenAI | None = None,
+    ):
+        self._session_id: str | None = conversation_id
+        _openai_client = openai_client
+        if _openai_client is None:
+            _openai_client = get_default_openai_client() or AsyncOpenAI()
+        # this can never be None here
+        self._openai_client: AsyncOpenAI = _openai_client
+
+    async def _get_session_id(self) -> str:
+        if self._session_id is None:
+            self._session_id = await start_openai_conversations_session(self._openai_client)
+        return self._session_id
+
+    async def _clear_session_id(self) -> None:
+        self._session_id = None
+
+    async def get_items(self, limit: int | None = None) -> list[TResponseInputItem]:
+        session_id = await self._get_session_id()
+        all_items = []
+        if limit is None:
+            async for item in self._openai_client.conversations.items.list(
+                conversation_id=session_id,
+                order="asc",
+            ):
+                # calling model_dump() to make this serializable
+                all_items.append(item.model_dump(exclude_unset=True))
+        else:
+            async for item in self._openai_client.conversations.items.list(
+                conversation_id=session_id,
+                limit=limit,
+                order="desc",
+            ):
+                # calling model_dump() to make this serializable
+                all_items.append(item.model_dump(exclude_unset=True))
+                if limit is not None and len(all_items) >= limit:
+                    break
+            all_items.reverse()
+
+        return all_items  # type: ignore
+
+    async def add_items(self, items: list[TResponseInputItem]) -> None:
+        session_id = await self._get_session_id()
+        await self._openai_client.conversations.items.create(
+            conversation_id=session_id,
+            items=items,
+        )
+
+    async def pop_item(self) -> TResponseInputItem | None:
+        session_id = await self._get_session_id()
+        items = await self.get_items(limit=1)
+        if not items:
+            return None
+        item_id: str = str(items[0]["id"])  # type: ignore [typeddict-item]
+        await self._openai_client.conversations.items.delete(
+            conversation_id=session_id, item_id=item_id
+        )
+        return items[0]
+
+    async def clear_session(self) -> None:
+        session_id = await self._get_session_id()
+        await self._openai_client.conversations.delete(
+            conversation_id=session_id,
+        )
+        await self._clear_session_id()
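
OpenAIConversationsSession keeps history in an OpenAI-hosted Conversation rather than locally; omitting conversation_id creates one lazily on first use. A hedged usage sketch, assuming the usual Runner.run(..., session=...) entry point from the agents package; the agent and prompts are illustrative:

import asyncio

from agents import Agent, Runner
from agents.memory import OpenAIConversationsSession

async def main() -> None:
    session = OpenAIConversationsSession()  # new server-side conversation on first use
    agent = Agent(name="Assistant", instructions="Reply concisely.")

    result = await Runner.run(agent, "What city is the Golden Gate Bridge in?", session=session)
    print(result.final_output)

    # A second run against the same session continues the same
    # server-side conversation.
    result = await Runner.run(agent, "What state is it in?", session=session)
    print(result.final_output)

asyncio.run(main())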
agents/memory/openai_responses_compaction_session.py ADDED
@@ -0,0 +1,249 @@
+from __future__ import annotations
+
+import logging
+from typing import TYPE_CHECKING, Any, Callable
+
+from openai import AsyncOpenAI
+
+from ..models._openai_shared import get_default_openai_client
+from .openai_conversations_session import OpenAIConversationsSession
+from .session import (
+    OpenAIResponsesCompactionArgs,
+    OpenAIResponsesCompactionAwareSession,
+    SessionABC,
+)
+
+if TYPE_CHECKING:
+    from ..items import TResponseInputItem
+    from .session import Session
+
+logger = logging.getLogger("openai-agents.openai.compaction")
+
+DEFAULT_COMPACTION_THRESHOLD = 10
+
+
+def select_compaction_candidate_items(
+    items: list[TResponseInputItem],
+) -> list[TResponseInputItem]:
+    """Select compaction candidate items.
+
+    Excludes user messages and compaction items.
+    """
+
+    def _is_user_message(item: TResponseInputItem) -> bool:
+        if not isinstance(item, dict):
+            return False
+        if item.get("type") == "message":
+            return item.get("role") == "user"
+        return item.get("role") == "user" and "content" in item
+
+    return [
+        item
+        for item in items
+        if not (
+            _is_user_message(item) or (isinstance(item, dict) and item.get("type") == "compaction")
+        )
+    ]
+
+
+def default_should_trigger_compaction(context: dict[str, Any]) -> bool:
+    """Default decision: compact when >= 10 candidate items exist."""
+    return len(context["compaction_candidate_items"]) >= DEFAULT_COMPACTION_THRESHOLD
+
+
+def is_openai_model_name(model: str) -> bool:
+    """Validate that a model name follows OpenAI conventions."""
+    trimmed = model.strip()
+    if not trimmed:
+        return False
+
+    # Handle fine-tuned models: ft:gpt-4.1:org:proj:suffix
+    without_ft_prefix = trimmed[3:] if trimmed.startswith("ft:") else trimmed
+    root = without_ft_prefix.split(":", 1)[0]
+
+    # Allow gpt-* and o* models
+    if root.startswith("gpt-"):
+        return True
+    if root.startswith("o") and root[1:2].isdigit():
+        return True
+
+    return False
+
+
+class OpenAIResponsesCompactionSession(SessionABC, OpenAIResponsesCompactionAwareSession):
+    """Session decorator that triggers responses.compact when stored history grows.
+
+    Works with OpenAI Responses API models only. Wraps any Session (except
+    OpenAIConversationsSession) and automatically calls the OpenAI responses.compact
+    API after each turn when the decision hook returns True.
+    """
+
+    def __init__(
+        self,
+        session_id: str,
+        underlying_session: Session,
+        *,
+        client: AsyncOpenAI | None = None,
+        model: str = "gpt-4.1",
+        should_trigger_compaction: Callable[[dict[str, Any]], bool] | None = None,
+    ):
+        """Initialize the compaction session.
+
+        Args:
+            session_id: Identifier for this session.
+            underlying_session: Session store that holds the compacted history. Cannot be
+                OpenAIConversationsSession.
+            client: OpenAI client for responses.compact API calls. Defaults to
+                get_default_openai_client() or a new AsyncOpenAI().
+            model: Model to use for responses.compact. Defaults to "gpt-4.1". Must be an
+                OpenAI model name (gpt-*, o*, or ft:gpt-*).
+            should_trigger_compaction: Custom decision hook. Defaults to triggering when
+                10+ compaction candidates exist.
+        """
+        if isinstance(underlying_session, OpenAIConversationsSession):
+            raise ValueError(
+                "OpenAIResponsesCompactionSession cannot wrap OpenAIConversationsSession "
+                "because it manages its own history on the server."
+            )
+
+        if not is_openai_model_name(model):
+            raise ValueError(f"Unsupported model for OpenAI responses compaction: {model}")
+
+        self.session_id = session_id
+        self.underlying_session = underlying_session
+        self._client = client
+        self.model = model
+        self.should_trigger_compaction = (
+            should_trigger_compaction or default_should_trigger_compaction
+        )
+
+        # cache for incremental candidate tracking
+        self._compaction_candidate_items: list[TResponseInputItem] | None = None
+        self._session_items: list[TResponseInputItem] | None = None
+        self._response_id: str | None = None
+        self._deferred_response_id: str | None = None
+
+    @property
+    def client(self) -> AsyncOpenAI:
+        if self._client is None:
+            self._client = get_default_openai_client() or AsyncOpenAI()
+        return self._client
+
+    async def run_compaction(self, args: OpenAIResponsesCompactionArgs | None = None) -> None:
+        """Run compaction using the responses.compact API."""
+        if args and args.get("response_id"):
+            self._response_id = args["response_id"]
+
+        if not self._response_id:
+            raise ValueError(
+                "OpenAIResponsesCompactionSession.run_compaction requires a response_id"
+            )
+
+        compaction_candidate_items, session_items = await self._ensure_compaction_candidates()
+
+        force = args.get("force", False) if args else False
+        should_compact = force or self.should_trigger_compaction(
+            {
+                "response_id": self._response_id,
+                "compaction_candidate_items": compaction_candidate_items,
+                "session_items": session_items,
+            }
+        )
+
+        if not should_compact:
+            logger.debug(f"skip: decision hook declined compaction for {self._response_id}")
+            return
+
+        self._deferred_response_id = None
+        logger.debug(f"compact: start for {self._response_id} using {self.model}")
+
+        compacted = await self.client.responses.compact(
+            previous_response_id=self._response_id,
+            model=self.model,
+        )
+
+        await self.underlying_session.clear_session()
+        output_items: list[TResponseInputItem] = []
+        if compacted.output:
+            for item in compacted.output:
+                if isinstance(item, dict):
+                    output_items.append(item)
+                else:
+                    # Suppress Pydantic literal warnings: responses.compact can return
+                    # user-style input_text content inside ResponseOutputMessage.
+                    output_items.append(
+                        item.model_dump(exclude_unset=True, warnings=False)  # type: ignore
+                    )
+
+        if output_items:
+            await self.underlying_session.add_items(output_items)
+
+        self._compaction_candidate_items = select_compaction_candidate_items(output_items)
+        self._session_items = output_items
+
+        logger.debug(
+            f"compact: done for {self._response_id} "
+            f"(output={len(output_items)}, candidates={len(self._compaction_candidate_items)})"
+        )
+
+    async def get_items(self, limit: int | None = None) -> list[TResponseInputItem]:
+        return await self.underlying_session.get_items(limit)
+
+    async def _defer_compaction(self, response_id: str) -> None:
+        if self._deferred_response_id is not None:
+            return
+        compaction_candidate_items, session_items = await self._ensure_compaction_candidates()
+        should_compact = self.should_trigger_compaction(
+            {
+                "response_id": response_id,
+                "compaction_candidate_items": compaction_candidate_items,
+                "session_items": session_items,
+            }
+        )
+        if should_compact:
+            self._deferred_response_id = response_id
+
+    def _get_deferred_compaction_response_id(self) -> str | None:
+        return self._deferred_response_id
+
+    def _clear_deferred_compaction(self) -> None:
+        self._deferred_response_id = None
+
+    async def add_items(self, items: list[TResponseInputItem]) -> None:
+        await self.underlying_session.add_items(items)
+        if self._compaction_candidate_items is not None:
+            new_candidates = select_compaction_candidate_items(items)
+            if new_candidates:
+                self._compaction_candidate_items.extend(new_candidates)
+        if self._session_items is not None:
+            self._session_items.extend(items)
+
+    async def pop_item(self) -> TResponseInputItem | None:
+        popped = await self.underlying_session.pop_item()
+        if popped:
+            self._compaction_candidate_items = None
+            self._session_items = None
+        return popped
+
+    async def clear_session(self) -> None:
+        await self.underlying_session.clear_session()
+        self._compaction_candidate_items = []
+        self._session_items = []
+        self._deferred_response_id = None
+
+    async def _ensure_compaction_candidates(
+        self,
+    ) -> tuple[list[TResponseInputItem], list[TResponseInputItem]]:
+        """Lazy-load and cache compaction candidates."""
+        if self._compaction_candidate_items is not None and self._session_items is not None:
+            return (self._compaction_candidate_items[:], self._session_items[:])
+
+        history = await self.underlying_session.get_items()
+        candidates = select_compaction_candidate_items(history)
+        self._compaction_candidate_items = candidates
+        self._session_items = history
+
+        logger.debug(
+            f"candidates: initialized (history={len(history)}, candidates={len(candidates)})"
+        )
+        return (candidates[:], history[:])
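
A hedged sketch of wiring the compaction session up: it wraps a local SQLiteSession and swaps in a lower-threshold decision hook. The database path, threshold, and manual run_compaction call are illustrative; in normal operation the runner drives compaction through the deferred-compaction hooks shown above:

from agents.memory import OpenAIResponsesCompactionSession, SQLiteSession

def compact_after_five(context: dict) -> bool:
    # Same shape as default_should_trigger_compaction, lower threshold.
    return len(context["compaction_candidate_items"]) >= 5

session = OpenAIResponsesCompactionSession(
    session_id="support-thread-1",
    underlying_session=SQLiteSession("support-thread-1", "conversations.db"),
    model="gpt-4.1",
    should_trigger_compaction=compact_after_five,
)

# After a turn, compaction can also be requested explicitly with the id of
# the latest model response (force=True bypasses the decision hook):
#   await session.run_compaction({"response_id": last_response_id, "force": True})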